summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.cmake.conf2
-rw-r--r--coin/module_config.yaml1
-rw-r--r--dependencies.yaml12
-rw-r--r--examples/multimedia/audiosource/CMakeLists.txt1
-rw-r--r--examples/multimedia/audiosource/Info.plist.in44
-rw-r--r--examples/multimedia/devices/CMakeLists.txt4
-rw-r--r--examples/multimedia/video/doc/src/qmlvideo.qdoc2
-rw-r--r--examples/multimedia/video/qmlvideo/CMakeLists.txt1
-rw-r--r--examples/multimedia/video/qmlvideo/qml/qmlvideo/FileBrowser.qml419
-rw-r--r--examples/multimedia/video/qmlvideo/qml/qmlvideo/main.qml43
-rw-r--r--examples/multimedia/video/qmlvideo/qmlvideo.pro1
-rw-r--r--examples/multimedia/video/recorder/AudioInputSelect.qml2
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java25
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java78
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java16
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java28
-rw-r--r--src/multimedia/audio/qaudiobuffer.h2
-rw-r--r--src/multimedia/audio/qaudiodevice.h2
-rw-r--r--src/multimedia/audio/qaudiohelpers.cpp2
-rw-r--r--src/multimedia/camera/qcameradevice.cpp3
-rw-r--r--src/multimedia/doc/snippets/multimedia-snippets/audio.cpp8
-rw-r--r--src/multimedia/doc/snippets/multimedia-snippets/camera.cpp2
-rw-r--r--src/multimedia/platform/android/audio/qandroidaudiodecoder.cpp156
-rw-r--r--src/multimedia/platform/android/audio/qandroidaudiodecoder_p.h12
-rw-r--r--src/multimedia/platform/android/audio/qandroidaudiosink.cpp8
-rw-r--r--src/multimedia/platform/android/audio/qandroidaudiosource.cpp2
-rw-r--r--src/multimedia/platform/android/audio/qopenslesengine.cpp52
-rw-r--r--src/multimedia/platform/android/audio/qopenslesengine_p.h3
-rw-r--r--src/multimedia/platform/android/common/qandroidvideooutput.cpp37
-rw-r--r--src/multimedia/platform/android/common/qandroidvideooutput_p.h4
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamera.cpp10
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamera_p.h1
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp25
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h2
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp47
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h3
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp4
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp60
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h4
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp11
-rw-r--r--src/multimedia/platform/android/qandroidformatsinfo.cpp12
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidcamera.cpp6
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp44
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h2
-rw-r--r--src/multimedia/platform/darwin/camera/avfcamera.mm75
-rw-r--r--src/multimedia/platform/darwin/camera/avfmediaencoder.mm15
-rw-r--r--src/multimedia/platform/darwin/mediaplayer/avfmediaplayer.mm13
-rw-r--r--src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp1
-rw-r--r--src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp34
-rw-r--r--src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h1
-rw-r--r--src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp1
-rw-r--r--src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp8
-rw-r--r--src/multimedia/platform/gstreamer/qgstreamermediadevices.cpp98
-rw-r--r--src/multimedia/platform/gstreamer/qgstreamermediadevices_p.h10
-rw-r--r--src/multimedia/platform/pulseaudio/qpulseaudiosink.cpp36
-rw-r--r--src/multimedia/platform/pulseaudio/qpulseaudiosource.cpp22
-rw-r--r--src/multimedia/platform/qnx/common/windowgrabber.cpp8
-rw-r--r--src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp5
-rw-r--r--src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp13
-rw-r--r--src/multimedia/platform/windows/common/qwindowsmfdefs.cpp1
-rw-r--r--src/multimedia/platform/windows/common/qwindowsmfdefs_p.h2
-rw-r--r--src/multimedia/platform/windows/evr/evrcustompresenter.cpp36
-rw-r--r--src/multimedia/platform/windows/evr/evrcustompresenter_p.h3
-rw-r--r--src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp7
-rw-r--r--src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h3
-rw-r--r--src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp2
-rw-r--r--src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp9
-rw-r--r--src/multimedia/platform/windows/player/mfplayersession.cpp97
-rw-r--r--src/multimedia/platform/windows/player/mfplayersession_p.h3
-rw-r--r--src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp21
-rw-r--r--src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h3
-rw-r--r--src/multimedia/platform/windows/qwindowsmediadevices.cpp232
-rw-r--r--src/multimedia/playback/qmediaplayer.cpp9
-rw-r--r--src/multimedia/playback/qmediaplayer.h3
-rw-r--r--src/multimedia/qmediaformat.h8
-rw-r--r--src/multimedia/qmediatimerange.h2
-rw-r--r--src/multimedia/recording/qmediarecorder.cpp4
-rw-r--r--src/multimedia/video/qvideoframe.h2
-rw-r--r--src/multimedia/video/qvideoframeformat.h2
-rw-r--r--src/multimedia/video/qvideooutputorientationhandler.cpp4
-rw-r--r--src/multimedia/video/qvideosink.cpp1
-rw-r--r--src/multimediaquick/qquickvideooutput.cpp2
-rw-r--r--src/multimediaquick/qsgvideonode_p.cpp20
-rw-r--r--src/multimediaquick/qsgvideonode_p.h2
-rw-r--r--src/multimediaquick/qtmultimediaquicktypes_p.h2
-rw-r--r--src/multimediawidgets/qvideowidget.cpp13
-rw-r--r--tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp40
-rw-r--r--tests/auto/integration/qaudiosource/tst_qaudiosource.cpp4
-rw-r--r--tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp7
89 files changed, 1122 insertions, 965 deletions
diff --git a/.cmake.conf b/.cmake.conf
index ac3b6f4ad..e62fae5ef 100644
--- a/.cmake.conf
+++ b/.cmake.conf
@@ -1,2 +1,2 @@
-set(QT_REPO_MODULE_VERSION "6.2.4")
+set(QT_REPO_MODULE_VERSION "6.2.8")
set(QT_REPO_MODULE_PRERELEASE_VERSION_SEGMENT "")
diff --git a/coin/module_config.yaml b/coin/module_config.yaml
index 7733000cb..c118553ca 100644
--- a/coin/module_config.yaml
+++ b/coin/module_config.yaml
@@ -1,4 +1,5 @@
version: 2
+alias: qtmultimedia
accept_configuration:
condition: property
property: features
diff --git a/dependencies.yaml b/dependencies.yaml
index b5b7b70f0..a253e8b89 100644
--- a/dependencies.yaml
+++ b/dependencies.yaml
@@ -1,10 +1,10 @@
dependencies:
- ../qtbase:
- ref: 7a5b98e0fefcbf061cb9e3d9edecc75fff364519
+ ../tqtc-qtbase:
+ ref: 4c1c38dede55565afa846685b3e19cf8f1cfed0c
required: true
- ../qtdeclarative:
- ref: 39bc38d85abef1d7a471a335cdec1773b19aa6e8
+ ../tqtc-qtdeclarative:
+ ref: 9919f58fce6329a233ca885188d0aba5f484e546
required: false
- ../qtshadertools:
- ref: fe2eb51d2f0202647f96afbefe3b4755f9084cc4
+ ../tqtc-qtshadertools:
+ ref: 754297822b244b98494e5ab09ca024a2e7acb54a
required: true
diff --git a/examples/multimedia/audiosource/CMakeLists.txt b/examples/multimedia/audiosource/CMakeLists.txt
index 692a24564..1c9259c91 100644
--- a/examples/multimedia/audiosource/CMakeLists.txt
+++ b/examples/multimedia/audiosource/CMakeLists.txt
@@ -25,6 +25,7 @@ qt_add_executable(audiosource
set_target_properties(audiosource PROPERTIES
WIN32_EXECUTABLE TRUE
MACOSX_BUNDLE TRUE
+ MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/Info.plist.in
)
# special case begin
target_include_directories(audiosource PUBLIC
diff --git a/examples/multimedia/audiosource/Info.plist.in b/examples/multimedia/audiosource/Info.plist.in
new file mode 100644
index 000000000..43b966509
--- /dev/null
+++ b/examples/multimedia/audiosource/Info.plist.in
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+
+ <key>CFBundleName</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${MACOSX_BUNDLE_GUI_IDENTIFIER}</string>
+ <key>CFBundleExecutable</key>
+ <string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+
+ <key>CFBundleVersion</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_VERSION}</string>
+ <key>CFBundleShortVersionString</key>
+ <string>${MACOSX_BUNDLE_SHORT_VERSION_STRING}</string>
+ <key>CFBundleLongVersionString</key>
+ <string>${MACOSX_BUNDLE_LONG_VERSION_STRING}</string>
+
+ <key>LSMinimumSystemVersion</key>
+ <string>${CMAKE_OSX_DEPLOYMENT_TARGET}</string>
+
+ <key>CFBundleGetInfoString</key>
+ <string>${MACOSX_BUNDLE_INFO_STRING}</string>
+ <key>NSHumanReadableCopyright</key>
+ <string>${MACOSX_BUNDLE_COPYRIGHT}</string>
+
+ <key>CFBundleIconFile</key>
+ <string>${MACOSX_BUNDLE_ICON_FILE}</string>
+
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+
+ <key>NSSupportsAutomaticGraphicsSwitching</key>
+ <true/>
+</dict>
+</plist>
diff --git a/examples/multimedia/devices/CMakeLists.txt b/examples/multimedia/devices/CMakeLists.txt
index d75b4f23f..c7e038bb5 100644
--- a/examples/multimedia/devices/CMakeLists.txt
+++ b/examples/multimedia/devices/CMakeLists.txt
@@ -3,6 +3,10 @@
cmake_minimum_required(VERSION 3.16)
project(devices LANGUAGES CXX)
+if(ANDROID OR IOS)
+ message(FATAL_ERROR "This is a commandline tool that is not supported on mobile platforms")
+endif()
+
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
diff --git a/examples/multimedia/video/doc/src/qmlvideo.qdoc b/examples/multimedia/video/doc/src/qmlvideo.qdoc
index b8ab25d03..e3edf39e7 100644
--- a/examples/multimedia/video/doc/src/qmlvideo.qdoc
+++ b/examples/multimedia/video/doc/src/qmlvideo.qdoc
@@ -54,7 +54,7 @@ The \c main.qml file creates a UI which includes the following items:
\list
\li Two \c Button instances, each of which displays a filename, and can be
- used to launch a \c FileBrowser.
+ used to launch a \c FileDialog.
\li An exit \c Button.
\li A \c SceneSelectionPanel, which is a flickable list displaying the
available scenes.
diff --git a/examples/multimedia/video/qmlvideo/CMakeLists.txt b/examples/multimedia/video/qmlvideo/CMakeLists.txt
index c3eff7876..2eb9bc82d 100644
--- a/examples/multimedia/video/qmlvideo/CMakeLists.txt
+++ b/examples/multimedia/video/qmlvideo/CMakeLists.txt
@@ -79,7 +79,6 @@ set(qmlvideo_resource_files
"qml/qmlvideo/CameraSpin.qml"
"qml/qmlvideo/Content.qml"
"qml/qmlvideo/ErrorDialog.qml"
- "qml/qmlvideo/FileBrowser.qml"
"qml/qmlvideo/Scene.qml"
"qml/qmlvideo/SceneBasic.qml"
"qml/qmlvideo/SceneDrag.qml"
diff --git a/examples/multimedia/video/qmlvideo/qml/qmlvideo/FileBrowser.qml b/examples/multimedia/video/qmlvideo/qml/qmlvideo/FileBrowser.qml
deleted file mode 100644
index 31684368e..000000000
--- a/examples/multimedia/video/qmlvideo/qml/qmlvideo/FileBrowser.qml
+++ /dev/null
@@ -1,419 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Mobility Components.
-**
-** $QT_BEGIN_LICENSE:BSD$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** BSD License Usage
-** Alternatively, you may use this file under the terms of the BSD license
-** as follows:
-**
-** "Redistribution and use in source and binary forms, with or without
-** modification, are permitted provided that the following conditions are
-** met:
-** * Redistributions of source code must retain the above copyright
-** notice, this list of conditions and the following disclaimer.
-** * Redistributions in binary form must reproduce the above copyright
-** notice, this list of conditions and the following disclaimer in
-** the documentation and/or other materials provided with the
-** distribution.
-** * Neither the name of The Qt Company Ltd nor the names of its
-** contributors may be used to endorse or promote products derived
-** from this software without specific prior written permission.
-**
-**
-** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-import QtQuick
-import Qt.labs.folderlistmodel
-
-Rectangle {
- id: fileBrowser
- color: "transparent"
-
- property string folder
-
- property int itemHeight: Math.min(parent.width, parent.height) / 15
- property int buttonHeight: Math.min(parent.width, parent.height) / 12
-
- signal fileSelected(string file)
-
- function selectFile(file) {
- if (file !== "") {
- folder = loader.item.folders.folder
- fileBrowser.fileSelected(file)
- }
- loader.sourceComponent = undefined
- }
-
- Loader {
- id: loader
- }
-
- function show() {
- loader.sourceComponent = fileBrowserComponent
- loader.item.parent = fileBrowser
- loader.item.anchors.fill = fileBrowser
- loader.item.folder = fileBrowser.folder
- }
-
- Component {
- id: fileBrowserComponent
-
- Rectangle {
- id: root
- color: "black"
- property bool showFocusHighlight: false
- property variant folders: folders1
- property variant view: view1
- property alias folder: folders1.folder
- property color textColor: "white"
-
- FolderListModel {
- id: folders1
- folder: folder
- }
-
- FolderListModel {
- id: folders2
- folder: folder
- }
-
- SystemPalette {
- id: palette
- }
-
- Component {
- id: folderDelegate
-
- Rectangle {
- id: wrapper
- function launch() {
- var path = "file://";
- if (filePath.length > 2 && filePath[1] === ':') // Windows drive logic, see QUrl::fromLocalFile()
- path += '/';
- path += filePath;
- if (folders.isFolder(index))
- down(path);
- else
- fileBrowser.selectFile(path)
- }
- width: root.width
- height: folderImage.height
- color: "transparent"
-
- Rectangle {
- id: highlight
- visible: false
- anchors.fill: parent
- anchors.leftMargin: 5
- anchors.rightMargin: 5
- color: "#212121"
- }
-
- Item {
- id: folderImage
- width: itemHeight
- height: itemHeight
- Image {
- id: folderPicture
- source: "qrc:/folder.png"
- width: itemHeight * 0.9
- height: itemHeight * 0.9
- anchors.left: parent.left
- anchors.margins: 5
- visible: folders.isFolder(index)
- }
- }
-
- Text {
- id: nameText
- anchors.fill: parent;
- verticalAlignment: Text.AlignVCenter
- text: fileName
- anchors.leftMargin: itemHeight + 10
- color: (wrapper.ListView.isCurrentItem && root.showFocusHighlight) ? palette.highlightedText : textColor
- elide: Text.ElideRight
- }
-
- MouseArea {
- id: mouseRegion
- anchors.fill: parent
- onPressed: {
- root.showFocusHighlight = false;
- wrapper.ListView.view.currentIndex = index;
- }
- onClicked: { if (folders === wrapper.ListView.view.model) launch() }
- }
-
- states: [
- State {
- name: "pressed"
- when: mouseRegion.pressed
- PropertyChanges { target: highlight; visible: true }
- PropertyChanges { target: nameText; color: palette.highlightedText }
- }
- ]
- }
- }
-
- ListView {
- id: view1
- anchors.top: titleBar.bottom
- anchors.bottom: cancelButton.top
- width: parent.width
- model: folders1
- delegate: folderDelegate
- highlight: Rectangle {
- color: "#212121"
- visible: root.showFocusHighlight && view1.count != 0
- width: view1.currentItem == null ? 0 : view1.currentItem.width
- }
- highlightMoveVelocity: 1000
- pressDelay: 100
- focus: true
- state: "current"
- states: [
- State {
- name: "current"
- PropertyChanges { target: view1; x: 0 }
- },
- State {
- name: "exitLeft"
- PropertyChanges { target: view1; x: -root.width }
- },
- State {
- name: "exitRight"
- PropertyChanges { target: view1; x: root.width }
- }
- ]
- transitions: [
- Transition {
- to: "current"
- SequentialAnimation {
- NumberAnimation { properties: "x"; duration: 250 }
- }
- },
- Transition {
- NumberAnimation { properties: "x"; duration: 250 }
- NumberAnimation { properties: "x"; duration: 250 }
- }
- ]
- Keys.onPressed: root.keyPressed(event.key)
- }
-
- ListView {
- id: view2
- anchors.top: titleBar.bottom
- anchors.bottom: parent.bottom
- x: parent.width
- width: parent.width
- model: folders2
- delegate: folderDelegate
- highlight: Rectangle {
- color: "#212121"
- visible: root.showFocusHighlight && view2.count != 0
- width: view1.currentItem == null ? 0 : view1.currentItem.width
- }
- highlightMoveVelocity: 1000
- pressDelay: 100
- states: [
- State {
- name: "current"
- PropertyChanges { target: view2; x: 0 }
- },
- State {
- name: "exitLeft"
- PropertyChanges { target: view2; x: -root.width }
- },
- State {
- name: "exitRight"
- PropertyChanges { target: view2; x: root.width }
- }
- ]
- transitions: [
- Transition {
- to: "current"
- SequentialAnimation {
- NumberAnimation { properties: "x"; duration: 250 }
- }
- },
- Transition {
- NumberAnimation { properties: "x"; duration: 250 }
- }
- ]
- Keys.onPressed: root.keyPressed(event.key)
- }
-
- Rectangle {
- width: parent.width
- height: buttonHeight + 10
- anchors.bottom: parent.bottom
- color: "black"
- }
-
- Rectangle {
- id: cancelButton
- width: parent.width
- height: buttonHeight
- color: "#212121"
- anchors.bottom: parent.bottom
- anchors.left: parent.left
- anchors.right: parent.right
- anchors.margins: 5
- radius: buttonHeight / 15
-
- Text {
- anchors.fill: parent
- text: "Cancel"
- color: "white"
- horizontalAlignment: Text.AlignHCenter
- verticalAlignment: Text.AlignVCenter
- }
-
- MouseArea {
- anchors.fill: parent
- onClicked: fileBrowser.selectFile("")
- }
- }
-
- Keys.onPressed: {
- root.keyPressed(event.key);
- if (event.key === Qt.Key_Return || event.key === Qt.Key_Select || event.key === Qt.Key_Right) {
- view.currentItem.launch();
- event.accepted = true;
- } else if (event.key === Qt.Key_Left) {
- up();
- }
- }
-
-
- Rectangle {
- id: titleBar
- width: parent.width
- height: buttonHeight + 10
- anchors.top: parent.top
- color: "black"
-
- Rectangle {
- width: parent.width;
- height: buttonHeight
- color: "#212121"
- anchors.margins: 5
- anchors.top: parent.top
- anchors.left: parent.left
- anchors.right: parent.right
- radius: buttonHeight / 15
-
- Rectangle {
- id: upButton
- width: buttonHeight
- height: buttonHeight
- color: "transparent"
- Image {
- width: itemHeight
- height: itemHeight
- anchors.centerIn: parent
- source: "qrc:/up.png"
- }
- MouseArea { id: upRegion; anchors.centerIn: parent
- width: buttonHeight
- height: buttonHeight
- onClicked: up()
- }
- states: [
- State {
- name: "pressed"
- when: upRegion.pressed
- PropertyChanges { target: upButton; color: palette.highlight }
- }
- ]
- }
-
- Text {
- anchors.left: upButton.right; anchors.right: parent.right; height: parent.height
- anchors.leftMargin: 5; anchors.rightMargin: 5
- text: folders.folder
- color: "white"
- elide: Text.ElideLeft;
- horizontalAlignment: Text.AlignLeft;
- verticalAlignment: Text.AlignVCenter
- }
- }
- }
-
- function down(path) {
- if (folders == folders1) {
- view = view2
- folders = folders2;
- view1.state = "exitLeft";
- } else {
- view = view1
- folders = folders1;
- view2.state = "exitLeft";
- }
- view.x = root.width;
- view.state = "current";
- view.focus = true;
- folders.folder = path;
- }
-
- function up() {
- var path = folders.parentFolder;
- if (path.toString().length === 0 || path.toString() === 'file:')
- return;
- if (folders == folders1) {
- view = view2
- folders = folders2;
- view1.state = "exitRight";
- } else {
- view = view1
- folders = folders1;
- view2.state = "exitRight";
- }
- view.x = -root.width;
- view.state = "current";
- view.focus = true;
- folders.folder = path;
- }
-
- function keyPressed(key) {
- switch (key) {
- case Qt.Key_Up:
- case Qt.Key_Down:
- case Qt.Key_Left:
- case Qt.Key_Right:
- root.showFocusHighlight = true;
- break;
- default:
- // do nothing
- break;
- }
- }
- }
- }
-}
diff --git a/examples/multimedia/video/qmlvideo/qml/qmlvideo/main.qml b/examples/multimedia/video/qmlvideo/qml/qmlvideo/main.qml
index 4289f79f1..e8c200301 100644
--- a/examples/multimedia/video/qmlvideo/qml/qmlvideo/main.qml
+++ b/examples/multimedia/video/qmlvideo/qml/qmlvideo/main.qml
@@ -49,6 +49,7 @@
****************************************************************************/
import QtQuick
+import QtQuick.Dialogs
Rectangle {
id: root
@@ -114,7 +115,10 @@ Rectangle {
textColorSelected: "white"
height: d.buttonHeight
text: (root.source1 == "") ? "Select file 1" : root.source1
- onClicked: fileBrowser1.show()
+ onClicked: {
+ fileBrowser.setFirstSource = true
+ fileBrowser.open()
+ }
}
Button {
@@ -130,7 +134,10 @@ Rectangle {
textColorSelected: "white"
height: d.buttonHeight
text: (root.source2 == "") ? "Select file 2" : root.source2
- onClicked: fileBrowser2.show()
+ onClicked: {
+ fileBrowser.setFirstSource = false
+ fileBrowser.open()
+ }
}
Button {
@@ -237,26 +244,15 @@ Rectangle {
ignoreUnknownSignals: true
}
- FileBrowser {
- id: fileBrowser1
- anchors.fill: root
- onFolderChanged: fileBrowser2.folder = folder
- Component.onCompleted: fileSelected.connect(root.openFile1)
- }
-
- FileBrowser {
- id: fileBrowser2
- anchors.fill: root
- onFolderChanged: fileBrowser1.folder = folder
- Component.onCompleted: fileSelected.connect(root.openFile2)
- }
-
- function openFile1(path) {
- root.source1 = path
- }
-
- function openFile2(path) {
- root.source2 = path
+ FileDialog {
+ id: fileBrowser
+ property bool setFirstSource
+ onAccepted: {
+ if (setFirstSource)
+ root.source1 = currentFile
+ else
+ root.source2 = currentFile
+ }
}
ErrorDialog {
@@ -270,8 +266,7 @@ Rectangle {
// Called from main() once root properties have been set
function init() {
performanceLoader.init()
- fileBrowser1.folder = videoPath
- fileBrowser2.folder = videoPath
+ fileBrowser.currentFolder = videoPath
}
function qmlFramePainted() {
diff --git a/examples/multimedia/video/qmlvideo/qmlvideo.pro b/examples/multimedia/video/qmlvideo/qmlvideo.pro
index 7a6bbccff..b16368f7c 100644
--- a/examples/multimedia/video/qmlvideo/qmlvideo.pro
+++ b/examples/multimedia/video/qmlvideo/qmlvideo.pro
@@ -44,7 +44,6 @@ resources.files = \
qml/qmlvideo/CameraSpin.qml \
qml/qmlvideo/Content.qml \
qml/qmlvideo/ErrorDialog.qml \
- qml/qmlvideo/FileBrowser.qml \
qml/qmlvideo/Scene.qml \
qml/qmlvideo/SceneBasic.qml \
qml/qmlvideo/SceneDrag.qml \
diff --git a/examples/multimedia/video/recorder/AudioInputSelect.qml b/examples/multimedia/video/recorder/AudioInputSelect.qml
index bb79f2793..757eab98f 100644
--- a/examples/multimedia/video/recorder/AudioInputSelect.qml
+++ b/examples/multimedia/video/recorder/AudioInputSelect.qml
@@ -60,7 +60,7 @@ Row {
MediaDevices { id: mediaDevices }
- AudioInput { id: audioInput; muted: false }
+ AudioInput { id: audioInput; muted: !audioSwitch.checked }
Switch {
id: audioSwitch;
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
index ce5dd5008..753586f21 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
@@ -48,6 +48,7 @@ import java.io.FileInputStream;
import android.content.Context;
import android.media.MediaPlayer;
import android.media.MediaFormat;
+import android.media.PlaybackParams;
import android.media.AudioAttributes;
import android.media.TimedText;
import android.net.Uri;
@@ -759,4 +760,28 @@ public class QtAndroidMediaPlayer
Log.w(TAG, exception);
}
}
+
+ public boolean setPlaybackRate(float rate)
+ {
+ PlaybackParams playbackParams = mMediaPlayer.getPlaybackParams();
+ playbackParams.setSpeed(rate);
+ // According to discussion under the patch from QTBUG-61115: At least with DirectShow
+ // and GStreamer, it changes both speed and pitch. (...) need to be consistent
+ if (rate != 0.0)
+ playbackParams.setPitch(Math.abs(rate));
+
+ try {
+ mMediaPlayer.setPlaybackParams(playbackParams);
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Log.e(TAG, "Cannot set playback rate " + rate + " :" + e.toString());
+ return false;
+ }
+
+ if ((mState & State.Started) == 0 && mMediaPlayer.isPlaying()) {
+ setState(State.Started);
+ startProgressWatcher();
+ }
+
+ return true;
+ }
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index c2a8a0a51..62a2554cf 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -49,13 +49,26 @@ import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.util.Log;
public class QtAudioDeviceManager
{
+ private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private AudioRecord m_recorder = null;
+ static private AudioTrack m_streamPlayer = null;
+ static private Thread m_streamingThread = null;
+ static private boolean m_isStreaming = false;
+ static private final int m_sampleRate = 8000;
+ static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
@@ -252,7 +265,7 @@ public class QtAudioDeviceManager
setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
return true;
case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
- setAudioOutput(AudioManager.MODE_NORMAL, false, true);
+ setAudioOutput(AudioManager.STREAM_MUSIC, false, true);
return true;
case AudioDeviceInfo.TYPE_WIRED_HEADSET:
case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
@@ -262,6 +275,8 @@ public class QtAudioDeviceManager
// It doesn't work when WIRED HEADPHONES are connected
// Earpiece has the lowest priority and setWiredHeadsetOn(boolean)
// method to force it is deprecated
+ Log.w(TAG, "Built in Earpiece may not work when "
+ + "Wired Headphones are connected");
setAudioOutput(AudioManager.MODE_IN_CALL, false, false);
return true;
default:
@@ -284,4 +299,65 @@ public class QtAudioDeviceManager
m_audioManager.setSpeakerphoneOn(speakerOn);
}
+
+ private static void streamSound()
+ {
+ byte data[] = new byte[m_bufferSize];
+ while (m_isStreaming) {
+ m_recorder.read(data, 0, m_bufferSize);
+ m_streamPlayer.play();
+ m_streamPlayer.write(data, 0, m_bufferSize);
+ m_streamPlayer.stop();
+ }
+ }
+
+ private static void startSoundStreaming(int inputId, int outputId)
+ {
+ if (m_isStreaming)
+ stopSoundStreaming();
+
+ m_recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize);
+ m_streamPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize, AudioTrack.MODE_STREAM);
+
+ final AudioDeviceInfo[] devices = m_audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo deviceInfo : devices) {
+ if (deviceInfo.getId() == outputId) {
+ m_streamPlayer.setPreferredDevice(deviceInfo);
+ } else if (deviceInfo.getId() == inputId) {
+ m_recorder.setPreferredDevice(deviceInfo);
+ }
+ }
+
+ m_recorder.startRecording();
+ m_isStreaming = true;
+
+ m_streamingThread = new Thread(new Runnable() {
+ public void run() {
+ streamSound();
+ }
+ });
+
+ m_streamingThread.start();
+ }
+
+ private static void stopSoundStreaming()
+ {
+ if (!m_isStreaming)
+ return;
+
+ m_isStreaming = false;
+ try {
+ m_streamingThread.join();
+ m_streamingThread = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ m_recorder.stop();
+ m_recorder.release();
+ m_streamPlayer.release();
+ m_streamPlayer = null;
+ m_recorder = null;
+ }
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
index a37544916..7f5361e77 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
@@ -45,11 +45,11 @@ import android.hardware.Camera.CameraInfo;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.util.Log;
+import java.io.File;
+import java.io.FileOutputStream;
import java.lang.Math;
import android.media.ExifInterface;
-import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
-import java.io.InputStream;
import java.lang.String;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
@@ -208,11 +208,16 @@ public class QtCameraListener implements Camera.ShutterCallback,
@Override
public void onPictureTaken(byte[] data, Camera camera)
{
+ File outputFile = null;
try {
- InputStream stream = new ByteArrayInputStream(data);
+ outputFile = File.createTempFile("pic_", ".jpg", QtMultimediaUtils.getCacheDirectory());
+ FileOutputStream out = new FileOutputStream(outputFile);
- ExifInterface exif = new ExifInterface(stream);
+ // we just want to read the exif...
+ BitmapFactory.decodeByteArray(data, 0, data.length)
+ .compress(Bitmap.CompressFormat.JPEG, 10, out);
+ ExifInterface exif = new ExifInterface(outputFile.getAbsolutePath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
@@ -260,6 +265,9 @@ public class QtCameraListener implements Camera.ShutterCallback,
} catch (Exception e) {
Log.w(TAG, "Error fixing bitmap orientation.");
e.printStackTrace();
+ } finally {
+ if (outputFile != null && outputFile.exists())
+ outputFile.delete();
}
notifyPictureCaptured(m_cameraId, data);
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
index 68f63067e..cae69e7e8 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
@@ -44,10 +44,14 @@ import android.content.Context;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.view.OrientationEventListener;
+import android.webkit.MimeTypeMap;
+import android.net.Uri;
+import android.content.ContentResolver;
import android.os.Environment;
import android.media.MediaScannerConnection;
import java.lang.String;
import java.io.File;
+import android.util.Log;
public class QtMultimediaUtils
{
@@ -72,6 +76,7 @@ public class QtMultimediaUtils
static private Context m_context = null;
static private OrientationListener m_orientationListener = null;
+ private static final String QtTAG = "Qt QtMultimediaUtils";
static public void setActivity(Activity qtMainActivity, Object qtActivityDelegate)
{
@@ -138,6 +143,8 @@ public class QtMultimediaUtils
MediaScannerConnection.scanFile(m_context, new String[] { file }, null, null);
}
+ static File getCacheDirectory() { return m_context.getCacheDir(); }
+
/*
The array of codecs is in the form:
c2.qti.vp9.decoder
@@ -153,4 +160,25 @@ public class QtMultimediaUtils
codecs[i] = codecInfoArray[i].getName();
return codecs;
}
+
+ public static String getMimeType(Context context, String url)
+ {
+ Uri parsedUri = Uri.parse(url);
+ String type = null;
+
+ try {
+ String scheme = parsedUri.getScheme();
+ if (scheme != null && scheme.contains("content")) {
+ ContentResolver cR = context.getContentResolver();
+ type = cR.getType(parsedUri);
+ } else {
+ String extension = MimeTypeMap.getFileExtensionFromUrl(url);
+ if (extension != null)
+ type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
+ }
+ } catch (Exception e) {
+ Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ return type;
+ }
}
diff --git a/src/multimedia/audio/qaudiobuffer.h b/src/multimedia/audio/qaudiobuffer.h
index dad011750..624c8aabe 100644
--- a/src/multimedia/audio/qaudiobuffer.h
+++ b/src/multimedia/audio/qaudiobuffer.h
@@ -161,7 +161,7 @@ public:
QAudioBuffer(QAudioBuffer &&other) noexcept = default;
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QAudioBuffer)
void swap(QAudioBuffer &other) noexcept
- { qSwap(d, other.d); }
+ { d.swap(other.d); }
bool isValid() const noexcept { return d != nullptr; };
diff --git a/src/multimedia/audio/qaudiodevice.h b/src/multimedia/audio/qaudiodevice.h
index f414af88f..f83b9375d 100644
--- a/src/multimedia/audio/qaudiodevice.h
+++ b/src/multimedia/audio/qaudiodevice.h
@@ -79,7 +79,7 @@ public:
QAudioDevice(QAudioDevice &&other) noexcept = default;
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QAudioDevice)
void swap(QAudioDevice &other) noexcept
- { qSwap(d, other.d); }
+ { d.swap(other.d); }
QAudioDevice& operator=(const QAudioDevice& other);
diff --git a/src/multimedia/audio/qaudiohelpers.cpp b/src/multimedia/audio/qaudiohelpers.cpp
index 1e7c4ac81..8efd33ebd 100644
--- a/src/multimedia/audio/qaudiohelpers.cpp
+++ b/src/multimedia/audio/qaudiohelpers.cpp
@@ -60,7 +60,7 @@ template<class T> struct signedVersion {};
template<> struct signedVersion<quint8>
{
using TS = qint8;
- enum {offset = 0x80};
+ static constexpr int offset = 0x80;
};
template<class T> void adjustUnsignedSamples(qreal factor, const void *src, void *dst, int samples)
diff --git a/src/multimedia/camera/qcameradevice.cpp b/src/multimedia/camera/qcameradevice.cpp
index 5aadd4b79..426f2a66e 100644
--- a/src/multimedia/camera/qcameradevice.cpp
+++ b/src/multimedia/camera/qcameradevice.cpp
@@ -236,8 +236,7 @@ bool QCameraFormat::operator==(const QCameraFormat &other) const
\snippet multimedia-snippets/camera.cpp Camera selection
You can also use QCameraDevice to get general information about a camera
- device such as description, physical position on the system, or camera sensor
- orientation.
+ device such as description and physical position on the system.
\snippet multimedia-snippets/camera.cpp Camera info
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp b/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
index 24045adab..701786282 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
@@ -156,7 +156,7 @@ void AudioOutputExample::setup()
format.setChannelCount(1);
format.setSampleFormat(QAudioFormat::UInt8);
- QAudioDevice info(QAudioDevice::defaultOutputDevice());
+ QAudioDevice info(QMediaDevices::defaultAudioOutput());
if (!info.isFormatSupported(format)) {
qWarning() << "Raw audio format not supported by backend, cannot play audio.";
return;
@@ -203,9 +203,9 @@ void AudioDeviceInfo()
//! [Setting audio format]
//! [Dumping audio formats]
- const auto deviceInfos = QMediaDevices::availableDevices(QAudioDevice::Output);
- for (const QAudioDevice &deviceInfo : deviceInfos)
- qDebug() << "Device: " << deviceInfo.description();
+ const auto devices = QMediaDevices::audioOutputs();
+ for (const QAudioDevice &device : devices)
+ qDebug() << "Device: " << device.description();
//! [Dumping audio formats]
}
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
index 7c7b5e5a0..26e3231f1 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
@@ -201,8 +201,6 @@ void camera_info()
qDebug() << "The camera is on the front face of the hardware system.";
else if (cameraDevice.position() == QCameraDevice::BackFace)
qDebug() << "The camera is on the back face of the hardware system.";
-
- qDebug() << "The camera sensor orientation is " << cameraDevice.orientation() << " degrees.";
//! [Camera info]
}
diff --git a/src/multimedia/platform/android/audio/qandroidaudiodecoder.cpp b/src/multimedia/platform/android/audio/qandroidaudiodecoder.cpp
index 7e3fe9a01..2a35a06eb 100644
--- a/src/multimedia/platform/android/audio/qandroidaudiodecoder.cpp
+++ b/src/multimedia/platform/android/audio/qandroidaudiodecoder.cpp
@@ -53,8 +53,7 @@
QT_BEGIN_NAMESPACE
-static const char tempFile[] = "encoded.tmp";
-static const char tempPath[] = "/storage/emulated/0/data/local/tmp/audiodecoder/";
+static const char tempFile[] = "encoded.wav";
constexpr int dequeueTimeout = 5000;
Q_LOGGING_CATEGORY(adLogger, "QAndroidAudioDecoder")
@@ -92,6 +91,20 @@ void Decoder::stop()
void Decoder::setSource(const QUrl &source)
{
+ const QJniObject path = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getMimeType",
+ "(Landroid/content/Context;Ljava/lang/String;)Ljava/lang/String;",
+ QNativeInterface::QAndroidApplication::context(),
+ QJniObject::fromString(source.path()).object());
+
+ const QString mime = path.isValid() ? path.toString() : "";
+
+ if (!mime.isEmpty() && !mime.contains("audio", Qt::CaseInsensitive)) {
+ m_formatError = tr("Cannot set source, invalid mime type for the provided source.");
+ return;
+ }
+
if (!m_extractor)
m_extractor = AMediaExtractor_new();
@@ -108,9 +121,9 @@ void Decoder::setSource(const QUrl &source)
}
if (fd < 0) {
- emit error(QAudioDecoder::ResourceError, tr("Invalid fileDescriptor for source."));
- return;
- }
+ emit error(QAudioDecoder::ResourceError, tr("Invalid fileDescriptor for source."));
+ return;
+ }
const int size = QFile(source.toString()).size();
media_status_t status = AMediaExtractor_setDataSourceFd(m_extractor, fd, 0,
size > 0 ? size : LONG_MAX);
@@ -121,7 +134,7 @@ void Decoder::setSource(const QUrl &source)
AMediaExtractor_delete(m_extractor);
m_extractor = nullptr;
}
- emit error(QAudioDecoder::ResourceError, tr("Setting source for Audio Decoder failed."));
+ m_formatError = tr("Setting source for Audio Decoder failed.");
}
}
@@ -162,6 +175,11 @@ void Decoder::createDecoder()
void Decoder::doDecode()
{
+ if (!m_formatError.isEmpty()) {
+ emit error(QAudioDecoder::FormatError, m_formatError);
+ return;
+ }
+
if (!m_extractor) {
emit error(QAudioDecoder::ResourceError, tr("Cannot decode, source not set."));
return;
@@ -190,6 +208,7 @@ void Decoder::doDecode()
AMediaExtractor_selectTrack(m_extractor, 0);
+ emit decodingChanged(true);
m_inputEOS = false;
while (!m_inputEOS) {
// handle input buffer
@@ -212,6 +231,15 @@ void Decoder::doDecode()
// handle output buffer
AMediaCodecBufferInfo info;
ssize_t idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+
+ while (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED
+ || idx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
+ if (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: outputFormat changed";
+
+ idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+ }
+
if (idx >= 0) {
if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
break;
@@ -222,30 +250,22 @@ void Decoder::doDecode()
&bufferSize);
const QByteArray data((const char*)(bufferData + info.offset), info.size);
auto audioBuffer = QAudioBuffer(data, m_outputFormat, presentationTimeUs);
- if (presentationTimeUs > 0)
+ if (presentationTimeUs >= 0)
emit positionChanged(std::move(audioBuffer), presentationTimeUs / 1000);
+
AMediaCodec_releaseOutputBuffer(m_codec, idx, false);
}
+ } else if (idx == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: try again later";
+ break;
} else {
- // The outputIndex doubles as a status return if its value is < 0
- switch (idx) {
- case AMEDIACODEC_INFO_TRY_AGAIN_LATER:
- qCWarning(adLogger) << "dequeueOutputBuffer() status: try again later";
- break;
- case AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED:
- qCWarning(adLogger) << "dequeueOutputBuffer() status: output buffers changed";
- break;
- case AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED:
- m_format = AMediaCodec_getOutputFormat(m_codec);
- qCWarning(adLogger) << "dequeueOutputBuffer() status: outputFormat changed";
- break;
- }
+ qCWarning(adLogger) <<
+ "AMediaCodec_dequeueOutputBuffer() status: invalid buffer idx " << idx;
}
} else {
qCWarning(adLogger) << "dequeueInputBuffer() status: invalid buffer idx " << bufferIdx;
}
}
-
emit finished();
}
@@ -257,12 +277,16 @@ QAndroidAudioDecoder::QAndroidAudioDecoder(QAudioDecoder *parent)
connect(m_decoder, &Decoder::durationChanged, this, &QAndroidAudioDecoder::durationChanged);
connect(m_decoder, &Decoder::error, this, &QAndroidAudioDecoder::error);
connect(m_decoder, &Decoder::finished, this, &QAndroidAudioDecoder::finished);
+ connect(m_decoder, &Decoder::decodingChanged, this, &QPlatformAudioDecoder::setIsDecoding);
+ connect(this, &QAndroidAudioDecoder::setSourceUrl, m_decoder, &Decoder::setSource);
}
QAndroidAudioDecoder::~QAndroidAudioDecoder()
{
- m_decoder->thread()->exit();
- m_decoder->deleteLater();
+ m_decoder->thread()->quit();
+ m_decoder->thread()->wait();
+ delete m_threadDecoder;
+ delete m_decoder;
}
void QAndroidAudioDecoder::setSource(const QUrl &fileName)
@@ -278,7 +302,7 @@ void QAndroidAudioDecoder::setSource(const QUrl &fileName)
if (m_source != fileName) {
m_source = fileName;
- m_decoder->setSource(m_source);
+ emit setSourceUrl(m_source);
sourceChanged();
}
}
@@ -304,38 +328,45 @@ void QAndroidAudioDecoder::start()
if (isDecoding())
return;
- setIsDecoding(true);
m_position = -1;
- m_threadDecoder = new QThread(this);
- m_decoder->moveToThread(m_threadDecoder);
- m_threadDecoder->start();
+ if (m_device && (!m_device->isOpen() || !m_device->isReadable())) {
+ emit error(QAudioDecoder::ResourceError,
+ QString::fromUtf8("Unable to read from the specified device"));
+ return;
+ }
+
+ if (!m_threadDecoder) {
+ m_threadDecoder = new QThread(this);
+ m_decoder->moveToThread(m_threadDecoder);
+ m_threadDecoder->start();
+ }
+
decode();
}
void QAndroidAudioDecoder::stop()
{
- if (!isDecoding())
+ if (!isDecoding() && m_position < 0 && m_duration < 0)
return;
m_decoder->stop();
-
- if (m_threadDecoder && m_threadDecoder->isRunning())
- m_threadDecoder->exit();
-
- QMutexLocker locker(&m_buffersMutex);
- m_position = -1;
m_audioBuffer.clear();
- locker.unlock();
+ m_position = -1;
+ m_duration = -1;
setIsDecoding(false);
+
+ emit bufferAvailableChanged(false);
+ emit QPlatformAudioDecoder::positionChanged(m_position);
}
QAudioBuffer QAndroidAudioDecoder::read()
{
- QMutexLocker locker(&m_buffersMutex);
- if (m_buffersAvailable && !m_audioBuffer.isEmpty()) {
- --m_buffersAvailable;
- return m_audioBuffer.takeFirst();
+ if (!m_audioBuffer.isEmpty()) {
+ QPair<QAudioBuffer, int> buffer = m_audioBuffer.takeFirst();
+ m_position = buffer.second;
+ emit QPlatformAudioDecoder::positionChanged(buffer.second);
+ return buffer.first;
}
// no buffers available
@@ -344,38 +375,29 @@ QAudioBuffer QAndroidAudioDecoder::read()
bool QAndroidAudioDecoder::bufferAvailable() const
{
- QMutexLocker locker(&m_buffersMutex);
- return m_buffersAvailable;
+ return m_audioBuffer.size() > 0;
}
qint64 QAndroidAudioDecoder::position() const
{
- QMutexLocker locker(&m_buffersMutex);
return m_position;
}
qint64 QAndroidAudioDecoder::duration() const
{
- QMutexLocker locker(&m_buffersMutex);
return m_duration;
}
void QAndroidAudioDecoder::positionChanged(QAudioBuffer audioBuffer, qint64 position)
{
- QMutexLocker locker(&m_buffersMutex);
- m_audioBuffer.append(audioBuffer);
+ m_audioBuffer.append(QPair<QAudioBuffer, int>(audioBuffer, position));
m_position = position;
- m_buffersAvailable++;
- locker.unlock();
emit bufferReady();
- emit QPlatformAudioDecoder::positionChanged(position);
}
void QAndroidAudioDecoder::durationChanged(qint64 duration)
{
- QMutexLocker locker(&m_buffersMutex);
m_duration = duration;
- locker.unlock();
emit QPlatformAudioDecoder::durationChanged(duration);
}
@@ -387,9 +409,13 @@ void QAndroidAudioDecoder::error(const QAudioDecoder::Error err, const QString &
void QAndroidAudioDecoder::finished()
{
- stop();
+ emit bufferAvailableChanged(m_audioBuffer.size() > 0);
+
+ if (m_duration != -1)
+ emit durationChanged(m_duration);
+
// remove temp file when decoding is finished
- QFile(QString::fromUtf8(tempPath).append(QString::fromUtf8(tempFile))).remove();
+ QFile(QDir::tempPath() + QLatin1Char('/') + QString::fromUtf8(tempFile)).remove();
emit QPlatformAudioDecoder::finished();
}
@@ -415,22 +441,22 @@ void QAndroidAudioDecoder::decode()
bool QAndroidAudioDecoder::createTempFile()
{
- QFile file = QFile(QString::fromUtf8(tempPath).append(QString::fromUtf8(tempFile)));
- if (!QDir().mkpath(QString::fromUtf8(tempPath)) || !file.open(QIODevice::WriteOnly)) {
- emit error(QAudioDecoder::ResourceError,
- QString::fromUtf8("Error while creating or opening tmp file"));
- return false;
- }
+ QFile file = QFile(QDir::tempPath() + QLatin1Char('/') + QString::fromUtf8(tempFile), this);
- QDataStream out;
- out.setDevice(&file);
- out << m_deviceBuffer;
- file.close();
+ bool success = file.open(QIODevice::ReadWrite);
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error while opening tmp file"));
+ success &= (file.write(m_deviceBuffer) == m_deviceBuffer.size());
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error while writing data to tmp file"));
+
+ file.close();
m_deviceBuffer.clear();
- m_decoder->setSource(file.fileName());
+ if (success)
+ m_decoder->setSource(file.fileName());
- return true;
+ return success;
}
void QAndroidAudioDecoder::readDevice() {
diff --git a/src/multimedia/platform/android/audio/qandroidaudiodecoder_p.h b/src/multimedia/platform/android/audio/qandroidaudiodecoder_p.h
index efb7cdc24..3707f4c50 100644
--- a/src/multimedia/platform/android/audio/qandroidaudiodecoder_p.h
+++ b/src/multimedia/platform/android/audio/qandroidaudiodecoder_p.h
@@ -53,7 +53,6 @@
#include "private/qplatformaudiodecoder_p.h"
#include <QtCore/qurl.h>
-#include <QtCore/qmutex.h>
#include <QThread>
#include "media/NdkMediaCodec.h"
@@ -81,6 +80,7 @@ signals:
void durationChanged(const qint64 duration);
void error(const QAudioDecoder::Error error, const QString &errorString);
void finished();
+ void decodingChanged(bool decoding);
private:
void createDecoder();
@@ -90,6 +90,7 @@ private:
AMediaFormat *m_format = nullptr;
QAudioFormat m_outputFormat;
+ QString m_formatError;
bool m_inputEOS;
};
@@ -119,6 +120,9 @@ public:
qint64 position() const override;
qint64 duration() const override;
+signals:
+ void setSourceUrl(const QUrl &source);
+
private slots:
void positionChanged(QAudioBuffer audioBuffer, qint64 position);
void durationChanged(qint64 duration);
@@ -134,18 +138,16 @@ private:
QIODevice *m_device = nullptr;
Decoder *m_decoder;
- QList<QAudioBuffer> m_audioBuffer;
+ QList<QPair<QAudioBuffer, int>> m_audioBuffer;
QUrl m_source;
- mutable QMutex m_buffersMutex;
qint64 m_position = -1;
qint64 m_duration = -1;
long long m_presentationTimeUs = 0;
- int m_buffersAvailable = 0;
QByteArray m_deviceBuffer;
- QThread *m_threadDecoder;
+ QThread *m_threadDecoder = nullptr;
};
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/android/audio/qandroidaudiosink.cpp b/src/multimedia/platform/android/audio/qandroidaudiosink.cpp
index 1a0b622a3..d7a67f207 100644
--- a/src/multimedia/platform/android/audio/qandroidaudiosink.cpp
+++ b/src/multimedia/platform/android/audio/qandroidaudiosink.cpp
@@ -328,6 +328,9 @@ void QAndroidAudioSink::bufferAvailable(quint32 count, quint32 playIndex)
m_nextBuffer = (m_nextBuffer + 1) % BUFFER_COUNT;
QMetaObject::invokeMethod(this, "onBytesProcessed", Qt::QueuedConnection, Q_ARG(qint64, readSize));
+
+ if (m_audioSource->atEnd())
+ setState(QAudio::IdleState);
}
void QAndroidAudioSink::playCallback(SLPlayItf player, void *ctx, SLuint32 event)
@@ -353,6 +356,9 @@ bool QAndroidAudioSink::preparePlayer()
else
return true;
+ if (!QOpenSLESEngine::setAudioOutput(m_deviceName))
+ qWarning() << "Unable to setup Audio Output Device";
+
SLEngineItf engine = QOpenSLESEngine::instance()->slEngine();
if (!engine) {
qWarning() << "No engine";
@@ -361,7 +367,7 @@ bool QAndroidAudioSink::preparePlayer()
}
SLDataLocator_BufferQueue bufferQueueLocator = { SL_DATALOCATOR_BUFFERQUEUE, BUFFER_COUNT };
- SLDataFormat_PCM pcmFormat = QOpenSLESEngine::audioFormatToSLFormatPCM(m_format);
+ SLAndroidDataFormat_PCM_EX pcmFormat = QOpenSLESEngine::audioFormatToSLFormatPCM(m_format);
SLDataSource audioSrc = { &bufferQueueLocator, &pcmFormat };
diff --git a/src/multimedia/platform/android/audio/qandroidaudiosource.cpp b/src/multimedia/platform/android/audio/qandroidaudiosource.cpp
index c7eaf57ad..2f8d52830 100644
--- a/src/multimedia/platform/android/audio/qandroidaudiosource.cpp
+++ b/src/multimedia/platform/android/audio/qandroidaudiosource.cpp
@@ -220,7 +220,7 @@ bool QAndroidAudioSource::startRecording()
SLDataLocator_BufferQueue loc_bq = { SL_DATALOCATOR_BUFFERQUEUE, NUM_BUFFERS };
#endif
- SLDataFormat_PCM format_pcm = QOpenSLESEngine::audioFormatToSLFormatPCM(m_format);
+ SLAndroidDataFormat_PCM_EX format_pcm = QOpenSLESEngine::audioFormatToSLFormatPCM(m_format);
SLDataSink audioSnk = { &loc_bq, &format_pcm };
// create audio recorder
diff --git a/src/multimedia/platform/android/audio/qopenslesengine.cpp b/src/multimedia/platform/android/audio/qopenslesengine.cpp
index 7d207a369..6032c06cc 100644
--- a/src/multimedia/platform/android/audio/qopenslesengine.cpp
+++ b/src/multimedia/platform/android/audio/qopenslesengine.cpp
@@ -51,6 +51,8 @@
#define CheckError(message) if (result != SL_RESULT_SUCCESS) { qWarning(message); return; }
+#define SL_ANDROID_PCM_REPRESENTATION_INVALID 0
+
Q_GLOBAL_STATIC(QOpenSLESEngine, openslesEngine);
QOpenSLESEngine::QOpenSLESEngine()
@@ -81,12 +83,12 @@ QOpenSLESEngine *QOpenSLESEngine::instance()
return openslesEngine();
}
-SLDataFormat_PCM QOpenSLESEngine::audioFormatToSLFormatPCM(const QAudioFormat &format)
+SLAndroidDataFormat_PCM_EX QOpenSLESEngine::audioFormatToSLFormatPCM(const QAudioFormat &format)
{
- SLDataFormat_PCM format_pcm;
- format_pcm.formatType = SL_DATAFORMAT_PCM;
+ SLAndroidDataFormat_PCM_EX format_pcm;
+ format_pcm.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
format_pcm.numChannels = format.channelCount();
- format_pcm.samplesPerSec = format.sampleRate() * 1000;
+ format_pcm.sampleRate = format.sampleRate() * 1000;
format_pcm.bitsPerSample = format.bytesPerSample() * 8;
format_pcm.containerSize = format.bytesPerSample() * 8;
format_pcm.channelMask = (format.channelCount() == 1 ?
@@ -95,8 +97,25 @@ SLDataFormat_PCM QOpenSLESEngine::audioFormatToSLFormatPCM(const QAudioFormat &f
format_pcm.endianness = (QSysInfo::ByteOrder == QSysInfo::LittleEndian ?
SL_BYTEORDER_LITTLEENDIAN :
SL_BYTEORDER_BIGENDIAN);
- return format_pcm;
+ switch (format.sampleFormat()) {
+ case QAudioFormat::SampleFormat::UInt8:
+ format_pcm.representation = SL_ANDROID_PCM_REPRESENTATION_UNSIGNED_INT;
+ break;
+ case QAudioFormat::SampleFormat::Int16:
+ case QAudioFormat::SampleFormat::Int32:
+ format_pcm.representation = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
+ break;
+ case QAudioFormat::SampleFormat::Float:
+ format_pcm.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
+ break;
+ case QAudioFormat::SampleFormat::NSampleFormats:
+ case QAudioFormat::SampleFormat::Unknown:
+ format_pcm.representation = SL_ANDROID_PCM_REPRESENTATION_INVALID;
+ break;
+ }
+
+ return format_pcm;
}
QList<QAudioDevice> QOpenSLESEngine::availableDevices(QAudioDevice::Mode mode)
@@ -128,15 +147,34 @@ QList<QAudioDevice> QOpenSLESEngine::availableDevices(QAudioDevice::Mode mode)
return devices;
}
+bool QOpenSLESEngine::setAudioOutput(const QByteArray &deviceId)
+{
+ return QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setAudioOutput",
+ "(I)Z",
+ deviceId.toInt());
+}
+
static bool hasRecordPermission()
{
const auto recordPerm = QtAndroidPrivate::checkPermission(QtAndroidPrivate::Microphone);
return recordPerm.result() == QtAndroidPrivate::Authorized;
}
+static bool requestPermissions()
+{
+ const auto recordPerm = QtAndroidPrivate::requestPermission(QtAndroidPrivate::Microphone);
+ return recordPerm.result() == QtAndroidPrivate::Authorized;
+}
+
QList<int> QOpenSLESEngine::supportedChannelCounts(QAudioDevice::Mode mode) const
{
- if (mode == QAudioDevice::Input && hasRecordPermission()) {
+ bool hasRecordPermissions = hasRecordPermission();
+ if (!hasRecordPermissions)
+ hasRecordPermissions = requestPermissions();
+
+ if (mode == QAudioDevice::Input && hasRecordPermissions) {
if (!m_checkedInputFormats)
const_cast<QOpenSLESEngine *>(this)->checkSupportedInputFormats();
return m_supportedInputChannelCounts;
@@ -302,9 +340,9 @@ void QOpenSLESEngine::checkSupportedInputFormats()
defaultFormat.sampleRate = SL_SAMPLINGRATE_44_1;
defaultFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_32;
defaultFormat.containerSize = SL_PCMSAMPLEFORMAT_FIXED_32;
- defaultFormat.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
defaultFormat.channelMask = SL_ANDROID_MAKE_INDEXED_CHANNEL_MASK(SL_SPEAKER_FRONT_CENTER);
defaultFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
+ defaultFormat.representation = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
const SLuint32 rates[13] = { SL_SAMPLINGRATE_8,
SL_SAMPLINGRATE_11_025,
diff --git a/src/multimedia/platform/android/audio/qopenslesengine_p.h b/src/multimedia/platform/android/audio/qopenslesengine_p.h
index 36e994fb2..0f9781bd5 100644
--- a/src/multimedia/platform/android/audio/qopenslesengine_p.h
+++ b/src/multimedia/platform/android/audio/qopenslesengine_p.h
@@ -72,9 +72,10 @@ public:
SLEngineItf slEngine() const { return m_engine; }
- static SLDataFormat_PCM audioFormatToSLFormatPCM(const QAudioFormat &format);
+ static SLAndroidDataFormat_PCM_EX audioFormatToSLFormatPCM(const QAudioFormat &format);
static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode);
+ static bool setAudioOutput(const QByteArray &deviceId);
QList<int> supportedChannelCounts(QAudioDevice::Mode mode) const;
QList<int> supportedSampleRates(QAudioDevice::Mode mode) const;
diff --git a/src/multimedia/platform/android/common/qandroidvideooutput.cpp b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
index 6508cb554..f3a8bff2e 100644
--- a/src/multimedia/platform/android/common/qandroidvideooutput.cpp
+++ b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
@@ -123,10 +123,12 @@ static QMatrix4x4 extTransformMatrix(AndroidSurfaceTexture *surfaceTexture)
quint64 AndroidTextureVideoBuffer::textureHandle(int plane) const
{
- if (plane != 0 || !rhi || !m_output->m_nativeSize.isValid())
+ if (plane != 0 || !rhi || !m_output->m_nativeSize.isValid() || !m_output->m_readbackRhi
+ || !m_output->m_surfaceTexture)
return 0;
- m_output->ensureExternalTexture(rhi);
+ m_output->m_readbackRhi->makeThreadLocalNativeContextCurrent();
+ m_output->ensureExternalTexture(m_output->m_readbackRhi);
m_output->m_surfaceTexture->updateTexImage();
m_externalMatrix = extTransformMatrix(m_output->m_surfaceTexture);
return m_output->m_externalTex->nativeTexture().object;
@@ -233,15 +235,13 @@ void QAndroidTextureVideoOutput::setVideoSize(const QSize &size)
if (m_nativeSize == size)
return;
- stop();
-
m_nativeSize = size;
}
void QAndroidTextureVideoOutput::start()
{
m_started = true;
- renderAndReadbackFrame();
+ QMetaObject::invokeMethod(this, "onFrameAvailable", Qt::QueuedConnection);
}
void QAndroidTextureVideoOutput::stop()
@@ -304,6 +304,22 @@ void QAndroidTextureVideoOutput::onFrameAvailable()
if (!(m_nativeSize.isValid() && m_sink) || !(m_started || m_renderFrame))
return;
+ const bool needsToBeInOpenGLThread =
+ !m_readbackRhi || !m_readbackTex || !m_readbackSrb || !m_readbackPs;
+
+ const bool movedToOpenGLThread = needsToBeInOpenGLThread && moveToOpenGLContextThread();
+
+ if (movedToOpenGLThread || QThread::currentThread() != m_thread) {
+ // the render thread may get blocked waiting for events, force refresh until get back to
+ // original thread.
+ QMetaObject::invokeMethod(this, "onFrameAvailable", Qt::QueuedConnection);
+
+ if (!needsToBeInOpenGLThread) {
+ parent()->moveToThread(m_thread);
+ moveToThread(m_thread);
+ }
+ }
+
m_renderFrame = false;
QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
@@ -313,11 +329,6 @@ void QAndroidTextureVideoOutput::onFrameAvailable()
: QVideoFrameFormat::Format_RGBA8888;
QVideoFrame frame(buffer, QVideoFrameFormat(m_nativeSize, format));
m_sink->platformVideoSink()->setVideoFrame(frame);
-
- QMetaObject::invokeMethod(m_surfaceTexture
- , "frameAvailable"
- , Qt::QueuedConnection
- );
}
static const float g_quad[] = {
@@ -343,12 +354,6 @@ bool QAndroidTextureVideoOutput::renderAndReadbackFrame()
if (!m_nativeSize.isValid() || !m_surfaceTexture)
return false;
- if (moveToOpenGLContextThread()) {
- // just moved to another thread, must close the execution of this method
- QMetaObject::invokeMethod(this, "onFrameAvailable", Qt::QueuedConnection);
- return false;
- }
-
if (!m_readbackRhi) {
QRhi *sinkRhi = m_sink ? m_sink->rhi() : nullptr;
if (sinkRhi && sinkRhi->backend() == QRhi::OpenGLES2) {
diff --git a/src/multimedia/platform/android/common/qandroidvideooutput_p.h b/src/multimedia/platform/android/common/qandroidvideooutput_p.h
index 473c58552..d7eedc985 100644
--- a/src/multimedia/platform/android/common/qandroidvideooutput_p.h
+++ b/src/multimedia/platform/android/common/qandroidvideooutput_p.h
@@ -82,6 +82,7 @@ public:
virtual void start() { }
virtual void stop() { }
virtual void reset() { }
+ virtual QSize getVideoSize() const { return QSize(0, 0); }
Q_SIGNALS:
void readyChanged(bool);
@@ -122,6 +123,7 @@ public:
void stop() override;
void reset() override;
void renderFrame();
+ QSize getVideoSize() const override { return m_nativeSize; }
void setSubtitle(const QString &subtitle);
private Q_SLOTS:
@@ -166,6 +168,8 @@ private:
QString m_subtitleText;
QPixmap m_subtitlePixmap;
+ QThread *m_thread = QThread::currentThread();
+
GraphicsResourceDeleter *m_graphicsDeleter = nullptr;
friend class AndroidTextureVideoBuffer;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
index 3bcc93564..0db9b7bbc 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
@@ -60,8 +60,11 @@ QAndroidCamera::~QAndroidCamera()
void QAndroidCamera::setActive(bool active)
{
- if (m_cameraSession)
+ if (m_cameraSession) {
m_cameraSession->setActive(active);
+ } else {
+ isPendingSetActive = active;
+ }
}
bool QAndroidCamera::isActive() const
@@ -135,6 +138,11 @@ void QAndroidCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
connect(m_cameraSession, &QAndroidCameraSession::activeChanged, this, &QAndroidCamera::activeChanged);
connect(m_cameraSession, &QAndroidCameraSession::error, this, &QAndroidCamera::error);
connect(m_cameraSession, &QAndroidCameraSession::opened, this, &QAndroidCamera::onCameraOpened);
+
+ if (isPendingSetActive) {
+ setActive(true);
+ isPendingSetActive = false;
+ }
}
void QAndroidCamera::setFocusMode(QCamera::FocusMode mode)
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h b/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
index e97368698..45e97a4fa 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
@@ -121,6 +121,7 @@ private:
bool isFlashSupported = false;
bool isFlashAutoSupported = false;
bool isTorchSupported = false;
+ bool isPendingSetActive = false;
QCameraDevice m_cameraDev;
QMap<QCamera::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
index 5df959341..19db3ec73 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
@@ -269,6 +269,8 @@ void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool resta
// -- adjust resolution
QSize adjustedViewfinderResolution;
+ const QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
+
const bool validCaptureSize = captureSize.width() > 0 && captureSize.height() > 0;
if (validCaptureSize
&& m_camera->getPreferredPreviewSizeForVideo().isEmpty()) {
@@ -280,8 +282,6 @@ void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool resta
if (validCaptureSize)
captureAspectRatio = qreal(captureSize.width()) / qreal(captureSize.height());
- const QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
-
if (validCaptureSize) {
// search for viewfinder resolution with the same aspect ratio
qreal minAspectDiff = 1;
@@ -326,24 +326,31 @@ void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool resta
// -- Set values on camera
// fix the resolution of output based on the orientation
- QSize outputResolution = adjustedViewfinderResolution;
+ QSize cameraOutputResolution = adjustedViewfinderResolution;
+ QSize videoOutputResolution = adjustedViewfinderResolution;
+ QSize currentVideoOutputResolution = m_videoOutput ? m_videoOutput->getVideoSize() : QSize(0, 0);
const int rotation = currentCameraRotation();
- if (rotation == 90 || rotation == 270)
- outputResolution.transpose();
+ // only transpose if it's valid for the preview
+ if (rotation == 90 || rotation == 270) {
+ videoOutputResolution.transpose();
+ if (previewSizes.contains(cameraOutputResolution.transposed()))
+ cameraOutputResolution.transpose();
+ }
- if (currentViewfinderResolution != outputResolution
+ if (currentViewfinderResolution != cameraOutputResolution
+ || (m_videoOutput && currentVideoOutputResolution != videoOutputResolution)
|| currentPreviewFormat != adjustedPreviewFormat || currentFpsRange.min != adjustedFps.min
|| currentFpsRange.max != adjustedFps.max) {
if (m_videoOutput) {
- m_videoOutput->setVideoSize(outputResolution);
+ m_videoOutput->setVideoSize(videoOutputResolution);
}
// if preview is started, we have to stop it first before changing its size
if (m_previewStarted && restartPreview)
m_camera->stopPreview();
- m_camera->setPreviewSize(outputResolution);
+ m_camera->setPreviewSize(cameraOutputResolution);
m_camera->setPreviewFormat(adjustedPreviewFormat);
m_camera->setPreviewFpsRange(adjustedFps);
@@ -409,6 +416,7 @@ bool QAndroidCameraSession::startPreview()
m_camera->startPreview();
m_previewStarted = true;
+ m_videoOutput->start();
return true;
}
@@ -612,7 +620,6 @@ int QAndroidCameraSession::captureImage()
m_currentImageCaptureId = newImageCaptureId;
- applyImageSettings();
applyResolution(m_actualImageSettings.resolution());
m_camera->takePicture();
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
index fdafe3520..2fdd9d33b 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
@@ -183,7 +183,7 @@ private:
QImageEncoderSettings m_requestedImageSettings;
QImageEncoderSettings m_actualImageSettings;
AndroidCamera::FpsRange m_requestedFpsRange;
- AndroidCamera::ImageFormat m_requestedPixelFromat;
+ AndroidCamera::ImageFormat m_requestedPixelFromat = AndroidCamera::ImageFormat::NV21;
bool m_readyForCapture;
int m_currentImageCaptureId;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
index d274fc9e9..ba296e4f7 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
@@ -41,6 +41,8 @@
#include "androidcamera_p.h"
#include "qandroidcamerasession_p.h"
+#include "qaudioinput.h"
+#include "qaudiooutput.h"
#include "androidmediaplayer_p.h"
#include "androidmultimediautils_p.h"
#include "qandroidmultimediautils_p.h"
@@ -74,6 +76,8 @@ QAndroidCaptureSession::~QAndroidCaptureSession()
{
stop();
m_mediaRecorder = nullptr;
+ if (m_audioInput && m_audioOutput)
+ AndroidMediaPlayer::stopSoundStreaming();
}
void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSession)
@@ -97,7 +101,23 @@ void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSessi
void QAndroidCaptureSession::setAudioInput(QPlatformAudioInput *input)
{
+ if (m_audioInput == input)
+ return;
+
+ if (m_audioInput) {
+ disconnect(m_audioInputChanged);
+ }
+
m_audioInput = input;
+
+ if (m_audioInput) {
+ m_audioInputChanged = connect(m_audioInput->q, &QAudioInput::deviceChanged, this, [this]() {
+ if (m_state == QMediaRecorder::RecordingState)
+ m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ updateStreamingState();
+ });
+ }
+ updateStreamingState();
}
void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
@@ -105,10 +125,30 @@ void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
if (m_audioOutput == output)
return;
+ if (m_audioOutput)
+ disconnect(m_audioOutputChanged);
+
m_audioOutput = output;
- if (m_audioOutput)
+ if (m_audioOutput) {
+ m_audioOutputChanged = connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ [this] () {
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ updateStreamingState();
+ });
AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::updateStreamingState()
+{
+ if (m_audioInput && m_audioOutput) {
+ AndroidMediaPlayer::startSoundStreaming(m_audioInput->device.id().toInt(),
+ m_audioOutput->device.id().toInt());
+ } else {
+ AndroidMediaPlayer::stopSoundStreaming();
+ }
}
QMediaRecorder::RecorderState QAndroidCaptureSession::state() const
@@ -160,7 +200,6 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
// Set audio/video sources
if (validCameraSession) {
m_cameraSession->camera()->stopPreviewSynchronous();
- m_cameraSession->applyResolution(settings.videoResolution(), false);
m_cameraSession->camera()->unlock();
m_mediaRecorder->setCamera(m_cameraSession->camera());
@@ -434,6 +473,10 @@ void QAndroidCaptureSession::onCameraOpened()
std::sort(m_supportedResolutions.begin(), m_supportedResolutions.end(), qt_sizeLessThan);
std::sort(m_supportedFramerates.begin(), m_supportedFramerates.end());
+
+ QMediaEncoderSettings defaultSettings;
+ applySettings(defaultSettings);
+ m_cameraSession->applyResolution(defaultSettings.videoResolution());
}
QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id)
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h b/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
index e91e5b210..c3b4926cb 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
@@ -153,6 +153,7 @@ private:
CaptureProfile getProfile(int id);
void restartViewfinder();
+ void updateStreamingState();
QAndroidMediaEncoder *m_mediaEncoder = nullptr;
std::shared_ptr<AndroidMediaRecorder> m_mediaRecorder;
@@ -179,6 +180,8 @@ private:
QList<QSize> m_supportedResolutions;
QList<qreal> m_supportedFramerates;
+ QMetaObject::Connection m_audioInputChanged;
+ QMetaObject::Connection m_audioOutputChanged;
QMetaObject::Connection m_connOpenCamera;
QMetaObject::Connection m_connActiveChangedCamera;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp b/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
index ddc690d77..ce7135466 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
@@ -83,10 +83,8 @@ void QAndroidMediaCaptureSession::setCamera(QPlatformCamera *camera)
m_cameraControl->setCaptureSession(nullptr);
m_cameraControl = control;
- if (m_cameraControl) {
+ if (m_cameraControl)
m_cameraControl->setCaptureSession(this);
- m_cameraControl->setActive(true);
- }
emit cameraChanged();
}
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
index 01a57c298..cbcc34ca3 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
@@ -84,6 +84,8 @@ QAndroidMediaPlayer::QAndroidMediaPlayer(QMediaPlayer *parent)
mMediaPlayer(new AndroidMediaPlayer),
mState(AndroidMediaPlayer::Uninitialized)
{
+ // Set seekable to True by default. It changes if MEDIA_INFO_NOT_SEEKABLE is received
+ seekableChanged(true);
connect(mMediaPlayer, &AndroidMediaPlayer::bufferingChanged, this,
&QAndroidMediaPlayer::onBufferingChanged);
connect(mMediaPlayer, &AndroidMediaPlayer::info, this, &QAndroidMediaPlayer::onInfo);
@@ -249,43 +251,24 @@ void QAndroidMediaPlayer::updateAvailablePlaybackRanges()
qreal QAndroidMediaPlayer::playbackRate() const
{
- if (mHasPendingPlaybackRate ||
- (mState & (AndroidMediaPlayer::Initialized
- | AndroidMediaPlayer::Prepared
- | AndroidMediaPlayer::Started
- | AndroidMediaPlayer::Paused
- | AndroidMediaPlayer::PlaybackCompleted
- | AndroidMediaPlayer::Error)) == 0) {
- return mPendingPlaybackRate;
- }
-
- return mMediaPlayer->playbackRate();
+ return mCurrentPlaybackRate;
}
void QAndroidMediaPlayer::setPlaybackRate(qreal rate)
{
- if ((mState & (AndroidMediaPlayer::Initialized
- | AndroidMediaPlayer::Prepared
- | AndroidMediaPlayer::Started
- | AndroidMediaPlayer::Paused
- | AndroidMediaPlayer::PlaybackCompleted
- | AndroidMediaPlayer::Error)) == 0) {
- if (mPendingPlaybackRate != rate) {
- mPendingPlaybackRate = rate;
+ if (mState != AndroidMediaPlayer::Started) {
+ // If video isn't playing, changing speed rate may start it automatically
+ // It need to be postponed
+ if (mCurrentPlaybackRate != rate) {
+ mCurrentPlaybackRate = rate;
mHasPendingPlaybackRate = true;
Q_EMIT playbackRateChanged(rate);
}
return;
}
- bool succeeded = mMediaPlayer->setPlaybackRate(rate);
-
- if (mHasPendingPlaybackRate) {
- mHasPendingPlaybackRate = false;
- mPendingPlaybackRate = qreal(1.0);
- if (!succeeded)
- Q_EMIT playbackRateChanged(playbackRate());
- } else if (succeeded) {
+ if (mMediaPlayer->setPlaybackRate(rate)) {
+ mCurrentPlaybackRate = rate;
Q_EMIT playbackRateChanged(rate);
}
}
@@ -416,6 +399,14 @@ void QAndroidMediaPlayer::play()
updateAudioDevice();
+ if (mHasPendingPlaybackRate) {
+ mHasPendingPlaybackRate = false;
+ if (mMediaPlayer->setPlaybackRate(mCurrentPlaybackRate))
+ return;
+ mCurrentPlaybackRate = mMediaPlayer->playbackRate();
+ Q_EMIT playbackRateChanged(mCurrentPlaybackRate);
+ }
+
mMediaPlayer->play();
}
@@ -462,17 +453,16 @@ void QAndroidMediaPlayer::stop()
return;
}
+ if (mCurrentPlaybackRate != 1.)
+ // Playback rate need to by reapplied
+ mHasPendingPlaybackRate = true;
+
if (mVideoOutput)
mVideoOutput->stop();
mMediaPlayer->stop();
}
-bool QAndroidMediaPlayer::isSeekable() const
-{
- return true;
-}
-
void QAndroidMediaPlayer::onInfo(qint32 what, qint32 extra)
{
StateChangeNotifier notifier(this);
@@ -552,7 +542,9 @@ void QAndroidMediaPlayer::onError(qint32 what, qint32 extra)
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
- errorString += QLatin1String(" (Unknown error/Insufficient resources)");
+ errorString += mMediaContent.scheme() == QLatin1String("rtsp")
+ ? QLatin1String(" (Unknown error/Insufficient resources or RTSP may not be supported)")
+ : QLatin1String(" (Unknown error/Insufficient resources)");
error = QMediaPlayer::ResourceError;
break;
}
@@ -981,8 +973,6 @@ void QAndroidMediaPlayer::flushPendingStates()
setVolume(mPendingVolume);
if (mPendingMute != -1)
setMuted((mPendingMute == 1));
- if (mHasPendingPlaybackRate)
- setPlaybackRate(mPendingPlaybackRate);
switch (newState) {
case QMediaPlayer::PlayingState:
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
index b8e187a08..486b0ddb3 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
@@ -96,8 +96,6 @@ public:
void pause() override;
void stop() override;
- bool isSeekable() const override;
-
int trackCount(TrackType trackType) override;
QMediaMetaData trackMetaData(TrackType trackType, int streamNumber) override;
int activeTrack(TrackType trackType) override;
@@ -137,7 +135,7 @@ private:
int mPendingMute = -1;
bool mReloadingMedia = false;
int mActiveStateChangeNotifiers = 0;
- qreal mPendingPlaybackRate = 1.;
+ qreal mCurrentPlaybackRate = 1.;
bool mHasPendingPlaybackRate = false; // we need this because the rate can theoretically be negative
QMap<TrackType, QList<QAndroidMetaData>> mTracksMetadata;
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp b/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
index 4765fa0ad..93d3b246d 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
@@ -46,6 +46,7 @@
#include <QtCore/qlist.h>
#include <QtConcurrent/qtconcurrentrun.h>
#include <QLoggingCategory>
+#include <private/qiso639_2_p.h>
QT_BEGIN_NAMESPACE
@@ -170,15 +171,7 @@ QLocale::Language getLocaleLanguage(const QString &language)
if (language == QLatin1String("und") || language == QStringLiteral("mis"))
return QLocale::AnyLanguage;
- QLocale locale(language);
-
- if (locale == QLocale::c()) {
- qCWarning(lcaMetadata) << "Could not parse language:" << language
- << ". It is not a valid Unicode CLDR language code.";
- return QLocale::AnyLanguage;
- }
-
- return locale.language();
+ return QtMultimediaPrivate::fromIso639(language.toStdString().c_str());
}
QAndroidMetaData::QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
diff --git a/src/multimedia/platform/android/qandroidformatsinfo.cpp b/src/multimedia/platform/android/qandroidformatsinfo.cpp
index 584c7a122..e32479e28 100644
--- a/src/multimedia/platform/android/qandroidformatsinfo.cpp
+++ b/src/multimedia/platform/android/qandroidformatsinfo.cpp
@@ -108,7 +108,6 @@ QAndroidFormatInfo::QAndroidFormatInfo()
{
const QMediaFormat::AudioCodec aac = hasEncoder(QMediaFormat::AudioCodec::AAC);
const QMediaFormat::AudioCodec mp3 = hasEncoder(QMediaFormat::AudioCodec::MP3);
- const QMediaFormat::AudioCodec flac = hasEncoder(QMediaFormat::AudioCodec::FLAC);
const QMediaFormat::AudioCodec opus = hasEncoder(QMediaFormat::AudioCodec::Opus);
const QMediaFormat::AudioCodec vorbis = hasEncoder(QMediaFormat::AudioCodec::Vorbis);
@@ -123,11 +122,12 @@ QAndroidFormatInfo::QAndroidFormatInfo()
encoders = {
{ QMediaFormat::AAC, {aac}, {} },
{ QMediaFormat::MP3, {mp3}, {} },
- { QMediaFormat::FLAC, {flac}, {} },
- { QMediaFormat::Mpeg4Audio, {mp3, aac, flac, vorbis}, {} },
- { QMediaFormat::MPEG4, {mp3, aac, flac, vorbis}, {h264, h265, av1} },
- { QMediaFormat::Ogg, {opus, vorbis, flac}, {} },
- { QMediaFormat::Matroska, {mp3, opus, flac}, {vp8, vp9, h264, h265, av1} },
+ // FLAC encoder is not supported by the MediaRecorder used for recording
+ // { QMediaFormat::FLAC, {flac}, {} },
+ { QMediaFormat::Mpeg4Audio, {mp3, aac, vorbis}, {} },
+ { QMediaFormat::MPEG4, {mp3, aac, vorbis}, {h264, h265, av1} },
+ { QMediaFormat::Ogg, {opus, vorbis}, {} },
+ { QMediaFormat::Matroska, {mp3, opus}, {vp8, vp9, h264, h265, av1} },
// NOTE: WebM seems to be documented to supported with VP8 encoder,
// but the Camera API doesn't work with it, keep it commented for now.
// { QMediaFormat::WebM, {vorbis, opus}, {vp8, vp9} }
diff --git a/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp b/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
index 80830565e..70a8e52ba 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
+++ b/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
@@ -836,6 +836,12 @@ void AndroidCamera::getCameraInfo(int id, QCameraDevicePrivate *info)
default:
break;
}
+ // Add a number to allow correct access to cameras on systems with two
+ // (and more) front/back cameras
+ if (id > 1) {
+ info->id.append(QByteArray::number(id));
+ info->description.append(QString(" %1").arg(id));
+ }
}
QVideoFrameFormat::PixelFormat AndroidCamera::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
diff --git a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
index 1378cfbeb..1bdc517d6 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
+++ b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
@@ -288,6 +288,21 @@ void AndroidMediaPlayer::unblockAudio()
mAudioBlocked = false;
}
+void AndroidMediaPlayer::startSoundStreaming(const int inputId, const int outputId)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "startSoundStreaming",
+ "(II)V",
+ inputId,
+ outputId);
+}
+
+void AndroidMediaPlayer::stopSoundStreaming()
+{
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager", "stopSoundStreaming");
+}
+
bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
{
if (QNativeInterface::QAndroidApplication::sdkVersion() < 23) {
@@ -296,34 +311,7 @@ bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
return false;
}
- QJniObject player = mMediaPlayer.callObjectMethod("getMediaPlayerHandle",
- "()Landroid/media/MediaPlayer;");
- if (player.isValid()) {
- QJniObject playbackParams = player.callObjectMethod("getPlaybackParams",
- "()Landroid/media/PlaybackParams;");
- if (playbackParams.isValid()) {
- playbackParams.callObjectMethod("setSpeed", "(F)Landroid/media/PlaybackParams;",
- jfloat(rate));
- // pitch can only be > 0
- if (!qFuzzyIsNull(rate))
- playbackParams.callObjectMethod("setPitch", "(F)Landroid/media/PlaybackParams;",
- jfloat(qAbs(rate)));
-
- QJniEnvironment env;
- auto methodId = env->GetMethodID(player.objectClass(), "setPlaybackParams",
- "(Landroid/media/PlaybackParams;)V");
- env->CallVoidMethod(player.object(), methodId, playbackParams.object());
-
- if (env.checkAndClearExceptions()) {
- qWarning() << "Invalid playback rate" << rate;
- return false;
- } else {
- return true;
- }
- }
- }
-
- return false;
+ return mMediaPlayer.callMethod<jboolean>("setPlaybackRate", "(F)V", jfloat(rate));
}
void AndroidMediaPlayer::setDisplay(AndroidSurfaceTexture *surfaceTexture)
diff --git a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
index d5cf07f9c..83fb212c7 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
+++ b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
@@ -136,6 +136,8 @@ public:
void setDataSource(const QNetworkRequest &request);
void prepareAsync();
void setVolume(int volume);
+ static void startSoundStreaming(const int inputId, const int outputId);
+ static void stopSoundStreaming();
bool setPlaybackRate(qreal rate);
void setDisplay(AndroidSurfaceTexture *surfaceTexture);
static bool setAudioOutput(const QByteArray &deviceId);
diff --git a/src/multimedia/platform/darwin/camera/avfcamera.mm b/src/multimedia/platform/darwin/camera/avfcamera.mm
index a0f890a39..fe0a37822 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamera.mm
@@ -129,6 +129,22 @@ bool qt_convert_exposure_mode(AVCaptureDevice *captureDevice, QCamera::ExposureM
#endif // defined(Q_OS_IOS)
+bool isFlashAvailable(AVCaptureDevice* captureDevice) {
+ if (@available(macOS 10.15, *)) {
+ return [captureDevice isFlashAvailable];
+ }
+
+ return true;
+}
+
+bool isTorchAvailable(AVCaptureDevice* captureDevice) {
+ if (@available(macOS 10.15, *)) {
+ return [captureDevice isTorchAvailable];
+ }
+
+ return true;
+}
+
} // Unnamed namespace.
@@ -589,14 +605,7 @@ bool AVFCamera::isFlashReady() const
if (!isFlashModeSupported(flashMode()))
return false;
-#ifdef Q_OS_IOS
- // AVCaptureDevice's docs:
- // "The flash may become unavailable if, for example,
- // the device overheats and needs to cool off."
- return [captureDevice isFlashAvailable];
-#endif
-
- return true;
+ return isFlashAvailable(captureDevice);
}
void AVFCamera::setTorchMode(QCamera::TorchMode mode)
@@ -691,37 +700,51 @@ void AVFCamera::applyFlashSettings()
if (captureDevice.hasFlash) {
auto mode = flashMode();
+
+ auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+ // Note, in some cases captureDevice.hasFlash == false even though
+ // no there're no supported flash modes.
+ if ([captureDevice isFlashModeSupported:avFlashMode])
+ captureDevice.flashMode = avFlashMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to setup unsupported flash mode " << avFlashMode;
+ };
+
if (mode == QCamera::FlashOff) {
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
} else {
-#ifdef Q_OS_IOS
- if (![captureDevice isFlashAvailable]) {
+ if (isFlashAvailable(captureDevice)) {
+ if (mode == QCamera::FlashOn)
+ setAvFlashModeSafe(AVCaptureFlashModeOn);
+ else if (mode == QCamera::FlashAuto)
+ setAvFlashModeSafe(AVCaptureFlashModeAuto);
+ } else {
qDebugCamera() << Q_FUNC_INFO << "flash is not available at the moment";
- return;
}
-#endif
- if (mode == QCamera::FlashOn)
- captureDevice.flashMode = AVCaptureFlashModeOn;
- else if (mode == QCamera::FlashAuto)
- captureDevice.flashMode = AVCaptureFlashModeAuto;
}
}
if (captureDevice.hasTorch) {
auto mode = torchMode();
+
+ auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+ if ([captureDevice isTorchModeSupported:avTorchMode])
+ captureDevice.torchMode = avTorchMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to setup unsupported torch mode " << avTorchMode;
+ };
+
if (mode == QCamera::TorchOff) {
- captureDevice.torchMode = AVCaptureTorchModeOff;
+ setAvTorchModeSafe(AVCaptureTorchModeOff);
} else {
-#ifdef Q_OS_IOS
- if (![captureDevice isTorchAvailable]) {
+ if (isTorchAvailable(captureDevice)) {
+ if (mode == QCamera::TorchOn)
+ setAvTorchModeSafe(AVCaptureTorchModeOn);
+ else if (mode == QCamera::TorchAuto)
+ setAvTorchModeSafe(AVCaptureTorchModeAuto);
+ } else {
qDebugCamera() << Q_FUNC_INFO << "torch is not available at the moment";
- return;
}
-#endif
- if (mode == QCamera::TorchOn)
- captureDevice.torchMode = AVCaptureTorchModeOn;
- else if (mode == QCamera::TorchAuto)
- captureDevice.torchMode = AVCaptureTorchModeAuto;
}
}
}
diff --git a/src/multimedia/platform/darwin/camera/avfmediaencoder.mm b/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
index b669dd960..1cd6ce087 100644
--- a/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
+++ b/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
@@ -52,6 +52,7 @@
#include "private/qmediarecorder_p.h"
#include "private/qdarwinformatsinfo_p.h"
#include "private/qplatformaudiooutput_p.h"
+#include <private/qplatformaudioinput_p.h>
#include <QtCore/qmath.h>
#include <QtCore/qdebug.h>
@@ -135,7 +136,7 @@ void AVFMediaEncoder::updateDuration(qint64 duration)
durationChanged(m_duration);
}
-static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings)
+static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings, const QAudioFormat &format)
{
NSMutableDictionary *settings = [NSMutableDictionary dictionary];
@@ -204,6 +205,10 @@ static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettin
// Channels
int channelCount = encoderSettings.audioChannelCount();
+ // if no channel count is set in the encoder settings,
+ // set it to the device's format channel count
+ if (channelCount <= 0)
+ channelCount = format.channelCount();
bool isChannelCountSupported = false;
if (channelCount > 0) {
std::optional<QList<UInt32>> channelCounts = qt_supported_channel_counts_for_format(codecId);
@@ -221,7 +226,7 @@ static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettin
}
}
-if (isChannelCountSupported && channelCount > 2) {
+ if (isChannelCountSupported && channelCount > 2) {
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
auto channelLayoutTags = qt_supported_channel_layout_tags_for_format(codecId, channelCount);
@@ -233,7 +238,7 @@ if (isChannelCountSupported && channelCount > 2) {
}
}
if (!isChannelCountSupported)
- channelCount = 2;
+ channelCount = 1;
[settings setObject:[NSNumber numberWithInt:channelCount] forKey:AVNumberOfChannelsKey];
if (codecId == kAudioFormatAppleLossless)
@@ -421,7 +426,9 @@ void AVFMediaEncoder::applySettings(QMediaEncoderSettings &settings)
AVFCameraSession *session = m_service->session();
// audio settings
- m_audioSettings = avfAudioSettings(settings);
+ const auto audioInput = m_service->audioInput();
+ const QAudioFormat audioFormat = audioInput ? audioInput->device.preferredFormat() : QAudioFormat();
+ m_audioSettings = avfAudioSettings(settings, audioFormat);
if (m_audioSettings)
[m_audioSettings retain];
diff --git a/src/multimedia/platform/darwin/mediaplayer/avfmediaplayer.mm b/src/multimedia/platform/darwin/mediaplayer/avfmediaplayer.mm
index c4c661464..95d6b922f 100644
--- a/src/multimedia/platform/darwin/mediaplayer/avfmediaplayer.mm
+++ b/src/multimedia/platform/darwin/mediaplayer/avfmediaplayer.mm
@@ -127,6 +127,9 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
{
+ if (!m_session)
+ return;
+
[m_mimeType release];
m_mimeType = [mimeType retain];
@@ -145,18 +148,17 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
__block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];
- __block AVFMediaPlayerObserver *blockSelf = self;
- QPointer<AVFMediaPlayer> session(m_session);
+ __block AVFMediaPlayerObserver *blockSelf = [self retain];
// Tells the asset to load the values of any of the specified keys that are not already loaded.
[asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
^{
dispatch_async( dispatch_get_main_queue(),
^{
- if (session)
- [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
+ [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
[asset release];
[requestedKeys release];
+ [blockSelf release];
});
}];
}
@@ -193,6 +195,9 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
- (void) prepareToPlayAsset:(AVURLAsset *)asset
withKeys:(NSArray *)requestedKeys
{
+ if (!m_session)
+ return;
+
//Make sure that the value of each key has loaded successfully.
for (NSString *thisKey in requestedKeys)
{
diff --git a/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
index 361bf2207..9f024d268 100644
--- a/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
+++ b/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
@@ -188,6 +188,7 @@ bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
break;
case GST_MESSAGE_EOS:
+ m_playbin.setState(GST_STATE_NULL);
finished();
break;
diff --git a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
index e9ac97724..e4bdc6c56 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
@@ -635,6 +635,39 @@ void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement */*uride
}
}
+void QGstreamerMediaPlayer::sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that)
+{
+ Q_UNUSED(uridecodebin)
+ Q_UNUSED(that)
+
+ qCDebug(qLcMediaPlayer) << "Setting up source:" << g_type_name_from_instance((GTypeInstance*)source);
+
+ if (QLatin1String("GstRTSPSrc") == QString::fromUtf8(g_type_name_from_instance((GTypeInstance*)source))) {
+ QGstElement s(source);
+ int latency{40};
+ bool ok{false};
+ int v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_LATENCY")).toUInt(&ok);
+ if (ok)
+ latency = v;
+ qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms";
+ s.set("latency", latency);
+
+ bool drop{true};
+ v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_DROP_ON_LATENCY")).toUInt(&ok);
+ if (ok && v == 0)
+ drop = false;
+ qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop;
+ s.set("drop-on-latency", drop);
+
+ bool retrans{false};
+ v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_DO_RETRANSMISSION")).toUInt(&ok);
+ if (ok && v not_eq 0)
+ retrans = true;
+ qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans;
+ s.set("do-retransmission", retrans);
+ }
+}
+
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;
@@ -691,6 +724,7 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
playerPipeline.add(decoder);
// can't set post-stream-topology to true, as uridecodebin doesn't have the property. Use a hack
decoder.connect("element-added", GCallback(QGstreamerMediaPlayer::uridecodebinElementAddedCallback), this);
+ decoder.connect("source-setup", GCallback(QGstreamerMediaPlayer::sourceSetupCallback), this);
decoder.set("uri", content.toEncoded().constData());
if (m_bufferProgress != 0) {
diff --git a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
index 26c3fa362..387c3334a 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
@@ -143,6 +143,7 @@ private:
void decoderPadAdded(const QGstElement &src, const QGstPad &pad);
void decoderPadRemoved(const QGstElement &src, const QGstPad &pad);
static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child, QGstreamerMediaPlayer *that);
+ static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that);
void parseStreamsAndMetadata();
void connectOutput(TrackSelector &ts);
void removeOutput(TrackSelector &ts);
diff --git a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
index 1a8107889..e1ee28be8 100644
--- a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
@@ -334,6 +334,7 @@ bool QGstVideoRenderer::handleEvent(QMutexLocker<QMutex> *locker)
if (m_sink && !m_flushed)
m_sink->setVideoFrame(QVideoFrame());
m_flushed = true;
+ locker->relock();
}
} else if (m_stop) {
m_stop = false;
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
index 18e6862ff..cf745b1a4 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
@@ -107,9 +107,11 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
auto *devices = static_cast<QGstreamerMediaDevices *>(QGstreamerIntegration::instance()->devices());
auto *device = devices->videoDevice(camera.id());
gstNewCamera = gst_device_create_element(device, "camerasrc");
- QGstStructure properties = gst_device_get_properties(device);
- if (properties.name() == "v4l2deviceprovider")
- m_v4l2Device = QString::fromUtf8(properties["device.path"].toString());
+ if (QGstStructure properties = gst_device_get_properties(device); !properties.isNull()) {
+ if (properties.name() == "v4l2deviceprovider")
+ m_v4l2Device = QString::fromUtf8(properties["device.path"].toString());
+ properties.free();
+ }
}
QCameraFormat f = findBestCameraFormat(camera);
diff --git a/src/multimedia/platform/gstreamer/qgstreamermediadevices.cpp b/src/multimedia/platform/gstreamer/qgstreamermediadevices.cpp
index 6e49057cf..bd75d80c3 100644
--- a/src/multimedia/platform/gstreamer/qgstreamermediadevices.cpp
+++ b/src/multimedia/platform/gstreamer/qgstreamermediadevices.cpp
@@ -136,52 +136,53 @@ QList<QCameraDevice> QGstreamerMediaDevices::videoInputs() const
{
QList<QCameraDevice> devices;
- for (auto *d : qAsConst(m_videoSources)) {
- QGstStructure properties = gst_device_get_properties(d);
- if (!properties.isNull()) {
- QCameraDevicePrivate *info = new QCameraDevicePrivate;
- auto *desc = gst_device_get_display_name(d);
- info->description = QString::fromUtf8(desc);
- g_free(desc);
-
- info->id = properties["device.path"].toString();
+ for (auto device : m_videoSources) {
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ auto *desc = gst_device_get_display_name(device.gstDevice);
+ info->description = QString::fromUtf8(desc);
+ g_free(desc);
+ info->id = device.id;
+
+ if (QGstStructure properties = gst_device_get_properties(device.gstDevice); !properties.isNull()) {
auto def = properties["is-default"].toBool();
info->isDefault = def && *def;
- if (def)
- devices.prepend(info->create());
- else
- devices.append(info->create());
properties.free();
- QGstCaps caps = gst_device_get_caps(d);
- if (!caps.isNull()) {
- QList<QCameraFormat> formats;
- QSet<QSize> photoResolutions;
-
- int size = caps.size();
- for (int i = 0; i < size; ++i) {
- auto cap = caps.at(i);
-
- QSize resolution = cap.resolution();
- if (!resolution.isValid())
- continue;
-
- auto pixelFormat = cap.pixelFormat();
- auto frameRate = cap.frameRateRange();
-
- auto *f = new QCameraFormatPrivate{
- QSharedData(),
- pixelFormat,
- resolution,
- frameRate.min,
- frameRate.max
- };
- formats << f->create();
- photoResolutions.insert(resolution);
- }
- info->videoFormats = formats;
- // ### sort resolutions?
- info->photoResolutions = photoResolutions.values();
+ }
+
+ if (info->isDefault)
+ devices.prepend(info->create());
+ else
+ devices.append(info->create());
+
+ QGstCaps caps = gst_device_get_caps(device.gstDevice);
+ if (!caps.isNull()) {
+ QList<QCameraFormat> formats;
+ QSet<QSize> photoResolutions;
+
+ int size = caps.size();
+ for (int i = 0; i < size; ++i) {
+ auto cap = caps.at(i);
+
+ QSize resolution = cap.resolution();
+ if (!resolution.isValid())
+ continue;
+
+ auto pixelFormat = cap.pixelFormat();
+ auto frameRate = cap.frameRateRange();
+
+ auto *f = new QCameraFormatPrivate{
+ QSharedData(),
+ pixelFormat,
+ resolution,
+ frameRate.min,
+ frameRate.max
+ };
+ formats << f->create();
+ photoResolutions.insert(resolution);
}
+ info->videoFormats = formats;
+ // ### sort resolutions?
+ info->photoResolutions = photoResolutions.values();
}
}
return devices;
@@ -203,8 +204,9 @@ void QGstreamerMediaDevices::addDevice(GstDevice *device)
// qDebug() << "adding device:" << device << type << gst_device_get_display_name(device) << gst_structure_to_string(gst_device_get_properties(device));
gst_object_ref(device);
if (!strcmp(type, "Video/Source")) {
- m_videoSources.insert(device);
+ m_videoSources.push_back({device, QByteArray::number(m_idGenerator)});
videoInputsChanged();
+ m_idGenerator++;
} else if (!strcmp(type, "Audio/Source")) {
m_audioSources.insert(device);
audioInputsChanged();
@@ -220,7 +222,11 @@ void QGstreamerMediaDevices::addDevice(GstDevice *device)
void QGstreamerMediaDevices::removeDevice(GstDevice *device)
{
// qDebug() << "removing device:" << device << gst_device_get_display_name(device);
- if (m_videoSources.remove(device)) {
+ auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
+ [=](const QGstDevice &a) { return a.gstDevice == device; });
+
+ if (it != m_videoSources.end()) {
+ m_videoSources.erase(it);
videoInputsChanged();
} else if (m_audioSources.remove(device)) {
audioInputsChanged();
@@ -259,7 +265,9 @@ GstDevice *QGstreamerMediaDevices::audioDevice(const QByteArray &id, QAudioDevic
GstDevice *QGstreamerMediaDevices::videoDevice(const QByteArray &id) const
{
- return getDevice(m_videoSources, "device.path", id);
+ auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
+ [=](const QGstDevice &a) { return a.id == id; });
+ return it != m_videoSources.end() ? it->gstDevice : nullptr;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/gstreamer/qgstreamermediadevices_p.h b/src/multimedia/platform/gstreamer/qgstreamermediadevices_p.h
index e3f34433f..121e080e6 100644
--- a/src/multimedia/platform/gstreamer/qgstreamermediadevices_p.h
+++ b/src/multimedia/platform/gstreamer/qgstreamermediadevices_p.h
@@ -55,6 +55,7 @@
#include <gst/gst.h>
#include <qset.h>
#include <qaudiodevice.h>
+#include <vector>
QT_BEGIN_NAMESPACE
@@ -76,7 +77,14 @@ public:
GstDevice *videoDevice(const QByteArray &id) const;
private:
- QSet<GstDevice *> m_videoSources;
+ struct QGstDevice {
+ GstDevice *gstDevice = nullptr;
+ QByteArray id;
+ };
+
+ quint64 m_idGenerator = 0;
+ std::vector<QGstDevice> m_videoSources;
+
QSet<GstDevice *> m_audioSources;
QSet<GstDevice *> m_audioSinks;
};
diff --git a/src/multimedia/platform/pulseaudio/qpulseaudiosink.cpp b/src/multimedia/platform/pulseaudio/qpulseaudiosink.cpp
index 0521a5670..ae011ef0e 100644
--- a/src/multimedia/platform/pulseaudio/qpulseaudiosink.cpp
+++ b/src/multimedia/platform/pulseaudio/qpulseaudiosink.cpp
@@ -512,26 +512,29 @@ qint64 QPulseAudioSink::write(const char *data, qint64 len)
pulseEngine->lock();
- len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream)));
+ size_t nbytes = len;
+ void *dest = nullptr;
+
+ if (pa_stream_begin_write(m_stream, &dest, &nbytes) < 0) {
+ qWarning("QAudioSink(pulseaudio): pa_stream_begin_write, error = %s",
+ pa_strerror(pa_context_errno(pulseEngine->context())));
+ setError(QAudio::IOError);
+ return 0;
+ }
+
+ len = qMin(len, qint64(nbytes));
if (m_volume < 1.0f) {
// Don't use PulseAudio volume, as it might affect all other streams of the same category
// or even affect the system volume if flat volumes are enabled
- void *dest = nullptr;
- size_t nbytes = len;
- if (pa_stream_begin_write(m_stream, &dest, &nbytes) < 0) {
- qWarning("QAudioSink(pulseaudio): pa_stream_begin_write, error = %s",
- pa_strerror(pa_context_errno(pulseEngine->context())));
- setError(QAudio::IOError);
- return 0;
- }
-
- len = int(nbytes);
QAudioHelperInternal::qMultiplySamples(m_volume, m_format, data, dest, len);
- data = reinterpret_cast<char *>(dest);
+ } else {
+ memcpy(dest, data, len);
}
- if (pa_stream_write(m_stream, data, len, nullptr, 0, PA_SEEK_RELATIVE) < 0) {
+ data = reinterpret_cast<char *>(dest);
+
+ if ((pa_stream_write(m_stream, data, len, nullptr, 0, PA_SEEK_RELATIVE)) < 0) {
qWarning("QAudioSink(pulseaudio): pa_stream_write, error = %s",
pa_strerror(pa_context_errno(pulseEngine->context())));
setError(QAudio::IOError);
@@ -672,18 +675,15 @@ qint64 PulseOutputPrivate::readData(char *data, qint64 len)
qint64 PulseOutputPrivate::writeData(const char *data, qint64 len)
{
- int retry = 0;
qint64 written = 0;
if ((m_audioDevice->m_deviceState == QAudio::ActiveState
|| m_audioDevice->m_deviceState == QAudio::IdleState)) {
- while(written < len) {
+ while (written < len) {
int chunk = m_audioDevice->write(data+written, (len-written));
if (chunk <= 0)
- retry++;
- written+=chunk;
- if (retry > 10)
return written;
+ written += chunk;
}
}
diff --git a/src/multimedia/platform/pulseaudio/qpulseaudiosource.cpp b/src/multimedia/platform/pulseaudio/qpulseaudiosource.cpp
index d20929078..19e82c092 100644
--- a/src/multimedia/platform/pulseaudio/qpulseaudiosource.cpp
+++ b/src/multimedia/platform/pulseaudio/qpulseaudiosource.cpp
@@ -327,6 +327,12 @@ bool QPulseAudioSource::open()
return false;
}
+// auto *ss = pa_stream_get_sample_spec(m_stream);
+// qDebug() << "connected stream:";
+// qDebug() << " channels" << ss->channels << spec.channels;
+// qDebug() << " format" << ss->format << spec.format;
+// qDebug() << " rate" << ss->rate << spec.rate;
+
while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
@@ -400,16 +406,20 @@ qsizetype QPulseAudioSource::bytesReady() const
qint64 QPulseAudioSource::read(char *data, qint64 len)
{
+ Q_ASSERT(data != nullptr || len == 0);
+
m_bytesAvailable = checkBytesReady();
setError(QAudio::NoError);
- setState(QAudio::ActiveState);
+ if (state() == QAudio::IdleState)
+ setState(QAudio::ActiveState);
int readBytes = 0;
if (!m_pullMode && !m_tempBuffer.isEmpty()) {
readBytes = qMin(static_cast<int>(len), m_tempBuffer.size());
- memcpy(data, m_tempBuffer.constData(), readBytes);
+ if (readBytes)
+ memcpy(data, m_tempBuffer.constData(), readBytes);
m_totalTimeValue += readBytes;
if (readBytes < m_tempBuffer.size()) {
@@ -495,9 +505,10 @@ qint64 QPulseAudioSource::read(char *data, qint64 len)
void QPulseAudioSource::applyVolume(const void *src, void *dest, int len)
{
+ Q_ASSERT((src && dest) || len == 0);
if (m_volume < 1.f)
QAudioHelperInternal::qMultiplySamples(m_volume, m_format, src, dest, len);
- else
+ else if (len)
memcpy(dest, src, len);
}
@@ -549,8 +560,9 @@ qint64 QPulseAudioSource::processedUSecs() const
{
pa_usec_t usecs = 0;
int result = pa_stream_get_time(m_stream, &usecs);
- if (result != 0)
- qWarning() << "no timing info from pulse";
+ Q_UNUSED(result);
+// if (result != 0)
+// qWarning() << "no timing info from pulse";
return usecs;
}
diff --git a/src/multimedia/platform/qnx/common/windowgrabber.cpp b/src/multimedia/platform/qnx/common/windowgrabber.cpp
index e4c8c926d..77588a7a8 100644
--- a/src/multimedia/platform/qnx/common/windowgrabber.cpp
+++ b/src/multimedia/platform/qnx/common/windowgrabber.cpp
@@ -393,8 +393,12 @@ GLuint
WindowGrabberImage::getTexture(screen_window_t window, const QSize &size)
{
if (size != m_size) {
- if (!m_glTexture)
- glGenTextures(1, &m_glTexture);
+ // create a brand new texture to be the KHR image sibling, as
+ // previously used textures cannot be reused with new KHR image
+ // sources - note that glDeleteTextures handles nullptr gracefully
+ glDeleteTextures(1, &m_glTexture);
+ glGenTextures(1, &m_glTexture);
+
glBindTexture(GL_TEXTURE_2D, m_glTexture);
if (m_eglImage) {
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, 0);
diff --git a/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp b/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
index 046a91ab0..39625ed1b 100644
--- a/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
+++ b/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
@@ -200,6 +200,9 @@ void QWindowsAudioSink::start(QIODevice* device)
if (deviceState != QAudio::StoppedState)
close();
+ if (device == nullptr)
+ return;
+
if (!open()) {
errorState = QAudio::OpenError;
emit errorChanged(QAudio::OpenError);
@@ -210,7 +213,7 @@ void QWindowsAudioSink::start(QIODevice* device)
m_timer.disconnect();
m_timer.callOnTimeout(this, &QWindowsAudioSink::pullSource);
- m_timer.start(0);
+ pullSource();
}
qint64 QWindowsAudioSink::push(const char *data, qint64 len)
diff --git a/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp b/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
index d86b27157..8523f9982 100644
--- a/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
+++ b/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
@@ -305,14 +305,11 @@ bool QWindowsAudioSource::open()
if (!QWindowsAudioUtils::formatToWaveFormatExtensible(settings, wfx)) {
qWarning("QAudioSource: open error, invalid format.");
} else if (buffer_size == 0) {
- buffer_size
- = (settings.sampleRate()
- * settings.channelCount()
- * settings.bytesPerSample()
- + 39) / 5;
- period_size = buffer_size / 5;
+ period_size = settings.sampleRate() / 25 * settings.bytesPerFrame();
+ buffer_size = period_size * 5;
} else {
- period_size = buffer_size / 5;
+ if (int bpf = settings.bytesPerFrame())
+ period_size = bpf * (buffer_size / 5 / bpf);
}
if (period_size == 0) {
@@ -645,7 +642,7 @@ bool QWindowsAudioSource::deviceReady()
if(pullMode) {
// reads some audio data and writes it to QIODevice
- read(0, buffer_size);
+ read(0, period_size * (buffer_size / period_size));
} else {
// emits readyRead() so user will call read() on QIODevice to get some audio data
InputPrivate* a = qobject_cast<InputPrivate*>(audioSource);
diff --git a/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp b/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
index 97eae9743..f62ef8ee0 100644
--- a/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
+++ b/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
@@ -55,6 +55,7 @@ const GUID QMM_MF_SD_STREAM_NAME = {0x4f1b099d, 0xd314, 0x41e5, {0xa7, 0x81, 0x7
const GUID QMM_MF_SD_LANGUAGE = {0xaf2180, 0xbdc2, 0x423c, {0xab, 0xca, 0xf5, 0x3, 0x59, 0x3b, 0xc1, 0x21}};
const GUID QMM_KSCATEGORY_VIDEO_CAMERA = {0xe5323777, 0xf976, 0x4f5b, {0x9b, 0x55, 0xb9, 0x46, 0x99, 0xc4, 0x6e, 0x44}};
+const GUID QMM_KSCATEGORY_SENSOR_CAMERA = {0x24e552d7, 0x6523, 0x47f7, {0xa6, 0x47, 0xd3, 0x46, 0x5b, 0xf1, 0xf5, 0xca}};
const GUID QMM_MR_POLICY_VOLUME_SERVICE = {0x1abaa2ac, 0x9d3b, 0x47c6, {0xab, 0x48, 0xc5, 0x95, 0x6, 0xde, 0x78, 0x4d}};
diff --git a/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h b/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
index 173c8f8f0..87ef2272e 100644
--- a/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
+++ b/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
@@ -73,6 +73,7 @@ extern const GUID QMM_MF_SD_STREAM_NAME;
extern const GUID QMM_MF_SD_LANGUAGE;
extern const GUID QMM_KSCATEGORY_VIDEO_CAMERA;
+extern const GUID QMM_KSCATEGORY_SENSOR_CAMERA;
extern const GUID QMM_MR_POLICY_VOLUME_SERVICE;
@@ -82,6 +83,7 @@ extern "C" HRESULT WINAPI MFCreateDeviceSource(IMFAttributes *pAttributes, IMFMe
#define QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT 1
#define QMM_PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff
+#define QMM_WININET_E_CANNOT_CONNECT ((HRESULT)0x80072EFDL)
#ifndef __IMFVideoProcessor_INTERFACE_DEFINED__
#define __IMFVideoProcessor_INTERFACE_DEFINED__
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
index e8b99a09f..d05269976 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
@@ -1052,6 +1052,13 @@ void EVRCustomPresenter::setSink(QVideoSink *sink)
supportedFormatsChanged();
}
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+ m_mutex.lock();
+ m_cropRect = cropRect;
+ m_mutex.unlock();
+}
+
HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
{
// Set the zoom rectangle (ie, the source clipping rectangle).
@@ -1342,13 +1349,30 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I
hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
width = int(HI32(size));
height = int(LO32(size));
- rcOutput.left = 0;
- rcOutput.top = 0;
- rcOutput.right = width;
- rcOutput.bottom = height;
+
+ if (m_cropRect.isValid()) {
+ rcOutput.left = m_cropRect.x();
+ rcOutput.top = m_cropRect.y();
+ rcOutput.right = m_cropRect.x() + m_cropRect.width();
+ rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+ m_sourceRect.left = float(m_cropRect.x()) / width;
+ m_sourceRect.top = float(m_cropRect.y()) / height;
+ m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+ m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+ if (m_mixer)
+ configureMixer(m_mixer);
+ } else {
+ rcOutput.left = 0;
+ rcOutput.top = 0;
+ rcOutput.right = width;
+ rcOutput.bottom = height;
+ }
// Set the geometric aperture, and disable pan/scan.
- displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom);
+ displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+ rcOutput.bottom - rcOutput.top);
hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
if (FAILED(hr))
@@ -1414,7 +1438,7 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
// Initialize the presenter engine with the new media type.
// The presenter engine allocates the samples.
- hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue);
+ hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
if (FAILED(hr))
goto done;
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter_p.h b/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
index 1bf443efa..ec8f414dc 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
@@ -55,6 +55,7 @@
#include <qmutex.h>
#include <qqueue.h>
#include <qevent.h>
+#include <qrect.h>
#include <qvideoframeformat.h>
#include <qvideosink.h>
@@ -273,6 +274,7 @@ public:
void supportedFormatsChanged();
void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
void startSurface();
void stopSurface();
@@ -384,6 +386,7 @@ private:
QVideoSink *m_videoSink;
bool m_canRenderToSurface;
qint64 m_positionOffset; // Seek position in microseconds.
+ QRect m_cropRect; // Video crop rectangle
};
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
index 5a975f09e..65a9e684e 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
@@ -511,7 +511,7 @@ HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
return hr;
}
-HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue, QSize frameSize)
{
if (!format || !m_device)
return MF_E_UNEXPECTED;
@@ -524,6 +524,11 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp
if (FAILED(hr))
return hr;
+ if (frameSize.isValid() && !frameSize.isEmpty()) {
+ width = frameSize.width();
+ height = frameSize.height();
+ }
+
DWORD d3dFormat = 0;
hr = qt_evr_getFourCC(format, &d3dFormat);
if (FAILED(hr))
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h b/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
index f68ae50f1..de0245cb1 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
@@ -52,6 +52,7 @@
//
#include <QMutex>
+#include <QSize>
#include <QVideoFrameFormat>
#include <private/qwindowsiupointer_p.h>
@@ -141,7 +142,7 @@ public:
HRESULT checkFormat(D3DFORMAT format);
UINT refreshRate() const { return m_displayMode.RefreshRate; }
- HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
+ HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue, QSize frameSize);
QVideoFrameFormat videoSurfaceFormat() const { return m_surfaceFormat; }
QVideoFrame makeVideoFrame(IMFSample* sample);
diff --git a/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
index 16414cca5..6f11f6a54 100644
--- a/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
+++ b/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -246,7 +246,7 @@ HRESULT QWindowsMediaDeviceReader::prepareVideoStream(DWORD mediaTypeIndex)
// and the stride, which we need to convert the frame later
hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, m_frameWidth, &m_stride);
if (SUCCEEDED(hr)) {
-
+ m_stride = qAbs(m_stride);
UINT32 frameRateNum, frameRateDen;
hr = MFGetAttributeRatio(m_videoMediaType, MF_MT_FRAME_RATE, &frameRateNum, &frameRateDen);
if (SUCCEEDED(hr)) {
diff --git a/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp b/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
index ee2ba3c6a..8f5daf819 100644
--- a/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
+++ b/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -81,7 +81,7 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
if (m_state != QMediaRecorder::StoppedState)
return;
- m_sessionWasActive = m_mediaDeviceSession->isActive();
+ m_sessionWasActive = m_mediaDeviceSession->isActive() || m_mediaDeviceSession->isActivating();
if (!m_sessionWasActive) {
@@ -140,10 +140,11 @@ void QWindowsMediaEncoder::resume()
void QWindowsMediaEncoder::stop()
{
- if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState)
+ if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState) {
m_mediaDeviceSession->stopRecording();
- if (!m_sessionWasActive)
- m_mediaDeviceSession->setActive(false);
+ if (!m_sessionWasActive)
+ m_mediaDeviceSession->setActive(false);
+ }
}
diff --git a/src/multimedia/platform/windows/player/mfplayersession.cpp b/src/multimedia/platform/windows/player/mfplayersession.cpp
index 3c4fe9929..ebdbff696 100644
--- a/src/multimedia/platform/windows/player/mfplayersession.cpp
+++ b/src/multimedia/platform/windows/player/mfplayersession.cpp
@@ -59,6 +59,7 @@
#include "mfplayersession_p.h"
#include <mferror.h>
#include <nserror.h>
+#include <winerror.h>
#include "private/sourceresolver_p.h"
#include "samplegrabber_p.h"
#include "mftvideo_p.h"
@@ -110,7 +111,7 @@ MFPlayerSession::MFPlayerSession(MFPlayerControl *playerControl)
m_request.rate = 1.0f;
m_audioSampleGrabber = new AudioSampleGrabberCallback;
- m_videoRendererControl = new MFVideoRendererControl;
+ m_videoRendererControl = new MFVideoRendererControl(this);
}
void MFPlayerSession::timeout()
@@ -152,7 +153,7 @@ void MFPlayerSession::close()
m_closing = true;
hr = m_session->Close();
if (SUCCEEDED(hr)) {
- DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent, 100);
+ DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent, 2000);
if (dwWaitResult == WAIT_TIMEOUT) {
qWarning() << "session close time out!";
}
@@ -187,6 +188,7 @@ void MFPlayerSession::close()
CloseHandle(m_hCloseEvent);
m_hCloseEvent = 0;
m_lastPosition = -1;
+ m_position = 0;
}
MFPlayerSession::~MFPlayerSession()
@@ -216,7 +218,8 @@ void MFPlayerSession::load(const QUrl &url, QIODevice *stream)
createSession();
changeStatus(QMediaPlayer::LoadingMedia);
m_sourceResolver->load(url, stream);
- m_updateRoutingOnStart = true;
+ if (url.isLocalFile())
+ m_updateRoutingOnStart = true;
}
positionChanged(position());
}
@@ -240,7 +243,17 @@ void MFPlayerSession::handleSourceError(long hr)
errorCode = QMediaPlayer::FormatError;
errorString = tr("Unsupported media type.");
break;
+ case MF_E_UNSUPPORTED_SCHEME:
+ errorCode = QMediaPlayer::ResourceError;
+ errorString = tr("Unsupported URL scheme.");
+ break;
+ case QMM_WININET_E_CANNOT_CONNECT:
+ errorCode = QMediaPlayer::NetworkError;
+ errorString = tr("A connection with the server could not be established.");
+ break;
default:
+ qWarning() << "handleSourceError:"
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hr);
errorString = tr("Failed to load source.");
break;
}
@@ -283,9 +296,10 @@ void MFPlayerSession::handleMediaSourceReady()
bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
MFPlayerSession::MediaType *type,
QString *name,
- QString *language) const
+ QString *language,
+ GUID *format) const
{
- if (!stream || !type || !name || !language)
+ if (!stream || !type || !name || !language || !format)
return false;
*type = Unknown;
@@ -319,6 +333,13 @@ bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
else if (guidMajorType == MFMediaType_Video)
*type = Video;
}
+
+ IMFMediaType *mediaType = nullptr;
+ if (SUCCEEDED(typeHandler->GetCurrentMediaType(&mediaType))) {
+ mediaType->GetGUID(MF_MT_SUBTYPE, format);
+ mediaType->Release();
+ }
+
typeHandler->Release();
}
@@ -359,8 +380,9 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat
MediaType mediaType = Unknown;
QString streamName;
QString streamLanguage;
+ GUID format = GUID_NULL;
- if (getStreamInfo(streamDesc, &mediaType, &streamName, &streamLanguage)) {
+ if (getStreamInfo(streamDesc, &mediaType, &streamName, &streamLanguage, &format)) {
QPlatformMediaPlayer::TrackType trackType = (mediaType == Audio) ?
QPlatformMediaPlayer::AudioStream : QPlatformMediaPlayer::VideoStream;
@@ -374,6 +396,7 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat
m_trackInfo[trackType].metaData.append(metaData);
m_trackInfo[trackType].nativeIndexes.append(i);
+ m_trackInfo[trackType].format = format;
if (((m_mediaTypes & mediaType) == 0) && selected) { // Check if this type isn't already added
IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc);
@@ -481,20 +504,15 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology
}
auto id = m_audioOutput->device.id();
- if (!id.isEmpty()) {
- QString s = QString::fromUtf8(id);
- hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
- } else {
- //This is the default one that has been inserted in updateEndpoints(),
- //so give the activate a hint that we want to use the device for multimedia playback
- //then the media foundation will choose an appropriate one.
-
- //from MSDN:
- //The ERole enumeration defines constants that indicate the role that the system has assigned to an audio endpoint device.
- //eMultimedia: Music, movies, narration, and live music recording.
- hr = activate->SetUINT32(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ROLE, eMultimedia);
+ if (id.isEmpty()) {
+ qWarning() << "No audio output";
+ activate->Release();
+ node->Release();
+ return NULL;
}
+ QString s = QString::fromUtf8(id);
+ hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
if (FAILED(hr)) {
qWarning() << "Failed to set attribute for audio device" << m_audioOutput->device.description();
activate->Release();
@@ -504,6 +522,12 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology
}
} else if (mediaType == Video) {
activate = m_videoRendererControl->createActivate();
+
+ QSize resolution = m_metaData.value(QMediaMetaData::Resolution).toSize();
+
+ if (resolution.isValid())
+ m_videoRendererControl->setCropRect(QRect(QPoint(), resolution));
+
} else {
// Unknown stream type.
emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
@@ -1569,8 +1593,13 @@ ULONG MFPlayerSession::AddRef(void)
ULONG MFPlayerSession::Release(void)
{
LONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
+ if (cRef == 0) {
this->deleteLater();
+
+ // In rare cases the session has queued events to be run between deleteLater and deleting,
+ // so we set the parent control to nullptr in order to prevent crashes in the cases.
+ m_playerControl = nullptr;
+ }
return cRef;
}
@@ -1654,8 +1683,25 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
break;
}
changeStatus(QMediaPlayer::InvalidMedia);
- qWarning() << "handleSessionEvent: serious error = " << hrStatus;
- emit error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
+ qWarning() << "handleSessionEvent: serious error = "
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hrStatus);
+ switch (hrStatus) {
+ case MF_E_NET_READ:
+ emit error(QMediaPlayer::NetworkError, tr("Error reading from the network."), true);
+ break;
+ case MF_E_NET_WRITE:
+ emit error(QMediaPlayer::NetworkError, tr("Error writing to the network."), true);
+ break;
+ case NS_E_FIREWALL:
+ emit error(QMediaPlayer::NetworkError, tr("Network packets might be blocked by a firewall."), true);
+ break;
+ case MF_E_MEDIAPROC_WRONGSTATE:
+ emit error(QMediaPlayer::ResourceError, tr("Media session state error."), true);
+ break;
+ default:
+ emit error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
+ break;
+ }
break;
case MESessionRateChanged:
// If the rate change succeeded, we've already got the rate
@@ -1793,8 +1839,7 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
if (SUCCEEDED(MFGetService(m_session, MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_rateControl)))) {
if (SUCCEEDED(MFGetService(m_session, MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_rateSupport)))) {
- if ((m_mediaTypes & Video) == Video
- && SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
+ if (SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
m_canScrub = true;
}
BOOL isThin = FALSE;
@@ -1903,6 +1948,7 @@ void MFPlayerSession::clear()
m_trackInfo[i].currentIndex = -1;
m_trackInfo[i].sourceNodeId = TOPOID(-1);
m_trackInfo[i].outputNodeId = TOPOID(-1);
+ m_trackInfo[i].format = GUID_NULL;
}
if (!m_metaData.isEmpty()) {
@@ -1981,6 +2027,11 @@ void MFPlayerSession::setActiveTrack(QPlatformMediaPlayer::TrackType type, int i
if (index < -1 || index >= nativeIndexes.count())
return;
+ // Updating the topology fails if there is a HEVC video stream,
+ // which causes other issues. Ignoring the change, for now.
+ if (m_trackInfo[QPlatformMediaPlayer::VideoStream].format == MFVideoFormat_HEVC)
+ return;
+
IMFTopology *topology = nullptr;
if (SUCCEEDED(m_session->GetFullTopology(QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT, 0, &topology))) {
diff --git a/src/multimedia/platform/windows/player/mfplayersession_p.h b/src/multimedia/platform/windows/player/mfplayersession_p.h
index 8f1af9f8b..cb5e48a6c 100644
--- a/src/multimedia/platform/windows/player/mfplayersession_p.h
+++ b/src/multimedia/platform/windows/player/mfplayersession_p.h
@@ -243,6 +243,7 @@ private:
int currentIndex = -1;
TOPOID sourceNodeId = -1;
TOPOID outputNodeId = -1;
+ GUID format = GUID_NULL;
};
TrackInfo m_trackInfo[QPlatformMediaPlayer::NTrackTypes];
@@ -258,7 +259,7 @@ private:
void createSession();
void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD);
- bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language) const;
+ bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language, GUID *format) const;
IMFTopologyNode* addSourceNode(IMFTopology* topology, IMFMediaSource* source,
IMFPresentationDescriptor* presentationDesc, IMFStreamDescriptor *streamDesc);
IMFTopologyNode* addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID);
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
index 0c028933c..0330bbef9 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
@@ -2136,10 +2136,12 @@ public:
STDMETHODIMP DetachObject() override;
void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
private:
EVRCustomPresenter *m_presenter;
QVideoSink *m_videoSink;
+ QRect m_cropRect;
QMutex m_mutex;
};
@@ -2192,6 +2194,12 @@ void MFVideoRendererControl::setSink(QVideoSink *sink)
static_cast<VideoRendererActivate*>(m_currentActivate)->setSink(m_sink);
}
+void MFVideoRendererControl::setCropRect(QRect cropRect)
+{
+ if (m_presenterActivate)
+ m_presenterActivate->setCropRect(cropRect);
+}
+
void MFVideoRendererControl::customEvent(QEvent *event)
{
if (m_presenterActivate)
@@ -2261,6 +2269,7 @@ HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
QMutexLocker locker(&m_mutex);
if (!m_presenter) {
m_presenter = new EVRCustomPresenter(m_videoSink);
+ m_presenter->setCropRect(m_cropRect);
}
return m_presenter->QueryInterface(riid, ppv);
}
@@ -2294,5 +2303,17 @@ void EVRCustomPresenterActivate::setSink(QVideoSink *sink)
m_presenter->setSink(sink);
}
+void EVRCustomPresenterActivate::setCropRect(QRect cropRect)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_cropRect == cropRect)
+ return;
+
+ m_cropRect = cropRect;
+
+ if (m_presenter)
+ m_presenter->setCropRect(cropRect);
+}
+
#include "moc_mfvideorenderercontrol_p.cpp"
#include "mfvideorenderercontrol.moc"
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h b/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
index 9b48803d9..09c16326e 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
@@ -52,6 +52,7 @@
//
#include "qobject.h"
+#include <qrect.h>
#include <mfapi.h>
#include <mfidl.h>
@@ -73,6 +74,8 @@ public:
QVideoSink *sink() const;
void setSink(QVideoSink *surface);
+ void setCropRect(QRect cropRect);
+
IMFActivate* createActivate();
void releaseActivate();
diff --git a/src/multimedia/platform/windows/qwindowsmediadevices.cpp b/src/multimedia/platform/windows/qwindowsmediadevices.cpp
index 61a389ca0..06eab2d9d 100644
--- a/src/multimedia/platform/windows/qwindowsmediadevices.cpp
+++ b/src/multimedia/platform/windows/qwindowsmediadevices.cpp
@@ -64,6 +64,8 @@
#include <private/qwindowsaudioutils_p.h>
#include <private/qwindowsmfdefs_p.h>
+#include <QtCore/qmap.h>
+
QT_BEGIN_NAMESPACE
class CMMNotificationClient : public IMMNotificationClient
@@ -393,126 +395,130 @@ QList<QAudioDevice> QWindowsMediaDevices::audioOutputs() const
return availableDevices(QAudioDevice::Output);
}
-QList<QCameraDevice> QWindowsMediaDevices::videoInputs() const
+static std::optional<QCameraFormat> createCameraFormat(IMFMediaType *mediaFormat)
{
- QList<QCameraDevice> cameras;
+ GUID subtype = GUID_NULL;
+ if (FAILED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ return {};
+
+ auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ return {};
+
+ UINT32 width = 0u;
+ UINT32 height = 0u;
+ if (FAILED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width, &height)))
+ return {};
+ QSize resolution{ int(width), int(height) };
- IMFAttributes *pAttributes = NULL;
- IMFActivate **ppDevices = NULL;
+ UINT32 num = 0u;
+ UINT32 den = 0u;
+ float minFr = 0.f;
+ float maxFr = 0.f;
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN, &num, &den)))
+ minFr = float(num) / float(den);
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX, &num, &den)))
+ maxFr = float(num) / float(den);
+
+ auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution, minFr, maxFr };
+ return f->create();
+}
- // Create an attribute store to specify the enumeration parameters.
- HRESULT hr = MFCreateAttributes(&pAttributes, 1);
+static QString getString(IMFActivate *device, const IID &id)
+{
+ WCHAR *str = NULL;
+ UINT32 length = 0;
+ HRESULT hr = device->GetAllocatedString(id, &str, &length);
if (SUCCEEDED(hr)) {
- // Source type: video capture devices
- hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
-
- if (SUCCEEDED(hr)) {
- // Enumerate devices.
- UINT32 count;
- hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
- if (SUCCEEDED(hr)) {
- // Iterate through devices.
- for (int index = 0; index < int(count); index++) {
- QCameraDevicePrivate *info = new QCameraDevicePrivate;
-
- IMFMediaSource *pSource = NULL;
- IMFSourceReader *reader = NULL;
-
- WCHAR *deviceName = NULL;
- UINT32 deviceNameLength = 0;
- UINT32 deviceIdLength = 0;
- WCHAR *deviceId = NULL;
-
- hr = ppDevices[index]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
- &deviceName, &deviceNameLength);
- if (SUCCEEDED(hr))
- info->description = QString::fromWCharArray(deviceName);
- CoTaskMemFree(deviceName);
-
- hr = ppDevices[index]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &deviceId,
- &deviceIdLength);
- if (SUCCEEDED(hr))
- info->id = QString::fromWCharArray(deviceId).toUtf8();
- CoTaskMemFree(deviceId);
-
- // Create the media source object.
- hr = ppDevices[index]->ActivateObject(
- IID_PPV_ARGS(&pSource));
- // Create the media source reader.
- hr = MFCreateSourceReaderFromMediaSource(pSource, NULL, &reader);
- if (SUCCEEDED(hr)) {
- QList<QSize> photoResolutions;
- QList<QCameraFormat> videoFormats;
-
- DWORD dwMediaTypeIndex = 0;
- IMFMediaType *mediaFormat = NULL;
- GUID subtype = GUID_NULL;
- HRESULT mediaFormatResult = S_OK;
-
- UINT32 frameRateMin = 0u;
- UINT32 frameRateMax = 0u;
- UINT32 denominator = 0u;
- UINT32 width = 0u;
- UINT32 height = 0u;
-
- while (SUCCEEDED(mediaFormatResult)) {
- // Loop through the supported formats for the video device
- mediaFormatResult = reader->GetNativeMediaType(
- (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, dwMediaTypeIndex,
- &mediaFormat);
- if (mediaFormatResult == MF_E_NO_MORE_TYPES)
- break;
- else if (SUCCEEDED(mediaFormatResult)) {
- QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
- QSize resolution;
- float minFr = .0;
- float maxFr = .0;
-
- if (SUCCEEDED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
- pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
-
- if (SUCCEEDED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width,
- &height))) {
- resolution.rheight() = (int)height;
- resolution.rwidth() = (int)width;
- photoResolutions << resolution;
- }
-
- if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN,
- &frameRateMin, &denominator)))
- minFr = qreal(frameRateMin) / denominator;
- if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX,
- &frameRateMax, &denominator)))
- maxFr = qreal(frameRateMax) / denominator;
-
- auto *f = new QCameraFormatPrivate { QSharedData(), pixelFormat,
- resolution, minFr, maxFr };
- videoFormats << f->create();
- }
- ++dwMediaTypeIndex;
- }
- if (mediaFormat)
- mediaFormat->Release();
-
- info->videoFormats = videoFormats;
- info->photoResolutions = photoResolutions;
- }
- if (reader)
- reader->Release();
- cameras.append(info->create());
- }
- }
- for (DWORD i = 0; i < count; i++) {
- if (ppDevices[i])
- ppDevices[i]->Release();
+ auto qstr = QString::fromWCharArray(str);
+ CoTaskMemFree(str);
+ return qstr;
+ } else {
+ return {};
+ }
+}
+
+static std::optional<QCameraDevice> createCameraDevice(IMFActivate *device)
+{
+ auto info = std::make_unique<QCameraDevicePrivate>();
+ info->description = getString(device, MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME);
+ info->id = getString(device, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK).toUtf8();
+
+ IMFMediaSource *source = NULL;
+ HRESULT hr = device->ActivateObject(IID_PPV_ARGS(&source));
+ if (FAILED(hr))
+ return {};
+
+ QWindowsIUPointer<IMFSourceReader> reader;
+ hr = MFCreateSourceReaderFromMediaSource(source, NULL, reader.address());
+ if (FAILED(hr))
+ return {};
+
+ QList<QSize> photoResolutions;
+ QList<QCameraFormat> videoFormats;
+ for (DWORD i = 0;; ++i) {
+ // Loop through the supported formats for the video device
+ QWindowsIUPointer<IMFMediaType> mediaFormat;
+ hr = reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i,
+ mediaFormat.address());
+ if (FAILED(hr))
+ break;
+
+ auto maybeCamera = createCameraFormat(mediaFormat.get());
+ if (maybeCamera) {
+ videoFormats << *maybeCamera;
+ photoResolutions << maybeCamera->resolution();
+ }
+ }
+
+ info->videoFormats = videoFormats;
+ info->photoResolutions = photoResolutions;
+ return info.release()->create();
+}
+
+static QList<QCameraDevice> readCameraDevices(IMFAttributes *attr)
+{
+ QList<QCameraDevice> cameras;
+ UINT32 count = 0;
+ IMFActivate **devices = NULL;
+ HRESULT hr = MFEnumDeviceSources(attr, &devices, &count);
+ if (SUCCEEDED(hr)) {
+ for (UINT32 i = 0; i < count; i++) {
+ IMFActivate *device = devices[i];
+ if (device) {
+ auto maybeCamera = createCameraDevice(device);
+ if (maybeCamera)
+ cameras << *maybeCamera;
+
+ device->Release();
}
- CoTaskMemFree(ppDevices);
}
+ CoTaskMemFree(devices);
+ }
+ return cameras;
+}
+
+QList<QCameraDevice> QWindowsMediaDevices::videoInputs() const
+{
+ QList<QCameraDevice> cameras;
+
+ QWindowsIUPointer<IMFAttributes> attr;
+ HRESULT hr = MFCreateAttributes(attr.address(), 2);
+ if (FAILED(hr))
+ return {};
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ if (SUCCEEDED(hr)) {
+ cameras << readCameraDevices(attr.get());
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+ QMM_KSCATEGORY_SENSOR_CAMERA);
+ if (SUCCEEDED(hr))
+ cameras << readCameraDevices(attr.get());
}
- if (pAttributes)
- pAttributes->Release();
return cameras;
}
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index 5c11d8e81..ffa683642 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -195,7 +195,14 @@ void QMediaPlayerPrivate::setMedia(const QUrl &media, QIODevice *stream)
tempFile->setFileTemplate(tempFile->fileTemplate() + QLatin1Char('.') + suffix);
// Copy the qrc data into the temporary file
- tempFile->open();
+ if (!tempFile->open()) {
+ control->setMedia(QUrl(), nullptr);
+ control->mediaStatusChanged(QMediaPlayer::InvalidMedia);
+ control->error(QMediaPlayer::ResourceError, tempFile->errorString());
+ delete tempFile;
+ qrcFile.reset();
+ return;
+ }
char buffer[4096];
while (true) {
qint64 len = file->read(buffer, sizeof(buffer));
diff --git a/src/multimedia/playback/qmediaplayer.h b/src/multimedia/playback/qmediaplayer.h
index 0ccb0eaaf..d3e0139f6 100644
--- a/src/multimedia/playback/qmediaplayer.h
+++ b/src/multimedia/playback/qmediaplayer.h
@@ -146,9 +146,6 @@ public:
void setVideoOutput(QObject *);
QObject *videoOutput() const;
-#if 0
- void setVideoOutput(const QList<QVideoSink *> &sinks);
-#endif
void setVideoSink(QVideoSink *sink);
QVideoSink *videoSink() const;
diff --git a/src/multimedia/qmediaformat.h b/src/multimedia/qmediaformat.h
index f72dcb597..cff960514 100644
--- a/src/multimedia/qmediaformat.h
+++ b/src/multimedia/qmediaformat.h
@@ -135,10 +135,10 @@ public:
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QMediaFormat)
void swap(QMediaFormat &other) noexcept
{
- qSwap(fmt, other.fmt);
- qSwap(audio, other.audio);
- qSwap(video, other.video);
- qSwap(d, other.d);
+ std::swap(fmt, other.fmt);
+ std::swap(audio, other.audio);
+ std::swap(video, other.video);
+ d.swap(other.d);
}
FileFormat fileFormat() const { return fmt; }
diff --git a/src/multimedia/qmediatimerange.h b/src/multimedia/qmediatimerange.h
index 2a8e948c3..8fcef3be8 100644
--- a/src/multimedia/qmediatimerange.h
+++ b/src/multimedia/qmediatimerange.h
@@ -107,7 +107,7 @@ public:
QMediaTimeRange(QMediaTimeRange &&other) noexcept = default;
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QMediaTimeRange)
void swap(QMediaTimeRange &other) noexcept
- { qSwap(d, other.d); }
+ { d.swap(other.d); }
void detach();
QMediaTimeRange &operator=(const Interval&);
diff --git a/src/multimedia/recording/qmediarecorder.cpp b/src/multimedia/recording/qmediarecorder.cpp
index 80c53ee46..7ea934158 100644
--- a/src/multimedia/recording/qmediarecorder.cpp
+++ b/src/multimedia/recording/qmediarecorder.cpp
@@ -106,14 +106,14 @@ QT_BEGIN_NAMESPACE
\qml
CameraButton {
text: "Record"
- visible: recorder.status !== MediaRecorder.RecordingStatus
+ visible: recorder.recorderState !== MediaRecorder.RecordingState
onClicked: recorder.record()
}
CameraButton {
id: stopButton
text: "Stop"
- visible: recorder.status === MediaRecorder.RecordingStatus
+ visible: recorder.recorderState === MediaRecorder.RecordingState
onClicked: recorder.stop()
}
\endqml
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index d87331d89..454496353 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -92,7 +92,7 @@ public:
QVideoFrame(QVideoFrame &&other) noexcept = default;
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QVideoFrame)
void swap(QVideoFrame &other) noexcept
- { qSwap(d, other.d); }
+ { d.swap(other.d); }
QVideoFrame &operator =(const QVideoFrame &other);
diff --git a/src/multimedia/video/qvideoframeformat.h b/src/multimedia/video/qvideoframeformat.h
index 6983b6581..f92d8e614 100644
--- a/src/multimedia/video/qvideoframeformat.h
+++ b/src/multimedia/video/qvideoframeformat.h
@@ -128,7 +128,7 @@ public:
QVideoFrameFormat(QVideoFrameFormat &&other) noexcept = default;
QT_MOVE_ASSIGNMENT_OPERATOR_IMPL_VIA_PURE_SWAP(QVideoFrameFormat);
void swap(QVideoFrameFormat &other) noexcept
- { qSwap(d, other.d); }
+ { d.swap(other.d); }
void detach();
diff --git a/src/multimedia/video/qvideooutputorientationhandler.cpp b/src/multimedia/video/qvideooutputorientationhandler.cpp
index 2a6781b47..86e6bd664 100644
--- a/src/multimedia/video/qvideooutputorientationhandler.cpp
+++ b/src/multimedia/video/qvideooutputorientationhandler.cpp
@@ -51,6 +51,8 @@ QVideoOutputOrientationHandler::QVideoOutputOrientationHandler(QObject *parent)
, m_currentOrientation(0)
{
QScreen *screen = QGuiApplication::primaryScreen();
+ if (!screen)
+ return;
connect(screen, SIGNAL(orientationChanged(Qt::ScreenOrientation)),
this, SLOT(screenOrientationChanged(Qt::ScreenOrientation)));
@@ -69,6 +71,8 @@ void QVideoOutputOrientationHandler::screenOrientationChanged(Qt::ScreenOrientat
return;
const QScreen *screen = QGuiApplication::primaryScreen();
+ if (!screen)
+ return;
const int angle = (360 - screen->angleBetween(screen->nativeOrientation(), orientation)) % 360;
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index f70763157..19e7093c8 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -122,6 +122,7 @@ QVideoSink::QVideoSink(QObject *parent)
*/
QVideoSink::~QVideoSink()
{
+ disconnect(this);
d->unregisterSource();
delete d;
}
diff --git a/src/multimediaquick/qquickvideooutput.cpp b/src/multimediaquick/qquickvideooutput.cpp
index fe2119e61..bbaa42e9f 100644
--- a/src/multimediaquick/qquickvideooutput.cpp
+++ b/src/multimediaquick/qquickvideooutput.cpp
@@ -356,7 +356,7 @@ QRectF QQuickVideoOutput::sourceRect() const
if (!size.isValid())
return {};
- if (!qIsDefaultAspect(m_orientation))
+ if (!qIsDefaultAspect(m_orientation + m_frameOrientation))
size.transpose();
diff --git a/src/multimediaquick/qsgvideonode_p.cpp b/src/multimediaquick/qsgvideonode_p.cpp
index 000bda7b9..4a6d3113a 100644
--- a/src/multimediaquick/qsgvideonode_p.cpp
+++ b/src/multimediaquick/qsgvideonode_p.cpp
@@ -227,8 +227,10 @@ QSGVideoMaterial::QSGVideoMaterial(const QVideoFrameFormat &format) :
QSGVideoNode::QSGVideoNode(QQuickVideoOutput *parent, const QVideoFrameFormat &format)
: m_parent(parent),
- m_orientation(-1),
- m_format(format)
+ m_orientation(-1),
+ m_frameOrientation(-1),
+ m_frameMirrored(false),
+ m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
setFlag(QSGNode::OwnsGeometry);
@@ -303,12 +305,24 @@ void QSGVideoNode::setSubtitleGeometry()
/* Update the vertices and texture coordinates. Orientation must be in {0,90,180,270} */
void QSGVideoNode::setTexturedRectGeometry(const QRectF &rect, const QRectF &textureRect, int orientation)
{
- if (rect == m_rect && textureRect == m_textureRect && orientation == m_orientation)
+ bool frameChanged = false;
+ if (m_material) {
+ if (m_material->m_currentFrame.rotationAngle() != m_frameOrientation
+ || m_material->m_currentFrame.mirrored() != m_frameMirrored) {
+ frameChanged = true;
+ }
+ }
+ if (rect == m_rect && textureRect == m_textureRect && orientation == m_orientation
+ && !frameChanged)
return;
m_rect = rect;
m_textureRect = textureRect;
m_orientation = orientation;
+ if (m_material) {
+ m_frameOrientation = m_material->m_currentFrame.rotationAngle();
+ m_frameMirrored = m_material->m_currentFrame.mirrored();
+ }
int videoRotation = orientation;
videoRotation += m_material ? m_material->m_currentFrame.rotationAngle() : 0;
videoRotation %= 360;
diff --git a/src/multimediaquick/qsgvideonode_p.h b/src/multimediaquick/qsgvideonode_p.h
index d62248b7c..6508581a7 100644
--- a/src/multimediaquick/qsgvideonode_p.h
+++ b/src/multimediaquick/qsgvideonode_p.h
@@ -86,6 +86,8 @@ private:
QRectF m_rect;
QRectF m_textureRect;
int m_orientation;
+ int m_frameOrientation;
+ bool m_frameMirrored;
QVideoFrameFormat m_format;
QSGVideoMaterial *m_material;
diff --git a/src/multimediaquick/qtmultimediaquicktypes_p.h b/src/multimediaquick/qtmultimediaquicktypes_p.h
index 18ee97bfa..a9c1c757e 100644
--- a/src/multimediaquick/qtmultimediaquicktypes_p.h
+++ b/src/multimediaquick/qtmultimediaquicktypes_p.h
@@ -74,8 +74,8 @@ struct QCameraForeign
struct QImageCaptureForeign
{
Q_GADGET
+ QML_ANONYMOUS
QML_FOREIGN(QImageCapture)
- QML_NAMED_ELEMENT(ImageCapture)
};
struct QMediaRecorderForeign
diff --git a/src/multimediawidgets/qvideowidget.cpp b/src/multimediawidgets/qvideowidget.cpp
index 511489dd9..313bac477 100644
--- a/src/multimediawidgets/qvideowidget.cpp
+++ b/src/multimediawidgets/qvideowidget.cpp
@@ -142,6 +142,8 @@ void QVideoWidget::setAspectRatioMode(Qt::AspectRatioMode mode)
void QVideoWidget::setFullScreen(bool fullScreen)
{
Q_D(QVideoWidget);
+ if (isFullScreen() == fullScreen)
+ return;
Qt::WindowFlags flags = windowFlags();
@@ -167,7 +169,6 @@ void QVideoWidget::setFullScreen(bool fullScreen)
move(d_ptr->nonFullscreenPos);
d_ptr->nonFullscreenPos = {};
}
- d->wasFullScreen = fullScreen;
}
/*!
@@ -201,12 +202,10 @@ bool QVideoWidget::event(QEvent *event)
Q_D(QVideoWidget);
if (event->type() == QEvent::WindowStateChange) {
- if (windowState() & Qt::WindowFullScreen) {
- if (!d->wasFullScreen)
- emit fullScreenChanged(d->wasFullScreen = true);
- } else {
- if (d->wasFullScreen)
- emit fullScreenChanged(d->wasFullScreen = false);
+ bool fullScreen = bool(windowState() & Qt::WindowFullScreen);
+ if (fullScreen != d->wasFullScreen) {
+ emit fullScreenChanged(fullScreen);
+ d->wasFullScreen = fullScreen;
}
}
diff --git a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
index d7031ee70..3526cebd0 100644
--- a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
+++ b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
@@ -65,6 +65,7 @@ private slots:
private:
bool isWavSupported();
+ QUrl testFileUrl(const QString filePath);
};
void tst_QAudioDecoderBackend::init()
@@ -91,6 +92,24 @@ bool tst_QAudioDecoderBackend::isWavSupported()
#endif
}
+QUrl tst_QAudioDecoderBackend::testFileUrl(const QString filePath)
+{
+ QUrl url;
+#ifndef Q_OS_ANDROID
+ QFileInfo fileInfo(QFINDTESTDATA(filePath));
+ url = QUrl::fromLocalFile(fileInfo.absoluteFilePath());
+#else
+ QFile file(":/" + filePath);
+ QTemporaryFile *temporaryFile = nullptr;
+ if (file.open(QIODevice::ReadOnly)) {
+ temporaryFile = QTemporaryFile::createNativeFile(file);
+ url = QUrl(temporaryFile->fileName());
+ }
+ temporaryFile->deleteLater();
+#endif
+ return url;
+}
+
void tst_QAudioDecoderBackend::fileTest()
{
if (!isWavSupported())
@@ -110,8 +129,7 @@ void tst_QAudioDecoderBackend::fileTest()
QVERIFY(d.audioFormat() == QAudioFormat());
// Test local file
- QFileInfo fileInfo(QFINDTESTDATA(TEST_FILE_NAME));
- QUrl url = QUrl::fromLocalFile(fileInfo.absoluteFilePath());
+ QUrl url = testFileUrl(TEST_FILE_NAME);
d.setSource(url);
QVERIFY(!d.isDecoding());
QVERIFY(!d.bufferAvailable());
@@ -126,7 +144,7 @@ void tst_QAudioDecoderBackend::fileTest()
QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
d.start();
- QTRY_VERIFY(d.isDecoding());
+
QTRY_VERIFY(!isDecodingSpy.isEmpty());
QTRY_VERIFY(!readySpy.isEmpty());
QTRY_VERIFY(!bufferChangedSpy.isEmpty());
@@ -192,6 +210,9 @@ void tst_QAudioDecoderBackend::fileTest()
finishedSpy.clear();
positionSpy.clear();
+#ifdef Q_OS_ANDROID
+ QSKIP("Setting a desired audio format is not yet supported on Android", QTest::SkipSingle);
+#endif
// change output audio format
QAudioFormat format;
format.setChannelCount(2);
@@ -366,8 +387,7 @@ void tst_QAudioDecoderBackend::corruptedFileTest()
QVERIFY(d.audioFormat() == QAudioFormat());
// Test local file
- QFileInfo fileInfo(QFINDTESTDATA(TEST_CORRUPTED_FILE_NAME));
- QUrl url = QUrl::fromLocalFile(fileInfo.absoluteFilePath());
+ QUrl url = testFileUrl(TEST_CORRUPTED_FILE_NAME);
d.setSource(url);
QVERIFY(!d.isDecoding());
QVERIFY(!d.bufferAvailable());
@@ -421,7 +441,6 @@ void tst_QAudioDecoderBackend::corruptedFileTest()
QVERIFY(positionSpy.isEmpty());
QVERIFY(durationSpy.isEmpty());
-
d.stop();
QTRY_VERIFY(!d.isDecoding());
QCOMPARE(d.duration(), qint64(-1));
@@ -543,9 +562,12 @@ void tst_QAudioDecoderBackend::deviceTest()
QVERIFY(d.bufferAvailable() == false);
QCOMPARE(d.source(), QString(""));
QVERIFY(d.audioFormat() == QAudioFormat());
-
+#ifndef Q_OS_ANDROID
QFileInfo fileInfo(QFINDTESTDATA(TEST_FILE_NAME));
QFile file(fileInfo.absoluteFilePath());
+#else
+ QFile file(":/" TEST_FILE_NAME);
+#endif
QVERIFY(file.open(QIODevice::ReadOnly));
d.setSourceDevice(&file);
@@ -557,7 +579,6 @@ void tst_QAudioDecoderBackend::deviceTest()
d.start();
- QTRY_VERIFY(d.isDecoding());
QTRY_VERIFY(!isDecodingSpy.isEmpty());
QTRY_VERIFY(!readySpy.isEmpty());
QTRY_VERIFY(!bufferChangedSpy.isEmpty());
@@ -617,6 +638,9 @@ void tst_QAudioDecoderBackend::deviceTest()
finishedSpy.clear();
positionSpy.clear();
+#ifdef Q_OS_ANDROID
+ QSKIP("Setting a desired audio format is not yet supported on Android", QTest::SkipSingle);
+#endif
// Now try changing formats
QAudioFormat format;
format.setChannelCount(2);
diff --git a/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp b/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
index e966498de..db17a500d 100644
--- a/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
+++ b/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
@@ -509,7 +509,7 @@ void tst_QAudioSource::push()
QCOMPARE(waveDecoder.size(), QWaveDecoder::headerLength());
// Set a large buffer to avoid underruns during QTest::qWaits
- audioInput.setBufferSize(audioFormat.bytesForDuration(1000000));
+ audioInput.setBufferSize(audioFormat.bytesForDuration(100000));
QIODevice* feed = audioInput.start();
@@ -572,7 +572,7 @@ void tst_QAudioSource::pushSuspendResume()
QFETCH(QAudioFormat, audioFormat);
QAudioSource audioInput(audioFormat, this);
- audioInput.setBufferSize(audioFormat.bytesForDuration(1000000));
+ audioInput.setBufferSize(audioFormat.bytesForDuration(100000));
QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
diff --git a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
index afce5052f..aae70cb43 100644
--- a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
+++ b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
@@ -410,6 +410,7 @@ void tst_QMediaPlayerBackend::playPauseStop()
//ensure the position is reset to 0 at stop and positionChanged(0) is emitted
QTRY_COMPARE(player.position(), qint64(0));
+ QTRY_VERIFY(positionSpy.count() > 0);
QCOMPARE(positionSpy.last()[0].value<qint64>(), qint64(0));
QVERIFY(player.duration() > 0);
@@ -446,9 +447,12 @@ void tst_QMediaPlayerBackend::playPauseStop()
player.setSource(localWavFile);
QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::LoadedMedia);
+ QTRY_VERIFY(statusSpy.count() > 0);
QCOMPARE(statusSpy.last()[0].value<QMediaPlayer::MediaStatus>(), QMediaPlayer::LoadedMedia);
QCOMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+ QTRY_VERIFY(stateSpy.count() > 0);
QCOMPARE(stateSpy.last()[0].value<QMediaPlayer::PlaybackState>(), QMediaPlayer::StoppedState);
+ QTRY_VERIFY(positionSpy.count() > 0);
QCOMPARE(player.position(), 0);
QCOMPARE(positionSpy.last()[0].value<qint64>(), 0);
@@ -463,9 +467,12 @@ void tst_QMediaPlayerBackend::playPauseStop()
player.setSource(QUrl());
QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::NoMedia);
+ QTRY_VERIFY(statusSpy.count() > 0);
QCOMPARE(statusSpy.last()[0].value<QMediaPlayer::MediaStatus>(), QMediaPlayer::NoMedia);
QCOMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+ QTRY_VERIFY(stateSpy.count() > 0);
QCOMPARE(stateSpy.last()[0].value<QMediaPlayer::PlaybackState>(), QMediaPlayer::StoppedState);
+ QTRY_VERIFY(positionSpy.count() > 0);
QCOMPARE(player.position(), 0);
QCOMPARE(positionSpy.last()[0].value<qint64>(), 0);
QCOMPARE(player.duration(), 0);