summaryrefslogtreecommitdiffstats
path: root/src/plugins/multimedia/windows
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/multimedia/windows')
-rw-r--r--src/plugins/multimedia/windows/CMakeLists.txt69
-rw-r--r--src/plugins/multimedia/windows/common/mfmetadata.cpp408
-rw-r--r--src/plugins/multimedia/windows/common/mfmetadata_p.h30
-rw-r--r--src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp225
-rw-r--r--src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h75
-rw-r--r--src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp103
-rw-r--r--src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h63
-rw-r--r--src/plugins/multimedia/windows/evr/evrcustompresenter.cpp1849
-rw-r--r--src/plugins/multimedia/windows/evr/evrcustompresenter_p.h357
-rw-r--r--src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp699
-rw-r--r--src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h153
-rw-r--r--src/plugins/multimedia/windows/evr/evrhelpers.cpp140
-rw-r--r--src/plugins/multimedia/windows/evr/evrhelpers_p.h93
-rw-r--r--src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp228
-rw-r--r--src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h72
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp101
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h55
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp207
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h64
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp109
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h62
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp1019
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h154
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp376
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h100
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp225
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h71
-rw-r--r--src/plugins/multimedia/windows/mfstream.cpp326
-rw-r--r--src/plugins/multimedia/windows/mfstream_p.h124
-rw-r--r--src/plugins/multimedia/windows/player/mfactivate.cpp17
-rw-r--r--src/plugins/multimedia/windows/player/mfactivate_p.h202
-rw-r--r--src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp55
-rw-r--r--src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h38
-rw-r--r--src/plugins/multimedia/windows/player/mfplayercontrol.cpp306
-rw-r--r--src/plugins/multimedia/windows/player/mfplayercontrol_p.h103
-rw-r--r--src/plugins/multimedia/windows/player/mfplayersession.cpp1736
-rw-r--r--src/plugins/multimedia/windows/player/mfplayersession_p.h240
-rw-r--r--src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp152
-rw-r--r--src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h47
-rw-r--r--src/plugins/multimedia/windows/qwindowsformatinfo.cpp187
-rw-r--r--src/plugins/multimedia/windows/qwindowsformatinfo_p.h31
-rw-r--r--src/plugins/multimedia/windows/qwindowsintegration.cpp96
-rw-r--r--src/plugins/multimedia/windows/qwindowsintegration_p.h51
-rw-r--r--src/plugins/multimedia/windows/qwindowsvideodevices.cpp228
-rw-r--r--src/plugins/multimedia/windows/qwindowsvideodevices_p.h44
-rw-r--r--src/plugins/multimedia/windows/sourceresolver.cpp294
-rw-r--r--src/plugins/multimedia/windows/sourceresolver_p.h83
-rw-r--r--src/plugins/multimedia/windows/windows.json3
48 files changed, 11470 insertions, 0 deletions
diff --git a/src/plugins/multimedia/windows/CMakeLists.txt b/src/plugins/multimedia/windows/CMakeLists.txt
new file mode 100644
index 000000000..963081e0a
--- /dev/null
+++ b/src/plugins/multimedia/windows/CMakeLists.txt
@@ -0,0 +1,69 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Registers the Windows multimedia backend (Media Foundation / EVR based)
+# as a Qt "multimedia" plugin. Lists all plugin sources, the private include
+# directories for each sub-component, and the Windows system libraries the
+# Media Foundation, DirectShow and D3D9/EVR code links against.
+qt_internal_add_plugin(QWindowsMediaPlugin
+ OUTPUT_NAME windowsmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ common/mfmetadata.cpp common/mfmetadata_p.h
+ decoder/mfaudiodecodercontrol.cpp decoder/mfaudiodecodercontrol_p.h
+ decoder/mfdecodersourcereader.cpp decoder/mfdecodersourcereader_p.h
+ evr/evrcustompresenter.cpp evr/evrcustompresenter_p.h
+ evr/evrd3dpresentengine.cpp evr/evrd3dpresentengine_p.h
+ evr/evrhelpers.cpp evr/evrhelpers_p.h
+ evr/evrvideowindowcontrol.cpp evr/evrvideowindowcontrol_p.h
+ mfstream.cpp mfstream_p.h
+ player/mfactivate.cpp player/mfactivate_p.h
+ player/mfevrvideowindowcontrol.cpp player/mfevrvideowindowcontrol_p.h
+ player/mfplayercontrol.cpp player/mfplayercontrol_p.h
+ player/mfplayersession.cpp player/mfplayersession_p.h
+ player/mfvideorenderercontrol.cpp player/mfvideorenderercontrol_p.h
+ mediacapture/qwindowscamera.cpp
+ mediacapture/qwindowscamera_p.h
+ mediacapture/qwindowsimagecapture.cpp
+ mediacapture/qwindowsimagecapture_p.h
+ mediacapture/qwindowsmediacapture.cpp
+ mediacapture/qwindowsmediacapture_p.h
+ mediacapture/qwindowsmediadevicereader.cpp
+ mediacapture/qwindowsmediadevicereader_p.h
+ mediacapture/qwindowsmediadevicesession.cpp
+ mediacapture/qwindowsmediadevicesession_p.h
+ mediacapture/qwindowsmediaencoder.cpp
+ mediacapture/qwindowsmediaencoder_p.h
+ qwindowsformatinfo.cpp qwindowsformatinfo_p.h
+ qwindowsintegration.cpp qwindowsintegration_p.h
+ qwindowsvideodevices.cpp qwindowsvideodevices_p.h
+ sourceresolver.cpp sourceresolver_p.h
+ INCLUDE_DIRECTORIES
+ audio
+ common
+ decoder
+ evr
+ player
+ mediacapture
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ uuid
+ WMF::WMF
+ d3d9
+ dxva2
+ evr
+ gdi32
+ ksuser
+ mf
+ mfcore
+ mfplat
+ mfreadwrite
+ mfuuid
+ ole32
+ oleaut32
+ propsys
+ shlwapi
+ strmiids
+ amstrmid
+ user32
+ winmm
+ wmcodecdspuuid
+)
+
diff --git a/src/plugins/multimedia/windows/common/mfmetadata.cpp b/src/plugins/multimedia/windows/common/mfmetadata.cpp
new file mode 100644
index 000000000..cc8c425e3
--- /dev/null
+++ b/src/plugins/multimedia/windows/common/mfmetadata.cpp
@@ -0,0 +1,408 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <qmediametadata.h>
+#include <qdatetime.h>
+#include <qtimezone.h>
+#include <qimage.h>
+#include <quuid.h>
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <propvarutil.h>
+#include <propkey.h>
+
+#include "private/qwindowsmultimediautils_p.h"
+#include "mfmetadata_p.h"
+
+//#define DEBUG_MEDIAFOUNDATION
+
+// Sentinel property key ({GUID_NULL, pid 0}) returned by
+// propertyKeyForMetaDataKey() when a Qt metadata key has no Windows mapping.
+static const PROPERTYKEY PROP_KEY_NULL = {GUID_NULL, 0};
+
+// Converts a Windows PROPVARIANT into the closest QVariant representation.
+// Covers the variant types seen in media property stores: wide strings,
+// 32/64-bit unsigned ints, booleans, FILETIMEs (-> UTC QDateTime), streams
+// (decoded as a QImage, used for embedded thumbnails) and wide-string
+// vectors (-> QStringList). Unhandled types yield an invalid QVariant.
+static QVariant convertValue(const PROPVARIANT& var)
+{
+ QVariant value;
+ switch (var.vt) {
+ case VT_LPWSTR:
+ value = QString::fromUtf16(reinterpret_cast<const char16_t *>(var.pwszVal));
+ break;
+ case VT_UI4:
+ value = uint(var.ulVal);
+ break;
+ case VT_UI8:
+ value = qulonglong(var.uhVal.QuadPart);
+ break;
+ case VT_BOOL:
+ value = bool(var.boolVal);
+ break;
+ case VT_FILETIME:
+ SYSTEMTIME t;
+ if (!FileTimeToSystemTime(&var.filetime, &t))
+ break;
+
+ value = QDateTime(QDate(t.wYear, t.wMonth, t.wDay),
+ QTime(t.wHour, t.wMinute, t.wSecond, t.wMilliseconds),
+ QTimeZone(QTimeZone::UTC));
+ break;
+ case VT_STREAM:
+ {
+ STATSTG stat;
+ if (FAILED(var.pStream->Stat(&stat, STATFLAG_NONAME)))
+ break;
+ // NOTE(review): malloc() result is not null-checked, and cbSize.QuadPart
+ // is silently truncated to ULONG by Read() for streams >4GB — confirm
+ // thumbnail streams are always small enough for this to be safe.
+ void *data = malloc(stat.cbSize.QuadPart);
+ ULONG read = 0;
+ if (FAILED(var.pStream->Read(data, stat.cbSize.QuadPart, &read))) {
+ free(data);
+ break;
+ }
+ value = QImage::fromData((const uchar*)data, read);
+ free(data);
+ }
+ break;
+ case VT_VECTOR | VT_LPWSTR:
+ QStringList vList;
+ for (ULONG i = 0; i < var.calpwstr.cElems; ++i)
+ vList.append(QString::fromUtf16(reinterpret_cast<const char16_t *>(var.calpwstr.pElems[i])));
+ value = vList;
+ break;
+ }
+ return value;
+}
+
+// Reads one property from the store, converts it to a QVariant and then
+// post-processes keys whose raw value is not directly usable as Qt metadata:
+// media-class GUIDs become readable strings, durations become milliseconds,
+// codec GUIDs become Qt codec enums, frame height is paired with frame width
+// into a QSize, and the stored fps*1000 value becomes a real frame rate.
+// Returns an invalid QVariant if the store is null or the key is absent.
+static QVariant metaDataValue(IPropertyStore *content, const PROPERTYKEY &key)
+{
+ QVariant value;
+
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ HRESULT hr = S_FALSE;
+ if (content)
+ hr = content->GetValue(key, &var);
+
+ if (SUCCEEDED(hr)) {
+ value = convertValue(var);
+
+ // some metadata needs to be reformatted
+ if (value.isValid() && content) {
+ if (key == PKEY_Media_ClassPrimaryID /*QMediaMetaData::MediaType*/) {
+ QString v = value.toString();
+ if (v == QLatin1String("{D1607DBC-E323-4BE2-86A1-48A42A28441E}"))
+ value = QStringLiteral("Music");
+ else if (v == QLatin1String("{DB9830BD-3AB3-4FAB-8A37-1A995F7FF74B}"))
+ value = QStringLiteral("Video");
+ else if (v == QLatin1String("{01CD0F29-DA4E-4157-897B-6275D50C4F11}"))
+ value = QStringLiteral("Audio");
+ else if (v == QLatin1String("{FCF24A76-9A57-4036-990D-E35DD8B244E1}"))
+ value = QStringLiteral("Other");
+ } else if (key == PKEY_Media_Duration) {
+ // duration is provided in 100-nanosecond units, convert to milliseconds
+ // NOTE(review): the +10000 rounds upward; exact multiples of 1 ms
+ // gain an extra millisecond (+5000 would round to nearest) — confirm intended.
+ value = (value.toLongLong() + 10000) / 10000;
+ } else if (key == PKEY_Video_Compression) {
+ value = int(QWindowsMultimediaUtils::codecForVideoFormat(value.toUuid()));
+ } else if (key == PKEY_Audio_Format) {
+ value = int(QWindowsMultimediaUtils::codecForAudioFormat(value.toUuid()));
+ } else if (key == PKEY_Video_FrameHeight /*Resolution*/) {
+ QSize res;
+ res.setHeight(value.toUInt());
+ if (content && SUCCEEDED(content->GetValue(PKEY_Video_FrameWidth, &var)))
+ res.setWidth(convertValue(var).toUInt());
+ value = res;
+ } else if (key == PKEY_Video_Orientation) {
+ uint orientation = 0;
+ if (content && SUCCEEDED(content->GetValue(PKEY_Video_Orientation, &var)))
+ orientation = convertValue(var).toUInt();
+ value = orientation;
+ } else if (key == PKEY_Video_FrameRate) {
+ // Stored as frames-per-second multiplied by 1000 (see toNative()).
+ value = value.toReal() / 1000.f;
+ }
+ }
+ }
+
+ PropVariantClear(&var);
+ return value;
+}
+
+// Builds a QMediaMetaData map from a Media Foundation source by walking its
+// IPropertyStore (obtained via MF_PROPERTY_HANDLER_SERVICE) and translating
+// each known PROPERTYKEY to the corresponding QMediaMetaData::Key. Keys with
+// no Qt equivalent are skipped; commented-out branches mark Windows keys that
+// currently have no QMediaMetaData counterpart. Returns an empty map when the
+// property handler service is unavailable.
+QMediaMetaData MFMetaData::fromNative(IMFMediaSource* mediaSource)
+{
+ QMediaMetaData metaData;
+
+ IPropertyStore *content = nullptr;
+ if (!SUCCEEDED(MFGetService(mediaSource, MF_PROPERTY_HANDLER_SERVICE, IID_PPV_ARGS(&content))))
+ return metaData;
+
+ Q_ASSERT(content);
+ DWORD cProps;
+ if (SUCCEEDED(content->GetCount(&cProps))) {
+ for (DWORD i = 0; i < cProps; i++)
+ {
+ PROPERTYKEY key;
+ if (FAILED(content->GetAt(i, &key)))
+ continue;
+ QMediaMetaData::Key mediaKey;
+ if (key == PKEY_Author) {
+ mediaKey = QMediaMetaData::Author;
+ } else if (key == PKEY_Title) {
+ mediaKey = QMediaMetaData::Title;
+// } else if (key == PKEY_Media_SubTitle) {
+// mediaKey = QMediaMetaData::SubTitle;
+// } else if (key == PKEY_ParentalRating) {
+// mediaKey = QMediaMetaData::ParentalRating;
+ } else if (key == PKEY_Media_EncodingSettings) {
+ mediaKey = QMediaMetaData::Description;
+ } else if (key == PKEY_Copyright) {
+ mediaKey = QMediaMetaData::Copyright;
+ } else if (key == PKEY_Comment) {
+ mediaKey = QMediaMetaData::Comment;
+ } else if (key == PKEY_Media_ProviderStyle) {
+ mediaKey = QMediaMetaData::Genre;
+ } else if (key == PKEY_Media_DateEncoded) {
+ mediaKey = QMediaMetaData::Date;
+// } else if (key == PKEY_Rating) {
+// mediaKey = QMediaMetaData::UserRating;
+// } else if (key == PKEY_Keywords) {
+// mediaKey = QMediaMetaData::Keywords;
+ } else if (key == PKEY_Language) {
+ mediaKey = QMediaMetaData::Language;
+ } else if (key == PKEY_Media_Publisher) {
+ mediaKey = QMediaMetaData::Publisher;
+ } else if (key == PKEY_Media_ClassPrimaryID) {
+ mediaKey = QMediaMetaData::MediaType;
+ } else if (key == PKEY_Media_Duration) {
+ mediaKey = QMediaMetaData::Duration;
+ } else if (key == PKEY_Audio_EncodingBitrate) {
+ mediaKey = QMediaMetaData::AudioBitRate;
+ } else if (key == PKEY_Audio_Format) {
+ mediaKey = QMediaMetaData::AudioCodec;
+// } else if (key == PKEY_Media_AverageLevel) {
+// mediaKey = QMediaMetaData::AverageLevel;
+// } else if (key == PKEY_Audio_ChannelCount) {
+// mediaKey = QMediaMetaData::ChannelCount;
+// } else if (key == PKEY_Audio_PeakValue) {
+// mediaKey = QMediaMetaData::PeakValue;
+// } else if (key == PKEY_Audio_SampleRate) {
+// mediaKey = QMediaMetaData::SampleRate;
+ } else if (key == PKEY_Music_AlbumTitle) {
+ mediaKey = QMediaMetaData::AlbumTitle;
+ } else if (key == PKEY_Music_AlbumArtist) {
+ mediaKey = QMediaMetaData::AlbumArtist;
+ } else if (key == PKEY_Music_Artist) {
+ mediaKey = QMediaMetaData::ContributingArtist;
+ } else if (key == PKEY_Music_Composer) {
+ mediaKey = QMediaMetaData::Composer;
+// } else if (key == PKEY_Music_Conductor) {
+// mediaKey = QMediaMetaData::Conductor;
+// } else if (key == PKEY_Music_Lyrics) {
+// mediaKey = QMediaMetaData::Lyrics;
+// } else if (key == PKEY_Music_Mood) {
+// mediaKey = QMediaMetaData::Mood;
+ } else if (key == PKEY_Music_TrackNumber) {
+ mediaKey = QMediaMetaData::TrackNumber;
+ } else if (key == PKEY_Music_Genre) {
+ mediaKey = QMediaMetaData::Genre;
+ } else if (key == PKEY_ThumbnailStream) {
+ mediaKey = QMediaMetaData::ThumbnailImage;
+ } else if (key == PKEY_Video_FrameHeight) {
+ mediaKey = QMediaMetaData::Resolution;
+ } else if (key == PKEY_Video_Orientation) {
+ mediaKey = QMediaMetaData::Orientation;
+ } else if (key == PKEY_Video_FrameRate) {
+ mediaKey = QMediaMetaData::VideoFrameRate;
+ } else if (key == PKEY_Video_EncodingBitrate) {
+ mediaKey = QMediaMetaData::VideoBitRate;
+ } else if (key == PKEY_Video_Compression) {
+ mediaKey = QMediaMetaData::VideoCodec;
+// } else if (key == PKEY_Video_Director) {
+// mediaKey = QMediaMetaData::Director;
+// } else if (key == PKEY_Media_Writer) {
+// mediaKey = QMediaMetaData::Writer;
+ } else {
+ continue;
+ }
+ metaData.insert(mediaKey, metaDataValue(content, key));
+ }
+ }
+
+ content->Release();
+
+ return metaData;
+}
+
+// Maps a QMediaMetaData::Key to the Windows PROPERTYKEY used when writing
+// metadata (the inverse of the fromNative() mapping). Returns PROP_KEY_NULL
+// for keys that have no Windows shell property equivalent.
+static REFPROPERTYKEY propertyKeyForMetaDataKey(QMediaMetaData::Key key)
+{
+ switch (key) {
+ case QMediaMetaData::Key::Title:
+ return PKEY_Title;
+ case QMediaMetaData::Key::Author:
+ return PKEY_Author;
+ case QMediaMetaData::Key::Comment:
+ return PKEY_Comment;
+ case QMediaMetaData::Key::Genre:
+ return PKEY_Music_Genre;
+ case QMediaMetaData::Key::Copyright:
+ return PKEY_Copyright;
+ case QMediaMetaData::Key::Publisher:
+ return PKEY_Media_Publisher;
+ case QMediaMetaData::Key::Url:
+ return PKEY_Media_AuthorUrl;
+ case QMediaMetaData::Key::AlbumTitle:
+ return PKEY_Music_AlbumTitle;
+ case QMediaMetaData::Key::AlbumArtist:
+ return PKEY_Music_AlbumArtist;
+ case QMediaMetaData::Key::TrackNumber:
+ return PKEY_Music_TrackNumber;
+ case QMediaMetaData::Key::Date:
+ return PKEY_Media_DateEncoded;
+ case QMediaMetaData::Key::Composer:
+ return PKEY_Music_Composer;
+ case QMediaMetaData::Key::Duration:
+ return PKEY_Media_Duration;
+ case QMediaMetaData::Key::Language:
+ return PKEY_Language;
+ case QMediaMetaData::Key::Description:
+ return PKEY_Media_EncodingSettings;
+ case QMediaMetaData::Key::AudioBitRate:
+ return PKEY_Audio_EncodingBitrate;
+ case QMediaMetaData::Key::ContributingArtist:
+ return PKEY_Music_Artist;
+ case QMediaMetaData::Key::ThumbnailImage:
+ return PKEY_ThumbnailStream;
+ case QMediaMetaData::Key::Orientation:
+ return PKEY_Video_Orientation;
+ case QMediaMetaData::Key::VideoFrameRate:
+ return PKEY_Video_FrameRate;
+ case QMediaMetaData::Key::VideoBitRate:
+ return PKEY_Video_EncodingBitrate;
+ case QMediaMetaData::MediaType:
+ return PKEY_Media_ClassPrimaryID;
+ default:
+ return PROP_KEY_NULL;
+ }
+}
+
+// Writes a string value into the property store; PSCoerceToCanonicalValue()
+// validates/converts the PROPVARIANT to the key's canonical type first.
+static void setStringProperty(IPropertyStore *content, REFPROPERTYKEY key, const QString &value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromString(reinterpret_cast<LPCWSTR>(value.utf16()), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+// Writes a 32-bit unsigned value into the property store after coercing it
+// to the key's canonical type.
+static void setUInt32Property(IPropertyStore *content, REFPROPERTYKEY key, quint32 value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromUInt32(ULONG(value), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+// Writes a 64-bit unsigned value into the property store after coercing it
+// to the key's canonical type.
+static void setUInt64Property(IPropertyStore *content, REFPROPERTYKEY key, quint64 value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromUInt64(ULONGLONG(value), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+// Writes a FILETIME value into the property store after coercing it to the
+// key's canonical type.
+static void setFileTimeProperty(IPropertyStore *content, REFPROPERTYKEY key, const FILETIME *ft)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromFileTime(ft, &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+// Writes Qt metadata into a Windows property store. Keys that need a special
+// encoding are handled explicitly (media-class GUID strings, duration in
+// 100-ns units, resolution split into width/height, frame rate stored as
+// fps*1000, dates as FILETIME); all remaining keys are written as strings
+// and PSCoerceToCanonicalValue() performs validation/type conversion.
+// No-op when 'content' is null.
+void MFMetaData::toNative(const QMediaMetaData &metaData, IPropertyStore *content)
+{
+ if (content) {
+
+ for (const auto &key : metaData.keys()) {
+
+ QVariant value = metaData.value(key);
+
+ if (key == QMediaMetaData::Key::MediaType) {
+
+ QString strValue = metaData.stringValue(key);
+ QString v;
+
+ // Sets property to one of the MediaClassPrimaryID values defined by Microsoft:
+ // https://docs.microsoft.com/en-us/windows/win32/wmformat/wm-mediaprimaryid
+ if (strValue == QLatin1String("Music"))
+ v = QLatin1String("{D1607DBC-E323-4BE2-86A1-48A42A28441E}");
+ else if (strValue == QLatin1String("Video"))
+ v = QLatin1String("{DB9830BD-3AB3-4FAB-8A37-1A995F7FF74B}");
+ else if (strValue == QLatin1String("Audio"))
+ v = QLatin1String("{01CD0F29-DA4E-4157-897B-6275D50C4F11}");
+ else
+ v = QLatin1String("{FCF24A76-9A57-4036-990D-E35DD8B244E1}");
+
+ setStringProperty(content, PKEY_Media_ClassPrimaryID, v);
+
+ } else if (key == QMediaMetaData::Key::Duration) {
+
+ // Qt duration is milliseconds; the store expects 100-ns units.
+ setUInt64Property(content, PKEY_Media_Duration, value.toULongLong() * 10000);
+
+ } else if (key == QMediaMetaData::Key::Resolution) {
+
+ QSize res = value.toSize();
+ setUInt32Property(content, PKEY_Video_FrameWidth, quint32(res.width()));
+ setUInt32Property(content, PKEY_Video_FrameHeight, quint32(res.height()));
+
+ } else if (key == QMediaMetaData::Key::Orientation) {
+
+ setUInt32Property(content, PKEY_Video_Orientation, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::VideoFrameRate) {
+
+ // Stored as fps * 1000; metaDataValue() divides by 1000 when reading.
+ qreal fps = value.toReal();
+ setUInt32Property(content, PKEY_Video_FrameRate, quint32(fps * 1000));
+
+ } else if (key == QMediaMetaData::Key::TrackNumber) {
+
+ setUInt32Property(content, PKEY_Music_TrackNumber, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::AudioBitRate) {
+
+ setUInt32Property(content, PKEY_Audio_EncodingBitrate, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::VideoBitRate) {
+
+ setUInt32Property(content, PKEY_Video_EncodingBitrate, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::Date) {
+
+ // Convert QDateTime to FILETIME by converting to 100-nsecs since
+ // 01/01/1970 UTC and adding the difference from 1601 to 1970.
+ ULARGE_INTEGER t = {};
+ t.QuadPart = ULONGLONG(value.toDateTime().toUTC().toMSecsSinceEpoch() * 10000
+ + 116444736000000000LL);
+
+ FILETIME ft = {};
+ ft.dwHighDateTime = t.HighPart;
+ ft.dwLowDateTime = t.LowPart;
+
+ setFileTimeProperty(content, PKEY_Media_DateEncoded, &ft);
+
+ } else {
+
+ // By default use as string and let PSCoerceToCanonicalValue()
+ // do validation and type conversion.
+ REFPROPERTYKEY propKey = propertyKeyForMetaDataKey(key);
+
+ if (propKey != PROP_KEY_NULL) {
+ QString strValue = metaData.stringValue(key);
+ if (!strValue.isEmpty())
+ setStringProperty(content, propKey, strValue);
+ }
+ }
+ }
+ }
+}
+
diff --git a/src/plugins/multimedia/windows/common/mfmetadata_p.h b/src/plugins/multimedia/windows/common/mfmetadata_p.h
new file mode 100644
index 000000000..9ff196240
--- /dev/null
+++ b/src/plugins/multimedia/windows/common/mfmetadata_p.h
@@ -0,0 +1,30 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFMETADATACONTROL_H
+#define MFMETADATACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+#include "mfidl.h"
+
+QT_USE_NAMESPACE
+
+// Stateless converter between QMediaMetaData and the Windows property-store
+// representation used by Media Foundation sources and sink writers.
+class MFMetaData
+{
+public:
+ // Reads all supported metadata keys from the media source's property store.
+ static QMediaMetaData fromNative(IMFMediaSource* mediaSource);
+ // Writes the given metadata into an existing property store (no-op if null).
+ static void toNative(const QMediaMetaData &metaData, IPropertyStore *content);
+};
+
+#endif
diff --git a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
new file mode 100644
index 000000000..912ab5e94
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
@@ -0,0 +1,225 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <system_error>
+#include <mferror.h>
+#include <qglobal.h>
+#include "wmcodecdsp.h"
+#include "mfaudiodecodercontrol_p.h"
+#include <private/qwindowsaudioutils_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Creates the decoder control and wires the asynchronous source resolver's
+// ready/error notifications to the corresponding handlers.
+MFAudioDecoderControl::MFAudioDecoderControl(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent)
+ , m_sourceResolver(new SourceResolver)
+{
+ connect(m_sourceResolver, &SourceResolver::mediaSourceReady, this, &MFAudioDecoderControl::handleMediaSourceReady);
+ connect(m_sourceResolver, &SourceResolver::error, this, &MFAudioDecoderControl::handleMediaSourceError);
+}
+
+// Shuts down any in-flight source resolution and drops the COM reference on
+// the resolver (it is COM ref-counted, hence Release() rather than delete).
+MFAudioDecoderControl::~MFAudioDecoderControl()
+{
+ m_sourceResolver->shutdown();
+ m_sourceResolver->Release();
+}
+
+// Sets a URL source. A no-op if the same URL is already set (and no device
+// source is active). Stops decoding, cancels/shuts down the previous
+// resolution, then kicks off asynchronous loading of the new URL;
+// handleMediaSourceReady()/handleMediaSourceError() complete the transition.
+void MFAudioDecoderControl::setSource(const QUrl &fileName)
+{
+ if (!m_device && m_source == fileName)
+ return;
+ stop();
+ m_sourceResolver->cancel();
+ m_sourceResolver->shutdown();
+ m_device = nullptr;
+ m_source = fileName;
+ sourceChanged();
+
+ if (!m_source.isEmpty()) {
+ m_sourceResolver->load(m_source, 0);
+ m_loadingSource = true;
+ }
+}
+
+// Sets a QIODevice source (mutually exclusive with a URL source). Loading
+// only begins if the device is already open and readable; otherwise the
+// device is stored but no resolution is started.
+void MFAudioDecoderControl::setSourceDevice(QIODevice *device)
+{
+ if (m_device == device && m_source.isEmpty())
+ return;
+ stop();
+ m_sourceResolver->cancel();
+ m_sourceResolver->shutdown();
+ m_source.clear();
+ m_device = device;
+ sourceChanged();
+
+ if (m_device) {
+ if (m_device->isOpen() && m_device->isReadable()) {
+ m_sourceResolver->load(QUrl(), m_device);
+ m_loadingSource = true;
+ }
+ }
+}
+
+// Resolver callback: the media source is available. If start() was called
+// while loading was still in progress, begin reading now.
+void MFAudioDecoderControl::handleMediaSourceReady()
+{
+ m_loadingSource = false;
+ if (m_deferredStart) {
+ m_deferredStart = false;
+ startReadingSource(m_sourceResolver->mediaSource());
+ }
+}
+
+// Resolver callback: loading failed. Maps known HRESULTs to QAudioDecoder
+// error categories; any other code is reported with the system message text.
+void MFAudioDecoderControl::handleMediaSourceError(long hr)
+{
+ m_loadingSource = false;
+ m_deferredStart = false;
+ if (hr == MF_E_UNSUPPORTED_BYTESTREAM_TYPE) {
+ error(QAudioDecoder::FormatError, tr("Unsupported media type"));
+ // NOTE(review): ERROR_FILE_NOT_FOUND is a Win32 error code, not an
+ // HRESULT — verify the resolver reports it unwrapped (not HRESULT_FROM_WIN32).
+ } else if (hr == ERROR_FILE_NOT_FOUND) {
+ error(QAudioDecoder::ResourceError, tr("Media not found"));
+ } else {
+ error(QAudioDecoder::ResourceError, tr("Unable to load specified URL")
+ + QString::fromStdString(std::system_category().message(hr)));
+ }
+}
+
+// Sets up the asynchronous source reader for a resolved media source:
+// negotiates the PCM/float media type, validates it as a QAudioFormat,
+// reads the stream duration from the presentation descriptor (100-ns units
+// -> ms), configures the resampler to the requested output format (or the
+// native one if none was requested), connects sample/finished signals and
+// requests the first sample. Reports an error and aborts on any failure.
+void MFAudioDecoderControl::startReadingSource(IMFMediaSource *source)
+{
+ Q_ASSERT(source);
+
+ m_decoderSourceReader = makeComObject<MFDecoderSourceReader>();
+ if (!m_decoderSourceReader) {
+ error(QAudioDecoder::ResourceError, tr("Could not instantiate MFDecoderSourceReader"));
+ return;
+ }
+
+ auto mediaType = m_decoderSourceReader->setSource(source, m_outputFormat.sampleFormat());
+ QAudioFormat mediaFormat = QWindowsAudioUtils::mediaTypeToFormat(mediaType.Get());
+ if (!mediaFormat.isValid()) {
+ error(QAudioDecoder::FormatError, tr("Invalid media format"));
+ m_decoderSourceReader.Reset();
+ return;
+ }
+
+ ComPtr<IMFPresentationDescriptor> pd;
+ if (SUCCEEDED(source->CreatePresentationDescriptor(pd.GetAddressOf()))) {
+ UINT64 duration = 0;
+ pd->GetUINT64(MF_PD_DURATION, &duration);
+ duration /= 10000;
+ m_duration = qint64(duration);
+ durationChanged(m_duration);
+ }
+
+ if (!m_resampler.setup(mediaFormat, m_outputFormat.isValid() ? m_outputFormat : mediaFormat)) {
+ qWarning() << "Failed to set up resampler";
+ return;
+ }
+
+ connect(m_decoderSourceReader.Get(), &MFDecoderSourceReader::finished, this, &MFAudioDecoderControl::handleSourceFinished);
+ connect(m_decoderSourceReader.Get(), &MFDecoderSourceReader::newSample, this, &MFAudioDecoderControl::handleNewSample);
+
+ setIsDecoding(true);
+
+ m_decoderSourceReader->readNextSample();
+}
+
+// Starts decoding. If source resolution is still in flight the start is
+// deferred until handleMediaSourceReady(); if no source has been resolved,
+// an appropriate ResourceError is reported instead.
+void MFAudioDecoderControl::start()
+{
+ if (isDecoding())
+ return;
+
+ if (m_loadingSource) {
+ m_deferredStart = true;
+ } else {
+ IMFMediaSource *source = m_sourceResolver->mediaSource();
+ if (!source) {
+ if (m_device)
+ error(QAudioDecoder::ResourceError, tr("Unable to read from specified device"));
+ else if (m_source.isValid())
+ error(QAudioDecoder::ResourceError, tr("Unable to load specified URL"));
+ else
+ error(QAudioDecoder::ResourceError, tr("No media source specified"));
+ return;
+ } else {
+ startReadingSource(source);
+ }
+ }
+}
+
+// Stops decoding: cancels any deferred start, disconnects and releases the
+// source reader, discards a pending buffer, and resets position/duration to
+// -1 (emitting the corresponding change notifications).
+void MFAudioDecoderControl::stop()
+{
+ m_deferredStart = false;
+ if (!isDecoding())
+ return;
+
+ disconnect(m_decoderSourceReader.Get());
+ m_decoderSourceReader->clearSource();
+ m_decoderSourceReader.Reset();
+
+ if (bufferAvailable()) {
+ // Swap with a default-constructed buffer to drop the pending data.
+ QAudioBuffer buffer;
+ m_audioBuffer.swap(buffer);
+ bufferAvailableChanged(false);
+ }
+ setIsDecoding(false);
+
+ if (m_position != -1) {
+ m_position = -1;
+ positionChanged(m_position);
+ }
+ if (m_duration != -1) {
+ m_duration = -1;
+ durationChanged(m_duration);
+ }
+}
+
+// Source-reader callback: resamples the incoming IMFSample to the output
+// format and publishes it as the pending QAudioBuffer. The buffer's start
+// time is derived from the total bytes emitted so far (contiguous stream
+// assumption). Reports a ResourceError if resampling produced no data.
+void MFAudioDecoderControl::handleNewSample(ComPtr<IMFSample> sample)
+{
+ Q_ASSERT(sample);
+
+ qint64 sampleStartTimeUs = m_resampler.outputFormat().durationForBytes(m_resampler.totalOutputBytes());
+ QByteArray out = m_resampler.resample(sample.Get());
+
+ if (out.isEmpty()) {
+ error(QAudioDecoder::Error::ResourceError, tr("Failed processing a sample"));
+
+ } else {
+ m_audioBuffer = QAudioBuffer(out, m_resampler.outputFormat(), sampleStartTimeUs);
+
+ bufferAvailableChanged(true);
+ bufferReady();
+ }
+}
+
+// Source-reader callback: end of stream reached — tear down and notify.
+void MFAudioDecoderControl::handleSourceFinished()
+{
+ stop();
+ finished();
+}
+
+// Sets the requested output format (applied on the next start; see
+// startReadingSource()). Emits formatChanged() only on an actual change.
+void MFAudioDecoderControl::setAudioFormat(const QAudioFormat &format)
+{
+ if (m_outputFormat == format)
+ return;
+ m_outputFormat = format;
+ formatChanged(m_outputFormat);
+}
+
+// Hands the pending decoded buffer to the caller (empty buffer if none is
+// available), updates the playback position (us -> ms) and immediately
+// requests the next sample from the source reader.
+QAudioBuffer MFAudioDecoderControl::read()
+{
+ QAudioBuffer buffer;
+
+ if (bufferAvailable()) {
+ buffer.swap(m_audioBuffer);
+ m_position = buffer.startTime() / 1000;
+ positionChanged(m_position);
+ bufferAvailableChanged(false);
+ m_decoderSourceReader->readNextSample();
+ }
+
+ return buffer;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfaudiodecodercontrol_p.cpp"
diff --git a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h
new file mode 100644
index 000000000..9bb2371ec
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h
@@ -0,0 +1,75 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFAUDIODECODERCONTROL_H
+#define MFAUDIODECODERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "mfdecodersourcereader_p.h"
+#include <private/qplatformaudiodecoder_p.h>
+#include <sourceresolver_p.h>
+#include <private/qcomptr_p.h>
+#include <private/qwindowsresampler_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Media Foundation implementation of QPlatformAudioDecoder. Resolves a URL
+// or QIODevice into an IMFMediaSource (via SourceResolver), reads samples
+// asynchronously through MFDecoderSourceReader, resamples them to the
+// requested QAudioFormat and exposes them one QAudioBuffer at a time.
+class MFAudioDecoderControl : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+public:
+ MFAudioDecoderControl(QAudioDecoder *parent);
+ ~MFAudioDecoderControl() override;
+
+ QUrl source() const override { return m_source; }
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice* sourceDevice() const override { return m_device; }
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override { return m_outputFormat; }
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+ bool bufferAvailable() const override { return m_audioBuffer.sampleCount() > 0; }
+
+ // Position/duration in milliseconds; -1 while not decoding/unknown.
+ qint64 position() const override { return m_position; }
+ qint64 duration() const override { return m_duration; }
+
+private Q_SLOTS:
+ void handleMediaSourceReady();
+ void handleMediaSourceError(long hr);
+ void handleNewSample(ComPtr<IMFSample>);
+ void handleSourceFinished();
+
+private:
+ void startReadingSource(IMFMediaSource *source);
+
+ ComPtr<MFDecoderSourceReader> m_decoderSourceReader;
+ SourceResolver *m_sourceResolver; // COM ref-counted; released in the dtor
+ QWindowsResampler m_resampler;
+ QUrl m_source;
+ QIODevice *m_device = nullptr; // not owned
+ QAudioFormat m_outputFormat; // requested output format (may be invalid)
+ QAudioBuffer m_audioBuffer; // the single pending decoded buffer
+ qint64 m_duration = -1;
+ qint64 m_position = -1;
+ bool m_loadingSource = false; // source resolution in flight
+ bool m_deferredStart = false; // start() was called while loading
+};
+
+QT_END_NAMESPACE
+
+#endif//MFAUDIODECODERCONTROL_H
diff --git a/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp
new file mode 100644
index 000000000..097f83437
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp
@@ -0,0 +1,103 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <system_error>
+#include <mferror.h>
+#include <qlogging.h>
+#include <qdebug.h>
+#include "mfdecodersourcereader_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// Creates an asynchronous IMFSourceReader over 'source' with this object as
+// the read callback, deselects all streams except the first audio stream,
+// and requests uncompressed output (float or PCM depending on the desired
+// sample format). Returns the fully negotiated media type, or a null ComPtr
+// on failure. Any previous reader is released first.
+ComPtr<IMFMediaType> MFDecoderSourceReader::setSource(IMFMediaSource *source, QAudioFormat::SampleFormat sampleFormat)
+{
+ ComPtr<IMFMediaType> mediaType;
+ m_sourceReader.Reset();
+
+ if (!source)
+ return mediaType;
+
+ ComPtr<IMFAttributes> attr;
+ MFCreateAttributes(attr.GetAddressOf(), 1);
+ if (FAILED(attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this)))
+ return mediaType;
+ if (FAILED(attr->SetUINT32(MF_SOURCE_READER_DISCONNECT_MEDIASOURCE_ON_SHUTDOWN, TRUE)))
+ return mediaType;
+
+ HRESULT hr = MFCreateSourceReaderFromMediaSource(source, attr.Get(), m_sourceReader.GetAddressOf());
+ if (FAILED(hr)) {
+ qWarning() << "MFDecoderSourceReader: failed to set up source reader: "
+ << std::system_category().message(hr).c_str();
+ return mediaType;
+ }
+
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_ALL_STREAMS), FALSE);
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+
+ // A partial type (major type + subtype only) lets the reader insert the
+ // decoder/converter needed to produce uncompressed audio.
+ ComPtr<IMFMediaType> pPartialType;
+ MFCreateMediaType(pPartialType.GetAddressOf());
+ pPartialType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+ pPartialType->SetGUID(MF_MT_SUBTYPE, sampleFormat == QAudioFormat::Float ? MFAudioFormat_Float : MFAudioFormat_PCM);
+ m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), nullptr, pPartialType.Get());
+ m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), mediaType.GetAddressOf());
+ // Ensure the stream is selected.
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+
+ return mediaType;
+}
+
+// Requests one asynchronous sample from the audio stream; the result arrives
+// in OnReadSample(). No-op if no reader has been set up.
+void MFDecoderSourceReader::readNextSample()
+{
+ if (m_sourceReader)
+ m_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM, 0, NULL, NULL, NULL, NULL);
+}
+
+//from IUnknown
+// Standard COM QueryInterface: exposes IMFSourceReaderCallback and IUnknown.
+STDMETHODIMP MFDecoderSourceReader::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFSourceReaderCallback) {
+ *ppvObject = static_cast<IMFSourceReaderCallback*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+// COM reference counting (thread-safe via Interlocked* intrinsics).
+STDMETHODIMP_(ULONG) MFDecoderSourceReader::AddRef()
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+// Decrements the COM refcount; uses QObject::deleteLater() instead of
+// 'delete this' so destruction happens safely on the object's event loop.
+STDMETHODIMP_(ULONG) MFDecoderSourceReader::Release()
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ this->deleteLater();
+ }
+ return cRef;
+}
+
+//from IMFSourceReaderCallback
+// Async read completion callback: forwards a received sample via the
+// newSample() signal (ComPtr takes an extra reference), or emits finished()
+// once the end-of-stream flag is set. Read errors (hrStatus) are ignored here.
+STDMETHODIMP MFDecoderSourceReader::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+ DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
+{
+ Q_UNUSED(hrStatus);
+ Q_UNUSED(dwStreamIndex);
+ Q_UNUSED(llTimestamp);
+ if (pSample) {
+ emit newSample(ComPtr<IMFSample>{pSample});
+ } else if ((dwStreamFlags & MF_SOURCE_READERF_ENDOFSTREAM) == MF_SOURCE_READERF_ENDOFSTREAM) {
+ emit finished();
+ }
+ return S_OK;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfdecodersourcereader_p.cpp"
diff --git a/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h
new file mode 100644
index 000000000..dee6f8bf5
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h
@@ -0,0 +1,63 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFDECODERSOURCEREADER_H
+#define MFDECODERSOURCEREADER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+
+#include <QtCore/qobject.h>
+#include "qaudioformat.h"
+#include <private/qcomptr_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Asynchronous Media Foundation source reader used by the audio decoder.
+// Wraps IMFSourceReader in callback mode: readNextSample() requests one
+// sample, and decoded samples / end-of-stream are reported through the
+// newSample()/finished() signals from OnReadSample().
+class MFDecoderSourceReader : public QObject, public IMFSourceReaderCallback
+{
+    Q_OBJECT
+public:
+    MFDecoderSourceReader() {}
+    ~MFDecoderSourceReader() override {}
+
+    // Drops the current IMFSourceReader (stops further callbacks being issued).
+    void clearSource() { m_sourceReader.Reset(); }
+    // Creates a reader for `source` and negotiates an uncompressed audio
+    // output type; returns the media type actually set on the reader.
+    ComPtr<IMFMediaType> setSource(IMFMediaSource *source, QAudioFormat::SampleFormat);
+
+    // Requests the next sample; the result arrives via OnReadSample().
+    void readNextSample();
+
+    //from IUnknown
+    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+    STDMETHODIMP_(ULONG) AddRef() override;
+    STDMETHODIMP_(ULONG) Release() override;
+
+    //from IMFSourceReaderCallback
+    STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+        DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) override;
+    STDMETHODIMP OnFlush(DWORD) override { return S_OK; }
+    STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) override { return S_OK; }
+
+Q_SIGNALS:
+    void newSample(ComPtr<IMFSample>);
+    void finished();
+
+private:
+    // COM reference count; starts at 1 for the creator's reference.
+    long m_cRef = 1;
+    ComPtr<IMFSourceReader> m_sourceReader;
+
+};
+
+QT_END_NAMESPACE
+
+#endif//MFDECODERSOURCEREADER_H
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
new file mode 100644
index 000000000..2a3433f4d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
@@ -0,0 +1,1849 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrcustompresenter_p.h"
+
+#include "evrd3dpresentengine_p.h"
+#include "evrhelpers_p.h"
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+#include <rhi/qrhi.h>
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qrect.h>
+#include <qthread.h>
+#include <qcoreapplication.h>
+#include <qmath.h>
+#include <qloggingcategory.h>
+
+#include <mutex>
+
+#include <float.h>
+#include <evcode.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrCustomPresenter, "qt.multimedia.evrcustompresenter")
+
+// Frame rate assumed until the real one is known from the media type.
+const static MFRatio g_DefaultFrameRate = { 30, 1 };
+// Max time (ms) to wait for the scheduler thread to acknowledge a flush.
+static const DWORD SCHEDULER_TIMEOUT = 5000;
+// MFTIME is in 100-ns units: 10,000,000 units per second.
+static const MFTIME ONE_SECOND = 10000000;
+static const LONG ONE_MSEC = 1000;
+
+// Function declarations.
+static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+
+// Convert an MFTIME duration (100-ns units) to milliseconds.
+static inline LONG MFTimeToMsec(const LONGLONG& time)
+{
+    const LONGLONG unitsPerMsec = ONE_SECOND / ONE_MSEC;
+    return static_cast<LONG>(time / unitsPerMsec);
+}
+
+// Installs `presenter` as the custom presenter on an EVR instance by asking
+// the EVR for IMFVideoRenderer and re-initializing it with a null mixer
+// (the EVR creates its default mixer) and our presenter.
+// Returns true only if InitializeRenderer returned S_OK.
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
+{
+    if (!evr || !presenter)
+        return false;
+
+    HRESULT result = E_FAIL;
+
+    IMFVideoRenderer *renderer = NULL;
+    if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
+        result = renderer->InitializeRenderer(NULL, presenter);
+        renderer->Release();
+    }
+
+    return result == S_OK;
+}
+
+// Custom QEvent used to marshal a video sample onto the presenter's thread;
+// the ComPtr member keeps the sample alive while the event is queued.
+class PresentSampleEvent : public QEvent
+{
+public:
+    explicit PresentSampleEvent(const ComPtr<IMFSample> &sample)
+        : QEvent(static_cast<Type>(EVRCustomPresenter::PresentSample)), m_sample(sample)
+    {
+    }
+
+    ComPtr<IMFSample> sample() const { return m_sample; }
+
+private:
+    const ComPtr<IMFSample> m_sample;
+};
+
+// The scheduler holds a non-owning back-pointer to the presenter, which it
+// calls into (presentSample) from the scheduler thread.
+Scheduler::Scheduler(EVRCustomPresenter *presenter)
+    : m_presenter(presenter)
+    , m_threadID(0)
+    , m_playbackRate(1.0f)
+    , m_perFrame_1_4th(0)
+{
+}
+
+// Releases any samples still queued. The scheduler thread is expected to
+// have been stopped already via stopScheduler().
+Scheduler::~Scheduler()
+{
+    m_scheduledSamples.clear();
+}
+
+// Stores the stream's frame duration as quarter-frame units; the quarter
+// frame is the tolerance window used by isSampleReadyToPresent().
+void Scheduler::setFrameRate(const MFRatio& fps)
+{
+    UINT64 AvgTimePerFrame = 0;
+
+    // Convert to a duration.
+    MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
+
+    // Calculate 1/4th of this value, because we use it frequently.
+    m_perFrame_1_4th = AvgTimePerFrame / 4;
+}
+
+// Starts the dedicated scheduler thread and blocks until it is ready.
+// `clock` (may be null) is the presentation clock used to time samples.
+// Returns E_UNEXPECTED if the thread is already running or died during
+// startup; Win32 errors are translated via HRESULT_FROM_WIN32.
+HRESULT Scheduler::startScheduler(ComPtr<IMFClock> clock)
+{
+    if (m_schedulerThread)
+        return E_UNEXPECTED;
+
+    HRESULT hr = S_OK;
+    DWORD dwID = 0;
+    HANDLE hObjects[2];
+    DWORD dwWait = 0;
+
+    m_clock = clock;
+
+    // Set a high the timer resolution (ie, short timer period).
+    timeBeginPeriod(1);
+
+    // Create an event to wait for the thread to start.
+    m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+    if (!m_threadReadyEvent) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    // Create an event to wait for flush commands to complete.
+    m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+    if (!m_flushEvent) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    // Create the scheduler thread.
+    m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID) };
+    if (!m_schedulerThread) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    // Wait for the thread to signal the "thread ready" event.
+    hObjects[0] = m_threadReadyEvent.get();
+    hObjects[1] = m_schedulerThread.get();
+    dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE);  // Wait for EITHER of these handles.
+    if (WAIT_OBJECT_0 != dwWait) {
+        // The thread terminated early for some reason. This is an error condition.
+        m_schedulerThread = {};
+
+        hr = E_UNEXPECTED;
+        goto done;
+    }
+
+    // Remember the thread id so we can PostThreadMessage to it later.
+    m_threadID = dwID;
+
+done:
+    // Regardless success/failure, we are done using the "thread ready" event.
+    m_threadReadyEvent = {};
+
+    return hr;
+}
+
+// Stops the scheduler thread synchronously: posts Terminate, joins the
+// thread, releases the handles and any samples still queued, and restores
+// the system timer resolution raised in startScheduler(). Safe to call
+// when the thread was never started.
+HRESULT Scheduler::stopScheduler()
+{
+    if (!m_schedulerThread)
+        return S_OK;
+
+    // Ask the scheduler thread to exit.
+    PostThreadMessage(m_threadID, Terminate, 0, 0);
+
+    // Wait for the thread to exit.
+    WaitForSingleObject(m_schedulerThread.get(), INFINITE);
+
+    // Close handles.
+    m_schedulerThread = {};
+    m_flushEvent = {};
+
+    // Discard samples.
+    m_mutex.lock();
+    m_scheduledSamples.clear();
+    m_mutex.unlock();
+
+    // Restore the timer resolution.
+    timeEndPeriod(1);
+
+    return S_OK;
+}
+
+// Asks the scheduler thread to discard all queued samples and waits (up to
+// SCHEDULER_TIMEOUT) for it to acknowledge, or for the thread to die.
+// A no-op when the thread is not running.
+HRESULT Scheduler::flush()
+{
+    if (m_schedulerThread) {
+        // Ask the scheduler thread to flush.
+        PostThreadMessage(m_threadID, Flush, 0 , 0);
+
+        // Wait for the scheduler thread to signal the flush event,
+        // OR for the thread to terminate.
+        HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
+
+        WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
+    }
+
+    return S_OK;
+}
+
+// Thread-safe check: is at least one sample still waiting to be presented?
+bool Scheduler::areSamplesScheduled()
+{
+    QMutexLocker locker(&m_mutex);
+    return !m_scheduledSamples.isEmpty();
+}
+
+// Queues a sample for timed presentation, or presents it immediately when
+// `presentNow` is set or no clock is available. Late samples (forward
+// playback only) are silently dropped. Fails if the scheduler thread is
+// not running or has already exited.
+HRESULT Scheduler::scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow)
+{
+    if (!m_schedulerThread)
+        return MF_E_NOT_INITIALIZED;
+
+    HRESULT hr = S_OK;
+    DWORD dwExitCode = 0;
+
+    // Detect a scheduler thread that terminated unexpectedly.
+    GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
+    if (dwExitCode != STILL_ACTIVE)
+        return E_FAIL;
+
+    if (presentNow || !m_clock) {
+        m_presenter->presentSample(sample);
+    } else {
+        if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+            qCDebug(qLcEvrCustomPresenter) << "Discard the sample, it came too late";
+            return hr;
+        }
+
+        // Queue the sample and ask the scheduler thread to wake up.
+        m_mutex.lock();
+        m_scheduledSamples.enqueue(sample);
+        m_mutex.unlock();
+
+        // NOTE(review): hr is always S_OK at this point, so this check is
+        // vacuous; the wake-up message is always posted.
+        if (SUCCEEDED(hr))
+            PostThreadMessage(m_threadID, Schedule, 0, 0);
+    }
+
+    return hr;
+}
+
+// Drains the queue of due samples, presenting each one, and computes how
+// long the scheduler thread should sleep before the next sample is due
+// (INFINITE when the queue is empty). The queue is swapped out under the
+// lock so presentation happens without holding the mutex; samples enqueued
+// meanwhile are re-merged afterwards, preserving order.
+HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)
+{
+    HRESULT hr = S_OK;
+    LONG wait = 0;
+
+    QQueue<ComPtr<IMFSample>> scheduledSamples;
+
+    // Take ownership of the pending samples while holding the lock briefly.
+    m_mutex.lock();
+    m_scheduledSamples.swap(scheduledSamples);
+    m_mutex.unlock();
+
+    // Process samples until the queue is empty or until the wait time > 0.
+    while (!scheduledSamples.isEmpty()) {
+        ComPtr<IMFSample> sample = scheduledSamples.dequeue();
+
+        // Process the next sample in the queue. If the sample is not ready
+        // for presentation. the value returned in wait is > 0, which
+        // means the scheduler should sleep for that amount of time.
+        if (isSampleReadyToPresent(sample.Get(), &wait)) {
+            m_presenter->presentSample(sample.Get());
+            continue;
+        }
+
+        if (wait > 0) {
+            // Not due yet: return the sample to the head of the queue.
+            scheduledSamples.prepend(sample);
+            break;
+        }
+    }
+
+    // Merge back: remaining old samples first, then anything newly enqueued.
+    m_mutex.lock();
+    scheduledSamples.append(std::move(m_scheduledSamples));
+    m_scheduledSamples.swap(scheduledSamples);
+    m_mutex.unlock();
+
+    // If the wait time is zero, it means we stopped because the queue is
+    // empty (or an error occurred). Set the wait time to infinite; this will
+    // make the scheduler thread sleep until it gets another thread message.
+    if (wait == 0)
+        wait = INFINITE;
+
+    *nextSleep = wait;
+    return hr;
+}
+
+// Decides whether `sample` should be presented now. Returns true to present
+// immediately; false means either "late, drop it" (*pNextSleep == 0) or
+// "early, sleep *pNextSleep ms and retry". A sample within ±1/4 frame of the
+// clock (or with no clock / no timestamp, since both times stay 0) is
+// presented right away.
+bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const
+{
+    *pNextSleep = 0;
+    if (!m_clock)
+        return true;
+
+    MFTIME hnsPresentationTime = 0;
+    MFTIME hnsTimeNow = 0;
+    MFTIME hnsSystemTime = 0;
+
+    // Get the sample's time stamp. It is valid for a sample to
+    // have no time stamp.
+    HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);
+
+    // Get the clock time. (But if the sample does not have a time stamp,
+    // we don't need the clock time.)
+    if (SUCCEEDED(hr))
+        hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+    // Calculate the time until the sample's presentation time.
+    // A negative value means the sample is late.
+    MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
+    if (m_playbackRate < 0) {
+        // For reverse playback, the clock runs backward. Therefore, the
+        // delta is reversed.
+        hnsDelta = - hnsDelta;
+    }
+
+    if (hnsDelta < - m_perFrame_1_4th) {
+        // This sample is late - skip.
+        return false;
+    } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
+        // This sample came too early - reschedule
+        *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
+
+        // Adjust the sleep time for the clock rate. (The presentation clock runs
+        // at m_fRate, but sleeping uses the system clock.)
+        if (m_playbackRate != 0)
+            *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
+        // If the scaled sleep rounds to 0 ms, treat the sample as due now.
+        return *pNextSleep == 0;
+    } else {
+        // This sample can be presented right now
+        return true;
+    }
+}
+
+// Win32 thread entry point: thunk to the member-function implementation.
+DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
+{
+    auto *self = reinterpret_cast<Scheduler *>(parameter);
+    if (self == nullptr)
+        return DWORD(-1);
+
+    return self->schedulerThreadProcPrivate();
+}
+
+// Body of the scheduler thread. Sleeps until either the next sample is due
+// (wait timeout) or a thread message (Schedule/Flush/Terminate) arrives,
+// then presents due samples. Returns 0 on clean exit, 1 on failure.
+DWORD Scheduler::schedulerThreadProcPrivate()
+{
+    HRESULT hr = S_OK;
+    MSG msg;
+    LONG wait = INFINITE;
+    bool exitThread = false;
+
+    // Force the system to create a message queue for this thread.
+    // (See MSDN documentation for PostThreadMessage.)
+    PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
+
+    // Signal to the scheduler that the thread is ready.
+    SetEvent(m_threadReadyEvent.get());
+
+    while (!exitThread) {
+        // Wait for a thread message OR until the wait time expires.
+        DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
+
+        if (result == WAIT_TIMEOUT) {
+            // If we timed out, then process the samples in the queue
+            hr = processSamplesInQueue(&wait);
+            if (FAILED(hr))
+                exitThread = true;
+        }
+
+        while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+            // NOTE(review): this flag is re-initialized on every message, so
+            // the "processSamples = (wait != INFINITE)" throttle below never
+            // carries over between messages — confirm against the original
+            // MSDN EVRPresenter sample, where it is declared outside the loop.
+            bool processSamples = true;
+
+            switch (msg.message) {
+            case Terminate:
+                exitThread = true;
+                break;
+            case Flush:
+                // Flushing: Clear the sample queue and set the event.
+                m_mutex.lock();
+                m_scheduledSamples.clear();
+                m_mutex.unlock();
+                wait = INFINITE;
+                SetEvent(m_flushEvent.get());
+                break;
+            case Schedule:
+                // Process as many samples as we can.
+                if (processSamples) {
+                    hr = processSamplesInQueue(&wait);
+                    if (FAILED(hr))
+                        exitThread = true;
+                    processSamples = (wait != (LONG)INFINITE);
+                }
+                break;
+            }
+        }
+
+    }
+
+    return (SUCCEEDED(hr) ? 0 : 1);
+}
+
+
+// Pool starts uninitialized; initialize() must be called before use.
+SamplePool::SamplePool()
+    : m_initialized(false)
+{
+}
+
+// Releases all pooled samples.
+SamplePool::~SamplePool()
+{
+    clear();
+}
+
+// Removes one free sample from the pool. Returns null when the pool is not
+// initialized (programming error, asserted in debug builds) or temporarily
+// exhausted (all samples are in flight).
+ComPtr<IMFSample> SamplePool::takeSample()
+{
+    QMutexLocker locker(&m_mutex);
+
+    Q_ASSERT(m_initialized);
+    if (!m_initialized) {
+        qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+        return nullptr;
+    }
+
+    if (m_videoSampleQueue.isEmpty()) {
+        qCDebug(qLcEvrCustomPresenter) << "SamplePool is empty";
+        return nullptr;
+    }
+
+    // Get a sample from the allocated queue.
+
+    // It doesn't matter if we pull them from the head or tail of the list,
+    // but when we get it back, we want to re-insert it onto the opposite end.
+    // (see returnSample)
+
+    return m_videoSampleQueue.takeFirst();
+}
+
+// Returns a sample to the pool after presentation. Appended at the back,
+// opposite to takeSample()'s front removal, so samples are reused round-robin.
+void SamplePool::returnSample(const ComPtr<IMFSample> &sample)
+{
+    QMutexLocker locker(&m_mutex);
+
+    Q_ASSERT(m_initialized);
+    if (!m_initialized) {
+        qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+        return;
+    }
+
+    m_videoSampleQueue.append(sample);
+}
+
+// Seeds the pool with pre-allocated samples (moved in). May only be called
+// once until clear(); a second call fails with MF_E_INVALIDREQUEST.
+HRESULT SamplePool::initialize(QList<ComPtr<IMFSample>> &&samples)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (m_initialized)
+        return MF_E_INVALIDREQUEST;
+
+    // Move these samples into our allocated queue.
+    m_videoSampleQueue.append(std::move(samples));
+
+    m_initialized = true;
+
+    return S_OK;
+}
+
+// Releases all pooled samples and returns the pool to the uninitialized
+// state, so initialize() can be called again.
+HRESULT SamplePool::clear()
+{
+    QMutexLocker locker(&m_mutex);
+
+    m_videoSampleQueue.clear();
+    m_initialized = false;
+
+    return S_OK;
+}
+
+
+// Constructs the presenter in the shut-down state (InitServicePointers moves
+// it to RenderStopped). Owns the D3D present engine; COM refcount starts at 1.
+EVRCustomPresenter::EVRCustomPresenter(QVideoSink *sink)
+    : QObject()
+    , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
+    , m_refCount(1)
+    , m_renderState(RenderShutdown)
+    , m_scheduler(this)
+    , m_tokenCounter(0)
+    , m_sampleNotify(false)
+    , m_prerolled(false)
+    , m_endStreaming(false)
+    , m_playbackRate(1.0f)
+    , m_presentEngine(new D3DPresentEngine(sink))
+    , m_mediaType(0)
+    , m_videoSink(0)
+    , m_canRenderToSurface(false)
+    , m_positionOffset(0)
+{
+    // Initial source rectangle = (0,0,1,1) — the whole normalized frame.
+    m_sourceRect.top = 0;
+    m_sourceRect.left = 0;
+    m_sourceRect.bottom = 1;
+    m_sourceRect.right = 1;
+
+    // Also updates m_canRenderToSurface via supportedFormatsChanged().
+    setSink(sink);
+}
+
+// Stops the scheduler thread and releases pooled samples before destroying
+// the owned present engine.
+EVRCustomPresenter::~EVRCustomPresenter()
+{
+    m_scheduler.flush();
+    m_scheduler.stopScheduler();
+    m_samplePool.clear();
+
+    delete m_presentEngine;
+}
+
+// COM QueryInterface for all interfaces the EVR expects from a presenter.
+// IUnknown is disambiguated through IMFGetService because this class has
+// multiple IUnknown base subobjects.
+HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+    if (riid == IID_IMFGetService) {
+        *ppvObject = static_cast<IMFGetService*>(this);
+    } else if (riid == IID_IMFTopologyServiceLookupClient) {
+        *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
+    } else if (riid == IID_IMFVideoDeviceID) {
+        *ppvObject = static_cast<IMFVideoDeviceID*>(this);
+    } else if (riid == IID_IMFVideoPresenter) {
+        *ppvObject = static_cast<IMFVideoPresenter*>(this);
+    } else if (riid == IID_IMFRateSupport) {
+        *ppvObject = static_cast<IMFRateSupport*>(this);
+    } else if (riid == IID_IUnknown) {
+        *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
+    } else if (riid == IID_IMFClockStateSink) {
+        *ppvObject = static_cast<IMFClockStateSink*>(this);
+    } else {
+        *ppvObject = NULL;
+        return E_NOINTERFACE;
+    }
+    // Hand out an owned reference, per COM rules.
+    AddRef();
+    return S_OK;
+}
+
+// IUnknown: atomically increment the COM reference count.
+ULONG EVRCustomPresenter::AddRef()
+{
+    const LONG updated = InterlockedIncrement(&m_refCount);
+    return ULONG(updated);
+}
+
+// IUnknown: atomically drop a reference. Destruction goes through
+// QObject::deleteLater() so it happens on the object's thread, since
+// Release() can be invoked from Media Foundation worker threads.
+ULONG EVRCustomPresenter::Release()
+{
+    ULONG uCount = InterlockedDecrement(&m_refCount);
+    if (uCount == 0)
+        deleteLater();
+    return uCount;
+}
+
+// IMFGetService: exposes MR_VIDEO_RENDER_SERVICE interfaces, first trying
+// the D3D present engine (e.g. the Direct3D device manager), then falling
+// back to this object's own interfaces.
+HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
+{
+    HRESULT hr = S_OK;
+
+    if (!ppvObject)
+        return E_POINTER;
+
+    // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
+    if (guidService != MR_VIDEO_RENDER_SERVICE)
+        return MF_E_UNSUPPORTED_SERVICE;
+
+    // First try to get the service interface from the D3DPresentEngine object.
+    hr = m_presentEngine->getService(guidService, riid, ppvObject);
+    if (FAILED(hr))
+        // Next, check if this object supports the interface.
+        hr = QueryInterface(riid, ppvObject);
+
+    return hr;
+}
+
+// IMFVideoDeviceID: report Direct3D 9 as our device ID so the EVR only
+// pairs this presenter with a D3D9-compatible mixer.
+HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID)
+{
+    if (deviceID == nullptr)
+        return E_POINTER;
+
+    *deviceID = IID_IDirect3DDevice9;
+    return S_OK;
+}
+
+// IMFTopologyServiceLookupClient: called by the EVR when the topology is
+// built. Acquires the presentation clock (optional), the mixer (required)
+// and the EVR's event sink (required), then leaves the shut-down state.
+// Rejected with MF_E_INVALIDREQUEST while playing or paused.
+HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
+{
+    if (!lookup)
+        return E_POINTER;
+
+    HRESULT hr = S_OK;
+    DWORD objectCount = 0;
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    // Do not allow initializing when playing or paused.
+    if (isActive())
+        return MF_E_INVALIDREQUEST;
+
+    // Drop any services from a previous topology.
+    m_clock.Reset();
+    m_mixer.Reset();
+    m_mediaEventSink.Reset();
+
+    // Ask for the clock. Optional, because the EVR might not have a clock.
+    objectCount = 1;
+
+    // Failure is deliberately ignored: m_clock simply stays null.
+    lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                          MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
+                          &objectCount
+                          );
+
+    // Ask for the mixer. (Required.)
+    objectCount = 1;
+
+    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                               MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
+                               &objectCount
+                               );
+
+    if (FAILED(hr))
+        return hr;
+
+    // Make sure that we can work with this mixer.
+    hr = configureMixer(m_mixer.Get());
+    if (FAILED(hr))
+        return hr;
+
+    // Ask for the EVR's event-sink interface. (Required.)
+    objectCount = 1;
+
+    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                               MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
+                               &objectCount
+                               );
+
+    // All services acquired: the presenter is now usable (stopped).
+    if (SUCCEEDED(hr))
+        m_renderState = RenderStopped;
+
+    return hr;
+}
+
+// IMFTopologyServiceLookupClient: called when the topology is torn down.
+// Enters the shut-down state first (under the lock), then flushes pending
+// samples, clears the media type and releases the acquired services.
+HRESULT EVRCustomPresenter::ReleaseServicePointers()
+{
+    // Enter the shut-down state.
+    m_mutex.lock();
+
+    m_renderState = RenderShutdown;
+
+    m_mutex.unlock();
+
+    // Flush any samples that were scheduled.
+    flush();
+
+    // Clear the media type and release related resources.
+    setMediaType(NULL);
+
+    // Release all services that were acquired from InitServicePointers.
+    m_clock.Reset();
+    m_mixer.Reset();
+    m_mediaEventSink.Reset();
+
+    return S_OK;
+}
+
+// The presenter is usable only when the D3D engine is up and at least one
+// pixel format can be rendered to the sink (see supportedFormatsChanged()).
+bool EVRCustomPresenter::isValid() const
+{
+    if (!m_presentEngine->isValid())
+        return false;
+
+    return m_canRenderToSurface;
+}
+
+// IMFVideoPresenter: central dispatcher for messages from the EVR. All
+// handlers run under the presenter lock; every message is rejected with
+// MF_E_SHUTDOWN (via checkShutdown) once the presenter is shut down.
+HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
+{
+    HRESULT hr = S_OK;
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    switch (message) {
+    // Flush all pending samples.
+    case MFVP_MESSAGE_FLUSH:
+        hr = flush();
+        break;
+
+    // Renegotiate the media type with the mixer.
+    case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
+        hr = renegotiateMediaType();
+        break;
+
+    // The mixer received a new input sample.
+    case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
+        hr = processInputNotify();
+        break;
+
+    // Streaming is about to start.
+    case MFVP_MESSAGE_BEGINSTREAMING:
+        hr = beginStreaming();
+        break;
+
+    // Streaming has ended. (The EVR has stopped.)
+    case MFVP_MESSAGE_ENDSTREAMING:
+        hr = endStreaming();
+        break;
+
+    // All input streams have ended.
+    case MFVP_MESSAGE_ENDOFSTREAM:
+        // Set the EOS flag.
+        m_endStreaming = true;
+        // Check if it's time to send the EC_COMPLETE event to the EVR.
+        hr = checkEndOfStream();
+        break;
+
+    // Frame-stepping is starting. param = number of frames to step.
+    case MFVP_MESSAGE_STEP:
+        hr = prepareFrameStep(DWORD(param));
+        break;
+
+    // Cancels frame-stepping.
+    case MFVP_MESSAGE_CANCELSTEP:
+        hr = cancelFrameStep();
+        break;
+
+    default:
+        hr = E_INVALIDARG; // Unknown message. This case should never occur.
+        break;
+    }
+
+    return hr;
+}
+
+// IMFVideoPresenter: returns the currently negotiated media type as
+// IMFVideoMediaType (caller gets an AddRef'd pointer), or
+// MF_E_NOT_INITIALIZED when no type has been set yet.
+HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
+{
+    HRESULT hr = S_OK;
+
+    if (!mediaType)
+        return E_POINTER;
+
+    *mediaType = NULL;
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    if (!m_mediaType)
+        return MF_E_NOT_INITIALIZED;
+
+    return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
+}
+
+// IMFClockStateSink: the presentation clock started (or seeked while
+// running). A position change on an active clock is a seek, so pending
+// samples are flushed; a start from stopped kicks off any queued
+// frame-step work. In both cases the output loop is resumed.
+HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    // We cannot start after shutdown.
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Check if the clock is already active (not stopped).
+    if (isActive()) {
+        m_renderState = RenderStarted;
+
+        // If the clock position changes while the clock is active, it
+        // is a seek request. We need to flush all pending samples.
+        if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
+            flush();
+    } else {
+        m_renderState = RenderStarted;
+
+        // The clock has started from the stopped state.
+
+        // Possibly we are in the middle of frame-stepping OR have samples waiting
+        // in the frame-step queue. Deal with these two cases first:
+        hr = startFrameStep();
+        if (FAILED(hr))
+            return hr;
+    }
+
+    // Now try to get new output samples from the mixer.
+    processOutputLoop();
+
+    return hr;
+}
+
+// IMFClockStateSink: the clock resumed from paused. Handles any pending
+// frame-step state, then resumes pulling output samples from the mixer.
+HRESULT EVRCustomPresenter::OnClockRestart(MFTIME)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // The EVR calls OnClockRestart only while paused.
+
+    m_renderState = RenderStarted;
+
+    // Possibly we are in the middle of frame-stepping OR we have samples waiting
+    // in the frame-step queue. Deal with these two cases first:
+    hr = startFrameStep();
+    if (FAILED(hr))
+        return hr;
+
+    // Now resume the presentation loop.
+    processOutputLoop();
+
+    return hr;
+}
+
+// IMFClockStateSink: the clock stopped. Flushes pending samples and
+// cancels any in-progress frame-step; idempotent if already stopped.
+HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    if (m_renderState != RenderStopped) {
+        m_renderState = RenderStopped;
+        flush();
+
+        // If we are in the middle of frame-stepping, cancel it now.
+        if (m_frameStep.state != FrameStepNone)
+            cancelFrameStep();
+    }
+
+    return S_OK;
+}
+
+// IMFClockStateSink: the presentation clock paused.
+HRESULT EVRCustomPresenter::OnClockPause(MFTIME)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    // Pausing is not allowed after shutdown.
+    const HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    m_renderState = RenderPaused;
+    return S_OK;
+}
+
+// IMFClockStateSink: the playback rate changed. Leaving scrubbing
+// (rate 0 -> non-zero) cancels frame-stepping; the new rate is forwarded
+// to the scheduler so its sleep calculations stay correct.
+HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate)
+{
+    // Note:
+    // The presenter reports its maximum rate through the IMFRateSupport interface.
+    // Here, we assume that the EVR honors the maximum rate.
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // If the rate is changing from zero (scrubbing) to non-zero, cancel the
+    // frame-step operation.
+    if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
+        cancelFrameStep();
+        m_frameStep.samples.clear();
+    }
+
+    m_playbackRate = rate;
+
+    // Tell the scheduler about the new rate.
+    m_scheduler.setClockRate(rate);
+
+    return S_OK;
+}
+
+// IMFRateSupport: report the slowest supported playback rate. Any direction
+// and thinning mode is supported down to a rate of zero.
+// Returns E_POINTER for a null out-pointer, and propagates the shutdown
+// error from checkShutdown() — previously this returned S_OK even after
+// shutdown, leaving *rate unset while reporting success (GetFastestRate
+// already propagates the failure).
+HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
+{
+    if (!rate)
+        return E_POINTER;
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    HRESULT hr = checkShutdown();
+
+    if (SUCCEEDED(hr)) {
+        // There is no minimum playback rate, so the minimum is zero.
+        *rate = 0;
+    }
+
+    return hr;
+}
+
+// IMFRateSupport: report the fastest supported rate for the requested
+// direction and thinning mode; reverse playback is the negated forward max.
+HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
+{
+    if (!rate)
+        return E_POINTER;
+
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    float maxRate = 0.0f;
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Get the maximum *forward* rate.
+    maxRate = getMaxRate(thin);
+
+    // For reverse playback, it's the negative of maxRate.
+    if (direction == MFRATE_REVERSE)
+        maxRate = -maxRate;
+
+    *rate = maxRate;
+
+    return S_OK;
+}
+
+// IMFRateSupport: checks whether |rate| is within the supported range.
+// On success returns S_OK with nearestSupportedRate == rate; if the rate is
+// too fast, returns MF_E_UNSUPPORTED_RATE and reports the clamped maximum
+// (sign-matched for reverse playback). The out-pointer is optional.
+HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    float maxRate = 0.0f;
+    float nearestRate = rate;  // If we support rate, that is the nearest.
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Find the maximum forward rate.
+    // Note: We have no minimum rate (that is, we support anything down to 0).
+    maxRate = getMaxRate(thin);
+
+    if (qFabs(rate) > maxRate) {
+        // The (absolute) requested rate exceeds the maximum rate.
+        hr = MF_E_UNSUPPORTED_RATE;
+
+        // The nearest supported rate is maxRate.
+        nearestRate = maxRate;
+        if (rate < 0) {
+            // Negative for reverse playback.
+            nearestRate = -nearestRate;
+        }
+    }
+
+    // Return the nearest supported rate.
+    if (nearestSupportedRate)
+        *nearestSupportedRate = nearestRate;
+
+    return hr;
+}
+
+// Recomputes m_canRenderToSurface: true if the present engine accepts the
+// D3D equivalent of at least one QVideoFrameFormat pixel format. Called
+// when the sink changes; with no sink, rendering is considered impossible.
+void EVRCustomPresenter::supportedFormatsChanged()
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+    m_canRenderToSurface = false;
+
+    // check if we can render to the surface (compatible formats)
+    if (m_videoSink) {
+        for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
+            // ### set a better preference order
+            QVideoFrameFormat::PixelFormat format = QVideoFrameFormat::PixelFormat(f);
+            if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
+                m_canRenderToSurface = true;
+                break;
+            }
+        }
+    }
+
+    // TODO: if media type already set, renegotiate?
+}
+
+// Replaces the video sink (may be null) on both the presenter and the
+// present engine. supportedFormatsChanged() is called after unlocking
+// because it acquires the same mutex itself.
+void EVRCustomPresenter::setSink(QVideoSink *sink)
+{
+    m_mutex.lock();
+    m_videoSink = sink;
+    m_presentEngine->setSink(sink);
+    m_mutex.unlock();
+
+    supportedFormatsChanged();
+}
+
+// Stores the crop rectangle; it takes effect the next time an optimal
+// media type is negotiated (see createOptimalVideoType).
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+    m_cropRect = cropRect;
+}
+
+// Applies the current normalized source rectangle to the mixer as its
+// zoom (source clipping) rectangle.
+HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
+{
+    // Set the zoom rectangle (ie, the source clipping rectangle).
+    return setMixerSourceRect(mixer, m_sourceRect);
+}
+
+// Negotiates an output media type with the mixer: iterates the mixer's
+// proposed types, and for each one checks our support, builds an adjusted
+// "optimal" type, test-sets it on the mixer, sets it on ourselves and
+// finally commits it to the mixer. Stops at the first type that passes all
+// steps; returns MF_E_NO_MORE_TYPES if none does.
+HRESULT EVRCustomPresenter::renegotiateMediaType()
+{
+    HRESULT hr = S_OK;
+    bool foundMediaType = false;
+
+    IMFMediaType *mixerType = NULL;
+    IMFMediaType *optimalType = NULL;
+
+    if (!m_mixer)
+        return MF_E_INVALIDREQUEST;
+
+    // Loop through all of the mixer's proposed output types.
+    DWORD typeIndex = 0;
+    while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
+        // Release the previous iteration's types before fetching new ones.
+        qt_evr_safe_release(&mixerType);
+        qt_evr_safe_release(&optimalType);
+
+        // Step 1. Get the next media type supported by mixer.
+        hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
+        if (FAILED(hr))
+            break;
+
+        // From now on, if anything in this loop fails, try the next type,
+        // until we succeed or the mixer runs out of types.
+
+        // Step 2. Check if we support this media type.
+        if (SUCCEEDED(hr))
+            hr = isMediaTypeSupported(mixerType);
+
+        // Step 3. Adjust the mixer's type to match our requirements.
+        if (SUCCEEDED(hr))
+            hr = createOptimalVideoType(mixerType, &optimalType);
+
+        // Step 4. Check if the mixer will accept this media type.
+        if (SUCCEEDED(hr))
+            hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
+
+        // Step 5. Try to set the media type on ourselves.
+        if (SUCCEEDED(hr))
+            hr = setMediaType(optimalType);
+
+        // Step 6. Set output media type on mixer.
+        if (SUCCEEDED(hr)) {
+            hr = m_mixer->SetOutputType(0, optimalType, 0);
+
+            // If something went wrong, clear the media type.
+            if (FAILED(hr))
+                setMediaType(NULL);
+        }
+
+        if (SUCCEEDED(hr))
+            foundMediaType = true;
+    }
+
+    qt_evr_safe_release(&mixerType);
+    qt_evr_safe_release(&optimalType);
+
+    return hr;
+}
+
+// Discards all pending video: clears the preroll flag, synchronously
+// flushes the scheduler and empties the frame-step queue. When stopped,
+// also pushes a null sample so the sink repaints with black.
+HRESULT EVRCustomPresenter::flush()
+{
+    m_prerolled = false;
+
+    // The scheduler might have samples that are waiting for
+    // their presentation time. Tell the scheduler to flush.
+
+    // This call blocks until the scheduler threads discards all scheduled samples.
+    m_scheduler.flush();
+
+    // Flush the frame-step queue.
+    m_frameStep.samples.clear();
+
+    if (m_renderState == RenderStopped && m_videoSink) {
+        // Repaint with black.
+        presentSample(nullptr);
+    }
+
+    return S_OK;
+}
+
+// Handles MFVP_MESSAGE_PROCESSINPUTNOTIFY: the mixer has new input.
+// Records that fact and, if an output type is negotiated, tries to pull
+// processed samples; otherwise reports MF_E_TRANSFORM_TYPE_NOT_SET.
+HRESULT EVRCustomPresenter::processInputNotify()
+{
+    // Remember that the mixer has a sample for us.
+    m_sampleNotify = true;
+
+    if (!m_mediaType) {
+        // No valid output format has been negotiated yet.
+        return MF_E_TRANSFORM_TYPE_NOT_SET;
+    }
+
+    // Try to process an output sample.
+    processOutputLoop();
+    return S_OK;
+}
+
+// Handles MFVP_MESSAGE_BEGINSTREAMING: start the scheduler thread, driven
+// by the presentation clock (which may be null).
+HRESULT EVRCustomPresenter::beginStreaming()
+{
+    return m_scheduler.startScheduler(m_clock);
+}
+
+// Handles MFVP_MESSAGE_ENDSTREAMING: shut down the scheduler thread.
+HRESULT EVRCustomPresenter::endStreaming()
+{
+    return m_scheduler.stopScheduler();
+}
+
+// Sends EC_COMPLETE to the EVR once end-of-stream has been signalled AND
+// the mixer has no more input AND no samples remain scheduled. Until all
+// three hold, this is a no-op that will be re-checked later.
+HRESULT EVRCustomPresenter::checkEndOfStream()
+{
+    if (!m_endStreaming) {
+        // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
+        return S_OK;
+    }
+
+    if (m_sampleNotify) {
+        // The mixer still has input.
+        return S_OK;
+    }
+
+    if (m_scheduler.areSamplesScheduled()) {
+        // Samples are still scheduled for rendering.
+        return S_OK;
+    }
+
+    // Everything is complete. Now we can tell the EVR that we are done.
+    notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
+    m_endStreaming = false;
+
+    stopSurface();
+    return S_OK;
+}
+
+// Handles MFVP_MESSAGE_STEP: accumulates the requested step count and arms
+// frame-stepping. Stepping begins immediately if the clock is already
+// running, otherwise it is deferred to OnClockStart/OnClockRestart.
+HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
+{
+    HRESULT hr = S_OK;
+
+    // Cache the step count.
+    m_frameStep.steps += steps;
+
+    // Set the frame-step state.
+    m_frameStep.state = FrameStepWaitingStart;
+
+    // If the clock is are already running, we can start frame-stepping now.
+    // Otherwise, we will start when the clock starts.
+    if (m_renderState == RenderStarted)
+        hr = startFrameStep();
+
+    return hr;
+}
+
+// Called when the clock starts/restarts. If a frame-step was requested,
+// moves to the "pending" state and feeds queued samples through the
+// frame-step path; if stepping is not active, drains the frame-step queue
+// through normal delivery instead.
+HRESULT EVRCustomPresenter::startFrameStep()
+{
+    if (m_frameStep.state == FrameStepWaitingStart) {
+        // We have a frame-step request, and are waiting for the clock to start.
+        // Set the state to "pending," which means we are waiting for samples.
+        m_frameStep.state = FrameStepPending;
+
+        // If the frame-step queue already has samples, process them now.
+        while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
+            const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+            const HRESULT hr = deliverFrameStepSample(sample.Get());
+            if (FAILED(hr))
+                return hr;
+
+            // We break from this loop when:
+            // (a) the frame-step queue is empty, or
+            // (b) the frame-step operation is complete.
+        }
+    } else if (m_frameStep.state == FrameStepNone) {
+        // We are not frame stepping. Therefore, if the frame-step queue has samples,
+        // we need to process them normally.
+        while (!m_frameStep.samples.isEmpty()) {
+            const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+            const HRESULT hr = deliverSample(sample.Get());
+            if (FAILED(hr))
+                return hr;
+        }
+    }
+
+    return S_OK;
+}
+
+// Finishes a frame-step on `sample`: marks the step complete, notifies the
+// EVR with EC_STEP_COMPLETE, and while scrubbing (rate == 0) also raises
+// EC_SCRUB_TIME with the sample's timestamp split into low/high DWORDs.
+HRESULT EVRCustomPresenter::completeFrameStep(const ComPtr<IMFSample> &sample)
+{
+    HRESULT hr = S_OK;
+    MFTIME sampleTime = 0;
+    MFTIME systemTime = 0;
+
+    // Update our state.
+    m_frameStep.state = FrameStepComplete;
+    m_frameStep.sampleNoRef = 0;
+
+    // Notify the EVR that the frame-step is complete.
+    notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)
+
+    // If we are scrubbing (rate == 0), also send the "scrub time" event.
+    if (isScrubbing()) {
+        // Get the time stamp from the sample.
+        hr = sample->GetSampleTime(&sampleTime);
+        if (FAILED(hr)) {
+            // No time stamp. Use the current presentation time.
+            if (m_clock)
+                m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
+
+            hr = S_OK; // (Not an error condition.)
+        }
+
+        // Low DWORD in param1, high DWORD in param2.
+        notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
+    }
+    return hr;
+}
+
+// Aborts frame-stepping. If a step was actually in progress (between
+// "none" and "complete"), tells the EVR via EC_STEP_COMPLETE(TRUE).
+// The queued samples are deliberately kept for a possible later step.
+HRESULT EVRCustomPresenter::cancelFrameStep()
+{
+    FrameStepState oldState = m_frameStep.state;
+
+    m_frameStep.state = FrameStepNone;
+    m_frameStep.steps = 0;
+    m_frameStep.sampleNoRef = 0;
+    // Don't clear the frame-step queue yet, because we might frame step again.
+
+    if (oldState > FrameStepNone && oldState < FrameStepComplete) {
+        // We were in the middle of frame-stepping when it was cancelled.
+        // Notify the EVR.
+        notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
+    }
+    return S_OK;
+}
+
+// Clones the mixer's proposed media type and adjusts it for presentation:
+// applies the crop rectangle (if any) to the geometric/pan-scan/minimum
+// display apertures and disables pan/scan. On success *optimalType holds a
+// new reference owned by the caller.
+HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
+{
+    HRESULT hr = S_OK;
+
+    RECT rcOutput;
+    ZeroMemory(&rcOutput, sizeof(rcOutput));
+
+    MFVideoArea displayArea;
+    ZeroMemory(&displayArea, sizeof(displayArea));
+
+    IMFMediaType *mtOptimal = NULL;
+
+    UINT64 size;
+    int width;
+    int height;
+
+    // Clone the proposed type.
+
+    hr = MFCreateMediaType(&mtOptimal);
+    if (FAILED(hr))
+        goto done;
+
+    hr = proposedType->CopyAllItems(mtOptimal);
+    if (FAILED(hr))
+        goto done;
+
+    // Modify the new type.
+
+    // NOTE(review): hr is not checked here; if MF_MT_FRAME_SIZE is absent,
+    // 'size' is read uninitialized below — confirm the mixer always sets it.
+    hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
+    width = int(HI32(size));
+    height = int(LO32(size));
+
+    if (m_cropRect.isValid()) {
+        rcOutput.left = m_cropRect.x();
+        rcOutput.top = m_cropRect.y();
+        rcOutput.right = m_cropRect.x() + m_cropRect.width();
+        rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+        // Normalize the crop rectangle to [0..1] for the mixer's zoom rect.
+        m_sourceRect.left = float(m_cropRect.x()) / width;
+        m_sourceRect.top = float(m_cropRect.y()) / height;
+        m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+        m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+        if (m_mixer)
+            configureMixer(m_mixer.Get());
+    } else {
+        rcOutput.left = 0;
+        rcOutput.top = 0;
+        rcOutput.right = width;
+        rcOutput.bottom = height;
+    }
+
+    // Set the geometric aperture, and disable pan/scan.
+    displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+                                    rcOutput.bottom - rcOutput.top);
+
+    hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
+    if (FAILED(hr))
+        goto done;
+
+    hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+                            sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    // Set the pan/scan aperture and the minimum display aperture. We don't care
+    // about them per se, but the mixer will reject the type if these exceed the
+    // frame dimensions.
+    hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+                            sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+                            sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    // Return the pointer to the caller.
+    *optimalType = mtOptimal;
+    (*optimalType)->AddRef();
+
+done:
+    qt_evr_safe_release(&mtOptimal);
+    return hr;
+
+}
+
+// Sets (or clears, when mediaType is NULL) the presenter's output media type.
+// Changing the type releases the old resources, asks the present engine to
+// allocate a fresh batch of video samples, tags them with the current token
+// counter, and configures the scheduler's frame rate.
+HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
+{
+    // Note: mediaType can be NULL (to clear the type)
+
+    // Clearing the media type is allowed in any state (including shutdown).
+    if (!mediaType) {
+        stopSurface();
+        m_mediaType.Reset();
+        releaseResources();
+        return S_OK;
+    }
+
+    MFRatio fps = { 0, 0 };
+    QList<ComPtr<IMFSample>> sampleQueue;
+
+    // Cannot set the media type after shutdown.
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        goto done;
+
+    // Check if the new type is actually different.
+    // Note: This function safely handles NULL input parameters.
+    if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
+        goto done; // Nothing more to do.
+
+    // We're really changing the type. First get rid of the old type.
+    m_mediaType.Reset();
+    releaseResources();
+
+    // Initialize the presenter engine with the new media type.
+    // The presenter engine allocates the samples.
+
+    hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
+    if (FAILED(hr))
+        goto done;
+
+    // Mark each sample with our token counter. If this batch of samples becomes
+    // invalid, we increment the counter, so that we know they should be discarded.
+    // (Each iteration copies the ComPtr; 'const auto &' would avoid the
+    // AddRef/Release churn.)
+    for (auto sample : std::as_const(sampleQueue)) {
+        hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
+        if (FAILED(hr))
+            goto done;
+    }
+
+    // Add the samples to the sample pool.
+    hr = m_samplePool.initialize(std::move(sampleQueue));
+    if (FAILED(hr))
+        goto done;
+
+    // Set the frame rate on the scheduler.
+    if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
+        m_scheduler.setFrameRate(fps);
+    } else {
+        // NOTE: The mixer's proposed type might not have a frame rate, in which case
+        // we'll use an arbitrary default. (Although it's unlikely the video source
+        // does not have a frame rate.)
+        m_scheduler.setFrameRate(g_DefaultFrameRate);
+    }
+
+    // Store the media type.
+    m_mediaType = mediaType;
+    m_mediaType->AddRef();
+
+    startSurface();
+
+done:
+    if (FAILED(hr))
+        releaseResources();
+    return hr;
+}
+
+// Validates a media type proposed by the mixer. Rejects compressed and
+// interlaced formats, formats the present engine cannot create surfaces for,
+// and types whose apertures exceed the frame size.
+HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
+{
+    D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
+    BOOL compressed = FALSE;
+    MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
+    MFVideoArea videoCropArea;
+    UINT32 width = 0, height = 0;
+
+    // Validate the format.
+    HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
+    if (FAILED(hr))
+        return hr;
+
+    QVideoFrameFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
+    if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+        return MF_E_INVALIDMEDIATYPE;
+
+    // Reject compressed media types.
+    hr = proposed->IsCompressedFormat(&compressed);
+    if (FAILED(hr))
+        return hr;
+
+    if (compressed)
+        return MF_E_INVALIDMEDIATYPE;
+
+    // The D3DPresentEngine checks whether surfaces can be created using this format
+    hr = m_presentEngine->checkFormat(d3dFormat);
+    if (FAILED(hr))
+        return hr;
+
+    // Reject interlaced formats.
+    hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
+    if (FAILED(hr))
+        return hr;
+
+    if (interlaceMode != MFVideoInterlace_Progressive)
+        return MF_E_INVALIDMEDIATYPE;
+
+    hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
+    if (FAILED(hr))
+        return hr;
+
+    // Validate the various apertures (cropping regions) against the frame size.
+    // Any of these apertures may be unspecified in the media type, in which case
+    // we ignore it. We just want to reject invalid apertures.
+
+    // NOTE(review): each successive check overwrites hr, so an invalid earlier
+    // aperture can be masked by a later valid one — confirm this is intended.
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+    }
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+    }
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+    }
+    return hr;
+}
+
+// Drains the mixer: repeatedly calls processOutput() until it stops
+// returning S_OK (no free samples, mixer starved, or error), then checks
+// for end-of-stream when the mixer reports it needs more input.
+void EVRCustomPresenter::processOutputLoop()
+{
+    HRESULT hr = S_OK;
+
+    // Process as many samples as possible.
+    while (hr == S_OK) {
+        // If the mixer doesn't have a new input sample, break from the loop.
+        if (!m_sampleNotify) {
+            hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
+            break;
+        }
+
+        // Try to process a sample.
+        hr = processOutput();
+
+        // NOTE: ProcessOutput can return S_FALSE to indicate it did not
+        // process a sample. If so, break out of the loop.
+    }
+
+    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+        // The mixer has run out of input data. Check for end-of-stream.
+        checkEndOfStream();
+    }
+}
+
+// Pulls one output sample from the mixer and schedules it for presentation.
+// Returns S_OK if a sample was processed, S_FALSE if nothing could be done
+// right now (prerolled while stopped, or no free samples), or an error.
+HRESULT EVRCustomPresenter::processOutput()
+{
+    // If the clock is not running, we present the first sample,
+    // and then don't present any more until the clock starts.
+    if ((m_renderState != RenderStarted) && m_prerolled)
+        return S_FALSE;
+
+    // Make sure we have a pointer to the mixer.
+    if (!m_mixer)
+        return MF_E_INVALIDREQUEST;
+
+    // Try to get a free sample from the video sample pool.
+    ComPtr<IMFSample> sample = m_samplePool.takeSample();
+    if (!sample)
+        return S_FALSE; // No free samples. Try again when a sample is released.
+
+    // From now on, we have a valid video sample pointer, where the mixer will
+    // write the video data.
+
+    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
+    MFTIME systemTime = 0;
+
+    if (m_clock) {
+        // Latency: Record the starting time for ProcessOutput.
+        m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
+    }
+
+    // Now we are ready to get an output sample from the mixer.
+    DWORD status = 0;
+    MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
+    dataBuffer.pSample = sample.Get();
+    HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
+    // Important: Release any events returned from the ProcessOutput method.
+    qt_evr_safe_release(&dataBuffer.pEvents);
+
+    if (FAILED(hr)) {
+        // Return the sample to the pool.
+        m_samplePool.returnSample(sample);
+
+        // Handle some known error codes from ProcessOutput.
+        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
+            // The mixer's format is not set. Negotiate a new format.
+            hr = renegotiateMediaType();
+        } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+            // There was a dynamic media type change. Clear our media type.
+            setMediaType(NULL);
+        } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+            // The mixer needs more input.
+            // We have to wait for the mixer to get more input.
+            m_sampleNotify = false;
+        }
+
+        return hr;
+    }
+
+    // We got an output sample from the mixer.
+    if (m_clock) {
+        // Latency: Record the ending time for the ProcessOutput operation,
+        // and notify the EVR of the latency.
+
+        m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);
+
+        LONGLONG latencyTime = mixerEndTime - mixerStartTime;
+        notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
+    }
+
+    // Set up notification for when the sample is released.
+    hr = trackSample(sample);
+    if (FAILED(hr))
+        return hr;
+
+    // Schedule the sample.
+    if (m_frameStep.state == FrameStepNone)
+        hr = deliverSample(sample);
+    else // We are frame-stepping
+        hr = deliverFrameStepSample(sample);
+
+    if (FAILED(hr))
+        return hr;
+
+    m_prerolled = true; // We have presented at least one sample now.
+    return S_OK;
+}
+
+// Hands a sample to the scheduler. When the clock is not running or we are
+// scrubbing, the sample is presented immediately instead of being queued.
+// On failure, raises EC_ERRORABORT so the EVR can tear down the pipeline.
+HRESULT EVRCustomPresenter::deliverSample(const ComPtr<IMFSample> &sample)
+{
+    // If we are not actively playing, OR we are scrubbing (rate = 0),
+    // then we need to present the sample immediately. Otherwise,
+    // schedule it normally.
+
+    bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
+
+    HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
+
+    if (FAILED(hr)) {
+        // Notify the EVR that we have failed during streaming. The EVR will notify the
+        // pipeline.
+
+        notifyEvent(EC_ERRORABORT, hr, 0);
+    }
+
+    return hr;
+}
+
+// Routes a mixer output sample while a frame-step operation is active:
+// discards stale samples when scrubbing, queues samples that arrive before
+// the stepped frame (or before the clock starts), and delivers exactly the
+// requested frame, remembering its IUnknown identity for completion matching.
+HRESULT EVRCustomPresenter::deliverFrameStepSample(const ComPtr<IMFSample> &sample)
+{
+    HRESULT hr = S_OK;
+    IUnknown *unk = NULL;
+
+    // For rate 0, discard any sample that ends earlier than the clock time.
+    if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+        // Discard this sample.
+    } else if (m_frameStep.state >= FrameStepScheduled) {
+        // A frame was already submitted. Put this sample on the frame-step queue,
+        // in case we are asked to step to the next frame. If frame-stepping is
+        // cancelled, this sample will be processed normally.
+        m_frameStep.samples.append(sample);
+    } else {
+        // We're ready to frame-step.
+
+        // Decrement the number of steps.
+        if (m_frameStep.steps > 0)
+            m_frameStep.steps--;
+
+        if (m_frameStep.steps > 0) {
+            // This is not the last step. Discard this sample.
+        } else if (m_frameStep.state == FrameStepWaitingStart) {
+            // This is the right frame, but the clock hasn't started yet. Put the
+            // sample on the frame-step queue. When the clock starts, the sample
+            // will be processed.
+            m_frameStep.samples.append(sample);
+        } else {
+            // This is the right frame *and* the clock has started. Deliver this sample.
+            hr = deliverSample(sample);
+            if (FAILED(hr))
+                goto done;
+
+            // Query for IUnknown so that we can identify the sample later.
+            // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
+            hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+            if (FAILED(hr))
+                goto done;
+
+            m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.
+
+            // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
+            // sample from invoking the OnSampleFree callback after the sample is presented.
+            // We use this IUnknown pointer purely to identify the sample later; we never
+            // attempt to dereference the pointer.
+
+            m_frameStep.state = FrameStepScheduled;
+        }
+    }
+done:
+    qt_evr_safe_release(&unk);
+    return hr;
+}
+
+// Registers m_sampleFreeCB on the sample (via IMFTrackedSample) so that
+// onSampleFree() is invoked once the sample's last reference is released.
+HRESULT EVRCustomPresenter::trackSample(const ComPtr<IMFSample> &sample)
+{
+    IMFTrackedSample *tracked = NULL;
+
+    HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
+
+    if (SUCCEEDED(hr))
+        hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
+
+    qt_evr_safe_release(&tracked);
+    return hr;
+}
+
+// Invalidates the current batch of video samples (by bumping the token
+// counter), flushes pending samples, empties the pool, and tells the
+// present engine to drop its resources.
+void EVRCustomPresenter::releaseResources()
+{
+    // Increment the token counter to indicate that all existing video samples
+    // are "stale." As these samples get released, we'll dispose of them.
+    //
+    // Note: The token counter is required because the samples are shared
+    // between more than one thread, and they are returned to the presenter
+    // through an asynchronous callback (onSampleFree). Without the token, we
+    // might accidentally re-use a stale sample after the ReleaseResources
+    // method returns.
+
+    m_tokenCounter++;
+
+    flush();
+
+    m_samplePool.clear();
+
+    m_presentEngine->releaseResources();
+}
+
+// Async callback invoked when a tracked video sample is released. Completes
+// a pending frame-step if this was the stepped sample, then (under the lock)
+// returns non-stale samples to the pool and kicks the output loop.
+HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
+{
+    IUnknown *object = NULL;
+    IMFSample *sample = NULL;
+    IUnknown *unk = NULL;
+    UINT32 token;
+
+    // Get the sample from the async result object.
+    HRESULT hr = result->GetObject(&object);
+    if (FAILED(hr))
+        goto done;
+
+    hr = object->QueryInterface(IID_PPV_ARGS(&sample));
+    if (FAILED(hr))
+        goto done;
+
+    // If this sample was submitted for a frame-step, the frame step operation
+    // is complete.
+
+    // NOTE(review): m_frameStep is read here before m_mutex is taken —
+    // presumably only one thread mutates the frame-step state; confirm.
+    if (m_frameStep.state == FrameStepScheduled) {
+        // Query the sample for IUnknown and compare it to our cached value.
+        hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+        if (FAILED(hr))
+            goto done;
+
+        if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
+            // Notify the EVR.
+            hr = completeFrameStep(sample);
+            if (FAILED(hr))
+                goto done;
+        }
+
+        // Note: Although object is also an IUnknown pointer, it is not
+        // guaranteed to be the exact pointer value returned through
+        // QueryInterface. Therefore, the second QueryInterface call is
+        // required.
+    }
+
+    m_mutex.lock();
+
+    token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
+
+    if (token == m_tokenCounter) {
+        // Return the sample to the sample pool.
+        m_samplePool.returnSample(sample);
+        // A free sample is available. Process more data if possible.
+        processOutputLoop();
+    }
+
+    m_mutex.unlock();
+
+done:
+    if (FAILED(hr))
+        notifyEvent(EC_ERRORABORT, hr, 0);
+    qt_evr_safe_release(&object);
+    qt_evr_safe_release(&sample);
+    qt_evr_safe_release(&unk);
+    return hr;
+}
+
+// Computes the maximum forward playback rate the presenter supports.
+float EVRCustomPresenter::getMaxRate(bool thin)
+{
+    // Non-thinned:
+    // If we have a valid frame rate and a monitor refresh rate, the maximum
+    // playback rate is equal to the refresh rate. Otherwise, the maximum rate
+    // is unbounded (FLT_MAX).
+
+    // Thinned: The maximum rate is unbounded.
+
+    float maxRate = FLT_MAX;
+    MFRatio fps = { 0, 0 };
+    UINT monitorRateHz = 0;
+
+    if (!thin && m_mediaType) {
+        qt_evr_getFrameRate(m_mediaType.Get(), &fps);
+        monitorRateHz = m_presentEngine->refreshRate();
+
+        if (fps.Denominator && fps.Numerator && monitorRateHz) {
+            // Max Rate = Refresh Rate / Frame Rate
+            maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
+        }
+    }
+
+    return maxRate;
+}
+
+// Dispatches the presenter's custom cross-thread events (posted by
+// startSurface/stopSurface/presentSample when called off-thread).
+bool EVRCustomPresenter::event(QEvent *e)
+{
+    switch (int(e->type())) {
+    case StartSurface:
+        startSurface();
+        return true;
+    case StopSurface:
+        stopSurface();
+        return true;
+    case PresentSample:
+        presentSample(static_cast<PresentSampleEvent *>(e)->sample());
+        return true;
+    default:
+        break;
+    }
+    return QObject::event(e);
+}
+
+// Marshals surface start-up onto the presenter's thread via a posted event.
+// No same-thread work is visible here — presumably start-up is handled
+// elsewhere (e.g. by the present engine); confirm against the full file.
+void EVRCustomPresenter::startSurface()
+{
+    if (thread() != QThread::currentThread()) {
+        QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface)));
+        return;
+    }
+}
+
+// Marshals surface shutdown onto the presenter's thread via a posted event.
+// As with startSurface(), no same-thread work is visible in this chunk.
+void EVRCustomPresenter::stopSurface()
+{
+    if (thread() != QThread::currentThread()) {
+        QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StopSurface)));
+        return;
+    }
+}
+
+// Converts a Media Foundation sample into a QVideoFrame and pushes it to the
+// video sink. Re-posts itself onto the presenter's thread when called from
+// another thread. Adjusts timestamps by the seek offset and applies the
+// rotation advertised by the mixer's input stream type.
+void EVRCustomPresenter::presentSample(const ComPtr<IMFSample> &sample)
+{
+    if (thread() != QThread::currentThread()) {
+        QCoreApplication::postEvent(this, new PresentSampleEvent(sample));
+        return;
+    }
+
+    if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
+        return;
+
+    QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);
+
+    // Since start/end times are related to a position when the clock is started,
+    // to have times from the beginning, need to adjust it by adding seeked position.
+    if (m_positionOffset) {
+        if (frame.startTime())
+            frame.setStartTime(frame.startTime() + m_positionOffset);
+        if (frame.endTime())
+            frame.setEndTime(frame.endTime() + m_positionOffset);
+    }
+
+    // Propagate the source's rotation (MF_MT_VIDEO_ROTATION) to the frame.
+    ComPtr<IMFMediaType> inputStreamType;
+    if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
+        auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
+        switch (rotation) {
+        case MFVideoRotationFormat_0: frame.setRotation(QtVideo::Rotation::None); break;
+        case MFVideoRotationFormat_90: frame.setRotation(QtVideo::Rotation::Clockwise90); break;
+        case MFVideoRotationFormat_180: frame.setRotation(QtVideo::Rotation::Clockwise180); break;
+        case MFVideoRotationFormat_270: frame.setRotation(QtVideo::Rotation::Clockwise270); break;
+        default: frame.setRotation(QtVideo::Rotation::None);
+        }
+    }
+
+    m_videoSink->platformVideoSink()->setVideoFrame(frame);
+}
+
+// Records the latest seek position. m_positionOffset is kept in
+// microseconds, so 'position' is presumably milliseconds (hence * 1000) —
+// confirm against the caller.
+void EVRCustomPresenter::positionChanged(qint64 position)
+{
+    m_positionOffset = position * 1000;
+}
+
+// Applies a normalized source (zoom) rectangle to the mixer by writing the
+// VIDEO_ZOOM_RECT blob on its attribute store.
+HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
+{
+    if (!mixer)
+        return E_POINTER;
+
+    IMFAttributes *attributes = NULL;
+
+    HRESULT hr = mixer->GetAttributes(&attributes);
+    if (SUCCEEDED(hr)) {
+        hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
+                                 sizeof(sourceRect));
+        attributes->Release();
+    }
+    return hr;
+}
+
+// Maps a Media Foundation video media type to the corresponding Qt pixel
+// format; returns Format_Invalid for non-video or unrecognized subtypes.
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+{
+    GUID majorType;
+    if (FAILED(type->GetMajorType(&majorType)))
+        return QVideoFrameFormat::Format_Invalid;
+    if (majorType != MFMediaType_Video)
+        return QVideoFrameFormat::Format_Invalid;
+
+    GUID subtype;
+    if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
+        return QVideoFrameFormat::Format_Invalid;
+
+    return QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
new file mode 100644
index 000000000..28f1cbc68
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
@@ -0,0 +1,357 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRCUSTOMPRESENTER_H
+#define EVRCUSTOMPRESENTER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <qmutex.h>
+#include <qqueue.h>
+#include <qevent.h>
+#include <qrect.h>
+#include <qvideoframeformat.h>
+#include <qvideosink.h>
+#include <qpointer.h>
+#include <private/qcomptr_p.h>
+#include "evrhelpers_p.h"
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+
+QT_BEGIN_NAMESPACE
+
+class EVRCustomPresenter;
+class D3DPresentEngine;
+
+// Adapts a member-function pointer on T into an IMFAsyncCallback.
+// Ref-counting is delegated to the parent object, so the callback's
+// lifetime is tied to (and must not outlive) the parent.
+template<class T>
+class AsyncCallback : public IMFAsyncCallback
+{
+    Q_DISABLE_COPY(AsyncCallback)
+public:
+    typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
+
+    AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
+    {
+    }
+
+    // IUnknown
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv) override
+    {
+        if (!ppv)
+            return E_POINTER;
+
+        if (iid == __uuidof(IUnknown)) {
+            *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
+        } else if (iid == __uuidof(IMFAsyncCallback)) {
+            *ppv = static_cast<IMFAsyncCallback*>(this);
+        } else {
+            *ppv = NULL;
+            return E_NOINTERFACE;
+        }
+        AddRef();
+        return S_OK;
+    }
+
+    STDMETHODIMP_(ULONG) AddRef() override {
+        // Delegate to parent class.
+        return m_parent->AddRef();
+    }
+    STDMETHODIMP_(ULONG) Release() override {
+        // Delegate to parent class.
+        return m_parent->Release();
+    }
+
+    // IMFAsyncCallback methods
+    STDMETHODIMP GetParameters(DWORD*, DWORD*) override
+    {
+        // Implementation of this method is optional.
+        return E_NOTIMPL;
+    }
+
+    STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) override
+    {
+        return (m_parent->*m_invokeFn)(asyncResult);
+    }
+
+    T *m_parent;
+    InvokeFn m_invokeFn;
+};
+
+// Schedules video samples for presentation on a dedicated worker thread,
+// pacing them against the presentation clock and playback rate.
+class Scheduler
+{
+    Q_DISABLE_COPY(Scheduler)
+public:
+    // Messages posted to the scheduler thread's message queue.
+    enum ScheduleEvent
+    {
+        Terminate = WM_USER,
+        Schedule = WM_USER + 1,
+        Flush = WM_USER + 2
+    };
+
+    Scheduler(EVRCustomPresenter *presenter);
+    ~Scheduler();
+
+    void setFrameRate(const MFRatio &fps);
+    void setClockRate(float rate) { m_playbackRate = rate; }
+
+    HRESULT startScheduler(ComPtr<IMFClock> clock);
+    HRESULT stopScheduler();
+
+    HRESULT scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow);
+    HRESULT processSamplesInQueue(LONG *nextSleep);
+    HRESULT flush();
+
+    bool areSamplesScheduled();
+
+    // ThreadProc for the scheduler thread.
+    static DWORD WINAPI schedulerThreadProc(LPVOID parameter);
+
+private:
+    DWORD schedulerThreadProcPrivate();
+    bool isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const;
+
+    EVRCustomPresenter *m_presenter;
+
+    QQueue<ComPtr<IMFSample>> m_scheduledSamples; // Samples waiting to be presented.
+
+    ComPtr<IMFClock> m_clock; // Presentation clock. Can be NULL.
+
+    DWORD m_threadID;
+    ThreadHandle m_schedulerThread;
+    EventHandle m_threadReadyEvent;
+    EventHandle m_flushEvent;
+
+    float m_playbackRate;
+    MFTIME m_perFrame_1_4th; // 1/4th of the frame duration.
+
+    QMutex m_mutex;
+};
+
+// Thread-safe pool of pre-allocated video samples shared between the
+// presenter and the mixer's output path.
+class SamplePool
+{
+    Q_DISABLE_COPY(SamplePool)
+public:
+    SamplePool();
+    ~SamplePool();
+
+    HRESULT initialize(QList<ComPtr<IMFSample>> &&samples);
+    HRESULT clear();
+
+    ComPtr<IMFSample> takeSample();
+    void returnSample(const ComPtr<IMFSample> &sample);
+
+private:
+    QMutex m_mutex;
+    QList<ComPtr<IMFSample>> m_videoSampleQueue;
+    bool m_initialized;
+};
+
+// Custom EVR presenter: receives decoded video frames from the EVR mixer
+// and presents them through a QVideoSink instead of the default renderer.
+// Implements the COM interfaces the EVR requires of a presenter.
+class EVRCustomPresenter
+    : public QObject
+    , public IMFVideoDeviceID
+    , public IMFVideoPresenter // Inherits IMFClockStateSink
+    , public IMFRateSupport
+    , public IMFGetService
+    , public IMFTopologyServiceLookupClient
+{
+    Q_DISABLE_COPY(EVRCustomPresenter)
+public:
+    // Defines the state of the presenter.
+    enum RenderState
+    {
+        RenderStarted = 1,
+        RenderStopped,
+        RenderPaused,
+        RenderShutdown // Initial state.
+    };
+
+    // Defines the presenter's state with respect to frame-stepping.
+    enum FrameStepState
+    {
+        FrameStepNone, // Not frame stepping.
+        FrameStepWaitingStart, // Frame stepping, but the clock is not started.
+        FrameStepPending, // Clock is started. Waiting for samples.
+        FrameStepScheduled, // Submitted a sample for rendering.
+        FrameStepComplete // Sample was rendered.
+    };
+
+    // Custom QEvent types used to marshal work onto the presenter's thread.
+    enum PresenterEvents
+    {
+        StartSurface = QEvent::User,
+        StopSurface = QEvent::User + 1,
+        PresentSample = QEvent::User + 2
+    };
+
+    EVRCustomPresenter(QVideoSink *sink = 0);
+    ~EVRCustomPresenter() override;
+
+    bool isValid() const;
+
+    // IUnknown methods
+    STDMETHODIMP QueryInterface(REFIID riid, void ** ppv) override;
+    STDMETHODIMP_(ULONG) AddRef() override;
+    STDMETHODIMP_(ULONG) Release() override;
+
+    // IMFGetService methods
+    STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override;
+
+    // IMFVideoPresenter methods
+    STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override;
+    STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType) override;
+
+    // IMFClockStateSink methods
+    STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override;
+    STDMETHODIMP OnClockStop(MFTIME systemTime) override;
+    STDMETHODIMP OnClockPause(MFTIME systemTime) override;
+    STDMETHODIMP OnClockRestart(MFTIME systemTime) override;
+    STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override;
+
+    // IMFRateSupport methods
+    STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+    STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+    STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override;
+
+    // IMFVideoDeviceID methods
+    STDMETHODIMP GetDeviceID(IID* deviceID) override;
+
+    // IMFTopologyServiceLookupClient methods
+    STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override;
+    STDMETHODIMP ReleaseServicePointers() override;
+
+    void supportedFormatsChanged();
+    void setSink(QVideoSink *sink);
+    void setCropRect(QRect cropRect);
+
+    void startSurface();
+    void stopSurface();
+    void presentSample(const ComPtr<IMFSample> &sample);
+
+    bool event(QEvent *) override;
+
+public Q_SLOTS:
+    void positionChanged(qint64 position);
+
+private:
+    HRESULT checkShutdown() const
+    {
+        if (m_renderState == RenderShutdown)
+            return MF_E_SHUTDOWN;
+        else
+            return S_OK;
+    }
+
+    // The "active" state is started or paused.
+    inline bool isActive() const
+    {
+        return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
+    }
+
+    // Scrubbing occurs when the frame rate is 0.
+    inline bool isScrubbing() const { return m_playbackRate == 0.0f; }
+
+    // Send an event to the EVR through its IMediaEventSink interface.
+    void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
+    {
+        if (m_mediaEventSink)
+            m_mediaEventSink->Notify(eventCode, param1, param2);
+    }
+
+    float getMaxRate(bool thin);
+
+    // Mixer operations
+    HRESULT configureMixer(IMFTransform *mixer);
+
+    // Formats
+    HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
+    HRESULT setMediaType(IMFMediaType *mediaType);
+    HRESULT isMediaTypeSupported(IMFMediaType *mediaType);
+
+    // Message handlers
+    HRESULT flush();
+    HRESULT renegotiateMediaType();
+    HRESULT processInputNotify();
+    HRESULT beginStreaming();
+    HRESULT endStreaming();
+    HRESULT checkEndOfStream();
+
+    // Managing samples
+    void processOutputLoop();
+    HRESULT processOutput();
+    HRESULT deliverSample(const ComPtr<IMFSample> &sample);
+    HRESULT trackSample(const ComPtr<IMFSample> &sample);
+    void releaseResources();
+
+    // Frame-stepping
+    HRESULT prepareFrameStep(DWORD steps);
+    HRESULT startFrameStep();
+    HRESULT deliverFrameStepSample(const ComPtr<IMFSample> &sample);
+    HRESULT completeFrameStep(const ComPtr<IMFSample> &sample);
+    HRESULT cancelFrameStep();
+
+    // Callback when a video sample is released.
+    HRESULT onSampleFree(IMFAsyncResult *result);
+    AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
+
+    // Holds information related to frame-stepping.
+    struct FrameStep
+    {
+        FrameStepState state = FrameStepNone;
+        QList<ComPtr<IMFSample>> samples;
+        DWORD steps = 0;
+        DWORD_PTR sampleNoRef = 0; // IUnknown identity of the stepped sample, not ref-counted.
+    };
+
+    long m_refCount;
+
+    RenderState m_renderState;
+    FrameStep m_frameStep;
+
+    QRecursiveMutex m_mutex;
+
+    // Samples and scheduling
+    Scheduler m_scheduler; // Manages scheduling of samples.
+    SamplePool m_samplePool; // Pool of allocated samples.
+    DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.
+
+    // Rendering state
+    bool m_sampleNotify; // Did the mixer signal it has an input sample?
+    bool m_prerolled; // Have we presented at least one sample?
+    bool m_endStreaming; // Did we reach the end of the stream (EOS)?
+
+    MFVideoNormalizedRect m_sourceRect;
+    float m_playbackRate;
+
+    D3DPresentEngine *m_presentEngine; // Rendering engine. (Never null if the constructor succeeds.)
+
+    ComPtr<IMFClock> m_clock; // The EVR's clock.
+    ComPtr<IMFTransform> m_mixer; // The EVR's mixer.
+    ComPtr<IMediaEventSink> m_mediaEventSink; // The EVR's event-sink interface.
+    ComPtr<IMFMediaType> m_mediaType; // Output media type
+
+    QPointer<QVideoSink> m_videoSink;
+    bool m_canRenderToSurface;
+    qint64 m_positionOffset; // Seek position in microseconds.
+    QRect m_cropRect; // Video crop rectangle
+};
+
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
+
+QT_END_NAMESPACE
+
+#endif // EVRCUSTOMPRESENTER_H
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
new file mode 100644
index 000000000..517f1d969
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -0,0 +1,699 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrd3dpresentengine_p.h"
+
+#include "evrhelpers_p.h"
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <qvideoframe.h>
+#include <QDebug>
+#include <qthread.h>
+#include <qvideosink.h>
+#include <qloggingcategory.h>
+
+#include <d3d11_1.h>
+
+#include <rhi/qrhi.h>
+
+#if QT_CONFIG(opengl)
+# include <qopenglcontext.h>
+# include <qopenglfunctions.h>
+# include <qoffscreensurface.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine");
+
+// Video buffer backed by an IMFSample wrapping a D3D9 render-target surface.
+// CPU access is provided by copying the render target into a system-memory
+// surface on first map() and handing out a pointer into that locked copy.
+class IMFSampleVideoBuffer : public QHwVideoBuffer
+{
+public:
+    IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+                         QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
+        : QHwVideoBuffer(type, rhi),
+          m_device(device),
+          m_sample(sample),
+          m_mapMode(QtVideo::MapMode::NotMapped)
+    {
+    }
+
+    ~IMFSampleVideoBuffer() override
+    {
+        // Release the lock if the buffer is destroyed while still mapped.
+        if (m_memSurface && m_mapMode != QtVideo::MapMode::NotMapped)
+            m_memSurface->UnlockRect();
+    }
+
+    // Maps the sample for CPU access. Only a single ReadOnly mapping is
+    // supported; any other mode, or mapping twice, yields empty MapData.
+    MapData map(QtVideo::MapMode mode) override
+    {
+        if (!m_sample || m_mapMode != QtVideo::MapMode::NotMapped || mode != QtVideo::MapMode::ReadOnly)
+            return {};
+
+        D3DSURFACE_DESC desc;
+        if (m_memSurface) {
+            // System-memory copy already exists from an earlier map().
+            if (FAILED(m_memSurface->GetDesc(&desc)))
+                return {};
+
+        } else {
+            // First map: fetch the D3D9 surface behind the sample's buffer...
+            ComPtr<IMFMediaBuffer> buffer;
+            HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+            if (FAILED(hr))
+                return {};
+
+            ComPtr<IDirect3DSurface9> surface;
+            hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9, (void **)(surface.GetAddressOf()));
+            if (FAILED(hr))
+                return {};
+
+            if (FAILED(surface->GetDesc(&desc)))
+                return {};
+
+            // ...then blit it into a lockable SYSTEMMEM surface of the same size/format.
+            if (FAILED(m_device->CreateOffscreenPlainSurface(desc.Width, desc.Height, desc.Format, D3DPOOL_SYSTEMMEM, m_memSurface.GetAddressOf(), nullptr)))
+                return {};
+
+            if (FAILED(m_device->GetRenderTargetData(surface.Get(), m_memSurface.Get()))) {
+                m_memSurface.Reset();
+                return {};
+            }
+        }
+
+        D3DLOCKED_RECT rect;
+        if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QtVideo::MapMode::ReadOnly ? D3DLOCK_READONLY : 0)))
+            return {};
+
+        m_mapMode = mode;
+
+        // Single interleaved plane; Pitch may exceed the visible row width.
+        MapData mapData;
+        mapData.planeCount = 1;
+        mapData.bytesPerLine[0] = (int)rect.Pitch;
+        mapData.data[0] = reinterpret_cast<uchar *>(rect.pBits);
+        mapData.dataSize[0] = (int)(rect.Pitch * desc.Height);
+        return mapData;
+    }
+
+    void unmap() override
+    {
+        if (m_mapMode == QtVideo::MapMode::NotMapped)
+            return;
+
+        m_mapMode = QtVideo::MapMode::NotMapped;
+        if (m_memSurface)
+            m_memSurface->UnlockRect();
+    }
+
+protected:
+    // Accessible to the texture-sharing subclasses below.
+    ComPtr<IDirect3DDevice9Ex> m_device;
+    ComPtr<IMFSample> m_sample;
+
+private:
+    ComPtr<IDirect3DSurface9> m_memSurface; // lazily-created CPU-readable copy
+    QtVideo::MapMode m_mapMode;
+};
+
+// Owns the QRhiTexture created from a shared D3D11 texture; keeps the
+// underlying ID3D11Texture2D alive for the lifetime of the QRhiTexture.
+class QVideoFrameD3D11Textures: public QVideoFrameTextures
+{
+public:
+    QVideoFrameD3D11Textures(std::unique_ptr<QRhiTexture> &&tex, ComPtr<ID3D11Texture2D> &&d3d11tex)
+        : m_tex(std::move(tex))
+        , m_d3d11tex(std::move(d3d11tex))
+    {}
+
+    // Single-plane RGB(A) content: only plane 0 has a texture.
+    QRhiTexture *texture(uint plane) const override
+    {
+        return plane == 0 ? m_tex.get() : nullptr;
+    };
+
+private:
+    std::unique_ptr<QRhiTexture> m_tex;
+    ComPtr<ID3D11Texture2D> m_d3d11tex;
+};
+
+// Zero-copy path for a D3D11-backed QRhi: opens the D3D9 texture's shared
+// handle as an ID3D11Texture2D and wraps it in a QRhiTexture. Falls back to
+// the base class's CPU map() when texture mapping is not used.
+class D3D11TextureVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+    D3D11TextureVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+                            HANDLE sharedHandle, QRhi *rhi)
+        : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+        , m_sharedHandle(sharedHandle)
+    {}
+
+    std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+    {
+        if (!rhi || rhi->backend() != QRhi::D3D11)
+            return {};
+
+        auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi->nativeHandles());
+        if (!nh)
+            return {};
+
+        auto dev = reinterpret_cast<ID3D11Device *>(nh->dev);
+        if (!dev)
+            return {};
+
+        // Import the D3D9 surface into D3D11 through its shared handle.
+        ComPtr<ID3D11Texture2D> d3d11tex;
+        HRESULT hr = dev->OpenSharedResource(m_sharedHandle, __uuidof(ID3D11Texture2D), (void**)(d3d11tex.GetAddressOf()));
+        if (SUCCEEDED(hr)) {
+            D3D11_TEXTURE2D_DESC desc = {};
+            d3d11tex->GetDesc(&desc);
+            // Only the two RGBA byte orders used by createVideoSamples() are expected.
+            QRhiTexture::Format format;
+            if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM)
+                format = QRhiTexture::BGRA8;
+            else if (desc.Format == DXGI_FORMAT_R8G8B8A8_UNORM)
+                format = QRhiTexture::RGBA8;
+            else
+                return {};
+
+            std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+            tex->createFrom({quint64(d3d11tex.Get()), 0});
+            return std::make_unique<QVideoFrameD3D11Textures>(std::move(tex), std::move(d3d11tex));
+
+        } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Failed to obtain D3D11Texture2D from D3D9Texture2D handle";
+        }
+        return {};
+    }
+
+private:
+    HANDLE m_sharedHandle = nullptr; // shared handle of the D3D9 texture; not owned
+};
+
+#if QT_CONFIG(opengl)
+// Wraps a D3D9 texture as an OpenGL texture via the WGL_NV_DX_interop
+// extension. create() registers and locks the D3D texture for GL access;
+// the destructor unlocks, unregisters and deletes the GL texture again.
+class QVideoFrameOpenGlTextures : public QVideoFrameTextures
+{
+    struct InterOpHandles {
+        GLuint textureName = 0; // GL texture id created for the interop object
+        HANDLE device = nullptr; // handle from wglDXOpenDeviceNV
+        HANDLE texture = nullptr; // handle from wglDXRegisterObjectNV
+    };
+
+public:
+    Q_DISABLE_COPY(QVideoFrameOpenGlTextures);
+
+    QVideoFrameOpenGlTextures(std::unique_ptr<QRhiTexture> &&tex, const WglNvDxInterop &wgl, InterOpHandles &handles)
+        : m_tex(std::move(tex))
+        , m_wgl(wgl)
+        , m_handles(handles)
+    {}
+
+    // Tears down the interop state in reverse order of create(). Requires a
+    // current GL context; without one the handles are leaked (debug-logged).
+    ~QVideoFrameOpenGlTextures() override {
+        if (QOpenGLContext::currentContext()) {
+            if (!m_wgl.wglDXUnlockObjectsNV(m_handles.device, 1, &m_handles.texture))
+                qCDebug(qLcEvrD3DPresentEngine) << "Failed to unlock OpenGL texture";
+
+            if (!m_wgl.wglDXUnregisterObjectNV(m_handles.device, m_handles.texture))
+                qCDebug(qLcEvrD3DPresentEngine) << "Failed to unregister OpenGL texture";
+
+            QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+            if (funcs)
+                funcs->glDeleteTextures(1, &m_handles.textureName);
+            else
+                qCDebug(qLcEvrD3DPresentEngine) << "Could not delete texture, OpenGL context functions missing";
+
+            if (!m_wgl.wglDXCloseDeviceNV(m_handles.device))
+                qCDebug(qLcEvrD3DPresentEngine) << "Failed to close D3D-GL device";
+
+        } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Could not release texture, OpenGL context missing";
+        }
+    }
+
+    // Factory: registers the D3D9 texture with GL and locks it read-only.
+    // Returns null on any failure, cleaning up partially-created state.
+    static std::unique_ptr<QVideoFrameOpenGlTextures> create(const WglNvDxInterop &wgl, QRhi *rhi,
+                                                             IDirect3DDevice9Ex *device, IDirect3DTexture9 *texture,
+                                                             HANDLE sharedHandle)
+    {
+        if (!rhi || rhi->backend() != QRhi::OpenGLES2)
+            return {};
+
+        if (!QOpenGLContext::currentContext())
+            return {};
+
+        InterOpHandles handles = {};
+        handles.device = wgl.wglDXOpenDeviceNV(device);
+        if (!handles.device) {
+            qCDebug(qLcEvrD3DPresentEngine) << "Failed to open D3D device";
+            return {};
+        }
+
+        // Required before registering a resource that has a share handle.
+        wgl.wglDXSetResourceShareHandleNV(texture, sharedHandle);
+
+        QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+        if (funcs) {
+            funcs->glGenTextures(1, &handles.textureName);
+            handles.texture = wgl.wglDXRegisterObjectNV(handles.device, texture, handles.textureName,
+                                                        GL_TEXTURE_2D, WglNvDxInterop::WGL_ACCESS_READ_ONLY_NV);
+            if (handles.texture) {
+                if (wgl.wglDXLockObjectsNV(handles.device, 1, &handles.texture)) {
+                    D3DSURFACE_DESC desc;
+                    texture->GetLevelDesc(0, &desc);
+                    QRhiTexture::Format format;
+                    if (desc.Format == D3DFMT_A8R8G8B8)
+                        format = QRhiTexture::BGRA8;
+                    else if (desc.Format == D3DFMT_A8B8G8R8)
+                        format = QRhiTexture::RGBA8;
+                    else
+                        return {};
+
+                    std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+                    tex->createFrom({quint64(handles.textureName), 0});
+                    return std::make_unique<QVideoFrameOpenGlTextures>(std::move(tex), wgl, handles);
+                }
+
+                qCDebug(qLcEvrD3DPresentEngine) << "Failed to lock OpenGL texture";
+                wgl.wglDXUnregisterObjectNV(handles.device, handles.texture);
+            } else {
+                qCDebug(qLcEvrD3DPresentEngine) << "Could not register D3D9 texture in OpenGL";
+            }
+
+            funcs->glDeleteTextures(1, &handles.textureName);
+        } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Failed generate texture names, OpenGL context functions missing";
+        }
+        return {};
+    }
+
+    // Single-plane content: only plane 0 has a texture.
+    QRhiTexture *texture(uint plane) const override
+    {
+        return plane == 0 ? m_tex.get() : nullptr;
+    };
+private:
+    std::unique_ptr<QRhiTexture> m_tex;
+    WglNvDxInterop m_wgl;
+    InterOpHandles m_handles;
+};
+
+// Zero-copy path for an OpenGL-backed QRhi: resolves the D3D9 texture that
+// contains the sample (cached after the first call) and exposes it to GL via
+// QVideoFrameOpenGlTextures::create().
+class OpenGlVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+    OpenGlVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+                      const WglNvDxInterop &wglNvDxInterop, HANDLE sharedHandle, QRhi *rhi)
+        : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+        , m_sharedHandle(sharedHandle)
+        , m_wgl(wglNvDxInterop)
+    {}
+
+    std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+    {
+        if (!m_texture) {
+            // Walk sample -> media buffer -> D3D9 surface -> containing texture.
+            ComPtr<IMFMediaBuffer> buffer;
+            HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+            if (FAILED(hr))
+                return {};
+
+            ComPtr<IDirect3DSurface9> surface;
+            hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9,
+                              (void **)(surface.GetAddressOf()));
+            if (FAILED(hr))
+                return {};
+
+            hr = surface->GetContainer(IID_IDirect3DTexture9, (void **)m_texture.GetAddressOf());
+            if (FAILED(hr))
+                return {};
+        }
+
+        return QVideoFrameOpenGlTextures::create(m_wgl, rhi, m_device.Get(), m_texture.Get(), m_sharedHandle);
+    }
+
+private:
+    HANDLE m_sharedHandle = nullptr; // shared handle of the D3D9 texture; not owned
+    WglNvDxInterop m_wgl;
+    ComPtr<IDirect3DTexture9> m_texture; // cached container texture of the sample
+};
+#endif
+
+// Constructs the engine; all D3D9 initialization happens in setSink(),
+// which is a no-op when sink is null.
+D3DPresentEngine::D3DPresentEngine(QVideoSink *sink)
+    : m_deviceResetToken(0)
+{
+    ZeroMemory(&m_displayMode, sizeof(m_displayMode));
+    setSink(sink);
+}
+
+D3DPresentEngine::~D3DPresentEngine()
+{
+    releaseResources();
+}
+
+// Rebinds the engine to a new sink. Tears down all D3D9 objects and, if the
+// new sink is non-null, re-creates Direct3D, the device manager and the
+// device from scratch. Failures are logged, leaving the engine invalid.
+void D3DPresentEngine::setSink(QVideoSink *sink)
+{
+    if (sink == m_sink)
+        return;
+
+    m_sink = sink;
+
+    releaseResources();
+    // Drop device before manager before the D3D9 root object.
+    m_device.Reset();
+    m_devices.Reset();
+    m_D3D9.Reset();
+
+    if (!m_sink)
+        return;
+
+    HRESULT hr = initializeD3D();
+
+    if (SUCCEEDED(hr)) {
+        hr = createD3DDevice();
+        if (FAILED(hr))
+            qWarning("Failed to create D3D device");
+    } else {
+        qWarning("Failed to initialize D3D");
+    }
+}
+
+// Creates the IDirect3D9Ex root object and the DXVA2 device manager
+// (m_deviceResetToken is needed later by ResetDevice()).
+HRESULT D3DPresentEngine::initializeD3D()
+{
+    HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, m_D3D9.GetAddressOf());
+
+    if (SUCCEEDED(hr))
+        hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, m_devices.GetAddressOf());
+
+    return hr;
+}
+
+// Finds the D3D9 adapter whose LUID matches the adapter the given D3D11-based
+// QRhi runs on, so D3D9 renders on the same GPU. Returns false if no match.
+static bool findD3D11AdapterID(QRhi &rhi, IDirect3D9Ex *D3D9, UINT &adapterID)
+{
+    auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi.nativeHandles());
+    if (D3D9 && nh) {
+        for (auto i = 0u; i < D3D9->GetAdapterCount(); ++i) {
+            LUID luid = {};
+            D3D9->GetAdapterLUID(i, &luid);
+            if (luid.LowPart == nh->adapterLuidLow && luid.HighPart == nh->adapterLuidHigh) {
+                adapterID = i;
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+#if QT_CONFIG(opengl)
+// Resolves one WGL function pointer; returns false when unavailable.
+template <typename T>
+static bool getProc(const QOpenGLContext *ctx, T &fn, const char *fName)
+{
+    fn = reinterpret_cast<T>(ctx->getProcAddress(fName));
+    return fn != nullptr;
+}
+
+// Probes for the WGL_NV_DX_interop extension on a temporary offscreen GL
+// context and fills in all interop entry points. Returns false when the
+// extension or any of its functions is missing.
+static bool readWglNvDxInteropProc(WglNvDxInterop &f)
+{
+    QScopedPointer<QOffscreenSurface> surface(new QOffscreenSurface);
+    surface->create();
+    QScopedPointer<QOpenGLContext> ctx(new QOpenGLContext);
+    ctx->create();
+    ctx->makeCurrent(surface.get());
+
+    auto wglGetExtensionsStringARB = reinterpret_cast<const char* (WINAPI* )(HDC)>
+            (ctx->getProcAddress("wglGetExtensionsStringARB"));
+    if (!wglGetExtensionsStringARB) {
+        qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (no wglGetExtensionsStringARB function)";
+        return false;
+    }
+
+    // Any window DC works for querying the extension string; use the shell's.
+    HWND hwnd = ::GetShellWindow();
+    auto dc = ::GetDC(hwnd);
+
+    const char *wglExtString = wglGetExtensionsStringARB(dc);
+    if (!wglExtString)
+        qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (wglGetExtensionsStringARB returned null)";
+
+    bool hasExtension = wglExtString && strstr(wglExtString, "WGL_NV_DX_interop");
+    ReleaseDC(hwnd, dc);
+    if (!hasExtension) {
+        qCDebug(qLcEvrD3DPresentEngine) << "WGL_NV_DX_interop missing";
+        return false;
+    }
+
+    return getProc(ctx.get(), f.wglDXOpenDeviceNV, "wglDXOpenDeviceNV")
+        && getProc(ctx.get(), f.wglDXCloseDeviceNV, "wglDXCloseDeviceNV")
+        && getProc(ctx.get(), f.wglDXSetResourceShareHandleNV, "wglDXSetResourceShareHandleNV")
+        && getProc(ctx.get(), f.wglDXRegisterObjectNV, "wglDXRegisterObjectNV")
+        && getProc(ctx.get(), f.wglDXUnregisterObjectNV, "wglDXUnregisterObjectNV")
+        && getProc(ctx.get(), f.wglDXLockObjectsNV, "wglDXLockObjectsNV")
+        && getProc(ctx.get(), f.wglDXUnlockObjectsNV, "wglDXUnlockObjectsNV");
+}
+#endif
+
+namespace {
+
+// Returns true unless the user explicitly disabled HW texture conversion via
+// QT_DISABLE_HW_TEXTURES_CONVERSION=<nonzero>; an unset or zero value keeps
+// zero-copy rendering enabled.
+bool hwTextureRenderingEnabled() {
+    // add possibility for an user to opt-out HW video rendering
+    // using the same env. variable as for FFmpeg backend
+    static bool isDisableConversionSet = false;
+    static const int disableHwConversion = qEnvironmentVariableIntValue(
+            "QT_DISABLE_HW_TEXTURES_CONVERSION", &isDisableConversionSet);
+
+    return !isDisableConversionSet || !disableHwConversion;
+}
+
+}
+
+// Creates the D3D9Ex device used for decoding/presentation. Picks the adapter
+// matching the sink's RHI when zero-copy texture sharing is possible
+// (D3D11 adapter-LUID match, or GL with WGL_NV_DX_interop); otherwise falls
+// back to adapter 0 with CPU readback. Finally registers the device with the
+// DXVA2 device manager.
+HRESULT D3DPresentEngine::createD3DDevice()
+{
+    if (!m_D3D9 || !m_devices)
+        return MF_E_NOT_INITIALIZED;
+
+    m_useTextureRendering = false;
+    UINT adapterID = 0;
+
+    if (hwTextureRenderingEnabled()) {
+        QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+        if (rhi) {
+            if (rhi->backend() == QRhi::D3D11) {
+                m_useTextureRendering = findD3D11AdapterID(*rhi, m_D3D9.Get(), adapterID);
+#if QT_CONFIG(opengl)
+            } else if (rhi->backend() == QRhi::OpenGLES2) {
+                m_useTextureRendering = readWglNvDxInteropProc(m_wglNvDxInterop);
+#endif
+            } else {
+                qCDebug(qLcEvrD3DPresentEngine) << "Not supported RHI backend type";
+            }
+        } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "No RHI associated with this sink";
+        }
+
+        if (!m_useTextureRendering)
+            qCDebug(qLcEvrD3DPresentEngine) << "Could not find compatible RHI adapter, zero copy disabled";
+    }
+
+    D3DCAPS9 ddCaps;
+    ZeroMemory(&ddCaps, sizeof(ddCaps));
+
+    HRESULT hr = m_D3D9->GetDeviceCaps(adapterID, D3DDEVTYPE_HAL, &ddCaps);
+    if (FAILED(hr))
+        return hr;
+
+    DWORD vp = 0;
+    if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+    else
+        vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+
+    // Minimal 1x1 windowed swap chain: the device is used for video surfaces,
+    // not for presenting a back buffer.
+    D3DPRESENT_PARAMETERS pp;
+    ZeroMemory(&pp, sizeof(pp));
+
+    pp.BackBufferWidth = 1;
+    pp.BackBufferHeight = 1;
+    pp.BackBufferCount = 1;
+    pp.Windowed = TRUE;
+    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp.BackBufferFormat = D3DFMT_UNKNOWN;
+    pp.hDeviceWindow = nullptr;
+    pp.Flags = D3DPRESENTFLAG_VIDEO;
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+    ComPtr<IDirect3DDevice9Ex> device;
+
+    // MULTITHREADED: the device is used from MF worker threads;
+    // FPU_PRESERVE: do not change the FPU control word under Qt's feet.
+    hr = m_D3D9->CreateDeviceEx(
+                adapterID,
+                D3DDEVTYPE_HAL,
+                pp.hDeviceWindow,
+                vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
+                &pp,
+                NULL,
+                device.GetAddressOf()
+                );
+    if (FAILED(hr))
+        return hr;
+
+    hr = m_D3D9->GetAdapterDisplayMode(adapterID, &m_displayMode);
+    if (FAILED(hr))
+        return hr;
+
+    // Hand the device to the DXVA2 manager so the mixer/decoder can share it.
+    hr = m_devices->ResetDevice(device.Get(), m_deviceResetToken);
+    if (FAILED(hr))
+        return hr;
+
+    m_device = device;
+    return hr;
+}
+
+// The engine is usable once createD3DDevice() has succeeded.
+bool D3DPresentEngine::isValid() const
+{
+    return m_device.Get() != nullptr;
+}
+
+// Drops state tied to the current sample allocation; the samples themselves
+// are owned by the presenter's sample pool, not by the engine.
+void D3DPresentEngine::releaseResources()
+{
+    m_surfaceFormat = QVideoFrameFormat();
+}
+
+// IMFGetService-style lookup: only exposes the DXVA2 device manager.
+// NOTE(review): ppv is not null-checked and is left untouched on failure —
+// callers are expected to check the HRESULT before using *ppv.
+HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
+{
+    HRESULT hr = S_OK;
+
+    if (riid == __uuidof(IDirect3DDeviceManager9)) {
+        if (!m_devices) {
+            hr = MF_E_UNSUPPORTED_SERVICE;
+        } else {
+            // Out-param carries its own reference.
+            *ppv = m_devices.Get();
+            m_devices->AddRef();
+        }
+    } else {
+        hr = MF_E_UNSUPPORTED_SERVICE;
+    }
+
+    return hr;
+}
+
+// Checks whether the device can render the given D3D format: it must be a
+// valid render-target format for the current display mode AND one of the four
+// 32-bit RGB layouts this engine supports as output.
+HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
+{
+    if (!m_D3D9 || !m_device)
+        return E_FAIL;
+
+    HRESULT hr = S_OK;
+
+    D3DDISPLAYMODE mode;
+    D3DDEVICE_CREATION_PARAMETERS params;
+
+    // Query against the adapter/device type the device was actually created with.
+    hr = m_device->GetCreationParameters(&params);
+    if (FAILED(hr))
+        return hr;
+
+    UINT uAdapter = params.AdapterOrdinal;
+    D3DDEVTYPE type = params.DeviceType;
+
+    hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
+    if (FAILED(hr))
+        return hr;
+
+    hr = m_D3D9->CheckDeviceFormat(uAdapter, type, mode.Format,
+                                   D3DUSAGE_RENDERTARGET,
+                                   D3DRTYPE_SURFACE,
+                                   format);
+    if (FAILED(hr))
+        return hr;
+
+    bool ok = format == D3DFMT_X8R8G8B8
+            || format == D3DFMT_A8R8G8B8
+            || format == D3DFMT_X8B8G8R8
+            || format == D3DFMT_A8B8G8R8;
+
+    return ok ? S_OK : D3DERR_NOTAVAILABLE;
+}
+
+// Allocates PRESENTER_BUFFER_COUNT shareable render-target textures for the
+// given media type and wraps each in an IMFSample appended to
+// videoSampleQueue. frameSize, when valid and non-empty, overrides the frame
+// size from the media type. On success m_surfaceFormat describes the
+// resulting frames; on failure partially-created state is released.
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format,
+                                             QList<ComPtr<IMFSample>> &videoSampleQueue,
+                                             QSize frameSize)
+{
+    if (!format || !m_device)
+        return MF_E_UNEXPECTED;
+
+    HRESULT hr = S_OK;
+    releaseResources();
+
+    UINT32 width = 0, height = 0;
+    hr = MFGetAttributeSize(format, MF_MT_FRAME_SIZE, &width, &height);
+    if (FAILED(hr))
+        return hr;
+
+    if (frameSize.isValid() && !frameSize.isEmpty()) {
+        width = frameSize.width();
+        height = frameSize.height();
+    }
+
+    DWORD d3dFormat = 0;
+    hr = qt_evr_getFourCC(format, &d3dFormat);
+    if (FAILED(hr))
+        return hr;
+
+    // FIXME: RHI defines only RGBA, thus add the alpha channel to the selected format
+    if (d3dFormat == D3DFMT_X8R8G8B8)
+        d3dFormat = D3DFMT_A8R8G8B8;
+    else if (d3dFormat == D3DFMT_X8B8G8R8)
+        d3dFormat = D3DFMT_A8B8G8R8;
+
+    for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
+        // texture ref cnt is increased by GetSurfaceLevel()/MFCreateVideoSampleFromSurface()
+        // below, so it will be destroyed only when the sample pool is released.
+        ComPtr<IDirect3DTexture9> texture;
+        HANDLE sharedHandle = nullptr;
+        hr = m_device->CreateTexture(width, height, 1, D3DUSAGE_RENDERTARGET, (D3DFORMAT)d3dFormat, D3DPOOL_DEFAULT, texture.GetAddressOf(), &sharedHandle);
+        if (FAILED(hr))
+            break;
+
+        ComPtr<IDirect3DSurface9> surface;
+        hr = texture->GetSurfaceLevel(0, surface.GetAddressOf());
+        if (FAILED(hr))
+            break;
+
+        ComPtr<IMFSample> videoSample;
+        hr = MFCreateVideoSampleFromSurface(surface.Get(), videoSample.GetAddressOf());
+        if (FAILED(hr))
+            break;
+
+        // Remember sample -> shared handle so makeVideoFrame() can pick the
+        // zero-copy path for samples from this pool.
+        m_sampleTextureHandle[i] = {videoSample.Get(), sharedHandle};
+        videoSampleQueue.append(videoSample);
+    }
+
+    if (SUCCEEDED(hr)) {
+        m_surfaceFormat = QVideoFrameFormat(QSize(width, height), qt_evr_pixelFormatFromD3DFormat(d3dFormat));
+    } else {
+        releaseResources();
+    }
+
+    return hr;
+}
+
+// Wraps a pool sample in a QVideoFrame. Uses the zero-copy texture buffer
+// matching the sink's RHI backend when the sample has a known shared handle;
+// otherwise falls back to the CPU-readback IMFSampleVideoBuffer. Also
+// converts the sample's MF timestamps to Qt frame start/end times.
+QVideoFrame D3DPresentEngine::makeVideoFrame(const ComPtr<IMFSample> &sample)
+{
+    if (!sample)
+        return {};
+
+    // Look up the shared handle recorded in createVideoSamples().
+    HANDLE sharedHandle = nullptr;
+    for (const auto &p : m_sampleTextureHandle)
+        if (p.first == sample.Get())
+            sharedHandle = p.second;
+
+    std::unique_ptr<IMFSampleVideoBuffer> vb;
+    QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+    if (m_useTextureRendering && sharedHandle && rhi) {
+        if (rhi->backend() == QRhi::D3D11) {
+            vb = std::make_unique<D3D11TextureVideoBuffer>(m_device, sample, sharedHandle, rhi);
+#if QT_CONFIG(opengl)
+        } else if (rhi->backend() == QRhi::OpenGLES2) {
+            vb = std::make_unique<OpenGlVideoBuffer>(m_device, sample, m_wglNvDxInterop,
+                                                     sharedHandle, rhi);
+#endif
+        }
+    }
+
+    if (!vb)
+        vb = std::make_unique<IMFSampleVideoBuffer>(m_device, sample, rhi);
+
+    QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(vb), m_surfaceFormat);
+
+    // WMF uses 100-nanosecond units, Qt uses microseconds
+    LONGLONG startTime = 0;
+    auto hr = sample->GetSampleTime(&startTime);
+    if (SUCCEEDED(hr)) {
+        frame.setStartTime(startTime / 10);
+
+        LONGLONG duration = -1;
+        if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+            frame.setEndTime((startTime + duration) / 10);
+    }
+
+    return frame;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
new file mode 100644
index 000000000..93aa90b71
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
@@ -0,0 +1,153 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRD3DPRESENTENGINE_H
+#define EVRD3DPRESENTENGINE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QMutex>
+#include <QSize>
+#include <QVideoFrameFormat>
+#include <private/qcomptr_p.h>
+#include <qpointer.h>
+
+#include <d3d9.h>
+
+struct IDirect3D9Ex;
+struct IDirect3DDevice9Ex;
+struct IDirect3DDeviceManager9;
+struct IDirect3DSurface9;
+struct IDirect3DTexture9;
+struct IMFSample;
+struct IMFMediaType;
+
+QT_BEGIN_NAMESPACE
+class QVideoFrame;
+class QVideoSink;
+QT_END_NAMESPACE
+
+// Randomly generated GUIDs
+static const GUID MFSamplePresenter_SampleCounter =
+{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
+
+#if QT_CONFIG(opengl)
+# include <qopengl.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+#ifdef MAYBE_ANGLE
+
+class OpenGLResources;
+
+// Thin wrapper around the EGL entry points needed for pbuffer/texture
+// binding. Only compiled for ANGLE builds (MAYBE_ANGLE).
+class EGLWrapper
+{
+    Q_DISABLE_COPY(EGLWrapper)
+public:
+    EGLWrapper();
+
+    __eglMustCastToProperFunctionPointerType getProcAddress(const char *procname);
+    EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+    EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface);
+    EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+    EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+private:
+    typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname);
+    typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+    typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface);
+    typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+    typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+    EglGetProcAddress m_eglGetProcAddress;
+    EglCreatePbufferSurface m_eglCreatePbufferSurface;
+    EglDestroySurface m_eglDestroySurface;
+    EglBindTexImage m_eglBindTexImage;
+    EglReleaseTexImage m_eglReleaseTexImage;
+};
+
+#endif // MAYBE_ANGLE
+
+#if QT_CONFIG(opengl)
+
+// Function-pointer table for the WGL_NV_DX_interop extension, filled in by
+// readWglNvDxInteropProc() in evrd3dpresentengine.cpp.
+struct WglNvDxInterop {
+    HANDLE (WINAPI* wglDXOpenDeviceNV) (void* dxDevice);
+    BOOL (WINAPI* wglDXCloseDeviceNV) (HANDLE hDevice);
+    HANDLE (WINAPI* wglDXRegisterObjectNV) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access);
+    BOOL (WINAPI* wglDXSetResourceShareHandleNV) (void *dxResource, HANDLE shareHandle);
+    BOOL (WINAPI* wglDXLockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+    BOOL (WINAPI* wglDXUnlockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+    BOOL (WINAPI* wglDXUnregisterObjectNV) (HANDLE hDevice, HANDLE hObject);
+
+    static const int WGL_ACCESS_READ_ONLY_NV = 0;
+};
+
+#endif
+
+// Owns the Direct3D9Ex device used by the EVR presenter: creates the device,
+// allocates the shareable sample pool, and converts presented samples into
+// QVideoFrames (zero-copy via D3D11/GL interop when the sink's RHI allows).
+class D3DPresentEngine
+{
+    Q_DISABLE_COPY(D3DPresentEngine)
+public:
+    D3DPresentEngine(QVideoSink *sink);
+    virtual ~D3DPresentEngine();
+
+    // True once the D3D9 device has been created successfully.
+    bool isValid() const;
+
+    HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
+    HRESULT checkFormat(D3DFORMAT format);
+    UINT refreshRate() const { return m_displayMode.RefreshRate; }
+
+    HRESULT createVideoSamples(IMFMediaType *format, QList<ComPtr<IMFSample>> &videoSampleQueue,
+                               QSize frameSize);
+    QVideoFrameFormat videoSurfaceFormat() const { return m_surfaceFormat; }
+    QVideoFrame makeVideoFrame(const ComPtr<IMFSample> &sample);
+
+    void releaseResources();
+    void setSink(QVideoSink *sink);
+
+private:
+    static const int PRESENTER_BUFFER_COUNT = 3;
+
+    HRESULT initializeD3D();
+    HRESULT createD3DDevice();
+
+    // Maps each pool sample to the shared handle of its backing texture.
+    std::pair<IMFSample *, HANDLE> m_sampleTextureHandle[PRESENTER_BUFFER_COUNT] = {};
+
+    UINT m_deviceResetToken; // token for IDirect3DDeviceManager9::ResetDevice
+    D3DDISPLAYMODE m_displayMode;
+
+    ComPtr<IDirect3D9Ex> m_D3D9;
+    ComPtr<IDirect3DDevice9Ex> m_device;
+    ComPtr<IDirect3DDeviceManager9> m_devices;
+
+    QVideoFrameFormat m_surfaceFormat;
+
+    QPointer<QVideoSink> m_sink;
+    bool m_useTextureRendering = false; // zero-copy path enabled?
+#if QT_CONFIG(opengl)
+    WglNvDxInterop m_wglNvDxInterop;
+#endif
+
+#ifdef MAYBE_ANGLE
+    unsigned int updateTexture(IDirect3DSurface9 *src);
+
+    OpenGLResources *m_glResources;
+    IDirect3DTexture9 *m_texture;
+#endif
+
+    friend class IMFSampleVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif // EVRD3DPRESENTENGINE_H
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers.cpp b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
new file mode 100644
index 000000000..bf4347c69
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
@@ -0,0 +1,140 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrhelpers_p.h"
+
+#ifndef D3DFMT_YV12
+#define D3DFMT_YV12 (D3DFORMAT)MAKEFOURCC ('Y', 'V', '1', '2')
+#endif
+#ifndef D3DFMT_NV12
+#define D3DFMT_NV12 (D3DFORMAT)MAKEFOURCC ('N', 'V', '1', '2')
+#endif
+
+QT_BEGIN_NAMESPACE
+
+// Extracts the FourCC/D3D format code from a media type's subtype GUID
+// (the first DWORD of an MF video subtype GUID is the FourCC).
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
+{
+    if (!fourCC)
+        return E_POINTER;
+
+    HRESULT hr = S_OK;
+    GUID guidSubType = GUID_NULL;
+
+    if (SUCCEEDED(hr))
+        hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
+
+    if (SUCCEEDED(hr))
+        *fourCC = guidSubType.Data1;
+
+    return hr;
+}
+
+// Two null types compare equal; otherwise strict equality per IMFMediaType::IsEqual.
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
+{
+    if (!type1 && !type2)
+        return true;
+    if (!type1 || !type2)
+        return false;
+
+    DWORD dwFlags = 0;
+    HRESULT hr = type1->IsEqual(type2, &dwFlags);
+
+    // S_OK means fully equal; S_FALSE reports partial matches via dwFlags.
+    return (hr == S_OK);
+}
+
+// Rejects a video area whose offset+extent exceeds the frame dimensions.
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
+{
+    float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX);
+    float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY);
+
+    if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
+         ((LONG)fOffsetY + area.Area.cy > (LONG)height) ) {
+        return MF_E_INVALIDMEDIATYPE;
+    }
+    return S_OK;
+}
+
+// Returns true when the sample's presentation interval (start + duration)
+// lies entirely before the clock's current time, i.e. the sample is late.
+// Returns false on null arguments or if any query fails.
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
+{
+    if (!sample || !clock)
+        return false;
+
+    HRESULT hr = S_OK;
+    MFTIME hnsTimeNow = 0;
+    MFTIME hnsSystemTime = 0;
+    MFTIME hnsSampleStart = 0;
+    MFTIME hnsSampleDuration = 0;
+
+    hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+    if (SUCCEEDED(hr))
+        hr = sample->GetSampleTime(&hnsSampleStart);
+
+    if (SUCCEEDED(hr))
+        hr = sample->GetSampleDuration(&hnsSampleDuration);
+
+    if (SUCCEEDED(hr)) {
+        if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
+            return true;
+    }
+
+    return false;
+}
+
+// Maps a D3D9 surface format (or FourCC) to the corresponding Qt pixel
+// format; unknown formats map to Format_Invalid.
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
+{
+    switch (format) {
+    case D3DFMT_A8R8G8B8:
+        return QVideoFrameFormat::Format_BGRA8888;
+    case D3DFMT_X8R8G8B8:
+        return QVideoFrameFormat::Format_BGRX8888;
+    case D3DFMT_A8:
+        return QVideoFrameFormat::Format_Y8;
+    case D3DFMT_A8B8G8R8:
+        return QVideoFrameFormat::Format_RGBA8888;
+    case D3DFMT_X8B8G8R8:
+        return QVideoFrameFormat::Format_RGBX8888;
+    case D3DFMT_UYVY:
+        return QVideoFrameFormat::Format_UYVY;
+    case D3DFMT_YUY2:
+        return QVideoFrameFormat::Format_YUYV;
+    case D3DFMT_NV12:
+        return QVideoFrameFormat::Format_NV12;
+    case D3DFMT_YV12:
+        return QVideoFrameFormat::Format_YV12;
+    case D3DFMT_UNKNOWN:
+    default:
+        return QVideoFrameFormat::Format_Invalid;
+    }
+}
+
+// Inverse mapping from Qt pixel format to D3D9 format. Note that the D3D
+// names are little-endian byte-order names, hence the apparent RGB/BGR swap
+// relative to the Qt names. NOTE(review): Format_ARGB8888 maps to
+// D3DFMT_A8B8G8R8, same as Format_RGBA8888 — so the mapping is not
+// one-to-one and does not round-trip through the function above.
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+    switch (format) {
+    case QVideoFrameFormat::Format_ARGB8888:
+        return D3DFMT_A8B8G8R8;
+    case QVideoFrameFormat::Format_BGRA8888:
+        return D3DFMT_A8R8G8B8;
+    case QVideoFrameFormat::Format_BGRX8888:
+        return D3DFMT_X8R8G8B8;
+    case QVideoFrameFormat::Format_Y8:
+        return D3DFMT_A8;
+    case QVideoFrameFormat::Format_RGBA8888:
+        return D3DFMT_A8B8G8R8;
+    case QVideoFrameFormat::Format_RGBX8888:
+        return D3DFMT_X8B8G8R8;
+    case QVideoFrameFormat::Format_UYVY:
+        return D3DFMT_UYVY;
+    case QVideoFrameFormat::Format_YUYV:
+        return D3DFMT_YUY2;
+    case QVideoFrameFormat::Format_NV12:
+        return D3DFMT_NV12;
+    case QVideoFrameFormat::Format_YV12:
+        return D3DFMT_YV12;
+    case QVideoFrameFormat::Format_Invalid:
+    default:
+        return D3DFMT_UNKNOWN;
+    }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers_p.h b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
new file mode 100644
index 000000000..30779c835
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
@@ -0,0 +1,93 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRHELPERS_H
+#define EVRHELPERS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qvideoframe.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <private/quniquehandle_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Releases a COM interface pointer and nulls it out (legacy helper for code
+// not yet converted to ComPtr).
+template<class T>
+static inline void qt_evr_safe_release(T **unk)
+{
+    if (*unk) {
+        (*unk)->Release();
+        *unk = NULL;
+    }
+}
+
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC);
+
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
+
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
+
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
+
+// MFOffset is a 16.16 fixed-point value: integer part + fract/65536.
+inline float qt_evr_MFOffsetToFloat(const MFOffset& offset)
+{
+    return offset.value + (float(offset.fract) / 65536);
+}
+
+// Converts a float to MF's 16.16 fixed-point representation.
+inline MFOffset qt_evr_makeMFOffset(float v)
+{
+    MFOffset offset;
+    offset.value = short(v);
+    offset.fract = WORD(65536 * (v-offset.value));
+    return offset;
+}
+
+// Builds an MFVideoArea from a float origin and integer extent.
+inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
+{
+    MFVideoArea area;
+    area.OffsetX = qt_evr_makeMFOffset(x);
+    area.OffsetY = qt_evr_makeMFOffset(y);
+    area.Area.cx = width;
+    area.Area.cy = height;
+    return area;
+}
+
+// Reads the frame rate from a media type as a numerator/denominator pair.
+inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
+{
+    return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE,
+                               reinterpret_cast<UINT32*>(&pRatio->Numerator),
+                               reinterpret_cast<UINT32*>(&pRatio->Denominator));
+}
+
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format);
+
+// QUniqueHandle traits for Win32 handles whose invalid value is nullptr
+// (events, threads) and that are closed with CloseHandle().
+struct NullHandleTraits
+{
+    using Type = HANDLE;
+    static Type invalidValue() { return nullptr; }
+    static bool close(Type handle) { return CloseHandle(handle) != 0; }
+};
+
+using EventHandle = QUniqueHandle<NullHandleTraits>;
+using ThreadHandle = QUniqueHandle<NullHandleTraits>;
+
+QT_END_NAMESPACE
+
+#endif // EVRHELPERS_H
+
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
new file mode 100644
index 000000000..854c9ddb2
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrvideowindowcontrol_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// Construct an unattached window control: all EVR interface pointers start
+// null and the cached image-control values start at their neutral defaults.
+// setEvr() must be called before any call can reach the renderer.
+EvrVideoWindowControl::EvrVideoWindowControl(QVideoSink *parent)
+    : QPlatformVideoSink(parent)
+    , m_windowId(0)
+    , m_windowColor(RGB(0, 0, 0))
+    , m_dirtyValues(0)
+    , m_aspectRatioMode(Qt::KeepAspectRatio)
+    , m_brightness(0)
+    , m_contrast(0)
+    , m_hue(0)
+    , m_saturation(0)
+    , m_fullScreen(false)
+    , m_displayControl(0)
+    , m_processor(0)
+{
+}
+
+// Release any EVR interfaces still held (display control / processor).
+EvrVideoWindowControl::~EvrVideoWindowControl()
+{
+    clear();
+}
+
+// Attach this control to an EVR instance: query its service interfaces
+// (the display control is mandatory, the video processor optional) and
+// replay all cached window/display/image settings onto the new renderer.
+// Passing null simply detaches and returns true. Otherwise returns true
+// only when the display control was successfully obtained.
+bool EvrVideoWindowControl::setEvr(IUnknown *evr)
+{
+    clear();
+
+    if (!evr)
+        return true;
+
+    IMFGetService *service = NULL;
+
+    if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service)))
+        && SUCCEEDED(service->GetService(MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_displayControl)))) {
+
+        // Mixer service is optional; image controls stay inert if it is
+        // unavailable (m_processor remains null).
+        service->GetService(MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_processor));
+
+        // Re-apply the state cached while we were detached.
+        setWinId(m_windowId);
+        setDisplayRect(m_displayRect);
+        setAspectRatioMode(m_aspectRatioMode);
+        // Flag every ProcAmp value dirty so all are pushed at least once.
+        m_dirtyValues = DXVA2_ProcAmp_Brightness | DXVA2_ProcAmp_Contrast | DXVA2_ProcAmp_Hue | DXVA2_ProcAmp_Saturation;
+        applyImageControls();
+    }
+
+    if (service)
+        service->Release();
+
+    return m_displayControl != NULL;
+}
+
+// Drop the EVR interfaces acquired in setEvr(), leaving both pointers null.
+void EvrVideoWindowControl::clear()
+{
+    if (m_displayControl) {
+        m_displayControl->Release();
+        m_displayControl = NULL;
+    }
+
+    if (m_processor) {
+        m_processor->Release();
+        m_processor = NULL;
+    }
+}
+
+// Remember the native window id and, when attached, hand it to the EVR
+// display control as the video output window.
+void EvrVideoWindowControl::setWinId(WId id)
+{
+    m_windowId = id;
+
+    if (!m_displayControl)
+        return;
+
+    m_displayControl->SetVideoWindow(HWND(m_windowId));
+}
+
+// Cache the destination rectangle and, when attached, update the EVR's
+// source/destination video position. For Qt::KeepAspectRatioByExpanding the
+// source rectangle is cropped (centered) so the scaled video fills the
+// destination completely.
+void EvrVideoWindowControl::setDisplayRect(const QRect &rect)
+{
+    m_displayRect = rect;
+
+    if (m_displayControl) {
+        // Win32 RECT is exclusive at right/bottom, QRect is inclusive.
+        RECT displayRect = { rect.left(), rect.top(), rect.right() + 1, rect.bottom() + 1 };
+        QSize sourceSize = nativeSize();
+
+        RECT sourceRect = { 0, 0, sourceSize.width(), sourceSize.height() };
+
+        if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
+            // Compute the centered crop window: the display rect's size
+            // scaled down to fit inside the source frame.
+            QSize clippedSize = rect.size();
+            clippedSize.scale(sourceRect.right, sourceRect.bottom, Qt::KeepAspectRatio);
+
+            sourceRect.left = (sourceRect.right - clippedSize.width()) / 2;
+            sourceRect.top = (sourceRect.bottom - clippedSize.height()) / 2;
+            sourceRect.right = sourceRect.left + clippedSize.width();
+            sourceRect.bottom = sourceRect.top + clippedSize.height();
+        }
+
+        if (sourceSize.width() > 0 && sourceSize.height() > 0) {
+            // The EVR expects the source rectangle normalized to [0, 1].
+            // NOTE(review): the divisors are the (possibly cropped) right and
+            // bottom edges, so right/bottom always normalize to 1.0 even in
+            // the expanding case — confirm this is the intended crop mapping.
+            MFVideoNormalizedRect sourceNormRect;
+            sourceNormRect.left = float(sourceRect.left) / float(sourceRect.right);
+            sourceNormRect.top = float(sourceRect.top) / float(sourceRect.bottom);
+            sourceNormRect.right = float(sourceRect.right) / float(sourceRect.right);
+            sourceNormRect.bottom = float(sourceRect.bottom) / float(sourceRect.bottom);
+            m_displayControl->SetVideoPosition(&sourceNormRect, &displayRect);
+        } else {
+            m_displayControl->SetVideoPosition(NULL, &displayRect);
+        }
+    }
+}
+
+// Track the requested full-screen state. No renderer work is needed here;
+// we only record the flag so the change-detection guard works.
+void EvrVideoWindowControl::setFullScreen(bool fullScreen)
+{
+    if (m_fullScreen == fullScreen)
+        return;
+    // Fix: previously the member was never assigned, making the guard above
+    // permanently compare against the constructor default.
+    m_fullScreen = fullScreen;
+}
+
+// Store the aspect-ratio policy and translate it to the EVR's
+// MFVideoAspectRatioMode, then recompute the display geometry since the
+// source cropping in setDisplayRect depends on this mode.
+void EvrVideoWindowControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+    m_aspectRatioMode = mode;
+
+    if (m_displayControl) {
+        switch (mode) {
+        case Qt::IgnoreAspectRatio:
+            //comment from MSDN: Do not maintain the aspect ratio of the video. Stretch the video to fit the output rectangle.
+            m_displayControl->SetAspectRatioMode(MFVideoARMode_None);
+            break;
+        case Qt::KeepAspectRatio:
+            //comment from MSDN: Preserve the aspect ratio of the video by letterboxing or within the output rectangle.
+            m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+            break;
+        case Qt::KeepAspectRatioByExpanding:
+            //for this mode, more adjustment will be done in setDisplayRect
+            m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+            break;
+        default:
+            break;
+        }
+        // Geometry depends on the mode (cropping for ByExpanding).
+        setDisplayRect(m_displayRect);
+    }
+}
+
+// Cache the new brightness and push it to the video processor; no-op when
+// the value is unchanged.
+void EvrVideoWindowControl::setBrightness(float brightness)
+{
+    if (brightness == m_brightness)
+        return;
+
+    m_brightness = brightness;
+    m_dirtyValues |= DXVA2_ProcAmp_Brightness;
+    applyImageControls();
+}
+
+// Cache the new contrast and push it to the video processor; no-op when
+// the value is unchanged.
+void EvrVideoWindowControl::setContrast(float contrast)
+{
+    if (contrast == m_contrast)
+        return;
+
+    m_contrast = contrast;
+    m_dirtyValues |= DXVA2_ProcAmp_Contrast;
+    applyImageControls();
+}
+
+// Cache the new hue and push it to the video processor; no-op when the
+// value is unchanged.
+void EvrVideoWindowControl::setHue(float hue)
+{
+    if (hue == m_hue)
+        return;
+
+    m_hue = hue;
+    m_dirtyValues |= DXVA2_ProcAmp_Hue;
+    applyImageControls();
+}
+
+// Cache the new saturation and push it to the video processor; no-op when
+// the value is unchanged.
+void EvrVideoWindowControl::setSaturation(float saturation)
+{
+    if (saturation == m_saturation)
+        return;
+
+    m_saturation = saturation;
+    m_dirtyValues |= DXVA2_ProcAmp_Saturation;
+    applyImageControls();
+}
+
+// Push all pending (dirty) ProcAmp values to the video processor. The dirty
+// mask tells the driver which fields of the struct to read; on success the
+// mask is reset. Does nothing while no processor interface is available.
+void EvrVideoWindowControl::applyImageControls()
+{
+    if (!m_processor)
+        return;
+
+    // Fix: zero-initialize so no indeterminate bytes are handed to the
+    // driver for fields that are not flagged in m_dirtyValues.
+    DXVA2_ProcAmpValues values = {};
+    if (m_dirtyValues & DXVA2_ProcAmp_Brightness)
+        values.Brightness = scaleProcAmpValue(DXVA2_ProcAmp_Brightness, m_brightness);
+    if (m_dirtyValues & DXVA2_ProcAmp_Contrast)
+        values.Contrast = scaleProcAmpValue(DXVA2_ProcAmp_Contrast, m_contrast);
+    if (m_dirtyValues & DXVA2_ProcAmp_Hue)
+        values.Hue = scaleProcAmpValue(DXVA2_ProcAmp_Hue, m_hue);
+    if (m_dirtyValues & DXVA2_ProcAmp_Saturation)
+        values.Saturation = scaleProcAmpValue(DXVA2_ProcAmp_Saturation, m_saturation);
+
+    if (SUCCEEDED(m_processor->SetProcAmpValues(m_dirtyValues, &values)))
+        m_dirtyValues = 0;
+}
+
+// Map a normalized control value in [-1, 1] onto the driver's ProcAmp range
+// for 'prop': 0 maps to DefaultValue, +1 to MaxValue, -1 to MinValue.
+// Falls back to 0.0 when the range query fails. Must only be called while
+// m_processor is non-null (guaranteed by applyImageControls).
+DXVA2_Fixed32 EvrVideoWindowControl::scaleProcAmpValue(DWORD prop, float value) const
+{
+    float scaledValue = 0.0;
+
+    DXVA2_ValueRange range;
+    if (SUCCEEDED(m_processor->GetProcAmpRange(prop, &range))) {
+        scaledValue = DXVA2FixedToFloat(range.DefaultValue);
+        if (value > 0)
+            scaledValue += float(value) * (DXVA2FixedToFloat(range.MaxValue) - DXVA2FixedToFloat(range.DefaultValue));
+        else if (value < 0)
+            // Double negation: value < 0 and (Min - Default) < 0, so this
+            // subtraction moves the result toward MinValue as intended.
+            scaledValue -= float(value) * (DXVA2FixedToFloat(range.MinValue) - DXVA2FixedToFloat(range.DefaultValue));
+    }
+
+    return DXVA2FloatToFixed(scaledValue);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_evrvideowindowcontrol_p.cpp"
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
new file mode 100644
index 000000000..c4875d28d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRVIDEOWINDOWCONTROL_H
+#define EVRVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Video sink backend that renders through Media Foundation's Enhanced Video
+// Renderer (EVR) directly into a native window. Caches all window/display/
+// image-control state so it can be replayed when an EVR is (re)attached
+// via setEvr().
+class EvrVideoWindowControl : public QPlatformVideoSink
+{
+    Q_OBJECT
+public:
+    // Fix: use nullptr rather than 0 for the pointer default.
+    EvrVideoWindowControl(QVideoSink *parent = nullptr);
+    ~EvrVideoWindowControl() override;
+
+    // Attach/detach the EVR instance; returns true on success (or detach).
+    bool setEvr(IUnknown *evr);
+
+    void setWinId(WId id) override;
+
+    void setDisplayRect(const QRect &rect) override;
+
+    void setFullScreen(bool fullScreen) override;
+
+    void setAspectRatioMode(Qt::AspectRatioMode mode) override;
+
+    // Image controls take normalized values in [-1, 1], 0 = driver default.
+    void setBrightness(float brightness) override;
+    void setContrast(float contrast) override;
+    void setHue(float hue) override;
+    void setSaturation(float saturation) override;
+
+    // Flush dirty ProcAmp values to the video processor.
+    void applyImageControls();
+
+private:
+    void clear();
+    DXVA2_Fixed32 scaleProcAmpValue(DWORD prop, float value) const;
+
+    WId m_windowId;
+    COLORREF m_windowColor;
+    DWORD m_dirtyValues;           // bitmask of DXVA2_ProcAmp_* pending flush
+    Qt::AspectRatioMode m_aspectRatioMode;
+    QRect m_displayRect;
+    float m_brightness;
+    float m_contrast;
+    float m_hue;
+    float m_saturation;
+    bool m_fullScreen;
+
+    // Owned COM interfaces, released in clear().
+    IMFVideoDisplayControl *m_displayControl;
+    IMFVideoProcessor *m_processor;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp
new file mode 100644
index 000000000..d5e25e1c5
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp
@@ -0,0 +1,101 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowscamera_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include <qcameradevice.h>
+
+QT_BEGIN_NAMESPACE
+
+// Construct an inactive camera control; it does nothing until attached to a
+// capture session via setCaptureSession().
+QWindowsCamera::QWindowsCamera(QCamera *camera)
+    : QPlatformCamera(camera)
+{
+}
+
+QWindowsCamera::~QWindowsCamera() = default;
+
+// Report the locally cached active state (kept in sync with the session via
+// onActiveChanged).
+bool QWindowsCamera::isActive() const
+{
+    return m_active;
+}
+
+// Start/stop the camera. Activation is refused while no camera device is
+// set; the state is forwarded to the device session when one is attached.
+void QWindowsCamera::setActive(bool active)
+{
+    if (m_active == active)
+        return;
+    if (m_cameraDevice.isNull() && active)
+        return;
+    m_active = active;
+    if (m_mediaDeviceSession)
+        m_mediaDeviceSession->setActive(active);
+
+    emit activeChanged(m_active);
+}
+
+// Select the camera device; forwarded to the device session when attached.
+void QWindowsCamera::setCamera(const QCameraDevice &camera)
+{
+    if (m_cameraDevice == camera)
+        return;
+    m_cameraDevice = camera;
+    if (m_mediaDeviceSession)
+        m_mediaDeviceSession->setActiveCamera(camera);
+}
+
+void QWindowsCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWindowsMediaCaptureService *captureService = static_cast<QWindowsMediaCaptureService *>(session);
+ if (m_captureService == captureService)
+ return;
+
+ if (m_mediaDeviceSession) {
+ m_mediaDeviceSession->disconnect(this);
+ m_mediaDeviceSession->setActive(false);
+ m_mediaDeviceSession->setCameraFormat({});
+ m_mediaDeviceSession->setActiveCamera({});
+ }
+
+ m_captureService = captureService;
+ if (!m_captureService) {
+ m_mediaDeviceSession = nullptr;
+ return;
+ }
+
+ m_mediaDeviceSession = m_captureService->session();
+ Q_ASSERT(m_mediaDeviceSession);
+
+ m_mediaDeviceSession->setActive(false);
+ m_mediaDeviceSession->setActiveCamera(m_cameraDevice);
+ m_mediaDeviceSession->setCameraFormat(m_cameraFormat);
+ m_mediaDeviceSession->setActive(m_active);
+
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::activeChanged,
+ this, &QWindowsCamera::onActiveChanged);
+}
+
+// Select a capture format. A null format means "pick the best format for
+// the current device"; a non-null format must be one the device advertises.
+// Returns false when the format is not supported by the device.
+bool QWindowsCamera::setCameraFormat(const QCameraFormat &format)
+{
+    if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+        return false;
+
+    m_cameraFormat = format.isNull() ? findBestCameraFormat(m_cameraDevice) : format;
+
+    if (m_mediaDeviceSession)
+        m_mediaDeviceSession->setCameraFormat(m_cameraFormat);
+    return true;
+}
+
+// Mirror the session's active state back into this control (without
+// forwarding it again, unlike setActive, to avoid a feedback loop).
+void QWindowsCamera::onActiveChanged(bool active)
+{
+    if (m_active == active)
+        return;
+    if (m_cameraDevice.isNull() && active)
+        return;
+    m_active = active;
+    emit activeChanged(m_active);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowscamera_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h
new file mode 100644
index 000000000..2aec11165
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSCAMERA_H
+#define QWINDOWSCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaCaptureService;
+class QWindowsMediaDeviceSession;
+
+// Camera control for the Windows capture backend. Thin state-caching layer
+// that forwards device/format/active changes to the capture service's
+// QWindowsMediaDeviceSession.
+class QWindowsCamera : public QPlatformCamera
+{
+    Q_OBJECT
+public:
+    explicit QWindowsCamera(QCamera *camera);
+    virtual ~QWindowsCamera();
+
+    bool isActive() const override;
+
+    void setCamera(const QCameraDevice &camera) override;
+
+    void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+    bool setCameraFormat(const QCameraFormat &format) override;
+
+    void setActive(bool active) override;
+
+private Q_SLOTS:
+    // Keeps m_active in sync with the session's activeChanged signal.
+    void onActiveChanged(bool active);
+
+private:
+    QWindowsMediaCaptureService *m_captureService = nullptr;
+    QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;  // owned by the service
+    QCameraDevice m_cameraDevice;
+    QCameraFormat m_cameraFormat;
+    bool m_active = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSCAMERA_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp
new file mode 100644
index 000000000..ea66d561a
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp
@@ -0,0 +1,207 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsimagecapture_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include <private/qmediastoragelocation_p.h>
+
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QtGui/qimagewriter.h>
+
+QT_BEGIN_NAMESPACE
+
+// Construct an image-capture control; inert until attached to a capture
+// session via setCaptureSession().
+QWindowsImageCapture::QWindowsImageCapture(QImageCapture *parent)
+    : QPlatformImageCapture(parent)
+{
+}
+
+QWindowsImageCapture::~QWindowsImageCapture() = default;
+
+// Ready only when attached to an active session with a camera selected and
+// no capture currently pending.
+bool QWindowsImageCapture::isReadyForCapture() const
+{
+    if (!m_mediaDeviceSession)
+        return false;
+    return !m_capturing && m_mediaDeviceSession->isActive() && !m_mediaDeviceSession->activeCamera().isNull();
+}
+
+// Capture to a file. The file name is resolved against the standard
+// Pictures location with an extension derived from the encoder settings.
+// Returns the capture id, or -1 when not ready.
+int QWindowsImageCapture::capture(const QString &fileName)
+{
+    auto ext = writerFormat(m_settings.format());
+    auto path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, ext);
+    return doCapture(path);
+}
+
+// Capture without writing to disk; an empty file name tells
+// handleVideoFrameChanged to skip the save step.
+int QWindowsImageCapture::captureToBuffer()
+{
+    return doCapture(QString());
+}
+
+// Arm a one-shot capture: the next video frame delivered to
+// handleVideoFrameChanged will be taken. Returns the capture id, or -1
+// when not ready.
+int QWindowsImageCapture::doCapture(const QString &fileName)
+{
+    if (!isReadyForCapture())
+        return -1;
+    m_fileName = fileName;
+    m_capturing = true;
+    return m_captureId;
+}
+
+// Accessors for the encoder settings used by subsequent captures (format,
+// resolution, quality). Settings apply from the next capture onward.
+QImageEncoderSettings QWindowsImageCapture::imageSettings() const
+{
+    return m_settings;
+}
+
+void QWindowsImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+    m_settings = settings;
+}
+
+// Move this control between capture services, disconnecting from the old
+// session and wiring readiness/frame signals to the new one. Emits
+// readyForCaptureChanged whenever the transition changes readiness.
+void QWindowsImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    QWindowsMediaCaptureService *captureService = static_cast<QWindowsMediaCaptureService *>(session);
+    if (m_captureService == captureService)
+        return;
+
+    // Capture readiness before switching so we can emit on transitions.
+    auto readyForCapture = isReadyForCapture();
+    if (m_mediaDeviceSession)
+        disconnect(m_mediaDeviceSession, nullptr, this, nullptr);
+
+    m_captureService = captureService;
+    if (!m_captureService) {
+        if (readyForCapture)
+            emit readyForCaptureChanged(false);
+        m_mediaDeviceSession = nullptr;
+        return;
+    }
+
+    m_mediaDeviceSession = m_captureService->session();
+    Q_ASSERT(m_mediaDeviceSession);
+
+    if (isReadyForCapture() != readyForCapture)
+        emit readyForCaptureChanged(isReadyForCapture());
+
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::readyForCaptureChanged,
+            this, &QWindowsImageCapture::readyForCaptureChanged);
+
+    // Frames are delivered here; a pending capture grabs the next one.
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::videoFrameChanged,
+            this, &QWindowsImageCapture::handleVideoFrameChanged);
+}
+
+// Per-frame slot: when a capture is pending, turn the frame into a QImage,
+// scale/crop it to the requested resolution (center crop via
+// KeepAspectRatioByExpanding), emit the capture signals, and kick off an
+// asynchronous save when a file name was given.
+void QWindowsImageCapture::handleVideoFrameChanged(const QVideoFrame &frame)
+{
+    if (m_capturing) {
+
+        QImage image = frame.toImage();
+
+        QSize size = m_settings.resolution();
+        if (size.isValid() && image.size() != size) {
+            // Scale so the target is fully covered, then center-crop.
+            image = image.scaled(size, Qt::KeepAspectRatioByExpanding);
+            if (image.size() != size) {
+                int xoff = (image.size().width() - size.width()) / 2;
+                int yoff = (image.size().height() - size.height()) / 2;
+                image = image.copy(xoff, yoff, size.width(), size.height());
+            }
+        }
+
+        emit imageExposed(m_captureId);
+        emit imageAvailable(m_captureId, frame);
+        emit imageCaptured(m_captureId, image);
+
+        QMediaMetaData metaData = this->metaData();
+        metaData.insert(QMediaMetaData::Date, QDateTime::currentDateTime());
+        // NOTE(review): 'size' is the *requested* resolution and may be an
+        // invalid QSize when none was configured — image.size() might be the
+        // intended value here; confirm.
+        metaData.insert(QMediaMetaData::Resolution, size);
+
+        emit imageMetadataAvailable(m_captureId, metaData);
+
+        if (!m_fileName.isEmpty()) {
+
+            // Save on a worker thread; saveImage reports back via a queued
+            // invocation of imageSaved.
+            (void)QtConcurrent::run(&QWindowsImageCapture::saveImage, this,
+                                    m_captureId, m_fileName, image, metaData, m_settings);
+        }
+
+        ++m_captureId;
+        m_capturing = false;
+    }
+}
+
+// Write a captured image to disk. Runs on a QtConcurrent worker thread, so
+// all parameters are passed by value/copy and completion is reported back
+// to this object's thread via a queued imageSaved signal. Metadata entries
+// are embedded as text keys where the image format supports them.
+void QWindowsImageCapture::saveImage(int captureId, const QString &fileName,
+                                     const QImage &image, const QMediaMetaData &metaData,
+                                     const QImageEncoderSettings &settings)
+{
+    QImageWriter imageWriter;
+    imageWriter.setFileName(fileName);
+
+    QString format = writerFormat(settings.format());
+    imageWriter.setFormat(format.toUtf8());
+
+    // writerQuality returns -1 for formats without a quality knob.
+    int quality = writerQuality(format, settings.quality());
+    if (quality > -1)
+        imageWriter.setQuality(quality);
+
+    for (auto key : metaData.keys())
+        imageWriter.setText(QMediaMetaData::metaDataKeyToString(key),
+                            metaData.stringValue(key));
+
+    imageWriter.write(image);
+
+    // Queued: we are on a worker thread, the signal must fire on the
+    // object's own thread.
+    QMetaObject::invokeMethod(this, "imageSaved", Qt::QueuedConnection,
+                              Q_ARG(int, captureId), Q_ARG(QString, fileName));
+}
+
+// Map a QImageCapture file format onto a QImageWriter format token. Falls
+// back to "jpg" both for unknown requested formats and when the mapped
+// format is not among the writer's supported formats.
+QString QWindowsImageCapture::writerFormat(QImageCapture::FileFormat reqFormat)
+{
+    QString format = QLatin1String("jpg");
+
+    switch (reqFormat) {
+    case QImageCapture::FileFormat::PNG:
+        format = QLatin1String("png");
+        break;
+    case QImageCapture::FileFormat::WebP:
+        format = QLatin1String("webp");
+        break;
+    case QImageCapture::FileFormat::Tiff:
+        format = QLatin1String("tiff");
+        break;
+    case QImageCapture::FileFormat::JPEG:
+    default:
+        break; // already "jpg"
+    }
+
+    const auto supported = QImageWriter::supportedImageFormats();
+    for (const auto &candidate : supported) {
+        if (format.compare(QString::fromUtf8(candidate), Qt::CaseInsensitive) == 0)
+            return format;
+    }
+
+    return QLatin1String("jpg");
+}
+
+// Translate the abstract quality level into a 0-100 encoder quality for
+// JPEG output. Returns -1 for formats without a quality parameter, which
+// tells the caller to leave the writer's default untouched.
+int QWindowsImageCapture::writerQuality(const QString &writerFormat,
+                                        QImageCapture::Quality quality)
+{
+    const bool isJpeg =
+            writerFormat.compare(QLatin1String("jpg"), Qt::CaseInsensitive) == 0
+            || writerFormat.compare(QLatin1String("jpeg"), Qt::CaseInsensitive) == 0;
+    if (!isJpeg)
+        return -1;
+
+    switch (quality) {
+    case QImageCapture::Quality::VeryLowQuality:
+        return 10;
+    case QImageCapture::Quality::LowQuality:
+        return 30;
+    case QImageCapture::Quality::HighQuality:
+        return 90;
+    case QImageCapture::Quality::VeryHighQuality:
+        return 98;
+    case QImageCapture::Quality::NormalQuality:
+    default:
+        return 75;
+    }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h
new file mode 100644
index 000000000..746732e73
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSIMAGECAPTURE_H
+#define QWINDOWSIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDeviceSession;
+class QWindowsMediaCaptureService;
+
+class QWindowsImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QWindowsImageCapture(QImageCapture *parent);
+ virtual ~QWindowsImageCapture();
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private Q_SLOTS:
+ void handleVideoFrameChanged(const QVideoFrame &frame);
+
+private:
+ int doCapture(const QString &fileName);
+ void saveImage(int captureId, const QString &fileName,
+ const QImage &image, const QMediaMetaData &metaData,
+ const QImageEncoderSettings &settings);
+ QString writerFormat(QImageCapture::FileFormat reqFormat);
+ int writerQuality(const QString &writerFormat,
+ QImageCapture::Quality quality);
+
+ QWindowsMediaCaptureService *m_captureService = nullptr;
+ QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;
+ QImageEncoderSettings m_settings;
+ int m_captureId = 0;
+ bool m_capturing = false;
+ QString m_fileName;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSIMAGECAPTURE_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp
new file mode 100644
index 000000000..d349b2c43
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp
@@ -0,0 +1,109 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediacapture_p.h"
+
+#include "qwindowsmediaencoder_p.h"
+#include "qwindowscamera_p.h"
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsimagecapture_p.h"
+#include "qmediadevices.h"
+#include "qaudiodevice.h"
+#include "private/qplatformaudioinput_p.h"
+#include "private/qplatformaudiooutput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// The service owns a single device session that all attached controls
+// (camera, image capture, recorder) share.
+QWindowsMediaCaptureService::QWindowsMediaCaptureService()
+{
+    m_mediaDeviceSession = new QWindowsMediaDeviceSession(this);
+}
+
+QWindowsMediaCaptureService::~QWindowsMediaCaptureService()
+{
+    delete m_mediaDeviceSession;
+}
+
+QPlatformCamera *QWindowsMediaCaptureService::camera()
+{
+    return m_camera;
+}
+
+// Swap the attached camera control: the old one is detached from this
+// service before the new one is attached, then cameraChanged is emitted.
+void QWindowsMediaCaptureService::setCamera(QPlatformCamera *camera)
+{
+    QWindowsCamera *control = static_cast<QWindowsCamera*>(camera);
+    if (m_camera == control)
+        return;
+
+    if (m_camera)
+        m_camera->setCaptureSession(nullptr);
+
+    m_camera = control;
+    if (m_camera)
+        m_camera->setCaptureSession(this);
+    emit cameraChanged();
+}
+
+QPlatformImageCapture *QWindowsMediaCaptureService::imageCapture()
+{
+    return m_imageCapture;
+}
+
+// Swap the attached image-capture control (same detach-then-attach pattern
+// as setCamera), then emit imageCaptureChanged.
+void QWindowsMediaCaptureService::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+    QWindowsImageCapture *control = static_cast<QWindowsImageCapture *>(imageCapture);
+    if (m_imageCapture == control)
+        return;
+
+    if (m_imageCapture)
+        m_imageCapture->setCaptureSession(nullptr);
+
+    m_imageCapture = control;
+    if (m_imageCapture)
+        m_imageCapture->setCaptureSession(this);
+    emit imageCaptureChanged();
+}
+
+QPlatformMediaRecorder *QWindowsMediaCaptureService::mediaRecorder()
+{
+    return m_encoder;
+}
+
+// Swap the attached media encoder (same detach-then-attach pattern as
+// setCamera), then emit encoderChanged.
+void QWindowsMediaCaptureService::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+    QWindowsMediaEncoder *control = static_cast<QWindowsMediaEncoder *>(recorder);
+    if (m_encoder == control)
+        return;
+
+    if (m_encoder)
+        m_encoder->setCaptureSession(nullptr);
+
+    m_encoder = control;
+    if (m_encoder)
+        m_encoder->setCaptureSession(this);
+    emit encoderChanged();
+}
+
+// Forward the audio input/output and preview sink straight to the shared
+// device session; a null platform wrapper clears the respective endpoint.
+void QWindowsMediaCaptureService::setAudioInput(QPlatformAudioInput *input)
+{
+    m_mediaDeviceSession->setAudioInput(input ? input->q : nullptr);
+}
+
+void QWindowsMediaCaptureService::setAudioOutput(QPlatformAudioOutput *output)
+{
+    m_mediaDeviceSession->setAudioOutput(output ? output->q : nullptr);
+}
+
+void QWindowsMediaCaptureService::setVideoPreview(QVideoSink *sink)
+{
+    m_mediaDeviceSession->setVideoSink(sink);
+}
+
+// The session shared by all controls attached to this service.
+QWindowsMediaDeviceSession *QWindowsMediaCaptureService::session() const
+{
+    return m_mediaDeviceSession;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediacapture_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h
new file mode 100644
index 000000000..579310afd
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h
@@ -0,0 +1,62 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QWINDOWSMEDIACAPTURE_H
+#define QWINDOWSMEDIACAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaEncoder;
+class QWindowsCamera;
+class QWindowsMediaDeviceSession;
+class QWindowsImageCapture;
+class QPlatformAudioInput;
+
+// Capture-session backend for Windows: owns the shared
+// QWindowsMediaDeviceSession and wires camera, image-capture and recorder
+// controls to it.
+class QWindowsMediaCaptureService : public QPlatformMediaCaptureSession
+{
+    Q_OBJECT
+
+public:
+    QWindowsMediaCaptureService();
+    virtual ~QWindowsMediaCaptureService();
+
+    QPlatformCamera *camera() override;
+    void setCamera(QPlatformCamera *camera) override;
+
+    QPlatformImageCapture *imageCapture() override;
+    void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+    QPlatformMediaRecorder *mediaRecorder() override;
+    void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+    void setAudioInput(QPlatformAudioInput *) override;
+
+    void setAudioOutput(QPlatformAudioOutput *output) override;
+
+    void setVideoPreview(QVideoSink *sink) override;
+
+    // Shared device session; owned by this service.
+    QWindowsMediaDeviceSession *session() const;
+
+private:
+    QWindowsCamera *m_camera = nullptr;             // not owned
+    QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;  // owned
+    QWindowsImageCapture *m_imageCapture = nullptr; // not owned
+    QWindowsMediaEncoder *m_encoder = nullptr;      // not owned
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIACAPTURE_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
new file mode 100644
index 000000000..e99b95ad2
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -0,0 +1,1019 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediadevicereader_p.h"
+
+#include "private/qwindowsmultimediautils_p.h"
+#include <qvideosink.h>
+#include <qmediadevices.h>
+#include <qaudiodevice.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qcomptr_p.h>
+#include <QtCore/qdebug.h>
+
+#include <mmdeviceapi.h>
+
+QT_BEGIN_NAMESPACE
+
+enum { MEDIA_TYPE_INDEX_DEFAULT = 0xffffffff };
+
+// Sets up the 100 ms timer used to report recording duration while
+// a recording is in progress (see updateDuration()).
+QWindowsMediaDeviceReader::QWindowsMediaDeviceReader(QObject *parent)
+    : QObject(parent)
+{
+    m_durationTimer.setInterval(100);
+    connect(&m_durationTimer, &QTimer::timeout, this, &QWindowsMediaDeviceReader::updateDuration);
+}
+
+// Finalizes any active recording, then tears down the capture pipeline.
+QWindowsMediaDeviceReader::~QWindowsMediaDeviceReader()
+{
+    stopRecording();
+    deactivate();
+}
+
+// Creates a video or audio capture source for the device identified by
+// deviceId (symbolic link / audio endpoint id). On success *source holds
+// one reference owned by the caller; on failure *source stays null.
+HRESULT QWindowsMediaDeviceReader::createSource(const QString &deviceId, bool video, IMFMediaSource **source)
+{
+    if (!source)
+        return E_INVALIDARG;
+
+    *source = nullptr;
+
+    IMFAttributes *attributes = nullptr;
+    HRESULT hr = MFCreateAttributes(&attributes, 2);
+    if (FAILED(hr))
+        return hr;
+
+    // Select the capture category (camera vs. microphone) first,
+    // then identify the concrete device, then instantiate the source.
+    hr = attributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+                             video ? QMM_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
+                                   : QMM_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+    if (SUCCEEDED(hr)) {
+        hr = attributes->SetString(video ? MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK
+                                         : MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_ENDPOINT_ID,
+                                   reinterpret_cast<LPCWSTR>(deviceId.utf16()));
+        if (SUCCEEDED(hr))
+            hr = MFCreateDeviceSource(attributes, source);
+    }
+
+    attributes->Release();
+    return hr;
+}
+
+// Creates a source/reader aggregating two other sources (video/audio).
+// If one of the sources is null the result will be video-only or audio-only.
+// On success the caller owns one reference each on *aggregateSource and
+// *sourceReader.
+HRESULT QWindowsMediaDeviceReader::createAggregateReader(IMFMediaSource *firstSource,
+                                                         IMFMediaSource *secondSource,
+                                                         IMFMediaSource **aggregateSource,
+                                                         IMFSourceReader **sourceReader)
+{
+    // At least one source is required and both out-parameters must be valid.
+    if ((!firstSource && !secondSource) || !aggregateSource || !sourceReader)
+        return E_INVALIDARG;
+
+    *aggregateSource = nullptr;
+    *sourceReader = nullptr;
+
+    IMFCollection *sourceCollection = nullptr;
+
+    HRESULT hr = MFCreateCollection(&sourceCollection);
+    if (SUCCEEDED(hr)) {
+
+        if (firstSource)
+            sourceCollection->AddElement(firstSource);
+
+        if (secondSource)
+            sourceCollection->AddElement(secondSource);
+
+        hr = MFCreateAggregateSource(sourceCollection, aggregateSource);
+        if (SUCCEEDED(hr)) {
+
+            IMFAttributes *readerAttributes = nullptr;
+
+            hr = MFCreateAttributes(&readerAttributes, 1);
+            if (SUCCEEDED(hr)) {
+
+                // Set callback so OnReadSample() is called for each new video frame or audio sample.
+                hr = readerAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK,
+                                                  static_cast<IMFSourceReaderCallback*>(this));
+                if (SUCCEEDED(hr)) {
+
+                    hr = MFCreateSourceReaderFromMediaSource(*aggregateSource, readerAttributes, sourceReader);
+                }
+                readerAttributes->Release();
+            }
+        }
+        sourceCollection->Release();
+    }
+    return hr;
+}
+
+// Selects the requested resolution/frame rate (if specified),
+// or chooses a high quality configuration otherwise.
+// Returns a native media type index, or MEDIA_TYPE_INDEX_DEFAULT when no
+// candidate matched (prepareVideoStream() then keeps the current type).
+DWORD QWindowsMediaDeviceReader::findMediaTypeIndex(const QCameraFormat &reqFormat)
+{
+    DWORD mediaIndex = MEDIA_TYPE_INDEX_DEFAULT;
+
+    if (m_sourceReader && m_videoSource) {
+
+        DWORD index = 0;
+        IMFMediaType *mediaType = nullptr;
+
+        // Best candidate seen so far while scanning all native media types.
+        UINT32 currArea = 0;
+        float currFrameRate = 0.0f;
+
+        while (SUCCEEDED(m_sourceReader->GetNativeMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+                                                            index, &mediaType))) {
+
+            GUID subtype = GUID_NULL;
+            if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype))) {
+
+                // Only formats Qt can turn into a QVideoFrame are considered.
+                auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+                if (pixelFormat != QVideoFrameFormat::Format_Invalid) {
+
+                    UINT32 width, height;
+                    if (SUCCEEDED(MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height))) {
+
+                        UINT32 num, den;
+                        if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &num, &den))) {
+
+                            UINT32 area = width * height;
+                            float frameRate = float(num) / den;
+
+                            // An exact match of the requested format wins immediately.
+                            if (!reqFormat.isNull()
+                                    && UINT32(reqFormat.resolution().width()) == width
+                                    && UINT32(reqFormat.resolution().height()) == height
+                                    && qFuzzyCompare(reqFormat.maxFrameRate(), frameRate)
+                                    && reqFormat.pixelFormat() == pixelFormat) {
+                                mediaType->Release();
+                                return index;
+                            }
+
+                            // Default heuristic: prefer frame rates up to ~30 fps; at an
+                            // equal frame rate prefer the larger resolution.
+                            // NOTE(review): exact float == here means rates differing only
+                            // by rounding are treated as distinct - confirm intended.
+                            if ((currFrameRate < 29.9 && currFrameRate < frameRate) ||
+                                (currFrameRate == frameRate && currArea < area)) {
+                                currArea = area;
+                                currFrameRate = frameRate;
+                                mediaIndex = index;
+                            }
+                        }
+                    }
+                }
+            }
+            mediaType->Release();
+            ++index;
+        }
+    }
+
+    return mediaIndex;
+}
+
+
+// Prepares the source video stream and gets some metadata.
+// Activates the media type chosen by findMediaTypeIndex() (or keeps the
+// reader's current one), then caches pixel format, frame size, stride and
+// frame rate, and finally enables delivery of the video stream.
+HRESULT QWindowsMediaDeviceReader::prepareVideoStream(DWORD mediaTypeIndex)
+{
+    if (!m_sourceReader)
+        return E_FAIL;
+
+    if (!m_videoSource)
+        return S_OK; // It may be audio-only
+
+    HRESULT hr;
+
+    if (mediaTypeIndex == MEDIA_TYPE_INDEX_DEFAULT) {
+        hr = m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+                                                 &m_videoMediaType);
+    } else {
+        hr = m_sourceReader->GetNativeMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+                                                mediaTypeIndex, &m_videoMediaType);
+        if (SUCCEEDED(hr))
+            hr = m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+                                                     nullptr, m_videoMediaType);
+    }
+
+    if (SUCCEEDED(hr)) {
+
+        GUID subtype = GUID_NULL;
+        hr = m_videoMediaType->GetGUID(MF_MT_SUBTYPE, &subtype);
+        if (SUCCEEDED(hr)) {
+
+            m_pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+
+            if (m_pixelFormat == QVideoFrameFormat::Format_Invalid) {
+                hr = E_FAIL;
+            } else {
+
+                // get the frame dimensions
+                hr = MFGetAttributeSize(m_videoMediaType, MF_MT_FRAME_SIZE, &m_frameWidth, &m_frameHeight);
+                if (SUCCEEDED(hr)) {
+
+                    // and the stride, which we need to convert the frame later
+                    hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, m_frameWidth, &m_stride);
+                    if (SUCCEEDED(hr)) {
+                        // A negative stride marks bottom-up rows; keep the magnitude only.
+                        m_stride = qAbs(m_stride);
+                        UINT32 frameRateNum, frameRateDen;
+                        hr = MFGetAttributeRatio(m_videoMediaType, MF_MT_FRAME_RATE, &frameRateNum, &frameRateDen);
+                        if (SUCCEEDED(hr)) {
+
+                            m_frameRate = qreal(frameRateNum) / frameRateDen;
+
+                            hr = m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM), TRUE);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    return hr;
+}
+
+// Fills mediaType with an uncompressed PCM description: float32 or int16
+// samples, the given channel count and sample rate, plus the derived
+// channel mask, block alignment and average byte rate.
+HRESULT QWindowsMediaDeviceReader::initAudioType(IMFMediaType *mediaType, UINT32 channels, UINT32 samplesPerSec, bool flt)
+{
+    if (!mediaType)
+        return E_INVALIDARG;
+
+    const UINT32 bitsPerSample = flt ? 32 : 16;
+    const UINT32 bytesPerFrame = channels * bitsPerSample / 8;
+    const UINT32 channelMask = (channels == 1) ? SPEAKER_FRONT_CENTER
+                                               : (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT);
+
+    // Apply the attributes in order, stopping at the first failure.
+    HRESULT hr = mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetGUID(MF_MT_SUBTYPE, flt ? MFAudioFormat_Float : MFAudioFormat_PCM);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channels);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_CHANNEL_MASK, channelMask);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samplesPerSec);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, bytesPerFrame);
+    if (SUCCEEDED(hr))
+        hr = mediaType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerFrame * samplesPerSec);
+
+    return hr;
+}
+
+// Prepares the source audio stream.
+// Forces a 2-channel 48 kHz float PCM format on the reader so the per-sample
+// volume scaling in OnReadSample() and the monitor sink share one layout.
+HRESULT QWindowsMediaDeviceReader::prepareAudioStream()
+{
+    if (!m_sourceReader)
+        return E_FAIL;
+
+    if (!m_audioSource)
+        return S_OK; // It may be video-only
+
+    HRESULT hr = m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM),
+                                                     &m_audioMediaType);
+    if (SUCCEEDED(hr)) {
+        hr = initAudioType(m_audioMediaType, 2, 48000, true);
+        if (SUCCEEDED(hr)) {
+            hr = m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM),
+                                                     nullptr, m_audioMediaType);
+            if (SUCCEEDED(hr)) {
+                hr = m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+            }
+        }
+    }
+    return hr;
+}
+
+// Retrieves the indexes for selected video/audio streams.
+// Scans the reader's streams, recording which index carries video and which
+// carries audio, and fails if a requested source has no matching stream.
+HRESULT QWindowsMediaDeviceReader::initSourceIndexes()
+{
+    if (!m_sourceReader)
+        return E_FAIL;
+
+    m_sourceVideoStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+    m_sourceAudioStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+
+    DWORD index = 0;
+    BOOL selected = FALSE;
+
+    // GetStreamSelection() stops returning S_OK past the last stream.
+    while (m_sourceReader->GetStreamSelection(index, &selected) == S_OK) {
+        if (selected) {
+            IMFMediaType *mediaType = nullptr;
+            if (SUCCEEDED(m_sourceReader->GetCurrentMediaType(index, &mediaType))) {
+                GUID majorType = GUID_NULL;
+                if (SUCCEEDED(mediaType->GetGUID(MF_MT_MAJOR_TYPE, &majorType))) {
+                    if (majorType == MFMediaType_Video)
+                        m_sourceVideoStreamIndex = index;
+                    else if (majorType == MFMediaType_Audio)
+                        m_sourceAudioStreamIndex = index;
+                }
+                mediaType->Release();
+            }
+        }
+        ++index;
+    }
+    // Each requested source must have produced a corresponding stream.
+    if ((m_videoSource && m_sourceVideoStreamIndex == MF_SOURCE_READER_INVALID_STREAM_INDEX) ||
+        (m_audioSource && m_sourceAudioStreamIndex == MF_SOURCE_READER_INVALID_STREAM_INDEX))
+        return E_FAIL;
+    return S_OK;
+}
+
+// Selects the audio output device used to monitor captured audio.
+// Always stops current monitoring first; restarts it only when capture is
+// active and a non-empty id was given. Returns false if restarting failed.
+bool QWindowsMediaDeviceReader::setAudioOutput(const QString &audioOutputId)
+{
+    QMutexLocker locker(&m_mutex);
+
+    stopMonitoring();
+
+    m_audioOutputId = audioOutputId;
+
+    if (!m_active || m_audioOutputId.isEmpty())
+        return true;
+
+    HRESULT hr = startMonitoring();
+
+    return SUCCEEDED(hr);
+}
+
+// Builds the audio monitoring pipeline: an audio renderer sink on the
+// selected output device plus a sink writer that feeds it captured samples.
+// Requires m_audioMediaType (set by prepareAudioStream()). On success
+// m_monitorSink/m_monitorWriter hold references, released in stopMonitoring().
+HRESULT QWindowsMediaDeviceReader::startMonitoring()
+{
+    if (m_audioOutputId.isEmpty())
+        return E_FAIL;
+
+    IMFAttributes *sinkAttributes = nullptr;
+
+    HRESULT hr = MFCreateAttributes(&sinkAttributes, 1);
+    if (SUCCEEDED(hr)) {
+
+        hr = sinkAttributes->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID,
+                                       reinterpret_cast<LPCWSTR>(m_audioOutputId.utf16()));
+        if (SUCCEEDED(hr)) {
+
+            IMFMediaSink *mediaSink = nullptr;
+            hr = MFCreateAudioRenderer(sinkAttributes, &mediaSink);
+            if (SUCCEEDED(hr)) {
+
+                IMFStreamSink *streamSink = nullptr;
+                hr = mediaSink->GetStreamSinkByIndex(0, &streamSink);
+                if (SUCCEEDED(hr)) {
+
+                    IMFMediaTypeHandler *typeHandler = nullptr;
+                    hr = streamSink->GetMediaTypeHandler(&typeHandler);
+                    if (SUCCEEDED(hr)) {
+
+                        hr = typeHandler->IsMediaTypeSupported(m_audioMediaType, nullptr);
+                        if (SUCCEEDED(hr)) {
+
+                            hr = typeHandler->SetCurrentMediaType(m_audioMediaType);
+                            if (SUCCEEDED(hr)) {
+
+                                IMFAttributes *writerAttributes = nullptr;
+
+                                // Fix: assign to the outer hr instead of declaring a
+                                // shadowing local, so failures from here on propagate
+                                // to the caller instead of being masked by the earlier
+                                // SetCurrentMediaType() success code.
+                                hr = MFCreateAttributes(&writerAttributes, 1);
+                                if (SUCCEEDED(hr)) {
+
+                                    // Render as fast as samples arrive; the capture source paces us.
+                                    hr = writerAttributes->SetUINT32(MF_SINK_WRITER_DISABLE_THROTTLING, TRUE);
+                                    if (SUCCEEDED(hr)) {
+
+                                        IMFSinkWriter *sinkWriter = nullptr;
+                                        hr = MFCreateSinkWriterFromMediaSink(mediaSink, writerAttributes, &sinkWriter);
+                                        if (SUCCEEDED(hr)) {
+
+                                            hr = sinkWriter->SetInputMediaType(0, m_audioMediaType, nullptr);
+                                            if (SUCCEEDED(hr)) {
+
+                                                // Apply current output volume/mute (best effort).
+                                                IMFSimpleAudioVolume *audioVolume = nullptr;
+
+                                                if (SUCCEEDED(MFGetService(mediaSink, QMM_MR_POLICY_VOLUME_SERVICE, IID_PPV_ARGS(&audioVolume)))) {
+                                                    audioVolume->SetMasterVolume(float(m_outputVolume));
+                                                    audioVolume->SetMute(m_outputMuted);
+                                                    audioVolume->Release();
+                                                }
+
+                                                hr = sinkWriter->BeginWriting();
+                                                if (SUCCEEDED(hr)) {
+                                                    m_monitorSink = mediaSink;
+                                                    m_monitorSink->AddRef();
+                                                    m_monitorWriter = sinkWriter;
+                                                    m_monitorWriter->AddRef();
+                                                }
+                                            }
+                                            sinkWriter->Release();
+                                        }
+                                    }
+                                    writerAttributes->Release();
+                                }
+                            }
+                        }
+                        typeHandler->Release();
+                    }
+                    streamSink->Release();
+                }
+                mediaSink->Release();
+            }
+        }
+        sinkAttributes->Release();
+    }
+
+    return hr;
+}
+
+// Tears down the monitoring pipeline created by startMonitoring().
+// Safe to call when monitoring was never started.
+void QWindowsMediaDeviceReader::stopMonitoring()
+{
+    if (m_monitorWriter) {
+        m_monitorWriter->Release();
+        m_monitorWriter = nullptr;
+    }
+    if (m_monitorSink) {
+        m_monitorSink->Shutdown();
+        m_monitorSink->Release();
+        m_monitorSink = nullptr;
+    }
+}
+
+// Activates the requested camera/microphone for streaming.
+// One of the IDs may be empty for video-only/audio-only.
+// Builds the full pipeline (sources, aggregate reader, stream setup,
+// optional monitoring) and issues the first async ReadSample().
+// On any failure everything is released and false is returned.
+bool QWindowsMediaDeviceReader::activate(const QString &cameraId,
+                                         const QCameraFormat &cameraFormat,
+                                         const QString &microphoneId)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (cameraId.isEmpty() && microphoneId.isEmpty())
+        return false;
+
+    stopMonitoring();
+    releaseResources();
+
+    m_active = false;
+    m_streaming = false;
+
+    if (!cameraId.isEmpty()) {
+        if (!SUCCEEDED(createSource(cameraId, true, &m_videoSource))) {
+            releaseResources();
+            return false;
+        }
+    }
+
+    if (!microphoneId.isEmpty()) {
+        if (!SUCCEEDED(createSource(microphoneId, false, &m_audioSource))) {
+            releaseResources();
+            return false;
+        }
+    }
+
+    if (!SUCCEEDED(createAggregateReader(m_videoSource, m_audioSource, &m_aggregateSource, &m_sourceReader))) {
+        releaseResources();
+        return false;
+    }
+
+    DWORD mediaTypeIndex = findMediaTypeIndex(cameraFormat);
+
+    if (!SUCCEEDED(prepareVideoStream(mediaTypeIndex))) {
+        releaseResources();
+        return false;
+    }
+
+    if (!SUCCEEDED(prepareAudioStream())) {
+        releaseResources();
+        return false;
+    }
+
+    if (!SUCCEEDED(initSourceIndexes())) {
+        releaseResources();
+        return false;
+    }
+
+    // Keep an existing recording sink in sync with the new capture formats;
+    // a monitoring failure is not fatal for capture itself.
+    updateSinkInputMediaTypes();
+    startMonitoring();
+
+    // Request the first frame or audio sample.
+    if (!SUCCEEDED(m_sourceReader->ReadSample(MF_SOURCE_READER_ANY_STREAM, 0, nullptr, nullptr, nullptr, nullptr))) {
+        releaseResources();
+        return false;
+    }
+
+    m_active = true;
+    return true;
+}
+
+// Stops monitoring and streaming and marks the reader inactive.
+// NOTE(review): unlike activate(), this touches the monitor state without
+// holding m_mutex (stopStreaming() locks internally) - confirm callers
+// serialize access to this object.
+void QWindowsMediaDeviceReader::deactivate()
+{
+    stopMonitoring();
+    stopStreaming();
+    m_active = false;
+    m_streaming = false;
+}
+
+// Thread-safe teardown of the capture pipeline (sources, reader, types).
+void QWindowsMediaDeviceReader::stopStreaming()
+{
+    QMutexLocker locker(&m_mutex);
+    releaseResources();
+}
+
+// Releases all streaming-related COM objects (media types, reader, aggregate
+// and device sources) and resets the member pointers. Safe to call repeatedly.
+void QWindowsMediaDeviceReader::releaseResources()
+{
+    const auto releaseAndClear = [](auto *&comObject) {
+        if (comObject) {
+            comObject->Release();
+            comObject = nullptr;
+        }
+    };
+
+    // Same order as the original hand-written sequence: media types and
+    // reader first, then the aggregate source, then the device sources.
+    releaseAndClear(m_videoMediaType);
+    releaseAndClear(m_audioMediaType);
+    releaseAndClear(m_sourceReader);
+    releaseAndClear(m_aggregateSource);
+    releaseAndClear(m_videoSource);
+    releaseAndClear(m_audioSource);
+}
+
+// Builds the encoder's output video media type: subtype, average bit rate,
+// frame size and frame rate, with pixel aspect ratio and interlace mode
+// copied from the current capture type (m_videoMediaType must be valid).
+// On success *mediaType holds one reference the caller must release.
+HRESULT QWindowsMediaDeviceReader::createVideoMediaType(const GUID &format, UINT32 bitRate, UINT32 width,
+                                                        UINT32 height, qreal frameRate, IMFMediaType **mediaType)
+{
+    if (!mediaType)
+        return E_INVALIDARG;
+
+    *mediaType = nullptr;
+    IMFMediaType *targetMediaType = nullptr;
+
+    if (SUCCEEDED(MFCreateMediaType(&targetMediaType))) {
+
+        if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video))) {
+
+            if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_SUBTYPE, format))) {
+
+                if (SUCCEEDED(targetMediaType->SetUINT32(MF_MT_AVG_BITRATE, bitRate))) {
+
+                    if (SUCCEEDED(MFSetAttributeSize(targetMediaType, MF_MT_FRAME_SIZE, width, height))) {
+
+                        // Frame rate is stored as a ratio with 1/1000 precision.
+                        if (SUCCEEDED(MFSetAttributeRatio(targetMediaType, MF_MT_FRAME_RATE,
+                                                          UINT32(frameRate * 1000), 1000))) {
+                            UINT32 t1, t2;
+                            if (SUCCEEDED(MFGetAttributeRatio(m_videoMediaType, MF_MT_PIXEL_ASPECT_RATIO, &t1, &t2))) {
+
+                                if (SUCCEEDED(MFSetAttributeRatio(targetMediaType, MF_MT_PIXEL_ASPECT_RATIO, t1, t2))) {
+
+                                    if (SUCCEEDED(m_videoMediaType->GetUINT32(MF_MT_INTERLACE_MODE, &t1))) {
+
+                                        if (SUCCEEDED(targetMediaType->SetUINT32(MF_MT_INTERLACE_MODE, t1))) {
+
+                                            *mediaType = targetMediaType;
+                                            return S_OK;
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        targetMediaType->Release();
+    }
+    return E_FAIL;
+}
+
+// Builds the encoder's output audio media type: subtype plus, when bitRate
+// is non-zero, the average byte rate. On success *mediaType holds one
+// reference the caller must release; on failure it stays null.
+HRESULT QWindowsMediaDeviceReader::createAudioMediaType(const GUID &format, UINT32 bitRate, IMFMediaType **mediaType)
+{
+    if (!mediaType)
+        return E_INVALIDARG;
+
+    *mediaType = nullptr;
+
+    IMFMediaType *targetMediaType = nullptr;
+    if (FAILED(MFCreateMediaType(&targetMediaType)))
+        return E_FAIL;
+
+    // A bitRate of zero means "encoder default": skip the byte-rate attribute.
+    const bool configured =
+            SUCCEEDED(targetMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio))
+            && SUCCEEDED(targetMediaType->SetGUID(MF_MT_SUBTYPE, format))
+            && (bitRate == 0
+                || SUCCEEDED(targetMediaType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bitRate / 8)));
+
+    if (configured) {
+        *mediaType = targetMediaType;
+        return S_OK;
+    }
+
+    targetMediaType->Release();
+    return E_FAIL;
+}
+
+// Re-applies the current capture media types as inputs of the recording sink
+// writer; used when the device formats change while a recording sink exists.
+// Returns S_OK when there is nothing to update.
+HRESULT QWindowsMediaDeviceReader::updateSinkInputMediaTypes()
+{
+    HRESULT hr = S_OK;
+    if (m_sinkWriter) {
+        if (m_videoSource && m_videoMediaType && m_sinkVideoStreamIndex != MF_SINK_WRITER_INVALID_STREAM_INDEX) {
+            hr = m_sinkWriter->SetInputMediaType(m_sinkVideoStreamIndex, m_videoMediaType, nullptr);
+        }
+        if (SUCCEEDED(hr)) {
+            if (m_audioSource && m_audioMediaType && m_sinkAudioStreamIndex != MF_SINK_WRITER_INVALID_STREAM_INDEX) {
+                hr = m_sinkWriter->SetInputMediaType(m_sinkAudioStreamIndex, m_audioMediaType, nullptr);
+            }
+        }
+    }
+    return hr;
+}
+
+// Starts encoding the live capture to fileName.
+// container is the transcode container type; videoFormat/audioFormat are
+// encoder subtypes (GUID_NULL skips that stream, at least one must be set).
+// Returns NoError on success or the matching QMediaRecorder error category.
+// Completion is asynchronous: see OnFinalize().
+QMediaRecorder::Error QWindowsMediaDeviceReader::startRecording(
+        const QString &fileName, const GUID &container, const GUID &videoFormat, UINT32 videoBitRate,
+        UINT32 width, UINT32 height, qreal frameRate, const GUID &audioFormat, UINT32 audioBitRate)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (!m_active || m_recording || (videoFormat == GUID_NULL && audioFormat == GUID_NULL))
+        return QMediaRecorder::ResourceError;
+
+    ComPtr<IMFAttributes> writerAttributes;
+
+    HRESULT hr = MFCreateAttributes(writerAttributes.GetAddressOf(), 2);
+    if (FAILED(hr))
+        return QMediaRecorder::ResourceError;
+
+    // Set callback so OnFinalize() is called after video is saved.
+    hr = writerAttributes->SetUnknown(MF_SINK_WRITER_ASYNC_CALLBACK,
+                                      static_cast<IMFSinkWriterCallback*>(this));
+    if (FAILED(hr))
+        return QMediaRecorder::ResourceError;
+
+    hr = writerAttributes->SetGUID(QMM_MF_TRANSCODE_CONTAINERTYPE, container);
+    if (FAILED(hr))
+        return QMediaRecorder::ResourceError;
+
+    ComPtr<IMFSinkWriter> sinkWriter;
+    hr = MFCreateSinkWriterFromURL(reinterpret_cast<LPCWSTR>(fileName.utf16()),
+                                   nullptr, writerAttributes.Get(), sinkWriter.GetAddressOf());
+    if (FAILED(hr))
+        return QMediaRecorder::LocationNotWritable;
+
+    m_sinkVideoStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+    m_sinkAudioStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+
+    // Add the video stream: encoder output type plus the capture input type.
+    if (m_videoSource && videoFormat != GUID_NULL) {
+        IMFMediaType *targetMediaType = nullptr;
+
+        hr = createVideoMediaType(videoFormat, videoBitRate, width, height, frameRate, &targetMediaType);
+        if (SUCCEEDED(hr)) {
+
+            hr = sinkWriter->AddStream(targetMediaType, &m_sinkVideoStreamIndex);
+            if (SUCCEEDED(hr)) {
+
+                hr = sinkWriter->SetInputMediaType(m_sinkVideoStreamIndex, m_videoMediaType, nullptr);
+            }
+            targetMediaType->Release();
+        }
+    }
+
+    // Add the audio stream the same way.
+    if (SUCCEEDED(hr)) {
+        if (m_audioSource && audioFormat != GUID_NULL) {
+            IMFMediaType *targetMediaType = nullptr;
+
+            hr = createAudioMediaType(audioFormat, audioBitRate, &targetMediaType);
+            if (SUCCEEDED(hr)) {
+
+                hr = sinkWriter->AddStream(targetMediaType, &m_sinkAudioStreamIndex);
+                if (SUCCEEDED(hr)) {
+
+                    hr = sinkWriter->SetInputMediaType(m_sinkAudioStreamIndex, m_audioMediaType, nullptr);
+                }
+                targetMediaType->Release();
+            }
+        }
+    }
+
+    if (FAILED(hr))
+        return QMediaRecorder::FormatError;
+
+    hr = sinkWriter->BeginWriting();
+    if (FAILED(hr))
+        return QMediaRecorder::ResourceError;
+
+    // Ownership moves to m_sinkWriter; released in OnFinalize()/stopRecording().
+    m_sinkWriter = sinkWriter.Detach();
+    m_lastDuration = -1;
+    m_currentDuration = 0;
+    updateDuration();
+    m_durationTimer.start();
+    m_recording = true;
+    m_firstFrame = true;
+    m_paused = false;
+    m_pauseChanging = false;
+
+    return QMediaRecorder::NoError;
+}
+
+// Stops and finalizes the recording. Blocks on m_hasFinalized until the sink
+// writer's async OnFinalize() callback signals that the file is written, so
+// the output file is complete when this returns.
+void QWindowsMediaDeviceReader::stopRecording()
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (m_sinkWriter && m_recording) {
+
+        HRESULT hr = m_sinkWriter->Finalize();
+
+        if (SUCCEEDED(hr)) {
+            // OnFinalize() releases the writer and notifies this condition.
+            m_hasFinalized.wait(&m_mutex);
+        } else {
+            m_sinkWriter->Release();
+            m_sinkWriter = nullptr;
+
+            // Report the failure asynchronously on the object's thread.
+            QMetaObject::invokeMethod(this, "recordingError",
+                                      Qt::QueuedConnection, Q_ARG(int, hr));
+        }
+    }
+
+    m_recording = false;
+    m_paused = false;
+    m_pauseChanging = false;
+
+    m_durationTimer.stop();
+    m_lastDuration = -1;
+    m_currentDuration = -1;
+}
+
+// Marks the recording paused; OnReadSample() stops feeding the sink writer
+// and uses m_pauseTime so recording time does not advance while paused.
+// NOTE(review): unlike the other public entry points this writes flags read
+// by OnReadSample() without taking m_mutex - confirm callers only invoke it
+// from a single thread.
+bool QWindowsMediaDeviceReader::pauseRecording()
+{
+    if (!m_recording || m_paused)
+        return false;
+    m_pauseTime = m_lastTimestamp;
+    m_paused = true;
+    m_pauseChanging = true;
+    return true;
+}
+
+// Resumes a paused recording; OnReadSample() shifts m_timeOffset so the
+// paused interval is excluded from the recorded timeline.
+// NOTE(review): same unlocked flag access as pauseRecording() - see there.
+bool QWindowsMediaDeviceReader::resumeRecording()
+{
+    if (!m_recording || !m_paused)
+        return false;
+    m_paused = false;
+    m_pauseChanging = true;
+    return true;
+}
+
+//from IUnknown
+// Standard COM interface lookup for the two Media Foundation callback
+// interfaces this class implements.
+STDMETHODIMP QWindowsMediaDeviceReader::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+    if (riid == IID_IMFSourceReaderCallback) {
+        *ppvObject = static_cast<IMFSourceReaderCallback*>(this);
+    } else if (riid == IID_IMFSinkWriterCallback) {
+        *ppvObject = static_cast<IMFSinkWriterCallback*>(this);
+    } else if (riid == IID_IUnknown) {
+        // Disambiguate: both callback bases derive from IUnknown.
+        *ppvObject = static_cast<IUnknown*>(static_cast<IMFSourceReaderCallback*>(this));
+    } else {
+        *ppvObject = nullptr;
+        return E_NOINTERFACE;
+    }
+    AddRef();
+    return S_OK;
+}
+
+// COM reference counting. When the last reference is dropped the object
+// schedules its own deletion via deleteLater(), so destruction happens on
+// the owning thread's event loop rather than on an MF worker thread.
+STDMETHODIMP_(ULONG) QWindowsMediaDeviceReader::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) QWindowsMediaDeviceReader::Release(void)
+{
+    LONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0) {
+        this->deleteLater();
+    }
+    return cRef;
+}
+
+// Width in pixels of the negotiated video format (0 before activation).
+UINT32 QWindowsMediaDeviceReader::frameWidth() const
+{
+    return m_frameWidth;
+}
+
+// Height in pixels of the negotiated video format (0 before activation).
+UINT32 QWindowsMediaDeviceReader::frameHeight() const
+{
+    return m_frameHeight;
+}
+
+// Frame rate of the negotiated video format in frames per second.
+qreal QWindowsMediaDeviceReader::frameRate() const
+{
+    return m_frameRate;
+}
+
+// Mutes/unmutes recorded audio; applied per-sample in OnReadSample().
+void QWindowsMediaDeviceReader::setInputMuted(bool muted)
+{
+    m_inputMuted = muted;
+}
+
+// Sets the recorded-audio gain, clamped to [0, 1].
+void QWindowsMediaDeviceReader::setInputVolume(qreal volume)
+{
+    m_inputVolume = qBound(0.0, volume, 1.0);
+}
+
+// Mutes/unmutes the monitoring output through the sink's volume service.
+// The flag is stored regardless, so startMonitoring() can apply it later.
+void QWindowsMediaDeviceReader::setOutputMuted(bool muted)
+{
+    QMutexLocker locker(&m_mutex);
+
+    m_outputMuted = muted;
+
+    if (m_active && m_monitorSink) {
+        IMFSimpleAudioVolume *audioVolume = nullptr;
+        if (SUCCEEDED(MFGetService(m_monitorSink, QMM_MR_POLICY_VOLUME_SERVICE,
+                                   IID_PPV_ARGS(&audioVolume)))) {
+            audioVolume->SetMute(m_outputMuted);
+            audioVolume->Release();
+        }
+    }
+}
+
+// Sets the monitoring output volume (clamped to [0, 1]) through the sink's
+// volume service. The value is stored regardless, so startMonitoring() can
+// apply it later.
+void QWindowsMediaDeviceReader::setOutputVolume(qreal volume)
+{
+    QMutexLocker locker(&m_mutex);
+
+    m_outputVolume = qBound(0.0, volume, 1.0);
+
+    if (m_active && m_monitorSink) {
+        IMFSimpleAudioVolume *audioVolume = nullptr;
+        if (SUCCEEDED(MFGetService(m_monitorSink, QMM_MR_POLICY_VOLUME_SERVICE,
+                                   IID_PPV_ARGS(&audioVolume)))) {
+            audioVolume->SetMasterVolume(float(m_outputVolume));
+            audioVolume->Release();
+        }
+    }
+}
+
+// Timer slot: emits durationChanged only when the recorded duration actually
+// advanced since the last tick.
+// NOTE(review): m_currentDuration is written in OnReadSample() under m_mutex
+// but read here without it - confirm this benign-race is acceptable.
+void QWindowsMediaDeviceReader::updateDuration()
+{
+    if (m_currentDuration >= 0 && m_lastDuration != m_currentDuration) {
+        m_lastDuration = m_currentDuration;
+        emit durationChanged(m_currentDuration);
+    }
+}
+
+//from IMFSourceReaderCallback
+// Async delivery point for every captured video frame / audio sample.
+// Runs on a Media Foundation worker thread, hence the mutex and the queued
+// error reporting elsewhere in this class. Re-requests the next sample at
+// the end, keeping the capture loop running.
+STDMETHODIMP QWindowsMediaDeviceReader::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+                                                     DWORD dwStreamFlags, LONGLONG llTimestamp,
+                                                     IMFSample *pSample)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (FAILED(hrStatus)) {
+        emit streamingError(int(hrStatus));
+        return hrStatus;
+    }
+
+    m_lastTimestamp = llTimestamp;
+
+    if ((dwStreamFlags & MF_SOURCE_READERF_ENDOFSTREAM) == MF_SOURCE_READERF_ENDOFSTREAM) {
+        m_streaming = false;
+        emit streamingStopped();
+    } else {
+
+        if (!m_streaming) {
+            m_streaming = true;
+            emit streamingStarted();
+        }
+        if (pSample) {
+
+            // Echo microphone audio to the monitor (audio renderer) sink, if any.
+            if (m_monitorWriter && dwStreamIndex == m_sourceAudioStreamIndex)
+                m_monitorWriter->WriteSample(0, pSample);
+
+            if (m_recording) {
+
+                // Recorded timestamps are rebased so the file starts at zero.
+                if (m_firstFrame) {
+                    m_timeOffset = llTimestamp;
+                    m_firstFrame = false;
+                    emit recordingStarted();
+                }
+
+                if (m_pauseChanging) {
+                    // Recording time should not pass while paused.
+                    if (m_paused)
+                        m_pauseTime = llTimestamp;
+                    else
+                        m_timeOffset += llTimestamp - m_pauseTime;
+                    m_pauseChanging = false;
+                }
+
+                // Send the video frame or audio sample to be encoded.
+                if (m_sinkWriter && !m_paused) {
+
+                    pSample->SetSampleTime(llTimestamp - m_timeOffset);
+
+                    if (dwStreamIndex == m_sourceVideoStreamIndex) {
+
+                        m_sinkWriter->WriteSample(m_sinkVideoStreamIndex, pSample);
+
+                    } else if (dwStreamIndex == m_sourceAudioStreamIndex) {
+
+                        float volume = m_inputMuted ? 0.0f : float(m_inputVolume);
+
+                        // Change the volume of the audio sample, if needed.
+                        // The capture type is float PCM (see initAudioType()),
+                        // so the buffer is scaled as an array of floats.
+                        if (volume != 1.0f) {
+                            IMFMediaBuffer *mediaBuffer = nullptr;
+                            if (SUCCEEDED(pSample->ConvertToContiguousBuffer(&mediaBuffer))) {
+
+                                DWORD bufLen = 0;
+                                BYTE *buffer = nullptr;
+
+                                if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
+
+                                    float *floatBuffer = reinterpret_cast<float*>(buffer);
+
+                                    for (DWORD i = 0; i < bufLen/4; ++i)
+                                        floatBuffer[i] *= volume;
+
+                                    mediaBuffer->Unlock();
+                                }
+                                mediaBuffer->Release();
+                            }
+                        }
+
+                        m_sinkWriter->WriteSample(m_sinkAudioStreamIndex, pSample);
+                    }
+                    // Duration in milliseconds (timestamps are 100 ns units).
+                    m_currentDuration = (llTimestamp - m_timeOffset) / 10000;
+                }
+            }
+
+            // Generate a new QVideoFrame from IMFSample.
+            if (dwStreamIndex == m_sourceVideoStreamIndex) {
+                IMFMediaBuffer *mediaBuffer = nullptr;
+                if (SUCCEEDED(pSample->ConvertToContiguousBuffer(&mediaBuffer))) {
+
+                    DWORD bufLen = 0;
+                    BYTE *buffer = nullptr;
+
+                    if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
+                        auto bytes = QByteArray(reinterpret_cast<char*>(buffer), bufLen);
+                        QVideoFrameFormat format(QSize(m_frameWidth, m_frameHeight), m_pixelFormat);
+
+                        QVideoFrame frame = QVideoFramePrivate::createFrame(
+                                std::make_unique<QMemoryVideoBuffer>(std::move(bytes), m_stride),
+                                std::move(format));
+
+                        // WMF uses 100-nanosecond units, Qt uses microseconds
+                        frame.setStartTime(llTimestamp * 0.1);
+
+                        LONGLONG duration = -1;
+                        if (SUCCEEDED(pSample->GetSampleDuration(&duration)))
+                            frame.setEndTime((llTimestamp + duration) * 0.1);
+
+                        emit videoFrameChanged(frame);
+
+                        mediaBuffer->Unlock();
+                    }
+                    mediaBuffer->Release();
+                }
+            }
+        }
+        // request the next video frame or sound sample
+        if (m_sourceReader)
+            m_sourceReader->ReadSample(MF_SOURCE_READER_ANY_STREAM,
+                                       0, nullptr, nullptr, nullptr, nullptr);
+    }
+
+    return S_OK;
+}
+
+// Reader flush completion; nothing to do.
+STDMETHODIMP QWindowsMediaDeviceReader::OnFlush(DWORD)
+{
+    return S_OK;
+}
+
+// Media source events are ignored.
+STDMETHODIMP QWindowsMediaDeviceReader::OnEvent(DWORD, IMFMediaEvent*)
+{
+    return S_OK;
+}
+
+//from IMFSinkWriterCallback
+// Called on an MF thread once the sink writer has finished writing the file.
+// Releases the writer, announces the stop and wakes the thread blocked in
+// stopRecording().
+STDMETHODIMP QWindowsMediaDeviceReader::OnFinalize(HRESULT)
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_sinkWriter) {
+        m_sinkWriter->Release();
+        m_sinkWriter = nullptr;
+    }
+    emit recordingStopped();
+    m_hasFinalized.notify_one();
+    return S_OK;
+}
+
+// Sink writer markers are unused.
+STDMETHODIMP QWindowsMediaDeviceReader::OnMarker(DWORD, LPVOID)
+{
+    return S_OK;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediadevicereader_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h
new file mode 100644
index 000000000..4699a463a
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h
@@ -0,0 +1,154 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSMEDIADEVICEREADER_H
+#define QWINDOWSMEDIADEVICEREADER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mferror.h>
+#include <mfreadwrite.h>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qwaitcondition.h>
+#include <QtCore/qtimer.h>
+#include <qvideoframe.h>
+#include <qcameradevice.h>
+#include <qmediarecorder.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+
+// Captures audio/video from a camera and/or microphone through a Media
+// Foundation source reader; can simultaneously encode to a file with a sink
+// writer (startRecording) and loop microphone audio to an output device
+// (setAudioOutput). Also a ref-counted COM object, since it implements the
+// MF source-reader and sink-writer callback interfaces.
+class QWindowsMediaDeviceReader : public QObject,
+                                  public IMFSourceReaderCallback,
+                                  public IMFSinkWriterCallback
+{
+    Q_OBJECT
+public:
+    explicit QWindowsMediaDeviceReader(QObject *parent = nullptr);
+    ~QWindowsMediaDeviceReader();
+
+    //from IUnknown
+    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+    STDMETHODIMP_(ULONG) AddRef(void) override;
+    STDMETHODIMP_(ULONG) Release(void) override;
+
+    //from IMFSourceReaderCallback
+    STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+                              DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) override;
+    STDMETHODIMP OnFlush(DWORD dwStreamIndex) override;
+    STDMETHODIMP OnEvent(DWORD dwStreamIndex, IMFMediaEvent *pEvent) override;
+
+    //from IMFSinkWriterCallback
+    STDMETHODIMP OnFinalize(HRESULT hrStatus) override;
+    STDMETHODIMP OnMarker(DWORD dwStreamIndex, LPVOID pvContext) override;
+
+    // Pipeline control: one of the ids may be empty for single-stream capture.
+    bool activate(const QString &cameraId,
+                  const QCameraFormat &cameraFormat,
+                  const QString &microphoneId);
+    void deactivate();
+
+    // Recording control; GUID_NULL formats skip that stream.
+    QMediaRecorder::Error startRecording(const QString &fileName, const GUID &container,
+                                         const GUID &videoFormat, UINT32 videoBitRate, UINT32 width,
+                                         UINT32 height, qreal frameRate, const GUID &audioFormat,
+                                         UINT32 audioBitRate);
+    void stopRecording();
+    bool pauseRecording();
+    bool resumeRecording();
+
+    // Negotiated video format properties and volume/mute controls.
+    UINT32 frameWidth() const;
+    UINT32 frameHeight() const;
+    qreal frameRate() const;
+    void setInputMuted(bool muted);
+    void setInputVolume(qreal volume);
+    void setOutputMuted(bool muted);
+    void setOutputVolume(qreal volume);
+    bool setAudioOutput(const QString &audioOutputId);
+
+Q_SIGNALS:
+    void streamingStarted();
+    void streamingStopped();
+    void streamingError(int errorCode);
+    void recordingStarted();
+    void recordingStopped();
+    void recordingError(int errorCode);
+    void durationChanged(qint64 duration);
+    void videoFrameChanged(const QVideoFrame &frame);
+
+private slots:
+    void updateDuration();
+
+private:
+    HRESULT createSource(const QString &deviceId, bool video, IMFMediaSource **source);
+    HRESULT createAggregateReader(IMFMediaSource *firstSource, IMFMediaSource *secondSource,
+                                  IMFMediaSource **aggregateSource, IMFSourceReader **sourceReader);
+    HRESULT createVideoMediaType(const GUID &format, UINT32 bitRate, UINT32 width, UINT32 height,
+                                 qreal frameRate, IMFMediaType **mediaType);
+    HRESULT createAudioMediaType(const GUID &format, UINT32 bitRate, IMFMediaType **mediaType);
+    HRESULT initAudioType(IMFMediaType *mediaType, UINT32 channels, UINT32 samplesPerSec, bool flt);
+    HRESULT prepareVideoStream(DWORD mediaTypeIndex);
+    HRESULT prepareAudioStream();
+    HRESULT initSourceIndexes();
+    HRESULT updateSinkInputMediaTypes();
+    HRESULT startMonitoring();
+    void stopMonitoring();
+    void releaseResources();
+    void stopStreaming();
+    DWORD findMediaTypeIndex(const QCameraFormat &reqFormat);
+
+    long m_cRef = 1;                      // COM reference count
+    QMutex m_mutex;                       // guards state shared with MF worker threads
+    QWaitCondition m_hasFinalized;        // signalled by OnFinalize()
+    // Capture pipeline objects (released by releaseResources()).
+    IMFMediaSource *m_videoSource = nullptr;
+    IMFMediaType *m_videoMediaType = nullptr;
+    IMFMediaSource *m_audioSource = nullptr;
+    IMFMediaType *m_audioMediaType = nullptr;
+    IMFMediaSource *m_aggregateSource = nullptr;
+    IMFSourceReader *m_sourceReader = nullptr;
+    IMFSinkWriter *m_sinkWriter = nullptr;
+    // Audio monitoring sink (released by stopMonitoring()).
+    IMFMediaSink *m_monitorSink = nullptr;
+    IMFSinkWriter *m_monitorWriter = nullptr;
+    QString m_audioOutputId;
+    // Stream index mapping between the source reader and the sink writer.
+    DWORD m_sourceVideoStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+    DWORD m_sourceAudioStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+    DWORD m_sinkVideoStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+    DWORD m_sinkAudioStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+    // Negotiated video format (set by prepareVideoStream()).
+    UINT32 m_frameWidth = 0;
+    UINT32 m_frameHeight = 0;
+    qreal m_frameRate = 0.0;
+    LONG m_stride = 0;
+    // Streaming/recording state flags.
+    bool m_active = false;
+    bool m_streaming = false;
+    bool m_recording = false;
+    bool m_firstFrame = false;
+    bool m_paused = false;
+    bool m_pauseChanging = false;
+    bool m_inputMuted = false;
+    bool m_outputMuted = false;
+    qreal m_inputVolume = 1.0;
+    qreal m_outputVolume = 1.0;
+    QVideoFrameFormat::PixelFormat m_pixelFormat = QVideoFrameFormat::Format_Invalid;
+    // Timestamp bookkeeping (100 ns units, see OnReadSample()).
+    LONGLONG m_timeOffset = 0;
+    LONGLONG m_pauseTime = 0;
+    LONGLONG m_lastTimestamp = 0;
+    QTimer m_durationTimer;
+    qint64 m_currentDuration = -1;
+    qint64 m_lastDuration = -1;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIADEVICEREADER_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp
new file mode 100644
index 000000000..b13599444
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp
@@ -0,0 +1,376 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediadevicesession_p.h"
+
+#include "qwindowsmediadevicereader_p.h"
+#include "private/qwindowsmultimediautils_p.h"
+#include "private/qplatformvideosink_p.h"
+#include <qvideosink.h>
+#include <QtCore/qdebug.h>
+#include <qaudioinput.h>
+#include <qaudiooutput.h>
+
+QT_BEGIN_NAMESPACE
+
+// Constructs the session and wires the device reader's signals: streaming
+// start/stop/error and incoming video frames go to private handlers here,
+// while recording lifecycle and duration notifications are forwarded
+// directly as this session's own signals.
+QWindowsMediaDeviceSession::QWindowsMediaDeviceSession(QObject *parent)
+    : QObject(parent)
+{
+    m_mediaDeviceReader = new QWindowsMediaDeviceReader(this);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingStarted,
+            this, &QWindowsMediaDeviceSession::handleStreamingStarted);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingStopped,
+            this, &QWindowsMediaDeviceSession::handleStreamingStopped);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingError,
+            this, &QWindowsMediaDeviceSession::handleStreamingError);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::videoFrameChanged,
+            this, &QWindowsMediaDeviceSession::handleVideoFrameChanged);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingStarted,
+            this, &QWindowsMediaDeviceSession::recordingStarted);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingStopped,
+            this, &QWindowsMediaDeviceSession::recordingStopped);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingError,
+            this, &QWindowsMediaDeviceSession::recordingError);
+    connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::durationChanged,
+            this, &QWindowsMediaDeviceSession::durationChanged);
+}
+
+// Destroys the session. The reader is deleted explicitly (it is also
+// parented to this object, so this simply makes destruction order explicit).
+QWindowsMediaDeviceSession::~QWindowsMediaDeviceSession()
+{
+    delete m_mediaDeviceReader;
+}
+
+// Returns true once streaming has actually started (set in
+// handleStreamingStarted()).
+bool QWindowsMediaDeviceSession::isActive() const
+{
+    return m_active;
+}
+
+// Returns true while activation has been requested but the reader has not
+// yet reported streamingStarted.
+bool QWindowsMediaDeviceSession::isActivating() const
+{
+    return m_activating;
+}
+
+// Activates or deactivates capture. Activation is asynchronous: on success
+// only m_activating is set here; m_active flips when the reader emits
+// streamingStarted. Deactivation is synchronous and emits the state signals
+// immediately.
+void QWindowsMediaDeviceSession::setActive(bool active)
+{
+    // Ignore requests that would not change the current/ pending state.
+    if ((active && (m_active || m_activating)) || (!active && !m_active && !m_activating))
+        return;
+
+    if (active) {
+        auto camId = QString::fromUtf8(m_activeCameraDevice.id());
+        auto micId = m_audioInput ? QString::fromUtf8(m_audioInput->device().id()) : QString();
+        // At least one of camera or microphone must be set to activate.
+        if (!camId.isEmpty() || !micId.isEmpty()) {
+            if (m_mediaDeviceReader->activate(camId, m_cameraFormat, micId)) {
+                m_activating = true;
+            } else {
+                emit streamingError(MF_E_NOT_AVAILABLE);
+            }
+        } else {
+            qWarning() << Q_FUNC_INFO << "Camera ID and Microphone ID both undefined.";
+        }
+    } else {
+        m_mediaDeviceReader->deactivate();
+        m_active = false;
+        m_activating = false;
+        emit activeChanged(m_active);
+        emit readyForCaptureChanged(m_active);
+    }
+}
+
+// Restarts the capture pipeline (used when the camera or audio input device
+// changes). Recording is paused around the restart so it can continue into
+// the new device configuration.
+void QWindowsMediaDeviceSession::reactivate()
+{
+    if (m_active || m_activating) {
+        pauseRecording();
+        setActive(false);
+        setActive(true);
+        resumeRecording();
+    }
+}
+
+// Selects the camera device and restarts the pipeline if it was running.
+void QWindowsMediaDeviceSession::setActiveCamera(const QCameraDevice &camera)
+{
+    m_activeCameraDevice = camera;
+    reactivate();
+}
+
+QCameraDevice QWindowsMediaDeviceSession::activeCamera() const
+{
+    return m_activeCameraDevice;
+}
+
+// Stores the requested camera format; it is applied on the next activation
+// (see setActive()), not immediately.
+void QWindowsMediaDeviceSession::setCameraFormat(const QCameraFormat &cameraFormat)
+{
+    m_cameraFormat = cameraFormat;
+}
+
+// Sets the sink that receives preview frames in handleVideoFrameChanged().
+void QWindowsMediaDeviceSession::setVideoSink(QVideoSink *surface)
+{
+    m_surface = surface;
+}
+
+// Completes an asynchronous activation: transitions activating -> active
+// and notifies listeners. Ignored if no activation was pending.
+void QWindowsMediaDeviceSession::handleStreamingStarted()
+{
+    if (m_activating) {
+        m_active = true;
+        m_activating = false;
+        emit activeChanged(m_active);
+        emit readyForCaptureChanged(m_active);
+    }
+}
+
+// Marks the session inactive when the reader stops streaming.
+void QWindowsMediaDeviceSession::handleStreamingStopped()
+{
+    m_active = false;
+    emit activeChanged(m_active);
+    emit readyForCaptureChanged(m_active);
+}
+
+// Clears the preview surface (empty frame) and re-emits the error code.
+void QWindowsMediaDeviceSession::handleStreamingError(int errorCode)
+{
+    if (m_surface)
+        m_surface->platformVideoSink()->setVideoFrame(QVideoFrame());
+    emit streamingError(errorCode);
+}
+
+// Pushes each captured frame to the preview sink and re-emits it.
+void QWindowsMediaDeviceSession::handleVideoFrameChanged(const QVideoFrame &frame)
+{
+    if (m_surface)
+        m_surface->platformVideoSink()->setVideoFrame(frame);
+    emit videoFrameChanged(frame);
+}
+
+// The following slots simply forward audio control changes to the reader.
+void QWindowsMediaDeviceSession::setAudioInputMuted(bool muted)
+{
+    m_mediaDeviceReader->setInputMuted(muted);
+}
+
+void QWindowsMediaDeviceSession::setAudioInputVolume(float volume)
+{
+    m_mediaDeviceReader->setInputVolume(volume);
+}
+
+// A changed input device requires a full pipeline restart.
+void QWindowsMediaDeviceSession::audioInputDeviceChanged()
+{
+    reactivate();
+}
+
+void QWindowsMediaDeviceSession::setAudioOutputMuted(bool muted)
+{
+    m_mediaDeviceReader->setOutputMuted(muted);
+}
+
+void QWindowsMediaDeviceSession::setAudioOutputVolume(float volume)
+{
+    m_mediaDeviceReader->setOutputVolume(volume);
+}
+
+// Output (monitoring) device can be swapped without restarting, but only
+// while the session is running or starting.
+void QWindowsMediaDeviceSession::audioOutputDeviceChanged()
+{
+    if (m_active || m_activating)
+        m_mediaDeviceReader->setAudioOutput(QString::fromUtf8(m_audioOutput->device().id()));
+}
+
+// Replaces the audio input. Disconnects the previous input's signals,
+// restarts the pipeline for the new device (audioInputDeviceChanged() is
+// called even when input becomes null, which deactivates audio capture),
+// then tracks mute/volume/device changes of the new input.
+void QWindowsMediaDeviceSession::setAudioInput(QAudioInput *input)
+{
+    if (m_audioInput == input)
+        return;
+    if (m_audioInput)
+        m_audioInput->disconnect(this);
+    m_audioInput = input;
+
+    audioInputDeviceChanged();
+
+    if (!m_audioInput)
+        return;
+    connect(m_audioInput, &QAudioInput::mutedChanged, this, &QWindowsMediaDeviceSession::setAudioInputMuted);
+    connect(m_audioInput, &QAudioInput::volumeChanged, this, &QWindowsMediaDeviceSession::setAudioInputVolume);
+    connect(m_audioInput, &QAudioInput::deviceChanged, this, &QWindowsMediaDeviceSession::audioInputDeviceChanged);
+}
+
+// Replaces the audio output used for monitoring. A null output clears the
+// reader's output; otherwise the new device id is applied immediately and
+// its mute/volume/device changes are tracked.
+void QWindowsMediaDeviceSession::setAudioOutput(QAudioOutput *output)
+{
+    if (m_audioOutput == output)
+        return;
+    if (m_audioOutput)
+        m_audioOutput->disconnect(this);
+    m_audioOutput = output;
+    if (!m_audioOutput) {
+        m_mediaDeviceReader->setAudioOutput({});
+        return;
+    }
+
+    m_mediaDeviceReader->setAudioOutput(QString::fromUtf8(m_audioOutput->device().id()));
+
+    connect(m_audioOutput, &QAudioOutput::mutedChanged, this, &QWindowsMediaDeviceSession::setAudioOutputMuted);
+    connect(m_audioOutput, &QAudioOutput::volumeChanged, this, &QWindowsMediaDeviceSession::setAudioOutputVolume);
+    connect(m_audioOutput, &QAudioOutput::deviceChanged, this, &QWindowsMediaDeviceSession::audioOutputDeviceChanged);
+}
+
+// Starts recording to fileName. Resolves container and codec GUIDs from the
+// encoder settings, and fills in any unset settings (resolution, frame
+// rate, bit rates) from the reader's actual capture parameters or from the
+// quality-based estimators below — writing the resolved values back into
+// `settings` so callers see what was actually used. For audio-only
+// recordings the video format is GUID_NULL.
+QMediaRecorder::Error QWindowsMediaDeviceSession::startRecording(QMediaEncoderSettings &settings, const QString &fileName, bool audioOnly)
+{
+    GUID container = audioOnly ? QWindowsMultimediaUtils::containerForAudioFileFormat(settings.mediaFormat().fileFormat())
+                               : QWindowsMultimediaUtils::containerForVideoFileFormat(settings.mediaFormat().fileFormat());
+    GUID videoFormat = QWindowsMultimediaUtils::videoFormatForCodec(settings.videoCodec());
+    GUID audioFormat = QWindowsMultimediaUtils::audioFormatForCodec(settings.audioCodec());
+
+    // Fall back to the camera's native frame size when none was requested.
+    QSize res = settings.videoResolution();
+    UINT32 width, height;
+    if (res.width() > 0 && res.height() > 0) {
+        width = UINT32(res.width());
+        height = UINT32(res.height());
+    } else {
+        width = m_mediaDeviceReader->frameWidth();
+        height = m_mediaDeviceReader->frameHeight();
+        settings.setVideoResolution(QSize(int(width), int(height)));
+    }
+
+    // Fall back to the camera's native frame rate when none was requested.
+    qreal frameRate = settings.videoFrameRate();
+    if (frameRate <= 0) {
+        frameRate = m_mediaDeviceReader->frameRate();
+        settings.setVideoFrameRate(frameRate);
+    }
+
+    auto quality = settings.quality();
+
+    UINT32 videoBitRate = 0;
+    if (settings.videoBitRate() > 0) {
+        videoBitRate = UINT32(settings.videoBitRate());
+    } else {
+        videoBitRate = estimateVideoBitRate(videoFormat, width, height, frameRate, quality);
+        settings.setVideoBitRate(int(videoBitRate));
+    }
+
+    UINT32 audioBitRate = 0;
+    if (settings.audioBitRate() > 0) {
+        audioBitRate = UINT32(settings.audioBitRate());
+    } else {
+        audioBitRate = estimateAudioBitRate(audioFormat, quality);
+        settings.setAudioBitRate(int(audioBitRate));
+    }
+
+    return m_mediaDeviceReader->startRecording(fileName, container, audioOnly ? GUID_NULL : videoFormat,
+                                               videoBitRate, width, height, frameRate,
+                                               audioFormat, audioBitRate);
+}
+
+// Thin forwarders to the reader's recording controls. pause/resume return
+// whether the reader accepted the request.
+void QWindowsMediaDeviceSession::stopRecording()
+{
+    m_mediaDeviceReader->stopRecording();
+}
+
+bool QWindowsMediaDeviceSession::pauseRecording()
+{
+    return m_mediaDeviceReader->pauseRecording();
+}
+
+bool QWindowsMediaDeviceSession::resumeRecording()
+{
+    return m_mediaDeviceReader->resumeRecording();
+}
+
+// empirical estimate of the required video bitrate (for H.264)
+//
+// Maps the requested quality level to a bits-per-pixel factor and scales it
+// by the pixel throughput (width * height * frameRate). The videoFormat
+// parameter is currently unused — the table is tuned for H.264 only.
+quint32 QWindowsMediaDeviceSession::estimateVideoBitRate(const GUID &videoFormat, quint32 width, quint32 height,
+                                                         qreal frameRate, QMediaRecorder::Quality quality)
+{
+    Q_UNUSED(videoFormat);
+
+    qreal bitsPerPixel;
+    switch (quality) {
+    case QMediaRecorder::Quality::VeryLowQuality:
+        bitsPerPixel = 0.08;
+        break;
+    case QMediaRecorder::Quality::LowQuality:
+        bitsPerPixel = 0.2;
+        break;
+    case QMediaRecorder::Quality::NormalQuality:
+        bitsPerPixel = 0.3;
+        break;
+    case QMediaRecorder::Quality::HighQuality:
+        bitsPerPixel = 0.5;
+        break;
+    case QMediaRecorder::Quality::VeryHighQuality:
+        bitsPerPixel = 0.8;
+        break;
+    default:
+        bitsPerPixel = 0.3;
+    }
+
+    // Required bitrate is not linear on the number of pixels; small resolutions
+    // require more BPP, thus the minimum values, to try to compensate it.
+    quint32 pixelsPerSec = quint32(qMax(width, 320u) * qMax(height, 240u) * qMax(frameRate, 6.0));
+    return pixelsPerSec * bitsPerPixel;
+}
+
+// Picks an audio bitrate for the given codec and quality level from the
+// discrete bitrate sets each Media Foundation encoder supports. Returns 0
+// for unrecognized formats, letting the encoder use its own default.
+quint32 QWindowsMediaDeviceSession::estimateAudioBitRate(const GUID &audioFormat, QMediaRecorder::Quality quality)
+{
+    if (audioFormat == MFAudioFormat_AAC) {
+        // Bitrates supported by the AAC encoder are 96K, 128K, 160K, 192K.
+        switch (quality) {
+        case QMediaRecorder::Quality::VeryLowQuality:
+            return 96000;
+        case QMediaRecorder::Quality::LowQuality:
+            return 96000;
+        case QMediaRecorder::Quality::NormalQuality:
+            return 128000;
+        case QMediaRecorder::Quality::HighQuality:
+            return 160000;
+        case QMediaRecorder::Quality::VeryHighQuality:
+            return 192000;
+        default:
+            return 128000;
+        }
+    } else if (audioFormat == MFAudioFormat_MP3) {
+        // Bitrates supported by the MP3 encoder are
+        // 32K, 40K, 48K, 56K, 64K, 80K, 96K, 112K, 128K, 160K, 192K, 224K, 256K, 320K.
+        switch (quality) {
+        case QMediaRecorder::Quality::VeryLowQuality:
+            return 48000;
+        case QMediaRecorder::Quality::LowQuality:
+            return 96000;
+        case QMediaRecorder::Quality::NormalQuality:
+            return 128000;
+        case QMediaRecorder::Quality::HighQuality:
+            return 224000;
+        case QMediaRecorder::Quality::VeryHighQuality:
+            return 320000;
+        default:
+            return 128000;
+        }
+    } else if (audioFormat == MFAudioFormat_WMAudioV8) {
+        // Bitrates supported by the Windows Media Audio 8 encoder
+        switch (quality) {
+        case QMediaRecorder::Quality::VeryLowQuality:
+            return 32000;
+        case QMediaRecorder::Quality::LowQuality:
+            return 96000;
+        case QMediaRecorder::Quality::NormalQuality:
+            return 192000;
+        case QMediaRecorder::Quality::HighQuality:
+            return 256016;
+        case QMediaRecorder::Quality::VeryHighQuality:
+            return 320032;
+        default:
+            return 192000;
+        }
+    } else if (audioFormat == MFAudioFormat_WMAudioV9) {
+        // Bitrates supported by the Windows Media Audio 9 encoder
+        switch (quality) {
+        case QMediaRecorder::Quality::VeryLowQuality:
+            return 32000;
+        case QMediaRecorder::Quality::LowQuality:
+            return 96000;
+        case QMediaRecorder::Quality::NormalQuality:
+            return 192000;
+        case QMediaRecorder::Quality::HighQuality:
+            return 256016;
+        case QMediaRecorder::Quality::VeryHighQuality:
+            return 384000;
+        default:
+            return 192000;
+        }
+    }
+    return 0; // Use default for format
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediadevicesession_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h
new file mode 100644
index 000000000..c3998ce6c
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h
@@ -0,0 +1,100 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSMEDIADEVICESESSION_H
+#define QWINDOWSMEDIADEVICESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <qcamera.h>
+#include <qaudiodevice.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qplatformmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioInput;
+class QAudioOutput;
+class QVideoSink;
+class QWindowsMediaDeviceReader;
+
+// Owns a QWindowsMediaDeviceReader and coordinates camera/microphone
+// activation, audio in/out routing, preview delivery to a QVideoSink, and
+// recording control for the Windows (Media Foundation) capture backend.
+class QWindowsMediaDeviceSession : public QObject
+{
+    Q_OBJECT
+public:
+    explicit QWindowsMediaDeviceSession(QObject *parent = nullptr);
+    ~QWindowsMediaDeviceSession();
+
+    // Activation is asynchronous: setActive(true) requests streaming;
+    // isActivating() is true until the reader reports it has started.
+    bool isActive() const;
+    void setActive(bool active);
+
+    bool isActivating() const;
+
+    void setActiveCamera(const QCameraDevice &camera);
+    QCameraDevice activeCamera() const;
+
+    void setCameraFormat(const QCameraFormat &cameraFormat);
+
+    void setVideoSink(QVideoSink *surface);
+
+public Q_SLOTS:
+    // Forward audio control changes to the reader; device changes restart
+    // (input) or retarget (output) the pipeline.
+    void setAudioInputMuted(bool muted);
+    void setAudioInputVolume(float volume);
+    void audioInputDeviceChanged();
+    void setAudioOutputMuted(bool muted);
+    void setAudioOutputVolume(float volume);
+    void audioOutputDeviceChanged();
+
+public:
+    void setAudioInput(QAudioInput *input);
+    void setAudioOutput(QAudioOutput *output);
+
+    // Resolves unset encoder settings from actual capture parameters and
+    // starts the reader's sink writer; returns the resulting error state.
+    QMediaRecorder::Error startRecording(QMediaEncoderSettings &settings, const QString &fileName, bool audioOnly);
+    void stopRecording();
+    bool pauseRecording();
+    bool resumeRecording();
+
+Q_SIGNALS:
+    void activeChanged(bool);
+    void readyForCaptureChanged(bool);
+    void durationChanged(qint64 duration);
+    void recordingStarted();
+    void recordingStopped();
+    void streamingError(int errorCode);
+    void recordingError(int errorCode);
+    void videoFrameChanged(const QVideoFrame &frame);
+
+private Q_SLOTS:
+    void handleStreamingStarted();
+    void handleStreamingStopped();
+    void handleStreamingError(int errorCode);
+    void handleVideoFrameChanged(const QVideoFrame &frame);
+
+private:
+    void reactivate();
+    quint32 estimateVideoBitRate(const GUID &videoFormat, quint32 width, quint32 height,
+                                 qreal frameRate, QMediaRecorder::Quality quality);
+    quint32 estimateAudioBitRate(const GUID &audioFormat, QMediaRecorder::Quality quality);
+    bool m_active = false;       // streaming is running
+    bool m_activating = false;   // activation requested, not yet confirmed
+    QCameraDevice m_activeCameraDevice;
+    QCameraFormat m_cameraFormat;
+    QWindowsMediaDeviceReader *m_mediaDeviceReader = nullptr;
+    QAudioInput *m_audioInput = nullptr;   // not owned
+    QAudioOutput *m_audioOutput = nullptr; // not owned
+    QVideoSink *m_surface = nullptr;       // not owned; preview target
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIADEVICESESSION_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
new file mode 100644
index 000000000..512110af6
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -0,0 +1,225 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediaencoder_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include "mfmetadata_p.h"
+#include <QtCore/QUrl>
+#include <QtCore/QMimeType>
+#include <mferror.h>
+#include <shobjidl.h>
+#include <private/qmediastoragelocation_p.h>
+#include <private/qmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Media-recorder backend; the session it records through is attached later
+// via setCaptureSession().
+QWindowsMediaEncoder::QWindowsMediaEncoder(QMediaRecorder *parent)
+    : QObject(parent),
+      QPlatformMediaRecorder(parent)
+{
+}
+
+// Only local file destinations are supported.
+bool QWindowsMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+    return location.scheme() == QLatin1String("file") || location.scheme().isEmpty();
+}
+
+QMediaRecorder::RecorderState QWindowsMediaEncoder::state() const
+{
+    return m_state;
+}
+
+qint64 QWindowsMediaEncoder::duration() const
+{
+    return m_duration;
+}
+
+// Begins a recording. If the session was not already active it is activated
+// here, and m_sessionWasActive remembers that so stop() can deactivate it
+// again. The output file name is generated from the configured output
+// location and the format's preferred suffix.
+void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+    if (!m_captureService || !m_mediaDeviceSession) {
+        qWarning() << Q_FUNC_INFO << "Encoder is not set to a capture session";
+        return;
+    }
+    if (m_state != QMediaRecorder::StoppedState)
+        return;
+
+    m_sessionWasActive = m_mediaDeviceSession->isActive() || m_mediaDeviceSession->isActivating();
+
+    if (!m_sessionWasActive) {
+
+        m_mediaDeviceSession->setActive(true);
+
+        // Activation is asynchronous; if it did not even start, report
+        // a resource error instead of recording into a dead session.
+        if (!m_mediaDeviceSession->isActivating()) {
+            updateError(QMediaRecorder::ResourceError,
+                        QMediaRecorderPrivate::msgFailedStartRecording());
+            return;
+        }
+    }
+
+    const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
+    m_fileName = QMediaStorageLocation::generateFileName(outputLocation().toLocalFile(), audioOnly
+                                                             ? QStandardPaths::MusicLocation
+                                                             : QStandardPaths::MoviesLocation,
+                                                         settings.mimeType().preferredSuffix());
+
+    QMediaRecorder::Error ec = m_mediaDeviceSession->startRecording(settings, m_fileName, audioOnly);
+    if (ec == QMediaRecorder::NoError) {
+        m_state = QMediaRecorder::RecordingState;
+
+        actualLocationChanged(QUrl::fromLocalFile(m_fileName));
+        stateChanged(m_state);
+
+    } else {
+        updateError(ec, QMediaRecorderPrivate::msgFailedStartRecording());
+    }
+}
+
+// Pauses a running recording; reports a FormatError if the session refuses.
+void QWindowsMediaEncoder::pause()
+{
+    if (!m_mediaDeviceSession || m_state != QMediaRecorder::RecordingState)
+        return;
+
+    if (m_mediaDeviceSession->pauseRecording()) {
+        m_state = QMediaRecorder::PausedState;
+        stateChanged(m_state);
+    } else {
+        updateError(QMediaRecorder::FormatError, tr("Failed to pause recording"));
+    }
+}
+
+// Resumes a paused recording; reports a FormatError if the session refuses.
+void QWindowsMediaEncoder::resume()
+{
+    if (!m_mediaDeviceSession || m_state != QMediaRecorder::PausedState)
+        return;
+
+    if (m_mediaDeviceSession->resumeRecording()) {
+        m_state = QMediaRecorder::RecordingState;
+        stateChanged(m_state);
+    } else {
+        updateError(QMediaRecorder::FormatError, tr("Failed to resume recording"));
+    }
+}
+
+// Stops the recording; if record() activated the session, deactivate it
+// again. The StoppedState transition is emitted from onRecordingStopped().
+void QWindowsMediaEncoder::stop()
+{
+    if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState) {
+        m_mediaDeviceSession->stopRecording();
+        if (!m_sessionWasActive)
+            m_mediaDeviceSession->setActive(false);
+    }
+}
+
+
+
+// Attaches this encoder to a capture service. Any in-progress recording on
+// the previous service is stopped first; then this encoder subscribes to
+// the new service's device session signals.
+void QWindowsMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    QWindowsMediaCaptureService *captureSession = static_cast<QWindowsMediaCaptureService *>(session);
+    if (m_captureService == captureSession)
+        return;
+
+    if (m_captureService)
+        stop();
+
+    m_captureService = captureSession;
+    if (!m_captureService) {
+        m_mediaDeviceSession = nullptr;
+        return;
+    }
+
+    m_mediaDeviceSession = m_captureService->session();
+    Q_ASSERT(m_mediaDeviceSession);
+
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingStarted, this, &QWindowsMediaEncoder::onRecordingStarted);
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingStopped, this, &QWindowsMediaEncoder::onRecordingStopped);
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::streamingError, this, &QWindowsMediaEncoder::onStreamingError);
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingError, this, &QWindowsMediaEncoder::onRecordingError);
+    connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::durationChanged, this, &QWindowsMediaEncoder::onDurationChanged);
+    connect(m_captureService, &QWindowsMediaCaptureService::cameraChanged, this, &QWindowsMediaEncoder::onCameraChanged);
+    onCameraChanged();
+}
+
+// Metadata is held here and written to the file only after recording stops
+// (see saveMetadata()).
+void QWindowsMediaEncoder::setMetaData(const QMediaMetaData &metaData)
+{
+    m_metaData = metaData;
+}
+
+QMediaMetaData QWindowsMediaEncoder::metaData() const
+{
+    return m_metaData;
+}
+
+// Writes the stored metadata into the finished file via the shell property
+// store (SHGetPropertyStoreFromParsingName + Commit). Failures to open the
+// store are silently ignored — metadata is best-effort.
+void QWindowsMediaEncoder::saveMetadata()
+{
+    if (!m_metaData.isEmpty()) {
+
+        const QString nativeFileName = QDir::toNativeSeparators(m_fileName);
+
+        IPropertyStore *store = nullptr;
+
+        if (SUCCEEDED(SHGetPropertyStoreFromParsingName(reinterpret_cast<LPCWSTR>(nativeFileName.utf16()),
+                                                        nullptr, GPS_READWRITE, IID_PPV_ARGS(&store)))) {
+
+            MFMetaData::toNative(m_metaData, store);
+
+            store->Commit();
+            store->Release();
+        }
+    }
+}
+
+// Caches and republishes the recording duration reported by the session.
+void QWindowsMediaEncoder::onDurationChanged(qint64 duration)
+{
+    m_duration = duration;
+    durationChanged(m_duration);
+}
+
+// Maps streaming failures to recorder errors and aborts any active
+// recording, deactivating the session if record() had activated it.
+void QWindowsMediaEncoder::onStreamingError(int errorCode)
+{
+    if (errorCode == MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED)
+        updateError(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
+    else if (errorCode == MF_E_AUDIO_RECORDING_DEVICE_INVALIDATED)
+        updateError(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
+    else
+        updateError(QMediaRecorder::ResourceError, tr("Streaming error"));
+
+    if (m_state != QMediaRecorder::StoppedState) {
+        m_mediaDeviceSession->stopRecording();
+        if (!m_sessionWasActive)
+            m_mediaDeviceSession->setActive(false);
+    }
+}
+
+// Reports the recording error and forces the Stopped state, emitting the
+// transition only if the state actually changed.
+void QWindowsMediaEncoder::onRecordingError(int errorCode)
+{
+    Q_UNUSED(errorCode);
+    updateError(QMediaRecorder::ResourceError, tr("Recording error"));
+
+    auto lastState = m_state;
+    m_state = QMediaRecorder::StoppedState;
+    if (m_state != lastState)
+        stateChanged(m_state);
+}
+
+// Camera changes require no action here; slot kept for the connection.
+void QWindowsMediaEncoder::onCameraChanged()
+{
+}
+
+void QWindowsMediaEncoder::onRecordingStarted()
+{
+}
+
+// Finalizes a recording: flush metadata into the file, then transition to
+// the Stopped state if not already there.
+void QWindowsMediaEncoder::onRecordingStopped()
+{
+    saveMetadata();
+
+    auto lastState = m_state;
+    m_state = QMediaRecorder::StoppedState;
+    if (m_state != lastState)
+        stateChanged(m_state);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediaencoder_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h
new file mode 100644
index 000000000..51f35ce9d
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h
@@ -0,0 +1,71 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#ifndef QWINDOWSMEDIAENCODER_H
+#define QWINDOWSMEDIAENCODER_H
+
+#include <private/qplatformmediarecorder_p.h>
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDeviceSession;
+class QPlatformMediaCaptureSession;
+class QWindowsMediaCaptureService;
+
+// QPlatformMediaRecorder implementation for the Windows backend: drives a
+// QWindowsMediaDeviceSession to record to a local file and writes metadata
+// into the finished file via the shell property store.
+class QWindowsMediaEncoder : public QObject, public QPlatformMediaRecorder
+{
+    Q_OBJECT
+public:
+    explicit QWindowsMediaEncoder(QMediaRecorder *parent);
+
+    bool isLocationWritable(const QUrl &location) const override;
+    QMediaRecorder::RecorderState state() const override;
+    qint64 duration() const override;
+
+    void setMetaData(const QMediaMetaData &metaData) override;
+    QMediaMetaData metaData() const override;
+
+    void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+    void record(QMediaEncoderSettings &settings) override;
+    void pause() override;
+    void resume() override;
+    void stop() override;
+
+private Q_SLOTS:
+    void onCameraChanged();
+    void onRecordingStarted();
+    void onRecordingStopped();
+    void onDurationChanged(qint64 duration);
+    void onStreamingError(int errorCode);
+    void onRecordingError(int errorCode);
+
+private:
+    void saveMetadata();
+
+    QWindowsMediaCaptureService *m_captureService = nullptr;   // not owned
+    QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr; // not owned
+    QMediaRecorder::RecorderState m_state = QMediaRecorder::StoppedState;
+    QString m_fileName;          // destination of the current/last recording
+    QMediaMetaData m_metaData;   // written to file when recording stops
+    qint64 m_duration = 0;
+    bool m_sessionWasActive = false; // session was active before record()
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/mfstream.cpp b/src/plugins/multimedia/windows/mfstream.cpp
new file mode 100644
index 000000000..fb37ce293
--- /dev/null
+++ b/src/plugins/multimedia/windows/mfstream.cpp
@@ -0,0 +1,326 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfstream_p.h"
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+//MFStream is added for supporting QIODevice type of media source.
+//It is used to delegate invocations from media foundation(through IMFByteStream) to QIODevice.
+
+// Wraps a QIODevice as an IMFByteStream. Starts with a COM refcount of 1;
+// ownStream controls whether the device is deleted with this object.
+MFStream::MFStream(QIODevice *stream, bool ownStream)
+    : m_cRef(1)
+    , m_stream(stream)
+    , m_ownStream(ownStream)
+    , m_currentReadResult(0)
+{
+    //Move to the thread of the stream object
+    //to make sure invocations on stream
+    //are happened in the same thread of stream object
+    this->moveToThread(stream->thread());
+}
+
+// Releases a pending async read result and, if owned, schedules the
+// wrapped device for deletion on its own thread.
+MFStream::~MFStream()
+{
+    if (m_currentReadResult)
+        m_currentReadResult->Release();
+    if (m_ownStream)
+        m_stream->deleteLater();
+}
+
+//from IUnknown
+// Standard COM identity: exposes IMFByteStream and IUnknown only.
+STDMETHODIMP MFStream::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+    if (riid == IID_IMFByteStream) {
+        *ppvObject = static_cast<IMFByteStream*>(this);
+    } else if (riid == IID_IUnknown) {
+        *ppvObject = static_cast<IUnknown*>(this);
+    } else {
+        *ppvObject = NULL;
+        return E_NOINTERFACE;
+    }
+    AddRef();
+    return S_OK;
+}
+
+STDMETHODIMP_(ULONG) MFStream::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+// When the last reference is dropped the object is destroyed via
+// deleteLater() so destruction happens on its owning thread.
+STDMETHODIMP_(ULONG) MFStream::Release(void)
+{
+    LONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0) {
+        this->deleteLater();
+    }
+    return cRef;
+}
+
+
+//from IMFByteStream
+// Always readable; seekable only if the underlying device is not sequential.
+STDMETHODIMP MFStream::GetCapabilities(DWORD *pdwCapabilities)
+{
+    if (!pdwCapabilities)
+        return E_INVALIDARG;
+    *pdwCapabilities = MFBYTESTREAM_IS_READABLE;
+    if (!m_stream->isSequential())
+        *pdwCapabilities |= MFBYTESTREAM_IS_SEEKABLE;
+    return S_OK;
+}
+
+STDMETHODIMP MFStream::GetLength(QWORD *pqwLength)
+{
+    if (!pqwLength)
+        return E_INVALIDARG;
+    QMutexLocker locker(&m_mutex);
+    *pqwLength = QWORD(m_stream->size());
+    return S_OK;
+}
+
+// The stream is read-only; the length cannot be changed.
+STDMETHODIMP MFStream::SetLength(QWORD)
+{
+    return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::GetCurrentPosition(QWORD *pqwPosition)
+{
+    if (!pqwPosition)
+        return E_INVALIDARG;
+    QMutexLocker locker(&m_mutex);
+    *pqwPosition = m_stream->pos();
+    return S_OK;
+}
+
+STDMETHODIMP MFStream::SetCurrentPosition(QWORD qwPosition)
+{
+    QMutexLocker locker(&m_mutex);
+    //SetCurrentPosition may happend during the BeginRead/EndRead pair,
+    //refusing to execute SetCurrentPosition during that time seems to be
+    //the simplest workable solution
+    if (m_currentReadResult)
+        return S_FALSE;
+
+    bool seekOK = m_stream->seek(qint64(qwPosition));
+    if (seekOK)
+        return S_OK;
+    else
+        return S_FALSE;
+}
+
+STDMETHODIMP MFStream::IsEndOfStream(BOOL *pfEndOfStream)
+{
+    if (!pfEndOfStream)
+        return E_INVALIDARG;
+    QMutexLocker locker(&m_mutex);
+    *pfEndOfStream = m_stream->atEnd() ? TRUE : FALSE;
+    return S_OK;
+}
+
+// Synchronous read from the current position. NOTE(review): a failed
+// QIODevice::read() returns -1, which would be reported as a huge ULONG in
+// *pcbRead — verify callers handle that; the method always returns S_OK.
+STDMETHODIMP MFStream::Read(BYTE *pb, ULONG cb, ULONG *pcbRead)
+{
+    QMutexLocker locker(&m_mutex);
+    qint64 read = m_stream->read((char*)(pb), qint64(cb));
+    if (pcbRead)
+        *pcbRead = ULONG(read);
+    return S_OK;
+}
+
+// Starts an asynchronous read: records the destination buffer in an
+// AsyncReadState, wraps it in an IMFAsyncResult, and posts a QEvent::User
+// event so the actual read (doRead()) runs on this object's thread. Only
+// one async read may be outstanding at a time (asserted).
+STDMETHODIMP MFStream::BeginRead(BYTE *pb, ULONG cb, IMFAsyncCallback *pCallback,
+                                 IUnknown *punkState)
+{
+    if (!pCallback || !pb)
+        return E_INVALIDARG;
+
+    Q_ASSERT(m_currentReadResult == NULL);
+
+    AsyncReadState *state = new (std::nothrow) AsyncReadState(pb, cb);
+    if (state == NULL)
+        return E_OUTOFMEMORY;
+
+    HRESULT hr = MFCreateAsyncResult(state, pCallback, punkState, &m_currentReadResult);
+    state->Release();
+    if (FAILED(hr))
+        return hr;
+
+    QCoreApplication::postEvent(this, new QEvent(QEvent::User));
+    return hr;
+}
+
+// Completes the async read started by BeginRead: extracts the byte count
+// from the AsyncReadState stored in the result and clears the pending
+// result so a new read (or seek) may proceed.
+STDMETHODIMP MFStream::EndRead(IMFAsyncResult* pResult, ULONG *pcbRead)
+{
+    if (!pcbRead)
+        return E_INVALIDARG;
+    IUnknown *pUnk;
+    pResult->GetObject(&pUnk);
+    AsyncReadState *state = static_cast<AsyncReadState*>(pUnk);
+    *pcbRead = state->bytesRead();
+    pUnk->Release();
+
+    m_currentReadResult->Release();
+    m_currentReadResult = NULL;
+
+    return S_OK;
+}
+
+// The stream is read-only: all write operations are unimplemented.
+STDMETHODIMP MFStream::Write(const BYTE *, ULONG, ULONG *)
+{
+    return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::BeginWrite(const BYTE *, ULONG ,
+                                  IMFAsyncCallback *,
+                                  IUnknown *)
+{
+    return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::EndWrite(IMFAsyncResult *,
+                                ULONG *)
+{
+    return E_NOTIMPL;
+}
+
+// Seeks relative to the beginning or the current position. Refused
+// (S_FALSE) while an async read is outstanding, mirroring
+// SetCurrentPosition(). The flags argument is ignored.
+STDMETHODIMP MFStream::Seek(
+    MFBYTESTREAM_SEEK_ORIGIN SeekOrigin,
+    LONGLONG llSeekOffset,
+    DWORD,
+    QWORD *pqwCurrentPosition)
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_currentReadResult)
+        return S_FALSE;
+
+    qint64 pos = qint64(llSeekOffset);
+    switch (SeekOrigin) {
+    case msoBegin:
+        break;
+    case msoCurrent:
+        pos += m_stream->pos();
+        break;
+    }
+    bool seekOK = m_stream->seek(pos);
+    if (pqwCurrentPosition)
+        *pqwCurrentPosition = pos;
+    if (seekOK)
+        return S_OK;
+    else
+        return S_FALSE;
+}
+
+// Nothing to flush on a read-only stream.
+STDMETHODIMP MFStream::Flush()
+{
+    return E_NOTIMPL;
+}
+
+// Closes the underlying device only when this wrapper owns it.
+STDMETHODIMP MFStream::Close()
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_ownStream)
+        m_stream->close();
+    return S_OK;
+}
+
+// Performs the deferred read for BeginRead on this object's thread.
+// Reads into the AsyncReadState buffer, accumulating bytesRead; if the
+// buffer is not yet full and the device is not at end, the read is left
+// incomplete (another pass can continue it). Once done, the stored async
+// result's status is set and the caller's callback is invoked via
+// MFInvokeCallback.
+void MFStream::doRead()
+{
+    if (!m_stream)
+        return;
+
+    bool readDone = true;
+    IUnknown *pUnk = NULL;
+    HRESULT hr = m_currentReadResult->GetObject(&pUnk);
+    if (SUCCEEDED(hr)) {
+        //do actual read
+        AsyncReadState *state = static_cast<AsyncReadState*>(pUnk);
+        ULONG cbRead;
+        Read(state->pb(), state->cb() - state->bytesRead(), &cbRead);
+        pUnk->Release();
+
+        state->setBytesRead(cbRead + state->bytesRead());
+        if (state->cb() > state->bytesRead() && !m_stream->atEnd()) {
+            readDone = false;
+        }
+    }
+
+    if (readDone) {
+        //now inform the original caller
+        m_currentReadResult->SetStatus(hr);
+        MFInvokeCallback(m_currentReadResult);
+    }
+}
+
+// Dispatches the QEvent::User posted by BeginRead to doRead(); all other
+// events go to the base implementation.
+void MFStream::customEvent(QEvent *event)
+{
+    if (event->type() != QEvent::User) {
+        QObject::customEvent(event);
+        return;
+    }
+    doRead();
+}
+
+//AsyncReadState is a helper class used in BeginRead for asynchronous operation
+//to record some BeginRead parameters, so these parameters could be
+//used later when actually executing the read operation in another thread.
+MFStream::AsyncReadState::AsyncReadState(BYTE *pb, ULONG cb)
+    : m_cRef(1)
+    , m_pb(pb)
+    , m_cb(cb)
+    , m_cbRead(0)
+{
+}
+
+//from IUnknown
+// Minimal COM identity: only IUnknown is exposed.
+STDMETHODIMP MFStream::AsyncReadState::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+
+    if (riid == IID_IUnknown) {
+        *ppvObject = static_cast<IUnknown*>(this);
+    } else {
+        *ppvObject = NULL;
+        return E_NOINTERFACE;
+    }
+    AddRef();
+    return S_OK;
+}
+
+STDMETHODIMP_(ULONG) MFStream::AsyncReadState::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) MFStream::AsyncReadState::Release(void)
+{
+    LONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0)
+        delete this;
+    // For thread safety, return a temporary variable.
+    return cRef;
+}
+
+// Destination buffer supplied to BeginRead.
+BYTE* MFStream::AsyncReadState::pb() const
+{
+    return m_pb;
+}
+
+// Total number of bytes requested.
+ULONG MFStream::AsyncReadState::cb() const
+{
+    return m_cb;
+}
+
+// Number of bytes read so far (updated by doRead()).
+ULONG MFStream::AsyncReadState::bytesRead() const
+{
+    return m_cbRead;
+}
+
+void MFStream::AsyncReadState::setBytesRead(ULONG cbRead)
+{
+    m_cbRead = cbRead;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfstream_p.cpp"
diff --git a/src/plugins/multimedia/windows/mfstream_p.h b/src/plugins/multimedia/windows/mfstream_p.h
new file mode 100644
index 000000000..a5221ed75
--- /dev/null
+++ b/src/plugins/multimedia/windows/mfstream_p.h
@@ -0,0 +1,124 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFSTREAM_H
+#define MFSTREAM_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qiodevice.h>
+#include <QtCore/qcoreevent.h>
+#include <QtCore/qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
// Adapts a QIODevice to the Media Foundation IMFByteStream interface so that
// a Qt stream can act as a byte source for an MF media source. The object is
// reference counted (IUnknown); asynchronous BeginRead requests are serviced
// on this QObject's thread via a QEvent::User custom event (see customEvent).
class MFStream : public QObject, public IMFByteStream
{
    Q_OBJECT
public:
    MFStream(QIODevice *stream, bool ownStream);

    ~MFStream();

    //from IUnknown
    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;

    STDMETHODIMP_(ULONG) AddRef(void) override;

    STDMETHODIMP_(ULONG) Release(void) override;


    //from IMFByteStream
    STDMETHODIMP GetCapabilities(DWORD *pdwCapabilities) override;

    STDMETHODIMP GetLength(QWORD *pqwLength) override;

    STDMETHODIMP SetLength(QWORD) override;

    STDMETHODIMP GetCurrentPosition(QWORD *pqwPosition) override;

    STDMETHODIMP SetCurrentPosition(QWORD qwPosition) override;

    STDMETHODIMP IsEndOfStream(BOOL *pfEndOfStream) override;

    STDMETHODIMP Read(BYTE *pb, ULONG cb, ULONG *pcbRead) override;

    STDMETHODIMP BeginRead(BYTE *pb, ULONG cb, IMFAsyncCallback *pCallback,
                           IUnknown *punkState) override;

    STDMETHODIMP EndRead(IMFAsyncResult* pResult, ULONG *pcbRead) override;

    STDMETHODIMP Write(const BYTE *, ULONG, ULONG *) override;

    STDMETHODIMP BeginWrite(const BYTE *, ULONG ,
                            IMFAsyncCallback *,
                            IUnknown *) override;

    STDMETHODIMP EndWrite(IMFAsyncResult *,
                          ULONG *) override;

    STDMETHODIMP Seek(
        MFBYTESTREAM_SEEK_ORIGIN SeekOrigin,
        LONGLONG llSeekOffset,
        DWORD,
        QWORD *pqwCurrentPosition) override;

    STDMETHODIMP Flush() override;

    STDMETHODIMP Close() override;

private:
    // Captures the parameters of one BeginRead call so the read can be
    // executed later on the stream's thread (see mfstream.cpp for details).
    class AsyncReadState : public IUnknown
    {
    public:
        AsyncReadState(BYTE *pb, ULONG cb);
        virtual ~AsyncReadState() = default;

        //from IUnknown
        STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;

        STDMETHODIMP_(ULONG) AddRef(void) override;

        STDMETHODIMP_(ULONG) Release(void) override;

        BYTE* pb() const;
        ULONG cb() const;
        ULONG bytesRead() const;

        void setBytesRead(ULONG cbRead);

    private:
        long m_cRef;    // COM reference count
        BYTE *m_pb;     // destination buffer
        ULONG m_cb;     // bytes requested
        ULONG m_cbRead; // bytes read so far
    };

    long m_cRef;                   // COM reference count of the stream itself
    QPointer<QIODevice> m_stream;  // underlying Qt device (may be deleted externally)
    bool m_ownStream;              // if true, the device is deleted with this object
    DWORD m_workQueueId;           // MF work queue used for async operations
    QMutex m_mutex;                // guards the pending read state

    void doRead();

protected:
    void customEvent(QEvent *event) override;
    // Result of the BeginRead currently being serviced; completed in doRead().
    IMFAsyncResult *m_currentReadResult;
};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfactivate.cpp b/src/plugins/multimedia/windows/player/mfactivate.cpp
new file mode 100644
index 000000000..644c96529
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfactivate.cpp
@@ -0,0 +1,17 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfactivate_p.h"
+
+#include <mfapi.h>
+
// Creates the internal attribute store all IMFAttributes calls delegate to.
MFAbstractActivate::MFAbstractActivate()
{
    // NOTE(review): the HRESULT is ignored; if MFCreateAttributes fails,
    // m_attributes stays null and every IMFAttributes forwarder in the header
    // would dereference it - confirm failure cannot happen or add a check.
    MFCreateAttributes(&m_attributes, 0);
}
+
// Releases the attribute store (may be null if creation failed in the ctor).
MFAbstractActivate::~MFAbstractActivate()
{
    if (m_attributes)
        m_attributes->Release();
}
diff --git a/src/plugins/multimedia/windows/player/mfactivate_p.h b/src/plugins/multimedia/windows/player/mfactivate_p.h
new file mode 100644
index 000000000..efe75474b
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfactivate_p.h
@@ -0,0 +1,202 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFACTIVATE_H
+#define MFACTIVATE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfidl.h>
+#include <private/qcomobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QtPrivate {
+
// Teaches QComObject which IIDs an IMFActivate-based object answers to:
// IMFActivate itself plus its base interface IMFAttributes (and IUnknown).
template <>
struct QComObjectTraits<IMFActivate>
{
    static constexpr bool isGuidOf(REFIID riid) noexcept
    {
        return QComObjectTraits<IMFActivate, IMFAttributes>::isGuidOf(riid);
    }
};
+
+} // namespace QtPrivate
+
// Base class for custom IMFActivate implementations. QComObject supplies the
// IUnknown machinery; every IMFAttributes method is forwarded verbatim to an
// internal attribute store created in the constructor. Subclasses implement
// the remaining IMFActivate methods (ActivateObject etc.).
// NOTE(review): the forwarders assume m_attributes is non-null, i.e. that
// MFCreateAttributes succeeded in the constructor - confirm.
class MFAbstractActivate : public QComObject<IMFActivate>
{
public:
    explicit MFAbstractActivate();

    //from IMFAttributes
    STDMETHODIMP GetItem(REFGUID guidKey, PROPVARIANT *pValue) override
    {
        return m_attributes->GetItem(guidKey, pValue);
    }

    STDMETHODIMP GetItemType(REFGUID guidKey, MF_ATTRIBUTE_TYPE *pType) override
    {
        return m_attributes->GetItemType(guidKey, pType);
    }

    STDMETHODIMP CompareItem(REFGUID guidKey, REFPROPVARIANT Value, BOOL *pbResult) override
    {
        return m_attributes->CompareItem(guidKey, Value, pbResult);
    }

    STDMETHODIMP Compare(IMFAttributes *pTheirs, MF_ATTRIBUTES_MATCH_TYPE MatchType, BOOL *pbResult) override
    {
        return m_attributes->Compare(pTheirs, MatchType, pbResult);
    }

    STDMETHODIMP GetUINT32(REFGUID guidKey, UINT32 *punValue) override
    {
        return m_attributes->GetUINT32(guidKey, punValue);
    }

    STDMETHODIMP GetUINT64(REFGUID guidKey, UINT64 *punValue) override
    {
        return m_attributes->GetUINT64(guidKey, punValue);
    }

    STDMETHODIMP GetDouble(REFGUID guidKey, double *pfValue) override
    {
        return m_attributes->GetDouble(guidKey, pfValue);
    }

    STDMETHODIMP GetGUID(REFGUID guidKey, GUID *pguidValue) override
    {
        return m_attributes->GetGUID(guidKey, pguidValue);
    }

    STDMETHODIMP GetStringLength(REFGUID guidKey, UINT32 *pcchLength) override
    {
        return m_attributes->GetStringLength(guidKey, pcchLength);
    }

    STDMETHODIMP GetString(REFGUID guidKey, LPWSTR pwszValue, UINT32 cchBufSize, UINT32 *pcchLength) override
    {
        return m_attributes->GetString(guidKey, pwszValue, cchBufSize, pcchLength);
    }

    STDMETHODIMP GetAllocatedString(REFGUID guidKey, LPWSTR *ppwszValue, UINT32 *pcchLength) override
    {
        return m_attributes->GetAllocatedString(guidKey, ppwszValue, pcchLength);
    }

    STDMETHODIMP GetBlobSize(REFGUID guidKey, UINT32 *pcbBlobSize) override
    {
        return m_attributes->GetBlobSize(guidKey, pcbBlobSize);
    }

    STDMETHODIMP GetBlob(REFGUID guidKey, UINT8 *pBuf, UINT32 cbBufSize, UINT32 *pcbBlobSize) override
    {
        return m_attributes->GetBlob(guidKey, pBuf, cbBufSize, pcbBlobSize);
    }

    STDMETHODIMP GetAllocatedBlob(REFGUID guidKey, UINT8 **ppBuf, UINT32 *pcbSize) override
    {
        return m_attributes->GetAllocatedBlob(guidKey, ppBuf, pcbSize);
    }

    STDMETHODIMP GetUnknown(REFGUID guidKey, REFIID riid, LPVOID *ppv) override
    {
        return m_attributes->GetUnknown(guidKey, riid, ppv);
    }

    STDMETHODIMP SetItem(REFGUID guidKey, REFPROPVARIANT Value) override
    {
        return m_attributes->SetItem(guidKey, Value);
    }

    STDMETHODIMP DeleteItem(REFGUID guidKey) override
    {
        return m_attributes->DeleteItem(guidKey);
    }

    STDMETHODIMP DeleteAllItems() override
    {
        return m_attributes->DeleteAllItems();
    }

    STDMETHODIMP SetUINT32(REFGUID guidKey, UINT32 unValue) override
    {
        return m_attributes->SetUINT32(guidKey, unValue);
    }

    STDMETHODIMP SetUINT64(REFGUID guidKey, UINT64 unValue) override
    {
        return m_attributes->SetUINT64(guidKey, unValue);
    }

    STDMETHODIMP SetDouble(REFGUID guidKey, double fValue) override
    {
        return m_attributes->SetDouble(guidKey, fValue);
    }

    STDMETHODIMP SetGUID(REFGUID guidKey, REFGUID guidValue) override
    {
        return m_attributes->SetGUID(guidKey, guidValue);
    }

    STDMETHODIMP SetString(REFGUID guidKey, LPCWSTR wszValue) override
    {
        return m_attributes->SetString(guidKey, wszValue);
    }

    STDMETHODIMP SetBlob(REFGUID guidKey, const UINT8 *pBuf, UINT32 cbBufSize) override
    {
        return m_attributes->SetBlob(guidKey, pBuf, cbBufSize);
    }

    STDMETHODIMP SetUnknown(REFGUID guidKey, IUnknown *pUnknown) override
    {
        return m_attributes->SetUnknown(guidKey, pUnknown);
    }

    STDMETHODIMP LockStore() override
    {
        return m_attributes->LockStore();
    }

    STDMETHODIMP UnlockStore() override
    {
        return m_attributes->UnlockStore();
    }

    STDMETHODIMP GetCount(UINT32 *pcItems) override
    {
        return m_attributes->GetCount(pcItems);
    }

    STDMETHODIMP GetItemByIndex(UINT32 unIndex, GUID *pguidKey, PROPVARIANT *pValue) override
    {
        return m_attributes->GetItemByIndex(unIndex, pguidKey, pValue);
    }

    STDMETHODIMP CopyAllItems(IMFAttributes *pDest) override
    {
        return m_attributes->CopyAllItems(pDest);
    }

protected:
    // Destructor is not public. Caller should call Release.
    ~MFAbstractActivate() override;

private:
    // Attribute store all IMFAttributes calls are forwarded to.
    IMFAttributes *m_attributes = nullptr;
};
+
+QT_END_NAMESPACE
+
+#endif // MFACTIVATE_H
diff --git a/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp
new file mode 100644
index 000000000..109f7964b
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp
@@ -0,0 +1,55 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfevrvideowindowcontrol_p.h"
+
+#include <qdebug.h>
+
// Extends EvrVideoWindowControl with creation/teardown of the EVR media sink
// through an IMFActivate object.
MFEvrVideoWindowControl::MFEvrVideoWindowControl(QVideoSink *parent)
    : EvrVideoWindowControl(parent)
    , m_currentActivate(NULL)
    , m_evrSink(NULL)
{
}
+
// Releases the sink and activation object (if any) on destruction.
MFEvrVideoWindowControl::~MFEvrVideoWindowControl()
{
    clear();
}
+
+void MFEvrVideoWindowControl::clear()
+{
+ setEvr(NULL);
+
+ if (m_evrSink)
+ m_evrSink->Release();
+ if (m_currentActivate) {
+ m_currentActivate->ShutdownObject();
+ m_currentActivate->Release();
+ }
+ m_evrSink = NULL;
+ m_currentActivate = NULL;
+}
+
+IMFActivate* MFEvrVideoWindowControl::createActivate()
+{
+ clear();
+
+ if (FAILED(MFCreateVideoRendererActivate(0, &m_currentActivate))) {
+ qWarning() << "Failed to create evr video renderer activate!";
+ return NULL;
+ }
+ if (FAILED(m_currentActivate->ActivateObject(IID_IMFMediaSink, (LPVOID*)(&m_evrSink)))) {
+ qWarning() << "Failed to activate evr media sink!";
+ return NULL;
+ }
+ if (!setEvr(m_evrSink))
+ return NULL;
+
+ return m_currentActivate;
+}
+
// Public teardown entry point; releases the sink and activation object.
void MFEvrVideoWindowControl::releaseActivate()
{
    clear();
}
diff --git a/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h
new file mode 100644
index 000000000..1ac90e8ce
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h
@@ -0,0 +1,38 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFEVRVIDEOWINDOWCONTROL_H
+#define MFEVRVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "evrvideowindowcontrol_p.h"
+
+QT_USE_NAMESPACE
+
// EVR window control used by the MF player pipeline: it owns the IMFActivate
// that the session's topology output node is built from.
class MFEvrVideoWindowControl : public EvrVideoWindowControl
{
public:
    MFEvrVideoWindowControl(QVideoSink *parent = 0);
    ~MFEvrVideoWindowControl();

    // Creates (or recreates) the EVR sink and returns its activation object.
    IMFActivate* createActivate();
    // Releases the sink and activation object created by createActivate().
    void releaseActivate();

private:
    void clear();

    IMFActivate *m_currentActivate; // activation object for the EVR sink
    IMFMediaSink *m_evrSink;        // activated EVR media sink
};
+
+#endif // MFEVRVIDEOWINDOWCONTROL_H
diff --git a/src/plugins/multimedia/windows/player/mfplayercontrol.cpp b/src/plugins/multimedia/windows/player/mfplayercontrol.cpp
new file mode 100644
index 000000000..ae0022773
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayercontrol.cpp
@@ -0,0 +1,306 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfplayercontrol_p.h"
+#include "mfplayersession_p.h"
+#include "mfvideorenderercontrol_p.h"
+#include <qdebug.h>
+
+//#define DEBUG_MEDIAFOUNDATION
+
+MFPlayerControl::MFPlayerControl(QMediaPlayer *player)
+ : QPlatformMediaPlayer(player)
+ , m_state(QMediaPlayer::StoppedState)
+ , m_stateDirty(false)
+ , m_videoAvailable(false)
+ , m_audioAvailable(false)
+ , m_duration(0)
+ , m_seekable(false)
+{
+ m_session = new MFPlayerSession(this);
+}
+
// Shuts the session down and drops this control's COM reference to it.
// The session is reference counted, so Release() rather than delete.
MFPlayerControl::~MFPlayerControl()
{
    m_session->close();
    m_session->setPlayerControl(nullptr);
    m_session->Release();
}
+
// Loads a new media source (URL or stream). Any current playback is stopped
// first, cached per-media state (tracks, duration, seekability) is reset,
// then the session is asked to load the new source asynchronously.
void MFPlayerControl::setMedia(const QUrl &media, QIODevice *stream)
{
    if (m_state != QMediaPlayer::StoppedState) {
        changeState(QMediaPlayer::StoppedState);
        m_session->stop(true);
        refreshState();
    }

    m_media = media;
    m_stream = stream;
    resetAudioVideoAvailable();
    handleDurationUpdate(0);
    handleSeekableUpdate(false);
    m_session->load(media, stream);
}
+
// Starts (or resumes) playback. If the previous load failed the media is
// reloaded first. When the media is not yet ready (loading/stalled) only the
// state is switched; handleStatusChanged() starts the session once ready.
void MFPlayerControl::play()
{
    if (m_state == QMediaPlayer::PlayingState)
        return;
    resetCurrentLoop();
    if (QMediaPlayer::InvalidMedia == m_session->status())
        m_session->load(m_media, m_stream);

    switch (m_session->status()) {
    case QMediaPlayer::NoMedia:
    case QMediaPlayer::InvalidMedia:
        return;
    case QMediaPlayer::LoadedMedia:
    case QMediaPlayer::BufferingMedia:
    case QMediaPlayer::BufferedMedia:
    case QMediaPlayer::EndOfMedia:
        changeState(QMediaPlayer::PlayingState);
        m_session->start();
        break;
    default: //Loading/Stalled
        changeState(QMediaPlayer::PlayingState);
        break;
    }
    refreshState();
}
+
+void MFPlayerControl::pause()
+{
+ if (m_state == QMediaPlayer::PausedState)
+ return;
+
+ if (m_session->status() == QMediaPlayer::NoMedia ||
+ m_session->status() == QMediaPlayer::InvalidMedia)
+ return;
+
+ changeState(QMediaPlayer::PausedState);
+ m_session->pause();
+ refreshState();
+}
+
+void MFPlayerControl::stop()
+{
+ if (m_state == QMediaPlayer::StoppedState)
+ return;
+ changeState(QMediaPlayer::StoppedState);
+ m_session->stop();
+ refreshState();
+}
+
// Metadata of the loaded media, as extracted by the session.
QMediaMetaData MFPlayerControl::metaData() const
{
    return m_session->metaData();
}

// Routes audio to the given platform audio output.
void MFPlayerControl::setAudioOutput(QPlatformAudioOutput *output)
{
    m_session->setAudioOutput(output);
}

// Routes decoded video frames to the given sink.
void MFPlayerControl::setVideoSink(QVideoSink *sink)
{
    m_session->setVideoSink(sink);
}
+
+void MFPlayerControl::changeState(QMediaPlayer::PlaybackState state)
+{
+ if (m_state == state)
+ return;
+ m_state = state;
+ m_stateDirty = true;
+}
+
// Emits stateChanged() if changeState() recorded a pending transition.
void MFPlayerControl::refreshState()
{
    if (!m_stateDirty)
        return;
    m_stateDirty = false;
#ifdef DEBUG_MEDIAFOUNDATION
    qDebug() << "MFPlayerControl::emit stateChanged" << m_state;
#endif
    stateChanged(m_state);
}
+
// Reacts to a session media-status change: restarts at end-of-media when
// looping, otherwise stops; starts deferred playback once the media becomes
// ready while we are in PlayingState. Always forwards the status change.
void MFPlayerControl::handleStatusChanged()
{
    QMediaPlayer::MediaStatus status = m_session->status();
    switch (status) {
    case QMediaPlayer::EndOfMedia:
        if (doLoop()) {
            setPosition(0);
            m_session->start();
        } else {
            changeState(QMediaPlayer::StoppedState);
        }
        break;
    case QMediaPlayer::InvalidMedia:
        break;
    case QMediaPlayer::LoadedMedia:
    case QMediaPlayer::BufferingMedia:
    case QMediaPlayer::BufferedMedia:
        // Media became ready: honor a play() issued while it was loading.
        if (m_state == QMediaPlayer::PlayingState)
            m_session->start();
        break;
    default:
        break;
    }
    mediaStatusChanged(m_session->status());
    refreshState();
}
+
// Forwards the session's track-list change notification.
void MFPlayerControl::handleTracksChanged()
{
    tracksChanged();
}
+
+void MFPlayerControl::handleVideoAvailable()
+{
+ if (m_videoAvailable)
+ return;
+ m_videoAvailable = true;
+ videoAvailableChanged(m_videoAvailable);
+}
+
+void MFPlayerControl::handleAudioAvailable()
+{
+ if (m_audioAvailable)
+ return;
+ m_audioAvailable = true;
+ audioAvailableChanged(m_audioAvailable);
+}
+
// Clears the cached audio/video availability when a new media is loaded.
// Note the deliberate signal ordering: audioAvailableChanged is emitted
// before videoAvailableChanged (the video emission is deferred via a flag).
void MFPlayerControl::resetAudioVideoAvailable()
{
    bool videoDirty = false;
    if (m_videoAvailable) {
        m_videoAvailable = false;
        videoDirty = true;
    }
    if (m_audioAvailable) {
        m_audioAvailable = false;
        audioAvailableChanged(m_audioAvailable);
    }
    if (videoDirty)
        videoAvailableChanged(m_videoAvailable);
}
+
+void MFPlayerControl::handleDurationUpdate(qint64 duration)
+{
+ if (m_duration == duration)
+ return;
+ m_duration = duration;
+ durationChanged(m_duration);
+}
+
+void MFPlayerControl::handleSeekableUpdate(bool seekable)
+{
+ if (m_seekable == seekable)
+ return;
+ m_seekable = seekable;
+ seekableChanged(m_seekable);
+}
+
// Current playback state as last recorded by changeState().
QMediaPlayer::PlaybackState MFPlayerControl::state() const
{
    return m_state;
}

// Media status is owned by the session.
QMediaPlayer::MediaStatus MFPlayerControl::mediaStatus() const
{
    return m_session->status();
}

// Cached duration in milliseconds (0 until the media is loaded).
qint64 MFPlayerControl::duration() const
{
    return m_duration;
}

// Current playback position in milliseconds, queried from the session.
qint64 MFPlayerControl::position() const
{
    return m_session->position();
}
+
+void MFPlayerControl::setPosition(qint64 position)
+{
+ if (!m_seekable || position == m_session->position())
+ return;
+ m_session->setPosition(position);
+}
+
// Buffer fill level scaled from the session's percentage to 0.0-1.0.
float MFPlayerControl::bufferProgress() const
{
    return m_session->bufferProgress() / 100.;
}

// Cached availability flags, set by the handle*Available() callbacks.
bool MFPlayerControl::isAudioAvailable() const
{
    return m_audioAvailable;
}

bool MFPlayerControl::isVideoAvailable() const
{
    return m_videoAvailable;
}

bool MFPlayerControl::isSeekable() const
{
    return m_seekable;
}

// Seekable ranges are computed by the session.
QMediaTimeRange MFPlayerControl::availablePlaybackRanges() const
{
    return m_session->availablePlaybackRanges();
}

// Playback rate is owned by the session.
qreal MFPlayerControl::playbackRate() const
{
    return m_session->playbackRate();
}

void MFPlayerControl::setPlaybackRate(qreal rate)
{
    m_session->setPlaybackRate(rate);
}

// Source URL / stream last passed to setMedia().
QUrl MFPlayerControl::media() const
{
    return m_media;
}

const QIODevice* MFPlayerControl::mediaStream() const
{
    return m_stream;
}
+
// Forwards a session error to the public API; a fatal error stops playback
// before the error is reported.
void MFPlayerControl::handleError(QMediaPlayer::Error errorCode, const QString& errorString, bool isFatal)
{
    if (isFatal)
        stop();
    error(int(errorCode), errorString);
}
+
// Track selection and enumeration are delegated to the session, which owns
// the per-track metadata collected while building the topology.
void MFPlayerControl::setActiveTrack(TrackType type, int index)
{
    m_session->setActiveTrack(type, index);
}

int MFPlayerControl::activeTrack(TrackType type)
{
    return m_session->activeTrack(type);
}

int MFPlayerControl::trackCount(TrackType type)
{
    return m_session->trackCount(type);
}

QMediaMetaData MFPlayerControl::trackMetaData(TrackType type, int trackNumber)
{
    return m_session->trackMetaData(type, trackNumber);
}
+
diff --git a/src/plugins/multimedia/windows/player/mfplayercontrol_p.h b/src/plugins/multimedia/windows/player/mfplayercontrol_p.h
new file mode 100644
index 000000000..db863afaa
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayercontrol_p.h
@@ -0,0 +1,103 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFPLAYERCONTROL_H
+#define MFPLAYERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qurl.h"
+#include "private/qplatformmediaplayer_p.h"
+
+#include <QtCore/qcoreevent.h>
+
+QT_BEGIN_NAMESPACE
+
+class MFPlayerSession;
+
// QPlatformMediaPlayer implementation backed by Windows Media Foundation.
// Thin state-keeping facade: playback work is delegated to MFPlayerSession,
// which calls back into the handle*() methods below.
class MFPlayerControl : public QPlatformMediaPlayer
{
public:
    MFPlayerControl(QMediaPlayer *player);
    ~MFPlayerControl();

    QMediaPlayer::PlaybackState state() const override;

    QMediaPlayer::MediaStatus mediaStatus() const override;

    qint64 duration() const override;

    qint64 position() const override;
    void setPosition(qint64 position) override;

    float bufferProgress() const override;

    bool isAudioAvailable() const override;
    bool isVideoAvailable() const override;

    bool isSeekable() const override;

    QMediaTimeRange availablePlaybackRanges() const override;

    qreal playbackRate() const override;
    void setPlaybackRate(qreal rate) override;

    QUrl media() const override;
    const QIODevice *mediaStream() const override;
    void setMedia(const QUrl &media, QIODevice *stream) override;

    void play() override;
    void pause() override;
    void stop() override;

    bool streamPlaybackSupported() const override { return true; }

    QMediaMetaData metaData() const override;

    void setAudioOutput(QPlatformAudioOutput *output) override;

    void setVideoSink(QVideoSink *sink) override;

    void setActiveTrack(TrackType type, int index) override;
    int activeTrack(TrackType type) override;
    int trackCount(TrackType type) override;
    QMediaMetaData trackMetaData(TrackType type, int trackNumber) override;

    // Callbacks invoked by MFPlayerSession while loading/playing.
    void handleStatusChanged();
    void handleTracksChanged();
    void handleVideoAvailable();
    void handleAudioAvailable();
    void handleDurationUpdate(qint64 duration);
    void handleSeekableUpdate(bool seekable);
    void handleError(QMediaPlayer::Error errorCode, const QString& errorString, bool isFatal);

private:
    void changeState(QMediaPlayer::PlaybackState state);
    void resetAudioVideoAvailable();
    void refreshState();

    QMediaPlayer::PlaybackState m_state; // last recorded playback state
    bool m_stateDirty;                   // true while a state change is unemitted

    bool m_videoAvailable;               // cached per-media availability flags
    bool m_audioAvailable;
    qint64 m_duration;                   // cached duration in ms
    bool m_seekable;                     // cached seekability flag

    QIODevice *m_stream;                 // stream source passed to setMedia (not owned)
    QUrl m_media;                        // URL source passed to setMedia
    MFPlayerSession *m_session;          // reference-counted MF session (Release'd in dtor)
};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfplayersession.cpp b/src/plugins/multimedia/windows/player/mfplayersession.cpp
new file mode 100644
index 000000000..996ce35d8
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayersession.cpp
@@ -0,0 +1,1736 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "private/qplatformmediaplayer_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qthread.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qbuffer.h>
+
+#include "private/qplatformaudiooutput_p.h"
+#include "qaudiooutput.h"
+
+#include "mfplayercontrol_p.h"
+#include "mfvideorenderercontrol_p.h"
+#include <mfmetadata_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qwindowsaudioutils_p.h>
+
+#include "mfplayersession_p.h"
+#include <mferror.h>
+#include <nserror.h>
+#include <winerror.h>
+#include "sourceresolver_p.h"
+#include <wmcodecdsp.h>
+
+#include <mmdeviceapi.h>
+#include <propvarutil.h>
+#include <functiondiscoverykeys_devpkey.h>
+
+//#define DEBUG_MEDIAFOUNDATION
+
+QT_BEGIN_NAMESPACE
+
// Sets up the session's initial command state machine (current and pending
// command both idle, rate 1.0), the position-polling timer and the video
// renderer control. MF session events are marshalled onto this thread via
// the sessionEvent signal.
MFPlayerSession::MFPlayerSession(MFPlayerControl *playerControl)
    : m_cRef(1),
      m_playerControl(playerControl),
      m_scrubbing(false),
      m_restoreRate(1),
      m_closing(false),
      m_mediaTypes(0),
      m_pendingRate(1),
      m_status(QMediaPlayer::NoMedia)

{
    connect(this, &MFPlayerSession::sessionEvent, this, &MFPlayerSession::handleSessionEvent);

    // Poll the playback position frequently; positionChanged is throttled in timeout().
    m_signalPositionChangeTimer.setInterval(10);
    m_signalPositionChangeTimer.setTimerType(Qt::PreciseTimer);
    m_signalPositionChangeTimer.callOnTimeout(this, &MFPlayerSession::timeout);

    m_pendingState = NoPending;
    ZeroMemory(&m_state, sizeof(m_state));
    m_state.command = CmdStop;
    m_state.prevCmd = CmdNone;
    m_state.rate = 1.0f;
    ZeroMemory(&m_request, sizeof(m_request));
    m_request.command = CmdNone;
    m_request.prevCmd = CmdNone;
    m_request.rate = 1.0f;

    m_videoRendererControl = new MFVideoRendererControl(this);
}
+
// Timer slot polling the playback position. positionChanged is emitted at
// most every 10th tick. Near the end of the media (m_duration is in 100 ns
// units, pos in ms; the 20 ms margin triggers slightly early) a looping
// player is paused and rewound instead of being allowed to run off the end.
void MFPlayerSession::timeout()
{
    const qint64 pos = position();

    if (pos != m_lastPosition) {
        const bool updatePos = m_timeCounter++ % 10 == 0;
        if (pos >= qint64(m_duration / 10000 - 20)) {
            if (m_playerControl->doLoop()) {
                m_session->Pause();
                setPosition(0);
                positionChanged(0);
            } else {
                if (updatePos)
                    positionChanged(pos);
            }
        } else {
            if (updatePos)
                positionChanged(pos);
        }
        m_lastPosition = pos;
    }
}
+
+void MFPlayerSession::close()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "close";
+#endif
+
+ m_signalPositionChangeTimer.stop();
+ clear();
+ if (!m_session)
+ return;
+
+ HRESULT hr = S_OK;
+ if (m_session) {
+ m_closing = true;
+ hr = m_session->Close();
+ if (SUCCEEDED(hr)) {
+ DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent.get(), 2000);
+ if (dwWaitResult == WAIT_TIMEOUT) {
+ qWarning() << "session close time out!";
+ }
+ }
+ m_closing = false;
+ }
+
+ if (SUCCEEDED(hr)) {
+ if (m_session)
+ m_session->Shutdown();
+ if (m_sourceResolver)
+ m_sourceResolver->shutdown();
+ }
+ m_sourceResolver.Reset();
+
+ m_videoRendererControl->releaseActivate();
+// } else if (m_playerService->videoWindowControl()) {
+// m_playerService->videoWindowControl()->releaseActivate();
+// }
+
+ m_session.Reset();
+ m_hCloseEvent = {};
+ m_lastPosition = -1;
+ m_position = 0;
+}
+
// Starts asynchronous loading of a new media source. Empty url+stream means
// "unload"; an unreadable stream is rejected immediately; otherwise a fresh
// session is created and the source resolver starts resolving. Completion is
// reported via handleMediaSourceReady()/handleSourceError().
void MFPlayerSession::load(const QUrl &url, QIODevice *stream)
{
#ifdef DEBUG_MEDIAFOUNDATION
    qDebug() << "load";
#endif
    clear();

    // Abort a resolve that is still in flight for the previous media.
    if (m_status == QMediaPlayer::LoadingMedia && m_sourceResolver)
        m_sourceResolver->cancel();

    if (url.isEmpty() && !stream) {
        close();
        changeStatus(QMediaPlayer::NoMedia);
    } else if (stream && (!stream->isReadable())) {
        close();
        changeStatus(QMediaPlayer::InvalidMedia);
        error(QMediaPlayer::ResourceError, tr("Invalid stream source."), true);
    } else if (createSession()) {
        changeStatus(QMediaPlayer::LoadingMedia);
        m_sourceResolver->load(url, stream);
        if (url.isLocalFile())
            m_updateRoutingOnStart = true;
    }
    positionChanged(position());
}
+
// Maps a source-resolver failure code to a QMediaPlayer error + message and
// marks the media invalid. Note: QMediaPlayer::FormatError is used as a
// sentinel value passed in place of an HRESULT for invalid Qt resources.
void MFPlayerSession::handleSourceError(long hr)
{
    QString errorString;
    QMediaPlayer::Error errorCode = QMediaPlayer::ResourceError;
    switch (hr) {
    case QMediaPlayer::FormatError:
        errorCode = QMediaPlayer::FormatError;
        errorString = tr("Attempting to play invalid Qt resource.");
        break;
    case NS_E_FILE_NOT_FOUND:
        errorString = tr("The system cannot find the file specified.");
        break;
    case NS_E_SERVER_NOT_FOUND:
        errorString = tr("The specified server could not be found.");
        break;
    case MF_E_UNSUPPORTED_BYTESTREAM_TYPE:
        errorCode = QMediaPlayer::FormatError;
        errorString = tr("Unsupported media type.");
        break;
    case MF_E_UNSUPPORTED_SCHEME:
        errorCode = QMediaPlayer::ResourceError;
        errorString = tr("Unsupported URL scheme.");
        break;
    case QMM_WININET_E_CANNOT_CONNECT:
        errorCode = QMediaPlayer::NetworkError;
        errorString = tr("Connection to server could not be established.");
        break;
    default:
        qWarning() << "handleSourceError:"
                   << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hr);
        errorString = tr("Failed to load source.");
        break;
    }
    changeStatus(QMediaPlayer::InvalidMedia);
    error(errorCode, errorString, true);
}
+
// Called when the source resolver has produced an IMFMediaSource. Publishes
// seekability, metadata and duration, then builds the playback topology.
// Stale callbacks (status changed meanwhile, or a different resolver) are
// ignored.
void MFPlayerSession::handleMediaSourceReady()
{
    if (QMediaPlayer::LoadingMedia != m_status || !m_sourceResolver
        || m_sourceResolver.Get() != sender())
        return;
#ifdef DEBUG_MEDIAFOUNDATION
    qDebug() << "handleMediaSourceReady";
#endif
    HRESULT hr = S_OK;
    IMFMediaSource* mediaSource = m_sourceResolver->mediaSource();

    DWORD dwCharacteristics = 0;
    mediaSource->GetCharacteristics(&dwCharacteristics);
    seekableUpdate(MFMEDIASOURCE_CAN_SEEK & dwCharacteristics);

    ComPtr<IMFPresentationDescriptor> sourcePD;
    hr = mediaSource->CreatePresentationDescriptor(&sourcePD);
    if (SUCCEEDED(hr)) {
        m_duration = 0;
        m_metaData = MFMetaData::fromNative(mediaSource);
        metaDataChanged();
        sourcePD->GetUINT64(MF_PD_DURATION, &m_duration);
        //convert from 100 nanosecond to milisecond
        durationUpdate(qint64(m_duration / 10000));
        setupPlaybackTopology(mediaSource, sourcePD.Get());
        tracksChanged();
    } else {
        changeStatus(QMediaPlayer::InvalidMedia);
        error(QMediaPlayer::ResourceError, tr("Cannot create presentation descriptor."), true);
    }
}
+
+bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
+ MFPlayerSession::MediaType *type,
+ QString *name,
+ QString *language,
+ GUID *format) const
+{
+ if (!stream || !type || !name || !language || !format)
+ return false;
+
+ *type = Unknown;
+ *name = QString();
+ *language = QString();
+
+ ComPtr<IMFMediaTypeHandler> typeHandler;
+
+ if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler))) {
+
+ UINT32 len = 0;
+ if (SUCCEEDED(stream->GetStringLength(QMM_MF_SD_STREAM_NAME, &len)) && len > 0) {
+ WCHAR *wstr = new WCHAR[len+1];
+ if (SUCCEEDED(stream->GetString(QMM_MF_SD_STREAM_NAME, wstr, len+1, &len))) {
+ *name = QString::fromUtf16(reinterpret_cast<const char16_t *>(wstr));
+ }
+ delete []wstr;
+ }
+ if (SUCCEEDED(stream->GetStringLength(QMM_MF_SD_LANGUAGE, &len)) && len > 0) {
+ WCHAR *wstr = new WCHAR[len+1];
+ if (SUCCEEDED(stream->GetString(QMM_MF_SD_LANGUAGE, wstr, len+1, &len))) {
+ *language = QString::fromUtf16(reinterpret_cast<const char16_t *>(wstr));
+ }
+ delete []wstr;
+ }
+
+ GUID guidMajorType;
+ if (SUCCEEDED(typeHandler->GetMajorType(&guidMajorType))) {
+ if (guidMajorType == MFMediaType_Audio)
+ *type = Audio;
+ else if (guidMajorType == MFMediaType_Video)
+ *type = Video;
+ }
+
+ ComPtr<IMFMediaType> mediaType;
+ if (SUCCEEDED(typeHandler->GetCurrentMediaType(&mediaType))) {
+ mediaType->GetGUID(MF_MT_SUBTYPE, format);
+ }
+ }
+
+ return *type != Unknown;
+}
+
// Builds the MF playback topology: for every stream in the presentation
// descriptor, records track metadata, and for the first selected stream of
// each kind (one audio + one video) creates and connects a source node and
// an output node. Unused streams are deselected. On success the topology is
// handed to the session (completion arrives as a session event).
void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD)
{
    HRESULT hr = S_OK;
    // Get the number of streams in the media source.
    DWORD cSourceStreams = 0;
    hr = sourcePD->GetStreamDescriptorCount(&cSourceStreams);
    if (FAILED(hr)) {
        changeStatus(QMediaPlayer::InvalidMedia);
        error(QMediaPlayer::ResourceError, tr("Failed to get stream count."), true);
        return;
    }

    ComPtr<IMFTopology> topology;
    hr = MFCreateTopology(&topology);
    if (FAILED(hr)) {
        changeStatus(QMediaPlayer::InvalidMedia);
        error(QMediaPlayer::ResourceError, tr("Failed to create topology."), true);
        return;
    }

    // For each stream, create the topology nodes and add them to the topology.
    DWORD succeededCount = 0;
    for (DWORD i = 0; i < cSourceStreams; i++) {
        BOOL selected = FALSE;
        bool streamAdded = false;
        ComPtr<IMFStreamDescriptor> streamDesc;

        HRESULT hr = sourcePD->GetStreamDescriptorByIndex(i, &selected, &streamDesc);
        if (SUCCEEDED(hr)) {
            // The media might have multiple audio and video streams,
            // only use one of each kind, and only if it is selected by default.
            MediaType mediaType = Unknown;
            QString streamName;
            QString streamLanguage;
            GUID format = GUID_NULL;

            if (getStreamInfo(streamDesc.Get(), &mediaType, &streamName, &streamLanguage,
                              &format)) {

                QPlatformMediaPlayer::TrackType trackType = (mediaType == Audio) ?
                    QPlatformMediaPlayer::AudioStream : QPlatformMediaPlayer::VideoStream;

                QLocale::Language lang = streamLanguage.isEmpty() ?
                    QLocale::Language::AnyLanguage : QLocale(streamLanguage).language();

                // Every stream is recorded as a selectable track, even if it
                // does not end up in the initial topology.
                QMediaMetaData metaData;
                metaData.insert(QMediaMetaData::Title, streamName);
                metaData.insert(QMediaMetaData::Language, lang);

                m_trackInfo[trackType].metaData.append(metaData);
                m_trackInfo[trackType].nativeIndexes.append(i);
                m_trackInfo[trackType].format = format;

                if (((m_mediaTypes & mediaType) == 0) && selected) { // Check if this type isn't already added
                    ComPtr<IMFTopologyNode> sourceNode =
                        addSourceNode(topology.Get(), source, sourcePD, streamDesc.Get());
                    if (sourceNode) {
                        ComPtr<IMFTopologyNode> outputNode =
                            addOutputNode(mediaType, topology.Get(), 0);
                        if (outputNode) {
                            sourceNode->GetTopoNodeID(&m_trackInfo[trackType].sourceNodeId);
                            outputNode->GetTopoNodeID(&m_trackInfo[trackType].outputNodeId);

                            hr = sourceNode->ConnectOutput(0, outputNode.Get(), 0);

                            if (FAILED(hr)) {
                                error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false);
                            } else {
                                m_trackInfo[trackType].currentIndex = m_trackInfo[trackType].nativeIndexes.count() - 1;
                                streamAdded = true;
                                succeededCount++;
                                m_mediaTypes |= mediaType;
                                switch (mediaType) {
                                case Audio:
                                    audioAvailable();
                                    break;
                                case Video:
                                    videoAvailable();
                                    break;
                                default:
                                    break;
                                }
                            }
                        } else {
                            // remove the source node if the output node cannot be created
                            topology->RemoveNode(sourceNode.Get());
                        }
                    }
                }
            }

            if (selected && !streamAdded)
                sourcePD->DeselectStream(i);
        }
    }

    if (succeededCount == 0) {
        changeStatus(QMediaPlayer::InvalidMedia);
        error(QMediaPlayer::ResourceError, tr("Unable to play."), true);
    } else {
        // Insert the video processing MFT in front of the video output node.
        if (m_trackInfo[QPlatformMediaPlayer::VideoStream].outputNodeId != TOPOID(-1))
            topology = insertMFT(topology, m_trackInfo[QPlatformMediaPlayer::VideoStream].outputNodeId);

        hr = m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, topology.Get());
        if (SUCCEEDED(hr)) {
            m_updatingTopology = true;
        } else {
            changeStatus(QMediaPlayer::InvalidMedia);
            error(QMediaPlayer::ResourceError, tr("Failed to set topology."), true);
        }
    }
}
+
+ComPtr<IMFTopologyNode> MFPlayerSession::addSourceNode(IMFTopology *topology,
+ IMFMediaSource *source,
+ IMFPresentationDescriptor *presentationDesc,
+ IMFStreamDescriptor *streamDesc)
+{
+ ComPtr<IMFTopologyNode> node;
+ HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &node);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_SOURCE, source);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, presentationDesc);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, streamDesc);
+ if (SUCCEEDED(hr)) {
+ hr = topology->AddNode(node.Get());
+ if (SUCCEEDED(hr))
+ return node;
+ }
+ }
+ }
+ }
+ return NULL;
+}
+
// Creates an output (sink) topology node for the given media type and adds it
// to the topology. For audio this is an audio renderer activate, routed to the
// configured output endpoint; for video it is the activate supplied by the
// video renderer control. Returns null on failure or unknown media type with
// no usable activate.
ComPtr<IMFTopologyNode> MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology *topology,
                                                       DWORD sinkID)
{
    ComPtr<IMFTopologyNode> node;
    if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node)))
        return NULL;

    ComPtr<IMFActivate> activate;
    if (mediaType == Audio) {
        if (m_audioOutput) {
            auto id = m_audioOutput->device.id();
            if (id.isEmpty()) {
                // No endpoint to route audio to; give up on the audio node.
                qInfo() << "No audio output";
                return NULL;
            }

            HRESULT hr = MFCreateAudioRendererActivate(&activate);
            if (FAILED(hr)) {
                qWarning() << "Failed to create audio renderer activate";
                return NULL;
            }

            // Route the renderer to the selected device endpoint.
            QString s = QString::fromUtf8(id);
            hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
            if (FAILED(hr)) {
                qWarning() << "Failed to set attribute for audio device"
                           << m_audioOutput->device.description();
                return NULL;
            }
        }
    } else if (mediaType == Video) {
        activate = m_videoRendererControl->createActivate();

        // If the resolution is already known from metadata, initialize the
        // crop rectangle so the renderer starts with the correct frame size.
        QSize resolution = m_metaData.value(QMediaMetaData::Resolution).toSize();

        if (resolution.isValid())
            m_videoRendererControl->setCropRect(QRect(QPoint(), resolution));

    } else {
        // Unknown stream type.
        error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
    }

    // Attach the activate to the node and register the node in the topology;
    // if any of these steps fail the node is reset so a null is returned.
    if (!activate || FAILED(node->SetObject(activate.Get()))
        || FAILED(node->SetUINT32(MF_TOPONODE_STREAMID, sinkID))
        || FAILED(node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE))
        || FAILED(topology->AddNode(node.Get()))) {
        node.Reset();
    }

    // Drop our local reference to the audio activate — the node set above
    // holds its own. NOTE(review): the video activate appears to be owned by
    // m_videoRendererControl and is deliberately not reset here; confirm.
    if (activate && mediaType == Audio)
        activate.Reset();

    return node;
}
+
+// BindOutputNode
+// Sets the IMFStreamSink pointer on an output node.
+// IMFActivate pointer in the output node must be converted to an
+// IMFStreamSink pointer before the topology loader resolves the topology.
+HRESULT BindOutputNode(IMFTopologyNode *pNode)
+{
+ ComPtr<IUnknown> nodeObject;
+ ComPtr<IMFActivate> activate;
+ ComPtr<IMFStreamSink> stream;
+ ComPtr<IMFMediaSink> sink;
+
+ HRESULT hr = pNode->GetObject(&nodeObject);
+ if (FAILED(hr))
+ return hr;
+
+ hr = nodeObject->QueryInterface(IID_PPV_ARGS(&activate));
+ if (SUCCEEDED(hr)) {
+ DWORD dwStreamID = 0;
+
+ // Try to create the media sink.
+ hr = activate->ActivateObject(IID_PPV_ARGS(&sink));
+ if (SUCCEEDED(hr))
+ dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+
+ if (SUCCEEDED(hr)) {
+ // First check if the media sink already has a stream sink with the requested ID.
+ hr = sink->GetStreamSinkById(dwStreamID, &stream);
+ if (FAILED(hr)) {
+ // Create the stream sink.
+ hr = sink->AddStreamSink(dwStreamID, NULL, &stream);
+ }
+ }
+
+ // Replace the node's object pointer with the stream sink.
+ if (SUCCEEDED(hr)) {
+ hr = pNode->SetObject(stream.Get());
+ }
+ } else {
+ hr = nodeObject->QueryInterface(IID_PPV_ARGS(&stream));
+ }
+
+ return hr;
+}
+
+// BindOutputNodes
+// Sets the IMFStreamSink pointers on all of the output nodes in a topology.
+HRESULT BindOutputNodes(IMFTopology *pTopology)
+{
+ ComPtr<IMFCollection> collection;
+
+ // Get the collection of output nodes.
+ HRESULT hr = pTopology->GetOutputNodeCollection(&collection);
+
+ // Enumerate all of the nodes in the collection.
+ if (SUCCEEDED(hr)) {
+ DWORD cNodes;
+ hr = collection->GetElementCount(&cNodes);
+
+ if (SUCCEEDED(hr)) {
+ for (DWORD i = 0; i < cNodes; i++) {
+ ComPtr<IUnknown> element;
+ hr = collection->GetElement(i, &element);
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IMFTopologyNode> node;
+ hr = element->QueryInterface(IID_IMFTopologyNode, &node);
+ if (FAILED(hr))
+ break;
+
+ // Bind this node.
+ hr = BindOutputNode(node.Get());
+ if (FAILED(hr))
+ break;
+ }
+ }
+ }
+
+ return hr;
+}
+
+// This method binds output nodes to complete the topology,
+// then loads the topology and inserts MFT between the output node
+// and a filter connected to the output node.
+ComPtr<IMFTopology> MFPlayerSession::insertMFT(const ComPtr<IMFTopology> &topology,
+ TOPOID outputNodeId)
+{
+ bool isNewTopology = false;
+
+ ComPtr<IMFTopoLoader> topoLoader;
+ ComPtr<IMFTopology> resolvedTopology;
+ ComPtr<IMFCollection> outputNodes;
+
+ do {
+ if (FAILED(BindOutputNodes(topology.Get())))
+ break;
+
+ if (FAILED(MFCreateTopoLoader(&topoLoader)))
+ break;
+
+ if (FAILED(topoLoader->Load(topology.Get(), &resolvedTopology, NULL))) {
+ // Topology could not be resolved, adding ourselves a color converter
+ // to the topology might solve the problem
+ insertColorConverter(topology.Get(), outputNodeId);
+ if (FAILED(topoLoader->Load(topology.Get(), &resolvedTopology, NULL)))
+ break;
+ }
+
+ if (insertResizer(resolvedTopology.Get()))
+ isNewTopology = true;
+ } while (false);
+
+ if (isNewTopology) {
+ return resolvedTopology;
+ }
+
+ return topology;
+}
+
// This method checks if the topology contains a color converter transform (CColorConvertDMO),
// if it does it inserts a resizer transform (CResizerDMO) to handle dynamic frame size change
// of the video stream.
// Returns true if it inserted a resizer
bool MFPlayerSession::insertResizer(IMFTopology *topology)
{
    bool inserted = false;
    WORD elementCount = 0;
    ComPtr<IMFTopologyNode> node;
    ComPtr<IUnknown> object;
    ComPtr<IWMColorConvProps> colorConv;
    ComPtr<IMFTransform> resizer;
    ComPtr<IMFTopologyNode> resizerNode;
    ComPtr<IMFTopologyNode> inputNode;

    HRESULT hr = topology->GetNodeCount(&elementCount);
    if (FAILED(hr))
        return false;

    // Scan the topology for the first transform node that is a color
    // converter (identified by it exposing IWMColorConvProps).
    for (WORD i = 0; i < elementCount; ++i) {
        // Reset per-iteration smart pointers before reuse.
        node.Reset();
        object.Reset();

        if (FAILED(topology->GetNode(i, &node)))
            break;

        MF_TOPOLOGY_TYPE nodeType;
        if (FAILED(node->GetNodeType(&nodeType)))
            break;

        if (nodeType != MF_TOPOLOGY_TRANSFORM_NODE)
            continue;

        if (FAILED(node->GetObject(&object)))
            break;

        // Not a color converter — keep scanning.
        if (FAILED(object->QueryInterface(IID_PPV_ARGS(&colorConv))))
            continue;

        // Found one: create the resizer DMO and a transform node for it.
        if (FAILED(CoCreateInstance(CLSID_CResizerDMO, NULL, CLSCTX_INPROC_SERVER, IID_IMFTransform,
                                    &resizer)))
            break;

        if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &resizerNode)))
            break;

        if (FAILED(resizerNode->SetObject(resizer.Get())))
            break;

        if (FAILED(topology->AddNode(resizerNode.Get())))
            break;

        // Splice the resizer between the color converter's upstream node and
        // the color converter itself: upstream -> resizer -> converter.
        DWORD outputIndex = 0;
        if (FAILED(node->GetInput(0, &inputNode, &outputIndex))) {
            // Could not find the upstream node; undo the AddNode above.
            topology->RemoveNode(resizerNode.Get());
            break;
        }

        if (FAILED(inputNode->ConnectOutput(0, resizerNode.Get(), 0))) {
            topology->RemoveNode(resizerNode.Get());
            break;
        }

        if (FAILED(resizerNode->ConnectOutput(0, node.Get(), 0))) {
            // Re-establish the original upstream -> converter connection
            // before removing the dangling resizer node.
            inputNode->ConnectOutput(0, node.Get(), 0);
            topology->RemoveNode(resizerNode.Get());
            break;
        }

        inserted = true;
        break;
    }

    return inserted;
}
+
+// This method inserts a color converter (CColorConvertDMO) in the topology,
+// typically to convert to RGB format.
+// Usually this converter is automatically inserted when the topology is resolved but
+// for some reason it fails to do so in some cases, we then do it ourselves.
+void MFPlayerSession::insertColorConverter(IMFTopology *topology, TOPOID outputNodeId)
+{
+ ComPtr<IMFCollection> outputNodes;
+
+ if (FAILED(topology->GetOutputNodeCollection(&outputNodes)))
+ return;
+
+ DWORD elementCount = 0;
+ if (FAILED(outputNodes->GetElementCount(&elementCount)))
+ return;
+
+ for (DWORD n = 0; n < elementCount; n++) {
+ ComPtr<IUnknown> element;
+ ComPtr<IMFTopologyNode> node;
+ ComPtr<IMFTopologyNode> inputNode;
+ ComPtr<IMFTopologyNode> mftNode;
+ ComPtr<IMFTransform> converter;
+
+ do {
+ if (FAILED(outputNodes->GetElement(n, &element)))
+ break;
+
+ if (FAILED(element->QueryInterface(IID_IMFTopologyNode, &node)))
+ break;
+
+ TOPOID id;
+ if (FAILED(node->GetTopoNodeID(&id)))
+ break;
+
+ if (id != outputNodeId)
+ break;
+
+ DWORD outputIndex = 0;
+ if (FAILED(node->GetInput(0, &inputNode, &outputIndex)))
+ break;
+
+ if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &mftNode)))
+ break;
+
+ if (FAILED(CoCreateInstance(CLSID_CColorConvertDMO, NULL, CLSCTX_INPROC_SERVER,
+ IID_IMFTransform, &converter)))
+ break;
+
+ if (FAILED(mftNode->SetObject(converter.Get())))
+ break;
+
+ if (FAILED(topology->AddNode(mftNode.Get())))
+ break;
+
+ if (FAILED(inputNode->ConnectOutput(0, mftNode.Get(), 0)))
+ break;
+
+ if (FAILED(mftNode->ConnectOutput(0, node.Get(), 0)))
+ break;
+
+ } while (false);
+ }
+}
+
+void MFPlayerSession::stop(bool immediate)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "stop";
+#endif
+ if (!immediate && m_pendingState != NoPending) {
+ m_request.setCommand(CmdStop);
+ } else {
+ if (m_state.command == CmdStop)
+ return;
+
+ if (m_scrubbing)
+ scrub(false);
+
+ if (SUCCEEDED(m_session->Stop())) {
+
+ m_state.setCommand(CmdStop);
+ m_pendingState = CmdPending;
+ if (m_status != QMediaPlayer::EndOfMedia) {
+ m_position = 0;
+ positionChanged(0);
+ }
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to stop."), true);
+ }
+ }
+}
+
+void MFPlayerSession::start()
+{
+ if (status() == QMediaPlayer::LoadedMedia && m_updateRoutingOnStart) {
+ m_updateRoutingOnStart = false;
+ updateOutputRouting();
+ }
+
+ if (m_status == QMediaPlayer::EndOfMedia) {
+ m_position = 0; // restart from the beginning
+ positionChanged(0);
+ }
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "start";
+#endif
+
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdStart);
+ } else {
+ if (m_state.command == CmdStart)
+ return;
+
+ if (m_scrubbing) {
+ scrub(false);
+ m_position = position() * 10000;
+ }
+
+ if (m_restorePosition >= 0) {
+ m_position = m_restorePosition;
+ if (!m_updatingTopology)
+ m_restorePosition = -1;
+ }
+
+ PROPVARIANT varStart;
+ InitPropVariantFromInt64(m_position, &varStart);
+
+ if (SUCCEEDED(m_session->Start(&GUID_NULL, &varStart))) {
+ m_state.setCommand(CmdStart);
+ m_pendingState = CmdPending;
+ } else {
+ error(QMediaPlayer::ResourceError, tr("failed to start playback"), true);
+ }
+ PropVariantClear(&varStart);
+ }
+}
+
+void MFPlayerSession::pause()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "pause";
+#endif
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdPause);
+ } else {
+ if (m_state.command == CmdPause)
+ return;
+
+ if (SUCCEEDED(m_session->Pause())) {
+ m_state.setCommand(CmdPause);
+ m_pendingState = CmdPending;
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to pause."), false);
+ }
+ if (m_status == QMediaPlayer::EndOfMedia) {
+ setPosition(0);
+ positionChanged(0);
+ }
+ }
+}
+
+void MFPlayerSession::changeStatus(QMediaPlayer::MediaStatus newStatus)
+{
+ if (m_status == newStatus)
+ return;
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MFPlayerSession::changeStatus" << newStatus;
+#endif
+ m_status = newStatus;
+ statusChanged();
+}
+
// Returns the media status last set via changeStatus().
QMediaPlayer::MediaStatus MFPlayerSession::status() const
{
    return m_status;
}
+
+bool MFPlayerSession::createSession()
+{
+ close();
+
+ Q_ASSERT(m_session == NULL);
+
+ HRESULT hr = MFCreateMediaSession(NULL, &m_session);
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Unable to create mediasession."), true);
+ return false;
+ }
+
+ m_hCloseEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+
+ hr = m_session->BeginGetEvent(this, m_session.Get());
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Unable to pull session events."), false);
+ close();
+ return false;
+ }
+
+ m_sourceResolver = makeComObject<SourceResolver>();
+ QObject::connect(m_sourceResolver.Get(), &SourceResolver::mediaSourceReady, this,
+ &MFPlayerSession::handleMediaSourceReady);
+ QObject::connect(m_sourceResolver.Get(), &SourceResolver::error, this,
+ &MFPlayerSession::handleSourceError);
+
+ m_position = 0;
+ return true;
+}
+
+qint64 MFPlayerSession::position()
+{
+ if (m_request.command == CmdSeek || m_request.command == CmdSeekResume)
+ return m_request.start;
+
+ if (m_pendingState == SeekPending)
+ return m_state.start;
+
+ if (m_state.command == CmdStop)
+ return m_position / 10000;
+
+ if (m_presentationClock) {
+ MFTIME time, sysTime;
+ if (FAILED(m_presentationClock->GetCorrelatedTime(0, &time, &sysTime)))
+ return m_position / 10000;
+ return qint64(time / 10000);
+ }
+ return m_position / 10000;
+}
+
+void MFPlayerSession::setPosition(qint64 position)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPosition";
+#endif
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdSeek);
+ m_request.start = position;
+ } else {
+ setPositionInternal(position, CmdNone);
+ }
+}
+
+void MFPlayerSession::setPositionInternal(qint64 position, Command requestCmd)
+{
+ if (m_status == QMediaPlayer::EndOfMedia)
+ changeStatus(QMediaPlayer::LoadedMedia);
+ if (m_state.command == CmdStop && requestCmd != CmdSeekResume) {
+ m_position = position * 10000;
+ // Even though the position is not actually set on the session yet,
+ // report it to have changed anyway for UI controls to be updated
+ positionChanged(this->position());
+ return;
+ }
+
+ if (m_state.command == CmdPause)
+ scrub(true);
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPositionInternal";
+#endif
+
+ PROPVARIANT varStart;
+ varStart.vt = VT_I8;
+ varStart.hVal.QuadPart = LONGLONG(position * 10000);
+ if (SUCCEEDED(m_session->Start(NULL, &varStart))) {
+ PropVariantClear(&varStart);
+ // Store the pending state.
+ m_state.setCommand(CmdStart);
+ m_state.start = position;
+ m_pendingState = SeekPending;
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to seek."), true);
+ }
+}
+
+qreal MFPlayerSession::playbackRate() const
+{
+ if (m_scrubbing)
+ return m_restoreRate;
+ return m_state.rate;
+}
+
+void MFPlayerSession::setPlaybackRate(qreal rate)
+{
+ if (m_scrubbing) {
+ m_restoreRate = rate;
+ playbackRateChanged(rate);
+ return;
+ }
+ setPlaybackRateInternal(rate);
+}
+
+void MFPlayerSession::setPlaybackRateInternal(qreal rate)
+{
+ if (rate == m_request.rate)
+ return;
+
+ m_pendingRate = rate;
+ if (!m_rateSupport)
+ return;
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPlaybackRate";
+#endif
+ BOOL isThin = FALSE;
+
+ //from MSDN http://msdn.microsoft.com/en-us/library/aa965220%28v=vs.85%29.aspx
+ //Thinning applies primarily to video streams.
+ //In thinned mode, the source drops delta frames and deliver only key frames.
+ //At very high playback rates, the source might skip some key frames (for example, deliver every other key frame).
+
+ if (FAILED(m_rateSupport->IsRateSupported(FALSE, rate, NULL))) {
+ isThin = TRUE;
+ if (FAILED(m_rateSupport->IsRateSupported(isThin, rate, NULL))) {
+ qWarning() << "unable to set playbackrate = " << rate;
+ m_pendingRate = m_request.rate = m_state.rate;
+ return;
+ }
+ }
+ if (m_pendingState != NoPending) {
+ m_request.rate = rate;
+ m_request.isThin = isThin;
+ // Remember the current transport state (play, paused, etc), so that we
+ // can restore it after the rate change, if necessary. However, if
+ // anothercommand is already pending, that one takes precedent.
+ if (m_request.command == CmdNone)
+ m_request.setCommand(m_state.command);
+ } else {
+ //No pending operation. Commit the new rate.
+ commitRateChange(rate, isThin);
+ }
+}
+
// Applies a validated rate change to the session, performing whatever
// transport transitions (pause/stop/seek) the rate-control rules require
// first, then calling IMFRateControl::SetRate. Must only be called when no
// other operation is pending; follow-up transitions are queued in m_request.
void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
{
#ifdef DEBUG_MEDIAFOUNDATION
    qDebug() << "commitRateChange";
#endif
    Q_ASSERT(m_pendingState == NoPending);
    MFTIME hnsSystemTime = 0;
    MFTIME hnsClockTime = 0;
    Command cmdNow = m_state.command;
    bool resetPosition = false;
    // Allowed rate transitions:
    // Positive <-> negative: Stopped
    // Negative <-> zero: Stopped
    // Positive <-> zero: Paused or stopped
    if ((rate > 0 && m_state.rate <= 0) || (rate < 0 && m_state.rate >= 0)) {
        if (cmdNow == CmdStart) {
            // Get the current clock position. This will be the restart time.
            m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
            Q_ASSERT(hnsSystemTime != 0);

            // Queue the command that resumes playback after the transition.
            if (rate < 0 || m_state.rate < 0)
                m_request.setCommand(CmdSeekResume);
            else if (isThin || m_state.isThin)
                m_request.setCommand(CmdStartAndSeek);
            else
                m_request.setCommand(CmdStart);

            // We need to stop only when dealing with negative rates
            if (rate >= 0 && m_state.rate >= 0)
                pause();
            else
                stop();

            // If we deal with negative rates, we stopped the session and consequently
            // reset the position to zero. We then need to resume to the current position.
            m_request.start = hnsClockTime / 10000;
        } else if (cmdNow == CmdPause) {
            if (rate < 0 || m_state.rate < 0) {
                // The current state is paused.
                // For this rate change, the session must be stopped. However, the
                // session cannot transition back from stopped to paused.
                // Therefore, this rate transition is not supported while paused.
                qWarning() << "Unable to change rate from positive to negative or vice versa in paused state";
                rate = m_state.rate;
                isThin = m_state.isThin;
                goto done;
            }

            // This happens when resuming playback after scrubbing in pause mode.
            // This transition requires the session to be paused. Even though our
            // internal state is set to paused, the session might not be so we need
            // to enforce it
            if (rate > 0 && m_state.rate == 0) {
                m_state.setCommand(CmdNone);
                pause();
            }
        }
    } else if (rate == 0 && m_state.rate > 0) {
        if (cmdNow != CmdPause) {
            // Transition to paused.
            // This transition requires the paused state.
            // Pause and set the rate.
            pause();

            // Request: Switch back to current state.
            m_request.setCommand(cmdNow);
        }
    } else if (rate == 0 && m_state.rate < 0) {
        // Changing rate from negative to zero requires to stop the session
        m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);

        m_request.setCommand(CmdSeekResume);

        stop();

        // Resume to the current position (stop() will reset the position to 0)
        m_request.start = hnsClockTime / 10000;
    } else if (!isThin && m_state.isThin) {
        if (cmdNow == CmdStart) {
            // When thinning, only key frames are read and presented. Going back
            // to normal playback requires to reset the current position to force
            // the pipeline to decode the actual frame at the current position
            // (which might be earlier than the last decoded key frame)
            resetPosition = true;
        } else if (cmdNow == CmdPause) {
            // If paused, don't reset the position until we resume, otherwise
            // a new frame will be rendered
            m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
            m_request.setCommand(CmdSeekResume);
            m_request.start = hnsClockTime / 10000;
        }

    }

    // Set the rate.
    if (FAILED(m_rateControl->SetRate(isThin, rate))) {
        qWarning() << "failed to set playbackrate = " << rate;
        rate = m_state.rate;
        isThin = m_state.isThin;
        goto done;
    }

    if (resetPosition) {
        m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
        setPosition(hnsClockTime / 10000);
    }

done:
    // Adjust our current rate and requested rate.
    m_pendingRate = m_request.rate = m_state.rate = rate;
    if (rate != 0)
        m_state.isThin = isThin;
    playbackRateChanged(rate);
}
+
+void MFPlayerSession::scrub(bool enableScrub)
+{
+ if (m_scrubbing == enableScrub)
+ return;
+
+ m_scrubbing = enableScrub;
+
+ if (!canScrub()) {
+ if (!enableScrub)
+ m_pendingRate = m_restoreRate;
+ return;
+ }
+
+ if (enableScrub) {
+ // Enter scrubbing mode. Cache the rate.
+ m_restoreRate = m_request.rate;
+ setPlaybackRateInternal(0.0f);
+ } else {
+ // Leaving scrubbing mode. Restore the old rate.
+ setPlaybackRateInternal(m_restoreRate);
+ }
+}
+
+void MFPlayerSession::setVolume(float volume)
+{
+ if (m_volume == volume)
+ return;
+ m_volume = volume;
+
+ if (!m_muted)
+ setVolumeInternal(volume);
+}
+
+void MFPlayerSession::setMuted(bool muted)
+{
+ if (m_muted == muted)
+ return;
+ m_muted = muted;
+
+ setVolumeInternal(muted ? 0 : m_volume);
+}
+
+void MFPlayerSession::setVolumeInternal(float volume)
+{
+ if (m_volumeControl) {
+ quint32 channelCount = 0;
+ if (!SUCCEEDED(m_volumeControl->GetChannelCount(&channelCount))
+ || channelCount == 0)
+ return;
+
+ for (quint32 i = 0; i < channelCount; ++i)
+ m_volumeControl->SetChannelVolume(i, volume);
+ }
+}
+
+float MFPlayerSession::bufferProgress()
+{
+ if (!m_netsourceStatistics)
+ return 0;
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ PROPERTYKEY key;
+ key.fmtid = MFNETSOURCE_STATISTICS;
+ key.pid = MFNETSOURCE_BUFFERPROGRESS_ID;
+ int progress = -1;
+ // GetValue returns S_FALSE if the property is not available, which has
+ // a value > 0. We therefore can't use the SUCCEEDED macro here.
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ progress = var.lVal;
+ PropVariantClear(&var);
+ }
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "bufferProgress: progress = " << progress;
+#endif
+
+ return progress/100.;
+}
+
+QMediaTimeRange MFPlayerSession::availablePlaybackRanges()
+{
+ // defaults to the whole media
+ qint64 start = 0;
+ qint64 end = qint64(m_duration / 10000);
+
+ if (m_netsourceStatistics) {
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ PROPERTYKEY key;
+ key.fmtid = MFNETSOURCE_STATISTICS;
+ key.pid = MFNETSOURCE_SEEKRANGESTART_ID;
+ // GetValue returns S_FALSE if the property is not available, which has
+ // a value > 0. We therefore can't use the SUCCEEDED macro here.
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ start = qint64(var.uhVal.QuadPart / 10000);
+ PropVariantClear(&var);
+ PropVariantInit(&var);
+ key.pid = MFNETSOURCE_SEEKRANGEEND_ID;
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ end = qint64(var.uhVal.QuadPart / 10000);
+ PropVariantClear(&var);
+ }
+ }
+ }
+
+ return QMediaTimeRange(start, end);
+}
+
+HRESULT MFPlayerSession::QueryInterface(REFIID riid, void** ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFAsyncCallback) {
+ *ppvObject = static_cast<IMFAsyncCallback*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ return S_OK;
+}
+
// IUnknown::AddRef implementation: thread-safe reference-count increment.
ULONG MFPlayerSession::AddRef(void)
{
    return InterlockedIncrement(&m_cRef);
}
+
// IUnknown::Release implementation. When the count drops to zero the object
// is destroyed via deleteLater() (through the Qt event loop) rather than
// immediately, since Release may be invoked from a Media Foundation callback.
ULONG MFPlayerSession::Release(void)
{
    LONG cRef = InterlockedDecrement(&m_cRef);
    if (cRef == 0) {
        deleteLater();

        // In rare cases the session has queued events to be run between deleteLater and deleting,
        // so we set the parent control to nullptr in order to prevent crashes in the cases.
        m_playerControl = nullptr;
    }
    return cRef;
}
+
+HRESULT MFPlayerSession::Invoke(IMFAsyncResult *pResult)
+{
+ if (pResult->GetStateNoAddRef() != m_session.Get())
+ return S_OK;
+
+ ComPtr<IMFMediaEvent> pEvent;
+ // Get the event from the event queue.
+ HRESULT hr = m_session->EndGetEvent(pResult, &pEvent);
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+
+ MediaEventType meType = MEUnknown;
+ hr = pEvent->GetType(&meType);
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+
+ if (meType == MESessionClosed) {
+ SetEvent(m_hCloseEvent.get());
+ return S_OK;
+ } else {
+ hr = m_session->BeginGetEvent(this, m_session.Get());
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+ }
+
+ if (!m_closing) {
+ emit sessionEvent(pEvent);
+ }
+ return S_OK;
+}
+
// Central dispatcher for Media Foundation session events (delivered from
// Invoke() via the sessionEvent signal). The first switch handles events
// regardless of their status code; the second switch only runs for events
// whose status indicates success (buffering, end of media, topology ready).
void MFPlayerSession::handleSessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent)
{
    HRESULT hrStatus = S_OK;
    HRESULT hr = sessionEvent->GetStatus(&hrStatus);
    if (FAILED(hr) || !m_session) {
        return;
    }

    MediaEventType meType = MEUnknown;
    hr = sessionEvent->GetType(&meType);
#ifdef DEBUG_MEDIAFOUNDATION
    if (FAILED(hrStatus))
        qDebug() << "handleSessionEvent: MediaEventType = " << meType << "Failed";
    else
        qDebug() << "handleSessionEvent: MediaEventType = " << meType;
#endif

    switch (meType) {
    case MENonFatalError: {
        // Report but keep playing.
        PROPVARIANT var;
        PropVariantInit(&var);
        sessionEvent->GetValue(&var);
        qWarning() << "handleSessionEvent: non fatal error = " << var.ulVal;
        PropVariantClear(&var);
        error(QMediaPlayer::ResourceError, tr("Media session non-fatal error."), false);
        }
        break;
    case MESourceUnknown:
        changeStatus(QMediaPlayer::InvalidMedia);
        break;
    case MEError:
        if (hrStatus == MF_E_ALREADY_INITIALIZED) {
            // Workaround for a possible WMF issue that causes an error
            // with some specific videos, which play fine otherwise.
#ifdef DEBUG_MEDIAFOUNDATION
            qDebug() << "handleSessionEvent: ignoring MF_E_ALREADY_INITIALIZED";
#endif
            break;
        }
        changeStatus(QMediaPlayer::InvalidMedia);
        qWarning() << "handleSessionEvent: serious error = "
                   << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hrStatus);
        // Map well-known failure HRESULTs to specific user-facing errors.
        switch (hrStatus) {
        case MF_E_NET_READ:
            error(QMediaPlayer::NetworkError, tr("Error reading from the network."), true);
            break;
        case MF_E_NET_WRITE:
            error(QMediaPlayer::NetworkError, tr("Error writing to the network."), true);
            break;
        case NS_E_FIREWALL:
            error(QMediaPlayer::NetworkError, tr("Network packets might be blocked by a firewall."), true);
            break;
        case MF_E_MEDIAPROC_WRONGSTATE:
            error(QMediaPlayer::ResourceError, tr("Media session state error."), true);
            break;
        case MF_E_INVALID_STREAM_DATA:
            error(QMediaPlayer::ResourceError, tr("Invalid stream data."), true);
            break;
        default:
            error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
            break;
        }
        break;
    case MESessionRateChanged:
        // If the rate change succeeded, we've already got the rate
        // cached. If it failed, try to get the actual rate.
        if (FAILED(hrStatus)) {
            PROPVARIANT var;
            PropVariantInit(&var);
            if (SUCCEEDED(sessionEvent->GetValue(&var)) && (var.vt == VT_R4)) {
                m_state.rate = var.fltVal;
            }
            playbackRateChanged(playbackRate());
        }
        break;
    case MESessionScrubSampleComplete :
        // A frame was rendered while scrubbing; complete the pending start.
        if (m_scrubbing)
            updatePendingCommands(CmdStart);
        break;
    case MESessionStarted:
        if (m_status == QMediaPlayer::EndOfMedia
                || m_status == QMediaPlayer::LoadedMedia) {
            // If the session started, then enough data is buffered to play
            changeStatus(QMediaPlayer::BufferedMedia);
        }

        updatePendingCommands(CmdStart);
        // playback started, we can now set again the procAmpValues if they have been
        // changed previously (these are lost when loading a new media)
//        if (m_playerService->videoWindowControl()) {
//            m_playerService->videoWindowControl()->applyImageControls();
//        }
        m_signalPositionChangeTimer.start();
        break;
    case MESessionStopped:
        if (m_status != QMediaPlayer::EndOfMedia) {
            m_position = 0;

            // Reset to Loaded status unless we are loading a new media
            // or changing the playback rate to negative values (stop required)
            if (m_status != QMediaPlayer::LoadingMedia && m_request.command != CmdSeekResume)
                changeStatus(QMediaPlayer::LoadedMedia);
        }
        updatePendingCommands(CmdStop);
        m_signalPositionChangeTimer.stop();
        break;
    case MESessionPaused:
        m_position = position() * 10000;
        updatePendingCommands(CmdPause);
        m_signalPositionChangeTimer.stop();
        if (m_status == QMediaPlayer::LoadedMedia)
            setPosition(position());
        break;
    case MEReconnectStart:
#ifdef DEBUG_MEDIAFOUNDATION
        qDebug() << "MEReconnectStart" << ((hrStatus == S_OK) ? "OK" : "Failed");
#endif
        break;
    case MEReconnectEnd:
#ifdef DEBUG_MEDIAFOUNDATION
        qDebug() << "MEReconnectEnd" << ((hrStatus == S_OK) ? "OK" : "Failed");
#endif
        break;
    case MESessionTopologySet:
        if (FAILED(hrStatus)) {
            changeStatus(QMediaPlayer::InvalidMedia);
            error(QMediaPlayer::FormatError, tr("Unsupported media, a codec is missing."), true);
        } else {
            // Topology is resolved and successfuly set, this happens only after loading a new media.
            // Make sure we always start the media from the beginning
            m_lastPosition = -1;
            m_position = 0;
            positionChanged(0);
            changeStatus(QMediaPlayer::LoadedMedia);
        }
        break;
    }

    // Everything below only applies to events reporting success.
    if (FAILED(hrStatus)) {
        return;
    }

    switch (meType) {
    case MEBufferingStarted:
        changeStatus(QMediaPlayer::StalledMedia);
        bufferProgressChanged(bufferProgress());
        break;
    case MEBufferingStopped:
        changeStatus(QMediaPlayer::BufferedMedia);
        bufferProgressChanged(bufferProgress());
        break;
    case MESessionEnded:
        // Clear all transport state and pending requests.
        m_pendingState = NoPending;
        m_state.command = CmdStop;
        m_state.prevCmd = CmdNone;
        m_request.command = CmdNone;
        m_request.prevCmd = CmdNone;

        //keep reporting the final position after end of media
        m_position = qint64(m_duration);
        positionChanged(position());

        changeStatus(QMediaPlayer::EndOfMedia);
        break;
    case MEEndOfPresentationSegment:
        break;
    case MESessionTopologyStatus: {
        UINT32 status;
        if (SUCCEEDED(sessionEvent->GetUINT32(MF_EVENT_TOPOLOGY_STATUS, &status))) {
            if (status == MF_TOPOSTATUS_READY) {
                // The topology is ready: acquire the session services
                // (clock, rate control/support, statistics, volume).
                ComPtr<IMFClock> clock;
                if (SUCCEEDED(m_session->GetClock(&clock))) {
                    clock->QueryInterface(IID_IMFPresentationClock, &m_presentationClock);
                }

                if (SUCCEEDED(MFGetService(m_session.Get(), MF_RATE_CONTROL_SERVICE,
                                           IID_PPV_ARGS(&m_rateControl)))) {
                    if (SUCCEEDED(MFGetService(m_session.Get(), MF_RATE_CONTROL_SERVICE,
                                               IID_PPV_ARGS(&m_rateSupport)))) {
                        // Scrubbing requires support for a zero rate.
                        if (SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
                            m_canScrub = true;
                    }
                    BOOL isThin = FALSE;
                    float rate = 1;
                    if (SUCCEEDED(m_rateControl->GetRate(&isThin, &rate))) {
                        // Apply any rate requested before the topology was ready.
                        if (m_pendingRate != rate) {
                            m_state.rate = m_request.rate = rate;
                            setPlaybackRate(m_pendingRate);
                        }
                    }
                }
                MFGetService(m_session.Get(), MFNETSOURCE_STATISTICS_SERVICE,
                             IID_PPV_ARGS(&m_netsourceStatistics));

                if (SUCCEEDED(MFGetService(m_session.Get(), MR_STREAM_VOLUME_SERVICE,
                                           IID_PPV_ARGS(&m_volumeControl))))
                    setVolumeInternal(m_muted ? 0 : m_volume);

                m_updatingTopology = false;
                stop();
            }
        }
    }
        break;
    default:
        break;
    }
}
+
// Called when the session confirms a state transition (started, paused,
// stopped). Clears the pending flag and then executes whatever was queued
// while the transition was in flight: first a rate change, then the next
// transport command.
void MFPlayerSession::updatePendingCommands(Command command)
{
    positionChanged(position());
    // Only react when the confirmed command matches our current state and
    // a transition was actually pending.
    if (m_state.command != command || m_pendingState == NoPending)
        return;

    // Seek while paused completed
    if (m_pendingState == SeekPending && m_state.prevCmd == CmdPause) {
        m_pendingState = NoPending;
        // A seek operation actually restarts playback. If scrubbing is possible, playback rate
        // is set to 0.0 at this point and we just need to reset the current state to Pause.
        // If scrubbing is not possible, the playback rate was not changed and we explicitly need
        // to re-pause playback.
        if (!canScrub())
            pause();
        else
            m_state.setCommand(CmdPause);
    }

    m_pendingState = NoPending;

    //First look for rate changes.
    if (m_request.rate != m_state.rate) {
        commitRateChange(m_request.rate, m_request.isThin);
    }

    // Now look for new requests.
    // (commitRateChange above may itself have started a transition, in which
    // case m_pendingState is no longer NoPending and the request stays queued.)
    if (m_pendingState == NoPending) {
        switch (m_request.command) {
        case CmdStart:
            start();
            break;
        case CmdPause:
            pause();
            break;
        case CmdStop:
            stop();
            break;
        case CmdSeek:
        case CmdSeekResume:
            setPositionInternal(m_request.start, m_request.command);
            break;
        case CmdStartAndSeek:
            start();
            setPositionInternal(m_request.start, m_request.command);
            break;
        default:
            break;
        }
        m_request.setCommand(CmdNone);
    }

}
+
+// Scrubbing (playback at rate 0.0) is only possible when both rate service
+// interfaces were obtained and IsRateSupported(TRUE, 0, ...) succeeded while
+// the topology was being set up (which sets m_canScrub).
+bool MFPlayerSession::canScrub() const
+{
+    return m_canScrub && m_rateSupport && m_rateControl;
+}
+
+// Resets all per-media state: command/request queues, per-track info and
+// metadata, and every session service interface obtained for the previous
+// media. Emits metaDataChanged() only if metadata was actually dropped.
+void MFPlayerSession::clear()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+    qDebug() << "MFPlayerSession::clear";
+#endif
+    m_mediaTypes = 0;
+    m_canScrub = false;
+
+    m_pendingState = NoPending;
+    m_state.command = CmdStop;
+    m_state.prevCmd = CmdNone;
+    m_request.command = CmdNone;
+    m_request.prevCmd = CmdNone;
+
+    // Reset track bookkeeping for audio, video and subtitle streams alike.
+    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+        m_trackInfo[i].metaData.clear();
+        m_trackInfo[i].nativeIndexes.clear();
+        m_trackInfo[i].currentIndex = -1;
+        m_trackInfo[i].sourceNodeId = TOPOID(-1);
+        m_trackInfo[i].outputNodeId = TOPOID(-1);
+        m_trackInfo[i].format = GUID_NULL;
+    }
+
+    if (!m_metaData.isEmpty()) {
+        m_metaData.clear();
+        metaDataChanged();
+    }
+
+    // Release the COM service interfaces tied to the old media session.
+    m_presentationClock.Reset();
+    m_rateControl.Reset();
+    m_rateSupport.Reset();
+    m_volumeControl.Reset();
+    m_netsourceStatistics.Reset();
+}
+
+// Switches the audio output device. Disconnects signals from the previous
+// output's QAudioOutput, then syncs mute/volume/routing from the new one and
+// tracks its future changes via signal connections.
+void MFPlayerSession::setAudioOutput(QPlatformAudioOutput *device)
+{
+    if (m_audioOutput == device)
+        return;
+
+    if (m_audioOutput)
+        m_audioOutput->q->disconnect(this);
+
+    m_audioOutput = device;
+    if (m_audioOutput) {
+        // Apply the new output's current settings immediately ...
+        setMuted(m_audioOutput->q->isMuted());
+        setVolume(m_audioOutput->q->volume());
+        updateOutputRouting();
+        // ... and follow subsequent changes.
+        connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &MFPlayerSession::updateOutputRouting);
+        connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &MFPlayerSession::setVolume);
+        connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &MFPlayerSession::setMuted);
+    }
+}
+
+// Re-applies the current audio track selection, which rebuilds the audio
+// branch of the topology and thereby routes audio to the current device.
+void MFPlayerSession::updateOutputRouting()
+{
+    int currentAudioTrack = m_trackInfo[QPlatformMediaPlayer::AudioStream].currentIndex;
+    if (currentAudioTrack > -1)
+        setActiveTrack(QPlatformMediaPlayer::AudioStream, currentAudioTrack);
+}
+
+// Forwards the sink to the video renderer control, which owns presentation.
+void MFPlayerSession::setVideoSink(QVideoSink *sink)
+{
+    m_videoRendererControl->setSink(sink);
+}
+
+// Selects the active stream of the given type by editing the session's full
+// topology in place: the old source/output nodes for the track are removed,
+// the new native stream is selected on the presentation descriptor, and a
+// fresh source->output branch is connected and committed with
+// MFSESSION_SETTOPOLOGY_IMMEDIATE. index == -1 deselects the track entirely.
+void MFPlayerSession::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
+{
+    if (!m_session)
+        return;
+
+    // Only audio track selection is currently supported.
+    if (type != QPlatformMediaPlayer::AudioStream)
+        return;
+
+    const auto &nativeIndexes = m_trackInfo[type].nativeIndexes;
+
+    if (index < -1 || index >= nativeIndexes.count())
+        return;
+
+    // Updating the topology fails if there is a HEVC video stream,
+    // which causes other issues. Ignoring the change, for now.
+    if (m_trackInfo[QPlatformMediaPlayer::VideoStream].format == MFVideoFormat_HEVC)
+        return;
+
+    ComPtr<IMFTopology> topology;
+
+    if (SUCCEEDED(m_session->GetFullTopology(QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT, 0, &topology))) {
+
+        // Remember where we were so playback can resume at the same spot
+        // after the topology swap (factor 10000 converts position() to
+        // Media Foundation 100 ns units — assumes position() is in ms).
+        m_restorePosition = position() * 10000;
+
+        if (m_state.command == CmdStart)
+            stop();
+
+        // Drop the existing output and source nodes of this track, if any.
+        if (m_trackInfo[type].outputNodeId != TOPOID(-1)) {
+            ComPtr<IMFTopologyNode> node;
+            if (SUCCEEDED(topology->GetNodeByID(m_trackInfo[type].outputNodeId, &node))) {
+                topology->RemoveNode(node.Get());
+                m_trackInfo[type].outputNodeId = TOPOID(-1);
+            }
+        }
+        if (m_trackInfo[type].sourceNodeId != TOPOID(-1)) {
+            ComPtr<IMFTopologyNode> node;
+            if (SUCCEEDED(topology->GetNodeByID(m_trackInfo[type].sourceNodeId, &node))) {
+                topology->RemoveNode(node.Get());
+                m_trackInfo[type].sourceNodeId = TOPOID(-1);
+            }
+        }
+
+        IMFMediaSource *mediaSource = m_sourceResolver->mediaSource();
+
+        ComPtr<IMFPresentationDescriptor> sourcePD;
+        if (SUCCEEDED(mediaSource->CreatePresentationDescriptor(&sourcePD))) {
+
+            // Deselect the previously active native stream before switching.
+            if (m_trackInfo[type].currentIndex >= 0 && m_trackInfo[type].currentIndex < nativeIndexes.count())
+                sourcePD->DeselectStream(nativeIndexes.at(m_trackInfo[type].currentIndex));
+
+            m_trackInfo[type].currentIndex = index;
+
+            if (index == -1) {
+                // No track requested: commit the topology with the branch removed.
+                m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, topology.Get());
+            } else {
+                int nativeIndex = nativeIndexes.at(index);
+                sourcePD->SelectStream(nativeIndex);
+
+                ComPtr<IMFStreamDescriptor> streamDesc;
+                BOOL selected = FALSE;
+
+                // Build a new source node -> audio output node branch and
+                // commit the topology only if every step succeeds.
+                if (SUCCEEDED(sourcePD->GetStreamDescriptorByIndex(nativeIndex, &selected, &streamDesc))) {
+                    ComPtr<IMFTopologyNode> sourceNode = addSourceNode(
+                            topology.Get(), mediaSource, sourcePD.Get(), streamDesc.Get());
+                    if (sourceNode) {
+                        ComPtr<IMFTopologyNode> outputNode =
+                                addOutputNode(MFPlayerSession::Audio, topology.Get(), 0);
+                        if (outputNode) {
+                            if (SUCCEEDED(sourceNode->ConnectOutput(0, outputNode.Get(), 0))) {
+                                sourceNode->GetTopoNodeID(&m_trackInfo[type].sourceNodeId);
+                                outputNode->GetTopoNodeID(&m_trackInfo[type].outputNodeId);
+                                m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE,
+                                                       topology.Get());
+                            }
+                        }
+                    }
+                }
+            }
+            // The session will deliver MF_TOPOSTATUS_READY asynchronously.
+            m_updatingTopology = true;
+        }
+    }
+}
+
+// Returns the currently selected track index for the given type,
+// or -1 for an out-of-range type or when no track is selected.
+int MFPlayerSession::activeTrack(QPlatformMediaPlayer::TrackType type)
+{
+    if (type >= QPlatformMediaPlayer::NTrackTypes)
+        return -1;
+    return m_trackInfo[type].currentIndex;
+}
+
+// Returns the number of known tracks of the given type (one metadata entry
+// is kept per track), or -1 for an out-of-range type.
+int MFPlayerSession::trackCount(QPlatformMediaPlayer::TrackType type)
+{
+    if (type >= QPlatformMediaPlayer::NTrackTypes)
+        return -1;
+    return m_trackInfo[type].metaData.count();
+}
+
+// Returns the metadata of one track, or an empty QMediaMetaData when either
+// the track type or the track number is out of range.
+QMediaMetaData MFPlayerSession::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
+{
+    if (type >= QPlatformMediaPlayer::NTrackTypes)
+        return {};
+
+    if (trackNumber < 0 || trackNumber >= m_trackInfo[type].metaData.count())
+        return {};
+
+    return m_trackInfo[type].metaData.at(trackNumber);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfplayersession_p.cpp"
diff --git a/src/plugins/multimedia/windows/player/mfplayersession_p.h b/src/plugins/multimedia/windows/player/mfplayersession_p.h
new file mode 100644
index 000000000..50141a7fb
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayersession_p.h
@@ -0,0 +1,240 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFPLAYERSESSION_H
+#define MFPLAYERSESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+
+#include "qmediaplayer.h"
+#include "qmediatimerange.h"
+
+#include <QtCore/qcoreevent.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qwaitcondition.h>
+#include <QtMultimedia/qaudioformat.h>
+#include <QtMultimedia/qvideoframeformat.h>
+#include <qaudiodevice.h>
+#include <qtimer.h>
+#include "mfplayercontrol_p.h"
+#include <private/qcomptr_p.h>
+#include <evrhelpers_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QUrl;
+
+class SourceResolver;
+class MFVideoRendererControl;
+class MFPlayerControl;
+class MFPlayerService;
+class AudioSampleGrabberCallback;
+class MFTransform;
+
+// Core Media Foundation playback session. Owns the IMFMediaSession and the
+// service interfaces obtained from it (clock, rate control, volume, network
+// statistics), drives the asynchronous command/request state machine, and
+// forwards status/metadata/track changes to the owning MFPlayerControl.
+// Implements IMFAsyncCallback to receive session events on an MF worker
+// thread (Invoke re-posts them to the Qt thread via the sessionEvent signal).
+class MFPlayerSession : public QObject, public IMFAsyncCallback
+{
+    Q_OBJECT
+    friend class SourceResolver;
+public:
+    MFPlayerSession(MFPlayerControl *playerControl = 0);
+
+    // --- IUnknown / IMFAsyncCallback (COM) ---
+    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+    STDMETHODIMP_(ULONG) AddRef(void) override;
+
+    STDMETHODIMP_(ULONG) Release(void) override;
+
+    STDMETHODIMP Invoke(IMFAsyncResult *pResult) override;
+
+    // No special queue/flags requirements for callback dispatch.
+    STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue) override
+    {
+        Q_UNUSED(pdwFlags);
+        Q_UNUSED(pdwQueue);
+        return E_NOTIMPL;
+    }
+
+    // --- Playback control ---
+    void load(const QUrl &media, QIODevice *stream);
+    void stop(bool immediate = false);
+    void start();
+    void pause();
+
+    QMediaPlayer::MediaStatus status() const;
+    qint64 position();
+    void setPosition(qint64 position);
+    qreal playbackRate() const;
+    void setPlaybackRate(qreal rate);
+    float bufferProgress();
+    QMediaTimeRange availablePlaybackRanges();
+
+    void changeStatus(QMediaPlayer::MediaStatus newStatus);
+
+    void close();
+
+    void setAudioOutput(QPlatformAudioOutput *device);
+
+    QMediaMetaData metaData() const { return m_metaData; }
+
+    void setVideoSink(QVideoSink *sink);
+
+    // --- Track handling ---
+    void setActiveTrack(QPlatformMediaPlayer::TrackType type, int index);
+    int activeTrack(QPlatformMediaPlayer::TrackType type);
+    int trackCount(QPlatformMediaPlayer::TrackType);
+    QMediaMetaData trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber);
+
+    void setPlayerControl(MFPlayerControl *playerControl) { m_playerControl = playerControl; }
+
+    // Thin forwarding helpers; all are no-ops after the control is detached.
+    void statusChanged() { if (m_playerControl) m_playerControl->handleStatusChanged(); }
+    void tracksChanged() { if (m_playerControl) m_playerControl->handleTracksChanged(); }
+    void audioAvailable() { if (m_playerControl) m_playerControl->handleAudioAvailable(); }
+    void videoAvailable() { if (m_playerControl) m_playerControl->handleVideoAvailable(); }
+    void durationUpdate(qint64 duration) { if (m_playerControl) m_playerControl->handleDurationUpdate(duration); }
+    void seekableUpdate(bool seekable) { if (m_playerControl) m_playerControl->handleSeekableUpdate(seekable); }
+    void error(QMediaPlayer::Error error, QString errorString, bool isFatal) { if (m_playerControl) m_playerControl->handleError(error, errorString, isFatal); }
+    void playbackRateChanged(qreal rate) { if (m_playerControl) m_playerControl->playbackRateChanged(rate); }
+    void bufferProgressChanged(float percentFilled) { if (m_playerControl) m_playerControl->bufferProgressChanged(percentFilled); }
+    void metaDataChanged() { if (m_playerControl) m_playerControl->metaDataChanged(); }
+    void positionChanged(qint64 position) { if (m_playerControl) m_playerControl->positionChanged(position); }
+
+public Q_SLOTS:
+    void setVolume(float volume);
+    void setMuted(bool muted);
+    void updateOutputRouting();
+
+Q_SIGNALS:
+    // Emitted from Invoke() to hand session events over to the Qt thread.
+    void sessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent);
+
+private Q_SLOTS:
+    void handleMediaSourceReady();
+    void handleSessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent);
+    void handleSourceError(long hr);
+    void timeout();
+
+private:
+    long m_cRef;   // COM reference count
+    MFPlayerControl *m_playerControl = nullptr;
+    MFVideoRendererControl *m_videoRendererControl = nullptr;
+    ComPtr<IMFMediaSession> m_session;
+    // Service interfaces queried from the session once a topology is ready.
+    ComPtr<IMFPresentationClock> m_presentationClock;
+    ComPtr<IMFRateControl> m_rateControl;
+    ComPtr<IMFRateSupport> m_rateSupport;
+    ComPtr<IMFAudioStreamVolume> m_volumeControl;
+    ComPtr<IPropertyStore> m_netsourceStatistics;
+    qint64 m_position = 0;
+    qint64 m_restorePosition = -1;  // position to restore after a topology swap, in 100 ns units
+    qint64 m_timeCounter = 0;
+    UINT64 m_duration = 0;
+    bool m_updatingTopology = false;
+    bool m_updateRoutingOnStart = false;
+
+    // Asynchronous commands driven through the session.
+    enum Command
+    {
+        CmdNone = 0,
+        CmdStop,
+        CmdStart,
+        CmdPause,
+        CmdSeek,
+        CmdSeekResume,
+        CmdStartAndSeek
+    };
+
+    void clear();
+    void setPositionInternal(qint64 position, Command requestCmd);
+    void setPlaybackRateInternal(qreal rate);
+    void commitRateChange(qreal rate, BOOL isThin);
+    bool canScrub() const;
+    void scrub(bool enableScrub);
+    bool m_scrubbing;
+    float m_restoreRate;
+
+    ComPtr<SourceResolver> m_sourceResolver;
+    EventHandle m_hCloseEvent;
+    bool m_closing;
+
+    enum MediaType
+    {
+        Unknown = 0,
+        Audio = 1,
+        Video = 2,
+    };
+    DWORD m_mediaTypes;   // bitmask of MediaType values found in the source
+
+    enum PendingState
+    {
+        NoPending = 0,
+        CmdPending,
+        SeekPending,
+    };
+
+    // Snapshot of a playback command plus its rate/position parameters.
+    struct SeekState
+    {
+        void setCommand(Command cmd) {
+            prevCmd = command;
+            command = cmd;
+        }
+        Command command;
+        Command prevCmd;
+        float rate; // Playback rate
+        BOOL isThin; // Thinned playback?
+        qint64 start; // Start position
+    };
+    SeekState m_state; // Current nominal state.
+    SeekState m_request; // Pending request.
+    PendingState m_pendingState;
+    float m_pendingRate;
+    void updatePendingCommands(Command command);
+
+    // Per-track bookkeeping: metadata, native MF stream indexes, and the
+    // topology node IDs of the branch currently wired for this track.
+    struct TrackInfo
+    {
+        QList<QMediaMetaData> metaData;
+        QList<int> nativeIndexes;
+        int currentIndex = -1;
+        TOPOID sourceNodeId = -1;
+        TOPOID outputNodeId = -1;
+        GUID format = GUID_NULL;
+    };
+    TrackInfo m_trackInfo[QPlatformMediaPlayer::NTrackTypes];
+
+    QMediaPlayer::MediaStatus m_status;
+    bool m_canScrub;
+    float m_volume = 1.;
+    bool m_muted = false;
+
+    QPlatformAudioOutput *m_audioOutput = nullptr;
+    QMediaMetaData m_metaData;
+
+    void setVolumeInternal(float volume);
+
+    // Topology construction helpers.
+    bool createSession();
+    void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD);
+    bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language, GUID *format) const;
+    ComPtr<IMFTopologyNode> addSourceNode(IMFTopology *topology, IMFMediaSource *source,
+                                          IMFPresentationDescriptor *presentationDesc,
+                                          IMFStreamDescriptor *streamDesc);
+    ComPtr<IMFTopologyNode> addOutputNode(MediaType mediaType, IMFTopology *topology, DWORD sinkID);
+
+    QAudioFormat audioFormatForMFMediaType(IMFMediaType *mediaType) const;
+
+    ComPtr<IMFTopology> insertMFT(const ComPtr<IMFTopology> &topology, TOPOID outputNodeId);
+    bool insertResizer(IMFTopology *topology);
+    void insertColorConverter(IMFTopology *topology, TOPOID outputNodeId);
+
+    QTimer m_signalPositionChangeTimer;
+    qint64 m_lastPosition = -1;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp b/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp
new file mode 100644
index 000000000..7c79c3a8a
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfvideorenderercontrol_p.h"
+#include "mfactivate_p.h"
+
+#include "evrcustompresenter_p.h"
+
+#include <private/qplatformvideosink_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// IMFActivate implementation that lazily creates the EVRCustomPresenter
+// when the EVR media sink asks for it. The mutex guards m_presenter,
+// m_videoSink and m_cropRect, since activation happens on an MF thread
+// while setSink/setCropRect are called from the Qt side.
+class EVRCustomPresenterActivate : public MFAbstractActivate
+{
+public:
+    EVRCustomPresenterActivate(QVideoSink *sink);
+
+    STDMETHODIMP ActivateObject(REFIID riid, void **ppv) override;
+    STDMETHODIMP ShutdownObject() override;
+    STDMETHODIMP DetachObject() override;
+
+    void setSink(QVideoSink *sink);
+    void setCropRect(QRect cropRect);
+
+private:
+    // Destructor is not public. Caller should call Release.
+    ~EVRCustomPresenterActivate() override { }
+
+    EVRCustomPresenter *m_presenter;
+    QVideoSink *m_videoSink;
+    QRect m_cropRect;
+    QMutex m_mutex;
+};
+
+
+// Trivial constructor; activates are created on demand in createActivate().
+MFVideoRendererControl::MFVideoRendererControl(QObject *parent)
+    : QObject(parent)
+{
+}
+
+// Shuts down and releases both activate objects (see releaseActivate()).
+MFVideoRendererControl::~MFVideoRendererControl()
+{
+    releaseActivate();
+}
+
+// Clears the sink's current frame and shuts down / releases both the
+// presenter activate and the EVR renderer activate. Safe to call twice:
+// both pointers are nulled afterwards.
+void MFVideoRendererControl::releaseActivate()
+{
+    // Present an empty frame so the sink does not keep showing stale video.
+    if (m_sink)
+        m_sink->platformVideoSink()->setVideoFrame(QVideoFrame());
+
+    if (m_presenterActivate) {
+        m_presenterActivate->ShutdownObject();
+        m_presenterActivate->Release();
+        m_presenterActivate = NULL;
+    }
+
+    if (m_currentActivate) {
+        m_currentActivate->ShutdownObject();
+        m_currentActivate->Release();
+    }
+    m_currentActivate = NULL;
+}
+
+// Stores the sink and, if a presenter activate already exists, propagates
+// the new sink to it so the running presenter is updated too.
+void MFVideoRendererControl::setSink(QVideoSink *sink)
+{
+    m_sink = sink;
+
+    if (m_presenterActivate)
+        m_presenterActivate->setSink(m_sink);
+}
+
+// Forwards the crop rectangle to the presenter activate, if one exists.
+void MFVideoRendererControl::setCropRect(const QRect &cropRect)
+{
+    if (m_presenterActivate)
+        m_presenterActivate->setCropRect(cropRect);
+}
+
+// Builds a fresh EVR activate whose default presenter is replaced by our
+// EVRCustomPresenterActivate. Any previous activates are released first.
+// Returns nullptr when there is no sink or the EVR activate creation fails.
+IMFActivate* MFVideoRendererControl::createActivate()
+{
+    releaseActivate();
+
+    if (m_sink) {
+        // Create the EVR media sink, but replace the presenter with our own
+        if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
+            m_presenterActivate = new EVRCustomPresenterActivate(m_sink);
+            m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
+        }
+    }
+
+    return m_currentActivate;
+}
+
+// The presenter itself is created lazily in ActivateObject().
+EVRCustomPresenterActivate::EVRCustomPresenterActivate(QVideoSink *sink)
+    : MFAbstractActivate()
+    , m_presenter(0)
+    , m_videoSink(sink)
+{ }
+
+// Called by the EVR (on an MF thread) to obtain the presenter. Creates it
+// on first use with the current sink and crop rect, then hands out the
+// requested interface via QueryInterface (which also AddRefs).
+HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
+{
+    if (!ppv)
+        return E_INVALIDARG;
+    QMutexLocker locker(&m_mutex);
+    if (!m_presenter) {
+        m_presenter = new EVRCustomPresenter(m_videoSink);
+        m_presenter->setCropRect(m_cropRect);
+    }
+    return m_presenter->QueryInterface(riid, ppv);
+}
+
+HRESULT EVRCustomPresenterActivate::ShutdownObject()
+{
+    // The presenter does not implement IMFShutdown so
+    // this function is the same as DetachObject()
+    return DetachObject();
+}
+
+// Drops this activate's reference to the presenter; the presenter is
+// destroyed once all other COM references are released.
+HRESULT EVRCustomPresenterActivate::DetachObject()
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_presenter) {
+        m_presenter->Release();
+        m_presenter = 0;
+    }
+    return S_OK;
+}
+
+// Updates the stored sink and forwards it to an already-created presenter.
+// If the presenter has not been activated yet, the sink is simply kept for
+// the eventual ActivateObject() call.
+void EVRCustomPresenterActivate::setSink(QVideoSink *sink)
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_videoSink == sink)
+        return;
+
+    m_videoSink = sink;
+
+    if (m_presenter)
+        m_presenter->setSink(sink);
+}
+
+// Updates the stored crop rectangle and forwards it to an already-created
+// presenter; otherwise it is applied when the presenter is activated.
+void EVRCustomPresenterActivate::setCropRect(QRect cropRect)
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_cropRect == cropRect)
+        return;
+
+    m_cropRect = cropRect;
+
+    if (m_presenter)
+        m_presenter->setCropRect(cropRect);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h b/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h
new file mode 100644
index 000000000..ed5195240
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFVIDEORENDERERCONTROL_H
+#define MFVIDEORENDERERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qpointer.h>
+#include <qrect.h>
+#include <mfobjects.h>
+
+QT_BEGIN_NAMESPACE
+class EVRCustomPresenterActivate;
+class QVideoSink;
+
+// Owns the pair of IMFActivate objects (EVR renderer + custom presenter
+// activate) used to wire the player session's video output to a QVideoSink.
+// Both activates are raw COM pointers managed via releaseActivate().
+class MFVideoRendererControl : public QObject
+{
+public:
+    MFVideoRendererControl(QObject *parent = 0);
+    ~MFVideoRendererControl();
+
+    void setSink(QVideoSink *surface);
+    void setCropRect(const QRect &cropRect);
+
+    IMFActivate* createActivate();
+    void releaseActivate();
+
+private:
+    QPointer<QVideoSink> m_sink;
+    IMFActivate *m_currentActivate = nullptr;          // EVR renderer activate
+    EVRCustomPresenterActivate *m_presenterActivate = nullptr;  // our presenter activate
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsformatinfo.cpp b/src/plugins/multimedia/windows/qwindowsformatinfo.cpp
new file mode 100644
index 000000000..6ef1f7f7f
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsformatinfo.cpp
@@ -0,0 +1,187 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsformatinfo_p.h"
+
+#include <mfapi.h>
+#include <mftransform.h>
+#include <private/qcomptr_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qcomtaskresource_p.h>
+
+#include <QtCore/qlist.h>
+#include <QtCore/qset.h>
+#include <QtCore/qhash.h>
+#include <QtGui/qimagewriter.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+template<typename T>
+using CheckedCodecs = QHash<QPair<T, QMediaFormat::ConversionMode>, bool>;
+
+// Returns true if at least one MF transform in the given category can be
+// activated for the given type: the type is used as the MFT output type when
+// encoding and as the input type when decoding. Enumerated activates are
+// owned by the QComTaskResource and freed on scope exit.
+bool isSupportedMFT(const GUID &category, const MFT_REGISTER_TYPE_INFO &type, QMediaFormat::ConversionMode mode)
+{
+    UINT32 count = 0;
+    IMFActivate **activateArrayRaw = nullptr;
+    HRESULT hr = MFTEnumEx(
+            category,
+            MFT_ENUM_FLAG_ALL,
+            (mode == QMediaFormat::Encode) ? nullptr : &type, // Input type
+            (mode == QMediaFormat::Encode) ? &type : nullptr, // Output type
+            &activateArrayRaw,
+            &count
+            );
+
+    if (FAILED(hr))
+        return false;
+
+    QComTaskResource<IMFActivate *[], QComDeleter> activateArray(activateArrayRaw, count);
+    // Enumeration alone is not enough: try to actually activate one
+    // transform before declaring the codec supported.
+    for (UINT32 i = 0; i < count; ++i) {
+        ComPtr<IMFTransform> transform;
+        hr = activateArray[i]->ActivateObject(IID_PPV_ARGS(transform.GetAddressOf()));
+        if (SUCCEEDED(hr))
+            return true;
+    }
+
+    return false;
+}
+
+// Audio-codec support check: maps the codec to its MF audio subtype and
+// probes the matching encoder/decoder MFT category.
+bool isSupportedCodec(QMediaFormat::AudioCodec codec, QMediaFormat::ConversionMode mode)
+{
+    return isSupportedMFT((mode == QMediaFormat::Encode) ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER,
+                          { MFMediaType_Audio, QWindowsMultimediaUtils::audioFormatForCodec(codec) },
+                          mode);
+}
+
+// Video-codec support check: maps the codec to its MF video subtype and
+// probes the matching encoder/decoder MFT category.
+bool isSupportedCodec(QMediaFormat::VideoCodec codec, QMediaFormat::ConversionMode mode)
+{
+    return isSupportedMFT((mode == QMediaFormat::Encode) ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER,
+                          { MFMediaType_Video, QWindowsMultimediaUtils::videoFormatForCodec(codec) },
+                          mode);
+}
+
+// Memoizing wrapper: MFT enumeration/activation is expensive, so results are
+// cached per (codec, mode) pair in the caller-supplied hash.
+template <typename T>
+bool isSupportedCodec(T codec, QMediaFormat::ConversionMode m, CheckedCodecs<T> &checkedCodecs)
+{
+    if (auto it = checkedCodecs.constFind(qMakePair(codec, m)); it != checkedCodecs.constEnd())
+        return it.value();
+
+    const bool supported = isSupportedCodec(codec, m);
+
+    checkedCodecs.insert(qMakePair(codec, m), supported);
+    return supported;
+}
+
+}
+
+// Builds the list of image-capture formats by intersecting QImageWriter's
+// available plugins with the formats QImageCapture knows about
+// (JPEG, PNG, WebP, TIFF).
+static QList<QImageCapture::FileFormat> getImageFormatList()
+{
+    QList<QImageCapture::FileFormat> list;
+    const auto formats = QImageWriter::supportedImageFormats();
+
+    for (const auto &f : formats) {
+        auto format = QString::fromUtf8(f);
+        if (format.compare(QLatin1String("jpg"), Qt::CaseInsensitive) == 0)
+            list.append(QImageCapture::FileFormat::JPEG);
+        else if (format.compare(QLatin1String("png"), Qt::CaseInsensitive) == 0)
+            list.append(QImageCapture::FileFormat::PNG);
+        else if (format.compare(QLatin1String("webp"), Qt::CaseInsensitive) == 0)
+            list.append(QImageCapture::FileFormat::WebP);
+        else if (format.compare(QLatin1String("tiff"), Qt::CaseInsensitive) == 0)
+            list.append(QImageCapture::FileFormat::Tiff);
+    }
+
+    return list;
+}
+
+// Populates the base-class `decoders`, `encoders` and `imageFormats` lists.
+// Starts from a static table of container -> candidate-codec mappings, then
+// keeps only the codecs for which an actual MFT encoder/decoder can be
+// activated on this machine, and only containers that retain at least one
+// usable codec. MFT probe results are cached across containers.
+QWindowsFormatInfo::QWindowsFormatInfo()
+{
+    // Candidate codecs per container; pruned below against real MFT support.
+    const QList<CodecMap> containerTable = {
+        { QMediaFormat::MPEG4,
+          { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+          { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+        { QMediaFormat::Matroska,
+          { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3, QMediaFormat::AudioCodec::FLAC, QMediaFormat::AudioCodec::Vorbis, QMediaFormat::AudioCodec::Opus },
+          { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::VP8, QMediaFormat::VideoCodec::VP9, QMediaFormat::VideoCodec::MotionJPEG } },
+        { QMediaFormat::WebM,
+          { QMediaFormat::AudioCodec::Vorbis, QMediaFormat::AudioCodec::Opus },
+          { QMediaFormat::VideoCodec::VP8, QMediaFormat::VideoCodec::VP9 } },
+        { QMediaFormat::QuickTime,
+          { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+          { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+        { QMediaFormat::AAC,
+          { QMediaFormat::AudioCodec::AAC },
+          {} },
+        { QMediaFormat::MP3,
+          { QMediaFormat::AudioCodec::MP3 },
+          {} },
+        { QMediaFormat::FLAC,
+          { QMediaFormat::AudioCodec::FLAC },
+          {} },
+        { QMediaFormat::Mpeg4Audio,
+          { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+          {} },
+        { QMediaFormat::WMA,
+          { QMediaFormat::AudioCodec::WMA },
+          {} },
+        { QMediaFormat::WMV,
+          { QMediaFormat::AudioCodec::WMA },
+          { QMediaFormat::VideoCodec::WMV } }
+    };
+
+    // Containers considered for playback (decode).
+    const QSet<QMediaFormat::FileFormat> decoderFormats = {
+        QMediaFormat::MPEG4,
+        QMediaFormat::Matroska,
+        QMediaFormat::WebM,
+        QMediaFormat::QuickTime,
+        QMediaFormat::AAC,
+        QMediaFormat::MP3,
+        QMediaFormat::FLAC,
+        QMediaFormat::Mpeg4Audio,
+        QMediaFormat::WMA,
+        QMediaFormat::WMV,
+    };
+
+    // Containers considered for recording (encode) — a subset of the above.
+    const QSet<QMediaFormat::FileFormat> encoderFormats = {
+        QMediaFormat::MPEG4,
+        QMediaFormat::AAC,
+        QMediaFormat::MP3,
+        QMediaFormat::FLAC,
+        QMediaFormat::Mpeg4Audio,
+        QMediaFormat::WMA,
+        QMediaFormat::WMV,
+    };
+
+    CheckedCodecs<QMediaFormat::AudioCodec> checkedAudioCodecs;
+    CheckedCodecs<QMediaFormat::VideoCodec> checkedVideoCodecs;
+
+    // Prunes unsupported codecs in place; returns false when the container
+    // ends up with no usable codec at all.
+    auto ensureCodecs = [&] (CodecMap &codecs, QMediaFormat::ConversionMode mode) {
+        codecs.audio.removeIf([&] (auto codec) { return !isSupportedCodec(codec, mode, checkedAudioCodecs); });
+        codecs.video.removeIf([&] (auto codec) { return !isSupportedCodec(codec, mode, checkedVideoCodecs); });
+        return !codecs.video.empty() || !codecs.audio.empty();
+    };
+
+    for (const auto &codecMap : containerTable) {
+        if (decoderFormats.contains(codecMap.format)) {
+            auto m = codecMap;
+            if (ensureCodecs(m, QMediaFormat::Decode))
+                decoders.append(m);
+        }
+
+        if (encoderFormats.contains(codecMap.format)) {
+            auto m = codecMap;
+            if (ensureCodecs(m, QMediaFormat::Encode))
+                encoders.append(m);
+        }
+    }
+
+    imageFormats = getImageFormatList();
+}
+
+// Out-of-line destructor (nothing to release beyond the base class).
+QWindowsFormatInfo::~QWindowsFormatInfo()
+{
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/qwindowsformatinfo_p.h b/src/plugins/multimedia/windows/qwindowsformatinfo_p.h
new file mode 100644
index 000000000..31e6dd986
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsformatinfo_p.h
@@ -0,0 +1,31 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSFORMATSINFO_H
+#define QWINDOWSFORMATSINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Windows backend's media-format capability table; the constructor fills the
+// inherited decoder/encoder/image-format lists by probing installed MFTs.
+class QWindowsFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+    QWindowsFormatInfo();
+    ~QWindowsFormatInfo();
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsintegration.cpp b/src/plugins/multimedia/windows/qwindowsintegration.cpp
new file mode 100644
index 000000000..1053f3c95
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsintegration.cpp
@@ -0,0 +1,96 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsintegration_p.h"
+#include <private/qwindowsmediadevices_p.h>
+#include <qwindowsformatinfo_p.h>
+#include <qwindowsmediacapture_p.h>
+#include <qwindowsimagecapture_p.h>
+#include <qwindowscamera_p.h>
+#include <qwindowsmediaencoder_p.h>
+#include <mfplayercontrol_p.h>
+#include <mfaudiodecodercontrol_p.h>
+#include <mfevrvideowindowcontrol_p.h>
+#include <private/qplatformmediaplugin_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Qt plugin entry point: instantiates the Windows media integration when the
+// requested backend name is "windows".
+class QWindowsMediaPlugin : public QPlatformMediaPlugin
+{
+    Q_OBJECT
+    Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "windows.json")
+
+public:
+    QWindowsMediaPlugin()
+        : QPlatformMediaPlugin()
+    {}
+
+    QPlatformMediaIntegration* create(const QString &name) override
+    {
+        if (name == u"windows")
+            return new QWindowsMediaIntegration;
+        return nullptr;
+    }
+};
+
+// Initializes COM and Media Foundation for the lifetime of the integration;
+// balanced by MFShutdown/CoUninitialize in the destructor.
+QWindowsMediaIntegration::QWindowsMediaIntegration()
+    : QPlatformMediaIntegration(QLatin1String("windows"))
+{
+    CoInitialize(NULL);
+    MFStartup(MF_VERSION);
+}
+
+// Tears down Media Foundation and COM in reverse order of initialization.
+QWindowsMediaIntegration::~QWindowsMediaIntegration()
+{
+    MFShutdown();
+    CoUninitialize();
+}
+
+// Factory for the platform format-capability table; caller takes ownership.
+QPlatformMediaFormatInfo *QWindowsMediaIntegration::createFormatInfo()
+{
+    return new QWindowsFormatInfo();
+}
+
+// Factory for the camera-device enumerator; caller takes ownership.
+QPlatformVideoDevices *QWindowsMediaIntegration::createVideoDevices()
+{
+    return new QWindowsVideoDevices(this);
+}
+
+// Factory for a capture session backend.
+QMaybe<QPlatformMediaCaptureSession *> QWindowsMediaIntegration::createCaptureSession()
+{
+    return new QWindowsMediaCaptureService();
+}
+
+// Factory for the MF-based audio decoder backend.
+QMaybe<QPlatformAudioDecoder *> QWindowsMediaIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+    return new MFAudioDecoderControl(decoder);
+}
+
+// Factory for the MF-based media player backend.
+QMaybe<QPlatformMediaPlayer *> QWindowsMediaIntegration::createPlayer(QMediaPlayer *parent)
+{
+    return new MFPlayerControl(parent);
+}
+
+// Factory for the camera backend.
+QMaybe<QPlatformCamera *> QWindowsMediaIntegration::createCamera(QCamera *camera)
+{
+    return new QWindowsCamera(camera);
+}
+
+// Factory for the media recorder backend.
+QMaybe<QPlatformMediaRecorder *> QWindowsMediaIntegration::createRecorder(QMediaRecorder *recorder)
+{
+    return new QWindowsMediaEncoder(recorder);
+}
+
+// Factory for the still-image capture backend.
+QMaybe<QPlatformImageCapture *> QWindowsMediaIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+    return new QWindowsImageCapture(imageCapture);
+}
+
+// Factory for the EVR-window-based video sink backend.
+QMaybe<QPlatformVideoSink *> QWindowsMediaIntegration::createVideoSink(QVideoSink *sink)
+{
+    return new MFEvrVideoWindowControl(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "qwindowsintegration.moc"
diff --git a/src/plugins/multimedia/windows/qwindowsintegration_p.h b/src/plugins/multimedia/windows/qwindowsintegration_p.h
new file mode 100644
index 000000000..29498fa42
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsintegration_p.h
@@ -0,0 +1,51 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSINTEGRATION_H
+#define QWINDOWSINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+#include "qwindowsvideodevices_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDevices;
+class QWindowsFormatInfo;
+
+// Top-level Windows media backend: a factory collection producing the
+// platform implementations (player, camera, recorder, decoder, sinks).
+// The constructor/destructor bracket COM and Media Foundation init.
+class QWindowsMediaIntegration : public QPlatformMediaIntegration
+{
+    Q_OBJECT
+public:
+    QWindowsMediaIntegration();
+    ~QWindowsMediaIntegration();
+
+    QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+
+    QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+    QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *parent) override;
+    QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+    QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *recorder) override;
+    QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *imageCapture) override;
+
+    QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+protected:
+    QPlatformMediaFormatInfo *createFormatInfo() override;
+
+    QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsvideodevices.cpp b/src/plugins/multimedia/windows/qwindowsvideodevices.cpp
new file mode 100644
index 000000000..8e5081d3b
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsvideodevices.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsvideodevices_p.h"
+
+#include <private/qcameradevice_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qcomptr_p.h>
+#include <private/qcomtaskresource_p.h>
+
+#include <dbt.h>
+
+#include <mfapi.h>
+#include <mfreadwrite.h>
+#include <mferror.h>
+
+QT_BEGIN_NAMESPACE
+
+// Window procedure of the hidden message-only window created below.
+// Handles WM_DEVICECHANGE broadcasts: when a device-interface arrives or is
+// removed, it notifies the QWindowsVideoDevices instance that was stored in
+// the window's GWLP_USERDATA slot by the constructor.
+// NOTE(review): every message returns 1 without calling DefWindowProc; this
+// appears intentional for a message-only window, but worth confirming.
+LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+    if (message == WM_DEVICECHANGE) {
+        // lParam carries a DEV_BROADCAST_HDR describing the event source.
+        auto b = (PDEV_BROADCAST_HDR)lParam;
+        if (b && b->dbch_devicetype == DBT_DEVTYP_DEVICEINTERFACE) {
+            auto wmd = reinterpret_cast<QWindowsVideoDevices *>(GetWindowLongPtr(hWnd, GWLP_USERDATA))
+            if (wmd) {
+                if (wParam == DBT_DEVICEARRIVAL || wParam == DBT_DEVICEREMOVECOMPLETE) {
+                    // Camera plugged in or unplugged: tell listeners to
+                    // re-enumerate the device list.
+                    emit wmd->videoInputsChanged();
+                }
+            }
+        }
+    }
+
+    return 1;
+}
+
+// Class name shared by RegisterClassEx/CreateWindowEx/UnregisterClass.
+static const auto windowClassName = TEXT("QWindowsMediaDevicesMessageWindow");
+
+// Creates a message-only window (parent HWND_MESSAGE) used solely to receive
+// WM_DEVICECHANGE notifications. Returns nullptr on failure; on success the
+// caller owns the window and is responsible for DestroyWindow +
+// UnregisterClass.
+// NOTE(review): RegisterClassEx fails with ERROR_CLASS_ALREADY_EXISTS if a
+// second instance registers the same class name — confirm only one
+// QWindowsVideoDevices exists per process.
+static HWND createMessageOnlyWindow()
+{
+    WNDCLASSEX wx = {};
+    wx.cbSize = sizeof(WNDCLASSEX);
+    wx.lpfnWndProc = deviceNotificationWndProc;
+    wx.hInstance = GetModuleHandle(nullptr);
+    wx.lpszClassName = windowClassName;
+
+    if (!RegisterClassEx(&wx))
+        return nullptr;
+
+    auto hwnd = CreateWindowEx(0, windowClassName, TEXT("Message"),
+                               0, 0, 0, 0, 0, HWND_MESSAGE, nullptr, nullptr, nullptr);
+    if (!hwnd) {
+        // Window creation failed: undo the class registration so a retry can
+        // succeed.
+        UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+        return nullptr;
+    }
+
+    return hwnd;
+}
+
+// Sets up camera hotplug monitoring: a message-only window plus a device
+// notification registration for the video-camera device-interface class.
+// If any step fails, all partially-acquired resources are torn down and the
+// object degrades gracefully (enumeration still works, change notification
+// is disabled).
+QWindowsVideoDevices::QWindowsVideoDevices(QPlatformMediaIntegration *integration)
+    : QPlatformVideoDevices(integration)
+{
+    // Matched by CoUninitialize() in the destructor.
+    CoInitialize(nullptr);
+
+    m_videoDeviceMsgWindow = createMessageOnlyWindow();
+    if (m_videoDeviceMsgWindow) {
+        // Let deviceNotificationWndProc() find this instance.
+        SetWindowLongPtr(m_videoDeviceMsgWindow, GWLP_USERDATA, (LONG_PTR)this);
+
+        DEV_BROADCAST_DEVICEINTERFACE di = {};
+        di.dbcc_size = sizeof(di);
+        di.dbcc_devicetype = DBT_DEVTYP_DEVICEINTERFACE;
+        di.dbcc_classguid = QMM_KSCATEGORY_VIDEO_CAMERA;
+
+        m_videoDeviceNotification =
+                RegisterDeviceNotification(m_videoDeviceMsgWindow, &di, DEVICE_NOTIFY_WINDOW_HANDLE);
+        if (!m_videoDeviceNotification) {
+            // Registration failed: the window is useless, destroy it and
+            // unregister its class.
+            DestroyWindow(m_videoDeviceMsgWindow);
+            m_videoDeviceMsgWindow = nullptr;
+
+            UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+        }
+    }
+
+    if (!m_videoDeviceNotification) {
+        qWarning() << "Video device change notification disabled";
+    }
+}
+
+// Tears down hotplug monitoring in reverse order of acquisition:
+// notification handle, then window + class, then COM.
+QWindowsVideoDevices::~QWindowsVideoDevices()
+{
+    if (m_videoDeviceNotification) {
+        UnregisterDeviceNotification(m_videoDeviceNotification);
+    }
+
+    if (m_videoDeviceMsgWindow) {
+        DestroyWindow(m_videoDeviceMsgWindow);
+        UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+    }
+
+    // Balances the CoInitialize() in the constructor.
+    CoUninitialize();
+}
+
+// Translates a Media Foundation media type into a QCameraFormat (pixel
+// format, resolution, min/max frame rate). Returns nullopt for media types
+// with an unsupported subtype or missing frame-size attribute.
+static std::optional<QCameraFormat> createCameraFormat(IMFMediaType *mediaFormat)
+{
+    GUID subtype = GUID_NULL;
+    if (FAILED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
+        return {};
+
+    auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+    if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+        return {};
+
+    UINT32 width = 0u;
+    UINT32 height = 0u;
+    if (FAILED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width, &height)))
+        return {};
+    QSize resolution{ int(width), int(height) };
+
+    UINT32 num = 0u;
+    UINT32 den = 0u;
+    float minFr = 0.f;
+    float maxFr = 0.f;
+
+    // Guard against a zero denominator in the advertised frame-rate ratios;
+    // dividing by it would otherwise produce inf/NaN (or UB for integers).
+    // A malformed ratio leaves the corresponding rate at 0.
+    if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN, &num, &den)) && den != 0)
+        minFr = float(num) / float(den);
+
+    if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX, &num, &den)) && den != 0)
+        maxFr = float(num) / float(den);
+
+    auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution, minFr, maxFr };
+    return f->create();
+}
+
+// Reads a wide-string attribute from a device activation object and returns
+// it as a QString; returns an empty string when the attribute is missing or
+// the call fails. QComTaskResource frees the CoTaskMem allocation.
+static QString getString(IMFActivate *device, const IID &id)
+{
+    QComTaskResource<WCHAR> str;
+    UINT32 length = 0;
+    if (FAILED(device->GetAllocatedString(id, str.address(), &length)))
+        return {};
+
+    return QString::fromWCharArray(str.get());
+}
+
+// Builds a QCameraDevice from a device activation object: reads the
+// friendly name and symbolic-link id, then activates the source and
+// enumerates its native media types to fill the supported video formats and
+// photo resolutions. Returns nullopt if the source cannot be activated or a
+// source reader cannot be created.
+static std::optional<QCameraDevice> createCameraDevice(IMFActivate *device)
+{
+    auto info = std::make_unique<QCameraDevicePrivate>();
+    info->description = getString(device, MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME);
+    info->id = getString(device, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK).toUtf8();
+
+    // Hold the activated source in a ComPtr so the reference returned by
+    // ActivateObject() is released on every return path; the original raw
+    // pointer was never Release()d and leaked the media source.
+    ComPtr<IMFMediaSource> source;
+    HRESULT hr = device->ActivateObject(IID_PPV_ARGS(source.GetAddressOf()));
+    if (FAILED(hr))
+        return {};
+
+    ComPtr<IMFSourceReader> reader;
+    hr = MFCreateSourceReaderFromMediaSource(source.Get(), NULL, reader.GetAddressOf());
+    if (FAILED(hr))
+        return {};
+
+    QList<QSize> photoResolutions;
+    QList<QCameraFormat> videoFormats;
+    for (DWORD i = 0;; ++i) {
+        // Loop through the supported formats for the video device; the call
+        // fails (MF_E_NO_MORE_TYPES) once the index runs past the last type.
+        ComPtr<IMFMediaType> mediaFormat;
+        hr = reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i,
+                                        mediaFormat.GetAddressOf());
+        if (FAILED(hr))
+            break;
+
+        auto maybeCamera = createCameraFormat(mediaFormat.Get());
+        if (maybeCamera) {
+            videoFormats << *maybeCamera;
+            photoResolutions << maybeCamera->resolution();
+        }
+    }
+
+    info->videoFormats = videoFormats;
+    info->photoResolutions = photoResolutions;
+    return info.release()->create();
+}
+
+// Enumerates the Media Foundation capture sources matching the given
+// attribute store and converts each activation object into a QCameraDevice.
+// Returns an empty list when enumeration fails.
+static QList<QCameraDevice> readCameraDevices(IMFAttributes *attr)
+{
+    QList<QCameraDevice> cameras;
+
+    IMFActivate **devicesRaw = nullptr;
+    UINT32 count = 0;
+    if (FAILED(MFEnumDeviceSources(attr, &devicesRaw, &count)))
+        return cameras;
+
+    // Takes ownership of the returned activation array and its elements.
+    QComTaskResource<IMFActivate *[], QComDeleter> devices(devicesRaw, count);
+
+    for (UINT32 i = 0; i < count; ++i) {
+        IMFActivate *device = devices[i];
+        if (!device)
+            continue;
+
+        if (auto maybeCamera = createCameraDevice(device))
+            cameras << *maybeCamera;
+    }
+
+    return cameras;
+}
+
+// Enumerates the available cameras. Regular video-capture devices are read
+// first; the VIDCAP category attribute is then switched to the sensor-camera
+// category and those devices are appended to the same list.
+QList<QCameraDevice> QWindowsVideoDevices::videoDevices() const
+{
+    QList<QCameraDevice> cameras;
+
+    ComPtr<IMFAttributes> attr;
+    HRESULT hr = MFCreateAttributes(attr.GetAddressOf(), 2);
+    if (FAILED(hr))
+        return {};
+
+    // Restrict enumeration to video capture device sources.
+    hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+                       MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+    if (SUCCEEDED(hr)) {
+        cameras << readCameraDevices(attr.Get());
+
+        // Second pass: also pick up sensor cameras (e.g. IR/depth sensors).
+        hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+                           QMM_KSCATEGORY_SENSOR_CAMERA);
+        if (SUCCEEDED(hr))
+            cameras << readCameraDevices(attr.Get());
+    }
+
+    return cameras;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/qwindowsvideodevices_p.h b/src/plugins/multimedia/windows/qwindowsvideodevices_p.h
new file mode 100644
index 000000000..f8f5ed920
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsvideodevices_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSVIDEODEVICES_H
+#define QWINDOWSVIDEODEVICES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideodevices_p.h>
+#include <QtCore/qt_windows.h>
+
+QT_BEGIN_NAMESPACE
+
+// Window procedure used by the hidden notification window (defined in the
+// .cpp); declared here so the class can befriend it.
+LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND, UINT, WPARAM, LPARAM);
+
+// Enumerates Windows cameras via Media Foundation and reports hotplug
+// changes through a message-only window registered for device-interface
+// notifications.
+class QWindowsVideoDevices : public QPlatformVideoDevices
+{
+public:
+    QWindowsVideoDevices(QPlatformMediaIntegration *integration);
+    ~QWindowsVideoDevices();
+
+    QList<QCameraDevice> videoDevices() const override;
+
+private:
+    // Hidden message-only window receiving WM_DEVICECHANGE; nullptr when
+    // notification setup failed.
+    HWND m_videoDeviceMsgWindow = nullptr;
+    // Handle from RegisterDeviceNotification; nullptr when disabled.
+    HDEVNOTIFY m_videoDeviceNotification = nullptr;
+
+    // The window procedure needs access to emit videoInputsChanged().
+    friend LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND, UINT, WPARAM, LPARAM);
+};
+
+
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/sourceresolver.cpp b/src/plugins/multimedia/windows/sourceresolver.cpp
new file mode 100644
index 000000000..52fb024be
--- /dev/null
+++ b/src/plugins/multimedia/windows/sourceresolver.cpp
@@ -0,0 +1,294 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfstream_p.h"
+#include "sourceresolver_p.h"
+#include <mferror.h>
+#include <nserror.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qdebug.h>
+#include <QtMultimedia/qmediaplayer.h>
+
+QT_BEGIN_NAMESPACE
+
+/*
+ SourceResolver is separated from MFPlayerSession to handle the work of resolving a media source
+ asynchronously. You call SourceResolver::load to request resolving a media source asynchronously,
+ and it will emit mediaSourceReady() when resolving is done. You can call SourceResolver::cancel to
+ stop the previous load operation if there is any.
+*/
+
+// COM-style refcounted helper; construction hands the creator the single
+// initial reference (m_cRef == 1). All resolver state starts empty.
+SourceResolver::SourceResolver()
+    : m_cRef(1),
+      m_cancelCookie(nullptr),
+      m_sourceResolver(nullptr),
+      m_mediaSource(nullptr),
+      m_stream(nullptr)
+{
+}
+
+// Shuts down the media source (and byte stream, via shutdown()) and drops
+// all remaining COM references held by this object.
+SourceResolver::~SourceResolver()
+{
+    shutdown();
+    // shutdown() already clears m_mediaSource; this is a defensive release
+    // in case it was re-set in between.
+    if (m_mediaSource) {
+        m_mediaSource->Release();
+        m_mediaSource = NULL;
+    }
+
+    if (m_cancelCookie)
+        m_cancelCookie->Release();
+    if (m_sourceResolver)
+        m_sourceResolver->Release();
+}
+
+// Standard COM QueryInterface: this object exposes IUnknown and
+// IMFAsyncCallback. A successful query AddRef()s before returning.
+STDMETHODIMP SourceResolver::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (ppvObject == NULL)
+        return E_POINTER;
+
+    if (riid == IID_IMFAsyncCallback)
+        *ppvObject = static_cast<IMFAsyncCallback*>(this);
+    else if (riid == IID_IUnknown)
+        *ppvObject = static_cast<IUnknown*>(this);
+    else
+        *ppvObject = NULL;
+
+    if (*ppvObject == NULL)
+        return E_NOINTERFACE;
+
+    AddRef();
+    return S_OK;
+}
+
+// Thread-safe COM reference increment.
+STDMETHODIMP_(ULONG) SourceResolver::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+// Thread-safe COM reference decrement. When the count reaches zero the
+// object schedules its own deletion via QObject::deleteLater() rather than
+// deleting immediately, so destruction happens on the owning thread's event
+// loop.
+STDMETHODIMP_(ULONG) SourceResolver::Release(void)
+{
+    LONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0)
+        this->deleteLater();
+    return cRef;
+}
+
+// IMFAsyncCallback completion handler for BeginCreateObjectFromByteStream /
+// BeginCreateObjectFromURL. Finishes object creation, detects cancelled
+// requests, stores the resulting media source and emits mediaSourceReady()
+// (or error(hr) on failure).
+HRESULT STDMETHODCALLTYPE SourceResolver::Invoke(IMFAsyncResult *pAsyncResult)
+{
+    QMutexLocker locker(&m_mutex);
+
+    // cancel() clears m_sourceResolver; nothing left to finish in that case.
+    if (!m_sourceResolver)
+        return S_OK;
+
+    MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
+    IUnknown* pSource = NULL;
+    State *state = static_cast<State*>(pAsyncResult->GetStateNoAddRef());
+
+    HRESULT hr = S_OK;
+    if (state->fromStream())
+        hr = m_sourceResolver->EndCreateObjectFromByteStream(pAsyncResult, &ObjectType, &pSource);
+    else
+        hr = m_sourceResolver->EndCreateObjectFromURL(pAsyncResult, &ObjectType, &pSource);
+
+    if (state->sourceResolver() != m_sourceResolver) {
+        // This is a cancelled request: the resolver was replaced after the
+        // Begin call. Release the object EndCreateObject* may still have
+        // produced; the original code returned without releasing it, leaking
+        // the created media source.
+        if (pSource)
+            pSource->Release();
+        return S_OK;
+    }
+
+    if (m_cancelCookie) {
+        m_cancelCookie->Release();
+        m_cancelCookie = NULL;
+    }
+
+    if (FAILED(hr)) {
+        emit error(hr);
+        return S_OK;
+    }
+
+    // Replace any previously resolved source with the new one.
+    if (m_mediaSource) {
+        m_mediaSource->Release();
+        m_mediaSource = NULL;
+    }
+
+    hr = pSource->QueryInterface(IID_PPV_ARGS(&m_mediaSource));
+    pSource->Release();
+    if (FAILED(hr)) {
+        emit error(hr);
+        return S_OK;
+    }
+
+    emit mediaSourceReady();
+
+    return S_OK;
+}
+
+// IMFAsyncCallback::GetParameters — not implemented, which tells Media
+// Foundation to use the default callback queue and behavior.
+HRESULT STDMETHODCALLTYPE SourceResolver::GetParameters(DWORD*, DWORD*)
+{
+    return E_NOTIMPL;
+}
+
+// Starts resolving a media source asynchronously. Three paths:
+//  - an explicit QIODevice stream (wrapped in MFStream, not owned),
+//  - a qrc: URL, opened through QFile and resolved as a byte stream
+//    (MFStream owns the QFile in this case),
+//  - any other URL, resolved directly by the source resolver.
+// Completion is reported through Invoke(); failures to even start emit
+// error(hr) synchronously.
+void SourceResolver::load(const QUrl &url, QIODevice* stream)
+{
+    QMutexLocker locker(&m_mutex);
+    HRESULT hr = S_OK;
+    // The resolver is created lazily on first use and reused afterwards.
+    if (!m_sourceResolver)
+        hr = MFCreateSourceResolver(&m_sourceResolver);
+
+    // Drop the stream wrapper from any previous load.
+    if (m_stream) {
+        m_stream->Release();
+        m_stream = NULL;
+    }
+
+    if (FAILED(hr)) {
+        qWarning() << "Failed to create Source Resolver!";
+        emit error(hr);
+    } else if (stream) {
+        QString urlString = url.toString();
+        // 'false': MFStream does not take ownership of the caller's stream.
+        m_stream = new MFStream(stream, false);
+        // The heap-allocated State is refcounted and released by MF when the
+        // async result is destroyed; it records which resolver issued the
+        // request so Invoke() can detect cancellation.
+        hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+            m_stream, urlString.isEmpty() ? 0 : reinterpret_cast<LPCWSTR>(urlString.utf16()),
+            MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE
+            , NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+        if (FAILED(hr)) {
+            qWarning() << "Unsupported stream!";
+            emit error(hr);
+        }
+    } else {
+#ifdef DEBUG_MEDIAFOUNDATION
+        qDebug() << "loading :" << url;
+        qDebug() << "url path =" << url.path().mid(1);
+#endif
+#ifdef TEST_STREAMING
+        //Testing stream function
+        if (url.scheme() == QLatin1String("file")) {
+            stream = new QFile(url.path().mid(1));
+            if (stream->open(QIODevice::ReadOnly)) {
+                // 'true': MFStream owns and deletes the QFile.
+                m_stream = new MFStream(stream, true);
+                hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+                    m_stream, reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+                    MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+                    NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+                if (FAILED(hr)) {
+                    qWarning() << "Unsupported stream!";
+                    emit error(hr);
+                }
+            } else {
+                delete stream;
+                emit error(QMediaPlayer::FormatError);
+            }
+        } else
+#endif
+        if (url.scheme() == QLatin1String("qrc")) {
+            // If the canonical URL refers to a Qt resource, open with QFile and use
+            // the stream playback capability to play.
+            stream = new QFile(QLatin1Char(':') + url.path());
+            if (stream->open(QIODevice::ReadOnly)) {
+                // 'true': MFStream owns and deletes the QFile.
+                m_stream = new MFStream(stream, true);
+                hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+                    m_stream, reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+                    MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+                    NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+                if (FAILED(hr)) {
+                    qWarning() << "Unsupported stream!";
+                    emit error(hr);
+                }
+            } else {
+                delete stream;
+                emit error(QMediaPlayer::FormatError);
+            }
+        } else {
+            // Plain URL: let Media Foundation resolve it by scheme/content.
+            hr = m_sourceResolver->BeginCreateObjectFromURL(
+                reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+                MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+                NULL, &m_cancelCookie, this, new State(m_sourceResolver, false));
+            if (FAILED(hr)) {
+                qWarning() << "Unsupported url scheme!";
+                emit error(hr);
+            }
+        }
+    }
+}
+
+// Cancels an in-flight resolve request, if any. The resolver itself is also
+// released and cleared so that a late Invoke() can detect the cancellation
+// by comparing State::sourceResolver() against m_sourceResolver; the next
+// load() will create a fresh resolver.
+void SourceResolver::cancel()
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_cancelCookie) {
+        m_sourceResolver->CancelObjectCreation(m_cancelCookie);
+        m_cancelCookie->Release();
+        m_cancelCookie = NULL;
+        m_sourceResolver->Release();
+        m_sourceResolver = NULL;
+    }
+}
+
+// Shuts down the resolved media source and drops our references to it and
+// to the byte-stream wrapper. Safe to call when nothing was resolved.
+void SourceResolver::shutdown()
+{
+    IMFMediaSource *source = m_mediaSource;
+    m_mediaSource = NULL;
+    if (source) {
+        source->Shutdown();
+        source->Release();
+    }
+
+    MFStream *stream = m_stream;
+    m_stream = NULL;
+    if (stream)
+        stream->Release();
+}
+
+// Returns the resolved media source (non-owning; may be NULL before
+// mediaSourceReady() has been emitted or after shutdown()).
+IMFMediaSource* SourceResolver::mediaSource() const
+{
+    return m_mediaSource;
+}
+
+/////////////////////////////////////////////////////////////////////////////////
+// Per-request state passed through the Begin/End async calls. Records which
+// resolver issued the request (kept alive via AddRef) and whether the
+// request came from a byte stream or a URL.
+SourceResolver::State::State(IMFSourceResolver *sourceResolver, bool fromStream)
+    : m_cRef(0)
+    , m_sourceResolver(sourceResolver)
+    , m_fromStream(fromStream)
+{
+    // Balanced by the Release() in the destructor.
+    sourceResolver->AddRef();
+}
+
+SourceResolver::State::~State()
+{
+    m_sourceResolver->Release();
+}
+
+// Minimal COM QueryInterface: State only exposes IUnknown.
+STDMETHODIMP SourceResolver::State::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (ppvObject == NULL)
+        return E_POINTER;
+
+    if (riid != IID_IUnknown) {
+        *ppvObject = NULL;
+        return E_NOINTERFACE;
+    }
+
+    *ppvObject = static_cast<IUnknown*>(this);
+    AddRef();
+    return S_OK;
+}
+
+// Thread-safe COM reference increment.
+STDMETHODIMP_(ULONG) SourceResolver::State::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+// Thread-safe COM reference decrement; deletes the object at zero (unlike
+// SourceResolver::Release, which defers via deleteLater()).
+STDMETHODIMP_(ULONG) SourceResolver::State::Release(void)
+{
+    LONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0)
+        delete this;
+    // For thread safety, return a temporary variable.
+    return cRef;
+}
+
+// The resolver that issued this request; Invoke() compares it against the
+// current m_sourceResolver to detect cancelled requests.
+IMFSourceResolver* SourceResolver::State::sourceResolver() const
+{
+    return m_sourceResolver;
+}
+
+// True when the request was started from a byte stream rather than a URL.
+bool SourceResolver::State::fromStream() const
+{
+    return m_fromStream;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_sourceresolver_p.cpp"
diff --git a/src/plugins/multimedia/windows/sourceresolver_p.h b/src/plugins/multimedia/windows/sourceresolver_p.h
new file mode 100644
index 000000000..57ac6fc9d
--- /dev/null
+++ b/src/plugins/multimedia/windows/sourceresolver_p.h
@@ -0,0 +1,83 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef SOURCERESOLVER_H
+#define SOURCERESOLVER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "mfstream_p.h"
+#include <QUrl>
+
+QT_BEGIN_NAMESPACE
+
+// Resolves a media source (from a URL or a QIODevice byte stream)
+// asynchronously via IMFSourceResolver. Emits mediaSourceReady() on
+// success and error(hr) on failure; cancel() aborts an in-flight request.
+class SourceResolver: public QObject, public IMFAsyncCallback
+{
+    Q_OBJECT
+public:
+    SourceResolver();
+
+    ~SourceResolver();
+
+    // IUnknown — manual refcounting; Release() defers destruction through
+    // QObject::deleteLater().
+    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+    STDMETHODIMP_(ULONG) AddRef(void) override;
+    STDMETHODIMP_(ULONG) Release(void) override;
+
+    // IMFAsyncCallback — completion of the Begin* resolve calls.
+    HRESULT STDMETHODCALLTYPE Invoke(IMFAsyncResult *pAsyncResult) override;
+
+    HRESULT STDMETHODCALLTYPE GetParameters(DWORD*, DWORD*) override;
+
+    // Starts resolving asynchronously; 'stream' is optional and not owned.
+    void load(const QUrl &url, QIODevice* stream);
+
+    // Aborts a pending load, if any.
+    void cancel();
+
+    // Shuts down and releases the resolved source and stream wrapper.
+    void shutdown();
+
+    // Non-owning access to the resolved source (NULL until ready).
+    IMFMediaSource* mediaSource() const;
+
+Q_SIGNALS:
+    void error(long hr);
+    void mediaSourceReady();
+
+private:
+    // Refcounted per-request state handed to the async Begin* calls; lets
+    // Invoke() identify which resolver issued the request and whether it
+    // came from a byte stream.
+    class State : public IUnknown
+    {
+    public:
+        State(IMFSourceResolver *sourceResolver, bool fromStream);
+        virtual ~State();
+
+        STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+        STDMETHODIMP_(ULONG) AddRef(void) override;
+
+        STDMETHODIMP_(ULONG) Release(void) override;
+
+        IMFSourceResolver* sourceResolver() const;
+        bool fromStream() const;
+
+    private:
+        long m_cRef;                        // COM reference count
+        IMFSourceResolver *m_sourceResolver; // resolver that issued the request
+        bool m_fromStream;                   // byte-stream vs URL request
+    };
+
+    long m_cRef;                      // COM reference count
+    IUnknown *m_cancelCookie;         // cookie for CancelObjectCreation
+    IMFSourceResolver *m_sourceResolver; // lazily created, reused resolver
+    IMFMediaSource *m_mediaSource;    // result of the last successful resolve
+    MFStream *m_stream;               // byte-stream wrapper for stream loads
+    QMutex m_mutex;                   // guards all of the above
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/windows.json b/src/plugins/multimedia/windows/windows.json
new file mode 100644
index 000000000..05032c1b7
--- /dev/null
+++ b/src/plugins/multimedia/windows/windows.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "windows" ]
+}