Diffstat (limited to 'src/multimedia/platform/windows')
-rw-r--r--  src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp | 5
-rw-r--r--  src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp | 13
-rw-r--r--  src/multimedia/platform/windows/common/qwindowsmfdefs.cpp | 1
-rw-r--r--  src/multimedia/platform/windows/common/qwindowsmfdefs_p.h | 2
-rw-r--r--  src/multimedia/platform/windows/evr/evrcustompresenter.cpp | 36
-rw-r--r--  src/multimedia/platform/windows/evr/evrcustompresenter_p.h | 3
-rw-r--r--  src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp | 7
-rw-r--r--  src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h | 3
-rw-r--r--  src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp | 2
-rw-r--r--  src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp | 9
-rw-r--r--  src/multimedia/platform/windows/player/mfplayersession.cpp | 97
-rw-r--r--  src/multimedia/platform/windows/player/mfplayersession_p.h | 3
-rw-r--r--  src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp | 21
-rw-r--r--  src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h | 3
-rw-r--r--  src/multimedia/platform/windows/qwindowsmediadevices.cpp | 232
15 files changed, 278 insertions, 159 deletions
diff --git a/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp b/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
index 046a91ab0..39625ed1b 100644
--- a/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
+++ b/src/multimedia/platform/windows/audio/qwindowsaudiosink.cpp
@@ -200,6 +200,9 @@ void QWindowsAudioSink::start(QIODevice* device)
if (deviceState != QAudio::StoppedState)
close();
+ if (device == nullptr)
+ return;
+
if (!open()) {
errorState = QAudio::OpenError;
emit errorChanged(QAudio::OpenError);
@@ -210,7 +213,7 @@ void QWindowsAudioSink::start(QIODevice* device)
m_timer.disconnect();
m_timer.callOnTimeout(this, &QWindowsAudioSink::pullSource);
- m_timer.start(0);
+ pullSource();
}
qint64 QWindowsAudioSink::push(const char *data, qint64 len)
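Note on the qwindowsaudiosink.cpp change: start() now rejects a null QIODevice before opening the device, and the zero-interval timer kick-off is replaced by a direct pullSource() call, so the first buffer is filled before start() returns and the timer is only used for refills. A minimal sketch of that pull-mode pattern; PullSink and its members are stand-ins for the real private API, not code from this patch:

    #include <QIODevice>
    #include <QTimer>
    #include <QAudio>

    class PullSink : public QObject
    {
    public:
        void start(QIODevice *device)
        {
            if (m_state != QAudio::StoppedState)
                close();
            if (!device)                  // new guard: a null source is a no-op
                return;
            if (!open()) {
                m_error = QAudio::OpenError;
                return;
            }
            m_source = device;
            m_timer.disconnect();
            m_timer.callOnTimeout(this, &PullSink::pull);
            pull();                       // fetch the first chunk synchronously;
                                          // pull() re-arms m_timer for later refills
        }

    private:
        bool open() { return true; }      // placeholders for the real device handling
        void close() { }
        void pull() { }                   // would read from m_source and re-arm m_timer
        QAudio::State m_state = QAudio::StoppedState;
        QAudio::Error m_error = QAudio::NoError;
        QIODevice *m_source = nullptr;
        QTimer m_timer;
    };
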
diff --git a/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp b/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
index d86b27157..8523f9982 100644
--- a/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
+++ b/src/multimedia/platform/windows/audio/qwindowsaudiosource.cpp
@@ -305,14 +305,11 @@ bool QWindowsAudioSource::open()
if (!QWindowsAudioUtils::formatToWaveFormatExtensible(settings, wfx)) {
qWarning("QAudioSource: open error, invalid format.");
} else if (buffer_size == 0) {
- buffer_size
- = (settings.sampleRate()
- * settings.channelCount()
- * settings.bytesPerSample()
- + 39) / 5;
- period_size = buffer_size / 5;
+ period_size = settings.sampleRate() / 25 * settings.bytesPerFrame();
+ buffer_size = period_size * 5;
} else {
- period_size = buffer_size / 5;
+ if (int bpf = settings.bytesPerFrame())
+ period_size = bpf * (buffer_size / 5 / bpf);
}
if (period_size == 0) {
@@ -645,7 +642,7 @@ bool QWindowsAudioSource::deviceReady()
if(pullMode) {
// reads some audio data and writes it to QIODevice
- read(0, buffer_size);
+ read(0, period_size * (buffer_size / period_size));
} else {
// emits readyRead() so user will call read() on QIODevice to get some audio data
InputPrivate* a = qobject_cast<InputPrivate*>(audioSource);
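Note on the qwindowsaudiosource.cpp sizing change: both the period and the buffer are now whole multiples of the audio frame size, with the period covering 1/25 s (40 ms) and the buffer five periods (200 ms); deviceReady() likewise reads a whole number of periods. A worked example, assuming 48 kHz stereo 16-bit settings (values not taken from the patch):

    // bytesPerFrame = channelCount * bytesPerSample = 2 * 2           = 4 bytes
    // period_size   = sampleRate / 25 * bytesPerFrame = 48000 / 25 * 4 = 7680 bytes  (40 ms)
    // buffer_size   = period_size * 5                                  = 38400 bytes (200 ms)
    //
    // With a caller-supplied buffer_size the period is instead rounded down to a
    // whole number of frames:
    constexpr int alignedPeriod(int bufferSize, int bytesPerFrame)
    {
        return bytesPerFrame ? bytesPerFrame * (bufferSize / 5 / bytesPerFrame) : 0;
    }
    static_assert(alignedPeriod(9998, 4) == 1996, "9998 / 5 = 1999 bytes -> 499 whole frames");
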
diff --git a/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp b/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
index 97eae9743..f62ef8ee0 100644
--- a/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
+++ b/src/multimedia/platform/windows/common/qwindowsmfdefs.cpp
@@ -55,6 +55,7 @@ const GUID QMM_MF_SD_STREAM_NAME = {0x4f1b099d, 0xd314, 0x41e5, {0xa7, 0x81, 0x7
const GUID QMM_MF_SD_LANGUAGE = {0xaf2180, 0xbdc2, 0x423c, {0xab, 0xca, 0xf5, 0x3, 0x59, 0x3b, 0xc1, 0x21}};
const GUID QMM_KSCATEGORY_VIDEO_CAMERA = {0xe5323777, 0xf976, 0x4f5b, {0x9b, 0x55, 0xb9, 0x46, 0x99, 0xc4, 0x6e, 0x44}};
+const GUID QMM_KSCATEGORY_SENSOR_CAMERA = {0x24e552d7, 0x6523, 0x47f7, {0xa6, 0x47, 0xd3, 0x46, 0x5b, 0xf1, 0xf5, 0xca}};
const GUID QMM_MR_POLICY_VOLUME_SERVICE = {0x1abaa2ac, 0x9d3b, 0x47c6, {0xab, 0x48, 0xc5, 0x95, 0x6, 0xde, 0x78, 0x4d}};
diff --git a/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h b/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
index 173c8f8f0..87ef2272e 100644
--- a/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
+++ b/src/multimedia/platform/windows/common/qwindowsmfdefs_p.h
@@ -73,6 +73,7 @@ extern const GUID QMM_MF_SD_STREAM_NAME;
extern const GUID QMM_MF_SD_LANGUAGE;
extern const GUID QMM_KSCATEGORY_VIDEO_CAMERA;
+extern const GUID QMM_KSCATEGORY_SENSOR_CAMERA;
extern const GUID QMM_MR_POLICY_VOLUME_SERVICE;
@@ -82,6 +83,7 @@ extern "C" HRESULT WINAPI MFCreateDeviceSource(IMFAttributes *pAttributes, IMFMe
#define QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT 1
#define QMM_PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff
+#define QMM_WININET_E_CANNOT_CONNECT ((HRESULT)0x80072EFDL)
#ifndef __IMFVideoProcessor_INTERFACE_DEFINED__
#define __IMFVideoProcessor_INTERFACE_DEFINED__
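Note on the new QMM_WININET_E_CANNOT_CONNECT define: the value matches the WinINet ERROR_INTERNET_CANNOT_CONNECT code (12029) wrapped as a FACILITY_WIN32 HRESULT, presumably spelled out as a literal because the QMM_-prefixed constants in this header exist to avoid depending on definitions that some SDKs lack. The arithmetic behind the literal:

    // Derivation (FACILITY_WIN32 == 7):
    //   ERROR_INTERNET_CANNOT_CONNECT = 12029 = 0x2EFD
    //   HRESULT_FROM_WIN32(0x2EFD)    = 0x80000000 | (7 << 16) | 0x2EFD = 0x80072EFD
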
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
index e8b99a09f..d05269976 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
@@ -1052,6 +1052,13 @@ void EVRCustomPresenter::setSink(QVideoSink *sink)
supportedFormatsChanged();
}
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+ m_mutex.lock();
+ m_cropRect = cropRect;
+ m_mutex.unlock();
+}
+
HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
{
// Set the zoom rectangle (ie, the source clipping rectangle).
@@ -1342,13 +1349,30 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I
hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
width = int(HI32(size));
height = int(LO32(size));
- rcOutput.left = 0;
- rcOutput.top = 0;
- rcOutput.right = width;
- rcOutput.bottom = height;
+
+ if (m_cropRect.isValid()) {
+ rcOutput.left = m_cropRect.x();
+ rcOutput.top = m_cropRect.y();
+ rcOutput.right = m_cropRect.x() + m_cropRect.width();
+ rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+ m_sourceRect.left = float(m_cropRect.x()) / width;
+ m_sourceRect.top = float(m_cropRect.y()) / height;
+ m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+ m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+ if (m_mixer)
+ configureMixer(m_mixer);
+ } else {
+ rcOutput.left = 0;
+ rcOutput.top = 0;
+ rcOutput.right = width;
+ rcOutput.bottom = height;
+ }
// Set the geometric aperture, and disable pan/scan.
- displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom);
+ displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+ rcOutput.bottom - rcOutput.top);
hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
if (FAILED(hr))
@@ -1414,7 +1438,7 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
// Initialize the presenter engine with the new media type.
// The presenter engine allocates the samples.
- hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue);
+ hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
if (FAILED(hr))
goto done;
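Note on the evrcustompresenter.cpp change: createOptimalVideoType() now derives both the geometric aperture and the mixer's normalized source rectangle from the crop, and configureMixer() passes the latter to the mixer as the source clipping ("zoom") rectangle. A sketch of the pixel-to-normalized mapping; the helper below is hypothetical, not part of the patch:

    #include <QRect>
    #include <evr.h>   // MFVideoNormalizedRect

    // Pixel crop rectangle -> normalized (0.0..1.0) source rectangle.
    static MFVideoNormalizedRect toNormalizedSourceRect(const QRect &crop,
                                                        int frameWidth, int frameHeight)
    {
        MFVideoNormalizedRect r;
        r.left   = float(crop.x()) / frameWidth;
        r.top    = float(crop.y()) / frameHeight;
        r.right  = float(crop.x() + crop.width()) / frameWidth;
        r.bottom = float(crop.y() + crop.height()) / frameHeight;
        return r;
    }
    // Example: a 1920x1088 decoded surface cropped to QRect(0, 0, 1920, 1080) yields
    // {0.0f, 0.0f, 1.0f, 1080.0f / 1088.0f} ~= {0, 0, 1, 0.9926}.
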
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter_p.h b/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
index 1bf443efa..ec8f414dc 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter_p.h
@@ -55,6 +55,7 @@
#include <qmutex.h>
#include <qqueue.h>
#include <qevent.h>
+#include <qrect.h>
#include <qvideoframeformat.h>
#include <qvideosink.h>
@@ -273,6 +274,7 @@ public:
void supportedFormatsChanged();
void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
void startSurface();
void stopSurface();
@@ -384,6 +386,7 @@ private:
QVideoSink *m_videoSink;
bool m_canRenderToSurface;
qint64 m_positionOffset; // Seek position in microseconds.
+ QRect m_cropRect; // Video crop rectangle
};
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
index 5a975f09e..65a9e684e 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
@@ -511,7 +511,7 @@ HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
return hr;
}
-HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue, QSize frameSize)
{
if (!format || !m_device)
return MF_E_UNEXPECTED;
@@ -524,6 +524,11 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp
if (FAILED(hr))
return hr;
+ if (frameSize.isValid() && !frameSize.isEmpty()) {
+ width = frameSize.width();
+ height = frameSize.height();
+ }
+
DWORD d3dFormat = 0;
hr = qt_evr_getFourCC(format, &d3dFormat);
if (FAILED(hr))
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h b/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
index f68ae50f1..de0245cb1 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine_p.h
@@ -52,6 +52,7 @@
//
#include <QMutex>
+#include <QSize>
#include <QVideoFrameFormat>
#include <private/qwindowsiupointer_p.h>
@@ -141,7 +142,7 @@ public:
HRESULT checkFormat(D3DFORMAT format);
UINT refreshRate() const { return m_displayMode.RefreshRate; }
- HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
+ HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue, QSize frameSize);
QVideoFrameFormat videoSurfaceFormat() const { return m_surfaceFormat; }
QVideoFrame makeVideoFrame(IMFSample* sample);
diff --git a/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
index 16414cca5..6f11f6a54 100644
--- a/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
+++ b/src/multimedia/platform/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -246,7 +246,7 @@ HRESULT QWindowsMediaDeviceReader::prepareVideoStream(DWORD mediaTypeIndex)
// and the stride, which we need to convert the frame later
hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, m_frameWidth, &m_stride);
if (SUCCEEDED(hr)) {
-
+ m_stride = qAbs(m_stride);
UINT32 frameRateNum, frameRateDen;
hr = MFGetAttributeRatio(m_videoMediaType, MF_MT_FRAME_RATE, &frameRateNum, &frameRateDen);
if (SUCCEEDED(hr)) {
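Note on the qwindowsmediadevicereader.cpp change: MFGetStrideForBitmapInfoHeader() can report a negative stride for formats stored bottom-up in memory (typically uncompressed RGB), so taking the absolute value keeps the later buffer-size and copy arithmetic positive. An illustrative, hypothetical helper (values are examples, not from the patch):

    #include <mfapi.h>
    #include <QtGlobal>   // qAbs

    static LONG bytesPerRow(const GUID &subtype, UINT32 width)   // error handling elided
    {
        LONG stride = 0;
        MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &stride);
        // Bottom-up layouts may report e.g. -5120 for a 1280-wide RGB32 surface;
        // only the magnitude is useful as a bytes-per-row count.
        return qAbs(stride);
    }
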
diff --git a/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp b/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
index ee2ba3c6a..8f5daf819 100644
--- a/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
+++ b/src/multimedia/platform/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -81,7 +81,7 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
if (m_state != QMediaRecorder::StoppedState)
return;
- m_sessionWasActive = m_mediaDeviceSession->isActive();
+ m_sessionWasActive = m_mediaDeviceSession->isActive() || m_mediaDeviceSession->isActivating();
if (!m_sessionWasActive) {
@@ -140,10 +140,11 @@ void QWindowsMediaEncoder::resume()
void QWindowsMediaEncoder::stop()
{
- if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState)
+ if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState) {
m_mediaDeviceSession->stopRecording();
- if (!m_sessionWasActive)
- m_mediaDeviceSession->setActive(false);
+ if (!m_sessionWasActive)
+ m_mediaDeviceSession->setActive(false);
+ }
}
diff --git a/src/multimedia/platform/windows/player/mfplayersession.cpp b/src/multimedia/platform/windows/player/mfplayersession.cpp
index 3c4fe9929..ebdbff696 100644
--- a/src/multimedia/platform/windows/player/mfplayersession.cpp
+++ b/src/multimedia/platform/windows/player/mfplayersession.cpp
@@ -59,6 +59,7 @@
#include "mfplayersession_p.h"
#include <mferror.h>
#include <nserror.h>
+#include <winerror.h>
#include "private/sourceresolver_p.h"
#include "samplegrabber_p.h"
#include "mftvideo_p.h"
@@ -110,7 +111,7 @@ MFPlayerSession::MFPlayerSession(MFPlayerControl *playerControl)
m_request.rate = 1.0f;
m_audioSampleGrabber = new AudioSampleGrabberCallback;
- m_videoRendererControl = new MFVideoRendererControl;
+ m_videoRendererControl = new MFVideoRendererControl(this);
}
void MFPlayerSession::timeout()
@@ -152,7 +153,7 @@ void MFPlayerSession::close()
m_closing = true;
hr = m_session->Close();
if (SUCCEEDED(hr)) {
- DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent, 100);
+ DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent, 2000);
if (dwWaitResult == WAIT_TIMEOUT) {
qWarning() << "session close time out!";
}
@@ -187,6 +188,7 @@ void MFPlayerSession::close()
CloseHandle(m_hCloseEvent);
m_hCloseEvent = 0;
m_lastPosition = -1;
+ m_position = 0;
}
MFPlayerSession::~MFPlayerSession()
@@ -216,7 +218,8 @@ void MFPlayerSession::load(const QUrl &url, QIODevice *stream)
createSession();
changeStatus(QMediaPlayer::LoadingMedia);
m_sourceResolver->load(url, stream);
- m_updateRoutingOnStart = true;
+ if (url.isLocalFile())
+ m_updateRoutingOnStart = true;
}
positionChanged(position());
}
@@ -240,7 +243,17 @@ void MFPlayerSession::handleSourceError(long hr)
errorCode = QMediaPlayer::FormatError;
errorString = tr("Unsupported media type.");
break;
+ case MF_E_UNSUPPORTED_SCHEME:
+ errorCode = QMediaPlayer::ResourceError;
+ errorString = tr("Unsupported URL scheme.");
+ break;
+ case QMM_WININET_E_CANNOT_CONNECT:
+ errorCode = QMediaPlayer::NetworkError;
+ errorString = tr("A connection with the server could not be established.");
+ break;
default:
+ qWarning() << "handleSourceError:"
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hr);
errorString = tr("Failed to load source.");
break;
}
@@ -283,9 +296,10 @@ void MFPlayerSession::handleMediaSourceReady()
bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
MFPlayerSession::MediaType *type,
QString *name,
- QString *language) const
+ QString *language,
+ GUID *format) const
{
- if (!stream || !type || !name || !language)
+ if (!stream || !type || !name || !language || !format)
return false;
*type = Unknown;
@@ -319,6 +333,13 @@ bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
else if (guidMajorType == MFMediaType_Video)
*type = Video;
}
+
+ IMFMediaType *mediaType = nullptr;
+ if (SUCCEEDED(typeHandler->GetCurrentMediaType(&mediaType))) {
+ mediaType->GetGUID(MF_MT_SUBTYPE, format);
+ mediaType->Release();
+ }
+
typeHandler->Release();
}
@@ -359,8 +380,9 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat
MediaType mediaType = Unknown;
QString streamName;
QString streamLanguage;
+ GUID format = GUID_NULL;
- if (getStreamInfo(streamDesc, &mediaType, &streamName, &streamLanguage)) {
+ if (getStreamInfo(streamDesc, &mediaType, &streamName, &streamLanguage, &format)) {
QPlatformMediaPlayer::TrackType trackType = (mediaType == Audio) ?
QPlatformMediaPlayer::AudioStream : QPlatformMediaPlayer::VideoStream;
@@ -374,6 +396,7 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat
m_trackInfo[trackType].metaData.append(metaData);
m_trackInfo[trackType].nativeIndexes.append(i);
+ m_trackInfo[trackType].format = format;
if (((m_mediaTypes & mediaType) == 0) && selected) { // Check if this type isn't already added
IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc);
@@ -481,20 +504,15 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology
}
auto id = m_audioOutput->device.id();
- if (!id.isEmpty()) {
- QString s = QString::fromUtf8(id);
- hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
- } else {
- //This is the default one that has been inserted in updateEndpoints(),
- //so give the activate a hint that we want to use the device for multimedia playback
- //then the media foundation will choose an appropriate one.
-
- //from MSDN:
- //The ERole enumeration defines constants that indicate the role that the system has assigned to an audio endpoint device.
- //eMultimedia: Music, movies, narration, and live music recording.
- hr = activate->SetUINT32(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ROLE, eMultimedia);
+ if (id.isEmpty()) {
+ qWarning() << "No audio output";
+ activate->Release();
+ node->Release();
+ return NULL;
}
+ QString s = QString::fromUtf8(id);
+ hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
if (FAILED(hr)) {
qWarning() << "Failed to set attribute for audio device" << m_audioOutput->device.description();
activate->Release();
@@ -504,6 +522,12 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology
}
} else if (mediaType == Video) {
activate = m_videoRendererControl->createActivate();
+
+ QSize resolution = m_metaData.value(QMediaMetaData::Resolution).toSize();
+
+ if (resolution.isValid())
+ m_videoRendererControl->setCropRect(QRect(QPoint(), resolution));
+
} else {
// Unknown stream type.
emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
@@ -1569,8 +1593,13 @@ ULONG MFPlayerSession::AddRef(void)
ULONG MFPlayerSession::Release(void)
{
LONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
+ if (cRef == 0) {
this->deleteLater();
+
+ // In rare cases the session still has queued events to run between deleteLater() and the
+ // actual deletion, so we set the parent control to nullptr to prevent crashes in those cases.
+ m_playerControl = nullptr;
+ }
return cRef;
}
@@ -1654,8 +1683,25 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
break;
}
changeStatus(QMediaPlayer::InvalidMedia);
- qWarning() << "handleSessionEvent: serious error = " << hrStatus;
- emit error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
+ qWarning() << "handleSessionEvent: serious error = "
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hrStatus);
+ switch (hrStatus) {
+ case MF_E_NET_READ:
+ emit error(QMediaPlayer::NetworkError, tr("Error reading from the network."), true);
+ break;
+ case MF_E_NET_WRITE:
+ emit error(QMediaPlayer::NetworkError, tr("Error writing to the network."), true);
+ break;
+ case NS_E_FIREWALL:
+ emit error(QMediaPlayer::NetworkError, tr("Network packets might be blocked by a firewall."), true);
+ break;
+ case MF_E_MEDIAPROC_WRONGSTATE:
+ emit error(QMediaPlayer::ResourceError, tr("Media session state error."), true);
+ break;
+ default:
+ emit error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
+ break;
+ }
break;
case MESessionRateChanged:
// If the rate change succeeded, we've already got the rate
@@ -1793,8 +1839,7 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
if (SUCCEEDED(MFGetService(m_session, MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_rateControl)))) {
if (SUCCEEDED(MFGetService(m_session, MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_rateSupport)))) {
- if ((m_mediaTypes & Video) == Video
- && SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
+ if (SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
m_canScrub = true;
}
BOOL isThin = FALSE;
@@ -1903,6 +1948,7 @@ void MFPlayerSession::clear()
m_trackInfo[i].currentIndex = -1;
m_trackInfo[i].sourceNodeId = TOPOID(-1);
m_trackInfo[i].outputNodeId = TOPOID(-1);
+ m_trackInfo[i].format = GUID_NULL;
}
if (!m_metaData.isEmpty()) {
@@ -1981,6 +2027,11 @@ void MFPlayerSession::setActiveTrack(QPlatformMediaPlayer::TrackType type, int i
if (index < -1 || index >= nativeIndexes.count())
return;
+ // Updating the topology fails if there is a HEVC video stream,
+ // which causes other issues. Ignoring the change, for now.
+ if (m_trackInfo[QPlatformMediaPlayer::VideoStream].format == MFVideoFormat_HEVC)
+ return;
+
IMFTopology *topology = nullptr;
if (SUCCEEDED(m_session->GetFullTopology(QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT, 0, &topology))) {
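Note on the mfplayersession.cpp error handling: the finer-grained mapping above means network failures now surface on the public API as QMediaPlayer::NetworkError with a specific message instead of the generic ResourceError / "Failed to load source.". A hypothetical client-side sketch of observing this, not part of the patch:

    #include <QMediaPlayer>
    #include <QString>
    #include <QDebug>

    static void watchPlayerErrors(QMediaPlayer *player)
    {
        QObject::connect(player, &QMediaPlayer::errorOccurred,
                         [](QMediaPlayer::Error error, const QString &message) {
            // e.g. a refused connection now arrives as QMediaPlayer::NetworkError with
            // "A connection with the server could not be established."
            qWarning() << error << message;
        });
    }
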
diff --git a/src/multimedia/platform/windows/player/mfplayersession_p.h b/src/multimedia/platform/windows/player/mfplayersession_p.h
index 8f1af9f8b..cb5e48a6c 100644
--- a/src/multimedia/platform/windows/player/mfplayersession_p.h
+++ b/src/multimedia/platform/windows/player/mfplayersession_p.h
@@ -243,6 +243,7 @@ private:
int currentIndex = -1;
TOPOID sourceNodeId = -1;
TOPOID outputNodeId = -1;
+ GUID format = GUID_NULL;
};
TrackInfo m_trackInfo[QPlatformMediaPlayer::NTrackTypes];
@@ -258,7 +259,7 @@ private:
void createSession();
void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD);
- bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language) const;
+ bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language, GUID *format) const;
IMFTopologyNode* addSourceNode(IMFTopology* topology, IMFMediaSource* source,
IMFPresentationDescriptor* presentationDesc, IMFStreamDescriptor *streamDesc);
IMFTopologyNode* addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID);
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
index 0c028933c..0330bbef9 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
@@ -2136,10 +2136,12 @@ public:
STDMETHODIMP DetachObject() override;
void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
private:
EVRCustomPresenter *m_presenter;
QVideoSink *m_videoSink;
+ QRect m_cropRect;
QMutex m_mutex;
};
@@ -2192,6 +2194,12 @@ void MFVideoRendererControl::setSink(QVideoSink *sink)
static_cast<VideoRendererActivate*>(m_currentActivate)->setSink(m_sink);
}
+void MFVideoRendererControl::setCropRect(QRect cropRect)
+{
+ if (m_presenterActivate)
+ m_presenterActivate->setCropRect(cropRect);
+}
+
void MFVideoRendererControl::customEvent(QEvent *event)
{
if (m_presenterActivate)
@@ -2261,6 +2269,7 @@ HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
QMutexLocker locker(&m_mutex);
if (!m_presenter) {
m_presenter = new EVRCustomPresenter(m_videoSink);
+ m_presenter->setCropRect(m_cropRect);
}
return m_presenter->QueryInterface(riid, ppv);
}
@@ -2294,5 +2303,17 @@ void EVRCustomPresenterActivate::setSink(QVideoSink *sink)
m_presenter->setSink(sink);
}
+void EVRCustomPresenterActivate::setCropRect(QRect cropRect)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_cropRect == cropRect)
+ return;
+
+ m_cropRect = cropRect;
+
+ if (m_presenter)
+ m_presenter->setCropRect(cropRect);
+}
+
#include "moc_mfvideorenderercontrol_p.cpp"
#include "mfvideorenderercontrol.moc"
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h b/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
index 9b48803d9..09c16326e 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol_p.h
@@ -52,6 +52,7 @@
//
#include "qobject.h"
+#include <qrect.h>
#include <mfapi.h>
#include <mfidl.h>
@@ -73,6 +74,8 @@ public:
QVideoSink *sink() const;
void setSink(QVideoSink *surface);
+ void setCropRect(QRect cropRect);
+
IMFActivate* createActivate();
void releaseActivate();
diff --git a/src/multimedia/platform/windows/qwindowsmediadevices.cpp b/src/multimedia/platform/windows/qwindowsmediadevices.cpp
index 61a389ca0..06eab2d9d 100644
--- a/src/multimedia/platform/windows/qwindowsmediadevices.cpp
+++ b/src/multimedia/platform/windows/qwindowsmediadevices.cpp
@@ -64,6 +64,8 @@
#include <private/qwindowsaudioutils_p.h>
#include <private/qwindowsmfdefs_p.h>
+#include <QtCore/qmap.h>
+
QT_BEGIN_NAMESPACE
class CMMNotificationClient : public IMMNotificationClient
@@ -393,126 +395,130 @@ QList<QAudioDevice> QWindowsMediaDevices::audioOutputs() const
return availableDevices(QAudioDevice::Output);
}
-QList<QCameraDevice> QWindowsMediaDevices::videoInputs() const
+static std::optional<QCameraFormat> createCameraFormat(IMFMediaType *mediaFormat)
{
- QList<QCameraDevice> cameras;
+ GUID subtype = GUID_NULL;
+ if (FAILED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ return {};
+
+ auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ return {};
+
+ UINT32 width = 0u;
+ UINT32 height = 0u;
+ if (FAILED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width, &height)))
+ return {};
+ QSize resolution{ int(width), int(height) };
- IMFAttributes *pAttributes = NULL;
- IMFActivate **ppDevices = NULL;
+ UINT32 num = 0u;
+ UINT32 den = 0u;
+ float minFr = 0.f;
+ float maxFr = 0.f;
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN, &num, &den)))
+ minFr = float(num) / float(den);
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX, &num, &den)))
+ maxFr = float(num) / float(den);
+
+ auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution, minFr, maxFr };
+ return f->create();
+}
- // Create an attribute store to specify the enumeration parameters.
- HRESULT hr = MFCreateAttributes(&pAttributes, 1);
+static QString getString(IMFActivate *device, const IID &id)
+{
+ WCHAR *str = NULL;
+ UINT32 length = 0;
+ HRESULT hr = device->GetAllocatedString(id, &str, &length);
if (SUCCEEDED(hr)) {
- // Source type: video capture devices
- hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
-
- if (SUCCEEDED(hr)) {
- // Enumerate devices.
- UINT32 count;
- hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
- if (SUCCEEDED(hr)) {
- // Iterate through devices.
- for (int index = 0; index < int(count); index++) {
- QCameraDevicePrivate *info = new QCameraDevicePrivate;
-
- IMFMediaSource *pSource = NULL;
- IMFSourceReader *reader = NULL;
-
- WCHAR *deviceName = NULL;
- UINT32 deviceNameLength = 0;
- UINT32 deviceIdLength = 0;
- WCHAR *deviceId = NULL;
-
- hr = ppDevices[index]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
- &deviceName, &deviceNameLength);
- if (SUCCEEDED(hr))
- info->description = QString::fromWCharArray(deviceName);
- CoTaskMemFree(deviceName);
-
- hr = ppDevices[index]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &deviceId,
- &deviceIdLength);
- if (SUCCEEDED(hr))
- info->id = QString::fromWCharArray(deviceId).toUtf8();
- CoTaskMemFree(deviceId);
-
- // Create the media source object.
- hr = ppDevices[index]->ActivateObject(
- IID_PPV_ARGS(&pSource));
- // Create the media source reader.
- hr = MFCreateSourceReaderFromMediaSource(pSource, NULL, &reader);
- if (SUCCEEDED(hr)) {
- QList<QSize> photoResolutions;
- QList<QCameraFormat> videoFormats;
-
- DWORD dwMediaTypeIndex = 0;
- IMFMediaType *mediaFormat = NULL;
- GUID subtype = GUID_NULL;
- HRESULT mediaFormatResult = S_OK;
-
- UINT32 frameRateMin = 0u;
- UINT32 frameRateMax = 0u;
- UINT32 denominator = 0u;
- UINT32 width = 0u;
- UINT32 height = 0u;
-
- while (SUCCEEDED(mediaFormatResult)) {
- // Loop through the supported formats for the video device
- mediaFormatResult = reader->GetNativeMediaType(
- (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, dwMediaTypeIndex,
- &mediaFormat);
- if (mediaFormatResult == MF_E_NO_MORE_TYPES)
- break;
- else if (SUCCEEDED(mediaFormatResult)) {
- QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
- QSize resolution;
- float minFr = .0;
- float maxFr = .0;
-
- if (SUCCEEDED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
- pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
-
- if (SUCCEEDED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width,
- &height))) {
- resolution.rheight() = (int)height;
- resolution.rwidth() = (int)width;
- photoResolutions << resolution;
- }
-
- if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN,
- &frameRateMin, &denominator)))
- minFr = qreal(frameRateMin) / denominator;
- if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX,
- &frameRateMax, &denominator)))
- maxFr = qreal(frameRateMax) / denominator;
-
- auto *f = new QCameraFormatPrivate { QSharedData(), pixelFormat,
- resolution, minFr, maxFr };
- videoFormats << f->create();
- }
- ++dwMediaTypeIndex;
- }
- if (mediaFormat)
- mediaFormat->Release();
-
- info->videoFormats = videoFormats;
- info->photoResolutions = photoResolutions;
- }
- if (reader)
- reader->Release();
- cameras.append(info->create());
- }
- }
- for (DWORD i = 0; i < count; i++) {
- if (ppDevices[i])
- ppDevices[i]->Release();
+ auto qstr = QString::fromWCharArray(str);
+ CoTaskMemFree(str);
+ return qstr;
+ } else {
+ return {};
+ }
+}
+
+static std::optional<QCameraDevice> createCameraDevice(IMFActivate *device)
+{
+ auto info = std::make_unique<QCameraDevicePrivate>();
+ info->description = getString(device, MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME);
+ info->id = getString(device, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK).toUtf8();
+
+ IMFMediaSource *source = NULL;
+ HRESULT hr = device->ActivateObject(IID_PPV_ARGS(&source));
+ if (FAILED(hr))
+ return {};
+
+ QWindowsIUPointer<IMFSourceReader> reader;
+ hr = MFCreateSourceReaderFromMediaSource(source, NULL, reader.address());
+ if (FAILED(hr))
+ return {};
+
+ QList<QSize> photoResolutions;
+ QList<QCameraFormat> videoFormats;
+ for (DWORD i = 0;; ++i) {
+ // Loop through the supported formats for the video device
+ QWindowsIUPointer<IMFMediaType> mediaFormat;
+ hr = reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i,
+ mediaFormat.address());
+ if (FAILED(hr))
+ break;
+
+ auto maybeCamera = createCameraFormat(mediaFormat.get());
+ if (maybeCamera) {
+ videoFormats << *maybeCamera;
+ photoResolutions << maybeCamera->resolution();
+ }
+ }
+
+ info->videoFormats = videoFormats;
+ info->photoResolutions = photoResolutions;
+ return info.release()->create();
+}
+
+static QList<QCameraDevice> readCameraDevices(IMFAttributes *attr)
+{
+ QList<QCameraDevice> cameras;
+ UINT32 count = 0;
+ IMFActivate **devices = NULL;
+ HRESULT hr = MFEnumDeviceSources(attr, &devices, &count);
+ if (SUCCEEDED(hr)) {
+ for (UINT32 i = 0; i < count; i++) {
+ IMFActivate *device = devices[i];
+ if (device) {
+ auto maybeCamera = createCameraDevice(device);
+ if (maybeCamera)
+ cameras << *maybeCamera;
+
+ device->Release();
}
- CoTaskMemFree(ppDevices);
}
+ CoTaskMemFree(devices);
+ }
+ return cameras;
+}
+
+QList<QCameraDevice> QWindowsMediaDevices::videoInputs() const
+{
+ QList<QCameraDevice> cameras;
+
+ QWindowsIUPointer<IMFAttributes> attr;
+ HRESULT hr = MFCreateAttributes(attr.address(), 2);
+ if (FAILED(hr))
+ return {};
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ if (SUCCEEDED(hr)) {
+ cameras << readCameraDevices(attr.get());
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+ QMM_KSCATEGORY_SENSOR_CAMERA);
+ if (SUCCEEDED(hr))
+ cameras << readCameraDevices(attr.get());
}
- if (pAttributes)
- pAttributes->Release();
return cameras;
}
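
Note on the qwindowsmediadevices.cpp refactor: enumeration is split into createCameraFormat()/createCameraDevice()/readCameraDevices() and now runs for both KSCATEGORY_VIDEO_CAMERA and the new QMM_KSCATEGORY_SENSOR_CAMERA category, so sensor-class devices (such as the IR stream of a Windows Hello camera) can appear alongside regular webcams. A hypothetical application-side snippet listing what the public API reports, not part of the patch:

    #include <QCameraDevice>
    #include <QMediaDevices>
    #include <QDebug>

    static void listCameras()
    {
        const QList<QCameraDevice> cameras = QMediaDevices::videoInputs();
        for (const QCameraDevice &camera : cameras) {
            qDebug() << camera.description() << camera.id();
            for (const QCameraFormat &format : camera.videoFormats())
                qDebug() << "   " << format.resolution()
                         << format.minFrameRate() << "-" << format.maxFrameRate() << "fps";
        }
    }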