Diffstat (limited to 'src/plugins/multimedia/windows/evr')
-rw-r--r--  src/plugins/multimedia/windows/evr/evrcustompresenter.cpp    1849
-rw-r--r--  src/plugins/multimedia/windows/evr/evrcustompresenter_p.h     357
-rw-r--r--  src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp    699
-rw-r--r--  src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h    153
-rw-r--r--  src/plugins/multimedia/windows/evr/evrhelpers.cpp             140
-rw-r--r--  src/plugins/multimedia/windows/evr/evrhelpers_p.h              93
-rw-r--r--  src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp  228
-rw-r--r--  src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h   72
8 files changed, 3591 insertions, 0 deletions
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
new file mode 100644
index 000000000..2a3433f4d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
@@ -0,0 +1,1849 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrcustompresenter_p.h"
+
+#include "evrd3dpresentengine_p.h"
+#include "evrhelpers_p.h"
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+#include <rhi/qrhi.h>
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qrect.h>
+#include <qthread.h>
+#include <qcoreapplication.h>
+#include <qmath.h>
+#include <qloggingcategory.h>
+
+#include <mutex>
+
+#include <float.h>
+#include <evcode.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrCustomPresenter, "qt.multimedia.evrcustompresenter")
+
+const static MFRatio g_DefaultFrameRate = { 30, 1 };
+static const DWORD SCHEDULER_TIMEOUT = 5000;
+static const MFTIME ONE_SECOND = 10000000;
+static const LONG ONE_MSEC = 1000;
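+// Note: MFTIME values are expressed in 100-nanosecond units ("hns"), so
+// ONE_SECOND is one second in hns, and ONE_SECOND / ONE_MSEC is the number
+// of hns per millisecond (10000).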
+
+// Function declarations.
+static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+
+static inline LONG MFTimeToMsec(const LONGLONG& time)
+{
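+ // e.g. a time stamp of 2500000 hns (0.25 s) yields 2500000 / 10000 = 250 msec.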
+ return (LONG)(time / (ONE_SECOND / ONE_MSEC));
+}
+
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
+{
+ if (!evr || !presenter)
+ return false;
+
+ HRESULT result = E_FAIL;
+
+ IMFVideoRenderer *renderer = NULL;
+ if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
+ result = renderer->InitializeRenderer(NULL, presenter);
+ renderer->Release();
+ }
+
+ return result == S_OK;
+}
+
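+// Carries a video sample across threads: presentSample() posts this event
+// when it is called off the presenter's thread, so frames are always
+// delivered to the video sink on the thread that owns the presenter.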
+class PresentSampleEvent : public QEvent
+{
+public:
+ explicit PresentSampleEvent(const ComPtr<IMFSample> &sample)
+ : QEvent(static_cast<Type>(EVRCustomPresenter::PresentSample)), m_sample(sample)
+ {
+ }
+
+ ComPtr<IMFSample> sample() const { return m_sample; }
+
+private:
+ const ComPtr<IMFSample> m_sample;
+};
+
+Scheduler::Scheduler(EVRCustomPresenter *presenter)
+ : m_presenter(presenter)
+ , m_threadID(0)
+ , m_playbackRate(1.0f)
+ , m_perFrame_1_4th(0)
+{
+}
+
+Scheduler::~Scheduler()
+{
+ m_scheduledSamples.clear();
+}
+
+void Scheduler::setFrameRate(const MFRatio& fps)
+{
+ UINT64 AvgTimePerFrame = 0;
+
+ // Convert to a duration.
+ MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
+
+ // Calculate 1/4th of this value, because we use it frequently.
+ m_perFrame_1_4th = AvgTimePerFrame / 4;
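+ // e.g. at 30 fps, AvgTimePerFrame = 10000000 / 30 = 333333 hns (~33.3 ms),
+ // so m_perFrame_1_4th is ~83333 hns (~8.3 ms).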
+}
+
+HRESULT Scheduler::startScheduler(ComPtr<IMFClock> clock)
+{
+ if (m_schedulerThread)
+ return E_UNEXPECTED;
+
+ HRESULT hr = S_OK;
+ DWORD dwID = 0;
+ HANDLE hObjects[2];
+ DWORD dwWait = 0;
+
+ m_clock = clock;
+
+ // Set a high timer resolution (i.e., a short timer period).
+ timeBeginPeriod(1);
+
+ // Create an event to wait for the thread to start.
+ m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+ if (!m_threadReadyEvent) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Create an event to wait for flush commands to complete.
+ m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+ if (!m_flushEvent) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Create the scheduler thread.
+ m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID) };
+ if (!m_schedulerThread) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Wait for the thread to signal the "thread ready" event.
+ hObjects[0] = m_threadReadyEvent.get();
+ hObjects[1] = m_schedulerThread.get();
+ dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
+ if (WAIT_OBJECT_0 != dwWait) {
+ // The thread terminated early for some reason. This is an error condition.
+ m_schedulerThread = {};
+
+ hr = E_UNEXPECTED;
+ goto done;
+ }
+
+ m_threadID = dwID;
+
+done:
+ // Regardless of success or failure, we are done using the "thread ready" event.
+ m_threadReadyEvent = {};
+
+ return hr;
+}
+
+HRESULT Scheduler::stopScheduler()
+{
+ if (!m_schedulerThread)
+ return S_OK;
+
+ // Ask the scheduler thread to exit.
+ PostThreadMessage(m_threadID, Terminate, 0, 0);
+
+ // Wait for the thread to exit.
+ WaitForSingleObject(m_schedulerThread.get(), INFINITE);
+
+ // Close handles.
+ m_schedulerThread = {};
+ m_flushEvent = {};
+
+ // Discard samples.
+ m_mutex.lock();
+ m_scheduledSamples.clear();
+ m_mutex.unlock();
+
+ // Restore the timer resolution.
+ timeEndPeriod(1);
+
+ return S_OK;
+}
+
+HRESULT Scheduler::flush()
+{
+ if (m_schedulerThread) {
+ // Ask the scheduler thread to flush.
+ PostThreadMessage(m_threadID, Flush, 0 , 0);
+
+ // Wait for the scheduler thread to signal the flush event,
+ // OR for the thread to terminate.
+ HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
+
+ WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
+ }
+
+ return S_OK;
+}
+
+bool Scheduler::areSamplesScheduled()
+{
+ QMutexLocker locker(&m_mutex);
+ return m_scheduledSamples.count() > 0;
+}
+
+HRESULT Scheduler::scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow)
+{
+ if (!m_schedulerThread)
+ return MF_E_NOT_INITIALIZED;
+
+ HRESULT hr = S_OK;
+ DWORD dwExitCode = 0;
+
+ GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
+ if (dwExitCode != STILL_ACTIVE)
+ return E_FAIL;
+
+ if (presentNow || !m_clock) {
+ m_presenter->presentSample(sample);
+ } else {
+ if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+ qCDebug(qLcEvrCustomPresenter) << "Discard the sample, it came too late";
+ return hr;
+ }
+
+ // Queue the sample and ask the scheduler thread to wake up.
+ m_mutex.lock();
+ m_scheduledSamples.enqueue(sample);
+ m_mutex.unlock();
+
+ if (SUCCEEDED(hr))
+ PostThreadMessage(m_threadID, Schedule, 0, 0);
+ }
+
+ return hr;
+}
+
+HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)
+{
+ HRESULT hr = S_OK;
+ LONG wait = 0;
+
+ QQueue<ComPtr<IMFSample>> scheduledSamples;
+
+ m_mutex.lock();
+ m_scheduledSamples.swap(scheduledSamples);
+ m_mutex.unlock();
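+ // Swapping under the lock lets us process the queue without holding
+ // m_mutex, so other threads can keep calling scheduleSample() meanwhile.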
+
+ // Process samples until the queue is empty or until the wait time > 0.
+ while (!scheduledSamples.isEmpty()) {
+ ComPtr<IMFSample> sample = scheduledSamples.dequeue();
+
+ // Process the next sample in the queue. If the sample is not ready
+ // for presentation, the value returned in wait is > 0, which means
+ // the scheduler should sleep for that amount of time.
+ if (isSampleReadyToPresent(sample.Get(), &wait)) {
+ m_presenter->presentSample(sample.Get());
+ continue;
+ }
+
+ if (wait > 0) {
+ // return the sample to scheduler
+ scheduledSamples.prepend(sample);
+ break;
+ }
+ }
+
+ m_mutex.lock();
+ scheduledSamples.append(std::move(m_scheduledSamples));
+ m_scheduledSamples.swap(scheduledSamples);
+ m_mutex.unlock();
+
+ // If the wait time is zero, it means we stopped because the queue is
+ // empty (or an error occurred). Set the wait time to infinite; this will
+ // make the scheduler thread sleep until it gets another thread message.
+ if (wait == 0)
+ wait = INFINITE;
+
+ *nextSleep = wait;
+ return hr;
+}
+
+bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const
+{
+ *pNextSleep = 0;
+ if (!m_clock)
+ return true;
+
+ MFTIME hnsPresentationTime = 0;
+ MFTIME hnsTimeNow = 0;
+ MFTIME hnsSystemTime = 0;
+
+ // Get the sample's time stamp. It is valid for a sample to
+ // have no time stamp.
+ HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);
+
+ // Get the clock time. (But if the sample does not have a time stamp,
+ // we don't need the clock time.)
+ if (SUCCEEDED(hr))
+ hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+ // Calculate the time until the sample's presentation time.
+ // A negative value means the sample is late.
+ MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
+ if (m_playbackRate < 0) {
+ // For reverse playback, the clock runs backward. Therefore, the
+ // delta is reversed.
+ hnsDelta = - hnsDelta;
+ }
+
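+ // Tolerance window: samples more than 1/4 frame late are skipped, samples
+ // more than 3/4 frame early are rescheduled, and anything in between is
+ // presented immediately.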
+ if (hnsDelta < - m_perFrame_1_4th) {
+ // This sample is late - skip.
+ return false;
+ } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
+ // This sample came too early - reschedule
+ *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
+
+ // Adjust the sleep time for the clock rate. (The presentation clock runs
+ // at m_fRate, but sleeping uses the system clock.)
+ if (m_playbackRate != 0)
+ *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
+ return *pNextSleep == 0;
+ } else {
+ // This sample can be presented right now
+ return true;
+ }
+}
+
+DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
+{
+ Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
+ if (!scheduler)
+ return -1;
+ return scheduler->schedulerThreadProcPrivate();
+}
+
+DWORD Scheduler::schedulerThreadProcPrivate()
+{
+ HRESULT hr = S_OK;
+ MSG msg;
+ LONG wait = INFINITE;
+ bool exitThread = false;
+
+ // Force the system to create a message queue for this thread.
+ // (See MSDN documentation for PostThreadMessage.)
+ PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
+
+ // Signal to the scheduler that the thread is ready.
+ SetEvent(m_threadReadyEvent.get());
+
+ while (!exitThread) {
+ // Wait for a thread message OR until the wait time expires.
+ DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
+
+ if (result == WAIT_TIMEOUT) {
+ // If we timed out, then process the samples in the queue
+ hr = processSamplesInQueue(&wait);
+ if (FAILED(hr))
+ exitThread = true;
+ }
+
+ while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+ bool processSamples = true;
+
+ switch (msg.message) {
+ case Terminate:
+ exitThread = true;
+ break;
+ case Flush:
+ // Flushing: Clear the sample queue and set the event.
+ m_mutex.lock();
+ m_scheduledSamples.clear();
+ m_mutex.unlock();
+ wait = INFINITE;
+ SetEvent(m_flushEvent.get());
+ break;
+ case Schedule:
+ // Process as many samples as we can.
+ if (processSamples) {
+ hr = processSamplesInQueue(&wait);
+ if (FAILED(hr))
+ exitThread = true;
+ processSamples = (wait != (LONG)INFINITE);
+ }
+ break;
+ }
+ }
+
+ }
+
+ return (SUCCEEDED(hr) ? 0 : 1);
+}
+
+
+SamplePool::SamplePool()
+ : m_initialized(false)
+{
+}
+
+SamplePool::~SamplePool()
+{
+ clear();
+}
+
+ComPtr<IMFSample> SamplePool::takeSample()
+{
+ QMutexLocker locker(&m_mutex);
+
+ Q_ASSERT(m_initialized);
+ if (!m_initialized) {
+ qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+ return nullptr;
+ }
+
+ if (m_videoSampleQueue.isEmpty()) {
+ qCDebug(qLcEvrCustomPresenter) << "SamplePool is empty";
+ return nullptr;
+ }
+
+ // Get a sample from the allocated queue.
+
+ // It doesn't matter whether we take samples from the head or the tail of
+ // the list, but a returned sample is re-inserted at the opposite end
+ // (see returnSample).
+
+ return m_videoSampleQueue.takeFirst();
+}
+
+void SamplePool::returnSample(const ComPtr<IMFSample> &sample)
+{
+ QMutexLocker locker(&m_mutex);
+
+ Q_ASSERT(m_initialized);
+ if (!m_initialized) {
+ qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+ return;
+ }
+
+ m_videoSampleQueue.append(sample);
+}
+
+HRESULT SamplePool::initialize(QList<ComPtr<IMFSample>> &&samples)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (m_initialized)
+ return MF_E_INVALIDREQUEST;
+
+ // Move these samples into our allocated queue.
+ m_videoSampleQueue.append(std::move(samples));
+
+ m_initialized = true;
+
+ return S_OK;
+}
+
+HRESULT SamplePool::clear()
+{
+ QMutexLocker locker(&m_mutex);
+
+ m_videoSampleQueue.clear();
+ m_initialized = false;
+
+ return S_OK;
+}
+
+
+EVRCustomPresenter::EVRCustomPresenter(QVideoSink *sink)
+ : QObject()
+ , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
+ , m_refCount(1)
+ , m_renderState(RenderShutdown)
+ , m_scheduler(this)
+ , m_tokenCounter(0)
+ , m_sampleNotify(false)
+ , m_prerolled(false)
+ , m_endStreaming(false)
+ , m_playbackRate(1.0f)
+ , m_presentEngine(new D3DPresentEngine(sink))
+ , m_mediaType(0)
+ , m_videoSink(0)
+ , m_canRenderToSurface(false)
+ , m_positionOffset(0)
+{
+ // Initial source rectangle = (0,0,1,1)
+ m_sourceRect.top = 0;
+ m_sourceRect.left = 0;
+ m_sourceRect.bottom = 1;
+ m_sourceRect.right = 1;
+
+ setSink(sink);
+}
+
+EVRCustomPresenter::~EVRCustomPresenter()
+{
+ m_scheduler.flush();
+ m_scheduler.stopScheduler();
+ m_samplePool.clear();
+
+ delete m_presentEngine;
+}
+
+HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFGetService) {
+ *ppvObject = static_cast<IMFGetService*>(this);
+ } else if (riid == IID_IMFTopologyServiceLookupClient) {
+ *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
+ } else if (riid == IID_IMFVideoDeviceID) {
+ *ppvObject = static_cast<IMFVideoDeviceID*>(this);
+ } else if (riid == IID_IMFVideoPresenter) {
+ *ppvObject = static_cast<IMFVideoPresenter*>(this);
+ } else if (riid == IID_IMFRateSupport) {
+ *ppvObject = static_cast<IMFRateSupport*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
+ } else if (riid == IID_IMFClockStateSink) {
+ *ppvObject = static_cast<IMFClockStateSink*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+ULONG EVRCustomPresenter::AddRef()
+{
+ return InterlockedIncrement(&m_refCount);
+}
+
+ULONG EVRCustomPresenter::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_refCount);
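+ // Defer destruction to this object's thread via the event loop, since
+ // Release() may be invoked from Media Foundation worker threads.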
+ if (uCount == 0)
+ deleteLater();
+ return uCount;
+}
+
+HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
+{
+ HRESULT hr = S_OK;
+
+ if (!ppvObject)
+ return E_POINTER;
+
+ // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
+ if (guidService != MR_VIDEO_RENDER_SERVICE)
+ return MF_E_UNSUPPORTED_SERVICE;
+
+ // First try to get the service interface from the D3DPresentEngine object.
+ hr = m_presentEngine->getService(guidService, riid, ppvObject);
+ if (FAILED(hr))
+ // Next, check if this object supports the interface.
+ hr = QueryInterface(riid, ppvObject);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID)
+{
+ if (!deviceID)
+ return E_POINTER;
+
+ *deviceID = IID_IDirect3DDevice9;
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
+{
+ if (!lookup)
+ return E_POINTER;
+
+ HRESULT hr = S_OK;
+ DWORD objectCount = 0;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // Do not allow initializing when playing or paused.
+ if (isActive())
+ return MF_E_INVALIDREQUEST;
+
+ m_clock.Reset();
+ m_mixer.Reset();
+ m_mediaEventSink.Reset();
+
+ // Ask for the clock. Optional, because the EVR might not have a clock.
+ objectCount = 1;
+
+ lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
+ &objectCount
+ );
+
+ // Ask for the mixer. (Required.)
+ objectCount = 1;
+
+ hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
+ &objectCount
+ );
+
+ if (FAILED(hr))
+ return hr;
+
+ // Make sure that we can work with this mixer.
+ hr = configureMixer(m_mixer.Get());
+ if (FAILED(hr))
+ return hr;
+
+ // Ask for the EVR's event-sink interface. (Required.)
+ objectCount = 1;
+
+ hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
+ &objectCount
+ );
+
+ if (SUCCEEDED(hr))
+ m_renderState = RenderStopped;
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::ReleaseServicePointers()
+{
+ // Enter the shut-down state.
+ m_mutex.lock();
+
+ m_renderState = RenderShutdown;
+
+ m_mutex.unlock();
+
+ // Flush any samples that were scheduled.
+ flush();
+
+ // Clear the media type and release related resources.
+ setMediaType(NULL);
+
+ // Release all services that were acquired from InitServicePointers.
+ m_clock.Reset();
+ m_mixer.Reset();
+ m_mediaEventSink.Reset();
+
+ return S_OK;
+}
+
+bool EVRCustomPresenter::isValid() const
+{
+ return m_presentEngine->isValid() && m_canRenderToSurface;
+}
+
+HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
+{
+ HRESULT hr = S_OK;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ switch (message) {
+ // Flush all pending samples.
+ case MFVP_MESSAGE_FLUSH:
+ hr = flush();
+ break;
+
+ // Renegotiate the media type with the mixer.
+ case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
+ hr = renegotiateMediaType();
+ break;
+
+ // The mixer received a new input sample.
+ case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
+ hr = processInputNotify();
+ break;
+
+ // Streaming is about to start.
+ case MFVP_MESSAGE_BEGINSTREAMING:
+ hr = beginStreaming();
+ break;
+
+ // Streaming has ended. (The EVR has stopped.)
+ case MFVP_MESSAGE_ENDSTREAMING:
+ hr = endStreaming();
+ break;
+
+ // All input streams have ended.
+ case MFVP_MESSAGE_ENDOFSTREAM:
+ // Set the EOS flag.
+ m_endStreaming = true;
+ // Check if it's time to send the EC_COMPLETE event to the EVR.
+ hr = checkEndOfStream();
+ break;
+
+ // Frame-stepping is starting.
+ case MFVP_MESSAGE_STEP:
+ hr = prepareFrameStep(DWORD(param));
+ break;
+
+ // Cancels frame-stepping.
+ case MFVP_MESSAGE_CANCELSTEP:
+ hr = cancelFrameStep();
+ break;
+
+ default:
+ hr = E_INVALIDARG; // Unknown message. This case should never occur.
+ break;
+ }
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
+{
+ HRESULT hr = S_OK;
+
+ if (!mediaType)
+ return E_POINTER;
+
+ *mediaType = NULL;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ if (!m_mediaType)
+ return MF_E_NOT_INITIALIZED;
+
+ return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
+}
+
+HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // We cannot start after shutdown.
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Check if the clock is already active (not stopped).
+ if (isActive()) {
+ m_renderState = RenderStarted;
+
+ // If the clock position changes while the clock is active, it
+ // is a seek request. We need to flush all pending samples.
+ if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
+ flush();
+ } else {
+ m_renderState = RenderStarted;
+
+ // The clock has started from the stopped state.
+
+ // Possibly we are in the middle of frame-stepping OR have samples waiting
+ // in the frame-step queue. Deal with these two cases first:
+ hr = startFrameStep();
+ if (FAILED(hr))
+ return hr;
+ }
+
+ // Now try to get new output samples from the mixer.
+ processOutputLoop();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockRestart(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // The EVR calls OnClockRestart only while paused.
+
+ m_renderState = RenderStarted;
+
+ // Possibly we are in the middle of frame-stepping OR we have samples waiting
+ // in the frame-step queue. Deal with these two cases first:
+ hr = startFrameStep();
+ if (FAILED(hr))
+ return hr;
+
+ // Now resume the presentation loop.
+ processOutputLoop();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ if (m_renderState != RenderStopped) {
+ m_renderState = RenderStopped;
+ flush();
+
+ // If we are in the middle of frame-stepping, cancel it now.
+ if (m_frameStep.state != FrameStepNone)
+ cancelFrameStep();
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::OnClockPause(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // We cannot pause the clock after shutdown.
+ HRESULT hr = checkShutdown();
+
+ if (SUCCEEDED(hr))
+ m_renderState = RenderPaused;
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate)
+{
+ // Note:
+ // The presenter reports its maximum rate through the IMFRateSupport interface.
+ // Here, we assume that the EVR honors the maximum rate.
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // If the rate is changing from zero (scrubbing) to non-zero, cancel the
+ // frame-step operation.
+ if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
+ cancelFrameStep();
+ m_frameStep.samples.clear();
+ }
+
+ m_playbackRate = rate;
+
+ // Tell the scheduler about the new rate.
+ m_scheduler.setClockRate(rate);
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
+{
+ if (!rate)
+ return E_POINTER;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+
+ if (SUCCEEDED(hr)) {
+ // There is no minimum playback rate, so the minimum is zero.
+ *rate = 0;
+ }
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
+{
+ if (!rate)
+ return E_POINTER;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ float maxRate = 0.0f;
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Get the maximum *forward* rate.
+ maxRate = getMaxRate(thin);
+
+ // For reverse playback, it's the negative of maxRate.
+ if (direction == MFRATE_REVERSE)
+ maxRate = -maxRate;
+
+ *rate = maxRate;
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ float maxRate = 0.0f;
+ float nearestRate = rate; // If we support rate, that is the nearest.
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Find the maximum forward rate.
+ // Note: We have no minimum rate (that is, we support anything down to 0).
+ maxRate = getMaxRate(thin);
+
+ if (qFabs(rate) > maxRate) {
+ // The (absolute) requested rate exceeds the maximum rate.
+ hr = MF_E_UNSUPPORTED_RATE;
+
+ // The nearest supported rate is maxRate.
+ nearestRate = maxRate;
+ if (rate < 0) {
+ // Negative for reverse playback.
+ nearestRate = -nearestRate;
+ }
+ }
+
+ // Return the nearest supported rate.
+ if (nearestSupportedRate)
+ *nearestSupportedRate = nearestRate;
+
+ return hr;
+}
+
+void EVRCustomPresenter::supportedFormatsChanged()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ m_canRenderToSurface = false;
+
+ // check if we can render to the surface (compatible formats)
+ if (m_videoSink) {
+ for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
+ // ### set a better preference order
+ QVideoFrameFormat::PixelFormat format = QVideoFrameFormat::PixelFormat(f);
+ if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
+ m_canRenderToSurface = true;
+ break;
+ }
+ }
+ }
+
+ // TODO: if media type already set, renegotiate?
+}
+
+void EVRCustomPresenter::setSink(QVideoSink *sink)
+{
+ m_mutex.lock();
+ m_videoSink = sink;
+ m_presentEngine->setSink(sink);
+ m_mutex.unlock();
+
+ supportedFormatsChanged();
+}
+
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+ m_mutex.lock();
+ m_cropRect = cropRect;
+ m_mutex.unlock();
+}
+
+HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
+{
+ // Set the zoom rectangle (i.e., the source clipping rectangle).
+ return setMixerSourceRect(mixer, m_sourceRect);
+}
+
+HRESULT EVRCustomPresenter::renegotiateMediaType()
+{
+ HRESULT hr = S_OK;
+ bool foundMediaType = false;
+
+ IMFMediaType *mixerType = NULL;
+ IMFMediaType *optimalType = NULL;
+
+ if (!m_mixer)
+ return MF_E_INVALIDREQUEST;
+
+ // Loop through all of the mixer's proposed output types.
+ DWORD typeIndex = 0;
+ while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
+ qt_evr_safe_release(&mixerType);
+ qt_evr_safe_release(&optimalType);
+
+ // Step 1. Get the next media type supported by mixer.
+ hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
+ if (FAILED(hr))
+ break;
+
+ // From now on, if anything in this loop fails, try the next type,
+ // until we succeed or the mixer runs out of types.
+
+ // Step 2. Check if we support this media type.
+ if (SUCCEEDED(hr))
+ hr = isMediaTypeSupported(mixerType);
+
+ // Step 3. Adjust the mixer's type to match our requirements.
+ if (SUCCEEDED(hr))
+ hr = createOptimalVideoType(mixerType, &optimalType);
+
+ // Step 4. Check if the mixer will accept this media type.
+ if (SUCCEEDED(hr))
+ hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
+
+ // Step 5. Try to set the media type on ourselves.
+ if (SUCCEEDED(hr))
+ hr = setMediaType(optimalType);
+
+ // Step 6. Set output media type on mixer.
+ if (SUCCEEDED(hr)) {
+ hr = m_mixer->SetOutputType(0, optimalType, 0);
+
+ // If something went wrong, clear the media type.
+ if (FAILED(hr))
+ setMediaType(NULL);
+ }
+
+ if (SUCCEEDED(hr))
+ foundMediaType = true;
+ }
+
+ qt_evr_safe_release(&mixerType);
+ qt_evr_safe_release(&optimalType);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::flush()
+{
+ m_prerolled = false;
+
+ // The scheduler might have samples that are waiting for
+ // their presentation time. Tell the scheduler to flush.
+
+ // This call blocks until the scheduler thread discards all scheduled samples.
+ m_scheduler.flush();
+
+ // Flush the frame-step queue.
+ m_frameStep.samples.clear();
+
+ if (m_renderState == RenderStopped && m_videoSink) {
+ // Repaint with black.
+ presentSample(nullptr);
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::processInputNotify()
+{
+ HRESULT hr = S_OK;
+
+ // Set the flag that says the mixer has a new sample.
+ m_sampleNotify = true;
+
+ if (!m_mediaType) {
+ // We don't have a valid media type yet.
+ hr = MF_E_TRANSFORM_TYPE_NOT_SET;
+ } else {
+ // Try to process an output sample.
+ processOutputLoop();
+ }
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::beginStreaming()
+{
+ HRESULT hr = S_OK;
+
+ // Start the scheduler thread.
+ hr = m_scheduler.startScheduler(m_clock);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::endStreaming()
+{
+ HRESULT hr = S_OK;
+
+ // Stop the scheduler thread.
+ hr = m_scheduler.stopScheduler();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::checkEndOfStream()
+{
+ if (!m_endStreaming) {
+ // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
+ return S_OK;
+ }
+
+ if (m_sampleNotify) {
+ // The mixer still has input.
+ return S_OK;
+ }
+
+ if (m_scheduler.areSamplesScheduled()) {
+ // Samples are still scheduled for rendering.
+ return S_OK;
+ }
+
+ // Everything is complete. Now we can tell the EVR that we are done.
+ notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
+ m_endStreaming = false;
+
+ stopSurface();
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
+{
+ HRESULT hr = S_OK;
+
+ // Cache the step count.
+ m_frameStep.steps += steps;
+
+ // Set the frame-step state.
+ m_frameStep.state = FrameStepWaitingStart;
+
+ // If the clock is already running, we can start frame-stepping now.
+ // Otherwise, we will start when the clock starts.
+ if (m_renderState == RenderStarted)
+ hr = startFrameStep();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::startFrameStep()
+{
+ if (m_frameStep.state == FrameStepWaitingStart) {
+ // We have a frame-step request, and are waiting for the clock to start.
+ // Set the state to "pending," which means we are waiting for samples.
+ m_frameStep.state = FrameStepPending;
+
+ // If the frame-step queue already has samples, process them now.
+ while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
+ const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+ const HRESULT hr = deliverFrameStepSample(sample.Get());
+ if (FAILED(hr))
+ return hr;
+
+ // We break from this loop when:
+ // (a) the frame-step queue is empty, or
+ // (b) the frame-step operation is complete.
+ }
+ } else if (m_frameStep.state == FrameStepNone) {
+ // We are not frame stepping. Therefore, if the frame-step queue has samples,
+ // we need to process them normally.
+ while (!m_frameStep.samples.isEmpty()) {
+ const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+ const HRESULT hr = deliverSample(sample.Get());
+ if (FAILED(hr))
+ return hr;
+ }
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::completeFrameStep(const ComPtr<IMFSample> &sample)
+{
+ HRESULT hr = S_OK;
+ MFTIME sampleTime = 0;
+ MFTIME systemTime = 0;
+
+ // Update our state.
+ m_frameStep.state = FrameStepComplete;
+ m_frameStep.sampleNoRef = 0;
+
+ // Notify the EVR that the frame-step is complete.
+ notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)
+
+ // If we are scrubbing (rate == 0), also send the "scrub time" event.
+ if (isScrubbing()) {
+ // Get the time stamp from the sample.
+ hr = sample->GetSampleTime(&sampleTime);
+ if (FAILED(hr)) {
+ // No time stamp. Use the current presentation time.
+ if (m_clock)
+ m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
+
+ hr = S_OK; // (Not an error condition.)
+ }
+
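+ // EC_SCRUB_TIME carries the 64-bit time stamp split across both event
+ // parameters: low 32 bits in param1, high 32 bits in param2.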
+ notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
+ }
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::cancelFrameStep()
+{
+ FrameStepState oldState = m_frameStep.state;
+
+ m_frameStep.state = FrameStepNone;
+ m_frameStep.steps = 0;
+ m_frameStep.sampleNoRef = 0;
+ // Don't clear the frame-step queue yet, because we might frame step again.
+
+ if (oldState > FrameStepNone && oldState < FrameStepComplete) {
+ // We were in the middle of frame-stepping when it was cancelled.
+ // Notify the EVR.
+ notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
+ }
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
+{
+ HRESULT hr = S_OK;
+
+ RECT rcOutput;
+ ZeroMemory(&rcOutput, sizeof(rcOutput));
+
+ MFVideoArea displayArea;
+ ZeroMemory(&displayArea, sizeof(displayArea));
+
+ IMFMediaType *mtOptimal = NULL;
+
+ UINT64 size;
+ int width;
+ int height;
+
+ // Clone the proposed type.
+
+ hr = MFCreateMediaType(&mtOptimal);
+ if (FAILED(hr))
+ goto done;
+
+ hr = proposedType->CopyAllItems(mtOptimal);
+ if (FAILED(hr))
+ goto done;
+
+ // Modify the new type.
+
+ hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
+ width = int(HI32(size));
+ height = int(LO32(size));
+
+ if (m_cropRect.isValid()) {
+ rcOutput.left = m_cropRect.x();
+ rcOutput.top = m_cropRect.y();
+ rcOutput.right = m_cropRect.x() + m_cropRect.width();
+ rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+ m_sourceRect.left = float(m_cropRect.x()) / width;
+ m_sourceRect.top = float(m_cropRect.y()) / height;
+ m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+ m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+ if (m_mixer)
+ configureMixer(m_mixer.Get());
+ } else {
+ rcOutput.left = 0;
+ rcOutput.top = 0;
+ rcOutput.right = width;
+ rcOutput.bottom = height;
+ }
+
+ // Set the geometric aperture, and disable pan/scan.
+ displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+ rcOutput.bottom - rcOutput.top);
+
+ hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
+ if (FAILED(hr))
+ goto done;
+
+ hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+ // Set the pan/scan aperture and the minimum display aperture. We don't care
+ // about them per se, but the mixer will reject the type if they exceed the
+ // frame dimensions.
+ hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+ hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+ // Return the pointer to the caller.
+ *optimalType = mtOptimal;
+ (*optimalType)->AddRef();
+
+done:
+ qt_evr_safe_release(&mtOptimal);
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
+{
+ // Note: mediaType can be NULL (to clear the type)
+
+ // Clearing the media type is allowed in any state (including shutdown).
+ if (!mediaType) {
+ stopSurface();
+ m_mediaType.Reset();
+ releaseResources();
+ return S_OK;
+ }
+
+ MFRatio fps = { 0, 0 };
+ QList<ComPtr<IMFSample>> sampleQueue;
+
+ // Cannot set the media type after shutdown.
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ goto done;
+
+ // Check if the new type is actually different.
+ // Note: This function safely handles NULL input parameters.
+ if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
+ goto done; // Nothing more to do.
+
+ // We're really changing the type. First get rid of the old type.
+ m_mediaType.Reset();
+ releaseResources();
+
+ // Initialize the presenter engine with the new media type.
+ // The presenter engine allocates the samples.
+
+ hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
+ if (FAILED(hr))
+ goto done;
+
+ // Mark each sample with our token counter. If this batch of samples becomes
+ // invalid, we increment the counter, so that we know they should be discarded.
+ for (auto sample : std::as_const(sampleQueue)) {
+ hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
+ if (FAILED(hr))
+ goto done;
+ }
+
+ // Add the samples to the sample pool.
+ hr = m_samplePool.initialize(std::move(sampleQueue));
+ if (FAILED(hr))
+ goto done;
+
+ // Set the frame rate on the scheduler.
+ if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
+ m_scheduler.setFrameRate(fps);
+ } else {
+ // NOTE: The mixer's proposed type might not have a frame rate, in which case
+ // we'll use an arbitrary default. (Although it's unlikely the video source
+ // does not have a frame rate.)
+ m_scheduler.setFrameRate(g_DefaultFrameRate);
+ }
+
+ // Store the media type.
+ m_mediaType = mediaType;
+ m_mediaType->AddRef();
+
+ startSurface();
+
+done:
+ if (FAILED(hr))
+ releaseResources();
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
+{
+ D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
+ BOOL compressed = FALSE;
+ MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
+ MFVideoArea videoCropArea;
+ UINT32 width = 0, height = 0;
+
+ // Validate the format.
+ HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
+ if (FAILED(hr))
+ return hr;
+
+ QVideoFrameFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ return MF_E_INVALIDMEDIATYPE;
+
+ // Reject compressed media types.
+ hr = proposed->IsCompressedFormat(&compressed);
+ if (FAILED(hr))
+ return hr;
+
+ if (compressed)
+ return MF_E_INVALIDMEDIATYPE;
+
+ // The D3DPresentEngine checks whether surfaces can be created using this format
+ hr = m_presentEngine->checkFormat(d3dFormat);
+ if (FAILED(hr))
+ return hr;
+
+ // Reject interlaced formats.
+ hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
+ if (FAILED(hr))
+ return hr;
+
+ if (interlaceMode != MFVideoInterlace_Progressive)
+ return MF_E_INVALIDMEDIATYPE;
+
+ hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
+ if (FAILED(hr))
+ return hr;
+
+ // Validate the various apertures (cropping regions) against the frame size.
+ // Any of these apertures may be unspecified in the media type, in which case
+ // we ignore it. We just want to reject invalid apertures.
+
+ if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
+ reinterpret_cast<UINT8*>(&videoCropArea),
+ sizeof(videoCropArea), nullptr))) {
+ hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+ }
+ if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
+ reinterpret_cast<UINT8*>(&videoCropArea),
+ sizeof(videoCropArea), nullptr))) {
+ hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+ }
+ if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
+ reinterpret_cast<UINT8*>(&videoCropArea),
+ sizeof(videoCropArea), nullptr))) {
+ hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+ }
+ return hr;
+}
+
+void EVRCustomPresenter::processOutputLoop()
+{
+ HRESULT hr = S_OK;
+
+ // Process as many samples as possible.
+ while (hr == S_OK) {
+ // If the mixer doesn't have a new input sample, break from the loop.
+ if (!m_sampleNotify) {
+ hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
+ break;
+ }
+
+ // Try to process a sample.
+ hr = processOutput();
+
+ // NOTE: ProcessOutput can return S_FALSE to indicate it did not
+ // process a sample. If so, break out of the loop.
+ }
+
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ // The mixer has run out of input data. Check for end-of-stream.
+ checkEndOfStream();
+ }
+}
+
+HRESULT EVRCustomPresenter::processOutput()
+{
+ // If the clock is not running, we present the first sample,
+ // and then don't present any more until the clock starts.
+ if ((m_renderState != RenderStarted) && m_prerolled)
+ return S_FALSE;
+
+ // Make sure we have a pointer to the mixer.
+ if (!m_mixer)
+ return MF_E_INVALIDREQUEST;
+
+ // Try to get a free sample from the video sample pool.
+ ComPtr<IMFSample> sample = m_samplePool.takeSample();
+ if (!sample)
+ return S_FALSE; // No free samples. Try again when a sample is released.
+
+ // From now on, we have a valid video sample pointer, where the mixer will
+ // write the video data.
+
+ LONGLONG mixerStartTime = 0, mixerEndTime = 0;
+ MFTIME systemTime = 0;
+
+ if (m_clock) {
+ // Latency: Record the starting time for ProcessOutput.
+ m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
+ }
+
+ // Now we are ready to get an output sample from the mixer.
+ DWORD status = 0;
+ MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
+ dataBuffer.pSample = sample.Get();
+ HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
+ // Important: Release any events returned from the ProcessOutput method.
+ qt_evr_safe_release(&dataBuffer.pEvents);
+
+ if (FAILED(hr)) {
+ // Return the sample to the pool.
+ m_samplePool.returnSample(sample);
+
+ // Handle some known error codes from ProcessOutput.
+ if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
+ // The mixer's format is not set. Negotiate a new format.
+ hr = renegotiateMediaType();
+ } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+ // There was a dynamic media type change. Clear our media type.
+ setMediaType(NULL);
+ } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ // The mixer needs more input.
+ // We have to wait for the mixer to get more input.
+ m_sampleNotify = false;
+ }
+
+ return hr;
+ }
+
+ // We got an output sample from the mixer.
+ if (m_clock) {
+ // Latency: Record the ending time for the ProcessOutput operation,
+ // and notify the EVR of the latency.
+
+ m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);
+
+ LONGLONG latencyTime = mixerEndTime - mixerStartTime;
+ notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
+ }
+
+ // Set up notification for when the sample is released.
+ hr = trackSample(sample);
+ if (FAILED(hr))
+ return hr;
+
+ // Schedule the sample.
+ if (m_frameStep.state == FrameStepNone)
+ hr = deliverSample(sample);
+ else // We are frame-stepping
+ hr = deliverFrameStepSample(sample);
+
+ if (FAILED(hr))
+ return hr;
+
+ m_prerolled = true; // We have presented at least one sample now.
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::deliverSample(const ComPtr<IMFSample> &sample)
+{
+ // If we are not actively playing, OR we are scrubbing (rate = 0),
+ // then we need to present the sample immediately. Otherwise,
+ // schedule it normally.
+
+ bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
+
+ HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
+
+ if (FAILED(hr)) {
+ // Notify the EVR that we have failed during streaming. The EVR will notify the
+ // pipeline.
+
+ notifyEvent(EC_ERRORABORT, hr, 0);
+ }
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::deliverFrameStepSample(const ComPtr<IMFSample> &sample)
+{
+ HRESULT hr = S_OK;
+ IUnknown *unk = NULL;
+
+ // For rate 0, discard any sample that ends earlier than the clock time.
+ if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+ // Discard this sample.
+ } else if (m_frameStep.state >= FrameStepScheduled) {
+ // A frame was already submitted. Put this sample on the frame-step queue,
+ // in case we are asked to step to the next frame. If frame-stepping is
+ // cancelled, this sample will be processed normally.
+ m_frameStep.samples.append(sample);
+ } else {
+ // We're ready to frame-step.
+
+ // Decrement the number of steps.
+ if (m_frameStep.steps > 0)
+ m_frameStep.steps--;
+
+ if (m_frameStep.steps > 0) {
+ // This is not the last step. Discard this sample.
+ } else if (m_frameStep.state == FrameStepWaitingStart) {
+ // This is the right frame, but the clock hasn't started yet. Put the
+ // sample on the frame-step queue. When the clock starts, the sample
+ // will be processed.
+ m_frameStep.samples.append(sample);
+ } else {
+ // This is the right frame *and* the clock has started. Deliver this sample.
+ hr = deliverSample(sample);
+ if (FAILED(hr))
+ goto done;
+
+ // Query for IUnknown so that we can identify the sample later.
+ // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
+ hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+ if (FAILED(hr))
+ goto done;
+
+ m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.
+
+ // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
+ // sample from invoking the OnSampleFree callback after the sample is presented.
+ // We use this IUnknown pointer purely to identify the sample later; we never
+ // attempt to dereference the pointer.
+
+ m_frameStep.state = FrameStepScheduled;
+ }
+ }
+done:
+ qt_evr_safe_release(&unk);
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::trackSample(const ComPtr<IMFSample> &sample)
+{
+ IMFTrackedSample *tracked = NULL;
+
+ HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
+
+ if (SUCCEEDED(hr))
+ hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
+
+ qt_evr_safe_release(&tracked);
+ return hr;
+}
+
+void EVRCustomPresenter::releaseResources()
+{
+ // Increment the token counter to indicate that all existing video samples
+ // are "stale." As these samples get released, we'll dispose of them.
+ //
+ // Note: The token counter is required because the samples are shared
+ // between more than one thread, and they are returned to the presenter
+ // through an asynchronous callback (onSampleFree). Without the token, we
+ // might accidentally re-use a stale sample after the ReleaseResources
+ // method returns.
+
+ m_tokenCounter++;
+
+ flush();
+
+ m_samplePool.clear();
+
+ m_presentEngine->releaseResources();
+}
+
+HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
+{
+ IUnknown *object = NULL;
+ IMFSample *sample = NULL;
+ IUnknown *unk = NULL;
+ UINT32 token;
+
+ // Get the sample from the async result object.
+ HRESULT hr = result->GetObject(&object);
+ if (FAILED(hr))
+ goto done;
+
+ hr = object->QueryInterface(IID_PPV_ARGS(&sample));
+ if (FAILED(hr))
+ goto done;
+
+ // If this sample was submitted for a frame-step, the frame step operation
+ // is complete.
+
+ if (m_frameStep.state == FrameStepScheduled) {
+ // Query the sample for IUnknown and compare it to our cached value.
+ hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+ if (FAILED(hr))
+ goto done;
+
+ if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
+ // Notify the EVR.
+ hr = completeFrameStep(sample);
+ if (FAILED(hr))
+ goto done;
+ }
+
+ // Note: Although object is also an IUnknown pointer, it is not
+ // guaranteed to be the exact pointer value returned through
+ // QueryInterface. Therefore, the second QueryInterface call is
+ // required.
+ }
+
+ m_mutex.lock();
+
+ token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
+
+ if (token == m_tokenCounter) {
+ // Return the sample to the sample pool.
+ m_samplePool.returnSample(sample);
+ // A free sample is available. Process more data if possible.
+ processOutputLoop();
+ }
+
+ m_mutex.unlock();
+
+done:
+ if (FAILED(hr))
+ notifyEvent(EC_ERRORABORT, hr, 0);
+ qt_evr_safe_release(&object);
+ qt_evr_safe_release(&sample);
+ qt_evr_safe_release(&unk);
+ return hr;
+}
+
+float EVRCustomPresenter::getMaxRate(bool thin)
+{
+ // Non-thinned:
+ // If we have a valid frame rate and a monitor refresh rate, the maximum
+ // playback rate is equal to the refresh rate. Otherwise, the maximum rate
+ // is unbounded (FLT_MAX).
+
+ // Thinned: The maximum rate is unbounded.
+
+ float maxRate = FLT_MAX;
+ MFRatio fps = { 0, 0 };
+ UINT monitorRateHz = 0;
+
+ if (!thin && m_mediaType) {
+ qt_evr_getFrameRate(m_mediaType.Get(), &fps);
+ monitorRateHz = m_presentEngine->refreshRate();
+
+ if (fps.Denominator && fps.Numerator && monitorRateHz) {
+ // Max Rate = Refresh Rate / Frame Rate
+ maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
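+ // e.g. a 60 Hz monitor with 25 fps content gives MulDiv(60, 1, 25) = 2
+ // (MulDiv rounds to the nearest integer).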
+ }
+ }
+
+ return maxRate;
+}
+
+bool EVRCustomPresenter::event(QEvent *e)
+{
+ switch (int(e->type())) {
+ case StartSurface:
+ startSurface();
+ return true;
+ case StopSurface:
+ stopSurface();
+ return true;
+ case PresentSample:
+ presentSample(static_cast<PresentSampleEvent *>(e)->sample());
+ return true;
+ default:
+ break;
+ }
+ return QObject::event(e);
+}
+
+void EVRCustomPresenter::startSurface()
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface)));
+ return;
+ }
+}
+
+void EVRCustomPresenter::stopSurface()
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StopSurface)));
+ return;
+ }
+}
+
+void EVRCustomPresenter::presentSample(const ComPtr<IMFSample> &sample)
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new PresentSampleEvent(sample));
+ return;
+ }
+
+ if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
+ return;
+
+ QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);
+
+ // Sample start/end times are relative to the point at which the clock was
+ // started, so add the last seek position to make them relative to the
+ // beginning of the stream.
+ if (m_positionOffset) {
+ if (frame.startTime())
+ frame.setStartTime(frame.startTime() + m_positionOffset);
+ if (frame.endTime())
+ frame.setEndTime(frame.endTime() + m_positionOffset);
+ }
+
+ ComPtr<IMFMediaType> inputStreamType;
+ if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
+ auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
+ switch (rotation) {
+ case MFVideoRotationFormat_0: frame.setRotation(QtVideo::Rotation::None); break;
+ case MFVideoRotationFormat_90: frame.setRotation(QtVideo::Rotation::Clockwise90); break;
+ case MFVideoRotationFormat_180: frame.setRotation(QtVideo::Rotation::Clockwise180); break;
+ case MFVideoRotationFormat_270: frame.setRotation(QtVideo::Rotation::Clockwise270); break;
+ default: frame.setRotation(QtVideo::Rotation::None);
+ }
+ }
+
+ m_videoSink->platformVideoSink()->setVideoFrame(frame);
+}
+
+void EVRCustomPresenter::positionChanged(qint64 position)
+{
+ m_positionOffset = position * 1000;
+}
+
+HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
+{
+ if (!mixer)
+ return E_POINTER;
+
+ IMFAttributes *attributes = NULL;
+
+ HRESULT hr = mixer->GetAttributes(&attributes);
+ if (SUCCEEDED(hr)) {
+ hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
+ sizeof(sourceRect));
+ attributes->Release();
+ }
+ return hr;
+}
+
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+{
+ GUID majorType;
+ if (FAILED(type->GetMajorType(&majorType)))
+ return QVideoFrameFormat::Format_Invalid;
+ if (majorType != MFMediaType_Video)
+ return QVideoFrameFormat::Format_Invalid;
+
+ GUID subtype;
+ if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ return QVideoFrameFormat::Format_Invalid;
+
+ return QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
new file mode 100644
index 000000000..28f1cbc68
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
@@ -0,0 +1,357 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRCUSTOMPRESENTER_H
+#define EVRCUSTOMPRESENTER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <qmutex.h>
+#include <qqueue.h>
+#include <qevent.h>
+#include <qrect.h>
+#include <qvideoframeformat.h>
+#include <qvideosink.h>
+#include <qpointer.h>
+#include <private/qcomptr_p.h>
+#include "evrhelpers_p.h"
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+
+QT_BEGIN_NAMESPACE
+
+class EVRCustomPresenter;
+class D3DPresentEngine;
+
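+// Routes IMFAsyncCallback::Invoke to a member function of the parent class,
+// e.g. AsyncCallback<EVRCustomPresenter> cb(this, &EVRCustomPresenter::onSampleFree);
+// AddRef()/Release() delegate to the parent, so the callback shares the
+// parent's lifetime and must not outlive it.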
+template<class T>
+class AsyncCallback : public IMFAsyncCallback
+{
+ Q_DISABLE_COPY(AsyncCallback)
+public:
+ typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
+
+ AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
+ {
+ }
+
+ // IUnknown
+ STDMETHODIMP QueryInterface(REFIID iid, void** ppv) override
+ {
+ if (!ppv)
+ return E_POINTER;
+
+ if (iid == __uuidof(IUnknown)) {
+ *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
+ } else if (iid == __uuidof(IMFAsyncCallback)) {
+ *ppv = static_cast<IMFAsyncCallback*>(this);
+ } else {
+ *ppv = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+ }
+
+ STDMETHODIMP_(ULONG) AddRef() override {
+ // Delegate to parent class.
+ return m_parent->AddRef();
+ }
+ STDMETHODIMP_(ULONG) Release() override {
+ // Delegate to parent class.
+ return m_parent->Release();
+ }
+
+ // IMFAsyncCallback methods
+ STDMETHODIMP GetParameters(DWORD*, DWORD*) override
+ {
+ // Implementation of this method is optional.
+ return E_NOTIMPL;
+ }
+
+ STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) override
+ {
+ return (m_parent->*m_invokeFn)(asyncResult);
+ }
+
+ T *m_parent;
+ InvokeFn m_invokeFn;
+};
+
+class Scheduler
+{
+ Q_DISABLE_COPY(Scheduler)
+public:
+ enum ScheduleEvent
+ {
+ Terminate = WM_USER,
+ Schedule = WM_USER + 1,
+ Flush = WM_USER + 2
+ };
+
+ Scheduler(EVRCustomPresenter *presenter);
+ ~Scheduler();
+
+ void setFrameRate(const MFRatio &fps);
+ void setClockRate(float rate) { m_playbackRate = rate; }
+
+ HRESULT startScheduler(ComPtr<IMFClock> clock);
+ HRESULT stopScheduler();
+
+ HRESULT scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow);
+ HRESULT processSamplesInQueue(LONG *nextSleep);
+ HRESULT flush();
+
+ bool areSamplesScheduled();
+
+ // ThreadProc for the scheduler thread.
+ static DWORD WINAPI schedulerThreadProc(LPVOID parameter);
+
+private:
+ DWORD schedulerThreadProcPrivate();
+ bool isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const;
+
+ EVRCustomPresenter *m_presenter;
+
+ QQueue<ComPtr<IMFSample>> m_scheduledSamples; // Samples waiting to be presented.
+
+ ComPtr<IMFClock> m_clock; // Presentation clock. Can be NULL.
+
+ DWORD m_threadID;
+ ThreadHandle m_schedulerThread;
+ EventHandle m_threadReadyEvent;
+ EventHandle m_flushEvent;
+
+ float m_playbackRate;
+ MFTIME m_perFrame_1_4th; // 1/4th of the frame duration.
+
+ QMutex m_mutex;
+};
+
+class SamplePool
+{
+ Q_DISABLE_COPY(SamplePool)
+public:
+ SamplePool();
+ ~SamplePool();
+
+ HRESULT initialize(QList<ComPtr<IMFSample>> &&samples);
+ HRESULT clear();
+
+ ComPtr<IMFSample> takeSample();
+ void returnSample(const ComPtr<IMFSample> &sample);
+
+private:
+ QMutex m_mutex;
+ QList<ComPtr<IMFSample>> m_videoSampleQueue;
+ bool m_initialized;
+};
+
+class EVRCustomPresenter
+ : public QObject
+ , public IMFVideoDeviceID
+ , public IMFVideoPresenter // Inherits IMFClockStateSink
+ , public IMFRateSupport
+ , public IMFGetService
+ , public IMFTopologyServiceLookupClient
+{
+ Q_DISABLE_COPY(EVRCustomPresenter)
+public:
+ // Defines the state of the presenter.
+ enum RenderState
+ {
+ RenderStarted = 1,
+ RenderStopped,
+ RenderPaused,
+ RenderShutdown // Initial state.
+ };
+
+ // Defines the presenter's state with respect to frame-stepping.
+ enum FrameStepState
+ {
+ FrameStepNone, // Not frame stepping.
+ FrameStepWaitingStart, // Frame stepping, but the clock is not started.
+ FrameStepPending, // Clock is started. Waiting for samples.
+ FrameStepScheduled, // Submitted a sample for rendering.
+ FrameStepComplete // Sample was rendered.
+ };
+
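+ // Custom QEvent types used to marshal startSurface(), stopSurface() and
+ // presentSample() calls onto the presenter's thread (see event()).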
+ enum PresenterEvents
+ {
+ StartSurface = QEvent::User,
+ StopSurface = QEvent::User + 1,
+ PresentSample = QEvent::User + 2
+ };
+
+ EVRCustomPresenter(QVideoSink *sink = 0);
+ ~EVRCustomPresenter() override;
+
+ bool isValid() const;
+
+ // IUnknown methods
+ STDMETHODIMP QueryInterface(REFIID riid, void ** ppv) override;
+ STDMETHODIMP_(ULONG) AddRef() override;
+ STDMETHODIMP_(ULONG) Release() override;
+
+ // IMFGetService methods
+ STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override;
+
+ // IMFVideoPresenter methods
+ STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override;
+ STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType) override;
+
+ // IMFClockStateSink methods
+ STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override;
+ STDMETHODIMP OnClockStop(MFTIME systemTime) override;
+ STDMETHODIMP OnClockPause(MFTIME systemTime) override;
+ STDMETHODIMP OnClockRestart(MFTIME systemTime) override;
+ STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override;
+
+ // IMFRateSupport methods
+ STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+ STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+ STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override;
+
+ // IMFVideoDeviceID methods
+ STDMETHODIMP GetDeviceID(IID* deviceID) override;
+
+ // IMFTopologyServiceLookupClient methods
+ STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override;
+ STDMETHODIMP ReleaseServicePointers() override;
+
+ void supportedFormatsChanged();
+ void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
+
+ void startSurface();
+ void stopSurface();
+ void presentSample(const ComPtr<IMFSample> &sample);
+
+ bool event(QEvent *) override;
+
+public Q_SLOTS:
+ void positionChanged(qint64 position);
+
+private:
+ HRESULT checkShutdown() const
+ {
+ if (m_renderState == RenderShutdown)
+ return MF_E_SHUTDOWN;
+ else
+ return S_OK;
+ }
+
+ // The "active" state is started or paused.
+ inline bool isActive() const
+ {
+ return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
+ }
+
+ // Scrubbing occurs when the frame rate is 0.
+ inline bool isScrubbing() const { return m_playbackRate == 0.0f; }
+
+ // Send an event to the EVR through its IMediaEventSink interface.
+ void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
+ {
+ if (m_mediaEventSink)
+ m_mediaEventSink->Notify(eventCode, param1, param2);
+ }
+
+ float getMaxRate(bool thin);
+
+ // Mixer operations
+ HRESULT configureMixer(IMFTransform *mixer);
+
+ // Formats
+ HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
+ HRESULT setMediaType(IMFMediaType *mediaType);
+ HRESULT isMediaTypeSupported(IMFMediaType *mediaType);
+
+ // Message handlers
+ HRESULT flush();
+ HRESULT renegotiateMediaType();
+ HRESULT processInputNotify();
+ HRESULT beginStreaming();
+ HRESULT endStreaming();
+ HRESULT checkEndOfStream();
+
+ // Managing samples
+ void processOutputLoop();
+ HRESULT processOutput();
+ HRESULT deliverSample(const ComPtr<IMFSample> &sample);
+ HRESULT trackSample(const ComPtr<IMFSample> &sample);
+ void releaseResources();
+
+ // Frame-stepping
+ HRESULT prepareFrameStep(DWORD steps);
+ HRESULT startFrameStep();
+ HRESULT deliverFrameStepSample(const ComPtr<IMFSample> &sample);
+ HRESULT completeFrameStep(const ComPtr<IMFSample> &sample);
+ HRESULT cancelFrameStep();
+
+ // Callback when a video sample is released.
+ HRESULT onSampleFree(IMFAsyncResult *result);
+ AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
+
+ // Holds information related to frame-stepping.
+ struct FrameStep
+ {
+ FrameStepState state = FrameStepNone;
+ QList<ComPtr<IMFSample>> samples;
+ DWORD steps = 0;
+ DWORD_PTR sampleNoRef = 0;
+ };
+
+ long m_refCount;
+
+ RenderState m_renderState;
+ FrameStep m_frameStep;
+
+ QRecursiveMutex m_mutex;
+
+ // Samples and scheduling
+ Scheduler m_scheduler; // Manages scheduling of samples.
+ SamplePool m_samplePool; // Pool of allocated samples.
+ DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.
+
+ // Rendering state
+ bool m_sampleNotify; // Did the mixer signal it has an input sample?
+ bool m_prerolled; // Have we presented at least one sample?
+ bool m_endStreaming; // Did we reach the end of the stream (EOS)?
+
+ MFVideoNormalizedRect m_sourceRect;
+ float m_playbackRate;
+
+ D3DPresentEngine *m_presentEngine; // Rendering engine. (Never null if the constructor succeeds.)
+
+ ComPtr<IMFClock> m_clock; // The EVR's clock.
+ ComPtr<IMFTransform> m_mixer; // The EVR's mixer.
+ ComPtr<IMediaEventSink> m_mediaEventSink; // The EVR's event-sink interface.
+ ComPtr<IMFMediaType> m_mediaType; // Output media type
+
+ QPointer<QVideoSink> m_videoSink;
+ bool m_canRenderToSurface;
+ qint64 m_positionOffset; // Seek position in microseconds.
+ QRect m_cropRect; // Video crop rectangle
+};
+
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
+
+QT_END_NAMESPACE
+
+#endif // EVRCUSTOMPRESENTER_H
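
Most of the IMF* entry points declared above arrive on Media Foundation worker threads, while the QVideoSink may only be touched from the thread that owns the presenter; the PresenterEvents codes exist to make that hop. A minimal sketch of the assumed wiring (the shipped logic lives in evrcustompresenter.cpp and differs in detail):

    // Sketch only: re-post work onto the presenter's thread via QEvent.
    void EVRCustomPresenter::startSurface()
    {
        if (QThread::currentThread() != thread()) {
            // postEvent is thread-safe; event() below runs on this object's thread.
            QCoreApplication::postEvent(
                    this, new QEvent(static_cast<QEvent::Type>(StartSurface)));
            return;
        }
        // ... now on the right thread: start presenting to m_videoSink ...
    }

    bool EVRCustomPresenter::event(QEvent *e)
    {
        if (e->type() == static_cast<QEvent::Type>(StartSurface)) {
            startSurface();
            return true;
        }
        return QObject::event(e);
    }
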
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
new file mode 100644
index 000000000..517f1d969
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -0,0 +1,699 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrd3dpresentengine_p.h"
+
+#include "evrhelpers_p.h"
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <qvideoframe.h>
+#include <QDebug>
+#include <qthread.h>
+#include <qvideosink.h>
+#include <qloggingcategory.h>
+
+#include <d3d11_1.h>
+
+#include <rhi/qrhi.h>
+
+#if QT_CONFIG(opengl)
+# include <qopenglcontext.h>
+# include <qopenglfunctions.h>
+# include <qoffscreensurface.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine");
+
+class IMFSampleVideoBuffer : public QHwVideoBuffer
+{
+public:
+ IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
+ : QHwVideoBuffer(type, rhi),
+ m_device(device),
+ m_sample(sample),
+ m_mapMode(QtVideo::MapMode::NotMapped)
+ {
+ }
+
+ ~IMFSampleVideoBuffer() override
+ {
+ if (m_memSurface && m_mapMode != QtVideo::MapMode::NotMapped)
+ m_memSurface->UnlockRect();
+ }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ if (!m_sample || m_mapMode != QtVideo::MapMode::NotMapped || mode != QtVideo::MapMode::ReadOnly)
+ return {};
+
+ D3DSURFACE_DESC desc;
+ if (m_memSurface) {
+ if (FAILED(m_memSurface->GetDesc(&desc)))
+ return {};
+
+ } else {
+ ComPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9, (void **)(surface.GetAddressOf()));
+ if (FAILED(hr))
+ return {};
+
+ if (FAILED(surface->GetDesc(&desc)))
+ return {};
+
+ if (FAILED(m_device->CreateOffscreenPlainSurface(desc.Width, desc.Height, desc.Format, D3DPOOL_SYSTEMMEM, m_memSurface.GetAddressOf(), nullptr)))
+ return {};
+
+ if (FAILED(m_device->GetRenderTargetData(surface.Get(), m_memSurface.Get()))) {
+ m_memSurface.Reset();
+ return {};
+ }
+ }
+
+ D3DLOCKED_RECT rect;
+ if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QtVideo::MapMode::ReadOnly ? D3DLOCK_READONLY : 0)))
+ return {};
+
+ m_mapMode = mode;
+
+ MapData mapData;
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = (int)rect.Pitch;
+ mapData.data[0] = reinterpret_cast<uchar *>(rect.pBits);
+ mapData.dataSize[0] = (int)(rect.Pitch * desc.Height);
+ return mapData;
+ }
+
+ void unmap() override
+ {
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
+ return;
+
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ if (m_memSurface)
+ m_memSurface->UnlockRect();
+ }
+
+protected:
+ ComPtr<IDirect3DDevice9Ex> m_device;
+ ComPtr<IMFSample> m_sample;
+
+private:
+ ComPtr<IDirect3DSurface9> m_memSurface;
+ QtVideo::MapMode m_mapMode;
+};
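
Mapping is where the expensive work happens in this buffer: the first map() copies the render target into a system-memory surface (GetRenderTargetData) and then locks it. A consumer-side sketch using the public Qt 6 QVideoFrame API, assuming a frame produced by D3DPresentEngine::makeVideoFrame() further below:

    // Sketch: reading back pixels; map() triggers the GPU-to-CPU copy above.
    void inspectFrame(QVideoFrame &frame)
    {
        if (!frame.map(QVideoFrame::ReadOnly))
            return;
        const uchar *bits = frame.bits(0);        // locked surface memory
        const int stride = frame.bytesPerLine(0); // rect.Pitch above
        qDebug() << "stride" << stride << "first byte" << (bits ? bits[0] : 0);
        frame.unmap();                            // UnlockRect()
    }
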
+
+class QVideoFrameD3D11Textures: public QVideoFrameTextures
+{
+public:
+ QVideoFrameD3D11Textures(std::unique_ptr<QRhiTexture> &&tex, ComPtr<ID3D11Texture2D> &&d3d11tex)
+ : m_tex(std::move(tex))
+ , m_d3d11tex(std::move(d3d11tex))
+ {}
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+ };
+
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+ ComPtr<ID3D11Texture2D> m_d3d11tex;
+};
+
+class D3D11TextureVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+ D3D11TextureVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ HANDLE sharedHandle, QRhi *rhi)
+ : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+ , m_sharedHandle(sharedHandle)
+ {}
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ if (!rhi || rhi->backend() != QRhi::D3D11)
+ return {};
+
+ auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi->nativeHandles());
+ if (!nh)
+ return {};
+
+ auto dev = reinterpret_cast<ID3D11Device *>(nh->dev);
+ if (!dev)
+ return {};
+
+ ComPtr<ID3D11Texture2D> d3d11tex;
+ HRESULT hr = dev->OpenSharedResource(m_sharedHandle, __uuidof(ID3D11Texture2D), (void**)(d3d11tex.GetAddressOf()));
+ if (SUCCEEDED(hr)) {
+ D3D11_TEXTURE2D_DESC desc = {};
+ d3d11tex->GetDesc(&desc);
+ QRhiTexture::Format format;
+ if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM)
+ format = QRhiTexture::BGRA8;
+ else if (desc.Format == DXGI_FORMAT_R8G8B8A8_UNORM)
+ format = QRhiTexture::RGBA8;
+ else
+ return {};
+
+ std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ tex->createFrom({quint64(d3d11tex.Get()), 0});
+ return std::make_unique<QVideoFrameD3D11Textures>(std::move(tex), std::move(d3d11tex));
+
+ } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Failed to obtain an ID3D11Texture2D from the shared D3D9 texture handle";
+ }
+ return {};
+ }
+
+private:
+ HANDLE m_sharedHandle = nullptr;
+};
+
+#if QT_CONFIG(opengl)
+class QVideoFrameOpenGlTextures : public QVideoFrameTextures
+{
+ struct InterOpHandles {
+ GLuint textureName = 0;
+ HANDLE device = nullptr;
+ HANDLE texture = nullptr;
+ };
+
+public:
+ Q_DISABLE_COPY(QVideoFrameOpenGlTextures);
+
+ QVideoFrameOpenGlTextures(std::unique_ptr<QRhiTexture> &&tex, const WglNvDxInterop &wgl, InterOpHandles &handles)
+ : m_tex(std::move(tex))
+ , m_wgl(wgl)
+ , m_handles(handles)
+ {}
+
+ ~QVideoFrameOpenGlTextures() override {
+ if (QOpenGLContext::currentContext()) {
+ if (!m_wgl.wglDXUnlockObjectsNV(m_handles.device, 1, &m_handles.texture))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to unlock OpenGL texture";
+
+ if (!m_wgl.wglDXUnregisterObjectNV(m_handles.device, m_handles.texture))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to unregister OpenGL texture";
+
+ QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+ if (funcs)
+ funcs->glDeleteTextures(1, &m_handles.textureName);
+ else
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not delete texture, OpenGL context functions missing";
+
+ if (!m_wgl.wglDXCloseDeviceNV(m_handles.device))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to close D3D-GL device";
+
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not release texture, OpenGL context missing";
+ }
+ }
+
+ static std::unique_ptr<QVideoFrameOpenGlTextures> create(const WglNvDxInterop &wgl, QRhi *rhi,
+ IDirect3DDevice9Ex *device, IDirect3DTexture9 *texture,
+ HANDLE sharedHandle)
+ {
+ if (!rhi || rhi->backend() != QRhi::OpenGLES2)
+ return {};
+
+ if (!QOpenGLContext::currentContext())
+ return {};
+
+ InterOpHandles handles = {};
+ handles.device = wgl.wglDXOpenDeviceNV(device);
+ if (!handles.device) {
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to open D3D device";
+ return {};
+ }
+
+ wgl.wglDXSetResourceShareHandleNV(texture, sharedHandle);
+
+ QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+ if (funcs) {
+ funcs->glGenTextures(1, &handles.textureName);
+ handles.texture = wgl.wglDXRegisterObjectNV(handles.device, texture, handles.textureName,
+ GL_TEXTURE_2D, WglNvDxInterop::WGL_ACCESS_READ_ONLY_NV);
+ if (handles.texture) {
+ if (wgl.wglDXLockObjectsNV(handles.device, 1, &handles.texture)) {
+ D3DSURFACE_DESC desc;
+ texture->GetLevelDesc(0, &desc);
+ QRhiTexture::Format format;
+ if (desc.Format == D3DFMT_A8R8G8B8)
+ format = QRhiTexture::BGRA8;
+ else if (desc.Format == D3DFMT_A8B8G8R8)
+ format = QRhiTexture::RGBA8;
+ else
+ return {};
+
+ std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ tex->createFrom({quint64(handles.textureName), 0});
+ return std::make_unique<QVideoFrameOpenGlTextures>(std::move(tex), wgl, handles);
+ }
+
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to lock OpenGL texture";
+ wgl.wglDXUnregisterObjectNV(handles.device, handles.texture);
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not register D3D9 texture in OpenGL";
+ }
+
+ funcs->glDeleteTextures(1, &handles.textureName);
+ } else {
+        qCDebug(qLcEvrD3DPresentEngine) << "Failed to generate texture names, OpenGL context functions missing";
+ }
+ return {};
+ }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+ };
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+ WglNvDxInterop m_wgl;
+ InterOpHandles m_handles;
+};
+
+class OpenGlVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+ OpenGlVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ const WglNvDxInterop &wglNvDxInterop, HANDLE sharedHandle, QRhi *rhi)
+ : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+ , m_sharedHandle(sharedHandle)
+ , m_wgl(wglNvDxInterop)
+ {}
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ if (!m_texture) {
+ ComPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9,
+ (void **)(surface.GetAddressOf()));
+ if (FAILED(hr))
+ return {};
+
+ hr = surface->GetContainer(IID_IDirect3DTexture9, (void **)m_texture.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+ }
+
+ return QVideoFrameOpenGlTextures::create(m_wgl, rhi, m_device.Get(), m_texture.Get(), m_sharedHandle);
+ }
+
+private:
+ HANDLE m_sharedHandle = nullptr;
+ WglNvDxInterop m_wgl;
+ ComPtr<IDirect3DTexture9> m_texture;
+};
+#endif
+
+D3DPresentEngine::D3DPresentEngine(QVideoSink *sink)
+ : m_deviceResetToken(0)
+{
+ ZeroMemory(&m_displayMode, sizeof(m_displayMode));
+ setSink(sink);
+}
+
+D3DPresentEngine::~D3DPresentEngine()
+{
+ releaseResources();
+}
+
+void D3DPresentEngine::setSink(QVideoSink *sink)
+{
+ if (sink == m_sink)
+ return;
+
+ m_sink = sink;
+
+ releaseResources();
+ m_device.Reset();
+ m_devices.Reset();
+ m_D3D9.Reset();
+
+ if (!m_sink)
+ return;
+
+ HRESULT hr = initializeD3D();
+
+ if (SUCCEEDED(hr)) {
+ hr = createD3DDevice();
+ if (FAILED(hr))
+ qWarning("Failed to create D3D device");
+ } else {
+ qWarning("Failed to initialize D3D");
+ }
+}
+
+HRESULT D3DPresentEngine::initializeD3D()
+{
+ HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, m_D3D9.GetAddressOf());
+
+ if (SUCCEEDED(hr))
+ hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, m_devices.GetAddressOf());
+
+ return hr;
+}
+
+static bool findD3D11AdapterID(QRhi &rhi, IDirect3D9Ex *D3D9, UINT &adapterID)
+{
+ auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi.nativeHandles());
+ if (D3D9 && nh) {
+ for (auto i = 0u; i < D3D9->GetAdapterCount(); ++i) {
+ LUID luid = {};
+ D3D9->GetAdapterLUID(i, &luid);
+ if (luid.LowPart == nh->adapterLuidLow && luid.HighPart == nh->adapterLuidHigh) {
+ adapterID = i;
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+#if QT_CONFIG(opengl)
+template <typename T>
+static bool getProc(const QOpenGLContext *ctx, T &fn, const char *fName)
+{
+ fn = reinterpret_cast<T>(ctx->getProcAddress(fName));
+ return fn != nullptr;
+}
+
+static bool readWglNvDxInteropProc(WglNvDxInterop &f)
+{
+ QScopedPointer<QOffscreenSurface> surface(new QOffscreenSurface);
+ surface->create();
+ QScopedPointer<QOpenGLContext> ctx(new QOpenGLContext);
+ ctx->create();
+ ctx->makeCurrent(surface.get());
+
+ auto wglGetExtensionsStringARB = reinterpret_cast<const char* (WINAPI* )(HDC)>
+ (ctx->getProcAddress("wglGetExtensionsStringARB"));
+ if (!wglGetExtensionsStringARB) {
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (no wglGetExtensionsStringARB function)";
+ return false;
+ }
+
+ HWND hwnd = ::GetShellWindow();
+ auto dc = ::GetDC(hwnd);
+
+ const char *wglExtString = wglGetExtensionsStringARB(dc);
+ if (!wglExtString)
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (wglGetExtensionsStringARB returned null)";
+
+ bool hasExtension = wglExtString && strstr(wglExtString, "WGL_NV_DX_interop");
+ ReleaseDC(hwnd, dc);
+ if (!hasExtension) {
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL_NV_DX_interop missing";
+ return false;
+ }
+
+ return getProc(ctx.get(), f.wglDXOpenDeviceNV, "wglDXOpenDeviceNV")
+ && getProc(ctx.get(), f.wglDXCloseDeviceNV, "wglDXCloseDeviceNV")
+ && getProc(ctx.get(), f.wglDXSetResourceShareHandleNV, "wglDXSetResourceShareHandleNV")
+ && getProc(ctx.get(), f.wglDXRegisterObjectNV, "wglDXRegisterObjectNV")
+ && getProc(ctx.get(), f.wglDXUnregisterObjectNV, "wglDXUnregisterObjectNV")
+ && getProc(ctx.get(), f.wglDXLockObjectsNV, "wglDXLockObjectsNV")
+ && getProc(ctx.get(), f.wglDXUnlockObjectsNV, "wglDXUnlockObjectsNV");
+}
+#endif
+
+namespace {
+
+bool hwTextureRenderingEnabled() {
+    // Allow the user to opt out of HW video rendering, using the same
+    // environment variable as the FFmpeg backend.
+ static bool isDisableConversionSet = false;
+ static const int disableHwConversion = qEnvironmentVariableIntValue(
+ "QT_DISABLE_HW_TEXTURES_CONVERSION", &isDisableConversionSet);
+
+ return !isDisableConversionSet || !disableHwConversion;
+}
+
+}
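
Because both locals above are function-level statics, the environment variable is read once per process, so it has to be set before the first video sink is created. For example (qputenv is QtCore API; the variable name is the one queried above):

    // Opt out of zero-copy texture sharing for this process.
    qputenv("QT_DISABLE_HW_TEXTURES_CONVERSION", "1");
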
+
+HRESULT D3DPresentEngine::createD3DDevice()
+{
+ if (!m_D3D9 || !m_devices)
+ return MF_E_NOT_INITIALIZED;
+
+ m_useTextureRendering = false;
+ UINT adapterID = 0;
+
+ if (hwTextureRenderingEnabled()) {
+ QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+ if (rhi) {
+ if (rhi->backend() == QRhi::D3D11) {
+ m_useTextureRendering = findD3D11AdapterID(*rhi, m_D3D9.Get(), adapterID);
+#if QT_CONFIG(opengl)
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+ m_useTextureRendering = readWglNvDxInteropProc(m_wglNvDxInterop);
+#endif
+ } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Unsupported RHI backend type";
+ }
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "No RHI associated with this sink";
+ }
+
+ if (!m_useTextureRendering)
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not find compatible RHI adapter, zero copy disabled";
+ }
+
+ D3DCAPS9 ddCaps;
+ ZeroMemory(&ddCaps, sizeof(ddCaps));
+
+ HRESULT hr = m_D3D9->GetDeviceCaps(adapterID, D3DDEVTYPE_HAL, &ddCaps);
+ if (FAILED(hr))
+ return hr;
+
+ DWORD vp = 0;
+ if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+ vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+ else
+ vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+
+ D3DPRESENT_PARAMETERS pp;
+ ZeroMemory(&pp, sizeof(pp));
+
+ pp.BackBufferWidth = 1;
+ pp.BackBufferHeight = 1;
+ pp.BackBufferCount = 1;
+ pp.Windowed = TRUE;
+ pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ pp.BackBufferFormat = D3DFMT_UNKNOWN;
+ pp.hDeviceWindow = nullptr;
+ pp.Flags = D3DPRESENTFLAG_VIDEO;
+ pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+ ComPtr<IDirect3DDevice9Ex> device;
+
+ hr = m_D3D9->CreateDeviceEx(
+ adapterID,
+ D3DDEVTYPE_HAL,
+ pp.hDeviceWindow,
+ vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
+ &pp,
+ NULL,
+ device.GetAddressOf()
+ );
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_D3D9->GetAdapterDisplayMode(adapterID, &m_displayMode);
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_devices->ResetDevice(device.Get(), m_deviceResetToken);
+ if (FAILED(hr))
+ return hr;
+
+ m_device = device;
+ return hr;
+}
+
+bool D3DPresentEngine::isValid() const
+{
+ return m_device.Get() != nullptr;
+}
+
+void D3DPresentEngine::releaseResources()
+{
+ m_surfaceFormat = QVideoFrameFormat();
+}
+
+HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
+{
+ HRESULT hr = S_OK;
+
+ if (riid == __uuidof(IDirect3DDeviceManager9)) {
+ if (!m_devices) {
+ hr = MF_E_UNSUPPORTED_SERVICE;
+ } else {
+ *ppv = m_devices.Get();
+ m_devices->AddRef();
+ }
+ } else {
+ hr = MF_E_UNSUPPORTED_SERVICE;
+ }
+
+ return hr;
+}
+
+HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
+{
+ if (!m_D3D9 || !m_device)
+ return E_FAIL;
+
+ HRESULT hr = S_OK;
+
+ D3DDISPLAYMODE mode;
+ D3DDEVICE_CREATION_PARAMETERS params;
+
+ hr = m_device->GetCreationParameters(&params);
+ if (FAILED(hr))
+ return hr;
+
+ UINT uAdapter = params.AdapterOrdinal;
+ D3DDEVTYPE type = params.DeviceType;
+
+ hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_D3D9->CheckDeviceFormat(uAdapter, type, mode.Format,
+ D3DUSAGE_RENDERTARGET,
+ D3DRTYPE_SURFACE,
+ format);
+ if (FAILED(hr))
+ return hr;
+
+ bool ok = format == D3DFMT_X8R8G8B8
+ || format == D3DFMT_A8R8G8B8
+ || format == D3DFMT_X8B8G8R8
+ || format == D3DFMT_A8B8G8R8;
+
+ return ok ? S_OK : D3DERR_NOTAVAILABLE;
+}
+
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format,
+ QList<ComPtr<IMFSample>> &videoSampleQueue,
+ QSize frameSize)
+{
+ if (!format || !m_device)
+ return MF_E_UNEXPECTED;
+
+ HRESULT hr = S_OK;
+ releaseResources();
+
+ UINT32 width = 0, height = 0;
+ hr = MFGetAttributeSize(format, MF_MT_FRAME_SIZE, &width, &height);
+ if (FAILED(hr))
+ return hr;
+
+ if (frameSize.isValid() && !frameSize.isEmpty()) {
+ width = frameSize.width();
+ height = frameSize.height();
+ }
+
+ DWORD d3dFormat = 0;
+ hr = qt_evr_getFourCC(format, &d3dFormat);
+ if (FAILED(hr))
+ return hr;
+
+    // FIXME: QRhi only exposes formats with an alpha channel, so promote the
+    // selected X8 format to its A8 equivalent.
+ if (d3dFormat == D3DFMT_X8R8G8B8)
+ d3dFormat = D3DFMT_A8R8G8B8;
+ else if (d3dFormat == D3DFMT_X8B8G8R8)
+ d3dFormat = D3DFMT_A8B8G8R8;
+
+ for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
+        // The texture's reference count is incremented by GetSurfaceLevel() and
+        // MFCreateVideoSampleFromSurface() below, so the texture is destroyed
+        // only when the sample pool is released.
+ ComPtr<IDirect3DTexture9> texture;
+ HANDLE sharedHandle = nullptr;
+ hr = m_device->CreateTexture(width, height, 1, D3DUSAGE_RENDERTARGET, (D3DFORMAT)d3dFormat, D3DPOOL_DEFAULT, texture.GetAddressOf(), &sharedHandle);
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = texture->GetSurfaceLevel(0, surface.GetAddressOf());
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IMFSample> videoSample;
+ hr = MFCreateVideoSampleFromSurface(surface.Get(), videoSample.GetAddressOf());
+ if (FAILED(hr))
+ break;
+
+ m_sampleTextureHandle[i] = {videoSample.Get(), sharedHandle};
+ videoSampleQueue.append(videoSample);
+ }
+
+ if (SUCCEEDED(hr)) {
+ m_surfaceFormat = QVideoFrameFormat(QSize(width, height), qt_evr_pixelFormatFromD3DFormat(d3dFormat));
+ } else {
+ releaseResources();
+ }
+
+ return hr;
+}
+
+QVideoFrame D3DPresentEngine::makeVideoFrame(const ComPtr<IMFSample> &sample)
+{
+ if (!sample)
+ return {};
+
+ HANDLE sharedHandle = nullptr;
+ for (const auto &p : m_sampleTextureHandle)
+ if (p.first == sample.Get())
+ sharedHandle = p.second;
+
+ std::unique_ptr<IMFSampleVideoBuffer> vb;
+ QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+ if (m_useTextureRendering && sharedHandle && rhi) {
+ if (rhi->backend() == QRhi::D3D11) {
+ vb = std::make_unique<D3D11TextureVideoBuffer>(m_device, sample, sharedHandle, rhi);
+#if QT_CONFIG(opengl)
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+ vb = std::make_unique<OpenGlVideoBuffer>(m_device, sample, m_wglNvDxInterop,
+ sharedHandle, rhi);
+#endif
+ }
+ }
+
+ if (!vb)
+ vb = std::make_unique<IMFSampleVideoBuffer>(m_device, sample, rhi);
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(vb), m_surfaceFormat);
+
+ // WMF uses 100-nanosecond units, Qt uses microseconds
+ LONGLONG startTime = 0;
+ auto hr = sample->GetSampleTime(&startTime);
+ if (SUCCEEDED(hr)) {
+ frame.setStartTime(startTime / 10);
+
+ LONGLONG duration = -1;
+ if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+ frame.setEndTime((startTime + duration) / 10);
+ }
+
+ return frame;
+}
+
+QT_END_NAMESPACE
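
As a worked example of the time-unit conversion at the end of makeVideoFrame(): Media Foundation sample times count 100-nanosecond ticks, while QVideoFrame start and end times are microseconds, hence the division by 10.

    const LONGLONG sampleTime = 2500000;        // 0.25 s in 100 ns ticks
    const qint64 startTimeUs = sampleTime / 10; // 250000 us
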
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
new file mode 100644
index 000000000..93aa90b71
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
@@ -0,0 +1,153 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRD3DPRESENTENGINE_H
+#define EVRD3DPRESENTENGINE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QMutex>
+#include <QSize>
+#include <QVideoFrameFormat>
+#include <private/qcomptr_p.h>
+#include <qpointer.h>
+
+#include <d3d9.h>
+
+struct IDirect3D9Ex;
+struct IDirect3DDevice9Ex;
+struct IDirect3DDeviceManager9;
+struct IDirect3DSurface9;
+struct IDirect3DTexture9;
+struct IMFSample;
+struct IMFMediaType;
+
+QT_BEGIN_NAMESPACE
+class QVideoFrame;
+class QVideoSink;
+QT_END_NAMESPACE
+
+// Randomly generated GUID
+static const GUID MFSamplePresenter_SampleCounter =
+{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
+
+#if QT_CONFIG(opengl)
+# include <qopengl.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+#ifdef MAYBE_ANGLE
+
+class OpenGLResources;
+
+class EGLWrapper
+{
+ Q_DISABLE_COPY(EGLWrapper)
+public:
+ EGLWrapper();
+
+ __eglMustCastToProperFunctionPointerType getProcAddress(const char *procname);
+ EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+ EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface);
+ EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+ EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+private:
+ typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname);
+ typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+ typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface);
+ typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+ typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+ EglGetProcAddress m_eglGetProcAddress;
+ EglCreatePbufferSurface m_eglCreatePbufferSurface;
+ EglDestroySurface m_eglDestroySurface;
+ EglBindTexImage m_eglBindTexImage;
+ EglReleaseTexImage m_eglReleaseTexImage;
+};
+
+#endif // MAYBE_ANGLE
+
+#if QT_CONFIG(opengl)
+
+struct WglNvDxInterop {
+ HANDLE (WINAPI* wglDXOpenDeviceNV) (void* dxDevice);
+ BOOL (WINAPI* wglDXCloseDeviceNV) (HANDLE hDevice);
+ HANDLE (WINAPI* wglDXRegisterObjectNV) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access);
+ BOOL (WINAPI* wglDXSetResourceShareHandleNV) (void *dxResource, HANDLE shareHandle);
+ BOOL (WINAPI* wglDXLockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+ BOOL (WINAPI* wglDXUnlockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+ BOOL (WINAPI* wglDXUnregisterObjectNV) (HANDLE hDevice, HANDLE hObject);
+
+ static const int WGL_ACCESS_READ_ONLY_NV = 0;
+};
+
+#endif
+
+class D3DPresentEngine
+{
+ Q_DISABLE_COPY(D3DPresentEngine)
+public:
+ D3DPresentEngine(QVideoSink *sink);
+ virtual ~D3DPresentEngine();
+
+ bool isValid() const;
+
+ HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
+ HRESULT checkFormat(D3DFORMAT format);
+ UINT refreshRate() const { return m_displayMode.RefreshRate; }
+
+ HRESULT createVideoSamples(IMFMediaType *format, QList<ComPtr<IMFSample>> &videoSampleQueue,
+ QSize frameSize);
+ QVideoFrameFormat videoSurfaceFormat() const { return m_surfaceFormat; }
+ QVideoFrame makeVideoFrame(const ComPtr<IMFSample> &sample);
+
+ void releaseResources();
+ void setSink(QVideoSink *sink);
+
+private:
+ static const int PRESENTER_BUFFER_COUNT = 3;
+
+ HRESULT initializeD3D();
+ HRESULT createD3DDevice();
+
+ std::pair<IMFSample *, HANDLE> m_sampleTextureHandle[PRESENTER_BUFFER_COUNT] = {};
+
+ UINT m_deviceResetToken;
+ D3DDISPLAYMODE m_displayMode;
+
+ ComPtr<IDirect3D9Ex> m_D3D9;
+ ComPtr<IDirect3DDevice9Ex> m_device;
+ ComPtr<IDirect3DDeviceManager9> m_devices;
+
+ QVideoFrameFormat m_surfaceFormat;
+
+ QPointer<QVideoSink> m_sink;
+ bool m_useTextureRendering = false;
+#if QT_CONFIG(opengl)
+ WglNvDxInterop m_wglNvDxInterop;
+#endif
+
+#ifdef MAYBE_ANGLE
+ unsigned int updateTexture(IDirect3DSurface9 *src);
+
+ OpenGLResources *m_glResources;
+ IDirect3DTexture9 *m_texture;
+#endif
+
+ friend class IMFSampleVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif // EVRD3DPRESENTENGINE_H
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers.cpp b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
new file mode 100644
index 000000000..bf4347c69
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
@@ -0,0 +1,140 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrhelpers_p.h"
+
+#ifndef D3DFMT_YV12
+#define D3DFMT_YV12 (D3DFORMAT)MAKEFOURCC ('Y', 'V', '1', '2')
+#endif
+#ifndef D3DFMT_NV12
+#define D3DFMT_NV12 (D3DFORMAT)MAKEFOURCC ('N', 'V', '1', '2')
+#endif
+
+QT_BEGIN_NAMESPACE
+
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
+{
+    if (!fourCC)
+        return E_POINTER;
+
+    GUID guidSubType = GUID_NULL;
+    HRESULT hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
+
+    if (SUCCEEDED(hr))
+        *fourCC = guidSubType.Data1;
+
+    return hr;
+}
+
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
+{
+ if (!type1 && !type2)
+ return true;
+ if (!type1 || !type2)
+ return false;
+
+ DWORD dwFlags = 0;
+ HRESULT hr = type1->IsEqual(type2, &dwFlags);
+
+ return (hr == S_OK);
+}
+
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
+{
+ float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX);
+ float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY);
+
+ if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
+ ((LONG)fOffsetY + area.Area.cy > (LONG)height) ) {
+ return MF_E_INVALIDMEDIATYPE;
+ }
+ return S_OK;
+}
+
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
+{
+ if (!sample || !clock)
+ return false;
+
+ HRESULT hr = S_OK;
+ MFTIME hnsTimeNow = 0;
+ MFTIME hnsSystemTime = 0;
+ MFTIME hnsSampleStart = 0;
+ MFTIME hnsSampleDuration = 0;
+
+ hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+ if (SUCCEEDED(hr))
+ hr = sample->GetSampleTime(&hnsSampleStart);
+
+ if (SUCCEEDED(hr))
+ hr = sample->GetSampleDuration(&hnsSampleDuration);
+
+ if (SUCCEEDED(hr)) {
+ if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
+ return true;
+ }
+
+ return false;
+}
+
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
+{
+ switch (format) {
+ case D3DFMT_A8R8G8B8:
+ return QVideoFrameFormat::Format_BGRA8888;
+ case D3DFMT_X8R8G8B8:
+ return QVideoFrameFormat::Format_BGRX8888;
+ case D3DFMT_A8:
+ return QVideoFrameFormat::Format_Y8;
+ case D3DFMT_A8B8G8R8:
+ return QVideoFrameFormat::Format_RGBA8888;
+ case D3DFMT_X8B8G8R8:
+ return QVideoFrameFormat::Format_RGBX8888;
+ case D3DFMT_UYVY:
+ return QVideoFrameFormat::Format_UYVY;
+ case D3DFMT_YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case D3DFMT_NV12:
+ return QVideoFrameFormat::Format_NV12;
+ case D3DFMT_YV12:
+ return QVideoFrameFormat::Format_YV12;
+ case D3DFMT_UNKNOWN:
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoFrameFormat::Format_ARGB8888:
+ return D3DFMT_A8B8G8R8;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return D3DFMT_A8R8G8B8;
+ case QVideoFrameFormat::Format_BGRX8888:
+ return D3DFMT_X8R8G8B8;
+ case QVideoFrameFormat::Format_Y8:
+ return D3DFMT_A8;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return D3DFMT_A8B8G8R8;
+ case QVideoFrameFormat::Format_RGBX8888:
+ return D3DFMT_X8B8G8R8;
+ case QVideoFrameFormat::Format_UYVY:
+ return D3DFMT_UYVY;
+ case QVideoFrameFormat::Format_YUYV:
+ return D3DFMT_YUY2;
+ case QVideoFrameFormat::Format_NV12:
+ return D3DFMT_NV12;
+ case QVideoFrameFormat::Format_YV12:
+ return D3DFMT_YV12;
+ case QVideoFrameFormat::Format_Invalid:
+ default:
+ return D3DFMT_UNKNOWN;
+ }
+}
+
+QT_END_NAMESPACE
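
A small usage example of the mapping above: the EVR's usual RGB render-target format D3DFMT_A8R8G8B8 stores bytes B,G,R,A in memory, which corresponds to Qt's Format_BGRA8888.

    const QVideoFrameFormat::PixelFormat f =
            qt_evr_pixelFormatFromD3DFormat(D3DFMT_A8R8G8B8);
    Q_ASSERT(f == QVideoFrameFormat::Format_BGRA8888);
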
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers_p.h b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
new file mode 100644
index 000000000..30779c835
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
@@ -0,0 +1,93 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRHELPERS_H
+#define EVRHELPERS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qvideoframe.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <private/quniquehandle_p.h>
+
+QT_BEGIN_NAMESPACE
+
+template<class T>
+static inline void qt_evr_safe_release(T **unk)
+{
+ if (*unk) {
+ (*unk)->Release();
+ *unk = NULL;
+ }
+}
+
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC);
+
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
+
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
+
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
+
+inline float qt_evr_MFOffsetToFloat(const MFOffset& offset)
+{
+ return offset.value + (float(offset.fract) / 65536);
+}
+
+inline MFOffset qt_evr_makeMFOffset(float v)
+{
+ MFOffset offset;
+ offset.value = short(v);
+ offset.fract = WORD(65536 * (v-offset.value));
+ return offset;
+}
+
+inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
+{
+ MFVideoArea area;
+ area.OffsetX = qt_evr_makeMFOffset(x);
+ area.OffsetY = qt_evr_makeMFOffset(y);
+ area.Area.cx = width;
+ area.Area.cy = height;
+ return area;
+}
+
+inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
+{
+ return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE,
+ reinterpret_cast<UINT32*>(&pRatio->Numerator),
+ reinterpret_cast<UINT32*>(&pRatio->Denominator));
+}
+
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format);
+
+struct NullHandleTraits
+{
+ using Type = HANDLE;
+ static Type invalidValue() { return nullptr; }
+ static bool close(Type handle) { return CloseHandle(handle) != 0; }
+};
+
+using EventHandle = QUniqueHandle<NullHandleTraits>;
+using ThreadHandle = QUniqueHandle<NullHandleTraits>;
+
+QT_END_NAMESPACE
+
+#endif // EVRHELPERS_H
+
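
MFOffset is a 16.16-style fixed-point value: 'value' carries the integer part and 'fract' carries 65536ths. A quick worked check of the two offset helpers above:

    MFOffset off = qt_evr_makeMFOffset(1.25f);
    // off.value == 1, off.fract == 16384 (0.25 * 65536)
    float back = qt_evr_MFOffsetToFloat(off); // 1.0f + 16384/65536.0f == 1.25f
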
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
new file mode 100644
index 000000000..854c9ddb2
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrvideowindowcontrol_p.h"
+
+QT_BEGIN_NAMESPACE
+
+EvrVideoWindowControl::EvrVideoWindowControl(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+ , m_windowId(0)
+ , m_windowColor(RGB(0, 0, 0))
+ , m_dirtyValues(0)
+ , m_aspectRatioMode(Qt::KeepAspectRatio)
+ , m_brightness(0)
+ , m_contrast(0)
+ , m_hue(0)
+ , m_saturation(0)
+ , m_fullScreen(false)
+ , m_displayControl(0)
+ , m_processor(0)
+{
+}
+
+EvrVideoWindowControl::~EvrVideoWindowControl()
+{
+ clear();
+}
+
+bool EvrVideoWindowControl::setEvr(IUnknown *evr)
+{
+ clear();
+
+ if (!evr)
+ return true;
+
+ IMFGetService *service = NULL;
+
+ if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service)))
+ && SUCCEEDED(service->GetService(MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_displayControl)))) {
+
+ service->GetService(MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_processor));
+
+ setWinId(m_windowId);
+ setDisplayRect(m_displayRect);
+ setAspectRatioMode(m_aspectRatioMode);
+ m_dirtyValues = DXVA2_ProcAmp_Brightness | DXVA2_ProcAmp_Contrast | DXVA2_ProcAmp_Hue | DXVA2_ProcAmp_Saturation;
+ applyImageControls();
+ }
+
+ if (service)
+ service->Release();
+
+ return m_displayControl != NULL;
+}
+
+void EvrVideoWindowControl::clear()
+{
+ if (m_displayControl)
+ m_displayControl->Release();
+ m_displayControl = NULL;
+
+ if (m_processor)
+ m_processor->Release();
+ m_processor = NULL;
+}
+
+void EvrVideoWindowControl::setWinId(WId id)
+{
+ m_windowId = id;
+
+ if (m_displayControl)
+ m_displayControl->SetVideoWindow(HWND(m_windowId));
+}
+
+void EvrVideoWindowControl::setDisplayRect(const QRect &rect)
+{
+ m_displayRect = rect;
+
+ if (m_displayControl) {
+ RECT displayRect = { rect.left(), rect.top(), rect.right() + 1, rect.bottom() + 1 };
+ QSize sourceSize = nativeSize();
+
+ RECT sourceRect = { 0, 0, sourceSize.width(), sourceSize.height() };
+
+ if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
+ QSize clippedSize = rect.size();
+ clippedSize.scale(sourceRect.right, sourceRect.bottom, Qt::KeepAspectRatio);
+
+ sourceRect.left = (sourceRect.right - clippedSize.width()) / 2;
+ sourceRect.top = (sourceRect.bottom - clippedSize.height()) / 2;
+ sourceRect.right = sourceRect.left + clippedSize.width();
+ sourceRect.bottom = sourceRect.top + clippedSize.height();
+ }
+
+ if (sourceSize.width() > 0 && sourceSize.height() > 0) {
+ MFVideoNormalizedRect sourceNormRect;
+ sourceNormRect.left = float(sourceRect.left) / float(sourceRect.right);
+ sourceNormRect.top = float(sourceRect.top) / float(sourceRect.bottom);
+ sourceNormRect.right = float(sourceRect.right) / float(sourceRect.right);
+ sourceNormRect.bottom = float(sourceRect.bottom) / float(sourceRect.bottom);
+ m_displayControl->SetVideoPosition(&sourceNormRect, &displayRect);
+ } else {
+ m_displayControl->SetVideoPosition(NULL, &displayRect);
+ }
+ }
+}
+
+void EvrVideoWindowControl::setFullScreen(bool fullScreen)
+{
+    // Full-screen switching is not supported by this control; the request is
+    // intentionally ignored.
+    if (m_fullScreen == fullScreen)
+        return;
+}
+
+void EvrVideoWindowControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ m_aspectRatioMode = mode;
+
+ if (m_displayControl) {
+ switch (mode) {
+ case Qt::IgnoreAspectRatio:
+ //comment from MSDN: Do not maintain the aspect ratio of the video. Stretch the video to fit the output rectangle.
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_None);
+ break;
+ case Qt::KeepAspectRatio:
+            //comment from MSDN: Preserve the aspect ratio of the video by letterboxing within the output rectangle.
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+ break;
+ case Qt::KeepAspectRatioByExpanding:
+            //for this mode, additional source-rectangle adjustment is done in setDisplayRect
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+ break;
+ default:
+ break;
+ }
+ setDisplayRect(m_displayRect);
+ }
+}
+
+void EvrVideoWindowControl::setBrightness(float brightness)
+{
+ if (m_brightness == brightness)
+ return;
+
+ m_brightness = brightness;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Brightness;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setContrast(float contrast)
+{
+ if (m_contrast == contrast)
+ return;
+
+ m_contrast = contrast;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Contrast;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setHue(float hue)
+{
+ if (m_hue == hue)
+ return;
+
+ m_hue = hue;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Hue;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setSaturation(float saturation)
+{
+ if (m_saturation == saturation)
+ return;
+
+ m_saturation = saturation;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Saturation;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::applyImageControls()
+{
+ if (m_processor) {
+        DXVA2_ProcAmpValues values = {}; // only fields flagged in m_dirtyValues are read
+ if (m_dirtyValues & DXVA2_ProcAmp_Brightness) {
+ values.Brightness = scaleProcAmpValue(DXVA2_ProcAmp_Brightness, m_brightness);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Contrast) {
+ values.Contrast = scaleProcAmpValue(DXVA2_ProcAmp_Contrast, m_contrast);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Hue) {
+ values.Hue = scaleProcAmpValue(DXVA2_ProcAmp_Hue, m_hue);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Saturation) {
+ values.Saturation = scaleProcAmpValue(DXVA2_ProcAmp_Saturation, m_saturation);
+ }
+
+ if (SUCCEEDED(m_processor->SetProcAmpValues(m_dirtyValues, &values))) {
+ m_dirtyValues = 0;
+ }
+ }
+}
+
+DXVA2_Fixed32 EvrVideoWindowControl::scaleProcAmpValue(DWORD prop, float value) const
+{
+    float scaledValue = 0.0f;
+
+ DXVA2_ValueRange range;
+ if (SUCCEEDED(m_processor->GetProcAmpRange(prop, &range))) {
+ scaledValue = DXVA2FixedToFloat(range.DefaultValue);
+ if (value > 0)
+ scaledValue += float(value) * (DXVA2FixedToFloat(range.MaxValue) - DXVA2FixedToFloat(range.DefaultValue));
+ else if (value < 0)
+ scaledValue -= float(value) * (DXVA2FixedToFloat(range.MinValue) - DXVA2FixedToFloat(range.DefaultValue));
+ }
+
+ return DXVA2FloatToFixed(scaledValue);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_evrvideowindowcontrol_p.cpp"
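
The ProcAmp scaling at the end of the file maps Qt's normalized [-1, 1] control values onto the driver-reported range around its default. Distilled into plain arithmetic (the range numbers below are illustrative, not real driver values):

    // scaled(v) = Default + v * (Max - Default)   for v > 0
    //           = Default - v * (Min - Default)   for v < 0
    // With Min = -100, Default = 0, Max = 100:
    //   v =  1.0f  ->  100  (Max)
    //   v = -1.0f  -> -100  (Min)
    //   v =  0.0f  ->    0  (Default)
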
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
new file mode 100644
index 000000000..c4875d28d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRVIDEOWINDOWCONTROL_H
+#define EVRVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class EvrVideoWindowControl : public QPlatformVideoSink
+{
+ Q_OBJECT
+public:
+ EvrVideoWindowControl(QVideoSink *parent = 0);
+ ~EvrVideoWindowControl() override;
+
+ bool setEvr(IUnknown *evr);
+
+ void setWinId(WId id) override;
+
+ void setDisplayRect(const QRect &rect) override;
+
+ void setFullScreen(bool fullScreen) override;
+
+ void setAspectRatioMode(Qt::AspectRatioMode mode) override;
+
+ void setBrightness(float brightness) override;
+ void setContrast(float contrast) override;
+ void setHue(float hue) override;
+ void setSaturation(float saturation) override;
+
+ void applyImageControls();
+
+private:
+ void clear();
+ DXVA2_Fixed32 scaleProcAmpValue(DWORD prop, float value) const;
+
+ WId m_windowId;
+ COLORREF m_windowColor;
+ DWORD m_dirtyValues;
+ Qt::AspectRatioMode m_aspectRatioMode;
+ QRect m_displayRect;
+ float m_brightness;
+ float m_contrast;
+ float m_hue;
+ float m_saturation;
+ bool m_fullScreen;
+
+ IMFVideoDisplayControl *m_displayControl;
+ IMFVideoProcessor *m_processor;
+};
+
+QT_END_NAMESPACE
+
+#endif
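
A minimal sketch of how a backend might drive this control, assuming an EVR instance is already at hand (the surrounding plumbing is hypothetical):

    EvrVideoWindowControl control(sink); // sink: QVideoSink*
    if (control.setEvr(evr)) {           // evr: IUnknown* of the renderer
        control.setWinId(windowId);      // native window to render into
        control.setDisplayRect(QRect(0, 0, 640, 480));
        control.setAspectRatioMode(Qt::KeepAspectRatio);
    }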