Diffstat (limited to 'src/plugins/wmf')
-rw-r--r--  src/plugins/wmf/evrcustompresenter.cpp            | 2045
-rw-r--r--  src/plugins/wmf/evrcustompresenter.h              |  325
-rw-r--r--  src/plugins/wmf/evrd3dpresentengine.cpp           |  657
-rw-r--r--  src/plugins/wmf/evrd3dpresentengine.h             |  148
-rw-r--r--  src/plugins/wmf/mfactivate.h                      |    2
-rw-r--r--  src/plugins/wmf/mfglobal.cpp                      |  116
-rw-r--r--  src/plugins/wmf/mfglobal.h                        |  149
-rw-r--r--  src/plugins/wmf/mftvideo.cpp                      |  216
-rw-r--r--  src/plugins/wmf/mftvideo.h                        |    4
-rw-r--r--  src/plugins/wmf/player/mfplayersession.cpp        |  181
-rw-r--r--  src/plugins/wmf/player/mfplayersession.h          |    3
-rw-r--r--  src/plugins/wmf/player/mfvideorenderercontrol.cpp |  111
-rw-r--r--  src/plugins/wmf/player/mfvideorenderercontrol.h   |    6
-rw-r--r--  src/plugins/wmf/wmf.pro                           |   17
14 files changed, 287 insertions, 3693 deletions
diff --git a/src/plugins/wmf/evrcustompresenter.cpp b/src/plugins/wmf/evrcustompresenter.cpp
deleted file mode 100644
index 967095b20..000000000
--- a/src/plugins/wmf/evrcustompresenter.cpp
+++ /dev/null
@@ -1,2045 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "evrcustompresenter.h"
-
-#include "mfglobal.h"
-#include "evrd3dpresentengine.h"
-
-#include <QtCore/qmutex.h>
-#include <QtCore/qvarlengtharray.h>
-#include <QtCore/qrect.h>
-#include <qabstractvideosurface.h>
-#include <qthread.h>
-#include <qcoreapplication.h>
-#include <qmath.h>
-#include <QtCore/qdebug.h>
-#include <d3d9.h>
-#include <dshow.h>
-
-QT_USE_NAMESPACE
-
-const static MFRatio g_DefaultFrameRate = { 30, 1 };
-static const DWORD SCHEDULER_TIMEOUT = 5000;
-static const MFTIME ONE_SECOND = 10000000;
-static const LONG ONE_MSEC = 1000;
-
-// Function declarations.
-static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
-static HRESULT clearDesiredSampleTime(IMFSample *sample);
-static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
-static DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat);
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
-
-static inline LONG MFTimeToMsec(const LONGLONG& time)
-{
- return (LONG)(time / (ONE_SECOND / ONE_MSEC));
-}
-
-
-Scheduler::Scheduler()
- : m_CB(NULL)
- , m_clock(NULL)
- , m_threadID(0)
- , m_schedulerThread(0)
- , m_threadReadyEvent(0)
- , m_flushEvent(0)
- , m_playbackRate(1.0f)
- , m_lastSampleTime(0)
- , m_perFrameInterval(0)
- , m_perFrame_1_4th(0)
-{
-}
-
-Scheduler::~Scheduler()
-{
- qt_wmf_safeRelease(&m_clock);
- for (int i = 0; i < m_scheduledSamples.size(); ++i)
- m_scheduledSamples[i]->Release();
- m_scheduledSamples.clear();
-}
-
-void Scheduler::setFrameRate(const MFRatio& fps)
-{
- UINT64 AvgTimePerFrame = 0;
-
- // Convert to a duration.
- MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
-
- m_perFrameInterval = (MFTIME)AvgTimePerFrame;
-
- // Calculate 1/4th of this value, because we use it frequently.
- m_perFrame_1_4th = m_perFrameInterval / 4;
-}
-
-HRESULT Scheduler::startScheduler(IMFClock *clock)
-{
- if (m_schedulerThread)
- return E_UNEXPECTED;
-
- HRESULT hr = S_OK;
- DWORD dwID = 0;
-
- qt_wmf_copyComPointer(m_clock, clock);
-
- // Set a high timer resolution (i.e., a short timer period).
- timeBeginPeriod(1);
-
- // Create an event to wait for the thread to start.
- m_threadReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
- if (!m_threadReadyEvent) {
- hr = HRESULT_FROM_WIN32(GetLastError());
- goto done;
- }
-
- // Create an event to wait for flush commands to complete.
- m_flushEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
- if (!m_flushEvent) {
- hr = HRESULT_FROM_WIN32(GetLastError());
- goto done;
- }
-
- // Create the scheduler thread.
- m_schedulerThread = CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID);
- if (!m_schedulerThread) {
- hr = HRESULT_FROM_WIN32(GetLastError());
- goto done;
- }
-
- HANDLE hObjects[] = { m_threadReadyEvent, m_schedulerThread };
- DWORD dwWait = 0;
-
- // Wait for the thread to signal the "thread ready" event.
- dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
- if (WAIT_OBJECT_0 != dwWait) {
- // The thread terminated early for some reason. This is an error condition.
- CloseHandle(m_schedulerThread);
- m_schedulerThread = NULL;
-
- hr = E_UNEXPECTED;
- goto done;
- }
-
- m_threadID = dwID;
-
-done:
- // Regardless of success or failure, we are done using the "thread ready" event.
- if (m_threadReadyEvent) {
- CloseHandle(m_threadReadyEvent);
- m_threadReadyEvent = NULL;
- }
- return hr;
-}
-
-HRESULT Scheduler::stopScheduler()
-{
- if (!m_schedulerThread)
- return S_OK;
-
- // Ask the scheduler thread to exit.
- PostThreadMessage(m_threadID, Terminate, 0, 0);
-
- // Wait for the thread to exit.
- WaitForSingleObject(m_schedulerThread, INFINITE);
-
- // Close handles.
- CloseHandle(m_schedulerThread);
- m_schedulerThread = NULL;
-
- CloseHandle(m_flushEvent);
- m_flushEvent = NULL;
-
- // Discard samples.
- m_mutex.lock();
- for (int i = 0; i < m_scheduledSamples.size(); ++i)
- m_scheduledSamples[i]->Release();
- m_scheduledSamples.clear();
- m_mutex.unlock();
-
- // Restore the timer resolution.
- timeEndPeriod(1);
-
- return S_OK;
-}
-
-HRESULT Scheduler::flush()
-{
- if (m_schedulerThread) {
- // Ask the scheduler thread to flush.
- PostThreadMessage(m_threadID, Flush, 0 , 0);
-
- // Wait for the scheduler thread to signal the flush event,
- // OR for the thread to terminate.
- HANDLE objects[] = { m_flushEvent, m_schedulerThread };
-
- WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
- }
-
- return S_OK;
-}
-
-HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow)
-{
- if (!m_CB)
- return MF_E_NOT_INITIALIZED;
-
- if (!m_schedulerThread)
- return MF_E_NOT_INITIALIZED;
-
- HRESULT hr = S_OK;
- DWORD dwExitCode = 0;
-
- GetExitCodeThread(m_schedulerThread, &dwExitCode);
- if (dwExitCode != STILL_ACTIVE)
- return E_FAIL;
-
- if (presentNow || !m_clock) {
- // Present the sample immediately.
- sample->AddRef();
- QMetaObject::invokeMethod(m_CB,
- "presentSample",
- Qt::QueuedConnection,
- Q_ARG(void*, sample),
- Q_ARG(qint64, 0));
- } else {
- // Queue the sample and ask the scheduler thread to wake up.
- m_mutex.lock();
- sample->AddRef();
- m_scheduledSamples.enqueue(sample);
- m_mutex.unlock();
-
- if (SUCCEEDED(hr))
- PostThreadMessage(m_threadID, Schedule, 0, 0);
- }
-
- return hr;
-}
-
-HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)
-{
- HRESULT hr = S_OK;
- LONG wait = 0;
- IMFSample *sample = NULL;
-
- // Process samples until the queue is empty or until the wait time > 0.
- while (!m_scheduledSamples.isEmpty()) {
- m_mutex.lock();
- sample = m_scheduledSamples.dequeue();
- m_mutex.unlock();
-
- // Process the next sample in the queue. If the sample is not ready
- // for presentation, the value returned in wait is > 0, which
- // means the scheduler should sleep for that amount of time.
-
- hr = processSample(sample, &wait);
- qt_wmf_safeRelease(&sample);
-
- if (FAILED(hr) || wait > 0)
- break;
- }
-
- // If the wait time is zero, it means we stopped because the queue is
- // empty (or an error occurred). Set the wait time to infinite; this will
- // make the scheduler thread sleep until it gets another thread message.
- if (wait == 0)
- wait = INFINITE;
-
- *nextSleep = wait;
- return hr;
-}
-
-HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep)
-{
- HRESULT hr = S_OK;
-
- LONGLONG hnsPresentationTime = 0;
- LONGLONG hnsTimeNow = 0;
- MFTIME hnsSystemTime = 0;
-
- bool presentNow = true;
- LONG nextSleep = 0;
-
- if (m_clock) {
- // Get the sample's time stamp. It is valid for a sample to
- // have no time stamp.
- hr = sample->GetSampleTime(&hnsPresentationTime);
-
- // Get the clock time. (But if the sample does not have a time stamp,
- // we don't need the clock time.)
- if (SUCCEEDED(hr))
- hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
-
- // Calculate the time until the sample's presentation time.
- // A negative value means the sample is late.
- LONGLONG hnsDelta = hnsPresentationTime - hnsTimeNow;
- if (m_playbackRate < 0) {
- // For reverse playback, the clock runs backward. Therefore, the
- // delta is reversed.
- hnsDelta = - hnsDelta;
- }
-
- if (hnsDelta < - m_perFrame_1_4th) {
- // This sample is late.
- presentNow = true;
- } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
- // This sample is still too early. Go to sleep.
- nextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
-
- // Adjust the sleep time for the clock rate. (The presentation clock runs
- // at m_playbackRate, but sleeping uses the system clock.)
- if (m_playbackRate != 0)
- nextSleep = (LONG)(nextSleep / qFabs(m_playbackRate));
-
- // Don't present yet.
- presentNow = false;
- }
- }
-
- if (presentNow) {
- sample->AddRef();
- QMetaObject::invokeMethod(m_CB,
- "presentSample",
- Qt::QueuedConnection,
- Q_ARG(void*, sample),
- Q_ARG(qint64, hnsPresentationTime));
- } else {
- // The sample is not ready yet. Return it to the queue.
- m_mutex.lock();
- sample->AddRef();
- m_scheduledSamples.prepend(sample);
- m_mutex.unlock();
- }
-
- *pNextSleep = nextSleep;
-
- return hr;
-}
-
-DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
-{
- Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
- if (!scheduler)
- return -1;
- return scheduler->schedulerThreadProcPrivate();
-}
-
-DWORD Scheduler::schedulerThreadProcPrivate()
-{
- HRESULT hr = S_OK;
- MSG msg;
- LONG wait = INFINITE;
- bool exitThread = false;
-
- // Force the system to create a message queue for this thread.
- // (See MSDN documentation for PostThreadMessage.)
- PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
-
- // Signal to the scheduler that the thread is ready.
- SetEvent(m_threadReadyEvent);
-
- while (!exitThread) {
- // Wait for a thread message OR until the wait time expires.
- DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
-
- if (result == WAIT_TIMEOUT) {
- // If we timed out, then process the samples in the queue
- hr = processSamplesInQueue(&wait);
- if (FAILED(hr))
- exitThread = true;
- }
-
- while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
- bool processSamples = true;
-
- switch (msg.message) {
- case Terminate:
- exitThread = true;
- break;
- case Flush:
- // Flushing: Clear the sample queue and set the event.
- m_mutex.lock();
- for (int i = 0; i < m_scheduledSamples.size(); ++i)
- m_scheduledSamples[i]->Release();
- m_scheduledSamples.clear();
- m_mutex.unlock();
- wait = INFINITE;
- SetEvent(m_flushEvent);
- break;
- case Schedule:
- // Process as many samples as we can.
- if (processSamples) {
- hr = processSamplesInQueue(&wait);
- if (FAILED(hr))
- exitThread = true;
- processSamples = (wait != INFINITE);
- }
- break;
- }
- }
-
- }
-
- return (SUCCEEDED(hr) ? 0 : 1);
-}
-
-
-SamplePool::SamplePool()
- : m_initialized(false)
- , m_pending(0)
-{
-}
-
-SamplePool::~SamplePool()
-{
- clear();
-}
-
-HRESULT SamplePool::getSample(IMFSample **sample)
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_initialized)
- return MF_E_NOT_INITIALIZED;
-
- if (m_videoSampleQueue.isEmpty())
- return MF_E_SAMPLEALLOCATOR_EMPTY;
-
- // Get a sample from the allocated queue.
-
- // It doesn't matter if we pull them from the head or tail of the list,
- // but when we get it back, we want to re-insert it onto the opposite end.
- // (see returnSample)
-
- IMFSample *taken = m_videoSampleQueue.takeFirst();
-
- m_pending++;
-
- // Give the sample to the caller.
- *sample = taken;
- (*sample)->AddRef();
-
- taken->Release();
-
- return S_OK;
-}
-
-HRESULT SamplePool::returnSample(IMFSample *sample)
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_initialized)
- return MF_E_NOT_INITIALIZED;
-
- m_videoSampleQueue.append(sample);
- sample->AddRef();
-
- m_pending--;
-
- return S_OK;
-}
-
-BOOL SamplePool::areSamplesPending()
-{
- QMutexLocker locker(&m_mutex);
-
- bool ret = false;
-
- if (!m_initialized)
- ret = false;
- else
- ret = (m_pending > 0);
-
- return ret;
-}
-
-HRESULT SamplePool::initialize(QList<IMFSample*> &samples)
-{
- QMutexLocker locker(&m_mutex);
-
- if (m_initialized)
- return MF_E_INVALIDREQUEST;
-
- IMFSample *sample = NULL;
-
- // Move these samples into our allocated queue.
- for (int i = 0; i < samples.size(); ++i) {
- sample = samples.at(i);
- sample->AddRef();
- m_videoSampleQueue.append(sample);
- }
-
- m_initialized = true;
-
- for (int i = 0; i < samples.size(); ++i)
- samples[i]->Release();
- samples.clear();
- return S_OK;
-}
-
-HRESULT SamplePool::clear()
-{
- QMutexLocker locker(&m_mutex);
-
- for (int i = 0; i < m_videoSampleQueue.size(); ++i)
- m_videoSampleQueue[i]->Release();
- m_videoSampleQueue.clear();
- m_initialized = false;
- m_pending = 0;
-
- return S_OK;
-}
-
-
-EVRCustomPresenter::EVRCustomPresenter()
- : QObject()
- , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
- , m_refCount(1)
- , m_renderState(RenderShutdown)
- , m_mutex(QMutex::Recursive)
- , m_tokenCounter(0)
- , m_sampleNotify(false)
- , m_repaint(false)
- , m_prerolled(false)
- , m_endStreaming(false)
- , m_playbackRate(1.0f)
- , m_D3DPresentEngine(0)
- , m_clock(0)
- , m_mixer(0)
- , m_mediaEventSink(0)
- , m_mediaType(0)
- , m_surface(0)
-{
- // Initial source rectangle = (0,0,1,1)
- m_sourceRect.top = 0;
- m_sourceRect.left = 0;
- m_sourceRect.bottom = 1;
- m_sourceRect.right = 1;
-
- m_D3DPresentEngine = new D3DPresentEngine;
- m_scheduler.setCallback(m_D3DPresentEngine);
-}
-
-EVRCustomPresenter::~EVRCustomPresenter()
-{
- qt_wmf_safeRelease(&m_clock);
- qt_wmf_safeRelease(&m_mixer);
- qt_wmf_safeRelease(&m_mediaEventSink);
- qt_wmf_safeRelease(&m_mediaType);
-
- m_D3DPresentEngine->deleteLater();
-}
-
-HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
-{
- if (!ppvObject)
- return E_POINTER;
- if (riid == IID_IMFGetService) {
- *ppvObject = static_cast<IMFGetService*>(this);
- } else if (riid == IID_IMFTopologyServiceLookupClient) {
- *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
- } else if (riid == IID_IMFVideoDeviceID) {
- *ppvObject = static_cast<IMFVideoDeviceID*>(this);
- } else if (riid == IID_IMFVideoPresenter) {
- *ppvObject = static_cast<IMFVideoPresenter*>(this);
- } else if (riid == IID_IMFRateSupport) {
- *ppvObject = static_cast<IMFRateSupport*>(this);
- } else if (riid == IID_IUnknown) {
- *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
- } else if (riid == IID_IMFClockStateSink) {
- *ppvObject = static_cast<IMFClockStateSink*>(this);
- } else {
- *ppvObject = NULL;
- return E_NOINTERFACE;
- }
- AddRef();
- return S_OK;
-}
-
-ULONG EVRCustomPresenter::AddRef()
-{
- return InterlockedIncrement(&m_refCount);
-}
-
-ULONG EVRCustomPresenter::Release()
-{
- ULONG uCount = InterlockedDecrement(&m_refCount);
- if (uCount == 0)
- delete this;
- return uCount;
-}
-
-HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
-{
- HRESULT hr = S_OK;
-
- if (!ppvObject)
- return E_POINTER;
-
- // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
- if (guidService != MR_VIDEO_RENDER_SERVICE)
- return MF_E_UNSUPPORTED_SERVICE;
-
- // First try to get the service interface from the D3DPresentEngine object.
- hr = m_D3DPresentEngine->getService(guidService, riid, ppvObject);
- if (FAILED(hr))
- // Next, check if this object supports the interface.
- hr = QueryInterface(riid, ppvObject);
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID)
-{
- if (!deviceID)
- return E_POINTER;
-
- *deviceID = IID_IDirect3DDevice9;
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
-{
- if (!lookup)
- return E_POINTER;
-
- HRESULT hr = S_OK;
- DWORD objectCount = 0;
-
- QMutexLocker locker(&m_mutex);
-
- // Do not allow initializing when playing or paused.
- if (isActive())
- return MF_E_INVALIDREQUEST;
-
- qt_wmf_safeRelease(&m_clock);
- qt_wmf_safeRelease(&m_mixer);
- qt_wmf_safeRelease(&m_mediaEventSink);
-
- // Ask for the clock. Optional, because the EVR might not have a clock.
- objectCount = 1;
-
- lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
- MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
- &objectCount
- );
-
- // Ask for the mixer. (Required.)
- objectCount = 1;
-
- hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
- MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
- &objectCount
- );
-
- if (FAILED(hr))
- return hr;
-
- // Make sure that we can work with this mixer.
- hr = configureMixer(m_mixer);
- if (FAILED(hr))
- return hr;
-
- // Ask for the EVR's event-sink interface. (Required.)
- objectCount = 1;
-
- hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
- MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
- &objectCount
- );
-
- if (SUCCEEDED(hr))
- m_renderState = RenderStopped;
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::ReleaseServicePointers()
-{
- // Enter the shut-down state.
- m_mutex.lock();
-
- m_renderState = RenderShutdown;
-
- m_mutex.unlock();
-
- // Flush any samples that were scheduled.
- flush();
-
- // Clear the media type and release related resources.
- setMediaType(NULL);
-
- // Release all services that were acquired from InitServicePointers.
- qt_wmf_safeRelease(&m_clock);
- qt_wmf_safeRelease(&m_mixer);
- qt_wmf_safeRelease(&m_mediaEventSink);
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
-{
- HRESULT hr = S_OK;
-
- QMutexLocker locker(&m_mutex);
-
- hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- switch (message) {
- // Flush all pending samples.
- case MFVP_MESSAGE_FLUSH:
- hr = flush();
- break;
-
- // Renegotiate the media type with the mixer.
- case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
- hr = renegotiateMediaType();
- break;
-
- // The mixer received a new input sample.
- case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
- hr = processInputNotify();
- break;
-
- // Streaming is about to start.
- case MFVP_MESSAGE_BEGINSTREAMING:
- hr = beginStreaming();
- break;
-
- // Streaming has ended. (The EVR has stopped.)
- case MFVP_MESSAGE_ENDSTREAMING:
- hr = endStreaming();
- break;
-
- // All input streams have ended.
- case MFVP_MESSAGE_ENDOFSTREAM:
- // Set the EOS flag.
- m_endStreaming = true;
- // Check if it's time to send the EC_COMPLETE event to the EVR.
- hr = checkEndOfStream();
- break;
-
- // Frame-stepping is starting.
- case MFVP_MESSAGE_STEP:
- hr = prepareFrameStep(DWORD(param));
- break;
-
- // Cancels frame-stepping.
- case MFVP_MESSAGE_CANCELSTEP:
- hr = cancelFrameStep();
- break;
-
- default:
- hr = E_INVALIDARG; // Unknown message. This case should never occur.
- break;
- }
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
-{
- HRESULT hr = S_OK;
-
- if (!mediaType)
- return E_POINTER;
-
- *mediaType = NULL;
-
- QMutexLocker locker(&m_mutex);
-
- hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- if (!m_mediaType)
- return MF_E_NOT_INITIALIZED;
-
- return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
-}
-
-HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
-{
- QMutexLocker locker(&m_mutex);
-
- // We cannot start after shutdown.
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- // Check if the clock is already active (not stopped).
- if (isActive()) {
- m_renderState = RenderStarted;
-
- // If the clock position changes while the clock is active, it
- // is a seek request. We need to flush all pending samples.
- if (clockStartOffset != PRESENTATION_CURRENT_POSITION)
- flush();
- } else {
- m_renderState = RenderStarted;
-
- // The clock has started from the stopped state.
-
- // Possibly we are in the middle of frame-stepping OR have samples waiting
- // in the frame-step queue. Deal with these two cases first:
- hr = startFrameStep();
- if (FAILED(hr))
- return hr;
- }
-
- // Start the video surface in the main thread
- if (thread() == QThread::currentThread())
- startSurface();
- else
- QMetaObject::invokeMethod(this, "startSurface", Qt::QueuedConnection);
-
- // Now try to get new output samples from the mixer.
- processOutputLoop();
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::OnClockRestart(MFTIME)
-{
- QMutexLocker locker(&m_mutex);
-
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- // The EVR calls OnClockRestart only while paused.
-
- m_renderState = RenderStarted;
-
- // Possibly we are in the middle of frame-stepping OR we have samples waiting
- // in the frame-step queue. Deal with these two cases first:
- hr = startFrameStep();
- if (FAILED(hr))
- return hr;
-
- // Now resume the presentation loop.
- processOutputLoop();
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
-{
- QMutexLocker locker(&m_mutex);
-
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- if (m_renderState != RenderStopped) {
- m_renderState = RenderStopped;
- flush();
-
- // If we are in the middle of frame-stepping, cancel it now.
- if (m_frameStep.state != FrameStepNone)
- cancelFrameStep();
- }
-
- // Stop the video surface in the main thread
- if (thread() == QThread::currentThread())
- stopSurface();
- else
- QMetaObject::invokeMethod(this, "stopSurface", Qt::QueuedConnection);
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::OnClockPause(MFTIME)
-{
- QMutexLocker locker(&m_mutex);
-
- // We cannot pause the clock after shutdown.
- HRESULT hr = checkShutdown();
-
- if (SUCCEEDED(hr))
- m_renderState = RenderPaused;
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate)
-{
- // Note:
- // The presenter reports its maximum rate through the IMFRateSupport interface.
- // Here, we assume that the EVR honors the maximum rate.
-
- QMutexLocker locker(&m_mutex);
-
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- // If the rate is changing from zero (scrubbing) to non-zero, cancel the
- // frame-step operation.
- if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
- cancelFrameStep();
- for (int i = 0; i < m_frameStep.samples.size(); ++i)
- m_frameStep.samples[i]->Release();
- m_frameStep.samples.clear();
- }
-
- m_playbackRate = rate;
-
- // Tell the scheduler about the new rate.
- m_scheduler.setClockRate(rate);
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
-{
- if (!rate)
- return E_POINTER;
-
- QMutexLocker locker(&m_mutex);
-
- HRESULT hr = checkShutdown();
-
- if (SUCCEEDED(hr)) {
- // There is no minimum playback rate, so the minimum is zero.
- *rate = 0;
- }
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
-{
- if (!rate)
- return E_POINTER;
-
- QMutexLocker locker(&m_mutex);
-
- float maxRate = 0.0f;
-
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- // Get the maximum *forward* rate.
- maxRate = getMaxRate(thin);
-
- // For reverse playback, it's the negative of maxRate.
- if (direction == MFRATE_REVERSE)
- maxRate = -maxRate;
-
- *rate = maxRate;
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
-{
- QMutexLocker locker(&m_mutex);
-
- float maxRate = 0.0f;
- float nearestRate = rate; // If we support rate, that is the nearest.
-
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- return hr;
-
- // Find the maximum forward rate.
- // Note: We have no minimum rate (that is, we support anything down to 0).
- maxRate = getMaxRate(thin);
-
- if (qFabs(rate) > maxRate) {
- // The (absolute) requested rate exceeds the maximum rate.
- hr = MF_E_UNSUPPORTED_RATE;
-
- // The nearest supported rate is maxRate.
- nearestRate = maxRate;
- if (rate < 0) {
- // Negative for reverse playback.
- nearestRate = -nearestRate;
- }
- }
-
- // Return the nearest supported rate.
- if (nearestSupportedRate)
- *nearestSupportedRate = nearestRate;
-
- return hr;
-}
-
-void EVRCustomPresenter::supportedFormatsChanged()
-{
- QMutexLocker locker(&m_mutex);
-
- m_supportedGLFormats.clear();
- if (!m_surface)
- return;
-
- QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle);
- for (int i = 0; i < formats.size(); ++i) {
- DWORD fourCC = getFourCCFromPixelFormat(formats.at(i));
- if (fourCC)
- m_supportedGLFormats.append(fourCC);
- }
-}
-
-void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface)
-{
- m_mutex.lock();
-
- m_surface = surface;
-
- if (m_D3DPresentEngine)
- m_D3DPresentEngine->setSurface(surface);
-
- m_mutex.unlock();
-
- supportedFormatsChanged();
-}
-
-HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
-{
- // Set the zoom rectangle (i.e., the source clipping rectangle).
- return setMixerSourceRect(mixer, m_sourceRect);
-}
-
-HRESULT EVRCustomPresenter::renegotiateMediaType()
-{
- HRESULT hr = S_OK;
- bool foundMediaType = false;
-
- IMFMediaType *mixerType = NULL;
- IMFMediaType *optimalType = NULL;
-
- if (!m_mixer)
- return MF_E_INVALIDREQUEST;
-
- // Loop through all of the mixer's proposed output types.
- DWORD typeIndex = 0;
- while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
- qt_wmf_safeRelease(&mixerType);
- qt_wmf_safeRelease(&optimalType);
-
- // Step 1. Get the next media type supported by mixer.
- hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
- if (FAILED(hr))
- break;
-
- // From now on, if anything in this loop fails, try the next type,
- // until we succeed or the mixer runs out of types.
-
- // Step 2. Check if we support this media type.
- if (SUCCEEDED(hr))
- hr = isMediaTypeSupported(mixerType);
-
- // Step 3. Adjust the mixer's type to match our requirements.
- if (SUCCEEDED(hr))
- hr = createOptimalVideoType(mixerType, &optimalType);
-
- // Step 4. Check if the mixer will accept this media type.
- if (SUCCEEDED(hr))
- hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
-
- // Step 5. Try to set the media type on ourselves.
- if (SUCCEEDED(hr))
- hr = setMediaType(optimalType);
-
- // Step 6. Set output media type on mixer.
- if (SUCCEEDED(hr)) {
- hr = m_mixer->SetOutputType(0, optimalType, 0);
-
- // If something went wrong, clear the media type.
- if (FAILED(hr))
- setMediaType(NULL);
- }
-
- if (SUCCEEDED(hr))
- foundMediaType = true;
- }
-
- qt_wmf_safeRelease(&mixerType);
- qt_wmf_safeRelease(&optimalType);
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::flush()
-{
- m_prerolled = false;
-
- // The scheduler might have samples that are waiting for
- // their presentation time. Tell the scheduler to flush.
-
- // This call blocks until the scheduler thread discards all scheduled samples.
- m_scheduler.flush();
-
- // Flush the frame-step queue.
- for (int i = 0; i < m_frameStep.samples.size(); ++i)
- m_frameStep.samples[i]->Release();
- m_frameStep.samples.clear();
-
- if (m_renderState == RenderStopped) {
- // Repaint with black.
- QMetaObject::invokeMethod(m_D3DPresentEngine,
- "presentSample",
- Qt::QueuedConnection,
- Q_ARG(void*, 0),
- Q_ARG(qint64, 0));
- }
-
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::processInputNotify()
-{
- HRESULT hr = S_OK;
-
- // Set the flag that says the mixer has a new sample.
- m_sampleNotify = true;
-
- if (!m_mediaType) {
- // We don't have a valid media type yet.
- hr = MF_E_TRANSFORM_TYPE_NOT_SET;
- } else {
- // Try to process an output sample.
- processOutputLoop();
- }
- return hr;
-}
-
-HRESULT EVRCustomPresenter::beginStreaming()
-{
- HRESULT hr = S_OK;
-
- // Start the scheduler thread.
- hr = m_scheduler.startScheduler(m_clock);
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::endStreaming()
-{
- HRESULT hr = S_OK;
-
- // Stop the scheduler thread.
- hr = m_scheduler.stopScheduler();
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::checkEndOfStream()
-{
- if (!m_endStreaming) {
- // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
- return S_OK;
- }
-
- if (m_sampleNotify) {
- // The mixer still has input.
- return S_OK;
- }
-
- if (m_samplePool.areSamplesPending()) {
- // Samples are still scheduled for rendering.
- return S_OK;
- }
-
- // Everything is complete. Now we can tell the EVR that we are done.
- notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
- m_endStreaming = false;
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
-{
- HRESULT hr = S_OK;
-
- // Cache the step count.
- m_frameStep.steps += steps;
-
- // Set the frame-step state.
- m_frameStep.state = FrameStepWaitingStart;
-
- // If the clock is already running, we can start frame-stepping now.
- // Otherwise, we will start when the clock starts.
- if (m_renderState == RenderStarted)
- hr = startFrameStep();
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::startFrameStep()
-{
- HRESULT hr = S_OK;
- IMFSample *sample = NULL;
-
- if (m_frameStep.state == FrameStepWaitingStart) {
- // We have a frame-step request, and are waiting for the clock to start.
- // Set the state to "pending," which means we are waiting for samples.
- m_frameStep.state = FrameStepPending;
-
- // If the frame-step queue already has samples, process them now.
- while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
- sample = m_frameStep.samples.takeFirst();
-
- hr = deliverFrameStepSample(sample);
- if (FAILED(hr))
- goto done;
-
- qt_wmf_safeRelease(&sample);
-
- // We break from this loop when:
- // (a) the frame-step queue is empty, or
- // (b) the frame-step operation is complete.
- }
- } else if (m_frameStep.state == FrameStepNone) {
- // We are not frame stepping. Therefore, if the frame-step queue has samples,
- // we need to process them normally.
- while (!m_frameStep.samples.isEmpty()) {
- sample = m_frameStep.samples.takeFirst();
-
- hr = deliverSample(sample, false);
- if (FAILED(hr))
- goto done;
-
- qt_wmf_safeRelease(&sample);
- }
- }
-
-done:
- qt_wmf_safeRelease(&sample);
- return hr;
-}
-
-HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample)
-{
- HRESULT hr = S_OK;
- MFTIME sampleTime = 0;
- MFTIME systemTime = 0;
-
- // Update our state.
- m_frameStep.state = FrameStepComplete;
- m_frameStep.sampleNoRef = NULL;
-
- // Notify the EVR that the frame-step is complete.
- notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)
-
- // If we are scrubbing (rate == 0), also send the "scrub time" event.
- if (isScrubbing()) {
- // Get the time stamp from the sample.
- hr = sample->GetSampleTime(&sampleTime);
- if (FAILED(hr)) {
- // No time stamp. Use the current presentation time.
- if (m_clock)
- m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
-
- hr = S_OK; // (Not an error condition.)
- }
-
- notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
- }
- return hr;
-}
-
-HRESULT EVRCustomPresenter::cancelFrameStep()
-{
- FrameStepState oldState = m_frameStep.state;
-
- m_frameStep.state = FrameStepNone;
- m_frameStep.steps = 0;
- m_frameStep.sampleNoRef = NULL;
- // Don't clear the frame-step queue yet, because we might frame step again.
-
- if (oldState > FrameStepNone && oldState < FrameStepComplete) {
- // We were in the middle of frame-stepping when it was cancelled.
- // Notify the EVR.
- notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
- }
- return S_OK;
-}
-
-HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
-{
- HRESULT hr = S_OK;
-
- RECT rcOutput;
- ZeroMemory(&rcOutput, sizeof(rcOutput));
-
- MFVideoArea displayArea;
- ZeroMemory(&displayArea, sizeof(displayArea));
-
- IMFMediaType *mtOptimal = NULL;
-
- // Clone the proposed type.
-
- hr = MFCreateMediaType(&mtOptimal);
- if (FAILED(hr))
- goto done;
-
- hr = proposedType->CopyAllItems(mtOptimal);
- if (FAILED(hr))
- goto done;
-
- // Modify the new type.
-
- // Set the pixel aspect ratio (PAR) to 1:1 (see assumption #1, above)
- hr = MFSetAttributeRatio(mtOptimal, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
- if (FAILED(hr))
- goto done;
-
- UINT64 size;
- hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
- int width = int(HI32(size));
- int height = int(LO32(size));
- rcOutput.left = 0;
- rcOutput.top = 0;
- rcOutput.right = width;
- rcOutput.bottom = height;
-
- // Set the geometric aperture, and disable pan/scan.
- displayArea = qt_wmf_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom);
-
- hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
- if (FAILED(hr))
- goto done;
-
- hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
- if (FAILED(hr))
- goto done;
-
- // Set the pan/scan aperture and the minimum display aperture. We don't care
- // about them per se, but the mixer will reject the type if these exceed the
- // frame dimensions.
- hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
- if (FAILED(hr))
- goto done;
-
- hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
- if (FAILED(hr))
- goto done;
-
- // Return the pointer to the caller.
- *optimalType = mtOptimal;
- (*optimalType)->AddRef();
-
-done:
- qt_wmf_safeRelease(&mtOptimal);
- return hr;
-
-}
-
-HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
-{
- // Note: mediaType can be NULL (to clear the type)
-
- // Clearing the media type is allowed in any state (including shutdown).
- if (!mediaType) {
- qt_wmf_safeRelease(&m_mediaType);
- releaseResources();
- m_D3DPresentEngine->setSurfaceFormat(QVideoSurfaceFormat());
- return S_OK;
- }
-
- MFRatio fps = { 0, 0 };
- QList<IMFSample*> sampleQueue;
-
- IMFSample *sample = NULL;
-
- QVideoSurfaceFormat surfaceFormat;
-
- // Cannot set the media type after shutdown.
- HRESULT hr = checkShutdown();
- if (FAILED(hr))
- goto done;
-
- // Check if the new type is actually different.
- // Note: This function safely handles NULL input parameters.
- if (qt_wmf_areMediaTypesEqual(m_mediaType, mediaType))
- goto done; // Nothing more to do.
-
- // We're really changing the type. First get rid of the old type.
- qt_wmf_safeRelease(&m_mediaType);
- releaseResources();
-
- // Initialize the presenter engine with the new media type.
- // The presenter engine allocates the samples.
-
- hr = m_D3DPresentEngine->createVideoSamples(mediaType, sampleQueue);
- if (FAILED(hr))
- goto done;
-
- // Mark each sample with our token counter. If this batch of samples becomes
- // invalid, we increment the counter, so that we know they should be discarded.
- for (int i = 0; i < sampleQueue.size(); ++i) {
- sample = sampleQueue.at(i);
-
- hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
- if (FAILED(hr))
- goto done;
- }
-
- // Add the samples to the sample pool.
- hr = m_samplePool.initialize(sampleQueue);
- if (FAILED(hr))
- goto done;
-
- // Set the frame rate on the scheduler.
- if (SUCCEEDED(qt_wmf_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
- m_scheduler.setFrameRate(fps);
- } else {
- // NOTE: The mixer's proposed type might not have a frame rate, in which case
- // we'll use an arbitrary default. (Although it's unlikely the video source
- // does not have a frame rate.)
- m_scheduler.setFrameRate(g_DefaultFrameRate);
- }
-
- // Store the media type.
- m_mediaType = mediaType;
- m_mediaType->AddRef();
-
- // Create the surface format
- UINT64 size;
- hr = m_mediaType->GetUINT64(MF_MT_FRAME_SIZE, &size);
- int width = int(HI32(size));
- int height = int(LO32(size));
- surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
- pixelFormatFromMediaType(m_mediaType),
- QAbstractVideoBuffer::GLTextureHandle);
- m_D3DPresentEngine->setSurfaceFormat(surfaceFormat);
-
-done:
- if (FAILED(hr))
- releaseResources();
- return hr;
-}
-
-HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
-{
- D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
- BOOL compressed = FALSE;
- MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
- MFVideoArea videoCropArea;
- UINT32 width = 0, height = 0;
-
- // Validate the format.
- HRESULT hr = qt_wmf_getFourCC(proposed, (DWORD*)&d3dFormat);
- if (FAILED(hr))
- return hr;
-
- // Only accept pixel formats supported by the video surface
- if (!m_supportedGLFormats.contains((DWORD)d3dFormat))
- return MF_E_INVALIDMEDIATYPE;
-
- // Reject compressed media types.
- hr = proposed->IsCompressedFormat(&compressed);
- if (FAILED(hr))
- return hr;
-
- if (compressed)
- return MF_E_INVALIDMEDIATYPE;
-
- // The D3DPresentEngine checks whether the format can be used as
- // the back-buffer format for the swap chains.
- hr = m_D3DPresentEngine->checkFormat(d3dFormat);
- if (FAILED(hr))
- return hr;
-
- // Reject interlaced formats.
- hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlaceMode);
- if (FAILED(hr))
- return hr;
-
- if (interlaceMode != MFVideoInterlace_Progressive)
- return MF_E_INVALIDMEDIATYPE;
-
- hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
- if (FAILED(hr))
- return hr;
-
- // Validate the various apertures (cropping regions) against the frame size.
- // Any of these apertures may be unspecified in the media type, in which case
- // we ignore it. We just want to reject invalid apertures.
-
- if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
- hr = qt_wmf_validateVideoArea(videoCropArea, width, height);
-
- if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
- hr = qt_wmf_validateVideoArea(videoCropArea, width, height);
-
- if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
- hr = qt_wmf_validateVideoArea(videoCropArea, width, height);
-
- return hr;
-}
-
-void EVRCustomPresenter::processOutputLoop()
-{
- HRESULT hr = S_OK;
-
- // Process as many samples as possible.
- while (hr == S_OK) {
- // If the mixer doesn't have a new input sample, break from the loop.
- if (!m_sampleNotify) {
- hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
- break;
- }
-
- // Try to process a sample.
- hr = processOutput();
-
- // NOTE: ProcessOutput can return S_FALSE to indicate it did not
- // process a sample. If so, break out of the loop.
- }
-
- if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
- // The mixer has run out of input data. Check for end-of-stream.
- checkEndOfStream();
- }
-}
-
-HRESULT EVRCustomPresenter::processOutput()
-{
- HRESULT hr = S_OK;
- DWORD status = 0;
- LONGLONG mixerStartTime = 0, mixerEndTime = 0;
- MFTIME systemTime = 0;
- BOOL repaint = m_repaint; // Temporarily store this state flag.
-
- MFT_OUTPUT_DATA_BUFFER dataBuffer;
- ZeroMemory(&dataBuffer, sizeof(dataBuffer));
-
- IMFSample *sample = NULL;
-
- // If the clock is not running, we present the first sample,
- // and then don't present any more until the clock starts.
-
- if ((m_renderState != RenderStarted) && !m_repaint && m_prerolled)
- return S_FALSE;
-
- // Make sure we have a pointer to the mixer.
- if (!m_mixer)
- return MF_E_INVALIDREQUEST;
-
- // Try to get a free sample from the video sample pool.
- hr = m_samplePool.getSample(&sample);
- if (hr == MF_E_SAMPLEALLOCATOR_EMPTY) {
- // No free samples. Try again when a sample is released.
- return S_FALSE;
- } else if (FAILED(hr)) {
- return hr;
- }
-
- // From now on, we have a valid video sample pointer, where the mixer will
- // write the video data.
-
- if (m_repaint) {
- // Repaint request. Ask the mixer for the most recent sample.
- setDesiredSampleTime(sample, m_scheduler.lastSampleTime(), m_scheduler.frameDuration());
-
- m_repaint = false; // OK to clear this flag now.
- } else {
- // Not a repaint request. Clear the desired sample time; the mixer will
- // give us the next frame in the stream.
- clearDesiredSampleTime(sample);
-
- if (m_clock) {
- // Latency: Record the starting time for ProcessOutput.
- m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
- }
- }
-
- // Now we are ready to get an output sample from the mixer.
- dataBuffer.dwStreamID = 0;
- dataBuffer.pSample = sample;
- dataBuffer.dwStatus = 0;
-
- hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
-
- if (FAILED(hr)) {
- // Return the sample to the pool.
- HRESULT hr2 = m_samplePool.returnSample(sample);
- if (FAILED(hr2)) {
- hr = hr2;
- goto done;
- }
- // Handle some known error codes from ProcessOutput.
- if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
- // The mixer's format is not set. Negotiate a new format.
- hr = renegotiateMediaType();
- } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
- // There was a dynamic media type change. Clear our media type.
- setMediaType(NULL);
- } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
- // The mixer needs more input.
- // We have to wait for the mixer to get more input.
- m_sampleNotify = false;
- }
- } else {
- // We got an output sample from the mixer.
-
- if (m_clock && !repaint) {
- // Latency: Record the ending time for the ProcessOutput operation,
- // and notify the EVR of the latency.
-
- m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);
-
- LONGLONG latencyTime = mixerEndTime - mixerStartTime;
- notifyEvent(EC_PROCESSING_LATENCY, (LONG_PTR)&latencyTime, 0);
- }
-
- // Set up notification for when the sample is released.
- hr = trackSample(sample);
- if (FAILED(hr))
- goto done;
-
- // Schedule the sample.
- if ((m_frameStep.state == FrameStepNone) || repaint) {
- hr = deliverSample(sample, repaint);
- if (FAILED(hr))
- goto done;
- } else {
- // We are frame-stepping (and this is not a repaint request).
- hr = deliverFrameStepSample(sample);
- if (FAILED(hr))
- goto done;
- }
-
- m_prerolled = true; // We have presented at least one sample now.
- }
-
-done:
- qt_wmf_safeRelease(&sample);
-
- // Important: Release any events returned from the ProcessOutput method.
- qt_wmf_safeRelease(&dataBuffer.pEvents);
- return hr;
-}
-
-HRESULT EVRCustomPresenter::deliverSample(IMFSample *sample, bool repaint)
-{
- // If we are not actively playing, OR we are scrubbing (rate = 0) OR this is a
- // repaint request, then we need to present the sample immediately. Otherwise,
- // schedule it normally.
-
- bool presentNow = ((m_renderState != RenderStarted) || isScrubbing() || repaint);
-
- HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
-
- if (FAILED(hr)) {
- // Notify the EVR that we have failed during streaming. The EVR will notify the
- // pipeline.
-
- notifyEvent(EC_ERRORABORT, hr, 0);
- }
-
- return hr;
-}
-
-HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample)
-{
- HRESULT hr = S_OK;
- IUnknown *unk = NULL;
-
- // For rate 0, discard any sample that ends earlier than the clock time.
- if (isScrubbing() && m_clock && qt_wmf_isSampleTimePassed(m_clock, sample)) {
- // Discard this sample.
- } else if (m_frameStep.state >= FrameStepScheduled) {
- // A frame was already submitted. Put this sample on the frame-step queue,
- // in case we are asked to step to the next frame. If frame-stepping is
- // cancelled, this sample will be processed normally.
- sample->AddRef();
- m_frameStep.samples.append(sample);
- } else {
- // We're ready to frame-step.
-
- // Decrement the number of steps.
- if (m_frameStep.steps > 0)
- m_frameStep.steps--;
-
- if (m_frameStep.steps > 0) {
- // This is not the last step. Discard this sample.
- } else if (m_frameStep.state == FrameStepWaitingStart) {
- // This is the right frame, but the clock hasn't started yet. Put the
- // sample on the frame-step queue. When the clock starts, the sample
- // will be processed.
- sample->AddRef();
- m_frameStep.samples.append(sample);
- } else {
- // This is the right frame *and* the clock has started. Deliver this sample.
- hr = deliverSample(sample, false);
- if (FAILED(hr))
- goto done;
-
- // Query for IUnknown so that we can identify the sample later.
- // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
- hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
- if (FAILED(hr))
- goto done;
-
- m_frameStep.sampleNoRef = (DWORD_PTR)unk; // No add-ref.
-
- // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
- // sample from invoking the OnSampleFree callback after the sample is presented.
- // We use this IUnknown pointer purely to identify the sample later; we never
- // attempt to dereference the pointer.
-
- m_frameStep.state = FrameStepScheduled;
- }
- }
-done:
- qt_wmf_safeRelease(&unk);
- return hr;
-}
-
-HRESULT EVRCustomPresenter::trackSample(IMFSample *sample)
-{
- IMFTrackedSample *tracked = NULL;
-
- HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
-
- if (SUCCEEDED(hr))
- hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
-
- qt_wmf_safeRelease(&tracked);
- return hr;
-}
-
-void EVRCustomPresenter::releaseResources()
-{
- // Increment the token counter to indicate that all existing video samples
- // are "stale." As these samples get released, we'll dispose of them.
- //
- // Note: The token counter is required because the samples are shared
- // between more than one thread, and they are returned to the presenter
- // through an asynchronous callback (onSampleFree). Without the token, we
- // might accidentally re-use a stale sample after the ReleaseResources
- // method returns.
-
- m_tokenCounter++;
-
- flush();
-
- m_samplePool.clear();
-
- m_D3DPresentEngine->releaseResources();
-}
-
-HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
-{
- IUnknown *object = NULL;
- IMFSample *sample = NULL;
- IUnknown *unk = NULL;
-
- // Get the sample from the async result object.
- HRESULT hr = result->GetObject(&object);
- if (FAILED(hr))
- goto done;
-
- hr = object->QueryInterface(IID_PPV_ARGS(&sample));
- if (FAILED(hr))
- goto done;
-
- // If this sample was submitted for a frame-step, the frame step operation
- // is complete.
-
- if (m_frameStep.state == FrameStepScheduled) {
- // Query the sample for IUnknown and compare it to our cached value.
- hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
- if (FAILED(hr))
- goto done;
-
- if (m_frameStep.sampleNoRef == (DWORD_PTR)unk) {
- // Notify the EVR.
- hr = completeFrameStep(sample);
- if (FAILED(hr))
- goto done;
- }
-
- // Note: Although object is also an IUnknown pointer, it is not
- // guaranteed to be the exact pointer value returned through
- // QueryInterface. Therefore, the second QueryInterface call is
- // required.
- }
-
- m_mutex.lock();
-
- UINT32 token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
-
- if (token == m_tokenCounter) {
- // Return the sample to the sample pool.
- hr = m_samplePool.returnSample(sample);
- if (SUCCEEDED(hr)) {
- // A free sample is available. Process more data if possible.
- processOutputLoop();
- }
- }
-
- m_mutex.unlock();
-
-done:
- if (FAILED(hr))
- notifyEvent(EC_ERRORABORT, hr, 0);
- qt_wmf_safeRelease(&object);
- qt_wmf_safeRelease(&sample);
- qt_wmf_safeRelease(&unk);
- return hr;
-}
-
-void EVRCustomPresenter::startSurface()
-{
- if (m_D3DPresentEngine)
- m_D3DPresentEngine->start();
-}
-
-void EVRCustomPresenter::stopSurface()
-{
- if (m_D3DPresentEngine)
- m_D3DPresentEngine->stop();
-}
-
-float EVRCustomPresenter::getMaxRate(bool thin)
-{
- // Non-thinned:
- // If we have a valid frame rate and a monitor refresh rate, the maximum
- // playback rate is equal to the refresh rate. Otherwise, the maximum rate
- // is unbounded (FLT_MAX).
-
- // Thinned: The maximum rate is unbounded.
-
- float maxRate = FLT_MAX;
- MFRatio fps = { 0, 0 };
- UINT monitorRateHz = 0;
-
- if (!thin && m_mediaType) {
- qt_wmf_getFrameRate(m_mediaType, &fps);
- monitorRateHz = m_D3DPresentEngine->refreshRate();
-
- if (fps.Denominator && fps.Numerator && monitorRateHz) {
- // Max Rate = Refresh Rate / Frame Rate
- maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
- }
- }
-
- return maxRate;
-}
-
-HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration)
-{
- if (!sample)
- return E_POINTER;
-
- HRESULT hr = S_OK;
- IMFDesiredSample *desired = NULL;
-
- hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
- if (SUCCEEDED(hr))
- desired->SetDesiredSampleTimeAndDuration(sampleTime, duration);
-
- qt_wmf_safeRelease(&desired);
- return hr;
-}
-
-HRESULT clearDesiredSampleTime(IMFSample *sample)
-{
- if (!sample)
- return E_POINTER;
-
- HRESULT hr = S_OK;
-
- IMFDesiredSample *desired = NULL;
- IUnknown *unkSwapChain = NULL;
-
- // We store some custom attributes on the sample, so we need to cache them
- // and reset them.
- //
- // This works around the fact that IMFDesiredSample::Clear() removes all of the
- // attributes from the sample.
-
- UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
-
- sample->GetUnknown(MFSamplePresenter_SampleSwapChain, IID_IUnknown, (void**)&unkSwapChain);
-
- hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
- if (SUCCEEDED(hr)) {
- desired->Clear();
-
- hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter);
- if (FAILED(hr))
- goto done;
-
- if (unkSwapChain) {
- hr = sample->SetUnknown(MFSamplePresenter_SampleSwapChain, unkSwapChain);
- if (FAILED(hr))
- goto done;
- }
- }
-
-done:
- qt_wmf_safeRelease(&unkSwapChain);
- qt_wmf_safeRelease(&desired);
- return hr;
-}
-
-HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
-{
- if (!mixer)
- return E_POINTER;
-
- IMFAttributes *attributes = NULL;
-
- HRESULT hr = mixer->GetAttributes(&attributes);
- if (SUCCEEDED(hr)) {
- hr = attributes->SetBlob(VIDEO_ZOOM_RECT, (const UINT8*)&sourceRect, sizeof(sourceRect));
- attributes->Release();
- }
- return hr;
-}
-
-DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat)
-{
- DWORD fourCC = 0;
- switch (pixelFormat) {
- case QVideoFrame::Format_ARGB32:
- case QVideoFrame::Format_ARGB32_Premultiplied:
- fourCC = MFVideoFormat_ARGB32.Data1;
- break;
- case QVideoFrame::Format_RGB32:
- fourCC = MFVideoFormat_RGB32.Data1;
- break;
- case QVideoFrame::Format_RGB24:
- fourCC = MFVideoFormat_RGB24.Data1;
- break;
- case QVideoFrame::Format_RGB565:
- fourCC = MFVideoFormat_RGB565.Data1;
- break;
- case QVideoFrame::Format_RGB555:
- fourCC = MFVideoFormat_RGB555.Data1;
- break;
- case QVideoFrame::Format_AYUV444:
- case QVideoFrame::Format_AYUV444_Premultiplied:
- fourCC = MFVideoFormat_AYUV.Data1;
- break;
- case QVideoFrame::Format_YUV420P:
- fourCC = MFVideoFormat_I420.Data1;
- break;
- case QVideoFrame::Format_UYVY:
- fourCC = MFVideoFormat_UYVY.Data1;
- break;
- case QVideoFrame::Format_YV12:
- fourCC = MFVideoFormat_YV12.Data1;
- break;
- case QVideoFrame::Format_NV12:
- fourCC = MFVideoFormat_NV12.Data1;
- break;
- default:
- break;
- }
- return fourCC;
-}
-
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
-{
- GUID majorType;
- if (FAILED(type->GetMajorType(&majorType)))
- return QVideoFrame::Format_Invalid;
- if (majorType != MFMediaType_Video)
- return QVideoFrame::Format_Invalid;
-
- GUID subType;
- if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subType)))
- return QVideoFrame::Format_Invalid;
-
- if (subType == MFVideoFormat_RGB32)
- return QVideoFrame::Format_RGB32;
-
- return QVideoFrame::Format_Invalid;
-}
-
-
-EVRCustomPresenterActivate::EVRCustomPresenterActivate()
- : MFAbstractActivate()
- , m_presenter(0)
- , m_surface(0)
-{ }
-
-HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
-{
- if (!ppv)
- return E_INVALIDARG;
- QMutexLocker locker(&m_mutex);
- if (!m_presenter) {
- m_presenter = new EVRCustomPresenter;
- if (m_surface)
- m_presenter->setSurface(m_surface);
- }
- return m_presenter->QueryInterface(riid, ppv);
-}
-
-HRESULT EVRCustomPresenterActivate::ShutdownObject()
-{
- // The presenter does not implement IMFShutdown so
- // this function is the same as DetachObject()
- return DetachObject();
-}
-
-HRESULT EVRCustomPresenterActivate::DetachObject()
-{
- QMutexLocker locker(&m_mutex);
- if (m_presenter) {
- m_presenter->Release();
- m_presenter = 0;
- }
- return S_OK;
-}
-
-void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface)
-{
- QMutexLocker locker(&m_mutex);
- if (m_surface == surface)
- return;
-
- m_surface = surface;
-
- if (m_presenter)
- m_presenter->setSurface(surface);
-}
-
-void EVRCustomPresenterActivate::supportedFormatsChanged()
-{
- QMutexLocker locker(&m_mutex);
-
- if (m_presenter)
- m_presenter->supportedFormatsChanged();
-}
diff --git a/src/plugins/wmf/evrcustompresenter.h b/src/plugins/wmf/evrcustompresenter.h
deleted file mode 100644
index 1d24feaa5..000000000
--- a/src/plugins/wmf/evrcustompresenter.h
+++ /dev/null
@@ -1,325 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef EVRCUSTOMPRESENTER_H
-#define EVRCUSTOMPRESENTER_H
-
-#include <QObject>
-#include <qmutex.h>
-#include <qqueue.h>
-#include <evr.h>
-#include "mfactivate.h"
-
-QT_BEGIN_NAMESPACE
-
-class D3DPresentEngine;
-class QAbstractVideoSurface;
-
-class Scheduler
-{
-public:
- enum ScheduleEvent
- {
- Terminate = WM_USER,
- Schedule = WM_USER + 1,
- Flush = WM_USER + 2
- };
-
- Scheduler();
- ~Scheduler();
-
- void setCallback(QObject *cb) {
- m_CB = cb;
- }
-
- void setFrameRate(const MFRatio &fps);
- void setClockRate(float rate) { m_playbackRate = rate; }
-
- const LONGLONG &lastSampleTime() const { return m_lastSampleTime; }
- const LONGLONG &frameDuration() const { return m_perFrameInterval; }
-
- HRESULT startScheduler(IMFClock *clock);
- HRESULT stopScheduler();
-
- HRESULT scheduleSample(IMFSample *sample, bool presentNow);
- HRESULT processSamplesInQueue(LONG *nextSleep);
- HRESULT processSample(IMFSample *sample, LONG *nextSleep);
- HRESULT flush();
-
- // ThreadProc for the scheduler thread.
- static DWORD WINAPI schedulerThreadProc(LPVOID parameter);
-
-private:
- DWORD schedulerThreadProcPrivate();
-
- QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented.
-
- IMFClock *m_clock; // Presentation clock. Can be NULL.
- QObject *m_CB; // Weak reference; do not delete.
-
- DWORD m_threadID;
- HANDLE m_schedulerThread;
- HANDLE m_threadReadyEvent;
- HANDLE m_flushEvent;
-
- float m_playbackRate;
- MFTIME m_perFrameInterval; // Duration of each frame.
- LONGLONG m_perFrame_1_4th; // 1/4th of the frame duration.
- MFTIME m_lastSampleTime; // Most recent sample time.
-
- QMutex m_mutex;
-};
-
-class SamplePool
-{
-public:
- SamplePool();
- ~SamplePool();
-
- HRESULT initialize(QList<IMFSample*> &samples);
- HRESULT clear();
-
- HRESULT getSample(IMFSample **sample);
- HRESULT returnSample(IMFSample *sample);
- BOOL areSamplesPending();
-
-private:
- QMutex m_mutex;
- QList<IMFSample*> m_videoSampleQueue;
- bool m_initialized;
- DWORD m_pending;
-};
-
-class EVRCustomPresenter
- : public QObject
- , public IMFVideoDeviceID
- , public IMFVideoPresenter // Inherits IMFClockStateSink
- , public IMFRateSupport
- , public IMFGetService
- , public IMFTopologyServiceLookupClient
-{
- Q_OBJECT
-
-public:
- // Defines the state of the presenter.
- enum RenderState
- {
- RenderStarted = 1,
- RenderStopped,
- RenderPaused,
- RenderShutdown // Initial state.
- };
-
- // Defines the presenter's state with respect to frame-stepping.
- enum FrameStepState
- {
- FrameStepNone, // Not frame stepping.
- FrameStepWaitingStart, // Frame stepping, but the clock is not started.
- FrameStepPending, // Clock is started. Waiting for samples.
- FrameStepScheduled, // Submitted a sample for rendering.
- FrameStepComplete // Sample was rendered.
- };
-
- EVRCustomPresenter();
- ~EVRCustomPresenter();
-
- // IUnknown methods
- STDMETHODIMP QueryInterface(REFIID riid, void ** ppv);
- STDMETHODIMP_(ULONG) AddRef();
- STDMETHODIMP_(ULONG) Release();
-
- // IMFGetService methods
- STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject);
-
- // IMFVideoPresenter methods
- STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param);
- STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType);
-
- // IMFClockStateSink methods
- STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset);
- STDMETHODIMP OnClockStop(MFTIME systemTime);
- STDMETHODIMP OnClockPause(MFTIME systemTime);
- STDMETHODIMP OnClockRestart(MFTIME systemTime);
- STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate);
-
- // IMFRateSupport methods
- STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
- STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
- STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate);
-
- // IMFVideoDeviceID methods
- STDMETHODIMP GetDeviceID(IID* deviceID);
-
- // IMFTopologyServiceLookupClient methods
- STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup);
- STDMETHODIMP ReleaseServicePointers();
-
- void supportedFormatsChanged();
- void setSurface(QAbstractVideoSurface *surface);
-
-private Q_SLOTS:
- void startSurface();
- void stopSurface();
-
-private:
- HRESULT checkShutdown() const
- {
- if (m_renderState == RenderShutdown)
- return MF_E_SHUTDOWN;
- else
- return S_OK;
- }
-
- // The "active" state is started or paused.
- inline bool isActive() const
- {
- return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
- }
-
-    // Scrubbing occurs when the playback rate is 0.
- inline bool isScrubbing() const { return m_playbackRate == 0.0f; }
-
- // Send an event to the EVR through its IMediaEventSink interface.
- void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
- {
- if (m_mediaEventSink)
- m_mediaEventSink->Notify(eventCode, param1, param2);
- }
-
- float getMaxRate(bool thin);
-
- // Mixer operations
- HRESULT configureMixer(IMFTransform *mixer);
-
- // Formats
- HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
- HRESULT setMediaType(IMFMediaType *mediaType);
- HRESULT isMediaTypeSupported(IMFMediaType *mediaType);
-
- // Message handlers
- HRESULT flush();
- HRESULT renegotiateMediaType();
- HRESULT processInputNotify();
- HRESULT beginStreaming();
- HRESULT endStreaming();
- HRESULT checkEndOfStream();
-
- // Managing samples
- void processOutputLoop();
- HRESULT processOutput();
- HRESULT deliverSample(IMFSample *sample, bool repaint);
- HRESULT trackSample(IMFSample *sample);
- void releaseResources();
-
- // Frame-stepping
- HRESULT prepareFrameStep(DWORD steps);
- HRESULT startFrameStep();
- HRESULT deliverFrameStepSample(IMFSample *sample);
- HRESULT completeFrameStep(IMFSample *sample);
- HRESULT cancelFrameStep();
-
- // Callback when a video sample is released.
- HRESULT onSampleFree(IMFAsyncResult *result);
- AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
-
- // Holds information related to frame-stepping.
- struct FrameStep
- {
- FrameStep()
- : state(FrameStepNone)
- , steps(0)
- , sampleNoRef(NULL)
- {
- }
-
- FrameStepState state;
- QList<IMFSample*> samples;
- DWORD steps;
- DWORD_PTR sampleNoRef;
- };
-
- long m_refCount;
-
- RenderState m_renderState;
- FrameStep m_frameStep;
-
- QMutex m_mutex;
-
- // Samples and scheduling
- Scheduler m_scheduler; // Manages scheduling of samples.
- SamplePool m_samplePool; // Pool of allocated samples.
- DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.
-
- // Rendering state
- bool m_sampleNotify; // Did the mixer signal it has an input sample?
- bool m_repaint; // Do we need to repaint the last sample?
- bool m_prerolled; // Have we presented at least one sample?
- bool m_endStreaming; // Did we reach the end of the stream (EOS)?
-
- MFVideoNormalizedRect m_sourceRect;
- float m_playbackRate;
-
- D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.)
-
- IMFClock *m_clock; // The EVR's clock.
- IMFTransform *m_mixer; // The EVR's mixer.
- IMediaEventSink *m_mediaEventSink; // The EVR's event-sink interface.
- IMFMediaType *m_mediaType; // Output media type
-
- QAbstractVideoSurface *m_surface;
- QList<DWORD> m_supportedGLFormats;
-};
-
-class EVRCustomPresenterActivate : public MFAbstractActivate
-{
-public:
- EVRCustomPresenterActivate();
- ~EVRCustomPresenterActivate()
- { }
-
- STDMETHODIMP ActivateObject(REFIID riid, void **ppv);
- STDMETHODIMP ShutdownObject();
- STDMETHODIMP DetachObject();
-
- void setSurface(QAbstractVideoSurface *surface);
- void supportedFormatsChanged();
-
-private:
- EVRCustomPresenter *m_presenter;
- QAbstractVideoSurface *m_surface;
- QMutex m_mutex;
-};
-
-QT_END_NAMESPACE
-
-#endif // EVRCUSTOMPRESENTER_H
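
For reference, the Scheduler declared above turns the mixer's frame rate into the 100-nanosecond MFTIME units used throughout these files. A minimal sketch of that conversion (illustrative only, not part of the patch; the helper name is invented):

    // Illustrative sketch: mapping an MFRatio frame rate to a per-frame duration
    // in 100-nanosecond MFTIME units (one second == 10,000,000 units).
    #include <mfapi.h>

    static MFTIME frameDurationFromRate(const MFRatio &fps)
    {
        // Fall back to the 30/1 default rate if the ratio is not usable.
        const DWORD num = fps.Numerator ? fps.Numerator : 30;
        const DWORD den = fps.Denominator ? fps.Denominator : 1;
        return (10000000LL * den) / num;   // 30/1 fps -> 333333 (~33.3 ms per frame)
    }

Media Foundation also provides MFFrameRateToAverageTimePerFrame() for the same computation.
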
diff --git a/src/plugins/wmf/evrd3dpresentengine.cpp b/src/plugins/wmf/evrd3dpresentengine.cpp
deleted file mode 100644
index d66918a99..000000000
--- a/src/plugins/wmf/evrd3dpresentengine.cpp
+++ /dev/null
@@ -1,657 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "evrd3dpresentengine.h"
-
-#include "mfglobal.h"
-
-#include <qtgui/qguiapplication.h>
-#include <qpa/qplatformnativeinterface.h>
-#include <qtgui/qopenglcontext.h>
-#include <qabstractvideobuffer.h>
-#include <QAbstractVideoSurface>
-#include <qvideoframe.h>
-#include <QDebug>
-#include <qopenglcontext.h>
-#include <qopenglfunctions.h>
-#include <qwindow.h>
-
-#include <EGL/egl.h>
-#include <EGL/eglext.h>
-#include <d3d9.h>
-#include <dxva2api.h>
-#include <WinUser.h>
-#include <evr.h>
-
-QT_USE_NAMESPACE
-
-static const DWORD PRESENTER_BUFFER_COUNT = 3;
-
-class TextureVideoBuffer : public QAbstractVideoBuffer
-{
-public:
- TextureVideoBuffer(GLuint textureId)
- : QAbstractVideoBuffer(GLTextureHandle)
- , m_textureId(textureId)
- {}
-
- ~TextureVideoBuffer() {}
-
- MapMode mapMode() const { return NotMapped; }
- uchar *map(MapMode, int*, int*) { return 0; }
- void unmap() {}
-
- QVariant handle() const
- {
- return QVariant::fromValue<unsigned int>(m_textureId);
- }
-
-private:
- GLuint m_textureId;
-};
-
-EGLWrapper::EGLWrapper()
-{
-#ifndef QT_OPENGL_ES_2_ANGLE_STATIC
- // Resolve the EGL functions we use. When configured for dynamic OpenGL, no
-    // component in Qt will link to libEGL.lib and libGLESv2.lib. We know,
-    // however, that libEGL is loaded for sure, since this is an ANGLE-only path.
-
-# ifdef QT_DEBUG
- HMODULE eglHandle = GetModuleHandle(L"libEGLd.dll");
-# else
- HMODULE eglHandle = GetModuleHandle(L"libEGL.dll");
-# endif
-
- if (!eglHandle)
- qWarning("No EGL library loaded");
-
- m_eglGetProcAddress = (EglGetProcAddress) GetProcAddress(eglHandle, "eglGetProcAddress");
- m_eglCreatePbufferSurface = (EglCreatePbufferSurface) GetProcAddress(eglHandle, "eglCreatePbufferSurface");
- m_eglDestroySurface = (EglDestroySurface) GetProcAddress(eglHandle, "eglDestroySurface");
- m_eglBindTexImage = (EglBindTexImage) GetProcAddress(eglHandle, "eglBindTexImage");
- m_eglReleaseTexImage = (EglReleaseTexImage) GetProcAddress(eglHandle, "eglReleaseTexImage");
-#else
- // Static ANGLE-only build. There is no libEGL.dll in use.
-
- m_eglGetProcAddress = ::eglGetProcAddress;
- m_eglCreatePbufferSurface = ::eglCreatePbufferSurface;
- m_eglDestroySurface = ::eglDestroySurface;
- m_eglBindTexImage = ::eglBindTexImage;
- m_eglReleaseTexImage = ::eglReleaseTexImage;
-#endif
-}
-
-__eglMustCastToProperFunctionPointerType EGLWrapper::getProcAddress(const char *procname)
-{
- Q_ASSERT(m_eglGetProcAddress);
- return m_eglGetProcAddress(procname);
-}
-
-EGLSurface EGLWrapper::createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list)
-{
- Q_ASSERT(m_eglCreatePbufferSurface);
- return m_eglCreatePbufferSurface(dpy, config, attrib_list);
-}
-
-EGLBoolean EGLWrapper::destroySurface(EGLDisplay dpy, EGLSurface surface)
-{
- Q_ASSERT(m_eglDestroySurface);
- return m_eglDestroySurface(dpy, surface);
-}
-
-EGLBoolean EGLWrapper::bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer)
-{
- Q_ASSERT(m_eglBindTexImage);
- return m_eglBindTexImage(dpy, surface, buffer);
-}
-
-EGLBoolean EGLWrapper::releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer)
-{
- Q_ASSERT(m_eglReleaseTexImage);
- return m_eglReleaseTexImage(dpy, surface, buffer);
-}
-
-D3DPresentEngine::D3DPresentEngine()
- : QObject()
- , m_mutex(QMutex::Recursive)
- , m_deviceResetToken(0)
- , m_D3D9(0)
- , m_device(0)
- , m_deviceManager(0)
- , m_surface(0)
- , m_glContext(0)
- , m_offscreenSurface(0)
- , m_eglDisplay(0)
- , m_eglConfig(0)
- , m_eglSurface(0)
- , m_glTexture(0)
- , m_texture(0)
- , m_egl(0)
-{
- ZeroMemory(&m_displayMode, sizeof(m_displayMode));
-
- HRESULT hr = initializeD3D();
-
- if (SUCCEEDED(hr)) {
- hr = createD3DDevice();
- if (FAILED(hr))
- qWarning("Failed to create D3D device");
- } else {
- qWarning("Failed to initialize D3D");
- }
-}
-
-D3DPresentEngine::~D3DPresentEngine()
-{
- qt_wmf_safeRelease(&m_texture);
- qt_wmf_safeRelease(&m_device);
- qt_wmf_safeRelease(&m_deviceManager);
- qt_wmf_safeRelease(&m_D3D9);
-
- if (m_eglSurface) {
- m_egl->releaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
- m_egl->destroySurface(m_eglDisplay, m_eglSurface);
- m_eglSurface = NULL;
- }
- if (m_glTexture) {
- if (QOpenGLContext *current = QOpenGLContext::currentContext())
- current->functions()->glDeleteTextures(1, &m_glTexture);
- else
- qWarning() << "D3DPresentEngine: Cannot obtain GL context, unable to delete textures";
- }
-
- delete m_glContext;
- delete m_offscreenSurface;
- delete m_egl;
-}
-
-void D3DPresentEngine::start()
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_surfaceFormat.isValid())
- return;
-
- if (!m_texture)
- createOffscreenTexture();
-
- if (m_surface && !m_surface->isActive())
- m_surface->start(m_surfaceFormat);
-}
-
-void D3DPresentEngine::stop()
-{
- QMutexLocker locker(&m_mutex);
- if (m_surface && m_surface->isActive())
- m_surface->stop();
-}
-
-HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
-{
- HRESULT hr = S_OK;
-
- if (riid == __uuidof(IDirect3DDeviceManager9)) {
- if (m_deviceManager == NULL) {
- hr = MF_E_UNSUPPORTED_SERVICE;
- } else {
- *ppv = m_deviceManager;
- m_deviceManager->AddRef();
- }
- } else {
- hr = MF_E_UNSUPPORTED_SERVICE;
- }
-
- return hr;
-}
-
-HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
-{
- HRESULT hr = S_OK;
-
- UINT uAdapter = D3DADAPTER_DEFAULT;
- D3DDEVTYPE type = D3DDEVTYPE_HAL;
-
- D3DDISPLAYMODE mode;
- D3DDEVICE_CREATION_PARAMETERS params;
-
-    // Our shared D3D/EGL surface only supports RGB32;
-    // reject all other formats.
- if (format != D3DFMT_X8R8G8B8)
- return MF_E_INVALIDMEDIATYPE;
-
- if (m_device) {
- hr = m_device->GetCreationParameters(&params);
- if (FAILED(hr))
- return hr;
-
- uAdapter = params.AdapterOrdinal;
- type = params.DeviceType;
- }
-
- hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
- if (FAILED(hr))
- return hr;
-
- return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE);
-}
-
-HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
-{
- if (!format)
- return MF_E_UNEXPECTED;
-
- HRESULT hr = S_OK;
- D3DPRESENT_PARAMETERS pp;
-
- IDirect3DSwapChain9 *swapChain = NULL;
- IMFSample *videoSample = NULL;
-
- QMutexLocker locker(&m_mutex);
-
- releaseResources();
-
- // Get the swap chain parameters from the media type.
- hr = getSwapChainPresentParameters(format, &pp);
- if (FAILED(hr))
- goto done;
-
- // Create the video samples.
- for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
- // Create a new swap chain.
- hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
- if (FAILED(hr))
- goto done;
-
- // Create the video sample from the swap chain.
- hr = createD3DSample(swapChain, &videoSample);
- if (FAILED(hr))
- goto done;
-
- // Add it to the list.
- videoSample->AddRef();
- videoSampleQueue.append(videoSample);
-
- // Set the swap chain pointer as a custom attribute on the sample. This keeps
- // a reference count on the swap chain, so that the swap chain is kept alive
- // for the duration of the sample's lifetime.
- hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain);
- if (FAILED(hr))
- goto done;
-
- qt_wmf_safeRelease(&videoSample);
- qt_wmf_safeRelease(&swapChain);
- }
-
-done:
- if (FAILED(hr))
- releaseResources();
-
- qt_wmf_safeRelease(&swapChain);
- qt_wmf_safeRelease(&videoSample);
- return hr;
-}
-
-void D3DPresentEngine::releaseResources()
-{
-}
-
-void D3DPresentEngine::presentSample(void *opaque, qint64)
-{
- HRESULT hr = S_OK;
-
- IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
- IMFMediaBuffer* buffer = NULL;
- IDirect3DSurface9* surface = NULL;
-
- if (m_surface && m_surface->isActive()) {
- if (sample) {
- // Get the buffer from the sample.
- hr = sample->GetBufferByIndex(0, &buffer);
- if (FAILED(hr))
- goto done;
-
- // Get the surface from the buffer.
- hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
- if (FAILED(hr))
- goto done;
- }
-
- if (surface && updateTexture(surface)) {
- QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
- m_surfaceFormat.frameSize(),
- m_surfaceFormat.pixelFormat());
-
- // WMF uses 100-nanosecond units, Qt uses microseconds
- LONGLONG startTime = -1;
- if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
- frame.setStartTime(startTime * 0.1);
-
- LONGLONG duration = -1;
- if (SUCCEEDED(sample->GetSampleDuration(&duration)))
- frame.setEndTime((startTime + duration) * 0.1);
- }
-
- m_surface->present(frame);
- }
- }
-
-done:
- qt_wmf_safeRelease(&surface);
- qt_wmf_safeRelease(&buffer);
- qt_wmf_safeRelease(&sample);
-}
-
-void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface)
-{
- QMutexLocker locker(&m_mutex);
- m_surface = surface;
-}
-
-void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format)
-{
- QMutexLocker locker(&m_mutex);
- m_surfaceFormat = format;
-}
-
-void D3DPresentEngine::createOffscreenTexture()
-{
- // First, check if we have a context on this thread
- QOpenGLContext *currentContext = QOpenGLContext::currentContext();
-
- if (!currentContext) {
-        // Create an OpenGL context and set the share context from the surface.
- QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- if (!shareContext)
- return;
-
- m_offscreenSurface = new QWindow;
- m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
-        // Needs geometry to be a valid surface, but the size is not important.
- m_offscreenSurface->setGeometry(-1, -1, 1, 1);
- m_offscreenSurface->create();
-
- m_glContext = new QOpenGLContext;
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
- m_glContext->setShareContext(shareContext);
-
- if (!m_glContext->create()) {
- delete m_glContext;
- delete m_offscreenSurface;
- m_glContext = 0;
- m_offscreenSurface = 0;
- return;
- }
-
- currentContext = m_glContext;
- }
-
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- if (!m_egl)
- m_egl = new EGLWrapper;
-
- QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface();
- m_eglDisplay = static_cast<EGLDisplay*>(
- nativeInterface->nativeResourceForContext("eglDisplay", currentContext));
- m_eglConfig = static_cast<EGLConfig*>(
- nativeInterface->nativeResourceForContext("eglConfig", currentContext));
-
- currentContext->functions()->glGenTextures(1, &m_glTexture);
-
- int w = m_surfaceFormat.frameWidth();
- int h = m_surfaceFormat.frameHeight();
- bool hasAlpha = currentContext->format().hasAlpha();
-
- EGLint attribs[] = {
- EGL_WIDTH, w,
- EGL_HEIGHT, h,
- EGL_TEXTURE_FORMAT, hasAlpha ? EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB,
- EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
- EGL_NONE
- };
-
- EGLSurface pbuffer = m_egl->createPbufferSurface(m_eglDisplay, m_eglConfig, attribs);
-
- HANDLE share_handle = 0;
- PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE =
- reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_egl->getProcAddress("eglQuerySurfacePointerANGLE"));
- Q_ASSERT(eglQuerySurfacePointerANGLE);
- eglQuerySurfacePointerANGLE(
- m_eglDisplay,
- pbuffer,
- EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle);
-
-
- m_device->CreateTexture(w, h, 1,
- D3DUSAGE_RENDERTARGET,
- hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8,
- D3DPOOL_DEFAULT,
- &m_texture,
- &share_handle);
-
- m_eglSurface = pbuffer;
-
- if (m_glContext)
- m_glContext->doneCurrent();
-}
-
-bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src)
-{
- if (!m_texture)
- return false;
-
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glTexture);
-
- IDirect3DSurface9 *dest = NULL;
-
- // Copy the sample surface to the shared D3D/EGL surface
- HRESULT hr = m_texture->GetSurfaceLevel(0, &dest);
- if (FAILED(hr))
- goto done;
-
- hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE);
- if (FAILED(hr))
- qWarning("Failed to copy D3D surface");
-
- if (hr == S_OK)
- m_egl->bindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
-
-done:
- qt_wmf_safeRelease(&dest);
-
- if (m_glContext)
- m_glContext->doneCurrent();
-
- return SUCCEEDED(hr);
-}
-
-HRESULT D3DPresentEngine::initializeD3D()
-{
- HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9);
-
- if (SUCCEEDED(hr))
- hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager);
-
- return hr;
-}
-
-HRESULT D3DPresentEngine::createD3DDevice()
-{
- HRESULT hr = S_OK;
- HWND hwnd = NULL;
- UINT uAdapterID = D3DADAPTER_DEFAULT;
- DWORD vp = 0;
-
- D3DCAPS9 ddCaps;
- ZeroMemory(&ddCaps, sizeof(ddCaps));
-
- IDirect3DDevice9Ex* device = NULL;
-
- // Hold the lock because we might be discarding an existing device.
- QMutexLocker locker(&m_mutex);
-
- if (!m_D3D9 || !m_deviceManager)
- return MF_E_NOT_INITIALIZED;
-
- hwnd = ::GetShellWindow();
-
- // Note: The presenter creates additional swap chains to present the
- // video frames. Therefore, it does not use the device's implicit
- // swap chain, so the size of the back buffer here is 1 x 1.
-
- D3DPRESENT_PARAMETERS pp;
- ZeroMemory(&pp, sizeof(pp));
-
- pp.BackBufferWidth = 1;
- pp.BackBufferHeight = 1;
- pp.BackBufferFormat = D3DFMT_UNKNOWN;
- pp.BackBufferCount = 1;
- pp.Windowed = TRUE;
- pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
- pp.BackBufferFormat = D3DFMT_UNKNOWN;
- pp.hDeviceWindow = hwnd;
- pp.Flags = D3DPRESENTFLAG_VIDEO;
- pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
-
- hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps);
- if (FAILED(hr))
- goto done;
-
- if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
- vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
- else
- vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
-
- hr = m_D3D9->CreateDeviceEx(
- uAdapterID,
- D3DDEVTYPE_HAL,
- pp.hDeviceWindow,
- vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
- &pp,
- NULL,
- &device
- );
- if (FAILED(hr))
- goto done;
-
- hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode);
- if (FAILED(hr))
- goto done;
-
- hr = m_deviceManager->ResetDevice(device, m_deviceResetToken);
- if (FAILED(hr))
- goto done;
-
- qt_wmf_safeRelease(&m_device);
-
- m_device = device;
- m_device->AddRef();
-
-done:
- qt_wmf_safeRelease(&device);
- return hr;
-}
-
-HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample)
-{
- D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00);
-
- IDirect3DSurface9* surface = NULL;
- IMFSample* sample = NULL;
-
- // Get the back buffer surface.
- HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface);
- if (FAILED(hr))
- goto done;
-
- // Fill it with black.
- hr = m_device->ColorFill(surface, NULL, clrBlack);
- if (FAILED(hr))
- goto done;
-
- hr = MFCreateVideoSampleFromSurface(surface, &sample);
- if (FAILED(hr))
- goto done;
-
- *videoSample = sample;
- (*videoSample)->AddRef();
-
-done:
- qt_wmf_safeRelease(&surface);
- qt_wmf_safeRelease(&sample);
- return hr;
-}
-
-HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp)
-{
- ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
-
- // Get some information about the video format.
-
- UINT32 width = 0, height = 0;
-
- HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
- if (FAILED(hr))
- return hr;
-
- DWORD d3dFormat = 0;
-
- hr = qt_wmf_getFourCC(type, &d3dFormat);
- if (FAILED(hr))
- return hr;
-
- ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
- pp->BackBufferWidth = width;
- pp->BackBufferHeight = height;
- pp->Windowed = TRUE;
- pp->SwapEffect = D3DSWAPEFFECT_DISCARD;
- pp->BackBufferFormat = (D3DFORMAT)d3dFormat;
- pp->hDeviceWindow = ::GetShellWindow();
- pp->Flags = D3DPRESENTFLAG_VIDEO;
- pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
-
- D3DDEVICE_CREATION_PARAMETERS params;
- hr = m_device->GetCreationParameters(&params);
- if (FAILED(hr))
- return hr;
-
- if (params.DeviceType != D3DDEVTYPE_HAL)
- pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER;
-
- return S_OK;
-}
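
The present engine above hands frames to the surface as GL texture handles, with timestamps already converted from 100-nanosecond units to microseconds. A hedged sketch of how a consuming surface might unpack such a frame (hypothetical consumer code using the Qt 5 QAbstractVideoSurface API; MySurface is an invented name):

    // Hypothetical consumer, not part of the plugin: shows how a surface could
    // read back the texture id and timestamps produced by presentSample() above.
    #include <qabstractvideosurface.h>
    #include <qvideoframe.h>

    class MySurface : public QAbstractVideoSurface
    {
    public:
        QList<QVideoFrame::PixelFormat> supportedPixelFormats(
                QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const
        {
            QList<QVideoFrame::PixelFormat> formats;
            if (type == QAbstractVideoBuffer::GLTextureHandle)
                formats << QVideoFrame::Format_RGB32;
            return formats;
        }

        bool present(const QVideoFrame &frame)
        {
            if (frame.handleType() != QAbstractVideoBuffer::GLTextureHandle)
                return false;

            const uint textureId = frame.handle().toUInt(); // GL texture bound to the shared D3D surface
            const qint64 startUs = frame.startTime();       // already in microseconds
            Q_UNUSED(textureId);
            Q_UNUSED(startUs);
            // ... bind textureId on the render thread and draw the frame.
            return true;
        }
    };

Note that createOffscreenTexture() above reads the surface's "GLContext" property, which is how the consumer shares its GL context back to the engine.
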
diff --git a/src/plugins/wmf/evrd3dpresentengine.h b/src/plugins/wmf/evrd3dpresentengine.h
deleted file mode 100644
index 7a88ee555..000000000
--- a/src/plugins/wmf/evrd3dpresentengine.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef EVRD3DPRESENTENGINE_H
-#define EVRD3DPRESENTENGINE_H
-
-#include <QObject>
-#include <EGL/egl.h>
-#include <QMutex>
-#include <d3d9types.h>
-#include <QVideoSurfaceFormat>
-
-struct IDirect3D9Ex;
-struct IDirect3DDevice9;
-struct IDirect3DDevice9Ex;
-struct IDirect3DDeviceManager9;
-struct IDirect3DSurface9;
-struct IDirect3DTexture9;
-struct IMFSample;
-struct IMFMediaType;
-struct IDirect3DSwapChain9;
-
-// Randomly generated GUIDs
-static const GUID MFSamplePresenter_SampleCounter =
-{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
-
-static const GUID MFSamplePresenter_SampleSwapChain =
-{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } };
-
-QT_BEGIN_NAMESPACE
-
-class QAbstractVideoSurface;
-class QOpenGLContext;
-
-class EGLWrapper
-{
-public:
- EGLWrapper();
-
- __eglMustCastToProperFunctionPointerType getProcAddress(const char *procname);
- EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
- EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface);
- EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
- EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
-
-private:
- typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname);
- typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
- typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface);
- typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
- typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
-
- EglGetProcAddress m_eglGetProcAddress;
- EglCreatePbufferSurface m_eglCreatePbufferSurface;
- EglDestroySurface m_eglDestroySurface;
- EglBindTexImage m_eglBindTexImage;
- EglReleaseTexImage m_eglReleaseTexImage;
-};
-
-class D3DPresentEngine : public QObject
-{
- Q_OBJECT
-public:
- D3DPresentEngine();
- virtual ~D3DPresentEngine();
-
- void start();
- void stop();
-
- HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
- HRESULT checkFormat(D3DFORMAT format);
-
- HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
- void releaseResources();
-
- UINT refreshRate() const { return m_displayMode.RefreshRate; }
-
- void setSurface(QAbstractVideoSurface *surface);
- void setSurfaceFormat(const QVideoSurfaceFormat &format);
-
- void createOffscreenTexture();
- bool updateTexture(IDirect3DSurface9 *src);
-
-public Q_SLOTS:
- void presentSample(void* sample, qint64 llTarget);
-
-private:
- HRESULT initializeD3D();
- HRESULT getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp);
- HRESULT createD3DDevice();
- HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample);
-
- QMutex m_mutex;
-
- UINT m_deviceResetToken;
- D3DDISPLAYMODE m_displayMode;
-
- IDirect3D9Ex *m_D3D9;
- IDirect3DDevice9Ex *m_device;
- IDirect3DDeviceManager9 *m_deviceManager;
-
- QVideoSurfaceFormat m_surfaceFormat;
- QAbstractVideoSurface *m_surface;
-
- QOpenGLContext *m_glContext;
- QWindow *m_offscreenSurface;
-
- EGLDisplay *m_eglDisplay;
- EGLConfig *m_eglConfig;
- EGLSurface m_eglSurface;
- unsigned int m_glTexture;
- IDirect3DTexture9 *m_texture;
- EGLWrapper *m_egl;
-};
-
-QT_END_NAMESPACE
-
-#endif // EVRD3DPRESENTENGINE_H
diff --git a/src/plugins/wmf/mfactivate.h b/src/plugins/wmf/mfactivate.h
index 878e30d4d..8b8e51b56 100644
--- a/src/plugins/wmf/mfactivate.h
+++ b/src/plugins/wmf/mfactivate.h
@@ -34,8 +34,6 @@
#ifndef MFACTIVATE_H
#define MFACTIVATE_H
-#include "mfglobal.h"
-
#include <mfidl.h>
class MFAbstractActivate : public IMFActivate
diff --git a/src/plugins/wmf/mfglobal.cpp b/src/plugins/wmf/mfglobal.cpp
deleted file mode 100644
index 55f2882db..000000000
--- a/src/plugins/wmf/mfglobal.cpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "mfglobal.h"
-
-HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC)
-{
- if (!fourCC)
- return E_POINTER;
-
- HRESULT hr = S_OK;
- GUID guidSubType = GUID_NULL;
-
- if (SUCCEEDED(hr))
- hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
-
- if (SUCCEEDED(hr))
- *fourCC = guidSubType.Data1;
-
- return hr;
-}
-
-MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type)
-{
- MFRatio ratio = { 0, 0 };
- HRESULT hr = S_OK;
-
- hr = MFGetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&ratio.Numerator, (UINT32*)&ratio.Denominator);
- if (FAILED(hr)) {
- ratio.Numerator = 1;
- ratio.Denominator = 1;
- }
- return ratio;
-}
-
-bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
-{
- if (!type1 && !type2)
- return true;
- else if (!type1 || !type2)
- return false;
-
- DWORD dwFlags = 0;
- HRESULT hr = type1->IsEqual(type2, &dwFlags);
-
- return (hr == S_OK);
-}
-
-HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
-{
- float fOffsetX = qt_wmf_MFOffsetToFloat(area.OffsetX);
- float fOffsetY = qt_wmf_MFOffsetToFloat(area.OffsetY);
-
- if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
- ((LONG)fOffsetY + area.Area.cy > (LONG)height) )
- return MF_E_INVALIDMEDIATYPE;
- else
- return S_OK;
-}
-
-bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
-{
- if (!sample || !clock)
- return false;
-
- HRESULT hr = S_OK;
- MFTIME hnsTimeNow = 0;
- MFTIME hnsSystemTime = 0;
- MFTIME hnsSampleStart = 0;
- MFTIME hnsSampleDuration = 0;
-
- hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
-
- if (SUCCEEDED(hr))
- hr = sample->GetSampleTime(&hnsSampleStart);
-
- if (SUCCEEDED(hr))
- hr = sample->GetSampleDuration(&hnsSampleDuration);
-
- if (SUCCEEDED(hr)) {
- if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
- return true;
- }
-
- return false;
-}
diff --git a/src/plugins/wmf/mfglobal.h b/src/plugins/wmf/mfglobal.h
deleted file mode 100644
index 073f959f7..000000000
--- a/src/plugins/wmf/mfglobal.h
+++ /dev/null
@@ -1,149 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2015 The Qt Company Ltd.
-** Contact: http://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see http://www.qt.io/terms-conditions. For further
-** information use the contact form at http://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** As a special exception, The Qt Company gives you certain additional
-** rights. These rights are described in The Qt Company LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef MFGLOBAL_H
-#define MFGLOBAL_H
-
-#include <mfapi.h>
-#include <mfidl.h>
-#include <Mferror.h>
-
-
-template<class T>
-class AsyncCallback : public IMFAsyncCallback
-{
-public:
- typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
-
- AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
- {
- }
-
- // IUnknown
- STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
- {
- if (!ppv)
- return E_POINTER;
-
- if (iid == __uuidof(IUnknown)) {
- *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
- } else if (iid == __uuidof(IMFAsyncCallback)) {
- *ppv = static_cast<IMFAsyncCallback*>(this);
- } else {
- *ppv = NULL;
- return E_NOINTERFACE;
- }
- AddRef();
- return S_OK;
- }
-
- STDMETHODIMP_(ULONG) AddRef() {
- // Delegate to parent class.
- return m_parent->AddRef();
- }
- STDMETHODIMP_(ULONG) Release() {
- // Delegate to parent class.
- return m_parent->Release();
- }
-
-
- // IMFAsyncCallback methods
- STDMETHODIMP GetParameters(DWORD*, DWORD*)
- {
- // Implementation of this method is optional.
- return E_NOTIMPL;
- }
-
- STDMETHODIMP Invoke(IMFAsyncResult* asyncResult)
- {
- return (m_parent->*m_invokeFn)(asyncResult);
- }
-
- T *m_parent;
- InvokeFn m_invokeFn;
-};
-
-template <class T> void qt_wmf_safeRelease(T **ppT)
-{
- if (*ppT) {
- (*ppT)->Release();
- *ppT = NULL;
- }
-}
-
-template <class T>
-void qt_wmf_copyComPointer(T* &dest, T *src)
-{
- if (dest)
- dest->Release();
- dest = src;
- if (dest)
- dest->AddRef();
-}
-
-HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC);
-MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type);
-bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
-HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
-bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
-
-inline float qt_wmf_MFOffsetToFloat(const MFOffset& offset)
-{
- return offset.value + (float(offset.fract) / 65536);
-}
-
-inline MFOffset qt_wmf_makeMFOffset(float v)
-{
- MFOffset offset;
- offset.value = short(v);
- offset.fract = WORD(65536 * (v-offset.value));
- return offset;
-}
-
-inline MFVideoArea qt_wmf_makeMFArea(float x, float y, DWORD width, DWORD height)
-{
- MFVideoArea area;
- area.OffsetX = qt_wmf_makeMFOffset(x);
- area.OffsetY = qt_wmf_makeMFOffset(y);
- area.Area.cx = width;
- area.Area.cy = height;
- return area;
-}
-
-inline HRESULT qt_wmf_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
-{
- return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator);
-}
-
-
-#endif // MFGLOBAL_H
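
The AsyncCallback template shown above forwards Invoke() to a member function and delegates reference counting to its parent, so the parent must implement IUnknown. A minimal, hypothetical usage sketch (Worker, schedule() and onInvoke() are invented names, and MFStartup() is assumed to have been called):

    // Hypothetical usage of the AsyncCallback<T> template declared above.
    #include <windows.h>
    #include <mfapi.h>

    class Worker : public IUnknown
    {
    public:
        Worker() : m_cb(this, &Worker::onInvoke), m_refCount(1) { }

        HRESULT schedule()
        {
            // Queue this object on a work queue; MF later calls m_cb.Invoke(),
            // which forwards to onInvoke().
            return MFPutWorkItem(MFASYNC_CALLBACK_QUEUE_STANDARD, &m_cb, NULL);
        }

        // IUnknown: AsyncCallback delegates AddRef()/Release() to this object.
        STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
        {
            if (!ppv)
                return E_POINTER;
            if (riid == __uuidof(IUnknown)) {
                *ppv = static_cast<IUnknown*>(this);
                AddRef();
                return S_OK;
            }
            *ppv = NULL;
            return E_NOINTERFACE;
        }
        STDMETHODIMP_(ULONG) AddRef() { return InterlockedIncrement(&m_refCount); }
        STDMETHODIMP_(ULONG) Release()
        {
            ULONG count = InterlockedDecrement(&m_refCount);
            if (count == 0)
                delete this;
            return count;
        }

    private:
        HRESULT onInvoke(IMFAsyncResult *result) { (void)result; return S_OK; }

        AsyncCallback<Worker> m_cb;
        long m_refCount;
    };
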
diff --git a/src/plugins/wmf/mftvideo.cpp b/src/plugins/wmf/mftvideo.cpp
index 6faa8604c..b7a416213 100644
--- a/src/plugins/wmf/mftvideo.cpp
+++ b/src/plugins/wmf/mftvideo.cpp
@@ -52,6 +52,7 @@ MFTransform::MFTransform():
m_inputType(0),
m_outputType(0),
m_sample(0),
+ m_videoSinkTypeHandler(0),
m_bytesPerLine(0)
{
}
@@ -64,8 +65,8 @@ MFTransform::~MFTransform()
if (m_outputType)
m_outputType->Release();
- for (int i = 0; i < m_mediaTypes.size(); ++i)
- m_mediaTypes[i]->Release();
+ if (m_videoSinkTypeHandler)
+ m_videoSinkTypeHandler->Release();
}
void MFTransform::addProbe(MFVideoProbeControl *probe)
@@ -84,12 +85,18 @@ void MFTransform::removeProbe(MFVideoProbeControl *probe)
m_videoProbes.removeOne(probe);
}
-void MFTransform::addSupportedMediaType(IMFMediaType *type)
+void MFTransform::setVideoSink(IUnknown *videoSink)
{
- if (!type)
- return;
- QMutexLocker locker(&m_mutex);
- m_mediaTypes.append(type);
+ // This transform supports the same input types as the video sink.
+ // Store its type handler interface in order to report the correct supported types.
+
+ if (m_videoSinkTypeHandler) {
+ m_videoSinkTypeHandler->Release();
+ m_videoSinkTypeHandler = NULL;
+ }
+
+ if (videoSink)
+ videoSink->QueryInterface(IID_PPV_ARGS(&m_videoSinkTypeHandler));
}
STDMETHODIMP MFTransform::QueryInterface(REFIID riid, void** ppv)
@@ -165,9 +172,12 @@ STDMETHODIMP MFTransform::GetInputStreamInfo(DWORD dwInputStreamID, MFT_INPUT_ST
pStreamInfo->cbSize = 0;
pStreamInfo->hnsMaxLatency = 0;
- pStreamInfo->dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER;
pStreamInfo->cbMaxLookahead = 0;
pStreamInfo->cbAlignment = 0;
+ pStreamInfo->dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES
+ | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER
+ | MFT_INPUT_STREAM_PROCESSES_IN_PLACE;
+
return S_OK;
}
@@ -182,8 +192,11 @@ STDMETHODIMP MFTransform::GetOutputStreamInfo(DWORD dwOutputStreamID, MFT_OUTPUT
return E_POINTER;
pStreamInfo->cbSize = 0;
- pStreamInfo->dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES | MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER;
pStreamInfo->cbAlignment = 0;
+ pStreamInfo->dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES
+ | MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER
+ | MFT_OUTPUT_STREAM_PROVIDES_SAMPLES
+ | MFT_OUTPUT_STREAM_DISCARDABLE;
return S_OK;
}
@@ -228,20 +241,42 @@ STDMETHODIMP MFTransform::AddInputStreams(DWORD cStreams, DWORD *adwStreamIDs)
STDMETHODIMP MFTransform::GetInputAvailableType(DWORD dwInputStreamID, DWORD dwTypeIndex, IMFMediaType **ppType)
{
- // This MFT does not have a list of preferred input types
- Q_UNUSED(dwInputStreamID);
- Q_UNUSED(dwTypeIndex);
- Q_UNUSED(ppType);
- return E_NOTIMPL;
+ // We support the same input types as the video sink
+ if (!m_videoSinkTypeHandler)
+ return E_NOTIMPL;
+
+ if (dwInputStreamID > 0)
+ return MF_E_INVALIDSTREAMNUMBER;
+
+ if (!ppType)
+ return E_POINTER;
+
+ return m_videoSinkTypeHandler->GetMediaTypeByIndex(dwTypeIndex, ppType);
}
-STDMETHODIMP MFTransform::GetOutputAvailableType(DWORD dwOutputStreamID,DWORD dwTypeIndex, IMFMediaType **ppType)
+STDMETHODIMP MFTransform::GetOutputAvailableType(DWORD dwOutputStreamID, DWORD dwTypeIndex, IMFMediaType **ppType)
{
- // This MFT does not have a list of preferred output types
- Q_UNUSED(dwOutputStreamID);
- Q_UNUSED(dwTypeIndex);
- Q_UNUSED(ppType);
- return E_NOTIMPL;
+ // Since we don't modify the samples, the output type must be the same as the input type.
+ // Report our input type as the only available output type.
+
+ if (dwOutputStreamID > 0)
+ return MF_E_INVALIDSTREAMNUMBER;
+
+ if (!ppType)
+ return E_POINTER;
+
+ // Input type must be set first
+ if (!m_inputType)
+ return MF_E_TRANSFORM_TYPE_NOT_SET;
+
+ if (dwTypeIndex > 0)
+ return MF_E_NO_MORE_TYPES;
+
+ // Return a copy to make sure our type is not modified
+ if (FAILED(MFCreateMediaType(ppType)))
+ return E_OUTOFMEMORY;
+
+ return m_inputType->CopyAllItems(*ppType);
}
STDMETHODIMP MFTransform::SetInputType(DWORD dwInputStreamID, IMFMediaType *pType, DWORD dwFlags)
@@ -257,17 +292,14 @@ STDMETHODIMP MFTransform::SetInputType(DWORD dwInputStreamID, IMFMediaType *pTyp
if (!isMediaTypeSupported(pType))
return MF_E_INVALIDMEDIATYPE;
- DWORD flags = 0;
- if (pType && !m_inputType && m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK)
- return MF_E_INVALIDMEDIATYPE;
-
if (dwFlags == MFT_SET_TYPE_TEST_ONLY)
return pType ? S_OK : E_POINTER;
if (m_inputType) {
m_inputType->Release();
// Input type has changed, discard output type (if it's set) so it's reset later on
- if (m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK) {
+ DWORD flags = 0;
+ if (m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK) {
m_outputType->Release();
m_outputType = 0;
}
@@ -286,29 +318,27 @@ STDMETHODIMP MFTransform::SetOutputType(DWORD dwOutputStreamID, IMFMediaType *pT
if (dwOutputStreamID > 0)
return MF_E_INVALIDSTREAMNUMBER;
+ if (dwFlags == MFT_SET_TYPE_TEST_ONLY && !pType)
+ return E_POINTER;
+
QMutexLocker locker(&m_mutex);
+ // Input type must be set first
+ if (!m_inputType)
+ return MF_E_TRANSFORM_TYPE_NOT_SET;
+
if (m_sample)
return MF_E_TRANSFORM_CANNOT_CHANGE_MEDIATYPE_WHILE_PROCESSING;
- if (!isMediaTypeSupported(pType))
- return MF_E_INVALIDMEDIATYPE;
-
DWORD flags = 0;
- if (pType && !m_outputType && m_inputType && m_inputType->IsEqual(pType, &flags) != S_OK)
+ if (pType && m_inputType->IsEqual(pType, &flags) != S_OK)
return MF_E_INVALIDMEDIATYPE;
if (dwFlags == MFT_SET_TYPE_TEST_ONLY)
return pType ? S_OK : E_POINTER;
- if (m_outputType) {
+ if (m_outputType)
m_outputType->Release();
- // Output type has changed, discard input type (if it's set) so it's reset later on
- if (m_inputType && m_inputType->IsEqual(pType, &flags) != S_OK) {
- m_inputType->Release();
- m_inputType = 0;
- }
- }
m_outputType = pType;
@@ -333,10 +363,11 @@ STDMETHODIMP MFTransform::GetInputCurrentType(DWORD dwInputStreamID, IMFMediaTyp
if (!m_inputType)
return MF_E_TRANSFORM_TYPE_NOT_SET;
- *ppType = m_inputType;
- (*ppType)->AddRef();
+ // Return a copy to make sure our type is not modified
+ if (FAILED(MFCreateMediaType(ppType)))
+ return E_OUTOFMEMORY;
- return S_OK;
+ return m_inputType->CopyAllItems(*ppType);
}
STDMETHODIMP MFTransform::GetOutputCurrentType(DWORD dwOutputStreamID, IMFMediaType **ppType)
@@ -349,19 +380,14 @@ STDMETHODIMP MFTransform::GetOutputCurrentType(DWORD dwOutputStreamID, IMFMediaT
QMutexLocker locker(&m_mutex);
- if (!m_outputType) {
- if (m_inputType) {
- *ppType = m_inputType;
- (*ppType)->AddRef();
- return S_OK;
- }
+ if (!m_outputType)
return MF_E_TRANSFORM_TYPE_NOT_SET;
- }
- *ppType = m_outputType;
- (*ppType)->AddRef();
+ // Return a copy to make sure our type is not modified
+ if (FAILED(MFCreateMediaType(ppType)))
+ return E_OUTOFMEMORY;
- return S_OK;
+ return m_outputType->CopyAllItems(*ppType);
}
STDMETHODIMP MFTransform::GetInputStatus(DWORD dwInputStreamID, DWORD *pdwFlags)
@@ -374,7 +400,7 @@ STDMETHODIMP MFTransform::GetInputStatus(DWORD dwInputStreamID, DWORD *pdwFlags)
QMutexLocker locker(&m_mutex);
- if (!m_inputType)
+ if (!m_inputType || !m_outputType)
return MF_E_TRANSFORM_TYPE_NOT_SET;
if (m_sample)
@@ -392,7 +418,7 @@ STDMETHODIMP MFTransform::GetOutputStatus(DWORD *pdwFlags)
QMutexLocker locker(&m_mutex);
- if (!m_outputType)
+ if (!m_inputType || !m_outputType)
return MF_E_TRANSFORM_TYPE_NOT_SET;
if (m_sample)
@@ -464,7 +490,7 @@ STDMETHODIMP MFTransform::ProcessInput(DWORD dwInputStreamID, IMFSample *pSample
QMutexLocker locker(&m_mutex);
- if (!m_inputType || !m_outputType)
+ if (!m_inputType)
return MF_E_TRANSFORM_TYPE_NOT_SET;
if (m_sample)
@@ -499,9 +525,6 @@ STDMETHODIMP MFTransform::ProcessInput(DWORD dwInputStreamID, IMFSample *pSample
STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount, MFT_OUTPUT_DATA_BUFFER *pOutputSamples, DWORD *pdwStatus)
{
- if (dwFlags != 0)
- return E_INVALIDARG;
-
if (pOutputSamples == NULL || pdwStatus == NULL)
return E_POINTER;
@@ -510,57 +533,44 @@ STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount,
QMutexLocker locker(&m_mutex);
- if (!m_sample)
- return MF_E_TRANSFORM_NEED_MORE_INPUT;
+ if (!m_inputType)
+ return MF_E_TRANSFORM_TYPE_NOT_SET;
+
+ if (!m_outputType) {
+ pOutputSamples[0].dwStatus = MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE;
+ return MF_E_TRANSFORM_STREAM_CHANGE;
+ }
IMFMediaBuffer *input = NULL;
IMFMediaBuffer *output = NULL;
- DWORD sampleLength = 0;
- m_sample->GetTotalLength(&sampleLength);
-
- // If the sample length is null, it means we're getting DXVA buffers.
- // In that case just pass on the sample we got as input.
- // Otherwise we need to copy the input buffer into the buffer the sink
- // is giving us.
- if (pOutputSamples[0].pSample && sampleLength > 0) {
-
- if (FAILED(m_sample->ConvertToContiguousBuffer(&input)))
- goto done;
-
- if (FAILED(pOutputSamples[0].pSample->ConvertToContiguousBuffer(&output)))
- goto done;
-
- DWORD inputLength = 0;
- DWORD outputLength = 0;
- input->GetMaxLength(&inputLength);
- output->GetMaxLength(&outputLength);
+ if (dwFlags == MFT_PROCESS_OUTPUT_DISCARD_WHEN_NO_BUFFER)
+ goto done;
+ else if (dwFlags != 0)
+ return E_INVALIDARG;
- if (outputLength < inputLength) {
- pOutputSamples[0].pSample->RemoveAllBuffers();
- output->Release();
- output = NULL;
- if (SUCCEEDED(MFCreateMemoryBuffer(inputLength, &output)))
- pOutputSamples[0].pSample->AddBuffer(output);
- }
+ if (!m_sample)
+ return MF_E_TRANSFORM_NEED_MORE_INPUT;
- if (output)
- m_sample->CopyToBuffer(output);
+ // Since the MFT_OUTPUT_STREAM_PROVIDES_SAMPLES flag is set, the client
+ // should not be providing samples here
+ if (pOutputSamples[0].pSample != NULL)
+ return E_INVALIDARG;
- LONGLONG hnsDuration = 0;
- LONGLONG hnsTime = 0;
- if (SUCCEEDED(m_sample->GetSampleDuration(&hnsDuration)))
- pOutputSamples[0].pSample->SetSampleDuration(hnsDuration);
- if (SUCCEEDED(m_sample->GetSampleTime(&hnsTime)))
- pOutputSamples[0].pSample->SetSampleTime(hnsTime);
+ pOutputSamples[0].pSample = m_sample;
+ pOutputSamples[0].pSample->AddRef();
+ // Send video frame to probes
+ // We do it here (instead of inside ProcessInput) to make sure samples discarded by the renderer
+ // are not sent.
+ m_videoProbeMutex.lock();
+ if (!m_videoProbes.isEmpty()) {
+ QVideoFrame frame = makeVideoFrame();
- } else {
- if (pOutputSamples[0].pSample)
- pOutputSamples[0].pSample->Release();
- pOutputSamples[0].pSample = m_sample;
- pOutputSamples[0].pSample->AddRef();
+ foreach (MFVideoProbeControl* probe, m_videoProbes)
+ probe->bufferProbed(frame);
}
+ m_videoProbeMutex.unlock();
done:
pOutputSamples[0].dwStatus = 0;
@@ -728,16 +738,10 @@ QByteArray MFTransform::dataFromBuffer(IMFMediaBuffer *buffer, int height, int *
bool MFTransform::isMediaTypeSupported(IMFMediaType *type)
{
- // if the list is empty, it supports all formats
- if (!type || m_mediaTypes.isEmpty())
+ // If we don't have the video sink's type handler,
+ // assume it supports anything...
+ if (!m_videoSinkTypeHandler || !type)
return true;
- for (int i = 0; i < m_mediaTypes.size(); ++i) {
- DWORD flags = 0;
- m_mediaTypes.at(i)->IsEqual(type, &flags);
- if (flags & MF_MEDIATYPE_EQUAL_FORMAT_TYPES)
- return true;
- }
-
- return false;
+ return m_videoSinkTypeHandler->IsMediaTypeSupported(type, NULL) == S_OK;
}
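
Because the transform now sets MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, ProcessOutput() hands out the transform's own sample with an added reference. A hedged sketch of the synchronous calling pattern a host would use against such an MFT (illustrative only; the function name is invented):

    // Illustrative host-side pattern for a synchronous MFT that provides its own
    // output samples: leave pSample NULL and release the reference handed back.
    #include <mftransform.h>

    HRESULT pushOneSampleThrough(IMFTransform *mft, IMFSample *input)
    {
        HRESULT hr = mft->ProcessInput(0, input, 0);
        if (FAILED(hr))
            return hr;

        MFT_OUTPUT_DATA_BUFFER out = { 0, NULL, 0, NULL };
        DWORD status = 0;
        hr = mft->ProcessOutput(0, 1, &out, &status);

        if (SUCCEEDED(hr) && out.pSample) {
            // ... consume the sample, then drop the reference the MFT added.
            out.pSample->Release();
        }
        if (out.pEvents)
            out.pEvents->Release();
        return hr;
    }

A real host would also handle MF_E_TRANSFORM_NEED_MORE_INPUT and the MF_E_TRANSFORM_STREAM_CHANGE result that the code above returns while no output type is set.
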
diff --git a/src/plugins/wmf/mftvideo.h b/src/plugins/wmf/mftvideo.h
index 1a188c4db..c37c8f700 100644
--- a/src/plugins/wmf/mftvideo.h
+++ b/src/plugins/wmf/mftvideo.h
@@ -53,7 +53,7 @@ public:
void addProbe(MFVideoProbeControl* probe);
void removeProbe(MFVideoProbeControl* probe);
- void addSupportedMediaType(IMFMediaType *type);
+ void setVideoSink(IUnknown *videoSink);
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -99,7 +99,7 @@ private:
IMFSample *m_sample;
QMutex m_mutex;
- QList<IMFMediaType*> m_mediaTypes;
+ IMFMediaTypeHandler *m_videoSinkTypeHandler;
QList<MFVideoProbeControl*> m_videoProbes;
QMutex m_videoProbeMutex;
diff --git a/src/plugins/wmf/player/mfplayersession.cpp b/src/plugins/wmf/player/mfplayersession.cpp
index e4c498b76..0ac1c3d66 100644
--- a/src/plugins/wmf/player/mfplayersession.cpp
+++ b/src/plugins/wmf/player/mfplayersession.cpp
@@ -266,6 +266,25 @@ void MFPlayerSession::handleMediaSourceReady()
}
}
+MFPlayerSession::MediaType MFPlayerSession::getStreamType(IMFStreamDescriptor *stream) const
+{
+ if (!stream)
+ return Unknown;
+
+ IMFMediaTypeHandler *typeHandler = NULL;
+ if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler))) {
+ GUID guidMajorType;
+ if (SUCCEEDED(typeHandler->GetMajorType(&guidMajorType))) {
+ if (guidMajorType == MFMediaType_Audio)
+ return Audio;
+ else if (guidMajorType == MFMediaType_Video)
+ return Video;
+ }
+ }
+
+ return Unknown;
+}
+
void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD)
{
HRESULT hr = S_OK;
@@ -294,45 +313,58 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat
for (DWORD i = 0; i < cSourceStreams; i++)
{
BOOL fSelected = FALSE;
+ bool streamAdded = false;
IMFStreamDescriptor *streamDesc = NULL;
HRESULT hr = sourcePD->GetStreamDescriptorByIndex(i, &fSelected, &streamDesc);
if (SUCCEEDED(hr)) {
- MediaType mediaType = Unknown;
- IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc);
- if (sourceNode) {
- IMFTopologyNode *outputNode = addOutputNode(streamDesc, mediaType, topology, 0);
- if (outputNode) {
- bool connected = false;
- if (mediaType == Audio) {
- if (!m_audioSampleGrabberNode)
- connected = setupAudioSampleGrabber(topology, sourceNode, outputNode);
- } else if (mediaType == Video && outputNodeId == -1) {
- // Remember video output node ID.
- outputNode->GetTopoNodeID(&outputNodeId);
- }
+            // The media might have multiple audio and video streams;
+            // only use one of each kind, and only if it is selected by default.
+ MediaType mediaType = getStreamType(streamDesc);
+ if (mediaType != Unknown
+ && ((m_mediaTypes & mediaType) == 0) // Check if this type isn't already added
+ && fSelected) {
+
+ IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc);
+ if (sourceNode) {
+ IMFTopologyNode *outputNode = addOutputNode(mediaType, topology, 0);
+ if (outputNode) {
+ bool connected = false;
+ if (mediaType == Audio) {
+ if (!m_audioSampleGrabberNode)
+ connected = setupAudioSampleGrabber(topology, sourceNode, outputNode);
+ } else if (mediaType == Video && outputNodeId == -1) {
+ // Remember video output node ID.
+ outputNode->GetTopoNodeID(&outputNodeId);
+ }
- if (!connected)
- hr = sourceNode->ConnectOutput(0, outputNode, 0);
- if (FAILED(hr)) {
- emit error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false);
- }
- else {
- succeededCount++;
- m_mediaTypes |= mediaType;
- switch (mediaType) {
- case Audio:
- emit audioAvailable();
- break;
- case Video:
- emit videoAvailable();
- break;
+ if (!connected)
+ hr = sourceNode->ConnectOutput(0, outputNode, 0);
+
+ if (FAILED(hr)) {
+ emit error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false);
+ } else {
+ streamAdded = true;
+ succeededCount++;
+ m_mediaTypes |= mediaType;
+ switch (mediaType) {
+ case Audio:
+ emit audioAvailable();
+ break;
+ case Video:
+ emit videoAvailable();
+ break;
+ }
}
+ outputNode->Release();
}
- outputNode->Release();
+ sourceNode->Release();
}
- sourceNode->Release();
}
+
+ if (fSelected && !streamAdded)
+ sourcePD->DeselectStream(i);
+
streamDesc->Release();
}
}
@@ -377,56 +409,38 @@ IMFTopologyNode* MFPlayerSession::addSourceNode(IMFTopology* topology, IMFMediaS
return NULL;
}
-IMFTopologyNode* MFPlayerSession::addOutputNode(IMFStreamDescriptor *streamDesc, MediaType& mediaType, IMFTopology* topology, DWORD sinkID)
+IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID)
{
IMFTopologyNode *node = NULL;
- HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node);
- if (FAILED(hr))
+ if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node)))
return NULL;
- node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE);
- mediaType = Unknown;
- IMFMediaTypeHandler *handler = NULL;
- hr = streamDesc->GetMediaTypeHandler(&handler);
- if (SUCCEEDED(hr)) {
- GUID guidMajorType;
- hr = handler->GetMajorType(&guidMajorType);
- if (SUCCEEDED(hr)) {
- IMFActivate *activate = NULL;
- if (MFMediaType_Audio == guidMajorType) {
- mediaType = Audio;
- activate = m_playerService->audioEndpointControl()->createActivate();
- } else if (MFMediaType_Video == guidMajorType) {
- mediaType = Video;
- if (m_playerService->videoRendererControl()) {
- activate = m_playerService->videoRendererControl()->createActivate();
- } else if (m_playerService->videoWindowControl()) {
- activate = m_playerService->videoWindowControl()->createActivate();
- } else {
- qWarning() << "no videoWindowControl or videoRendererControl, unable to add output node for video data";
- }
- } else {
- // Unknown stream type.
- emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
- }
-
- if (activate) {
- hr = node->SetObject(activate);
- if (SUCCEEDED(hr)) {
- hr = node->SetUINT32(MF_TOPONODE_STREAMID, sinkID);
- if (SUCCEEDED(hr)) {
- if (SUCCEEDED(topology->AddNode(node))) {
- handler->Release();
- return node;
- }
- }
- }
- }
+ IMFActivate *activate = NULL;
+ if (mediaType == Audio) {
+ activate = m_playerService->audioEndpointControl()->createActivate();
+ } else if (mediaType == Video) {
+ if (m_playerService->videoRendererControl()) {
+ activate = m_playerService->videoRendererControl()->createActivate();
+ } else if (m_playerService->videoWindowControl()) {
+ activate = m_playerService->videoWindowControl()->createActivate();
+ } else {
+ qWarning() << "no videoWindowControl or videoRendererControl, unable to add output node for video data";
}
- handler->Release();
+ } else {
+ // Unknown stream type.
+ emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
}
- node->Release();
- return NULL;
+
+ if (!activate
+ || FAILED(node->SetObject(activate))
+ || FAILED(node->SetUINT32(MF_TOPONODE_STREAMID, sinkID))
+ || FAILED(node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE))
+ || FAILED(topology->AddNode(node))) {
+ node->Release();
+ node = NULL;
+ }
+
+ return node;
}
bool MFPlayerSession::addAudioSampleGrabberNode(IMFTopology *topology)
@@ -692,7 +706,6 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode
IUnknown *element = 0;
IMFTopologyNode *node = 0;
IUnknown *outputObject = 0;
- IMFMediaTypeHandler *videoSink = 0;
IMFTopologyNode *inputNode = 0;
IMFTopologyNode *mftNode = 0;
bool mftAdded = false;
@@ -711,22 +724,10 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode
if (id != outputNodeId)
break;
- // Use output supported media types for the MFT
if (FAILED(node->GetObject(&outputObject)))
break;
- if (FAILED(outputObject->QueryInterface(IID_IMFMediaTypeHandler, (void**)&videoSink)))
- break;
-
- DWORD mtCount;
- if (FAILED(videoSink->GetMediaTypeCount(&mtCount)))
- break;
-
- for (DWORD i = 0; i < mtCount; ++i) {
- IMFMediaType *type = 0;
- if (SUCCEEDED(videoSink->GetMediaTypeByIndex(i, &type)))
- m_videoProbeMFT->addSupportedMediaType(type);
- }
+ m_videoProbeMFT->setVideoSink(outputObject);
// Insert MFT between the output node and the node connected to it.
DWORD outputIndex = 0;
@@ -760,13 +761,13 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode
node->Release();
if (element)
element->Release();
- if (videoSink)
- videoSink->Release();
if (outputObject)
outputObject->Release();
if (mftAdded)
break;
+ else
+ m_videoProbeMFT->setVideoSink(NULL);
}
} while (false);
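
With this change the video probe MFT receives the sink object directly instead of a pre-enumerated list of media types. A hypothetical sketch of such a setter, assuming the MFT simply keeps a reference to the sink and queries its IMFMediaTypeHandler lazily; the class name MFTransform, the member names and the locking are assumptions, and the real implementation is in mftvideo.cpp, which this page does not show:

// Hypothetical shape only, not the patch's actual code.
void MFTransform::setVideoSink(IUnknown *videoSink)
{
    QMutexLocker locker(&m_mutex);
    if (m_videoSink)
        m_videoSink->Release();     // drop the previously attached sink, if any
    m_videoSink = videoSink;
    if (m_videoSink)
        m_videoSink->AddRef();      // hold the new sink until replaced or shut down
}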
diff --git a/src/plugins/wmf/player/mfplayersession.h b/src/plugins/wmf/player/mfplayersession.h
index 1d136ba55..5bbf8e212 100644
--- a/src/plugins/wmf/player/mfplayersession.h
+++ b/src/plugins/wmf/player/mfplayersession.h
@@ -215,9 +215,10 @@ private:
void createSession();
void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD);
+ MediaType getStreamType(IMFStreamDescriptor *stream) const;
IMFTopologyNode* addSourceNode(IMFTopology* topology, IMFMediaSource* source,
IMFPresentationDescriptor* presentationDesc, IMFStreamDescriptor *streamDesc);
- IMFTopologyNode* addOutputNode(IMFStreamDescriptor *streamDesc, MediaType& mediaType, IMFTopology* topology, DWORD sinkID);
+ IMFTopologyNode* addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID);
bool addAudioSampleGrabberNode(IMFTopology* topology);
bool setupAudioSampleGrabber(IMFTopology *topology, IMFTopologyNode *sourceNode, IMFTopologyNode *outputNode);
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.cpp b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
index 683dd4b71..222d74ef2 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.cpp
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
@@ -32,15 +32,9 @@
****************************************************************************/
#include "mfvideorenderercontrol.h"
-#include "mfglobal.h"
+#include "mfactivate.h"
-#if defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC)
-#define MAYBE_ANGLE
-#endif
-
-#ifdef MAYBE_ANGLE
#include "evrcustompresenter.h"
-#endif
#include <qabstractvideosurface.h>
#include <qvideosurfaceformat.h>
@@ -2226,6 +2220,27 @@ namespace
};
}
+
+class EVRCustomPresenterActivate : public MFAbstractActivate
+{
+public:
+ EVRCustomPresenterActivate();
+ ~EVRCustomPresenterActivate()
+ { }
+
+ STDMETHODIMP ActivateObject(REFIID riid, void **ppv);
+ STDMETHODIMP ShutdownObject();
+ STDMETHODIMP DetachObject();
+
+ void setSurface(QAbstractVideoSurface *surface);
+
+private:
+ EVRCustomPresenter *m_presenter;
+ QAbstractVideoSurface *m_surface;
+ QMutex m_mutex;
+};
+
+
MFVideoRendererControl::MFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(0)
@@ -2245,13 +2260,11 @@ void MFVideoRendererControl::clear()
if (m_surface)
m_surface->stop();
-#ifdef MAYBE_ANGLE
if (m_presenterActivate) {
m_presenterActivate->ShutdownObject();
m_presenterActivate->Release();
m_presenterActivate = NULL;
}
-#endif
if (m_currentActivate) {
m_currentActivate->ShutdownObject();
@@ -2280,12 +2293,9 @@ void MFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged()));
}
-#ifdef MAYBE_ANGLE
if (m_presenterActivate)
m_presenterActivate->setSurface(m_surface);
- else
-#endif
- if (m_currentActivate)
+ else if (m_currentActivate)
static_cast<VideoRendererActivate*>(m_currentActivate)->setSurface(m_surface);
}
@@ -2323,11 +2333,9 @@ void MFVideoRendererControl::customEvent(QEvent *event)
void MFVideoRendererControl::supportedFormatsChanged()
{
-#ifdef MAYBE_ANGLE
if (m_presenterActivate)
- m_presenterActivate->supportedFormatsChanged();
- else
-#endif
+ return;
+
if (m_currentActivate)
static_cast<VideoRendererActivate*>(m_currentActivate)->supportedFormatsChanged();
}
@@ -2347,26 +2355,67 @@ IMFActivate* MFVideoRendererControl::createActivate()
clear();
-#ifdef MAYBE_ANGLE
- // We can use the EVR with our custom presenter only if the surface supports OpenGL
- // texture handles. We also require ANGLE (due to the D3D interop).
- if (!m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()
- && QMediaOpenGLHelper::isANGLE()) {
- // Create the EVR media sink, but replace the presenter with our own
- if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
- m_presenterActivate = new EVRCustomPresenterActivate;
- m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
- }
- }
-#endif
-
- if (!m_currentActivate)
+ // Create the EVR media sink, but replace the presenter with our own
+ if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
+ m_presenterActivate = new EVRCustomPresenterActivate;
+ m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
+ } else {
m_currentActivate = new VideoRendererActivate(this);
+ }
setSurface(m_surface);
return m_currentActivate;
}
+
+EVRCustomPresenterActivate::EVRCustomPresenterActivate()
+ : MFAbstractActivate()
+ , m_presenter(0)
+ , m_surface(0)
+{ }
+
+HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
+{
+ if (!ppv)
+ return E_INVALIDARG;
+ QMutexLocker locker(&m_mutex);
+ if (!m_presenter) {
+ m_presenter = new EVRCustomPresenter;
+ if (m_surface)
+ m_presenter->setSurface(m_surface);
+ }
+ return m_presenter->QueryInterface(riid, ppv);
+}
+
+HRESULT EVRCustomPresenterActivate::ShutdownObject()
+{
+ // The presenter does not implement IMFShutdown so
+ // this function is the same as DetachObject()
+ return DetachObject();
+}
+
+HRESULT EVRCustomPresenterActivate::DetachObject()
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_presenter) {
+ m_presenter->Release();
+ m_presenter = 0;
+ }
+ return S_OK;
+}
+
+void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_surface == surface)
+ return;
+
+ m_surface = surface;
+
+ if (m_presenter)
+ m_presenter->setSurface(surface);
+}
+
#include "moc_mfvideorenderercontrol.cpp"
#include "mfvideorenderercontrol.moc"
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.h b/src/plugins/wmf/player/mfvideorenderercontrol.h
index ca3b95d10..224fcea51 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.h
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.h
@@ -38,14 +38,10 @@
#include <mfapi.h>
#include <mfidl.h>
-QT_BEGIN_NAMESPACE
+QT_USE_NAMESPACE
class EVRCustomPresenterActivate;
-QT_END_NAMESPACE
-
-QT_USE_NAMESPACE
-
class MFVideoRendererControl : public QVideoRendererControl
{
Q_OBJECT
diff --git a/src/plugins/wmf/wmf.pro b/src/plugins/wmf/wmf.pro
index 68a777f37..1f43bb128 100644
--- a/src/plugins/wmf/wmf.pro
+++ b/src/plugins/wmf/wmf.pro
@@ -17,7 +17,6 @@ HEADERS += \
sourceresolver.h \
samplegrabber.h \
mftvideo.h \
- mfglobal.h \
mfactivate.h
SOURCES += \
@@ -26,21 +25,7 @@ SOURCES += \
sourceresolver.cpp \
samplegrabber.cpp \
mftvideo.cpp \
- mfactivate.cpp \
- mfglobal.cpp
-
-contains(QT_CONFIG, angle)|contains(QT_CONFIG, dynamicgl) {
- LIBS += -ld3d9 -ldxva2 -lwinmm -levr
- QT += gui-private
-
- HEADERS += \
- $$PWD/evrcustompresenter.h \
- $$PWD/evrd3dpresentengine.h
-
- SOURCES += \
- $$PWD/evrcustompresenter.cpp \
- $$PWD/evrd3dpresentengine.cpp
-}
+ mfactivate.cpp
include (player/player.pri)
include (decoder/decoder.pri)