diff options
Diffstat (limited to 'src/plugins/common')
-rw-r--r-- | src/plugins/common/evr.pri | 26 | ||||
-rw-r--r-- | src/plugins/common/evr/evrcustompresenter.cpp | 2024 | ||||
-rw-r--r-- | src/plugins/common/evr/evrcustompresenter.h | 358 | ||||
-rw-r--r-- | src/plugins/common/evr/evrd3dpresentengine.cpp | 655 | ||||
-rw-r--r-- | src/plugins/common/evr/evrd3dpresentengine.h | 146 | ||||
-rw-r--r-- | src/plugins/common/evr/evrdefs.cpp | 42 | ||||
-rw-r--r-- | src/plugins/common/evr/evrdefs.h | 219 | ||||
-rw-r--r-- | src/plugins/common/evr/evrhelpers.cpp | 103 | ||||
-rw-r--r-- | src/plugins/common/evr/evrhelpers.h | 85 | ||||
-rw-r--r-- | src/plugins/common/evr/evrvideowindowcontrol.cpp | 2 |
10 files changed, 3655 insertions, 5 deletions
diff --git a/src/plugins/common/evr.pri b/src/plugins/common/evr.pri index f951f6730..aa272adab 100644 --- a/src/plugins/common/evr.pri +++ b/src/plugins/common/evr.pri @@ -2,7 +2,27 @@ INCLUDEPATH += $$PWD/evr qtHaveModule(widgets): QT += widgets -HEADERS += $$PWD/evr/evrvideowindowcontrol.h \ - $$PWD/evr/evrdefs.h +HEADERS += \ + $$PWD/evr/evrvideowindowcontrol.h \ + $$PWD/evr/evrdefs.h -SOURCES += $$PWD/evr/evrvideowindowcontrol.cpp +SOURCES += \ + $$PWD/evr/evrvideowindowcontrol.cpp \ + $$PWD/evr/evrdefs.cpp + +contains(QT_CONFIG, angle)|contains(QT_CONFIG, dynamicgl) { + LIBS += -lmf -lmfplat -lmfuuid -ld3d9 -ldxva2 -lwinmm -levr + QT += gui-private + + DEFINES += CUSTOM_EVR_PRESENTER + + HEADERS += \ + $$PWD/evr/evrcustompresenter.h \ + $$PWD/evr/evrd3dpresentengine.h \ + $$PWD/evr/evrhelpers.h + + SOURCES += \ + $$PWD/evr/evrcustompresenter.cpp \ + $$PWD/evr/evrd3dpresentengine.cpp \ + $$PWD/evr/evrhelpers.cpp +} diff --git a/src/plugins/common/evr/evrcustompresenter.cpp b/src/plugins/common/evr/evrcustompresenter.cpp new file mode 100644 index 000000000..969dd1415 --- /dev/null +++ b/src/plugins/common/evr/evrcustompresenter.cpp @@ -0,0 +1,2024 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. 
+** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "evrcustompresenter.h" + +#include "evrd3dpresentengine.h" +#include "evrhelpers.h" + +#include <QtCore/qmutex.h> +#include <QtCore/qvarlengtharray.h> +#include <QtCore/qrect.h> +#include <qabstractvideosurface.h> +#include <qthread.h> +#include <qcoreapplication.h> +#include <qmath.h> +#include <QtCore/qdebug.h> +#include <float.h> +#include <evcode.h> + +const static MFRatio g_DefaultFrameRate = { 30, 1 }; +static const DWORD SCHEDULER_TIMEOUT = 5000; +static const MFTIME ONE_SECOND = 10000000; +static const LONG ONE_MSEC = 1000; + +// Function declarations. 
+static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration); +static HRESULT clearDesiredSampleTime(IMFSample *sample); +static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource); +static DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat); +static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type); + +static inline LONG MFTimeToMsec(const LONGLONG& time) +{ + return (LONG)(time / (ONE_SECOND / ONE_MSEC)); +} + +bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter) +{ + if (!evr || !presenter) + return false; + + HRESULT result = E_FAIL; + + IMFVideoRenderer *renderer = NULL; + if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) { + result = renderer->InitializeRenderer(NULL, presenter); + renderer->Release(); + } + + return result == S_OK; +} + +Scheduler::Scheduler() + : m_clock(NULL) + , m_CB(NULL) + , m_threadID(0) + , m_schedulerThread(0) + , m_threadReadyEvent(0) + , m_flushEvent(0) + , m_playbackRate(1.0f) + , m_perFrameInterval(0) + , m_perFrame_1_4th(0) + , m_lastSampleTime(0) +{ +} + +Scheduler::~Scheduler() +{ + qt_evr_safe_release(&m_clock); + for (int i = 0; i < m_scheduledSamples.size(); ++i) + m_scheduledSamples[i]->Release(); + m_scheduledSamples.clear(); +} + +void Scheduler::setFrameRate(const MFRatio& fps) +{ + UINT64 AvgTimePerFrame = 0; + + // Convert to a duration. + MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame); + + m_perFrameInterval = (MFTIME)AvgTimePerFrame; + + // Calculate 1/4th of this value, because we use it frequently. 
+ m_perFrame_1_4th = m_perFrameInterval / 4; +} + +HRESULT Scheduler::startScheduler(IMFClock *clock) +{ + if (m_schedulerThread) + return E_UNEXPECTED; + + HRESULT hr = S_OK; + DWORD dwID = 0; + HANDLE hObjects[2]; + DWORD dwWait = 0; + + if (m_clock) + m_clock->Release(); + m_clock = clock; + if (m_clock) + m_clock->AddRef(); + + // Set a high the timer resolution (ie, short timer period). + timeBeginPeriod(1); + + // Create an event to wait for the thread to start. + m_threadReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL); + if (!m_threadReadyEvent) { + hr = HRESULT_FROM_WIN32(GetLastError()); + goto done; + } + + // Create an event to wait for flush commands to complete. + m_flushEvent = CreateEvent(NULL, FALSE, FALSE, NULL); + if (!m_flushEvent) { + hr = HRESULT_FROM_WIN32(GetLastError()); + goto done; + } + + // Create the scheduler thread. + m_schedulerThread = CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID); + if (!m_schedulerThread) { + hr = HRESULT_FROM_WIN32(GetLastError()); + goto done; + } + + // Wait for the thread to signal the "thread ready" event. + hObjects[0] = m_threadReadyEvent; + hObjects[1] = m_schedulerThread; + dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles. + if (WAIT_OBJECT_0 != dwWait) { + // The thread terminated early for some reason. This is an error condition. + CloseHandle(m_schedulerThread); + m_schedulerThread = NULL; + + hr = E_UNEXPECTED; + goto done; + } + + m_threadID = dwID; + +done: + // Regardless success/failure, we are done using the "thread ready" event. + if (m_threadReadyEvent) { + CloseHandle(m_threadReadyEvent); + m_threadReadyEvent = NULL; + } + return hr; +} + +HRESULT Scheduler::stopScheduler() +{ + if (!m_schedulerThread) + return S_OK; + + // Ask the scheduler thread to exit. + PostThreadMessage(m_threadID, Terminate, 0, 0); + + // Wait for the thread to exit. + WaitForSingleObject(m_schedulerThread, INFINITE); + + // Close handles. 
+ CloseHandle(m_schedulerThread); + m_schedulerThread = NULL; + + CloseHandle(m_flushEvent); + m_flushEvent = NULL; + + // Discard samples. + m_mutex.lock(); + for (int i = 0; i < m_scheduledSamples.size(); ++i) + m_scheduledSamples[i]->Release(); + m_scheduledSamples.clear(); + m_mutex.unlock(); + + // Restore the timer resolution. + timeEndPeriod(1); + + return S_OK; +} + +HRESULT Scheduler::flush() +{ + if (m_schedulerThread) { + // Ask the scheduler thread to flush. + PostThreadMessage(m_threadID, Flush, 0 , 0); + + // Wait for the scheduler thread to signal the flush event, + // OR for the thread to terminate. + HANDLE objects[] = { m_flushEvent, m_schedulerThread }; + + WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT); + } + + return S_OK; +} + +HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow) +{ + if (!m_CB) + return MF_E_NOT_INITIALIZED; + + if (!m_schedulerThread) + return MF_E_NOT_INITIALIZED; + + HRESULT hr = S_OK; + DWORD dwExitCode = 0; + + GetExitCodeThread(m_schedulerThread, &dwExitCode); + if (dwExitCode != STILL_ACTIVE) + return E_FAIL; + + if (presentNow || !m_clock) { + // Present the sample immediately. + sample->AddRef(); + QMetaObject::invokeMethod(m_CB, + "presentSample", + Qt::QueuedConnection, + Q_ARG(void*, sample), + Q_ARG(qint64, 0)); + } else { + // Queue the sample and ask the scheduler thread to wake up. + m_mutex.lock(); + sample->AddRef(); + m_scheduledSamples.enqueue(sample); + m_mutex.unlock(); + + if (SUCCEEDED(hr)) + PostThreadMessage(m_threadID, Schedule, 0, 0); + } + + return hr; +} + +HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep) +{ + HRESULT hr = S_OK; + LONG wait = 0; + IMFSample *sample = NULL; + + // Process samples until the queue is empty or until the wait time > 0. + while (!m_scheduledSamples.isEmpty()) { + m_mutex.lock(); + sample = m_scheduledSamples.dequeue(); + m_mutex.unlock(); + + // Process the next sample in the queue. 
If the sample is not ready + // for presentation. the value returned in wait is > 0, which + // means the scheduler should sleep for that amount of time. + + hr = processSample(sample, &wait); + qt_evr_safe_release(&sample); + + if (FAILED(hr) || wait > 0) + break; + } + + // If the wait time is zero, it means we stopped because the queue is + // empty (or an error occurred). Set the wait time to infinite; this will + // make the scheduler thread sleep until it gets another thread message. + if (wait == 0) + wait = INFINITE; + + *nextSleep = wait; + return hr; +} + +HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep) +{ + HRESULT hr = S_OK; + + LONGLONG hnsPresentationTime = 0; + LONGLONG hnsTimeNow = 0; + MFTIME hnsSystemTime = 0; + + bool presentNow = true; + LONG nextSleep = 0; + + if (m_clock) { + // Get the sample's time stamp. It is valid for a sample to + // have no time stamp. + hr = sample->GetSampleTime(&hnsPresentationTime); + + // Get the clock time. (But if the sample does not have a time stamp, + // we don't need the clock time.) + if (SUCCEEDED(hr)) + hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime); + + // Calculate the time until the sample's presentation time. + // A negative value means the sample is late. + LONGLONG hnsDelta = hnsPresentationTime - hnsTimeNow; + if (m_playbackRate < 0) { + // For reverse playback, the clock runs backward. Therefore, the + // delta is reversed. + hnsDelta = - hnsDelta; + } + + if (hnsDelta < - m_perFrame_1_4th) { + // This sample is late. + presentNow = true; + } else if (hnsDelta > (3 * m_perFrame_1_4th)) { + // This sample is still too early. Go to sleep. + nextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th)); + + // Adjust the sleep time for the clock rate. (The presentation clock runs + // at m_fRate, but sleeping uses the system clock.) + if (m_playbackRate != 0) + nextSleep = (LONG)(nextSleep / qFabs(m_playbackRate)); + + // Don't present yet. 
+ presentNow = false; + } + } + + if (presentNow) { + sample->AddRef(); + QMetaObject::invokeMethod(m_CB, + "presentSample", + Qt::QueuedConnection, + Q_ARG(void*, sample), + Q_ARG(qint64, hnsPresentationTime)); + } else { + // The sample is not ready yet. Return it to the queue. + m_mutex.lock(); + sample->AddRef(); + m_scheduledSamples.prepend(sample); + m_mutex.unlock(); + } + + *pNextSleep = nextSleep; + + return hr; +} + +DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter) +{ + Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter); + if (!scheduler) + return -1; + return scheduler->schedulerThreadProcPrivate(); +} + +DWORD Scheduler::schedulerThreadProcPrivate() +{ + HRESULT hr = S_OK; + MSG msg; + LONG wait = INFINITE; + bool exitThread = false; + + // Force the system to create a message queue for this thread. + // (See MSDN documentation for PostThreadMessage.) + PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE); + + // Signal to the scheduler that the thread is ready. + SetEvent(m_threadReadyEvent); + + while (!exitThread) { + // Wait for a thread message OR until the wait time expires. + DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE); + + if (result == WAIT_TIMEOUT) { + // If we timed out, then process the samples in the queue + hr = processSamplesInQueue(&wait); + if (FAILED(hr)) + exitThread = true; + } + + while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { + bool processSamples = true; + + switch (msg.message) { + case Terminate: + exitThread = true; + break; + case Flush: + // Flushing: Clear the sample queue and set the event. + m_mutex.lock(); + for (int i = 0; i < m_scheduledSamples.size(); ++i) + m_scheduledSamples[i]->Release(); + m_scheduledSamples.clear(); + m_mutex.unlock(); + wait = INFINITE; + SetEvent(m_flushEvent); + break; + case Schedule: + // Process as many samples as we can. 
+ if (processSamples) { + hr = processSamplesInQueue(&wait); + if (FAILED(hr)) + exitThread = true; + processSamples = (wait != (LONG)INFINITE); + } + break; + } + } + + } + + return (SUCCEEDED(hr) ? 0 : 1); +} + + +SamplePool::SamplePool() + : m_initialized(false) + , m_pending(0) +{ +} + +SamplePool::~SamplePool() +{ + clear(); +} + +HRESULT SamplePool::getSample(IMFSample **sample) +{ + QMutexLocker locker(&m_mutex); + + if (!m_initialized) + return MF_E_NOT_INITIALIZED; + + if (m_videoSampleQueue.isEmpty()) + return MF_E_SAMPLEALLOCATOR_EMPTY; + + // Get a sample from the allocated queue. + + // It doesn't matter if we pull them from the head or tail of the list, + // but when we get it back, we want to re-insert it onto the opposite end. + // (see ReturnSample) + + IMFSample *taken = m_videoSampleQueue.takeFirst(); + + m_pending++; + + // Give the sample to the caller. + *sample = taken; + (*sample)->AddRef(); + + taken->Release(); + + return S_OK; +} + +HRESULT SamplePool::returnSample(IMFSample *sample) +{ + QMutexLocker locker(&m_mutex); + + if (!m_initialized) + return MF_E_NOT_INITIALIZED; + + m_videoSampleQueue.append(sample); + sample->AddRef(); + + m_pending--; + + return S_OK; +} + +BOOL SamplePool::areSamplesPending() +{ + QMutexLocker locker(&m_mutex); + + bool ret = false; + + if (!m_initialized) + ret = false; + else + ret = (m_pending > 0); + + return ret; +} + +HRESULT SamplePool::initialize(QList<IMFSample*> &samples) +{ + QMutexLocker locker(&m_mutex); + + if (m_initialized) + return MF_E_INVALIDREQUEST; + + IMFSample *sample = NULL; + + // Move these samples into our allocated queue. 
+ for (int i = 0; i < samples.size(); ++i) { + sample = samples.at(i); + sample->AddRef(); + m_videoSampleQueue.append(sample); + } + + m_initialized = true; + + for (int i = 0; i < samples.size(); ++i) + samples[i]->Release(); + samples.clear(); + return S_OK; +} + +HRESULT SamplePool::clear() +{ + QMutexLocker locker(&m_mutex); + + for (int i = 0; i < m_videoSampleQueue.size(); ++i) + m_videoSampleQueue[i]->Release(); + m_videoSampleQueue.clear(); + m_initialized = false; + m_pending = 0; + + return S_OK; +} + + +EVRCustomPresenter::EVRCustomPresenter() + : QObject() + , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree) + , m_refCount(1) + , m_renderState(RenderShutdown) + , m_mutex(QMutex::Recursive) + , m_tokenCounter(0) + , m_sampleNotify(false) + , m_repaint(false) + , m_prerolled(false) + , m_endStreaming(false) + , m_playbackRate(1.0f) + , m_D3DPresentEngine(0) + , m_clock(0) + , m_mixer(0) + , m_mediaEventSink(0) + , m_mediaType(0) + , m_surface(0) +{ + // Initial source rectangle = (0,0,1,1) + m_sourceRect.top = 0; + m_sourceRect.left = 0; + m_sourceRect.bottom = 1; + m_sourceRect.right = 1; + + m_D3DPresentEngine = new D3DPresentEngine; + m_scheduler.setCallback(m_D3DPresentEngine); +} + +EVRCustomPresenter::~EVRCustomPresenter() +{ + qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); + qt_evr_safe_release(&m_mediaType); + + m_D3DPresentEngine->deleteLater(); +} + +HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject) +{ + if (!ppvObject) + return E_POINTER; + if (riid == IID_IMFGetService) { + *ppvObject = static_cast<IMFGetService*>(this); + } else if (riid == IID_IMFTopologyServiceLookupClient) { + *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this); + } else if (riid == IID_IMFVideoDeviceID) { + *ppvObject = static_cast<IMFVideoDeviceID*>(this); + } else if (riid == IID_IMFVideoPresenter) { + *ppvObject = static_cast<IMFVideoPresenter*>(this); + } else if 
(riid == IID_IMFRateSupport) { + *ppvObject = static_cast<IMFRateSupport*>(this); + } else if (riid == IID_IUnknown) { + *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this)); + } else if (riid == IID_IMFClockStateSink) { + *ppvObject = static_cast<IMFClockStateSink*>(this); + } else { + *ppvObject = NULL; + return E_NOINTERFACE; + } + AddRef(); + return S_OK; +} + +ULONG EVRCustomPresenter::AddRef() +{ + return InterlockedIncrement(&m_refCount); +} + +ULONG EVRCustomPresenter::Release() +{ + ULONG uCount = InterlockedDecrement(&m_refCount); + if (uCount == 0) + delete this; + return uCount; +} + +HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) +{ + HRESULT hr = S_OK; + + if (!ppvObject) + return E_POINTER; + + // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE. + if (guidService != mr_VIDEO_RENDER_SERVICE) + return MF_E_UNSUPPORTED_SERVICE; + + // First try to get the service interface from the D3DPresentEngine object. + hr = m_D3DPresentEngine->getService(guidService, riid, ppvObject); + if (FAILED(hr)) + // Next, check if this object supports the interface. + hr = QueryInterface(riid, ppvObject); + + return hr; +} + +HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID) +{ + if (!deviceID) + return E_POINTER; + + *deviceID = iid_IDirect3DDevice9; + + return S_OK; +} + +HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup) +{ + if (!lookup) + return E_POINTER; + + HRESULT hr = S_OK; + DWORD objectCount = 0; + + QMutexLocker locker(&m_mutex); + + // Do not allow initializing when playing or paused. + if (isActive()) + return MF_E_INVALIDREQUEST; + + qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); + + // Ask for the clock. Optional, because the EVR might not have a clock. 
+ objectCount = 1; + + lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, + mr_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock), + &objectCount + ); + + // Ask for the mixer. (Required.) + objectCount = 1; + + hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, + mr_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer), + &objectCount + ); + + if (FAILED(hr)) + return hr; + + // Make sure that we can work with this mixer. + hr = configureMixer(m_mixer); + if (FAILED(hr)) + return hr; + + // Ask for the EVR's event-sink interface. (Required.) + objectCount = 1; + + hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, + mr_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink), + &objectCount + ); + + if (SUCCEEDED(hr)) + m_renderState = RenderStopped; + + return hr; +} + +HRESULT EVRCustomPresenter::ReleaseServicePointers() +{ + // Enter the shut-down state. + m_mutex.lock(); + + m_renderState = RenderShutdown; + + m_mutex.unlock(); + + // Flush any samples that were scheduled. + flush(); + + // Clear the media type and release related resources. + setMediaType(NULL); + + // Release all services that were acquired from InitServicePointers. + qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); + + return S_OK; +} + +HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) +{ + HRESULT hr = S_OK; + + QMutexLocker locker(&m_mutex); + + hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + switch (message) { + // Flush all pending samples. + case MFVP_MESSAGE_FLUSH: + hr = flush(); + break; + + // Renegotiate the media type with the mixer. + case MFVP_MESSAGE_INVALIDATEMEDIATYPE: + hr = renegotiateMediaType(); + break; + + // The mixer received a new input sample. + case MFVP_MESSAGE_PROCESSINPUTNOTIFY: + hr = processInputNotify(); + break; + + // Streaming is about to start. + case MFVP_MESSAGE_BEGINSTREAMING: + hr = beginStreaming(); + break; + + // Streaming has ended. 
(The EVR has stopped.) + case MFVP_MESSAGE_ENDSTREAMING: + hr = endStreaming(); + break; + + // All input streams have ended. + case MFVP_MESSAGE_ENDOFSTREAM: + // Set the EOS flag. + m_endStreaming = true; + // Check if it's time to send the EC_COMPLETE event to the EVR. + hr = checkEndOfStream(); + break; + + // Frame-stepping is starting. + case MFVP_MESSAGE_STEP: + hr = prepareFrameStep(DWORD(param)); + break; + + // Cancels frame-stepping. + case MFVP_MESSAGE_CANCELSTEP: + hr = cancelFrameStep(); + break; + + default: + hr = E_INVALIDARG; // Unknown message. This case should never occur. + break; + } + + return hr; +} + +HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType) +{ + HRESULT hr = S_OK; + + if (!mediaType) + return E_POINTER; + + *mediaType = NULL; + + QMutexLocker locker(&m_mutex); + + hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + if (!m_mediaType) + return MF_E_NOT_INITIALIZED; + + return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType)); +} + +HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset) +{ + QMutexLocker locker(&m_mutex); + + // We cannot start after shutdown. + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + // Check if the clock is already active (not stopped). + if (isActive()) { + m_renderState = RenderStarted; + + // If the clock position changes while the clock is active, it + // is a seek request. We need to flush all pending samples. + if (clockStartOffset != PRESENTATION_CURRENT_POSITION) + flush(); + } else { + m_renderState = RenderStarted; + + // The clock has started from the stopped state. + + // Possibly we are in the middle of frame-stepping OR have samples waiting + // in the frame-step queue. 
Deal with these two cases first: + hr = startFrameStep(); + if (FAILED(hr)) + return hr; + } + + // Start the video surface in the main thread + if (thread() == QThread::currentThread()) + startSurface(); + else + QMetaObject::invokeMethod(this, "startSurface", Qt::QueuedConnection); + + // Now try to get new output samples from the mixer. + processOutputLoop(); + + return hr; +} + +HRESULT EVRCustomPresenter::OnClockRestart(MFTIME) +{ + QMutexLocker locker(&m_mutex); + + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + // The EVR calls OnClockRestart only while paused. + + m_renderState = RenderStarted; + + // Possibly we are in the middle of frame-stepping OR we have samples waiting + // in the frame-step queue. Deal with these two cases first: + hr = startFrameStep(); + if (FAILED(hr)) + return hr; + + // Now resume the presentation loop. + processOutputLoop(); + + return hr; +} + +HRESULT EVRCustomPresenter::OnClockStop(MFTIME) +{ + QMutexLocker locker(&m_mutex); + + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + if (m_renderState != RenderStopped) { + m_renderState = RenderStopped; + flush(); + + // If we are in the middle of frame-stepping, cancel it now. + if (m_frameStep.state != FrameStepNone) + cancelFrameStep(); + } + + // Stop the video surface in the main thread + if (thread() == QThread::currentThread()) + stopSurface(); + else + QMetaObject::invokeMethod(this, "stopSurface", Qt::QueuedConnection); + + return S_OK; +} + +HRESULT EVRCustomPresenter::OnClockPause(MFTIME) +{ + QMutexLocker locker(&m_mutex); + + // We cannot pause the clock after shutdown. + HRESULT hr = checkShutdown(); + + if (SUCCEEDED(hr)) + m_renderState = RenderPaused; + + return hr; +} + +HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate) +{ + // Note: + // The presenter reports its maximum rate through the IMFRateSupport interface. + // Here, we assume that the EVR honors the maximum rate. 
+ + QMutexLocker locker(&m_mutex); + + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + // If the rate is changing from zero (scrubbing) to non-zero, cancel the + // frame-step operation. + if ((m_playbackRate == 0.0f) && (rate != 0.0f)) { + cancelFrameStep(); + for (int i = 0; i < m_frameStep.samples.size(); ++i) + m_frameStep.samples[i]->Release(); + m_frameStep.samples.clear(); + } + + m_playbackRate = rate; + + // Tell the scheduler about the new rate. + m_scheduler.setClockRate(rate); + + return S_OK; +} + +HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate) +{ + if (!rate) + return E_POINTER; + + QMutexLocker locker(&m_mutex); + + HRESULT hr = checkShutdown(); + + if (SUCCEEDED(hr)) { + // There is no minimum playback rate, so the minimum is zero. + *rate = 0; + } + + return S_OK; +} + +HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) +{ + if (!rate) + return E_POINTER; + + QMutexLocker locker(&m_mutex); + + float maxRate = 0.0f; + + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + // Get the maximum *forward* rate. + maxRate = getMaxRate(thin); + + // For reverse playback, it's the negative of maxRate. + if (direction == MFRATE_REVERSE) + maxRate = -maxRate; + + *rate = maxRate; + + return S_OK; +} + +HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) +{ + QMutexLocker locker(&m_mutex); + + float maxRate = 0.0f; + float nearestRate = rate; // If we support rate, that is the nearest. + + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + return hr; + + // Find the maximum forward rate. + // Note: We have no minimum rate (that is, we support anything down to 0). + maxRate = getMaxRate(thin); + + if (qFabs(rate) > maxRate) { + // The (absolute) requested rate exceeds the maximum rate. + hr = MF_E_UNSUPPORTED_RATE; + + // The nearest supported rate is maxRate. 
+ nearestRate = maxRate; + if (rate < 0) { + // Negative for reverse playback. + nearestRate = -nearestRate; + } + } + + // Return the nearest supported rate. + if (nearestSupportedRate) + *nearestSupportedRate = nearestRate; + + return hr; +} + +void EVRCustomPresenter::supportedFormatsChanged() +{ + QMutexLocker locker(&m_mutex); + + m_supportedGLFormats.clear(); + if (!m_surface) + return; + + QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle); + for (int i = 0; i < formats.size(); ++i) { + DWORD fourCC = getFourCCFromPixelFormat(formats.at(i)); + if (fourCC) + m_supportedGLFormats.append(fourCC); + } +} + +void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface) +{ + m_mutex.lock(); + + if (m_surface) { + disconnect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, + this, &EVRCustomPresenter::supportedFormatsChanged); + } + + m_surface = surface; + + if (m_D3DPresentEngine) + m_D3DPresentEngine->setSurface(surface); + + if (m_surface) { + connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, + this, &EVRCustomPresenter::supportedFormatsChanged); + } + + m_mutex.unlock(); + + supportedFormatsChanged(); +} + +HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer) +{ + // Set the zoom rectangle (ie, the source clipping rectangle). + return setMixerSourceRect(mixer, m_sourceRect); +} + +HRESULT EVRCustomPresenter::renegotiateMediaType() +{ + HRESULT hr = S_OK; + bool foundMediaType = false; + + IMFMediaType *mixerType = NULL; + IMFMediaType *optimalType = NULL; + + if (!m_mixer) + return MF_E_INVALIDREQUEST; + + // Loop through all of the mixer's proposed output types. + DWORD typeIndex = 0; + while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) { + qt_evr_safe_release(&mixerType); + qt_evr_safe_release(&optimalType); + + // Step 1. Get the next media type supported by mixer. 
+ hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType); + if (FAILED(hr)) + break; + + // From now on, if anything in this loop fails, try the next type, + // until we succeed or the mixer runs out of types. + + // Step 2. Check if we support this media type. + if (SUCCEEDED(hr)) + hr = isMediaTypeSupported(mixerType); + + // Step 3. Adjust the mixer's type to match our requirements. + if (SUCCEEDED(hr)) + hr = createOptimalVideoType(mixerType, &optimalType); + + // Step 4. Check if the mixer will accept this media type. + if (SUCCEEDED(hr)) + hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY); + + // Step 5. Try to set the media type on ourselves. + if (SUCCEEDED(hr)) + hr = setMediaType(optimalType); + + // Step 6. Set output media type on mixer. + if (SUCCEEDED(hr)) { + hr = m_mixer->SetOutputType(0, optimalType, 0); + + // If something went wrong, clear the media type. + if (FAILED(hr)) + setMediaType(NULL); + } + + if (SUCCEEDED(hr)) + foundMediaType = true; + } + + qt_evr_safe_release(&mixerType); + qt_evr_safe_release(&optimalType); + + return hr; +} + +HRESULT EVRCustomPresenter::flush() +{ + m_prerolled = false; + + // The scheduler might have samples that are waiting for + // their presentation time. Tell the scheduler to flush. + + // This call blocks until the scheduler threads discards all scheduled samples. + m_scheduler.flush(); + + // Flush the frame-step queue. + for (int i = 0; i < m_frameStep.samples.size(); ++i) + m_frameStep.samples[i]->Release(); + m_frameStep.samples.clear(); + + if (m_renderState == RenderStopped) { + // Repaint with black. + QMetaObject::invokeMethod(m_D3DPresentEngine, + "presentSample", + Qt::QueuedConnection, + Q_ARG(void*, 0), + Q_ARG(qint64, 0)); + } + + return S_OK; +} + +HRESULT EVRCustomPresenter::processInputNotify() +{ + HRESULT hr = S_OK; + + // Set the flag that says the mixer has a new sample. 
+ m_sampleNotify = true; + + if (!m_mediaType) { + // We don't have a valid media type yet. + hr = MF_E_TRANSFORM_TYPE_NOT_SET; + } else { + // Try to process an output sample. + processOutputLoop(); + } + return hr; +} + +HRESULT EVRCustomPresenter::beginStreaming() +{ + HRESULT hr = S_OK; + + // Start the scheduler thread. + hr = m_scheduler.startScheduler(m_clock); + + return hr; +} + +HRESULT EVRCustomPresenter::endStreaming() +{ + HRESULT hr = S_OK; + + // Stop the scheduler thread. + hr = m_scheduler.stopScheduler(); + + return hr; +} + +HRESULT EVRCustomPresenter::checkEndOfStream() +{ + if (!m_endStreaming) { + // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message. + return S_OK; + } + + if (m_sampleNotify) { + // The mixer still has input. + return S_OK; + } + + if (m_samplePool.areSamplesPending()) { + // Samples are still scheduled for rendering. + return S_OK; + } + + // Everything is complete. Now we can tell the EVR that we are done. + notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0); + m_endStreaming = false; + return S_OK; +} + +HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps) +{ + HRESULT hr = S_OK; + + // Cache the step count. + m_frameStep.steps += steps; + + // Set the frame-step state. + m_frameStep.state = FrameStepWaitingStart; + + // If the clock is are already running, we can start frame-stepping now. + // Otherwise, we will start when the clock starts. + if (m_renderState == RenderStarted) + hr = startFrameStep(); + + return hr; +} + +HRESULT EVRCustomPresenter::startFrameStep() +{ + HRESULT hr = S_OK; + IMFSample *sample = NULL; + + if (m_frameStep.state == FrameStepWaitingStart) { + // We have a frame-step request, and are waiting for the clock to start. + // Set the state to "pending," which means we are waiting for samples. + m_frameStep.state = FrameStepPending; + + // If the frame-step queue already has samples, process them now. 
+ while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) { + sample = m_frameStep.samples.takeFirst(); + + hr = deliverFrameStepSample(sample); + if (FAILED(hr)) + goto done; + + qt_evr_safe_release(&sample); + + // We break from this loop when: + // (a) the frame-step queue is empty, or + // (b) the frame-step operation is complete. + } + } else if (m_frameStep.state == FrameStepNone) { + // We are not frame stepping. Therefore, if the frame-step queue has samples, + // we need to process them normally. + while (!m_frameStep.samples.isEmpty()) { + sample = m_frameStep.samples.takeFirst(); + + hr = deliverSample(sample, false); + if (FAILED(hr)) + goto done; + + qt_evr_safe_release(&sample); + } + } + +done: + qt_evr_safe_release(&sample); + return hr; +} + +HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample) +{ + HRESULT hr = S_OK; + MFTIME sampleTime = 0; + MFTIME systemTime = 0; + + // Update our state. + m_frameStep.state = FrameStepComplete; + m_frameStep.sampleNoRef = 0; + + // Notify the EVR that the frame-step is complete. + notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled) + + // If we are scrubbing (rate == 0), also send the "scrub time" event. + if (isScrubbing()) { + // Get the time stamp from the sample. + hr = sample->GetSampleTime(&sampleTime); + if (FAILED(hr)) { + // No time stamp. Use the current presentation time. + if (m_clock) + m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime); + + hr = S_OK; // (Not an error condition.) + } + + notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff)); + } + return hr; +} + +HRESULT EVRCustomPresenter::cancelFrameStep() +{ + FrameStepState oldState = m_frameStep.state; + + m_frameStep.state = FrameStepNone; + m_frameStep.steps = 0; + m_frameStep.sampleNoRef = 0; + // Don't clear the frame-step queue yet, because we might frame step again. 
+ + if (oldState > FrameStepNone && oldState < FrameStepComplete) { + // We were in the middle of frame-stepping when it was cancelled. + // Notify the EVR. + notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled + } + return S_OK; +} + +HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType) +{ + HRESULT hr = S_OK; + + RECT rcOutput; + ZeroMemory(&rcOutput, sizeof(rcOutput)); + + MFVideoArea displayArea; + ZeroMemory(&displayArea, sizeof(displayArea)); + + IMFMediaType *mtOptimal = NULL; + + UINT64 size; + int width; + int height; + + // Clone the proposed type. + + hr = MFCreateMediaType(&mtOptimal); + if (FAILED(hr)) + goto done; + + hr = proposedType->CopyAllItems(mtOptimal); + if (FAILED(hr)) + goto done; + + // Modify the new type. + + // Set the pixel aspect ratio (PAR) to 1:1 (see assumption #1, above) + // The ratio is packed in a single UINT64. A helper function is normally available for + // that (MFSetAttributeRatio) but it's not correctly defined in MinGW 4.9.1. + hr = mtOptimal->SetUINT64(MF_MT_PIXEL_ASPECT_RATIO, (((UINT64) 1) << 32) | ((UINT64) 1)); + if (FAILED(hr)) + goto done; + + hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size); + width = int(HI32(size)); + height = int(LO32(size)); + rcOutput.left = 0; + rcOutput.top = 0; + rcOutput.right = width; + rcOutput.bottom = height; + + // Set the geometric aperture, and disable pan/scan. + displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom); + + hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE); + if (FAILED(hr)) + goto done; + + hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&displayArea, sizeof(displayArea)); + if (FAILED(hr)) + goto done; + + // Set the pan/scan aperture and the minimum display aperture. We don't care + // about them per se, but the mixer will reject the type if these exceed the + // frame dimentions. 
+ hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&displayArea, sizeof(displayArea)); + if (FAILED(hr)) + goto done; + + hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&displayArea, sizeof(displayArea)); + if (FAILED(hr)) + goto done; + + // Return the pointer to the caller. + *optimalType = mtOptimal; + (*optimalType)->AddRef(); + +done: + qt_evr_safe_release(&mtOptimal); + return hr; + +} + +HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) +{ + // Note: mediaType can be NULL (to clear the type) + + // Clearing the media type is allowed in any state (including shutdown). + if (!mediaType) { + qt_evr_safe_release(&m_mediaType); + releaseResources(); + m_D3DPresentEngine->setSurfaceFormat(QVideoSurfaceFormat()); + return S_OK; + } + + MFRatio fps = { 0, 0 }; + QList<IMFSample*> sampleQueue; + + IMFSample *sample = NULL; + + QVideoSurfaceFormat surfaceFormat; + UINT64 size; + int width; + int height; + + // Cannot set the media type after shutdown. + HRESULT hr = checkShutdown(); + if (FAILED(hr)) + goto done; + + // Check if the new type is actually different. + // Note: This function safely handles NULL input parameters. + if (qt_evr_areMediaTypesEqual(m_mediaType, mediaType)) + goto done; // Nothing more to do. + + // We're really changing the type. First get rid of the old type. + qt_evr_safe_release(&m_mediaType); + releaseResources(); + + // Initialize the presenter engine with the new media type. + // The presenter engine allocates the samples. + + hr = m_D3DPresentEngine->createVideoSamples(mediaType, sampleQueue); + if (FAILED(hr)) + goto done; + + // Mark each sample with our token counter. If this batch of samples becomes + // invalid, we increment the counter, so that we know they should be discarded. 
+ for (int i = 0; i < sampleQueue.size(); ++i) { + sample = sampleQueue.at(i); + + hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter); + if (FAILED(hr)) + goto done; + } + + // Add the samples to the sample pool. + hr = m_samplePool.initialize(sampleQueue); + if (FAILED(hr)) + goto done; + + // Set the frame rate on the scheduler. + if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) { + m_scheduler.setFrameRate(fps); + } else { + // NOTE: The mixer's proposed type might not have a frame rate, in which case + // we'll use an arbitrary default. (Although it's unlikely the video source + // does not have a frame rate.) + m_scheduler.setFrameRate(g_DefaultFrameRate); + } + + // Store the media type. + m_mediaType = mediaType; + m_mediaType->AddRef(); + + // Create the surface format + hr = m_mediaType->GetUINT64(MF_MT_FRAME_SIZE, &size); + width = int(HI32(size)); + height = int(LO32(size)); + surfaceFormat = QVideoSurfaceFormat(QSize(width, height), + pixelFormatFromMediaType(m_mediaType), + QAbstractVideoBuffer::GLTextureHandle); + m_D3DPresentEngine->setSurfaceFormat(surfaceFormat); + +done: + if (FAILED(hr)) + releaseResources(); + return hr; +} + +HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed) +{ + D3DFORMAT d3dFormat = D3DFMT_UNKNOWN; + BOOL compressed = FALSE; + MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown; + MFVideoArea videoCropArea; + UINT32 width = 0, height = 0; + + // Validate the format. + HRESULT hr = qt_evr_getFourCC(proposed, (DWORD*)&d3dFormat); + if (FAILED(hr)) + return hr; + + // Only accept pixel formats supported by the video surface + if (!m_supportedGLFormats.contains((DWORD)d3dFormat)) + return MF_E_INVALIDMEDIATYPE; + + // Reject compressed media types. 
+ hr = proposed->IsCompressedFormat(&compressed); + if (FAILED(hr)) + return hr; + + if (compressed) + return MF_E_INVALIDMEDIATYPE; + + // The D3DPresentEngine checks whether the format can be used as + // the back-buffer format for the swap chains. + hr = m_D3DPresentEngine->checkFormat(d3dFormat); + if (FAILED(hr)) + return hr; + + // Reject interlaced formats. + hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlaceMode); + if (FAILED(hr)) + return hr; + + if (interlaceMode != MFVideoInterlace_Progressive) + return MF_E_INVALIDMEDIATYPE; + + hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height); + if (FAILED(hr)) + return hr; + + // Validate the various apertures (cropping regions) against the frame size. + // Any of these apertures may be unspecified in the media type, in which case + // we ignore it. We just want to reject invalid apertures. + + if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) + hr = qt_evr_validateVideoArea(videoCropArea, width, height); + + if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) + hr = qt_evr_validateVideoArea(videoCropArea, width, height); + + if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) + hr = qt_evr_validateVideoArea(videoCropArea, width, height); + + return hr; +} + +void EVRCustomPresenter::processOutputLoop() +{ + HRESULT hr = S_OK; + + // Process as many samples as possible. + while (hr == S_OK) { + // If the mixer doesn't have a new input sample, break from the loop. + if (!m_sampleNotify) { + hr = MF_E_TRANSFORM_NEED_MORE_INPUT; + break; + } + + // Try to process a sample. + hr = processOutput(); + + // NOTE: ProcessOutput can return S_FALSE to indicate it did not + // process a sample. If so, break out of the loop. 
+ } + + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + // The mixer has run out of input data. Check for end-of-stream. + checkEndOfStream(); + } +} + +HRESULT EVRCustomPresenter::processOutput() +{ + HRESULT hr = S_OK; + DWORD status = 0; + LONGLONG mixerStartTime = 0, mixerEndTime = 0; + MFTIME systemTime = 0; + BOOL repaint = m_repaint; // Temporarily store this state flag. + + MFT_OUTPUT_DATA_BUFFER dataBuffer; + ZeroMemory(&dataBuffer, sizeof(dataBuffer)); + + IMFSample *sample = NULL; + + // If the clock is not running, we present the first sample, + // and then don't present any more until the clock starts. + + if ((m_renderState != RenderStarted) && !m_repaint && m_prerolled) + return S_FALSE; + + // Make sure we have a pointer to the mixer. + if (!m_mixer) + return MF_E_INVALIDREQUEST; + + // Try to get a free sample from the video sample pool. + hr = m_samplePool.getSample(&sample); + if (hr == MF_E_SAMPLEALLOCATOR_EMPTY) { + // No free samples. Try again when a sample is released. + return S_FALSE; + } else if (FAILED(hr)) { + return hr; + } + + // From now on, we have a valid video sample pointer, where the mixer will + // write the video data. + + if (m_repaint) { + // Repaint request. Ask the mixer for the most recent sample. + setDesiredSampleTime(sample, m_scheduler.lastSampleTime(), m_scheduler.frameDuration()); + + m_repaint = false; // OK to clear this flag now. + } else { + // Not a repaint request. Clear the desired sample time; the mixer will + // give us the next frame in the stream. + clearDesiredSampleTime(sample); + + if (m_clock) { + // Latency: Record the starting time for ProcessOutput. + m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime); + } + } + + // Now we are ready to get an output sample from the mixer. + dataBuffer.dwStreamID = 0; + dataBuffer.pSample = sample; + dataBuffer.dwStatus = 0; + + hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status); + + if (FAILED(hr)) { + // Return the sample to the pool. 
+ HRESULT hr2 = m_samplePool.returnSample(sample); + if (FAILED(hr2)) { + hr = hr2; + goto done; + } + // Handle some known error codes from ProcessOutput. + if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) { + // The mixer's format is not set. Negotiate a new format. + hr = renegotiateMediaType(); + } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + // There was a dynamic media type change. Clear our media type. + setMediaType(NULL); + } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + // The mixer needs more input. + // We have to wait for the mixer to get more input. + m_sampleNotify = false; + } + } else { + // We got an output sample from the mixer. + + if (m_clock && !repaint) { + // Latency: Record the ending time for the ProcessOutput operation, + // and notify the EVR of the latency. + + m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime); + + LONGLONG latencyTime = mixerEndTime - mixerStartTime; + notifyEvent(EC_PROCESSING_LATENCY, (LONG_PTR)&latencyTime, 0); + } + + // Set up notification for when the sample is released. + hr = trackSample(sample); + if (FAILED(hr)) + goto done; + + // Schedule the sample. + if ((m_frameStep.state == FrameStepNone) || repaint) { + hr = deliverSample(sample, repaint); + if (FAILED(hr)) + goto done; + } else { + // We are frame-stepping (and this is not a repaint request). + hr = deliverFrameStepSample(sample); + if (FAILED(hr)) + goto done; + } + + m_prerolled = true; // We have presented at least one sample now. + } + +done: + qt_evr_safe_release(&sample); + + // Important: Release any events returned from the ProcessOutput method. + qt_evr_safe_release(&dataBuffer.pEvents); + return hr; +} + +HRESULT EVRCustomPresenter::deliverSample(IMFSample *sample, bool repaint) +{ + // If we are not actively playing, OR we are scrubbing (rate = 0) OR this is a + // repaint request, then we need to present the sample immediately. Otherwise, + // schedule it normally. 
+ + bool presentNow = ((m_renderState != RenderStarted) || isScrubbing() || repaint); + + HRESULT hr = m_scheduler.scheduleSample(sample, presentNow); + + if (FAILED(hr)) { + // Notify the EVR that we have failed during streaming. The EVR will notify the + // pipeline. + + notifyEvent(EC_ERRORABORT, hr, 0); + } + + return hr; +} + +HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample) +{ + HRESULT hr = S_OK; + IUnknown *unk = NULL; + + // For rate 0, discard any sample that ends earlier than the clock time. + if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock, sample)) { + // Discard this sample. + } else if (m_frameStep.state >= FrameStepScheduled) { + // A frame was already submitted. Put this sample on the frame-step queue, + // in case we are asked to step to the next frame. If frame-stepping is + // cancelled, this sample will be processed normally. + sample->AddRef(); + m_frameStep.samples.append(sample); + } else { + // We're ready to frame-step. + + // Decrement the number of steps. + if (m_frameStep.steps > 0) + m_frameStep.steps--; + + if (m_frameStep.steps > 0) { + // This is not the last step. Discard this sample. + } else if (m_frameStep.state == FrameStepWaitingStart) { + // This is the right frame, but the clock hasn't started yet. Put the + // sample on the frame-step queue. When the clock starts, the sample + // will be processed. + sample->AddRef(); + m_frameStep.samples.append(sample); + } else { + // This is the right frame *and* the clock has started. Deliver this sample. + hr = deliverSample(sample, false); + if (FAILED(hr)) + goto done; + + // Query for IUnknown so that we can identify the sample later. + // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown. + hr = sample->QueryInterface(IID_PPV_ARGS(&unk)); + if (FAILED(hr)) + goto done; + + m_frameStep.sampleNoRef = (DWORD_PTR)unk; // No add-ref. 
+ + // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the + // sample from invoking the OnSampleFree callback after the sample is presented. + // We use this IUnknown pointer purely to identify the sample later; we never + // attempt to dereference the pointer. + + m_frameStep.state = FrameStepScheduled; + } + } +done: + qt_evr_safe_release(&unk); + return hr; +} + +HRESULT EVRCustomPresenter::trackSample(IMFSample *sample) +{ + IMFTrackedSample *tracked = NULL; + + HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked)); + + if (SUCCEEDED(hr)) + hr = tracked->SetAllocator(&m_sampleFreeCB, NULL); + + qt_evr_safe_release(&tracked); + return hr; +} + +void EVRCustomPresenter::releaseResources() +{ + // Increment the token counter to indicate that all existing video samples + // are "stale." As these samples get released, we'll dispose of them. + // + // Note: The token counter is required because the samples are shared + // between more than one thread, and they are returned to the presenter + // through an asynchronous callback (onSampleFree). Without the token, we + // might accidentally re-use a stale sample after the ReleaseResources + // method returns. + + m_tokenCounter++; + + flush(); + + m_samplePool.clear(); + + m_D3DPresentEngine->releaseResources(); +} + +HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result) +{ + IUnknown *object = NULL; + IMFSample *sample = NULL; + IUnknown *unk = NULL; + UINT32 token; + + // Get the sample from the async result object. + HRESULT hr = result->GetObject(&object); + if (FAILED(hr)) + goto done; + + hr = object->QueryInterface(IID_PPV_ARGS(&sample)); + if (FAILED(hr)) + goto done; + + // If this sample was submitted for a frame-step, the frame step operation + // is complete. + + if (m_frameStep.state == FrameStepScheduled) { + // Query the sample for IUnknown and compare it to our cached value. 
+ hr = sample->QueryInterface(IID_PPV_ARGS(&unk)); + if (FAILED(hr)) + goto done; + + if (m_frameStep.sampleNoRef == (DWORD_PTR)unk) { + // Notify the EVR. + hr = completeFrameStep(sample); + if (FAILED(hr)) + goto done; + } + + // Note: Although object is also an IUnknown pointer, it is not + // guaranteed to be the exact pointer value returned through + // QueryInterface. Therefore, the second QueryInterface call is + // required. + } + + m_mutex.lock(); + + token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1); + + if (token == m_tokenCounter) { + // Return the sample to the sample pool. + hr = m_samplePool.returnSample(sample); + if (SUCCEEDED(hr)) { + // A free sample is available. Process more data if possible. + processOutputLoop(); + } + } + + m_mutex.unlock(); + +done: + if (FAILED(hr)) + notifyEvent(EC_ERRORABORT, hr, 0); + qt_evr_safe_release(&object); + qt_evr_safe_release(&sample); + qt_evr_safe_release(&unk); + return hr; +} + +void EVRCustomPresenter::startSurface() +{ + if (m_D3DPresentEngine) + m_D3DPresentEngine->start(); +} + +void EVRCustomPresenter::stopSurface() +{ + if (m_D3DPresentEngine) + m_D3DPresentEngine->stop(); +} + +float EVRCustomPresenter::getMaxRate(bool thin) +{ + // Non-thinned: + // If we have a valid frame rate and a monitor refresh rate, the maximum + // playback rate is equal to the refresh rate. Otherwise, the maximum rate + // is unbounded (FLT_MAX). + + // Thinned: The maximum rate is unbounded. 
+ + float maxRate = FLT_MAX; + MFRatio fps = { 0, 0 }; + UINT monitorRateHz = 0; + + if (!thin && m_mediaType) { + qt_evr_getFrameRate(m_mediaType, &fps); + monitorRateHz = m_D3DPresentEngine->refreshRate(); + + if (fps.Denominator && fps.Numerator && monitorRateHz) { + // Max Rate = Refresh Rate / Frame Rate + maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator); + } + } + + return maxRate; +} + +HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration) +{ + if (!sample) + return E_POINTER; + + HRESULT hr = S_OK; + IMFDesiredSample *desired = NULL; + + hr = sample->QueryInterface(IID_PPV_ARGS(&desired)); + if (SUCCEEDED(hr)) + desired->SetDesiredSampleTimeAndDuration(sampleTime, duration); + + qt_evr_safe_release(&desired); + return hr; +} + +HRESULT clearDesiredSampleTime(IMFSample *sample) +{ + if (!sample) + return E_POINTER; + + HRESULT hr = S_OK; + + IMFDesiredSample *desired = NULL; + IUnknown *unkSwapChain = NULL; + + // We store some custom attributes on the sample, so we need to cache them + // and reset them. + // + // This works around the fact that IMFDesiredSample::Clear() removes all of the + // attributes from the sample. 
+ + UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1); + + sample->GetUnknown(MFSamplePresenter_SampleSwapChain, IID_IUnknown, (void**)&unkSwapChain); + + hr = sample->QueryInterface(IID_PPV_ARGS(&desired)); + if (SUCCEEDED(hr)) { + desired->Clear(); + + hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter); + if (FAILED(hr)) + goto done; + + if (unkSwapChain) { + hr = sample->SetUnknown(MFSamplePresenter_SampleSwapChain, unkSwapChain); + if (FAILED(hr)) + goto done; + } + } + +done: + qt_evr_safe_release(&unkSwapChain); + qt_evr_safe_release(&desired); + return hr; +} + +HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect) +{ + if (!mixer) + return E_POINTER; + + IMFAttributes *attributes = NULL; + + HRESULT hr = mixer->GetAttributes(&attributes); + if (SUCCEEDED(hr)) { + hr = attributes->SetBlob(video_ZOOM_RECT, (const UINT8*)&sourceRect, sizeof(sourceRect)); + attributes->Release(); + } + return hr; +} + +DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat) +{ + DWORD fourCC = 0; + switch (pixelFormat) { + case QVideoFrame::Format_ARGB32: + case QVideoFrame::Format_ARGB32_Premultiplied: + fourCC = MFVideoFormat_ARGB32.Data1; + break; + case QVideoFrame::Format_RGB32: + fourCC = MFVideoFormat_RGB32.Data1; + break; + case QVideoFrame::Format_RGB24: + fourCC = MFVideoFormat_RGB24.Data1; + break; + case QVideoFrame::Format_RGB565: + fourCC = MFVideoFormat_RGB565.Data1; + break; + case QVideoFrame::Format_RGB555: + fourCC = MFVideoFormat_RGB555.Data1; + break; + case QVideoFrame::Format_AYUV444: + case QVideoFrame::Format_AYUV444_Premultiplied: + fourCC = MFVideoFormat_AYUV.Data1; + break; + case QVideoFrame::Format_YUV420P: + fourCC = MFVideoFormat_I420.Data1; + break; + case QVideoFrame::Format_UYVY: + fourCC = MFVideoFormat_UYVY.Data1; + break; + case QVideoFrame::Format_YV12: + fourCC = MFVideoFormat_YV12.Data1; + break; + case QVideoFrame::Format_NV12: + 
fourCC = MFVideoFormat_NV12.Data1; + break; + default: + break; + } + return fourCC; +} + +static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type) +{ + GUID majorType; + if (FAILED(type->GetMajorType(&majorType))) + return QVideoFrame::Format_Invalid; + if (majorType != MFMediaType_Video) + return QVideoFrame::Format_Invalid; + + GUID subType; + if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subType))) + return QVideoFrame::Format_Invalid; + + if (subType == MFVideoFormat_RGB32) + return QVideoFrame::Format_RGB32; + + return QVideoFrame::Format_Invalid; +} diff --git a/src/plugins/common/evr/evrcustompresenter.h b/src/plugins/common/evr/evrcustompresenter.h new file mode 100644 index 000000000..b9067d9ae --- /dev/null +++ b/src/plugins/common/evr/evrcustompresenter.h @@ -0,0 +1,358 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. 
Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef EVRCUSTOMPRESENTER_H +#define EVRCUSTOMPRESENTER_H + +#include <QObject> +#include <qmutex.h> +#include <qqueue.h> + +#include "evrdefs.h" + +QT_USE_NAMESPACE + +class D3DPresentEngine; +class QAbstractVideoSurface; + +template<class T> +class AsyncCallback : public IMFAsyncCallback +{ +public: + typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult); + + AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn) + { + } + + // IUnknown + STDMETHODIMP QueryInterface(REFIID iid, void** ppv) + { + if (!ppv) + return E_POINTER; + + if (iid == __uuidof(IUnknown)) { + *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this)); + } else if (iid == __uuidof(IMFAsyncCallback)) { + *ppv = static_cast<IMFAsyncCallback*>(this); + } else { + *ppv = NULL; + return E_NOINTERFACE; + } + AddRef(); + return S_OK; + } + + STDMETHODIMP_(ULONG) AddRef() { + // Delegate to parent class. + return m_parent->AddRef(); + } + STDMETHODIMP_(ULONG) Release() { + // Delegate to parent class. + return m_parent->Release(); + } + + // IMFAsyncCallback methods + STDMETHODIMP GetParameters(DWORD*, DWORD*) + { + // Implementation of this method is optional. 
+ return E_NOTIMPL; + } + + STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) + { + return (m_parent->*m_invokeFn)(asyncResult); + } + + T *m_parent; + InvokeFn m_invokeFn; +}; + +class Scheduler +{ +public: + enum ScheduleEvent + { + Terminate = WM_USER, + Schedule = WM_USER + 1, + Flush = WM_USER + 2 + }; + + Scheduler(); + ~Scheduler(); + + void setCallback(QObject *cb) { + m_CB = cb; + } + + void setFrameRate(const MFRatio &fps); + void setClockRate(float rate) { m_playbackRate = rate; } + + const LONGLONG &lastSampleTime() const { return m_lastSampleTime; } + const LONGLONG &frameDuration() const { return m_perFrameInterval; } + + HRESULT startScheduler(IMFClock *clock); + HRESULT stopScheduler(); + + HRESULT scheduleSample(IMFSample *sample, bool presentNow); + HRESULT processSamplesInQueue(LONG *nextSleep); + HRESULT processSample(IMFSample *sample, LONG *nextSleep); + HRESULT flush(); + + // ThreadProc for the scheduler thread. + static DWORD WINAPI schedulerThreadProc(LPVOID parameter); + +private: + DWORD schedulerThreadProcPrivate(); + + QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented. + + IMFClock *m_clock; // Presentation clock. Can be NULL. + QObject *m_CB; // Weak reference; do not delete. + + DWORD m_threadID; + HANDLE m_schedulerThread; + HANDLE m_threadReadyEvent; + HANDLE m_flushEvent; + + float m_playbackRate; + MFTIME m_perFrameInterval; // Duration of each frame. + LONGLONG m_perFrame_1_4th; // 1/4th of the frame duration. + MFTIME m_lastSampleTime; // Most recent sample time. 
+ + QMutex m_mutex; +}; + +class SamplePool +{ +public: + SamplePool(); + ~SamplePool(); + + HRESULT initialize(QList<IMFSample*> &samples); + HRESULT clear(); + + HRESULT getSample(IMFSample **sample); + HRESULT returnSample(IMFSample *sample); + BOOL areSamplesPending(); + +private: + QMutex m_mutex; + QList<IMFSample*> m_videoSampleQueue; + bool m_initialized; + DWORD m_pending; +}; + +class EVRCustomPresenter + : public QObject + , public IMFVideoDeviceID + , public IMFVideoPresenter // Inherits IMFClockStateSink + , public IMFRateSupport + , public IMFGetService + , public IMFTopologyServiceLookupClient +{ + Q_OBJECT + +public: + // Defines the state of the presenter. + enum RenderState + { + RenderStarted = 1, + RenderStopped, + RenderPaused, + RenderShutdown // Initial state. + }; + + // Defines the presenter's state with respect to frame-stepping. + enum FrameStepState + { + FrameStepNone, // Not frame stepping. + FrameStepWaitingStart, // Frame stepping, but the clock is not started. + FrameStepPending, // Clock is started. Waiting for samples. + FrameStepScheduled, // Submitted a sample for rendering. + FrameStepComplete // Sample was rendered. 
+ }; + + EVRCustomPresenter(); + ~EVRCustomPresenter(); + + // IUnknown methods + STDMETHODIMP QueryInterface(REFIID riid, void ** ppv); + STDMETHODIMP_(ULONG) AddRef(); + STDMETHODIMP_(ULONG) Release(); + + // IMFGetService methods + STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject); + + // IMFVideoPresenter methods + STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param); + STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType); + + // IMFClockStateSink methods + STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset); + STDMETHODIMP OnClockStop(MFTIME systemTime); + STDMETHODIMP OnClockPause(MFTIME systemTime); + STDMETHODIMP OnClockRestart(MFTIME systemTime); + STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate); + + // IMFRateSupport methods + STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate); + STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate); + STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate); + + // IMFVideoDeviceID methods + STDMETHODIMP GetDeviceID(IID* deviceID); + + // IMFTopologyServiceLookupClient methods + STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup); + STDMETHODIMP ReleaseServicePointers(); + + void supportedFormatsChanged(); + void setSurface(QAbstractVideoSurface *surface); + +private Q_SLOTS: + void startSurface(); + void stopSurface(); + +private: + HRESULT checkShutdown() const + { + if (m_renderState == RenderShutdown) + return MF_E_SHUTDOWN; + else + return S_OK; + } + + // The "active" state is started or paused. + inline bool isActive() const + { + return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused)); + } + + // Scrubbing occurs when the frame rate is 0. + inline bool isScrubbing() const { return m_playbackRate == 0.0f; } + + // Send an event to the EVR through its IMediaEventSink interface. 
+ void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2) + { + if (m_mediaEventSink) + m_mediaEventSink->Notify(eventCode, param1, param2); + } + + float getMaxRate(bool thin); + + // Mixer operations + HRESULT configureMixer(IMFTransform *mixer); + + // Formats + HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal); + HRESULT setMediaType(IMFMediaType *mediaType); + HRESULT isMediaTypeSupported(IMFMediaType *mediaType); + + // Message handlers + HRESULT flush(); + HRESULT renegotiateMediaType(); + HRESULT processInputNotify(); + HRESULT beginStreaming(); + HRESULT endStreaming(); + HRESULT checkEndOfStream(); + + // Managing samples + void processOutputLoop(); + HRESULT processOutput(); + HRESULT deliverSample(IMFSample *sample, bool repaint); + HRESULT trackSample(IMFSample *sample); + void releaseResources(); + + // Frame-stepping + HRESULT prepareFrameStep(DWORD steps); + HRESULT startFrameStep(); + HRESULT deliverFrameStepSample(IMFSample *sample); + HRESULT completeFrameStep(IMFSample *sample); + HRESULT cancelFrameStep(); + + // Callback when a video sample is released. + HRESULT onSampleFree(IMFAsyncResult *result); + AsyncCallback<EVRCustomPresenter> m_sampleFreeCB; + + // Holds information related to frame-stepping. + struct FrameStep + { + FrameStep() + : state(FrameStepNone) + , steps(0) + , sampleNoRef(0) + { + } + + FrameStepState state; + QList<IMFSample*> samples; + DWORD steps; + DWORD_PTR sampleNoRef; + }; + + long m_refCount; + + RenderState m_renderState; + FrameStep m_frameStep; + + QMutex m_mutex; + + // Samples and scheduling + Scheduler m_scheduler; // Manages scheduling of samples. + SamplePool m_samplePool; // Pool of allocated samples. + DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples. + + // Rendering state + bool m_sampleNotify; // Did the mixer signal it has an input sample? + bool m_repaint; // Do we need to repaint the last sample? 
+ bool m_prerolled; // Have we presented at least one sample? + bool m_endStreaming; // Did we reach the end of the stream (EOS)? + + MFVideoNormalizedRect m_sourceRect; + float m_playbackRate; + + D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.) + + IMFClock *m_clock; // The EVR's clock. + IMFTransform *m_mixer; // The EVR's mixer. + IMediaEventSink *m_mediaEventSink; // The EVR's event-sink interface. + IMFMediaType *m_mediaType; // Output media type + + QAbstractVideoSurface *m_surface; + QList<DWORD> m_supportedGLFormats; +}; + +bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter); + +#endif // EVRCUSTOMPRESENTER_H diff --git a/src/plugins/common/evr/evrd3dpresentengine.cpp b/src/plugins/common/evr/evrd3dpresentengine.cpp new file mode 100644 index 000000000..61cee88d7 --- /dev/null +++ b/src/plugins/common/evr/evrd3dpresentengine.cpp @@ -0,0 +1,655 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. 
Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "evrd3dpresentengine.h" + +#include "evrhelpers.h" + +#include <qtgui/qguiapplication.h> +#include <qpa/qplatformnativeinterface.h> +#include <qtgui/qopenglcontext.h> +#include <qabstractvideobuffer.h> +#include <QAbstractVideoSurface> +#include <qvideoframe.h> +#include <QDebug> +#include <qopenglcontext.h> +#include <qopenglfunctions.h> +#include <qwindow.h> + +#include <EGL/egl.h> +#include <EGL/eglext.h> +#include <d3d9.h> +#include <dxva2api.h> +#include <WinUser.h> +#include <evr.h> + +static const int PRESENTER_BUFFER_COUNT = 3; + +class TextureVideoBuffer : public QAbstractVideoBuffer +{ +public: + TextureVideoBuffer(GLuint textureId) + : QAbstractVideoBuffer(GLTextureHandle) + , m_textureId(textureId) + {} + + ~TextureVideoBuffer() {} + + MapMode mapMode() const { return NotMapped; } + uchar *map(MapMode, int*, int*) { return 0; } + void unmap() {} + + QVariant handle() const + { + return QVariant::fromValue<unsigned int>(m_textureId); + } + +private: + GLuint m_textureId; +}; + +EGLWrapper::EGLWrapper() +{ +#ifndef QT_OPENGL_ES_2_ANGLE_STATIC + // Resolve the EGL functions we use. When configured for dynamic OpenGL, no + // component in Qt will link to libEGL.lib and libGLESv2.lib. We know + // however that libEGL is loaded for sure, since this is an ANGLE-only path. 
+ +# ifdef QT_DEBUG + HMODULE eglHandle = GetModuleHandle(L"libEGLd.dll"); +# else + HMODULE eglHandle = GetModuleHandle(L"libEGL.dll"); +# endif + + if (!eglHandle) + qWarning("No EGL library loaded"); + + m_eglGetProcAddress = (EglGetProcAddress) GetProcAddress(eglHandle, "eglGetProcAddress"); + m_eglCreatePbufferSurface = (EglCreatePbufferSurface) GetProcAddress(eglHandle, "eglCreatePbufferSurface"); + m_eglDestroySurface = (EglDestroySurface) GetProcAddress(eglHandle, "eglDestroySurface"); + m_eglBindTexImage = (EglBindTexImage) GetProcAddress(eglHandle, "eglBindTexImage"); + m_eglReleaseTexImage = (EglReleaseTexImage) GetProcAddress(eglHandle, "eglReleaseTexImage"); +#else + // Static ANGLE-only build. There is no libEGL.dll in use. + + m_eglGetProcAddress = ::eglGetProcAddress; + m_eglCreatePbufferSurface = ::eglCreatePbufferSurface; + m_eglDestroySurface = ::eglDestroySurface; + m_eglBindTexImage = ::eglBindTexImage; + m_eglReleaseTexImage = ::eglReleaseTexImage; +#endif +} + +__eglMustCastToProperFunctionPointerType EGLWrapper::getProcAddress(const char *procname) +{ + Q_ASSERT(m_eglGetProcAddress); + return m_eglGetProcAddress(procname); +} + +EGLSurface EGLWrapper::createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list) +{ + Q_ASSERT(m_eglCreatePbufferSurface); + return m_eglCreatePbufferSurface(dpy, config, attrib_list); +} + +EGLBoolean EGLWrapper::destroySurface(EGLDisplay dpy, EGLSurface surface) +{ + Q_ASSERT(m_eglDestroySurface); + return m_eglDestroySurface(dpy, surface); +} + +EGLBoolean EGLWrapper::bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer) +{ + Q_ASSERT(m_eglBindTexImage); + return m_eglBindTexImage(dpy, surface, buffer); +} + +EGLBoolean EGLWrapper::releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer) +{ + Q_ASSERT(m_eglReleaseTexImage); + return m_eglReleaseTexImage(dpy, surface, buffer); +} + +D3DPresentEngine::D3DPresentEngine() + : QObject() + , m_mutex(QMutex::Recursive) + , 
m_deviceResetToken(0) + , m_D3D9(0) + , m_device(0) + , m_deviceManager(0) + , m_surface(0) + , m_glContext(0) + , m_offscreenSurface(0) + , m_eglDisplay(0) + , m_eglConfig(0) + , m_eglSurface(0) + , m_glTexture(0) + , m_texture(0) + , m_egl(0) +{ + ZeroMemory(&m_displayMode, sizeof(m_displayMode)); + + HRESULT hr = initializeD3D(); + + if (SUCCEEDED(hr)) { + hr = createD3DDevice(); + if (FAILED(hr)) + qWarning("Failed to create D3D device"); + } else { + qWarning("Failed to initialize D3D"); + } +} + +D3DPresentEngine::~D3DPresentEngine() +{ + qt_evr_safe_release(&m_texture); + qt_evr_safe_release(&m_device); + qt_evr_safe_release(&m_deviceManager); + qt_evr_safe_release(&m_D3D9); + + if (m_eglSurface) { + m_egl->releaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER); + m_egl->destroySurface(m_eglDisplay, m_eglSurface); + m_eglSurface = NULL; + } + if (m_glTexture) { + if (QOpenGLContext *current = QOpenGLContext::currentContext()) + current->functions()->glDeleteTextures(1, &m_glTexture); + else + qWarning() << "D3DPresentEngine: Cannot obtain GL context, unable to delete textures"; + } + + delete m_glContext; + delete m_offscreenSurface; + delete m_egl; +} + +void D3DPresentEngine::start() +{ + QMutexLocker locker(&m_mutex); + + if (!m_surfaceFormat.isValid()) + return; + + if (!m_texture) + createOffscreenTexture(); + + if (m_surface && !m_surface->isActive()) + m_surface->start(m_surfaceFormat); +} + +void D3DPresentEngine::stop() +{ + QMutexLocker locker(&m_mutex); + if (m_surface && m_surface->isActive()) + m_surface->stop(); +} + +HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv) +{ + HRESULT hr = S_OK; + + if (riid == __uuidof(IDirect3DDeviceManager9)) { + if (m_deviceManager == NULL) { + hr = MF_E_UNSUPPORTED_SERVICE; + } else { + *ppv = m_deviceManager; + m_deviceManager->AddRef(); + } + } else { + hr = MF_E_UNSUPPORTED_SERVICE; + } + + return hr; +} + +HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format) +{ + HRESULT hr = 
S_OK;
+
+    UINT uAdapter = D3DADAPTER_DEFAULT;
+    D3DDEVTYPE type = D3DDEVTYPE_HAL;
+
+    D3DDISPLAYMODE mode;
+    D3DDEVICE_CREATION_PARAMETERS params;
+
+    // Our shared D3D/EGL surface only supports RGB32,
+    // reject all other formats
+    if (format != D3DFMT_X8R8G8B8)
+        return MF_E_INVALIDMEDIATYPE;
+
+    if (m_device) {
+        hr = m_device->GetCreationParameters(&params);
+        if (FAILED(hr))
+            return hr;
+
+        uAdapter = params.AdapterOrdinal;
+        type = params.DeviceType;
+    }
+
+    hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
+    if (FAILED(hr))
+        return hr;
+
+    return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE);
+}
+
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
+{
+    if (!format)
+        return MF_E_UNEXPECTED;
+
+    HRESULT hr = S_OK;
+    D3DPRESENT_PARAMETERS pp;
+
+    IDirect3DSwapChain9 *swapChain = NULL;
+    IMFSample *videoSample = NULL;
+
+    QMutexLocker locker(&m_mutex);
+
+    releaseResources();
+
+    // Get the swap chain parameters from the media type.
+    hr = getSwapChainPresentParameters(format, &pp);
+    if (FAILED(hr))
+        goto done;
+
+    // Create the video samples.
+    for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
+        // Create a new swap chain.
+        hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
+        if (FAILED(hr))
+            goto done;
+
+        // Create the video sample from the swap chain.
+        hr = createD3DSample(swapChain, &videoSample);
+        if (FAILED(hr))
+            goto done;
+
+        // Add it to the list.
+        videoSample->AddRef();
+        videoSampleQueue.append(videoSample);
+
+        // Set the swap chain pointer as a custom attribute on the sample. This keeps
+        // a reference count on the swap chain, so that the swap chain is kept alive
+        // for the duration of the sample's lifetime. 
+ hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain); + if (FAILED(hr)) + goto done; + + qt_evr_safe_release(&videoSample); + qt_evr_safe_release(&swapChain); + } + +done: + if (FAILED(hr)) + releaseResources(); + + qt_evr_safe_release(&swapChain); + qt_evr_safe_release(&videoSample); + return hr; +} + +void D3DPresentEngine::releaseResources() +{ +} + +void D3DPresentEngine::presentSample(void *opaque, qint64) +{ + HRESULT hr = S_OK; + + IMFSample *sample = reinterpret_cast<IMFSample*>(opaque); + IMFMediaBuffer* buffer = NULL; + IDirect3DSurface9* surface = NULL; + + if (m_surface && m_surface->isActive()) { + if (sample) { + // Get the buffer from the sample. + hr = sample->GetBufferByIndex(0, &buffer); + if (FAILED(hr)) + goto done; + + // Get the surface from the buffer. + hr = MFGetService(buffer, mr_BUFFER_SERVICE, IID_PPV_ARGS(&surface)); + if (FAILED(hr)) + goto done; + } + + if (surface && updateTexture(surface)) { + QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture), + m_surfaceFormat.frameSize(), + m_surfaceFormat.pixelFormat()); + + // WMF uses 100-nanosecond units, Qt uses microseconds + LONGLONG startTime = -1; + if (SUCCEEDED(sample->GetSampleTime(&startTime))) { + frame.setStartTime(startTime * 0.1); + + LONGLONG duration = -1; + if (SUCCEEDED(sample->GetSampleDuration(&duration))) + frame.setEndTime((startTime + duration) * 0.1); + } + + m_surface->present(frame); + } + } + +done: + qt_evr_safe_release(&surface); + qt_evr_safe_release(&buffer); + qt_evr_safe_release(&sample); +} + +void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface) +{ + QMutexLocker locker(&m_mutex); + m_surface = surface; +} + +void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format) +{ + QMutexLocker locker(&m_mutex); + m_surfaceFormat = format; +} + +void D3DPresentEngine::createOffscreenTexture() +{ + // First, check if we have a context on this thread + QOpenGLContext *currentContext = 
QOpenGLContext::currentContext(); + + if (!currentContext) { + //Create OpenGL context and set share context from surface + QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>()); + if (!shareContext) + return; + + m_offscreenSurface = new QWindow; + m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface); + //Needs geometry to be a valid surface, but size is not important + m_offscreenSurface->setGeometry(-1, -1, 1, 1); + m_offscreenSurface->create(); + + m_glContext = new QOpenGLContext; + m_glContext->setFormat(m_offscreenSurface->requestedFormat()); + m_glContext->setShareContext(shareContext); + + if (!m_glContext->create()) { + delete m_glContext; + delete m_offscreenSurface; + m_glContext = 0; + m_offscreenSurface = 0; + return; + } + + currentContext = m_glContext; + } + + if (m_glContext) + m_glContext->makeCurrent(m_offscreenSurface); + + if (!m_egl) + m_egl = new EGLWrapper; + + QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface(); + m_eglDisplay = static_cast<EGLDisplay*>( + nativeInterface->nativeResourceForContext("eglDisplay", currentContext)); + m_eglConfig = static_cast<EGLConfig*>( + nativeInterface->nativeResourceForContext("eglConfig", currentContext)); + + currentContext->functions()->glGenTextures(1, &m_glTexture); + + int w = m_surfaceFormat.frameWidth(); + int h = m_surfaceFormat.frameHeight(); + bool hasAlpha = currentContext->format().hasAlpha(); + + EGLint attribs[] = { + EGL_WIDTH, w, + EGL_HEIGHT, h, + EGL_TEXTURE_FORMAT, hasAlpha ? 
EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB, + EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, + EGL_NONE + }; + + EGLSurface pbuffer = m_egl->createPbufferSurface(m_eglDisplay, m_eglConfig, attribs); + + HANDLE share_handle = 0; + PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE = + reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_egl->getProcAddress("eglQuerySurfacePointerANGLE")); + Q_ASSERT(eglQuerySurfacePointerANGLE); + eglQuerySurfacePointerANGLE( + m_eglDisplay, + pbuffer, + EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle); + + + m_device->CreateTexture(w, h, 1, + D3DUSAGE_RENDERTARGET, + hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8, + D3DPOOL_DEFAULT, + &m_texture, + &share_handle); + + m_eglSurface = pbuffer; + + if (m_glContext) + m_glContext->doneCurrent(); +} + +bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src) +{ + if (!m_texture) + return false; + + if (m_glContext) + m_glContext->makeCurrent(m_offscreenSurface); + + QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glTexture); + + IDirect3DSurface9 *dest = NULL; + + // Copy the sample surface to the shared D3D/EGL surface + HRESULT hr = m_texture->GetSurfaceLevel(0, &dest); + if (FAILED(hr)) + goto done; + + hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE); + if (FAILED(hr)) + qWarning("Failed to copy D3D surface"); + + if (hr == S_OK) + m_egl->bindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER); + +done: + qt_evr_safe_release(&dest); + + if (m_glContext) + m_glContext->doneCurrent(); + + return SUCCEEDED(hr); +} + +HRESULT D3DPresentEngine::initializeD3D() +{ + HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9); + + if (SUCCEEDED(hr)) + hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager); + + return hr; +} + +HRESULT D3DPresentEngine::createD3DDevice() +{ + HRESULT hr = S_OK; + HWND hwnd = NULL; + UINT uAdapterID = D3DADAPTER_DEFAULT; + DWORD vp = 0; + + D3DCAPS9 ddCaps; + ZeroMemory(&ddCaps, 
sizeof(ddCaps)); + + IDirect3DDevice9Ex* device = NULL; + + // Hold the lock because we might be discarding an existing device. + QMutexLocker locker(&m_mutex); + + if (!m_D3D9 || !m_deviceManager) + return MF_E_NOT_INITIALIZED; + + hwnd = ::GetShellWindow(); + + // Note: The presenter creates additional swap chains to present the + // video frames. Therefore, it does not use the device's implicit + // swap chain, so the size of the back buffer here is 1 x 1. + + D3DPRESENT_PARAMETERS pp; + ZeroMemory(&pp, sizeof(pp)); + + pp.BackBufferWidth = 1; + pp.BackBufferHeight = 1; + pp.BackBufferFormat = D3DFMT_UNKNOWN; + pp.BackBufferCount = 1; + pp.Windowed = TRUE; + pp.SwapEffect = D3DSWAPEFFECT_DISCARD; + pp.BackBufferFormat = D3DFMT_UNKNOWN; + pp.hDeviceWindow = hwnd; + pp.Flags = D3DPRESENTFLAG_VIDEO; + pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT; + + hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps); + if (FAILED(hr)) + goto done; + + if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT) + vp = D3DCREATE_HARDWARE_VERTEXPROCESSING; + else + vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING; + + hr = m_D3D9->CreateDeviceEx( + uAdapterID, + D3DDEVTYPE_HAL, + pp.hDeviceWindow, + vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE, + &pp, + NULL, + &device + ); + if (FAILED(hr)) + goto done; + + hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode); + if (FAILED(hr)) + goto done; + + hr = m_deviceManager->ResetDevice(device, m_deviceResetToken); + if (FAILED(hr)) + goto done; + + qt_evr_safe_release(&m_device); + + m_device = device; + m_device->AddRef(); + +done: + qt_evr_safe_release(&device); + return hr; +} + +HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample) +{ + D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00); + + IDirect3DSurface9* surface = NULL; + IMFSample* sample = NULL; + + // Get the back buffer surface. 
+    HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface);
+    if (FAILED(hr))
+        goto done;
+
+    // Fill it with black.
+    hr = m_device->ColorFill(surface, NULL, clrBlack);
+    if (FAILED(hr))
+        goto done;
+
+    hr = MFCreateVideoSampleFromSurface(surface, &sample);
+    if (FAILED(hr))
+        goto done;
+
+    *videoSample = sample;
+    (*videoSample)->AddRef();
+
+done:
+    qt_evr_safe_release(&surface);
+    qt_evr_safe_release(&sample);
+    return hr;
+}
+
+HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp)
+{
+    ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
+
+    // Get some information about the video format.
+
+    UINT32 width = 0, height = 0;
+
+    HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
+    if (FAILED(hr))
+        return hr;
+
+    DWORD d3dFormat = 0;
+
+    hr = qt_evr_getFourCC(type, &d3dFormat);
+    if (FAILED(hr))
+        return hr;
+
+    ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
+    pp->BackBufferWidth = width;
+    pp->BackBufferHeight = height;
+    pp->Windowed = TRUE;
+    pp->SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp->BackBufferFormat = (D3DFORMAT)d3dFormat;
+    pp->hDeviceWindow = ::GetShellWindow();
+    pp->Flags = D3DPRESENTFLAG_VIDEO;
+    pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+    D3DDEVICE_CREATION_PARAMETERS params;
+    hr = m_device->GetCreationParameters(&params);
+    if (FAILED(hr))
+        return hr;
+
+    if (params.DeviceType != D3DDEVTYPE_HAL)
+        pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER;
+
+    return S_OK;
+}
diff --git a/src/plugins/common/evr/evrd3dpresentengine.h b/src/plugins/common/evr/evrd3dpresentengine.h
new file mode 100644
index 000000000..df695a5dd
--- /dev/null
+++ b/src/plugins/common/evr/evrd3dpresentengine.h
@@ -0,0 +1,146 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 The Qt Company Ltd.
+** Contact: http://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit. 
+** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef EVRD3DPRESENTENGINE_H +#define EVRD3DPRESENTENGINE_H + +#include <QObject> +#include <EGL/egl.h> +#include <QMutex> +#include <d3d9types.h> +#include <QVideoSurfaceFormat> + +struct IDirect3D9Ex; +struct IDirect3DDevice9; +struct IDirect3DDevice9Ex; +struct IDirect3DDeviceManager9; +struct IDirect3DSurface9; +struct IDirect3DTexture9; +struct IMFSample; +struct IMFMediaType; +struct IDirect3DSwapChain9; + +// Randomly generated GUIDs +static const GUID MFSamplePresenter_SampleCounter = +{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } }; + +static const GUID MFSamplePresenter_SampleSwapChain = +{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } }; + +QT_USE_NAMESPACE + +class QAbstractVideoSurface; +class QOpenGLContext; + +class EGLWrapper +{ +public: + EGLWrapper(); + + __eglMustCastToProperFunctionPointerType getProcAddress(const char *procname); + EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list); + EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface); + EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer); + EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer); + +private: + typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname); + typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list); + typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface); + typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer); + typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer); + + EglGetProcAddress m_eglGetProcAddress; + EglCreatePbufferSurface 
m_eglCreatePbufferSurface; + EglDestroySurface m_eglDestroySurface; + EglBindTexImage m_eglBindTexImage; + EglReleaseTexImage m_eglReleaseTexImage; +}; + +class D3DPresentEngine : public QObject +{ + Q_OBJECT +public: + D3DPresentEngine(); + virtual ~D3DPresentEngine(); + + void start(); + void stop(); + + HRESULT getService(REFGUID guidService, REFIID riid, void** ppv); + HRESULT checkFormat(D3DFORMAT format); + + HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue); + void releaseResources(); + + UINT refreshRate() const { return m_displayMode.RefreshRate; } + + void setSurface(QAbstractVideoSurface *surface); + void setSurfaceFormat(const QVideoSurfaceFormat &format); + + void createOffscreenTexture(); + bool updateTexture(IDirect3DSurface9 *src); + +public Q_SLOTS: + void presentSample(void* sample, qint64 llTarget); + +private: + HRESULT initializeD3D(); + HRESULT getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp); + HRESULT createD3DDevice(); + HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample); + + QMutex m_mutex; + + UINT m_deviceResetToken; + D3DDISPLAYMODE m_displayMode; + + IDirect3D9Ex *m_D3D9; + IDirect3DDevice9Ex *m_device; + IDirect3DDeviceManager9 *m_deviceManager; + + QVideoSurfaceFormat m_surfaceFormat; + QAbstractVideoSurface *m_surface; + + QOpenGLContext *m_glContext; + QWindow *m_offscreenSurface; + + EGLDisplay *m_eglDisplay; + EGLConfig *m_eglConfig; + EGLSurface m_eglSurface; + unsigned int m_glTexture; + IDirect3DTexture9 *m_texture; + EGLWrapper *m_egl; +}; + +#endif // EVRD3DPRESENTENGINE_H diff --git a/src/plugins/common/evr/evrdefs.cpp b/src/plugins/common/evr/evrdefs.cpp new file mode 100644 index 000000000..07d1c11eb --- /dev/null +++ b/src/plugins/common/evr/evrdefs.cpp @@ -0,0 +1,42 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. 
+** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "evrdefs.h" + +const CLSID clsid_EnhancedVideoRenderer = { 0xfa10746c, 0x9b63, 0x4b6c, {0xbc, 0x49, 0xfc, 0x30, 0xe, 0xa5, 0xf2, 0x56} }; +const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} }; +const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} }; +const GUID mr_BUFFER_SERVICE = { 0xa562248c, 0x9ac6, 0x4ffc, {0x9f, 0xba, 0x3a, 0xf8, 0xf8, 0xad, 0x1a, 0x4d} }; +const GUID video_ZOOM_RECT = { 0x7aaa1638, 0x1b7f, 0x4c93, {0xbd, 0x89, 0x5b, 0x9c, 0x9f, 0xb6, 0xfc, 0xf0} }; +const GUID iid_IDirect3DDevice9 = { 0xd0223b96, 0xbf7a, 0x43fd, {0x92, 0xbd, 0xa4, 0x3b, 0xd, 0x82, 0xb9, 0xeb} }; +const GUID iid_IDirect3DSurface9 = { 0xcfbaf3a, 0x9ff6, 0x429a, {0x99, 0xb3, 0xa2, 0x79, 0x6a, 0xf8, 0xb8, 0x9b} }; diff --git a/src/plugins/common/evr/evrdefs.h b/src/plugins/common/evr/evrdefs.h index ce6ca6584..3b2c2530a 100644 --- a/src/plugins/common/evr/evrdefs.h +++ b/src/plugins/common/evr/evrdefs.h @@ -36,10 +36,47 @@ #include <d3d9.h> #include <Evr9.h> +#include <evr.h> #include <dxva2api.h> +#include <mfapi.h> +#include <mfidl.h> +#include <Mferror.h> + +extern const CLSID clsid_EnhancedVideoRenderer; +extern const GUID mr_VIDEO_RENDER_SERVICE; +extern const GUID mr_VIDEO_MIXER_SERVICE; +extern const GUID mr_BUFFER_SERVICE; +extern const GUID video_ZOOM_RECT; +extern const GUID iid_IDirect3DDevice9; +extern const GUID iid_IDirect3DSurface9; // The following is required to compile with MinGW +extern "C" { +HRESULT WINAPI MFCreateVideoSampleFromSurface(IUnknown *pUnkSurface, IMFSample **ppSample); +HRESULT WINAPI Direct3DCreate9Ex(UINT SDKVersion, IDirect3D9Ex**); +} + +#ifndef PRESENTATION_CURRENT_POSITION +#define PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff +#endif + +#ifndef MF_E_SHUTDOWN +#define MF_E_SHUTDOWN 
((HRESULT)0xC00D3E85L) +#endif + +#ifndef MF_E_SAMPLEALLOCATOR_EMPTY +#define MF_E_SAMPLEALLOCATOR_EMPTY ((HRESULT)0xC00D4A3EL) +#endif + +#ifndef MF_E_TRANSFORM_STREAM_CHANGE +#define MF_E_TRANSFORM_STREAM_CHANGE ((HRESULT)0xC00D6D61L) +#endif + +#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT +#define MF_E_TRANSFORM_NEED_MORE_INPUT ((HRESULT)0xC00D6D72L) +#endif + #ifdef __GNUC__ typedef struct MFVideoNormalizedRect { float left; @@ -49,6 +86,8 @@ typedef struct MFVideoNormalizedRect { } MFVideoNormalizedRect; #endif +#include <initguid.h> + #ifndef __IMFGetService_INTERFACE_DEFINED__ #define __IMFGetService_INTERFACE_DEFINED__ DEFINE_GUID(IID_IMFGetService, 0xfa993888, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7); @@ -123,5 +162,185 @@ __CRT_UUID_DECL(IMFVideoProcessor, 0x6AB0000C, 0xFECE, 0x4d1f, 0xA2,0xAC, 0xA9,0 #endif #endif // __IMFVideoProcessor_INTERFACE_DEFINED__ +#ifndef __IMFVideoDeviceID_INTERFACE_DEFINED__ +#define __IMFVideoDeviceID_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA); +MIDL_INTERFACE("A38D9567-5A9C-4f3c-B293-8EB415B279BA") +IMFVideoDeviceID : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE GetDeviceID(IID *pDeviceID) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA) +#endif +#endif // __IMFVideoDeviceID_INTERFACE_DEFINED__ + +#ifndef __IMFClockStateSink_INTERFACE_DEFINED__ +#define __IMFClockStateSink_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F); +MIDL_INTERFACE("F6696E82-74F7-4f3d-A178-8A5E09C3659F") +IMFClockStateSink : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) = 0; + virtual HRESULT STDMETHODCALLTYPE OnClockStop(MFTIME hnsSystemTime) = 0; + virtual HRESULT STDMETHODCALLTYPE 
// ---------------------------------------------------------------------------
// Mirror declarations of EVR / Media Foundation COM interfaces.
// Each block is wrapped in the SDK's own __*_INTERFACE_DEFINED__ guard, so it
// only takes effect on toolchains whose platform headers do not already
// declare the interface (presumably MinGW — __CRT_UUID_DECL is the MinGW
// macro that attaches a uuid to a type for __uuidof support; TODO confirm).
// The method order (vtable layout) and the GUIDs must match the Windows SDK
// exactly: do not reorder, rename, or add methods.
// ---------------------------------------------------------------------------

// Remainder of IMFClockStateSink (the interface declaration opens before this
// chunk): presentation-clock state-change callbacks.
OnClockPause(MFTIME hnsSystemTime) = 0;
    virtual HRESULT STDMETHODCALLTYPE OnClockRestart(MFTIME hnsSystemTime) = 0;
    virtual HRESULT STDMETHODCALLTYPE OnClockSetRate(MFTIME hnsSystemTime, float flRate) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F)
#endif
#endif // __IMFClockStateSink_INTERFACE_DEFINED__

#ifndef __IMFVideoPresenter_INTERFACE_DEFINED__
#define __IMFVideoPresenter_INTERFACE_DEFINED__
// Control messages the EVR delivers to its presenter via ProcessMessage().
typedef enum MFVP_MESSAGE_TYPE
{
    MFVP_MESSAGE_FLUSH = 0,
    MFVP_MESSAGE_INVALIDATEMEDIATYPE = 0x1,
    MFVP_MESSAGE_PROCESSINPUTNOTIFY = 0x2,
    MFVP_MESSAGE_BEGINSTREAMING = 0x3,
    MFVP_MESSAGE_ENDSTREAMING = 0x4,
    MFVP_MESSAGE_ENDOFSTREAM = 0x5,
    MFVP_MESSAGE_STEP = 0x6,
    MFVP_MESSAGE_CANCELSTEP = 0x7
} MFVP_MESSAGE_TYPE;

// Presenter interface: receives EVR messages and exposes the media type
// currently being presented. Derives from IMFClockStateSink so the
// presenter also gets clock state notifications.
DEFINE_GUID(IID_IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB);
MIDL_INTERFACE("29AFF080-182A-4a5d-AF3B-448F3A6346CB")
IMFVideoPresenter : public IMFClockStateSink
{
public:
    virtual HRESULT STDMETHODCALLTYPE ProcessMessage(MFVP_MESSAGE_TYPE eMessage, ULONG_PTR ulParam) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentMediaType(IMFVideoMediaType **ppMediaType) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB)
#endif
#endif // __IMFVideoPresenter_INTERFACE_DEFINED__

#ifndef __IMFRateSupport_INTERFACE_DEFINED__
#define __IMFRateSupport_INTERFACE_DEFINED__
// Reports the range of playback rates the object supports (thinned or not).
DEFINE_GUID(IID_IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d);
MIDL_INTERFACE("0a9ccdbc-d797-4563-9667-94ec5d79292d")
IMFRateSupport : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE GetSlowestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetFastestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0;
    virtual HRESULT STDMETHODCALLTYPE IsRateSupported(BOOL fThin, float flRate, float *pflNearestSupportedRate) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d)
#endif
#endif // __IMFRateSupport_INTERFACE_DEFINED__

#ifndef __IMFTopologyServiceLookup_INTERFACE_DEFINED__
#define __IMFTopologyServiceLookup_INTERFACE_DEFINED__
// Direction/scope selector for LookupService() below; each value is defined
// relative to its predecessor, mirroring the SDK header.
typedef enum _MF_SERVICE_LOOKUP_TYPE
{
    MF_SERVICE_LOOKUP_UPSTREAM = 0,
    MF_SERVICE_LOOKUP_UPSTREAM_DIRECT = (MF_SERVICE_LOOKUP_UPSTREAM + 1),
    MF_SERVICE_LOOKUP_DOWNSTREAM = (MF_SERVICE_LOOKUP_UPSTREAM_DIRECT + 1),
    MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT = (MF_SERVICE_LOOKUP_DOWNSTREAM + 1),
    MF_SERVICE_LOOKUP_ALL = (MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT + 1),
    MF_SERVICE_LOOKUP_GLOBAL = (MF_SERVICE_LOOKUP_ALL + 1)
} MF_SERVICE_LOOKUP_TYPE;

// Handed to IMFTopologyServiceLookupClient::InitServicePointers so the
// presenter can obtain interfaces (mixer, clock, event sink) from the EVR.
DEFINE_GUID(IID_IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7);
MIDL_INTERFACE("fa993889-4383-415a-a930-dd472a8cf6f7")
IMFTopologyServiceLookup : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE LookupService(MF_SERVICE_LOOKUP_TYPE Type,
                                                    DWORD dwIndex,
                                                    REFGUID guidService,
                                                    REFIID riid,
                                                    LPVOID *ppvObjects,
                                                    DWORD *pnObjects) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7)
#endif
#endif // __IMFTopologyServiceLookup_INTERFACE_DEFINED__

#ifndef __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__
#define __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__
// Implemented by the presenter; the EVR calls Init/ReleaseServicePointers
// when the presenter is connected to / disconnected from the topology.
DEFINE_GUID(IID_IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7);
MIDL_INTERFACE("fa99388a-4383-415a-a930-dd472a8cf6f7")
IMFTopologyServiceLookupClient : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE InitServicePointers(IMFTopologyServiceLookup *pLookup) = 0;
    virtual HRESULT STDMETHODCALLTYPE ReleaseServicePointers(void) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7)
#endif
#endif // __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__

#ifndef __IMediaEventSink_INTERFACE_DEFINED__
#define __IMediaEventSink_INTERFACE_DEFINED__
// DirectShow-style event sink; the presenter notifies the EVR of events
// (e.g. sample drops) through this interface.
DEFINE_GUID(IID_IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70);
MIDL_INTERFACE("56a868a2-0ad4-11ce-b03a-0020af0ba770")
IMediaEventSink : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE Notify(long EventCode, LONG_PTR EventParam1, LONG_PTR EventParam2) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70)
#endif
#endif // __IMediaEventSink_INTERFACE_DEFINED__

#ifndef __IMFVideoRenderer_INTERFACE_DEFINED__
#define __IMFVideoRenderer_INTERFACE_DEFINED__
// Lets a custom mixer and/or presenter be plugged into the EVR.
DEFINE_GUID(IID_IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD);
MIDL_INTERFACE("DFDFD197-A9CA-43d8-B341-6AF3503792CD")
IMFVideoRenderer : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE InitializeRenderer(IMFTransform *pVideoMixer,
                                                         IMFVideoPresenter *pVideoPresenter) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD)
#endif
#endif // __IMFVideoRenderer_INTERFACE_DEFINED__

#ifndef __IMFTrackedSample_INTERFACE_DEFINED__
#define __IMFTrackedSample_INTERFACE_DEFINED__
// A sample that invokes an async callback when its last reference is
// released — used by sample pools to reclaim buffers.
DEFINE_GUID(IID_IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17);
MIDL_INTERFACE("245BF8E9-0755-40f7-88A5-AE0F18D55E17")
IMFTrackedSample : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE SetAllocator(IMFAsyncCallback *pSampleAllocator, IUnknown *pUnkState) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17)
#endif
#endif // __IMFTrackedSample_INTERFACE_DEFINED__

#ifndef __IMFDesiredSample_INTERFACE_DEFINED__
#define __IMFDesiredSample_INTERFACE_DEFINED__
// Marks a sample with a "desired" time/duration (used during frame stepping
// and scrubbing). Note Set/Clear return void, matching the SDK header.
DEFINE_GUID(IID_IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54);
MIDL_INTERFACE("56C294D0-753E-4260-8D61-A3D8820B1D54")
IMFDesiredSample : public IUnknown
{
public:
    virtual HRESULT STDMETHODCALLTYPE GetDesiredSampleTimeAndDuration(LONGLONG *phnsSampleTime,
                                                                      LONGLONG *phnsSampleDuration) = 0;
    virtual void STDMETHODCALLTYPE SetDesiredSampleTimeAndDuration(LONGLONG hnsSampleTime,
                                                                   LONGLONG hnsSampleDuration) = 0;
    virtual void STDMETHODCALLTYPE Clear( void) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54)
#endif
#endif

#endif // EVRDEFS_H
+** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "evrhelpers.h" + +HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC) +{ + if (!fourCC) + return E_POINTER; + + HRESULT hr = S_OK; + GUID guidSubType = GUID_NULL; + + if (SUCCEEDED(hr)) + hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType); + + if (SUCCEEDED(hr)) + *fourCC = guidSubType.Data1; + + return hr; +} + +bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2) +{ + if (!type1 && !type2) + return true; + else if (!type1 || !type2) + return false; + + DWORD dwFlags = 0; + HRESULT hr = type1->IsEqual(type2, &dwFlags); + + return (hr == S_OK); +} + +HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height) +{ + float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX); + float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY); + + if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) || + ((LONG)fOffsetY + area.Area.cy > (LONG)height) ) + return MF_E_INVALIDMEDIATYPE; + else + return S_OK; +} + +bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample) +{ + if (!sample || !clock) + return false; + + HRESULT hr = S_OK; + MFTIME 
hnsTimeNow = 0; + MFTIME hnsSystemTime = 0; + MFTIME hnsSampleStart = 0; + MFTIME hnsSampleDuration = 0; + + hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime); + + if (SUCCEEDED(hr)) + hr = sample->GetSampleTime(&hnsSampleStart); + + if (SUCCEEDED(hr)) + hr = sample->GetSampleDuration(&hnsSampleDuration); + + if (SUCCEEDED(hr)) { + if (hnsSampleStart + hnsSampleDuration < hnsTimeNow) + return true; + } + + return false; +} diff --git a/src/plugins/common/evr/evrhelpers.h b/src/plugins/common/evr/evrhelpers.h new file mode 100644 index 000000000..3883379a0 --- /dev/null +++ b/src/plugins/common/evr/evrhelpers.h @@ -0,0 +1,85 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. 
These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef EVRHELPERS_H +#define EVRHELPERS_H + +#include "evrdefs.h" + +template<class T> +static inline void qt_evr_safe_release(T **unk) +{ + if (*unk) { + (*unk)->Release(); + *unk = NULL; + } +} + +HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC); + +bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2); + +HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height); + +bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample); + +inline float qt_evr_MFOffsetToFloat(const MFOffset& offset) +{ + return offset.value + (float(offset.fract) / 65536); +} + +inline MFOffset qt_evr_makeMFOffset(float v) +{ + MFOffset offset; + offset.value = short(v); + offset.fract = WORD(65536 * (v-offset.value)); + return offset; +} + +inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height) +{ + MFVideoArea area; + area.OffsetX = qt_evr_makeMFOffset(x); + area.OffsetY = qt_evr_makeMFOffset(y); + area.Area.cx = width; + area.Area.cy = height; + return area; +} + +inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio) +{ + return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator); +} + +#endif // EVRHELPERS_H + diff --git a/src/plugins/common/evr/evrvideowindowcontrol.cpp b/src/plugins/common/evr/evrvideowindowcontrol.cpp index faa23d6e5..dae6583ff 100644 --- a/src/plugins/common/evr/evrvideowindowcontrol.cpp +++ b/src/plugins/common/evr/evrvideowindowcontrol.cpp @@ -65,8 +65,6 @@ bool EvrVideoWindowControl::setEvr(IUnknown *evr) if (!evr) return true; - static const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} }; - 
static const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} }; IMFGetService *service = NULL; if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service))) |