summaryrefslogtreecommitdiffstats
path: root/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp')
-rw-r--r--src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp260
1 files changed, 232 insertions, 28 deletions
diff --git a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
index 4878c55c9..fe07581c9 100644
--- a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
+++ b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
@@ -38,30 +38,106 @@
#include <QtCore/qfunctions_winrt.h>
#include <QtCore/QSize>
+#include <QtCore/QPointer>
#include <QtCore/QVector>
+#include <QVideoFrame>
#include <d3d11.h>
#include <mfapi.h>
#include <wrl.h>
+
+#include "qwinrtcameracontrol.h"
+
+#ifdef Q_OS_WINPHONE
+#include <Windows.Security.ExchangeActiveSyncProvisioning.h>
+using namespace ABI::Windows::Security::ExchangeActiveSyncProvisioning;
+#endif
+
using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
QT_BEGIN_NAMESPACE
+#ifdef Q_OS_WINPHONE
+// Returns true when the blacklist entry is a prefix of the device model name.
+// The entry is a string literal, so its array length n includes the
+// terminating null; only n - 1 characters take part in the comparison.
+template <int n>
+static bool blacklisted(const wchar_t (&blackListName)[n], const HString &deviceModel)
+{
+    quint32 deviceNameLength;
+    const wchar_t *deviceName = deviceModel.GetRawBuffer(&deviceNameLength);
+    if (deviceNameLength < n - 1)
+        return false; // model name shorter than the entry: cannot match
+    return wmemcmp(blackListName, deviceName, n - 1) == 0;
+}
+#endif
+
+// Wraps a Media Foundation 2D buffer as a CPU-mappable QAbstractVideoBuffer.
+// The owning camera control is tracked through a QPointer because the control
+// can be destroyed while a delivered frame is still alive in user code.
+class QWinRTCameraVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+    QWinRTCameraVideoBuffer(IMF2DBuffer *buffer, int size, QWinRTCameraControl *control)
+        : QAbstractVideoBuffer(NoHandle)
+        , buffer(buffer)
+        , currentMode(NotMapped)
+        , size(size)
+        , control(control)
+    {
+        Q_ASSERT(control);
+    }
+
+    ~QWinRTCameraVideoBuffer()
+    {
+        // Ensure the MF buffer lock is released even if the user forgot.
+        unmap();
+    }
+
+    MapMode mapMode() const Q_DECL_OVERRIDE
+    {
+        return currentMode;
+    }
+
+    uchar *map(MapMode mode, int *numBytes, int *bytesPerLine) Q_DECL_OVERRIDE
+    {
+        // Refuse to map when already mapped, when no mapping was requested,
+        // or when the camera control has been destroyed or is not active.
+        // A null QPointer must be rejected here: control->frameMapped()
+        // below would otherwise dereference it and crash.
+        if (currentMode != NotMapped || mode == NotMapped
+                || !control || control->state() != QCamera::ActiveState)
+            return nullptr;
+
+        BYTE *bytes;
+        LONG stride;
+        HRESULT hr = buffer->Lock2D(&bytes, &stride);
+        RETURN_IF_FAILED("Failed to lock camera frame buffer", nullptr);
+        control->frameMapped();
+
+        if (bytesPerLine)
+            *bytesPerLine = stride;
+        if (numBytes)
+            *numBytes = size;
+        currentMode = mode;
+        return bytes;
+    }
+
+    void unmap() Q_DECL_OVERRIDE
+    {
+        if (currentMode == NotMapped)
+            return;
+        HRESULT hr = buffer->Unlock2D();
+        RETURN_VOID_IF_FAILED("Failed to unlock camera frame buffer");
+        currentMode = NotMapped;
+        // The control may have gone away while the frame was mapped.
+        if (control)
+            control->frameUnmapped();
+    }
+
+private:
+    // Declaration order matches the constructor's initializer list so
+    // initialization happens in the order written (avoids -Wreorder).
+    ComPtr<IMF2DBuffer> buffer;
+    MapMode currentMode;
+    int size;
+    QPointer<QWinRTCameraControl> control;
+};
+
class D3DVideoBlitter
{
public:
- D3DVideoBlitter(ID3D11Device *device, ID3D11Texture2D *target)
- : m_d3dDevice(device), m_target(target)
+ D3DVideoBlitter(ID3D11Texture2D *target)
+ : m_target(target)
{
+ Q_ASSERT(target);
+ target->GetDevice(&m_d3dDevice);
+ Q_ASSERT(m_d3dDevice);
HRESULT hr;
- ComPtr<IDXGIResource> targetResource;
- hr = target->QueryInterface(IID_PPV_ARGS(&targetResource));
- Q_ASSERT_SUCCEEDED(hr);
- HANDLE sharedHandle;
- hr = targetResource->GetSharedHandle(&sharedHandle);
- Q_ASSERT_SUCCEEDED(hr);
- hr = m_d3dDevice->OpenSharedResource(sharedHandle, IID_PPV_ARGS(&m_targetTexture));
- Q_ASSERT_SUCCEEDED(hr);
hr = m_d3dDevice.As(&m_videoDevice);
Q_ASSERT_SUCCEEDED(hr);
}
@@ -86,7 +162,7 @@ public:
D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
{ 0 }, desc.Width, desc.Height,
{ 0 }, desc.Width, desc.Height,
- D3D11_VIDEO_USAGE_PLAYBACK_NORMAL
+ D3D11_VIDEO_USAGE_OPTIMAL_SPEED
};
hr = m_videoDevice->CreateVideoProcessorEnumerator(&videoProcessorDesc, &m_videoEnumerator);
RETURN_VOID_IF_FAILED("Failed to create video enumerator");
@@ -100,16 +176,26 @@ public:
if (!m_outputView) {
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputDesc = { D3D11_VPOV_DIMENSION_TEXTURE2D };
hr = m_videoDevice->CreateVideoProcessorOutputView(
- m_targetTexture.Get(), m_videoEnumerator.Get(), &outputDesc, &m_outputView);
+ m_target, m_videoEnumerator.Get(), &outputDesc, &m_outputView);
RETURN_VOID_IF_FAILED("Failed to create video output view");
}
+ ComPtr<IDXGIResource> sourceResource;
+ hr = texture->QueryInterface(IID_PPV_ARGS(&sourceResource));
+ Q_ASSERT_SUCCEEDED(hr);
+ HANDLE sharedHandle;
+ hr = sourceResource->GetSharedHandle(&sharedHandle);
+ Q_ASSERT_SUCCEEDED(hr);
+ ComPtr<ID3D11Texture2D> sharedTexture;
+ hr = m_d3dDevice->OpenSharedResource(sharedHandle, IID_PPV_ARGS(&sharedTexture));
+ Q_ASSERT_SUCCEEDED(hr);
+
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc = {
0, D3D11_VPIV_DIMENSION_TEXTURE2D, { 0, 0 }
};
ComPtr<ID3D11VideoProcessorInputView> inputView;
hr = m_videoDevice->CreateVideoProcessorInputView(
- texture, m_videoEnumerator.Get(), &inputViewDesc, &inputView);
+ sharedTexture.Get(), m_videoEnumerator.Get(), &inputViewDesc, &inputView);
RETURN_VOID_IF_FAILED("Failed to create video input view");
ComPtr<ID3D11DeviceContext> context;
@@ -127,7 +213,6 @@ public:
private:
ComPtr<ID3D11Device> m_d3dDevice;
- ComPtr<ID3D11Texture2D> m_targetTexture;
ID3D11Texture2D *m_target;
ComPtr<ID3D11VideoDevice> m_videoDevice;
ComPtr<ID3D11VideoProcessorEnumerator> m_videoEnumerator;
@@ -143,11 +228,73 @@ public:
ComPtr<IMF2DBuffer> buffers[CAMERA_SAMPLE_QUEUE_SIZE];
QAtomicInteger<quint16> writeIndex;
QAtomicInteger<quint16> readIndex;
+ QVideoFrame::PixelFormat cameraSampleformat;
+ int cameraSampleSize;
+ uint videoProbesCounter;
+ bool getCameraSampleInfo(const ComPtr<IMF2DBuffer> &buffer,
+ QWinRTAbstractVideoRendererControl::BlitMode *mode);
+ ComPtr<IMF2DBuffer> dequeueBuffer();
};
+// Probe one camera sample to learn the stream's properties: records the
+// QVideoFrame pixel format and the contiguous byte size of the buffer, and
+// downgrades *mode to the MediaFoundation blit path when the backing D3D
+// texture cannot be shared between devices. Returns false when the sample
+// is unusable for rendering (no texture backing or unsupported format).
+bool QWinRTCameraVideoRendererControlPrivate::getCameraSampleInfo(const ComPtr<IMF2DBuffer> &buffer,
+ QWinRTAbstractVideoRendererControl::BlitMode *mode)
+{
+ Q_ASSERT(mode);
+ ComPtr<ID3D11Texture2D> sourceTexture;
+ ComPtr<IMFDXGIBuffer> dxgiBuffer;
+ HRESULT hr = buffer.As(&dxgiBuffer);
+ Q_ASSERT_SUCCEEDED(hr);
+ // Not every sample is backed by a DXGI texture; treat that as unusable.
+ hr = dxgiBuffer->GetResource(IID_PPV_ARGS(&sourceTexture));
+ if (FAILED(hr)) {
+ qErrnoWarning(hr, "The video frame does not support texture output");
+ cameraSampleformat = QVideoFrame::Format_Invalid;
+ return false;
+ }
+ D3D11_TEXTURE2D_DESC desc;
+ sourceTexture->GetDesc(&desc);
+
+ // A texture without the SHARED misc flag cannot be opened on another D3D
+ // device, so direct GPU blitting is impossible; fall back to the software
+ // (MediaFoundation) path.
+ if (!(desc.MiscFlags & D3D11_RESOURCE_MISC_SHARED))
+ *mode = QWinRTAbstractVideoRendererControl::MediaFoundation;
+
+ // Map the DXGI texture format to the QVideoFrame pixel format delivered
+ // to probes and surfaces.
+ switch (desc.Format) {
+ case DXGI_FORMAT_R8G8B8A8_TYPELESS:
+ cameraSampleformat = QVideoFrame::Format_ARGB32;
+ break;
+ case DXGI_FORMAT_NV12:
+ cameraSampleformat = QVideoFrame::Format_NV12;
+ break;
+ default:
+ cameraSampleformat = QVideoFrame::Format_Invalid;
+ qErrnoWarning("Unsupported camera probe format.");
+ return false;
+ }
+ // Cache the mapped byte size; QWinRTCameraVideoBuffer reports it from map().
+ DWORD pcbLength;
+ hr = buffer->GetContiguousLength(&pcbLength);
+ Q_ASSERT_SUCCEEDED(hr);
+ cameraSampleSize = pcbLength;
+ return true;
+}
+
QWinRTCameraVideoRendererControl::QWinRTCameraVideoRendererControl(const QSize &size, QObject *parent)
: QWinRTAbstractVideoRendererControl(size, parent), d_ptr(new QWinRTCameraVideoRendererControlPrivate)
{
+ Q_D(QWinRTCameraVideoRendererControl);
+ // Format_User serves as a "not probed yet" sentinel; the first sample
+ // queued via queueBuffer() replaces it with the real stream format.
+ d->cameraSampleformat = QVideoFrame::Format_User;
+ d->videoProbesCounter = 0;
+
+#ifdef Q_OS_WINPHONE
+ // Workaround for certain devices which fail to blit.
+ ComPtr<IEasClientDeviceInformation> deviceInfo;
+ HRESULT hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Security_ExchangeActiveSyncProvisioning_EasClientDeviceInformation).Get(),
+ &deviceInfo);
+ Q_ASSERT_SUCCEEDED(hr);
+ HString deviceModel;
+ hr = deviceInfo->get_SystemProductName(deviceModel.GetAddressOf());
+ Q_ASSERT_SUCCEEDED(hr);
+ // Known-bad models are forced onto the slower MediaFoundation path up
+ // front; everything else starts with direct GPU blitting.
+ const bool blacklist = blacklisted(L"RM-1045", deviceModel) // Lumia 930
+ || blacklisted(L"RM-937", deviceModel); // Lumia 1520
+ setBlitMode(blacklist ? MediaFoundation : DirectVideo);
+#endif
}
QWinRTCameraVideoRendererControl::~QWinRTCameraVideoRendererControl()
@@ -158,22 +305,15 @@ QWinRTCameraVideoRendererControl::~QWinRTCameraVideoRendererControl()
bool QWinRTCameraVideoRendererControl::render(ID3D11Texture2D *target)
{
Q_D(QWinRTCameraVideoRendererControl);
-
- const quint16 readIndex = d->readIndex;
- if (readIndex == d->writeIndex) {
+ ComPtr<IMF2DBuffer> buffer = d->dequeueBuffer();
+ if (!buffer) {
emit bufferRequested();
return false;
}
- HRESULT hr;
- ComPtr<IMF2DBuffer> buffer = d->buffers[readIndex];
- Q_ASSERT(buffer);
- d->buffers[readIndex].Reset();
- d->readIndex = (readIndex + 1) % CAMERA_SAMPLE_QUEUE_SIZE;
-
ComPtr<ID3D11Texture2D> sourceTexture;
ComPtr<IMFDXGIBuffer> dxgiBuffer;
- hr = buffer.As(&dxgiBuffer);
+ HRESULT hr = buffer.As(&dxgiBuffer);
Q_ASSERT_SUCCEEDED(hr);
hr = dxgiBuffer->GetResource(IID_PPV_ARGS(&sourceTexture));
if (FAILED(hr)) {
@@ -181,10 +321,8 @@ bool QWinRTCameraVideoRendererControl::render(ID3D11Texture2D *target)
return false;
}
- ComPtr<ID3D11Device> device;
- sourceTexture->GetDevice(&device);
- if (!d->blitter || d->blitter->device() != device.Get() || d->blitter->target() != target)
- d->blitter.reset(new D3DVideoBlitter(device.Get(), target));
+ if (!d->blitter || d->blitter->target() != target)
+ d->blitter.reset(new D3DVideoBlitter(target));
d->blitter->blit(sourceTexture.Get());
@@ -192,15 +330,68 @@ bool QWinRTCameraVideoRendererControl::render(ID3D11Texture2D *target)
return true;
}
+// Pops the oldest queued camera sample and hands it out wrapped in a
+// QVideoFrame. A request for the next buffer is emitted in every case so the
+// capture pipeline keeps flowing; returns false when nothing was delivered.
+bool QWinRTCameraVideoRendererControl::dequeueFrame(QVideoFrame *frame)
+{
+    Q_ASSERT(frame);
+    Q_D(QWinRTCameraVideoRendererControl);
+
+    const ComPtr<IMF2DBuffer> sample = d->dequeueBuffer();
+    const bool deliverable = sample && d->cameraSampleformat != QVideoFrame::Format_Invalid;
+    if (deliverable) {
+        QWinRTCameraControl *camera = static_cast<QWinRTCameraControl *>(parent());
+        *frame = QVideoFrame(new QWinRTCameraVideoBuffer(sample.Get(), d->cameraSampleSize, camera),
+                             size(), d->cameraSampleformat);
+    }
+    emit bufferRequested();
+    return deliverable;
+}
+
void QWinRTCameraVideoRendererControl::queueBuffer(IMF2DBuffer *buffer)
{
Q_D(QWinRTCameraVideoRendererControl);
Q_ASSERT(buffer);
+
+ // Very first sample: probe it for pixel format / size and let the probe
+ // result choose the blit path (GPU vs. MediaFoundation).
+ if (d->cameraSampleformat == QVideoFrame::Format_User) {
+ BlitMode mode = blitMode();
+ d->getCameraSampleInfo(buffer, &mode);
+ setBlitMode(mode);
+ }
+
+ // Feed any attached media probes a wrapped view of the frame before it is
+ // queued for rendering.
+ if (d->videoProbesCounter > 0 && d->cameraSampleformat != QVideoFrame::Format_Invalid) {
+ QWinRTCameraVideoBuffer *videoBuffer = new QWinRTCameraVideoBuffer(buffer,
+ d->cameraSampleSize,
+ static_cast<QWinRTCameraControl *>(parent()));
+ QVideoFrame frame(videoBuffer, size(), d->cameraSampleformat);
+ emit videoFrameProbed(frame);
+ }
+
+ // Producer side of the ring: store first, then publish by advancing
+ // writeIndex — this order is what keeps the reader safe.
const quint16 writeIndex = (d->writeIndex + 1) % CAMERA_SAMPLE_QUEUE_SIZE;
if (d->readIndex == writeIndex) // Drop new sample if queue is full
return;
d->buffers[d->writeIndex] = buffer;
d->writeIndex = writeIndex;
+
+ // With no surface attached nothing consumes the queue; drain one slot and
+ // immediately ask for the next sample so capture does not stall.
+ if (!surface()) {
+ d->dequeueBuffer();
+ emit bufferRequested();
+ }
+}
+
+// Consumer side of the sample ring. The queue is empty exactly when the two
+// cursors coincide; only this routine advances readIndex.
+ComPtr<IMF2DBuffer> QWinRTCameraVideoRendererControlPrivate::dequeueBuffer()
+{
+    const quint16 slot = readIndex;
+    if (slot == writeIndex)
+        return nullptr;
+
+    ComPtr<IMF2DBuffer> sample;
+    sample.Swap(buffers[slot]); // take ownership and clear the slot in one step
+    Q_ASSERT(sample);
+    readIndex = (slot + 1) % CAMERA_SAMPLE_QUEUE_SIZE;
+    return sample;
}
void QWinRTCameraVideoRendererControl::discardBuffers()
@@ -211,4 +402,17 @@ void QWinRTCameraVideoRendererControl::discardBuffers()
buffer.Reset();
}
+// Registers one more media probe; queueBuffer() emits probed frames while
+// this count is non-zero.
+void QWinRTCameraVideoRendererControl::incrementProbe()
+{
+    Q_D(QWinRTCameraVideoRendererControl);
+    d->videoProbesCounter += 1;
+}
+
+// Unregisters a media probe; the counter must never underflow, so a
+// decrement without a matching increment is a programming error.
+void QWinRTCameraVideoRendererControl::decrementProbe()
+{
+    Q_D(QWinRTCameraVideoRendererControl);
+    Q_ASSERT(d->videoProbesCounter > 0);
+    d->videoProbesCounter -= 1;
+}
+
QT_END_NAMESPACE