Windows: Improve EVR presenter.

Removed the hard dependency on ANGLE; it is now required only when
rendering to a texture. If ANGLE is not used, the buffers are now
mappable to system memory.

This enables HW-accelerated video decoding in QML for non-ANGLE
builds. Note that the frame data has to make a round trip through
system memory, which is not ideal but still faster than SW decoding.

Task-number: QTBUG-45593
Change-Id: Icc3503142df4e8dbb53b4e11f409b161fd2f9bde
Reviewed-by: Christian Stromme <christian.stromme@theqtcompany.com>
This commit is contained in:
Yoann Lopes
2015-11-27 11:56:50 +01:00
parent 36549dbe14
commit c7397523e7
11 changed files with 761 additions and 708 deletions

View File

@@ -1,28 +1,20 @@
INCLUDEPATH += $$PWD/evr
qtHaveModule(widgets): QT += widgets
QT += gui-private
LIBS += -lmf -lmfplat -lmfuuid -ld3d9 -ldxva2 -lwinmm -levr
HEADERS += \
$$PWD/evr/evrvideowindowcontrol.h \
$$PWD/evr/evrcustompresenter.h \
$$PWD/evr/evrd3dpresentengine.h \
$$PWD/evr/evrhelpers.h \
$$PWD/evr/evrdefs.h
SOURCES += \
$$PWD/evr/evrvideowindowcontrol.cpp \
$$PWD/evr/evrcustompresenter.cpp \
$$PWD/evr/evrd3dpresentengine.cpp \
$$PWD/evr/evrhelpers.cpp \
$$PWD/evr/evrdefs.cpp
contains(QT_CONFIG, angle)|contains(QT_CONFIG, dynamicgl) {
LIBS += -lmf -lmfplat -lmfuuid -ld3d9 -ldxva2 -lwinmm -levr
QT += gui-private
DEFINES += CUSTOM_EVR_PRESENTER
HEADERS += \
$$PWD/evr/evrcustompresenter.h \
$$PWD/evr/evrd3dpresentengine.h \
$$PWD/evr/evrhelpers.h
SOURCES += \
$$PWD/evr/evrcustompresenter.cpp \
$$PWD/evr/evrd3dpresentengine.cpp \
$$PWD/evr/evrhelpers.cpp
}

View File

@@ -56,7 +56,6 @@ static const LONG ONE_MSEC = 1000;
static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
static HRESULT clearDesiredSampleTime(IMFSample *sample);
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
static DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat);
static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
static inline LONG MFTimeToMsec(const LONGLONG& time)
@@ -80,9 +79,32 @@ bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
return result == S_OK;
}
Scheduler::Scheduler()
: m_clock(NULL)
, m_CB(NULL)
class PresentSampleEvent : public QEvent
{
public:
PresentSampleEvent(IMFSample *sample)
: QEvent(QEvent::Type(EVRCustomPresenter::PresentSample))
, m_sample(sample)
{
if (m_sample)
m_sample->AddRef();
}
~PresentSampleEvent()
{
if (m_sample)
m_sample->Release();
}
IMFSample *sample() const { return m_sample; }
private:
IMFSample *m_sample;
};
Scheduler::Scheduler(EVRCustomPresenter *presenter)
: m_presenter(presenter)
, m_clock(NULL)
, m_threadID(0)
, m_schedulerThread(0)
, m_threadReadyEvent(0)
@@ -228,9 +250,6 @@ HRESULT Scheduler::flush()
HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow)
{
if (!m_CB)
return MF_E_NOT_INITIALIZED;
if (!m_schedulerThread)
return MF_E_NOT_INITIALIZED;
@@ -242,13 +261,7 @@ HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow)
return E_FAIL;
if (presentNow || !m_clock) {
// Present the sample immediately.
sample->AddRef();
QMetaObject::invokeMethod(m_CB,
"presentSample",
Qt::QueuedConnection,
Q_ARG(void*, sample),
Q_ARG(qint64, 0));
m_presenter->presentSample(sample);
} else {
// Queue the sample and ask the scheduler thread to wake up.
m_mutex.lock();
@@ -344,12 +357,7 @@ HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep)
}
if (presentNow) {
sample->AddRef();
QMetaObject::invokeMethod(m_CB,
"presentSample",
Qt::QueuedConnection,
Q_ARG(void*, sample),
Q_ARG(qint64, hnsPresentationTime));
m_presenter->presentSample(sample);
} else {
// The sample is not ready yet. Return it to the queue.
m_mutex.lock();
@@ -538,24 +546,27 @@ HRESULT SamplePool::clear()
}
EVRCustomPresenter::EVRCustomPresenter()
EVRCustomPresenter::EVRCustomPresenter(QAbstractVideoSurface *surface)
: QObject()
, m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
, m_refCount(1)
, m_renderState(RenderShutdown)
, m_mutex(QMutex::Recursive)
, m_scheduler(this)
, m_tokenCounter(0)
, m_sampleNotify(false)
, m_repaint(false)
, m_prerolled(false)
, m_endStreaming(false)
, m_playbackRate(1.0f)
, m_D3DPresentEngine(0)
, m_presentEngine(new D3DPresentEngine)
, m_clock(0)
, m_mixer(0)
, m_mediaEventSink(0)
, m_mediaType(0)
, m_surface(0)
, m_canRenderToSurface(false)
, m_sampleToPresent(0)
{
// Initial source rectangle = (0,0,1,1)
m_sourceRect.top = 0;
@@ -563,18 +574,21 @@ EVRCustomPresenter::EVRCustomPresenter()
m_sourceRect.bottom = 1;
m_sourceRect.right = 1;
m_D3DPresentEngine = new D3DPresentEngine;
m_scheduler.setCallback(m_D3DPresentEngine);
setSurface(surface);
}
EVRCustomPresenter::~EVRCustomPresenter()
{
m_scheduler.flush();
m_scheduler.stopScheduler();
m_samplePool.clear();
qt_evr_safe_release(&m_clock);
qt_evr_safe_release(&m_mixer);
qt_evr_safe_release(&m_mediaEventSink);
qt_evr_safe_release(&m_mediaType);
m_D3DPresentEngine->deleteLater();
delete m_presentEngine;
}
HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
@@ -628,7 +642,7 @@ HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID
return MF_E_UNSUPPORTED_SERVICE;
// First try to get the service interface from the D3DPresentEngine object.
hr = m_D3DPresentEngine->getService(guidService, riid, ppvObject);
hr = m_presentEngine->getService(guidService, riid, ppvObject);
if (FAILED(hr))
// Next, check if this object supports the interface.
hr = QueryInterface(riid, ppvObject);
@@ -725,6 +739,11 @@ HRESULT EVRCustomPresenter::ReleaseServicePointers()
return S_OK;
}
bool EVRCustomPresenter::isValid() const
{
return m_presentEngine->isValid() && m_canRenderToSurface;
}
HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
{
HRESULT hr = S_OK;
@@ -837,11 +856,7 @@ HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
return hr;
}
// Start the video surface in the main thread
if (thread() == QThread::currentThread())
startSurface();
else
QMetaObject::invokeMethod(this, "startSurface", Qt::QueuedConnection);
startSurface();
// Now try to get new output samples from the mixer.
processOutputLoop();
@@ -890,11 +905,7 @@ HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
cancelFrameStep();
}
// Stop the video surface in the main thread
if (thread() == QThread::currentThread())
stopSurface();
else
QMetaObject::invokeMethod(this, "stopSurface", Qt::QueuedConnection);
stopSurface();
return S_OK;
}
@@ -1021,16 +1032,27 @@ void EVRCustomPresenter::supportedFormatsChanged()
{
QMutexLocker locker(&m_mutex);
m_supportedGLFormats.clear();
if (!m_surface)
return;
m_canRenderToSurface = false;
m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, false);
QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle);
for (int i = 0; i < formats.size(); ++i) {
DWORD fourCC = getFourCCFromPixelFormat(formats.at(i));
if (fourCC)
m_supportedGLFormats.append(fourCC);
// check if we can render to the surface (compatible formats)
if (m_surface) {
QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle);
if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoFrame::Format_RGB32)) {
m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, true);
m_canRenderToSurface = true;
} else {
formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle);
Q_FOREACH (QVideoFrame::PixelFormat format, formats) {
if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
m_canRenderToSurface = true;
break;
}
}
}
}
// TODO: if media type already set, renegotiate?
}
void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface)
@@ -1044,9 +1066,6 @@ void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface)
m_surface = surface;
if (m_D3DPresentEngine)
m_D3DPresentEngine->setSurface(surface);
if (m_surface) {
connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged,
this, &EVRCustomPresenter::supportedFormatsChanged);
@@ -1140,11 +1159,7 @@ HRESULT EVRCustomPresenter::flush()
if (m_renderState == RenderStopped) {
// Repaint with black.
QMetaObject::invokeMethod(m_D3DPresentEngine,
"presentSample",
Qt::QueuedConnection,
Q_ARG(void*, 0),
Q_ARG(qint64, 0));
presentSample(NULL);
}
return S_OK;
@@ -1401,7 +1416,6 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
if (!mediaType) {
qt_evr_safe_release(&m_mediaType);
releaseResources();
m_D3DPresentEngine->setSurfaceFormat(QVideoSurfaceFormat());
return S_OK;
}
@@ -1410,11 +1424,6 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
IMFSample *sample = NULL;
QVideoSurfaceFormat surfaceFormat;
UINT64 size;
int width;
int height;
// Cannot set the media type after shutdown.
HRESULT hr = checkShutdown();
if (FAILED(hr))
@@ -1432,7 +1441,7 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
// Initialize the presenter engine with the new media type.
// The presenter engine allocates the samples.
hr = m_D3DPresentEngine->createVideoSamples(mediaType, sampleQueue);
hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue);
if (FAILED(hr))
goto done;
@@ -1465,15 +1474,6 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
m_mediaType = mediaType;
m_mediaType->AddRef();
// Create the surface format
hr = m_mediaType->GetUINT64(MF_MT_FRAME_SIZE, &size);
width = int(HI32(size));
height = int(LO32(size));
surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
pixelFormatFromMediaType(m_mediaType),
QAbstractVideoBuffer::GLTextureHandle);
m_D3DPresentEngine->setSurfaceFormat(surfaceFormat);
done:
if (FAILED(hr))
releaseResources();
@@ -1493,10 +1493,17 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
if (FAILED(hr))
return hr;
// Only accept pixel formats supported by the video surface
if (!m_supportedGLFormats.contains((DWORD)d3dFormat))
QVideoFrame::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
if (pixelFormat == QVideoFrame::Format_Invalid)
return MF_E_INVALIDMEDIATYPE;
// When not rendering to texture, only accept pixel formats supported by the video surface
if (!m_presentEngine->isTextureRenderingEnabled()
&& m_surface
&& !m_surface->supportedPixelFormats().contains(pixelFormat)) {
return MF_E_INVALIDMEDIATYPE;
}
// Reject compressed media types.
hr = proposed->IsCompressedFormat(&compressed);
if (FAILED(hr))
@@ -1505,9 +1512,8 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
if (compressed)
return MF_E_INVALIDMEDIATYPE;
// The D3DPresentEngine checks whether the format can be used as
// the back-buffer format for the swap chains.
hr = m_D3DPresentEngine->checkFormat(d3dFormat);
// The D3DPresentEngine checks whether surfaces can be created using this format
hr = m_presentEngine->checkFormat(d3dFormat);
if (FAILED(hr))
return hr;
@@ -1788,7 +1794,7 @@ void EVRCustomPresenter::releaseResources()
m_samplePool.clear();
m_D3DPresentEngine->releaseResources();
m_presentEngine->releaseResources();
}
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
@@ -1853,18 +1859,6 @@ done:
return hr;
}
void EVRCustomPresenter::startSurface()
{
if (m_D3DPresentEngine)
m_D3DPresentEngine->start();
}
void EVRCustomPresenter::stopSurface()
{
if (m_D3DPresentEngine)
m_D3DPresentEngine->stop();
}
float EVRCustomPresenter::getMaxRate(bool thin)
{
// Non-thinned:
@@ -1880,7 +1874,7 @@ float EVRCustomPresenter::getMaxRate(bool thin)
if (!thin && m_mediaType) {
qt_evr_getFrameRate(m_mediaType, &fps);
monitorRateHz = m_D3DPresentEngine->refreshRate();
monitorRateHz = m_presentEngine->refreshRate();
if (fps.Denominator && fps.Numerator && monitorRateHz) {
// Max Rate = Refresh Rate / Frame Rate
@@ -1891,6 +1885,74 @@ float EVRCustomPresenter::getMaxRate(bool thin)
return maxRate;
}
bool EVRCustomPresenter::event(QEvent *e)
{
if (e->type() == StartSurface) {
startSurface();
return true;
} else if (e->type() == StopSurface) {
stopSurface();
return true;
} else if (e->type() == PresentSample) {
PresentSampleEvent *ev = static_cast<PresentSampleEvent *>(e);
presentSample(ev->sample());
return true;
}
return QObject::event(e);
}
void EVRCustomPresenter::startSurface()
{
if (thread() != QThread::currentThread()) {
QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface)));
return;
}
if (!m_surface || m_surface->isActive())
return;
QVideoSurfaceFormat format = m_presentEngine->videoSurfaceFormat();
if (!format.isValid())
return;
m_surface->start(format);
}
void EVRCustomPresenter::stopSurface()
{
if (thread() != QThread::currentThread()) {
QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StopSurface)));
return;
}
if (!m_surface || !m_surface->isActive())
return;
m_surface->stop();
}
void EVRCustomPresenter::presentSample(IMFSample *sample)
{
if (thread() != QThread::currentThread()) {
QCoreApplication::postEvent(this, new PresentSampleEvent(sample));
return;
}
if (!m_surface || !m_surface->isActive() || !m_presentEngine->videoSurfaceFormat().isValid())
return;
QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);
if (m_surface->isActive() && m_surface->surfaceFormat() != m_presentEngine->videoSurfaceFormat()) {
m_surface->stop();
if (!m_surface->start(m_presentEngine->videoSurfaceFormat()))
return;
}
m_surface->present(frame);
}
HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration)
{
if (!sample)
@@ -1925,8 +1987,6 @@ HRESULT clearDesiredSampleTime(IMFSample *sample)
UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
sample->GetUnknown(MFSamplePresenter_SampleSwapChain, IID_IUnknown, (void**)&unkSwapChain);
hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
if (SUCCEEDED(hr)) {
desired->Clear();
@@ -1934,12 +1994,6 @@ HRESULT clearDesiredSampleTime(IMFSample *sample)
hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter);
if (FAILED(hr))
goto done;
if (unkSwapChain) {
hr = sample->SetUnknown(MFSamplePresenter_SampleSwapChain, unkSwapChain);
if (FAILED(hr))
goto done;
}
}
done:
@@ -1963,48 +2017,6 @@ HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sou
return hr;
}
DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat)
{
DWORD fourCC = 0;
switch (pixelFormat) {
case QVideoFrame::Format_ARGB32:
case QVideoFrame::Format_ARGB32_Premultiplied:
fourCC = MFVideoFormat_ARGB32.Data1;
break;
case QVideoFrame::Format_RGB32:
fourCC = MFVideoFormat_RGB32.Data1;
break;
case QVideoFrame::Format_RGB24:
fourCC = MFVideoFormat_RGB24.Data1;
break;
case QVideoFrame::Format_RGB565:
fourCC = MFVideoFormat_RGB565.Data1;
break;
case QVideoFrame::Format_RGB555:
fourCC = MFVideoFormat_RGB555.Data1;
break;
case QVideoFrame::Format_AYUV444:
case QVideoFrame::Format_AYUV444_Premultiplied:
fourCC = MFVideoFormat_AYUV.Data1;
break;
case QVideoFrame::Format_YUV420P:
fourCC = MFVideoFormat_I420.Data1;
break;
case QVideoFrame::Format_UYVY:
fourCC = MFVideoFormat_UYVY.Data1;
break;
case QVideoFrame::Format_YV12:
fourCC = MFVideoFormat_YV12.Data1;
break;
case QVideoFrame::Format_NV12:
fourCC = MFVideoFormat_NV12.Data1;
break;
default:
break;
}
return fourCC;
}
static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
{
GUID majorType;
@@ -2013,12 +2025,30 @@ static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
if (majorType != MFMediaType_Video)
return QVideoFrame::Format_Invalid;
GUID subType;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subType)))
GUID subtype;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
return QVideoFrame::Format_Invalid;
if (subType == MFVideoFormat_RGB32)
if (subtype == MFVideoFormat_RGB32)
return QVideoFrame::Format_RGB32;
else if (subtype == MFVideoFormat_ARGB32)
return QVideoFrame::Format_ARGB32;
else if (subtype == MFVideoFormat_RGB24)
return QVideoFrame::Format_RGB24;
else if (subtype == MFVideoFormat_RGB565)
return QVideoFrame::Format_RGB565;
else if (subtype == MFVideoFormat_RGB555)
return QVideoFrame::Format_RGB555;
else if (subtype == MFVideoFormat_AYUV)
return QVideoFrame::Format_AYUV444;
else if (subtype == MFVideoFormat_I420)
return QVideoFrame::Format_YUV420P;
else if (subtype == MFVideoFormat_UYVY)
return QVideoFrame::Format_UYVY;
else if (subtype == MFVideoFormat_YV12)
return QVideoFrame::Format_YV12;
else if (subtype == MFVideoFormat_NV12)
return QVideoFrame::Format_NV12;
return QVideoFrame::Format_Invalid;
}

View File

@@ -37,13 +37,19 @@
#include <QObject>
#include <qmutex.h>
#include <qqueue.h>
#include <qevent.h>
#include <qvideosurfaceformat.h>
#include "evrdefs.h"
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
QT_END_NAMESPACE
QT_USE_NAMESPACE
class EVRCustomPresenter;
class D3DPresentEngine;
class QAbstractVideoSurface;
template<class T>
class AsyncCallback : public IMFAsyncCallback
@@ -108,13 +114,9 @@ public:
Flush = WM_USER + 2
};
Scheduler();
Scheduler(EVRCustomPresenter *presenter);
~Scheduler();
void setCallback(QObject *cb) {
m_CB = cb;
}
void setFrameRate(const MFRatio &fps);
void setClockRate(float rate) { m_playbackRate = rate; }
@@ -135,10 +137,11 @@ public:
private:
DWORD schedulerThreadProcPrivate();
EVRCustomPresenter *m_presenter;
QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented.
IMFClock *m_clock; // Presentation clock. Can be NULL.
QObject *m_CB; // Weak reference; do not delete.
DWORD m_threadID;
HANDLE m_schedulerThread;
@@ -181,8 +184,6 @@ class EVRCustomPresenter
, public IMFGetService
, public IMFTopologyServiceLookupClient
{
Q_OBJECT
public:
// Defines the state of the presenter.
enum RenderState
@@ -203,9 +204,18 @@ public:
FrameStepComplete // Sample was rendered.
};
EVRCustomPresenter();
enum PresenterEvents
{
StartSurface = QEvent::User,
StopSurface = QEvent::User + 1,
PresentSample = QEvent::User + 2
};
EVRCustomPresenter(QAbstractVideoSurface *surface = 0);
~EVRCustomPresenter();
bool isValid() const;
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID riid, void ** ppv);
STDMETHODIMP_(ULONG) AddRef();
@@ -240,9 +250,11 @@ public:
void supportedFormatsChanged();
void setSurface(QAbstractVideoSurface *surface);
private Q_SLOTS:
void startSurface();
void stopSurface();
void presentSample(IMFSample *sample);
bool event(QEvent *);
private:
HRESULT checkShutdown() const
@@ -342,7 +354,7 @@ private:
MFVideoNormalizedRect m_sourceRect;
float m_playbackRate;
D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.)
D3DPresentEngine *m_presentEngine; // Rendering engine. (Never null if the constructor succeeds.)
IMFClock *m_clock; // The EVR's clock.
IMFTransform *m_mixer; // The EVR's mixer.
@@ -350,7 +362,9 @@ private:
IMFMediaType *m_mediaType; // Output media type
QAbstractVideoSurface *m_surface;
QList<DWORD> m_supportedGLFormats;
bool m_canRenderToSurface;
IMFSample *m_sampleToPresent;
};
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);

View File

@@ -35,48 +35,23 @@
#include "evrhelpers.h"
#include <qtgui/qguiapplication.h>
#include <qpa/qplatformnativeinterface.h>
#include <qtgui/qopenglcontext.h>
#include <qabstractvideobuffer.h>
#include <QAbstractVideoSurface>
#include <qvideoframe.h>
#include <QDebug>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
#include <qwindow.h>
#include <qthread.h>
#include <private/qmediaopenglhelper_p.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <d3d9.h>
#include <dxva2api.h>
#include <WinUser.h>
#include <evr.h>
#ifdef MAYBE_ANGLE
# include <qtgui/qguiapplication.h>
# include <qpa/qplatformnativeinterface.h>
# include <qopenglfunctions.h>
# include <EGL/eglext.h>
#endif
static const int PRESENTER_BUFFER_COUNT = 3;
class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
TextureVideoBuffer(GLuint textureId)
: QAbstractVideoBuffer(GLTextureHandle)
, m_textureId(textureId)
{}
~TextureVideoBuffer() {}
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
void unmap() {}
QVariant handle() const
{
return QVariant::fromValue<unsigned int>(m_textureId);
}
private:
GLuint m_textureId;
};
#ifdef MAYBE_ANGLE
EGLWrapper::EGLWrapper()
{
@@ -140,22 +115,160 @@ EGLBoolean EGLWrapper::releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLin
return m_eglReleaseTexImage(dpy, surface, buffer);
}
class OpenGLResources : public QObject
{
public:
OpenGLResources()
: egl(new EGLWrapper)
, eglDisplay(0)
, eglSurface(0)
, glTexture(0)
{}
void release()
{
if (thread() == QThread::currentThread())
delete this;
else
deleteLater();
}
EGLWrapper *egl;
EGLDisplay *eglDisplay;
EGLSurface eglSurface;
unsigned int glTexture;
private:
~OpenGLResources()
{
Q_ASSERT(QOpenGLContext::currentContext() != NULL);
if (eglSurface && egl) {
egl->releaseTexImage(eglDisplay, eglSurface, EGL_BACK_BUFFER);
egl->destroySurface(eglDisplay, eglSurface);
}
if (glTexture)
QOpenGLContext::currentContext()->functions()->glDeleteTextures(1, &glTexture);
delete egl;
}
};
#endif // MAYBE_ANGLE
class IMFSampleVideoBuffer: public QAbstractVideoBuffer
{
public:
IMFSampleVideoBuffer(D3DPresentEngine *engine, IMFSample *sample, QAbstractVideoBuffer::HandleType handleType)
: QAbstractVideoBuffer(handleType)
, m_engine(engine)
, m_sample(sample)
, m_surface(0)
, m_mapMode(NotMapped)
, m_textureUpdated(false)
{
if (m_sample) {
m_sample->AddRef();
IMFMediaBuffer *buffer;
if (SUCCEEDED(m_sample->GetBufferByIndex(0, &buffer))) {
MFGetService(buffer,
mr_BUFFER_SERVICE,
iid_IDirect3DSurface9,
reinterpret_cast<void **>(&m_surface));
buffer->Release();
}
}
}
~IMFSampleVideoBuffer()
{
if (m_surface) {
if (m_mapMode != NotMapped)
m_surface->UnlockRect();
m_surface->Release();
}
if (m_sample)
m_sample->Release();
}
QVariant handle() const;
MapMode mapMode() const { return m_mapMode; }
uchar *map(MapMode, int*, int*);
void unmap();
private:
mutable D3DPresentEngine *m_engine;
IMFSample *m_sample;
IDirect3DSurface9 *m_surface;
MapMode m_mapMode;
mutable bool m_textureUpdated;
};
uchar *IMFSampleVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
{
if (!m_surface || m_mapMode != NotMapped)
return 0;
D3DSURFACE_DESC desc;
if (FAILED(m_surface->GetDesc(&desc)))
return 0;
D3DLOCKED_RECT rect;
if (FAILED(m_surface->LockRect(&rect, NULL, mode == ReadOnly ? D3DLOCK_READONLY : 0)))
return 0;
m_mapMode = mode;
if (numBytes)
*numBytes = (int)(rect.Pitch * desc.Height);
if (bytesPerLine)
*bytesPerLine = (int)rect.Pitch;
return reinterpret_cast<uchar *>(rect.pBits);
}
void IMFSampleVideoBuffer::unmap()
{
if (m_mapMode == NotMapped)
return;
m_mapMode = NotMapped;
m_surface->UnlockRect();
}
QVariant IMFSampleVideoBuffer::handle() const
{
QVariant handle;
#ifdef MAYBE_ANGLE
if (handleType() != GLTextureHandle)
return handle;
if (m_textureUpdated || m_engine->updateTexture(m_surface)) {
m_textureUpdated = true;
handle = QVariant::fromValue<unsigned int>(m_engine->m_glResources->glTexture);
}
#endif
return handle;
}
D3DPresentEngine::D3DPresentEngine()
: QObject()
, m_mutex(QMutex::Recursive)
, m_deviceResetToken(0)
: m_deviceResetToken(0)
, m_D3D9(0)
, m_device(0)
, m_deviceManager(0)
, m_surface(0)
, m_glContext(0)
, m_offscreenSurface(0)
, m_eglDisplay(0)
, m_eglConfig(0)
, m_eglSurface(0)
, m_glTexture(0)
, m_useTextureRendering(false)
#ifdef MAYBE_ANGLE
, m_glResources(0)
, m_texture(0)
, m_egl(0)
#endif
{
ZeroMemory(&m_displayMode, sizeof(m_displayMode));
@@ -172,330 +285,11 @@ D3DPresentEngine::D3DPresentEngine()
D3DPresentEngine::~D3DPresentEngine()
{
qt_evr_safe_release(&m_texture);
releaseResources();
qt_evr_safe_release(&m_device);
qt_evr_safe_release(&m_deviceManager);
qt_evr_safe_release(&m_D3D9);
if (m_eglSurface) {
m_egl->releaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
m_egl->destroySurface(m_eglDisplay, m_eglSurface);
m_eglSurface = NULL;
}
if (m_glTexture) {
if (QOpenGLContext *current = QOpenGLContext::currentContext())
current->functions()->glDeleteTextures(1, &m_glTexture);
else
qWarning() << "D3DPresentEngine: Cannot obtain GL context, unable to delete textures";
}
delete m_glContext;
delete m_offscreenSurface;
delete m_egl;
}
void D3DPresentEngine::start()
{
QMutexLocker locker(&m_mutex);
if (!m_surfaceFormat.isValid())
return;
if (!m_texture)
createOffscreenTexture();
if (m_surface && !m_surface->isActive())
m_surface->start(m_surfaceFormat);
}
void D3DPresentEngine::stop()
{
QMutexLocker locker(&m_mutex);
if (m_surface && m_surface->isActive())
m_surface->stop();
}
HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
{
HRESULT hr = S_OK;
if (riid == __uuidof(IDirect3DDeviceManager9)) {
if (m_deviceManager == NULL) {
hr = MF_E_UNSUPPORTED_SERVICE;
} else {
*ppv = m_deviceManager;
m_deviceManager->AddRef();
}
} else {
hr = MF_E_UNSUPPORTED_SERVICE;
}
return hr;
}
HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
{
HRESULT hr = S_OK;
UINT uAdapter = D3DADAPTER_DEFAULT;
D3DDEVTYPE type = D3DDEVTYPE_HAL;
D3DDISPLAYMODE mode;
D3DDEVICE_CREATION_PARAMETERS params;
// Our shared D3D/EGL surface only supports RGB32,
// reject all other formats
if (format != D3DFMT_X8R8G8B8)
return MF_E_INVALIDMEDIATYPE;
if (m_device) {
hr = m_device->GetCreationParameters(&params);
if (FAILED(hr))
return hr;
uAdapter = params.AdapterOrdinal;
type = params.DeviceType;
}
hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
if (FAILED(hr))
return hr;
return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE);
}
HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
{
if (!format)
return MF_E_UNEXPECTED;
HRESULT hr = S_OK;
D3DPRESENT_PARAMETERS pp;
IDirect3DSwapChain9 *swapChain = NULL;
IMFSample *videoSample = NULL;
QMutexLocker locker(&m_mutex);
releaseResources();
// Get the swap chain parameters from the media type.
hr = getSwapChainPresentParameters(format, &pp);
if (FAILED(hr))
goto done;
// Create the video samples.
for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
// Create a new swap chain.
hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
if (FAILED(hr))
goto done;
// Create the video sample from the swap chain.
hr = createD3DSample(swapChain, &videoSample);
if (FAILED(hr))
goto done;
// Add it to the list.
videoSample->AddRef();
videoSampleQueue.append(videoSample);
// Set the swap chain pointer as a custom attribute on the sample. This keeps
// a reference count on the swap chain, so that the swap chain is kept alive
// for the duration of the sample's lifetime.
hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain);
if (FAILED(hr))
goto done;
qt_evr_safe_release(&videoSample);
qt_evr_safe_release(&swapChain);
}
done:
if (FAILED(hr))
releaseResources();
qt_evr_safe_release(&swapChain);
qt_evr_safe_release(&videoSample);
return hr;
}
void D3DPresentEngine::releaseResources()
{
}
void D3DPresentEngine::presentSample(void *opaque, qint64)
{
HRESULT hr = S_OK;
IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
IMFMediaBuffer* buffer = NULL;
IDirect3DSurface9* surface = NULL;
if (m_surface && m_surface->isActive()) {
if (sample) {
// Get the buffer from the sample.
hr = sample->GetBufferByIndex(0, &buffer);
if (FAILED(hr))
goto done;
// Get the surface from the buffer.
hr = MFGetService(buffer, mr_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
if (FAILED(hr))
goto done;
}
if (surface && updateTexture(surface)) {
QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
m_surfaceFormat.frameSize(),
m_surfaceFormat.pixelFormat());
// WMF uses 100-nanosecond units, Qt uses microseconds
LONGLONG startTime = -1;
if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
frame.setStartTime(startTime * 0.1);
LONGLONG duration = -1;
if (SUCCEEDED(sample->GetSampleDuration(&duration)))
frame.setEndTime((startTime + duration) * 0.1);
}
m_surface->present(frame);
}
}
done:
qt_evr_safe_release(&surface);
qt_evr_safe_release(&buffer);
qt_evr_safe_release(&sample);
}
void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface)
{
QMutexLocker locker(&m_mutex);
m_surface = surface;
}
void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format)
{
QMutexLocker locker(&m_mutex);
m_surfaceFormat = format;
}
void D3DPresentEngine::createOffscreenTexture()
{
// First, check if we have a context on this thread
QOpenGLContext *currentContext = QOpenGLContext::currentContext();
if (!currentContext) {
//Create OpenGL context and set share context from surface
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
if (!shareContext)
return;
m_offscreenSurface = new QWindow;
m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
//Needs geometry to be a valid surface, but size is not important
m_offscreenSurface->setGeometry(-1, -1, 1, 1);
m_offscreenSurface->create();
m_glContext = new QOpenGLContext;
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
m_glContext->setShareContext(shareContext);
if (!m_glContext->create()) {
delete m_glContext;
delete m_offscreenSurface;
m_glContext = 0;
m_offscreenSurface = 0;
return;
}
currentContext = m_glContext;
}
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
if (!m_egl)
m_egl = new EGLWrapper;
QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface();
m_eglDisplay = static_cast<EGLDisplay*>(
nativeInterface->nativeResourceForContext("eglDisplay", currentContext));
m_eglConfig = static_cast<EGLConfig*>(
nativeInterface->nativeResourceForContext("eglConfig", currentContext));
currentContext->functions()->glGenTextures(1, &m_glTexture);
int w = m_surfaceFormat.frameWidth();
int h = m_surfaceFormat.frameHeight();
bool hasAlpha = currentContext->format().hasAlpha();
EGLint attribs[] = {
EGL_WIDTH, w,
EGL_HEIGHT, h,
EGL_TEXTURE_FORMAT, hasAlpha ? EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB,
EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
EGL_NONE
};
EGLSurface pbuffer = m_egl->createPbufferSurface(m_eglDisplay, m_eglConfig, attribs);
HANDLE share_handle = 0;
PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE =
reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_egl->getProcAddress("eglQuerySurfacePointerANGLE"));
Q_ASSERT(eglQuerySurfacePointerANGLE);
eglQuerySurfacePointerANGLE(
m_eglDisplay,
pbuffer,
EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle);
m_device->CreateTexture(w, h, 1,
D3DUSAGE_RENDERTARGET,
hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8,
D3DPOOL_DEFAULT,
&m_texture,
&share_handle);
m_eglSurface = pbuffer;
if (m_glContext)
m_glContext->doneCurrent();
}
bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src)
{
if (!m_texture)
return false;
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glTexture);
IDirect3DSurface9 *dest = NULL;
// Copy the sample surface to the shared D3D/EGL surface
HRESULT hr = m_texture->GetSurfaceLevel(0, &dest);
if (FAILED(hr))
goto done;
hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE);
if (FAILED(hr))
qWarning("Failed to copy D3D surface");
if (hr == S_OK)
m_egl->bindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
done:
qt_evr_safe_release(&dest);
if (m_glContext)
m_glContext->doneCurrent();
return SUCCEEDED(hr);
}
HRESULT D3DPresentEngine::initializeD3D()
@@ -520,18 +314,11 @@ HRESULT D3DPresentEngine::createD3DDevice()
IDirect3DDevice9Ex* device = NULL;
// Hold the lock because we might be discarding an existing device.
QMutexLocker locker(&m_mutex);
if (!m_D3D9 || !m_deviceManager)
return MF_E_NOT_INITIALIZED;
hwnd = ::GetShellWindow();
// Note: The presenter creates additional swap chains to present the
// video frames. Therefore, it does not use the device's implicit
// swap chain, so the size of the back buffer here is 1 x 1.
D3DPRESENT_PARAMETERS pp;
ZeroMemory(&pp, sizeof(pp));
@@ -585,71 +372,267 @@ done:
return hr;
}
HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample)
bool D3DPresentEngine::isValid() const
{
D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00);
return m_device != NULL;
}
IDirect3DSurface9* surface = NULL;
IMFSample* sample = NULL;
void D3DPresentEngine::releaseResources()
{
m_surfaceFormat = QVideoSurfaceFormat();
// Get the back buffer surface.
HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface);
if (FAILED(hr))
goto done;
#ifdef MAYBE_ANGLE
qt_evr_safe_release(&m_texture);
// Fill it with black.
hr = m_device->ColorFill(surface, NULL, clrBlack);
if (FAILED(hr))
goto done;
if (m_glResources) {
m_glResources->release(); // deleted in GL thread
m_glResources = NULL;
}
#endif
}
hr = MFCreateVideoSampleFromSurface(surface, &sample);
if (FAILED(hr))
goto done;
HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
{
HRESULT hr = S_OK;
*videoSample = sample;
(*videoSample)->AddRef();
if (riid == __uuidof(IDirect3DDeviceManager9)) {
if (m_deviceManager == NULL) {
hr = MF_E_UNSUPPORTED_SERVICE;
} else {
*ppv = m_deviceManager;
m_deviceManager->AddRef();
}
} else {
hr = MF_E_UNSUPPORTED_SERVICE;
}
done:
qt_evr_safe_release(&surface);
qt_evr_safe_release(&sample);
return hr;
}
HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp)
HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
{
ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
if (!m_D3D9 || !m_device)
return E_FAIL;
// Get some information about the video format.
UINT32 width = 0, height = 0;
HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
if (FAILED(hr))
return hr;
DWORD d3dFormat = 0;
hr = qt_evr_getFourCC(type, &d3dFormat);
if (FAILED(hr))
return hr;
ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
pp->BackBufferWidth = width;
pp->BackBufferHeight = height;
pp->Windowed = TRUE;
pp->SwapEffect = D3DSWAPEFFECT_DISCARD;
pp->BackBufferFormat = (D3DFORMAT)d3dFormat;
pp->hDeviceWindow = ::GetShellWindow();
pp->Flags = D3DPRESENTFLAG_VIDEO;
pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
HRESULT hr = S_OK;
D3DDISPLAYMODE mode;
D3DDEVICE_CREATION_PARAMETERS params;
hr = m_device->GetCreationParameters(&params);
if (FAILED(hr))
return hr;
if (params.DeviceType != D3DDEVTYPE_HAL)
pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER;
UINT uAdapter = params.AdapterOrdinal;
D3DDEVTYPE type = params.DeviceType;
return S_OK;
hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
if (FAILED(hr))
return hr;
hr = m_D3D9->CheckDeviceFormat(uAdapter, type, mode.Format,
D3DUSAGE_RENDERTARGET,
D3DRTYPE_SURFACE,
format);
if (m_useTextureRendering && format != D3DFMT_X8R8G8B8 && format != D3DFMT_A8R8G8B8) {
// The texture is always in RGB32 so the d3d driver must support conversion from the
// requested format to RGB32.
hr = m_D3D9->CheckDeviceFormatConversion(uAdapter, type, format, D3DFMT_X8R8G8B8);
}
return hr;
}
// Reports whether frames can be delivered as OpenGL texture handles.
// Texture rendering relies on the D3D9/EGL interop provided by ANGLE, so it
// is only available in ANGLE-based builds; otherwise frames are delivered
// as buffers mappable to system memory instead.
bool D3DPresentEngine::supportsTextureRendering() const
{
#ifdef MAYBE_ANGLE
    return QMediaOpenGLHelper::isANGLE();
#else
    return false;
#endif
}
// Enables or disables a presentation hint. RenderToTexture is only honored
// when the platform actually supports texture rendering (i.e. ANGLE).
void D3DPresentEngine::setHint(Hint hint, bool enable)
{
    switch (hint) {
    case RenderToTexture:
        m_useTextureRendering = enable && supportsTextureRendering();
        break;
    default:
        break;
    }
}
// Allocates the pool of offscreen render-target surfaces (wrapped in
// IMFSamples) that the EVR mixer renders into, appending them to
// videoSampleQueue (which receives one reference per sample). On success
// m_surfaceFormat is updated from the negotiated media type; on failure all
// partially created resources are released.
HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
{
    if (!format)
        return MF_E_UNEXPECTED;

    HRESULT hr = S_OK;

    IDirect3DSurface9 *surface = NULL;
    IMFSample *videoSample = NULL;

    releaseResources();

    // Frame size and D3D fourCC come from the media type negotiated with
    // the mixer.
    UINT32 width = 0, height = 0;
    hr = MFGetAttributeSize(format, MF_MT_FRAME_SIZE, &width, &height);
    if (FAILED(hr))
        return hr;

    DWORD d3dFormat = 0;
    hr = qt_evr_getFourCC(format, &d3dFormat);
    if (FAILED(hr))
        return hr;

    // Create the video samples.
    for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
        // Lockable (TRUE) render target, so the buffer can be mapped to
        // system memory when not rendering to a texture.
        hr = m_device->CreateRenderTarget(width, height,
                                          (D3DFORMAT)d3dFormat,
                                          D3DMULTISAMPLE_NONE,
                                          0,
                                          TRUE,
                                          &surface, NULL);
        if (FAILED(hr))
            goto done;

        hr = MFCreateVideoSampleFromSurface(surface, &videoSample);
        if (FAILED(hr))
            goto done;

        // Keep one reference for the queue, then drop our local references.
        videoSample->AddRef();
        videoSampleQueue.append(videoSample);

        qt_evr_safe_release(&videoSample);
        qt_evr_safe_release(&surface);
    }

done:
    if (SUCCEEDED(hr)) {
        // With texture rendering the frames are exposed as RGB32 GL textures;
        // otherwise they are plain mappable buffers in the native format.
        m_surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
                                              m_useTextureRendering ? QVideoFrame::Format_RGB32
                                                                    : qt_evr_pixelFormatFromD3DFormat((D3DFORMAT)d3dFormat),
                                              m_useTextureRendering ? QAbstractVideoBuffer::GLTextureHandle
                                                                    : QAbstractVideoBuffer::NoHandle);
    } else {
        releaseResources();
    }

    qt_evr_safe_release(&videoSample);
    qt_evr_safe_release(&surface);
    return hr;
}
// Wraps an IMFSample into a QVideoFrame described by the current surface
// format. Sample timestamps are carried over when available.
QVideoFrame D3DPresentEngine::makeVideoFrame(IMFSample *sample)
{
    if (!sample)
        return QVideoFrame();

    QVideoFrame frame(new IMFSampleVideoBuffer(this, sample, m_surfaceFormat.handleType()),
                      m_surfaceFormat.frameSize(),
                      m_surfaceFormat.pixelFormat());

    // WMF uses 100-nanosecond units, Qt uses microseconds
    LONGLONG sampleTime = -1;
    const bool hasTimestamp = SUCCEEDED(sample->GetSampleTime(&sampleTime));
    if (hasTimestamp) {
        frame.setStartTime(sampleTime * 0.1);

        LONGLONG sampleDuration = -1;
        if (SUCCEEDED(sample->GetSampleDuration(&sampleDuration)))
            frame.setEndTime((sampleTime + sampleDuration) * 0.1);
    }

    return frame;
}
#ifdef MAYBE_ANGLE
// Lazily creates the D3D9 texture shared with ANGLE via an EGL pbuffer and
// binds it to a GL texture in the current OpenGL context.
// Must be called from the render thread with a current (ANGLE) GL context.
// Returns true if the shared texture exists on return.
bool D3DPresentEngine::createRenderTexture()
{
    if (m_texture)
        return true;

    Q_ASSERT(QOpenGLContext::currentContext() != NULL);

    if (!m_glResources)
        m_glResources = new OpenGLResources;

    QOpenGLContext *currentContext = QOpenGLContext::currentContext();
    if (!currentContext)
        return false;

    // Fetch the EGL display/config backing the Qt GL context (ANGLE).
    QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface();
    m_glResources->eglDisplay = static_cast<EGLDisplay*>(
        nativeInterface->nativeResourceForContext("eglDisplay", currentContext));
    EGLConfig *eglConfig = static_cast<EGLConfig*>(
        nativeInterface->nativeResourceForContext("eglConfig", currentContext));

    currentContext->functions()->glGenTextures(1, &m_glResources->glTexture);

    bool hasAlpha = currentContext->format().hasAlpha();

    // Pbuffer matching the negotiated frame size; its color buffer will be
    // bound to the GL texture via eglBindTexImage.
    EGLint attribs[] = {
        EGL_WIDTH, m_surfaceFormat.frameWidth(),
        EGL_HEIGHT, m_surfaceFormat.frameHeight(),
        EGL_TEXTURE_FORMAT, hasAlpha ? EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB,
        EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
        EGL_NONE
    };

    EGLSurface pbuffer = m_glResources->egl->createPbufferSurface(m_glResources->eglDisplay, eglConfig, attribs);

    // Ask ANGLE for the D3D share handle behind the pbuffer, so the same
    // surface can be targeted from our own D3D9 device.
    HANDLE share_handle = 0;
    PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE =
        reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_glResources->egl->getProcAddress("eglQuerySurfacePointerANGLE"));
    Q_ASSERT(eglQuerySurfacePointerANGLE);
    eglQuerySurfacePointerANGLE(
        m_glResources->eglDisplay,
        pbuffer,
        EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle);

    // Create the D3D side of the shared texture; the RGB(A) format matches
    // the EGL_TEXTURE_RGB/RGBA choice above.
    m_device->CreateTexture(m_surfaceFormat.frameWidth(), m_surfaceFormat.frameHeight(), 1,
                            D3DUSAGE_RENDERTARGET,
                            hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8,
                            D3DPOOL_DEFAULT,
                            &m_texture,
                            &share_handle);

    m_glResources->eglSurface = pbuffer;

    // Attach the pbuffer color buffer to the GL texture.
    QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glResources->glTexture);
    m_glResources->egl->bindTexImage(m_glResources->eglDisplay, m_glResources->eglSurface, EGL_BACK_BUFFER);

    return m_texture != NULL;
}
// Copies the given D3D sample surface into the texture shared with ANGLE,
// creating the shared texture on first use. Flushes the D3D command stream
// afterwards so the texture is fully up to date when Qt renders it.
// Returns true on success.
bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src)
{
    if (!m_texture && !createRenderTexture())
        return false;

    IDirect3DSurface9 *dest = NULL;

    // Copy the sample surface to the shared D3D/EGL surface
    HRESULT hr = m_texture->GetSurfaceLevel(0, &dest);
    if (FAILED(hr))
        goto done;

    hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE);
    if (FAILED(hr)) {
        qWarning("Failed to copy D3D surface");
    } else {
        // Shared surfaces are not synchronized, there's no guarantee that
        // StretchRect is complete when the texture is later rendered by Qt.
        // To make sure the next rendered frame is up to date, flush the command pipeline
        // using an event query.
        // CreateQuery can fail (e.g. event queries unsupported by the driver);
        // only use the query when it was actually created, otherwise we would
        // dereference a null pointer. In that case we simply skip the flush.
        IDirect3DQuery9 *eventQuery = NULL;
        if (SUCCEEDED(m_device->CreateQuery(D3DQUERYTYPE_EVENT, &eventQuery)) && eventQuery) {
            eventQuery->Issue(D3DISSUE_END);
            // Busy-wait until the GPU has consumed everything up to the event
            // (GetData also exits on device errors, avoiding an infinite loop).
            while (eventQuery->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE);
            eventQuery->Release();
        }
    }

done:
    qt_evr_safe_release(&dest);
    return SUCCEEDED(hr);
}
#endif // MAYBE_ANGLE

View File

@@ -34,33 +34,36 @@
#ifndef EVRD3DPRESENTENGINE_H
#define EVRD3DPRESENTENGINE_H
#include <QObject>
#include <EGL/egl.h>
#include <QMutex>
#include <d3d9types.h>
#include <QVideoSurfaceFormat>
#if defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC)
#define MAYBE_ANGLE
#endif
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
QT_END_NAMESPACE
struct IDirect3D9Ex;
struct IDirect3DDevice9;
struct IDirect3DDevice9Ex;
struct IDirect3DDeviceManager9;
struct IDirect3DSurface9;
struct IDirect3DTexture9;
struct IMFSample;
struct IMFMediaType;
struct IDirect3DSwapChain9;
// Randomly generated GUIDs
static const GUID MFSamplePresenter_SampleCounter =
{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
static const GUID MFSamplePresenter_SampleSwapChain =
{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } };
QT_USE_NAMESPACE
class QAbstractVideoSurface;
class QOpenGLContext;
#ifdef MAYBE_ANGLE
class OpenGLResources;
class EGLWrapper
{
@@ -87,40 +90,39 @@ private:
EglReleaseTexImage m_eglReleaseTexImage;
};
class D3DPresentEngine : public QObject
#endif // MAYBE_ANGLE
class D3DPresentEngine
{
Q_OBJECT
public:
enum Hint
{
RenderToTexture
};
D3DPresentEngine();
virtual ~D3DPresentEngine();
void start();
void stop();
bool isValid() const;
void setHint(Hint hint, bool enable = true);
HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
HRESULT checkFormat(D3DFORMAT format);
HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
void releaseResources();
UINT refreshRate() const { return m_displayMode.RefreshRate; }
void setSurface(QAbstractVideoSurface *surface);
void setSurfaceFormat(const QVideoSurfaceFormat &format);
bool supportsTextureRendering() const;
bool isTextureRenderingEnabled() const { return m_useTextureRendering; }
void createOffscreenTexture();
bool updateTexture(IDirect3DSurface9 *src);
HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
QVideoSurfaceFormat videoSurfaceFormat() const { return m_surfaceFormat; }
QVideoFrame makeVideoFrame(IMFSample* sample);
public Q_SLOTS:
void presentSample(void* sample, qint64 llTarget);
void releaseResources();
private:
HRESULT initializeD3D();
HRESULT getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp);
HRESULT createD3DDevice();
HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample);
QMutex m_mutex;
UINT m_deviceResetToken;
D3DDISPLAYMODE m_displayMode;
@@ -130,17 +132,18 @@ private:
IDirect3DDeviceManager9 *m_deviceManager;
QVideoSurfaceFormat m_surfaceFormat;
QAbstractVideoSurface *m_surface;
QOpenGLContext *m_glContext;
QWindow *m_offscreenSurface;
bool m_useTextureRendering;
EGLDisplay *m_eglDisplay;
EGLConfig *m_eglConfig;
EGLSurface m_eglSurface;
unsigned int m_glTexture;
#ifdef MAYBE_ANGLE
bool createRenderTexture();
bool updateTexture(IDirect3DSurface9 *src);
OpenGLResources *m_glResources;
IDirect3DTexture9 *m_texture;
EGLWrapper *m_egl;
#endif
friend class IMFSampleVideoBuffer;
};
#endif // EVRD3DPRESENTENGINE_H

View File

@@ -33,6 +33,13 @@
#include "evrhelpers.h"
#ifndef D3DFMT_YV12
#define D3DFMT_YV12 (D3DFORMAT)MAKEFOURCC ('Y', 'V', '1', '2')
#endif
#ifndef D3DFMT_NV12
#define D3DFMT_NV12 (D3DFORMAT)MAKEFOURCC ('N', 'V', '1', '2')
#endif
HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
{
if (!fourCC)
@@ -101,3 +108,69 @@ bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
return false;
}
// Maps a Direct3D9 surface format to the equivalent QVideoFrame pixel
// format. Formats without a Qt counterpart (including D3DFMT_UNKNOWN) map
// to QVideoFrame::Format_Invalid.
QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format)
{
    // RGB family
    if (format == D3DFMT_R8G8B8)
        return QVideoFrame::Format_RGB24;
    if (format == D3DFMT_A8R8G8B8)
        return QVideoFrame::Format_ARGB32;
    if (format == D3DFMT_X8R8G8B8)
        return QVideoFrame::Format_RGB32;
    if (format == D3DFMT_R5G6B5)
        return QVideoFrame::Format_RGB565;
    if (format == D3DFMT_X1R5G5B5)
        return QVideoFrame::Format_RGB555;
    if (format == D3DFMT_A8)
        return QVideoFrame::Format_Y8;
    if (format == D3DFMT_A8B8G8R8)
        return QVideoFrame::Format_BGRA32;
    if (format == D3DFMT_X8B8G8R8)
        return QVideoFrame::Format_BGR32;

    // Packed/planar YUV family
    if (format == D3DFMT_UYVY)
        return QVideoFrame::Format_UYVY;
    if (format == D3DFMT_YUY2)
        return QVideoFrame::Format_YUYV;
    if (format == D3DFMT_NV12)
        return QVideoFrame::Format_NV12;
    if (format == D3DFMT_YV12)
        return QVideoFrame::Format_YV12;

    // D3DFMT_UNKNOWN and anything unrecognized
    return QVideoFrame::Format_Invalid;
}
// Maps a QVideoFrame pixel format to the Direct3D9 surface format used to
// carry it. Formats without a D3D counterpart (including Format_Invalid)
// map to D3DFMT_UNKNOWN.
D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format)
{
    // RGB family
    if (format == QVideoFrame::Format_RGB24)
        return D3DFMT_R8G8B8;
    if (format == QVideoFrame::Format_ARGB32)
        return D3DFMT_A8R8G8B8;
    if (format == QVideoFrame::Format_RGB32)
        return D3DFMT_X8R8G8B8;
    if (format == QVideoFrame::Format_RGB565)
        return D3DFMT_R5G6B5;
    if (format == QVideoFrame::Format_RGB555)
        return D3DFMT_X1R5G5B5;
    if (format == QVideoFrame::Format_Y8)
        return D3DFMT_A8;
    if (format == QVideoFrame::Format_BGRA32)
        return D3DFMT_A8B8G8R8;
    if (format == QVideoFrame::Format_BGR32)
        return D3DFMT_X8B8G8R8;

    // Packed/planar YUV family
    if (format == QVideoFrame::Format_UYVY)
        return D3DFMT_UYVY;
    if (format == QVideoFrame::Format_YUYV)
        return D3DFMT_YUY2;
    if (format == QVideoFrame::Format_NV12)
        return D3DFMT_NV12;
    if (format == QVideoFrame::Format_YV12)
        return D3DFMT_YV12;

    // Format_Invalid and anything unrecognized
    return D3DFMT_UNKNOWN;
}

View File

@@ -35,6 +35,9 @@
#define EVRHELPERS_H
#include "evrdefs.h"
#include <qvideoframe.h>
QT_USE_NAMESPACE
template<class T>
static inline void qt_evr_safe_release(T **unk)
@@ -81,5 +84,8 @@ inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator);
}
QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format);
D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format);
#endif // EVRHELPERS_H

View File

@@ -35,21 +35,17 @@
#include "videosurfacefilter.h"
#ifdef CUSTOM_EVR_PRESENTER
#ifdef HAVE_EVR
#include "evrcustompresenter.h"
#endif
#include <qabstractvideosurface.h>
#include <QtMultimedia/private/qmediaopenglhelper_p.h>
DirectShowVideoRendererControl::DirectShowVideoRendererControl(DirectShowEventLoop *loop, QObject *parent)
: QVideoRendererControl(parent)
, m_loop(loop)
, m_surface(0)
, m_filter(0)
#ifdef CUSTOM_EVR_PRESENTER
, m_evrPresenter(0)
#endif
{
}
@@ -57,10 +53,6 @@ DirectShowVideoRendererControl::~DirectShowVideoRendererControl()
{
if (m_filter)
m_filter->Release();
#ifdef CUSTOM_EVR_PRESENTER
if (m_evrPresenter)
m_evrPresenter->Release();
#endif
}
QAbstractVideoSurface *DirectShowVideoRendererControl::surface() const
@@ -70,38 +62,34 @@ QAbstractVideoSurface *DirectShowVideoRendererControl::surface() const
void DirectShowVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
{
if (surface != m_surface) {
m_surface = surface;
if (m_surface == surface)
return;
IBaseFilter *currentFilter = m_filter;
if (m_filter) {
m_filter->Release();
m_filter = 0;
if (surface) {
#ifdef CUSTOM_EVR_PRESENTER
if (!surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()
&& QMediaOpenGLHelper::isANGLE()) {
m_evrPresenter = new EVRCustomPresenter;
m_evrPresenter->setSurface(surface);
m_filter = com_new<IBaseFilter>(clsid_EnhancedVideoRenderer);
if (!qt_evr_setCustomPresenter(m_filter, m_evrPresenter)) {
m_evrPresenter->Release();
m_evrPresenter = 0;
m_filter->Release();
m_filter = 0;
}
}
if (!m_filter)
#endif
{
m_filter = new VideoSurfaceFilter(surface, m_loop);
}
}
emit filterChanged();
if (currentFilter)
currentFilter->Release();
}
m_surface = surface;
if (m_surface) {
#ifdef HAVE_EVR
m_filter = com_new<IBaseFilter>(clsid_EnhancedVideoRenderer);
EVRCustomPresenter *evrPresenter = new EVRCustomPresenter(m_surface);
if (!evrPresenter->isValid() || !qt_evr_setCustomPresenter(m_filter, evrPresenter)) {
m_filter->Release();
m_filter = 0;
}
evrPresenter->Release();
if (!m_filter)
#endif
{
m_filter = new VideoSurfaceFilter(m_surface, m_loop);
}
}
emit filterChanged();
}
IBaseFilter *DirectShowVideoRendererControl::filter()

View File

@@ -39,11 +39,6 @@
#include <dshow.h>
class DirectShowEventLoop;
class VideoSurfaceFilter;
#ifdef CUSTOM_EVR_PRESENTER
class EVRCustomPresenter;
#endif
QT_USE_NAMESPACE
@@ -66,10 +61,6 @@ private:
DirectShowEventLoop *m_loop;
QAbstractVideoSurface *m_surface;
IBaseFilter *m_filter;
#ifdef CUSTOM_EVR_PRESENTER
EVRCustomPresenter *m_evrPresenter;
#endif
};
#endif

View File

@@ -34,9 +34,7 @@
#include "mfvideorenderercontrol.h"
#include "mfactivate.h"
#ifdef CUSTOM_EVR_PRESENTER
#include "evrcustompresenter.h"
#endif
#include <qabstractvideosurface.h>
#include <qvideosurfaceformat.h>
@@ -2222,7 +2220,6 @@ namespace
};
}
#ifdef CUSTOM_EVR_PRESENTER
class EVRCustomPresenterActivate : public MFAbstractActivate
{
@@ -2243,16 +2240,13 @@ private:
QMutex m_mutex;
};
#endif // CUSTOM_EVR_PRESENTER
MFVideoRendererControl::MFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(0)
, m_currentActivate(0)
, m_callback(0)
#ifdef CUSTOM_EVR_PRESENTER
, m_presenterActivate(0)
#endif
{
}
@@ -2266,13 +2260,11 @@ void MFVideoRendererControl::clear()
if (m_surface)
m_surface->stop();
#ifdef CUSTOM_EVR_PRESENTER
if (m_presenterActivate) {
m_presenterActivate->ShutdownObject();
m_presenterActivate->Release();
m_presenterActivate = NULL;
}
#endif
if (m_currentActivate) {
m_currentActivate->ShutdownObject();
@@ -2301,21 +2293,16 @@ void MFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged()));
}
#ifdef CUSTOM_EVR_PRESENTER
if (m_presenterActivate)
m_presenterActivate->setSurface(m_surface);
else
#endif
if (m_currentActivate)
else if (m_currentActivate)
static_cast<VideoRendererActivate*>(m_currentActivate)->setSurface(m_surface);
}
void MFVideoRendererControl::customEvent(QEvent *event)
{
#ifdef CUSTOM_EVR_PRESENTER
if (m_presenterActivate)
return;
#endif
if (!m_currentActivate)
return;
@@ -2346,16 +2333,17 @@ void MFVideoRendererControl::customEvent(QEvent *event)
void MFVideoRendererControl::supportedFormatsChanged()
{
if (m_presenterActivate)
return;
if (m_currentActivate)
static_cast<VideoRendererActivate*>(m_currentActivate)->supportedFormatsChanged();
}
void MFVideoRendererControl::present()
{
#ifdef CUSTOM_EVR_PRESENTER
if (m_presenterActivate)
return;
#endif
if (m_currentActivate)
static_cast<VideoRendererActivate*>(m_currentActivate)->present();
@@ -2367,28 +2355,19 @@ IMFActivate* MFVideoRendererControl::createActivate()
clear();
#ifdef CUSTOM_EVR_PRESENTER
// We can use the EVR with our custom presenter only if the surface supports OpenGL
// texture handles. We also require ANGLE (due to the D3D interop).
if (!m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()
&& QMediaOpenGLHelper::isANGLE()) {
// Create the EVR media sink, but replace the presenter with our own
if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
m_presenterActivate = new EVRCustomPresenterActivate;
m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
}
}
#endif
if (!m_currentActivate)
// Create the EVR media sink, but replace the presenter with our own
if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
m_presenterActivate = new EVRCustomPresenterActivate;
m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
} else {
m_currentActivate = new VideoRendererActivate(this);
}
setSurface(m_surface);
return m_currentActivate;
}
#ifdef CUSTOM_EVR_PRESENTER
EVRCustomPresenterActivate::EVRCustomPresenterActivate()
: MFAbstractActivate()
@@ -2438,7 +2417,5 @@ void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface)
m_presenter->setSurface(surface);
}
#endif // CUSTOM_EVR_PRESENTER
#include "moc_mfvideorenderercontrol.cpp"
#include "mfvideorenderercontrol.moc"

View File

@@ -40,9 +40,7 @@
QT_USE_NAMESPACE
#ifdef CUSTOM_EVR_PRESENTER
class EVRCustomPresenterActivate;
#endif
class MFVideoRendererControl : public QVideoRendererControl
{
@@ -71,9 +69,7 @@ private:
IMFActivate *m_currentActivate;
IMFSampleGrabberSinkCallback *m_callback;
#ifdef CUSTOM_EVR_PRESENTER
EVRCustomPresenterActivate *m_presenterActivate;
#endif
};
#endif