DirectShow: use the EVR in the renderer control.

As with the window control, the existing code from the WMF plugin has
been refactored out and is now shared by both plugins.
This enables HW-accelerated video decoding in QML, QGraphicsVideoItem
and custom QAbstractVideoSurfaces (ANGLE is required).

Task-number: QTBUG-45593
Change-Id: I1d4dbf5695cdd4dbee93f9f4a957fa4d813aa85d
Reviewed-by: Christian Stromme <christian.stromme@theqtcompany.com>
Author: Yoann Lopes
Date: 2015-11-05 13:48:40 +01:00
Commit: 36549dbe14 (parent 963a534e33)
19 changed files with 706 additions and 402 deletions
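To illustrate the "custom QAbstractVideoSurfaces" case mentioned in the commit message, a minimal surface that accepts the GL-texture frames produced by the new presenter could look like the following sketch (editorial illustration only, not part of this change; the class name is made up):

#include <QAbstractVideoSurface>
#include <QVideoFrame>

class GLTextureSurface : public QAbstractVideoSurface
{
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
            QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const Q_DECL_OVERRIDE
    {
        // The custom EVR presenter delivers RGB32 frames backed by a GL texture.
        if (type == QAbstractVideoBuffer::GLTextureHandle)
            return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB32;
        return QList<QVideoFrame::PixelFormat>();
    }

    bool present(const QVideoFrame &frame) Q_DECL_OVERRIDE
    {
        // For GLTextureHandle frames, handle() carries the GL texture id.
        const uint textureId = frame.handle().toUInt();
        Q_UNUSED(textureId);
        return true;
    }
};

Note that D3DPresentEngine::createOffscreenTexture() below reads a "GLContext" property from the surface to set up context sharing, so a surface used on this path is expected to expose one.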


@@ -2,7 +2,27 @@ INCLUDEPATH += $$PWD/evr
 qtHaveModule(widgets): QT += widgets
-HEADERS += $$PWD/evr/evrvideowindowcontrol.h \
-           $$PWD/evr/evrdefs.h
+HEADERS += \
+    $$PWD/evr/evrvideowindowcontrol.h \
+    $$PWD/evr/evrdefs.h
-SOURCES += $$PWD/evr/evrvideowindowcontrol.cpp
+SOURCES += \
+    $$PWD/evr/evrvideowindowcontrol.cpp \
+    $$PWD/evr/evrdefs.cpp
+contains(QT_CONFIG, angle)|contains(QT_CONFIG, dynamicgl) {
+    LIBS += -lmf -lmfplat -lmfuuid -ld3d9 -ldxva2 -lwinmm -levr
+    QT += gui-private
+    DEFINES += CUSTOM_EVR_PRESENTER
+    HEADERS += \
+        $$PWD/evr/evrcustompresenter.h \
+        $$PWD/evr/evrd3dpresentengine.h \
+        $$PWD/evr/evrhelpers.h
+    SOURCES += \
+        $$PWD/evr/evrcustompresenter.cpp \
+        $$PWD/evr/evrd3dpresentengine.cpp \
+        $$PWD/evr/evrhelpers.cpp
+}
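The CUSTOM_EVR_PRESENTER define set in the scope above is what gates the new presenter; consuming code would typically be compiled conditionally, roughly as in this sketch (hypothetical; installPresenter() is a made-up name, and ownership/reference counting is omitted):

#include <QAbstractVideoSurface>
#ifdef CUSTOM_EVR_PRESENTER
#  include "evrcustompresenter.h"
#endif

static void installPresenter(IUnknown *evr, QAbstractVideoSurface *surface)
{
#ifdef CUSTOM_EVR_PRESENTER
    // Only available when Qt is configured with ANGLE or dynamic OpenGL.
    EVRCustomPresenter *presenter = new EVRCustomPresenter;
    presenter->setSurface(surface);
    qt_evr_setCustomPresenter(evr, presenter);
#else
    Q_UNUSED(evr);
    Q_UNUSED(surface);
#endif
}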

File diff suppressed because it is too large.


@@ -0,0 +1,358 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef EVRCUSTOMPRESENTER_H
#define EVRCUSTOMPRESENTER_H
#include <QObject>
#include <qmutex.h>
#include <qqueue.h>
#include "evrdefs.h"
QT_USE_NAMESPACE
class D3DPresentEngine;
class QAbstractVideoSurface;
template<class T>
class AsyncCallback : public IMFAsyncCallback
{
public:
typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
{
}
// IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
{
if (!ppv)
return E_POINTER;
if (iid == __uuidof(IUnknown)) {
*ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
} else if (iid == __uuidof(IMFAsyncCallback)) {
*ppv = static_cast<IMFAsyncCallback*>(this);
} else {
*ppv = NULL;
return E_NOINTERFACE;
}
AddRef();
return S_OK;
}
STDMETHODIMP_(ULONG) AddRef() {
// Delegate to parent class.
return m_parent->AddRef();
}
STDMETHODIMP_(ULONG) Release() {
// Delegate to parent class.
return m_parent->Release();
}
// IMFAsyncCallback methods
STDMETHODIMP GetParameters(DWORD*, DWORD*)
{
// Implementation of this method is optional.
return E_NOTIMPL;
}
STDMETHODIMP Invoke(IMFAsyncResult* asyncResult)
{
return (m_parent->*m_invokeFn)(asyncResult);
}
T *m_parent;
InvokeFn m_invokeFn;
};
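// Usage note (editorial, not in the original source): EVRCustomPresenter below
// declares the member
//     AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
// which would be constructed with (this, &EVRCustomPresenter::onSampleFree), so
// that when Media Foundation completes the asynchronous operation it calls
// Invoke(), which in turn forwards to onSampleFree() on the presenter.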
class Scheduler
{
public:
enum ScheduleEvent
{
Terminate = WM_USER,
Schedule = WM_USER + 1,
Flush = WM_USER + 2
};
Scheduler();
~Scheduler();
void setCallback(QObject *cb) {
m_CB = cb;
}
void setFrameRate(const MFRatio &fps);
void setClockRate(float rate) { m_playbackRate = rate; }
const LONGLONG &lastSampleTime() const { return m_lastSampleTime; }
const LONGLONG &frameDuration() const { return m_perFrameInterval; }
HRESULT startScheduler(IMFClock *clock);
HRESULT stopScheduler();
HRESULT scheduleSample(IMFSample *sample, bool presentNow);
HRESULT processSamplesInQueue(LONG *nextSleep);
HRESULT processSample(IMFSample *sample, LONG *nextSleep);
HRESULT flush();
// ThreadProc for the scheduler thread.
static DWORD WINAPI schedulerThreadProc(LPVOID parameter);
private:
DWORD schedulerThreadProcPrivate();
QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented.
IMFClock *m_clock; // Presentation clock. Can be NULL.
QObject *m_CB; // Weak reference; do not delete.
DWORD m_threadID;
HANDLE m_schedulerThread;
HANDLE m_threadReadyEvent;
HANDLE m_flushEvent;
float m_playbackRate;
MFTIME m_perFrameInterval; // Duration of each frame.
LONGLONG m_perFrame_1_4th; // 1/4th of the frame duration.
MFTIME m_lastSampleTime; // Most recent sample time.
QMutex m_mutex;
};
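// Editorial summary of the interface above: the presenter configures the frame
// and clock rates, calls startScheduler() with the presentation clock, queues
// frames through scheduleSample(), and calls flush()/stopScheduler() when
// streaming ends; schedulerThreadProc() runs on a worker thread and presents
// the queued samples at (approximately) the right time.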
class SamplePool
{
public:
SamplePool();
~SamplePool();
HRESULT initialize(QList<IMFSample*> &samples);
HRESULT clear();
HRESULT getSample(IMFSample **sample);
HRESULT returnSample(IMFSample *sample);
BOOL areSamplesPending();
private:
QMutex m_mutex;
QList<IMFSample*> m_videoSampleQueue;
bool m_initialized;
DWORD m_pending;
};
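// Editorial note: the pool is presumably filled via initialize() with the
// samples produced by D3DPresentEngine::createVideoSamples(); the presenter
// then borrows a buffer with getSample() for each mixer output and hands it
// back with returnSample() once the sample has been presented.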
class EVRCustomPresenter
: public QObject
, public IMFVideoDeviceID
, public IMFVideoPresenter // Inherits IMFClockStateSink
, public IMFRateSupport
, public IMFGetService
, public IMFTopologyServiceLookupClient
{
Q_OBJECT
public:
// Defines the state of the presenter.
enum RenderState
{
RenderStarted = 1,
RenderStopped,
RenderPaused,
RenderShutdown // Initial state.
};
// Defines the presenter's state with respect to frame-stepping.
enum FrameStepState
{
FrameStepNone, // Not frame stepping.
FrameStepWaitingStart, // Frame stepping, but the clock is not started.
FrameStepPending, // Clock is started. Waiting for samples.
FrameStepScheduled, // Submitted a sample for rendering.
FrameStepComplete // Sample was rendered.
};
EVRCustomPresenter();
~EVRCustomPresenter();
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID riid, void ** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
// IMFGetService methods
STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject);
// IMFVideoPresenter methods
STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param);
STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType);
// IMFClockStateSink methods
STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset);
STDMETHODIMP OnClockStop(MFTIME systemTime);
STDMETHODIMP OnClockPause(MFTIME systemTime);
STDMETHODIMP OnClockRestart(MFTIME systemTime);
STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate);
// IMFRateSupport methods
STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate);
// IMFVideoDeviceID methods
STDMETHODIMP GetDeviceID(IID* deviceID);
// IMFTopologyServiceLookupClient methods
STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup);
STDMETHODIMP ReleaseServicePointers();
void supportedFormatsChanged();
void setSurface(QAbstractVideoSurface *surface);
private Q_SLOTS:
void startSurface();
void stopSurface();
private:
HRESULT checkShutdown() const
{
if (m_renderState == RenderShutdown)
return MF_E_SHUTDOWN;
else
return S_OK;
}
// The "active" state is started or paused.
inline bool isActive() const
{
return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
}
// Scrubbing occurs when the playback rate is 0.
inline bool isScrubbing() const { return m_playbackRate == 0.0f; }
// Send an event to the EVR through its IMediaEventSink interface.
void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
{
if (m_mediaEventSink)
m_mediaEventSink->Notify(eventCode, param1, param2);
}
float getMaxRate(bool thin);
// Mixer operations
HRESULT configureMixer(IMFTransform *mixer);
// Formats
HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
HRESULT setMediaType(IMFMediaType *mediaType);
HRESULT isMediaTypeSupported(IMFMediaType *mediaType);
// Message handlers
HRESULT flush();
HRESULT renegotiateMediaType();
HRESULT processInputNotify();
HRESULT beginStreaming();
HRESULT endStreaming();
HRESULT checkEndOfStream();
// Managing samples
void processOutputLoop();
HRESULT processOutput();
HRESULT deliverSample(IMFSample *sample, bool repaint);
HRESULT trackSample(IMFSample *sample);
void releaseResources();
// Frame-stepping
HRESULT prepareFrameStep(DWORD steps);
HRESULT startFrameStep();
HRESULT deliverFrameStepSample(IMFSample *sample);
HRESULT completeFrameStep(IMFSample *sample);
HRESULT cancelFrameStep();
// Callback when a video sample is released.
HRESULT onSampleFree(IMFAsyncResult *result);
AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
// Holds information related to frame-stepping.
struct FrameStep
{
FrameStep()
: state(FrameStepNone)
, steps(0)
, sampleNoRef(0)
{
}
FrameStepState state;
QList<IMFSample*> samples;
DWORD steps;
DWORD_PTR sampleNoRef;
};
long m_refCount;
RenderState m_renderState;
FrameStep m_frameStep;
QMutex m_mutex;
// Samples and scheduling
Scheduler m_scheduler; // Manages scheduling of samples.
SamplePool m_samplePool; // Pool of allocated samples.
DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.
// Rendering state
bool m_sampleNotify; // Did the mixer signal it has an input sample?
bool m_repaint; // Do we need to repaint the last sample?
bool m_prerolled; // Have we presented at least one sample?
bool m_endStreaming; // Did we reach the end of the stream (EOS)?
MFVideoNormalizedRect m_sourceRect;
float m_playbackRate;
D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.)
IMFClock *m_clock; // The EVR's clock.
IMFTransform *m_mixer; // The EVR's mixer.
IMediaEventSink *m_mediaEventSink; // The EVR's event-sink interface.
IMFMediaType *m_mediaType; // Output media type
QAbstractVideoSurface *m_surface;
QList<DWORD> m_supportedGLFormats;
};
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
#endif // EVRCUSTOMPRESENTER_H
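The implementation of qt_evr_setCustomPresenter() lives in evrcustompresenter.cpp, whose diff is suppressed above. Based on the IMFVideoRenderer interface declared in evrdefs.h, a plausible sketch of it would be (illustrative only, not the actual code):

bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
{
    IMFVideoRenderer *renderer = NULL;
    if (FAILED(evr->QueryInterface(IID_PPV_ARGS(&renderer))))
        return false;

    // Passing NULL for the mixer keeps the EVR's default mixer; only the
    // presenter is replaced.
    const bool ok = SUCCEEDED(renderer->InitializeRenderer(NULL, presenter));
    renderer->Release();
    return ok;
}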


@@ -0,0 +1,655 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "evrd3dpresentengine.h"
#include "evrhelpers.h"
#include <qtgui/qguiapplication.h>
#include <qpa/qplatformnativeinterface.h>
#include <qtgui/qopenglcontext.h>
#include <qabstractvideobuffer.h>
#include <QAbstractVideoSurface>
#include <qvideoframe.h>
#include <QDebug>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
#include <qwindow.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <d3d9.h>
#include <dxva2api.h>
#include <WinUser.h>
#include <evr.h>
static const int PRESENTER_BUFFER_COUNT = 3;
class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
TextureVideoBuffer(GLuint textureId)
: QAbstractVideoBuffer(GLTextureHandle)
, m_textureId(textureId)
{}
~TextureVideoBuffer() {}
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
void unmap() {}
QVariant handle() const
{
return QVariant::fromValue<unsigned int>(m_textureId);
}
private:
GLuint m_textureId;
};
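// Editorial note: presentSample() below wraps the shared GL texture
// (m_glTexture) in a TextureVideoBuffer and hands the resulting QVideoFrame to
// the QAbstractVideoSurface; consumers read the texture id back out of
// QVideoFrame::handle().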
EGLWrapper::EGLWrapper()
{
#ifndef QT_OPENGL_ES_2_ANGLE_STATIC
// Resolve the EGL functions we use. When configured for dynamic OpenGL, no
// component in Qt will link to libEGL.lib and libGLESv2.lib. We know
// however that libEGL is loaded for sure, since this is an ANGLE-only path.
# ifdef QT_DEBUG
HMODULE eglHandle = GetModuleHandle(L"libEGLd.dll");
# else
HMODULE eglHandle = GetModuleHandle(L"libEGL.dll");
# endif
if (!eglHandle)
qWarning("No EGL library loaded");
m_eglGetProcAddress = (EglGetProcAddress) GetProcAddress(eglHandle, "eglGetProcAddress");
m_eglCreatePbufferSurface = (EglCreatePbufferSurface) GetProcAddress(eglHandle, "eglCreatePbufferSurface");
m_eglDestroySurface = (EglDestroySurface) GetProcAddress(eglHandle, "eglDestroySurface");
m_eglBindTexImage = (EglBindTexImage) GetProcAddress(eglHandle, "eglBindTexImage");
m_eglReleaseTexImage = (EglReleaseTexImage) GetProcAddress(eglHandle, "eglReleaseTexImage");
#else
// Static ANGLE-only build. There is no libEGL.dll in use.
m_eglGetProcAddress = ::eglGetProcAddress;
m_eglCreatePbufferSurface = ::eglCreatePbufferSurface;
m_eglDestroySurface = ::eglDestroySurface;
m_eglBindTexImage = ::eglBindTexImage;
m_eglReleaseTexImage = ::eglReleaseTexImage;
#endif
}
__eglMustCastToProperFunctionPointerType EGLWrapper::getProcAddress(const char *procname)
{
Q_ASSERT(m_eglGetProcAddress);
return m_eglGetProcAddress(procname);
}
EGLSurface EGLWrapper::createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list)
{
Q_ASSERT(m_eglCreatePbufferSurface);
return m_eglCreatePbufferSurface(dpy, config, attrib_list);
}
EGLBoolean EGLWrapper::destroySurface(EGLDisplay dpy, EGLSurface surface)
{
Q_ASSERT(m_eglDestroySurface);
return m_eglDestroySurface(dpy, surface);
}
EGLBoolean EGLWrapper::bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer)
{
Q_ASSERT(m_eglBindTexImage);
return m_eglBindTexImage(dpy, surface, buffer);
}
EGLBoolean EGLWrapper::releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer)
{
Q_ASSERT(m_eglReleaseTexImage);
return m_eglReleaseTexImage(dpy, surface, buffer);
}
D3DPresentEngine::D3DPresentEngine()
: QObject()
, m_mutex(QMutex::Recursive)
, m_deviceResetToken(0)
, m_D3D9(0)
, m_device(0)
, m_deviceManager(0)
, m_surface(0)
, m_glContext(0)
, m_offscreenSurface(0)
, m_eglDisplay(0)
, m_eglConfig(0)
, m_eglSurface(0)
, m_glTexture(0)
, m_texture(0)
, m_egl(0)
{
ZeroMemory(&m_displayMode, sizeof(m_displayMode));
HRESULT hr = initializeD3D();
if (SUCCEEDED(hr)) {
hr = createD3DDevice();
if (FAILED(hr))
qWarning("Failed to create D3D device");
} else {
qWarning("Failed to initialize D3D");
}
}
D3DPresentEngine::~D3DPresentEngine()
{
qt_evr_safe_release(&m_texture);
qt_evr_safe_release(&m_device);
qt_evr_safe_release(&m_deviceManager);
qt_evr_safe_release(&m_D3D9);
if (m_eglSurface) {
m_egl->releaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
m_egl->destroySurface(m_eglDisplay, m_eglSurface);
m_eglSurface = NULL;
}
if (m_glTexture) {
if (QOpenGLContext *current = QOpenGLContext::currentContext())
current->functions()->glDeleteTextures(1, &m_glTexture);
else
qWarning() << "D3DPresentEngine: Cannot obtain GL context, unable to delete textures";
}
delete m_glContext;
delete m_offscreenSurface;
delete m_egl;
}
void D3DPresentEngine::start()
{
QMutexLocker locker(&m_mutex);
if (!m_surfaceFormat.isValid())
return;
if (!m_texture)
createOffscreenTexture();
if (m_surface && !m_surface->isActive())
m_surface->start(m_surfaceFormat);
}
void D3DPresentEngine::stop()
{
QMutexLocker locker(&m_mutex);
if (m_surface && m_surface->isActive())
m_surface->stop();
}
HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
{
HRESULT hr = S_OK;
if (riid == __uuidof(IDirect3DDeviceManager9)) {
if (m_deviceManager == NULL) {
hr = MF_E_UNSUPPORTED_SERVICE;
} else {
*ppv = m_deviceManager;
m_deviceManager->AddRef();
}
} else {
hr = MF_E_UNSUPPORTED_SERVICE;
}
return hr;
}
HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
{
HRESULT hr = S_OK;
UINT uAdapter = D3DADAPTER_DEFAULT;
D3DDEVTYPE type = D3DDEVTYPE_HAL;
D3DDISPLAYMODE mode;
D3DDEVICE_CREATION_PARAMETERS params;
// Our shared D3D/EGL surface only supports RGB32;
// reject all other formats.
if (format != D3DFMT_X8R8G8B8)
return MF_E_INVALIDMEDIATYPE;
if (m_device) {
hr = m_device->GetCreationParameters(&params);
if (FAILED(hr))
return hr;
uAdapter = params.AdapterOrdinal;
type = params.DeviceType;
}
hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
if (FAILED(hr))
return hr;
return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE);
}
HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
{
if (!format)
return MF_E_UNEXPECTED;
HRESULT hr = S_OK;
D3DPRESENT_PARAMETERS pp;
IDirect3DSwapChain9 *swapChain = NULL;
IMFSample *videoSample = NULL;
QMutexLocker locker(&m_mutex);
releaseResources();
// Get the swap chain parameters from the media type.
hr = getSwapChainPresentParameters(format, &pp);
if (FAILED(hr))
goto done;
// Create the video samples.
for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
// Create a new swap chain.
hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
if (FAILED(hr))
goto done;
// Create the video sample from the swap chain.
hr = createD3DSample(swapChain, &videoSample);
if (FAILED(hr))
goto done;
// Add it to the list.
videoSample->AddRef();
videoSampleQueue.append(videoSample);
// Set the swap chain pointer as a custom attribute on the sample. This keeps
// a reference count on the swap chain, so that the swap chain is kept alive
// for the duration of the sample's lifetime.
hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain);
if (FAILED(hr))
goto done;
qt_evr_safe_release(&videoSample);
qt_evr_safe_release(&swapChain);
}
done:
if (FAILED(hr))
releaseResources();
qt_evr_safe_release(&swapChain);
qt_evr_safe_release(&videoSample);
return hr;
}
void D3DPresentEngine::releaseResources()
{
}
void D3DPresentEngine::presentSample(void *opaque, qint64)
{
HRESULT hr = S_OK;
IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
IMFMediaBuffer* buffer = NULL;
IDirect3DSurface9* surface = NULL;
if (m_surface && m_surface->isActive()) {
if (sample) {
// Get the buffer from the sample.
hr = sample->GetBufferByIndex(0, &buffer);
if (FAILED(hr))
goto done;
// Get the surface from the buffer.
hr = MFGetService(buffer, mr_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
if (FAILED(hr))
goto done;
}
if (surface && updateTexture(surface)) {
QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
m_surfaceFormat.frameSize(),
m_surfaceFormat.pixelFormat());
// WMF uses 100-nanosecond units, Qt uses microseconds
LONGLONG startTime = -1;
if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
frame.setStartTime(startTime * 0.1);
LONGLONG duration = -1;
if (SUCCEEDED(sample->GetSampleDuration(&duration)))
frame.setEndTime((startTime + duration) * 0.1);
}
m_surface->present(frame);
}
}
done:
qt_evr_safe_release(&surface);
qt_evr_safe_release(&buffer);
qt_evr_safe_release(&sample);
}
void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface)
{
QMutexLocker locker(&m_mutex);
m_surface = surface;
}
void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format)
{
QMutexLocker locker(&m_mutex);
m_surfaceFormat = format;
}
void D3DPresentEngine::createOffscreenTexture()
{
// First, check if we have a context on this thread
QOpenGLContext *currentContext = QOpenGLContext::currentContext();
if (!currentContext) {
//Create OpenGL context and set share context from surface
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
if (!shareContext)
return;
m_offscreenSurface = new QWindow;
m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
//Needs geometry to be a valid surface, but size is not important
m_offscreenSurface->setGeometry(-1, -1, 1, 1);
m_offscreenSurface->create();
m_glContext = new QOpenGLContext;
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
m_glContext->setShareContext(shareContext);
if (!m_glContext->create()) {
delete m_glContext;
delete m_offscreenSurface;
m_glContext = 0;
m_offscreenSurface = 0;
return;
}
currentContext = m_glContext;
}
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
if (!m_egl)
m_egl = new EGLWrapper;
QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface();
m_eglDisplay = static_cast<EGLDisplay*>(
nativeInterface->nativeResourceForContext("eglDisplay", currentContext));
m_eglConfig = static_cast<EGLConfig*>(
nativeInterface->nativeResourceForContext("eglConfig", currentContext));
currentContext->functions()->glGenTextures(1, &m_glTexture);
int w = m_surfaceFormat.frameWidth();
int h = m_surfaceFormat.frameHeight();
bool hasAlpha = currentContext->format().hasAlpha();
EGLint attribs[] = {
EGL_WIDTH, w,
EGL_HEIGHT, h,
EGL_TEXTURE_FORMAT, hasAlpha ? EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB,
EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
EGL_NONE
};
EGLSurface pbuffer = m_egl->createPbufferSurface(m_eglDisplay, m_eglConfig, attribs);
HANDLE share_handle = 0;
PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE =
reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_egl->getProcAddress("eglQuerySurfacePointerANGLE"));
Q_ASSERT(eglQuerySurfacePointerANGLE);
eglQuerySurfacePointerANGLE(
m_eglDisplay,
pbuffer,
EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle);
m_device->CreateTexture(w, h, 1,
D3DUSAGE_RENDERTARGET,
hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8,
D3DPOOL_DEFAULT,
&m_texture,
&share_handle);
m_eglSurface = pbuffer;
if (m_glContext)
m_glContext->doneCurrent();
}
bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src)
{
if (!m_texture)
return false;
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glTexture);
IDirect3DSurface9 *dest = NULL;
// Copy the sample surface to the shared D3D/EGL surface
HRESULT hr = m_texture->GetSurfaceLevel(0, &dest);
if (FAILED(hr))
goto done;
hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE);
if (FAILED(hr))
qWarning("Failed to copy D3D surface");
if (hr == S_OK)
m_egl->bindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
done:
qt_evr_safe_release(&dest);
if (m_glContext)
m_glContext->doneCurrent();
return SUCCEEDED(hr);
}
HRESULT D3DPresentEngine::initializeD3D()
{
HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9);
if (SUCCEEDED(hr))
hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager);
return hr;
}
HRESULT D3DPresentEngine::createD3DDevice()
{
HRESULT hr = S_OK;
HWND hwnd = NULL;
UINT uAdapterID = D3DADAPTER_DEFAULT;
DWORD vp = 0;
D3DCAPS9 ddCaps;
ZeroMemory(&ddCaps, sizeof(ddCaps));
IDirect3DDevice9Ex* device = NULL;
// Hold the lock because we might be discarding an existing device.
QMutexLocker locker(&m_mutex);
if (!m_D3D9 || !m_deviceManager)
return MF_E_NOT_INITIALIZED;
hwnd = ::GetShellWindow();
// Note: The presenter creates additional swap chains to present the
// video frames. Therefore, it does not use the device's implicit
// swap chain, so the size of the back buffer here is 1 x 1.
D3DPRESENT_PARAMETERS pp;
ZeroMemory(&pp, sizeof(pp));
pp.BackBufferWidth = 1;
pp.BackBufferHeight = 1;
pp.BackBufferFormat = D3DFMT_UNKNOWN;
pp.BackBufferCount = 1;
pp.Windowed = TRUE;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.BackBufferFormat = D3DFMT_UNKNOWN;
pp.hDeviceWindow = hwnd;
pp.Flags = D3DPRESENTFLAG_VIDEO;
pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps);
if (FAILED(hr))
goto done;
if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
else
vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
hr = m_D3D9->CreateDeviceEx(
uAdapterID,
D3DDEVTYPE_HAL,
pp.hDeviceWindow,
vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
&pp,
NULL,
&device
);
if (FAILED(hr))
goto done;
hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode);
if (FAILED(hr))
goto done;
hr = m_deviceManager->ResetDevice(device, m_deviceResetToken);
if (FAILED(hr))
goto done;
qt_evr_safe_release(&m_device);
m_device = device;
m_device->AddRef();
done:
qt_evr_safe_release(&device);
return hr;
}
HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample)
{
D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00);
IDirect3DSurface9* surface = NULL;
IMFSample* sample = NULL;
// Get the back buffer surface.
HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface);
if (FAILED(hr))
goto done;
// Fill it with black.
hr = m_device->ColorFill(surface, NULL, clrBlack);
if (FAILED(hr))
goto done;
hr = MFCreateVideoSampleFromSurface(surface, &sample);
if (FAILED(hr))
goto done;
*videoSample = sample;
(*videoSample)->AddRef();
done:
qt_evr_safe_release(&surface);
qt_evr_safe_release(&sample);
return hr;
}
HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp)
{
ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
// Get some information about the video format.
UINT32 width = 0, height = 0;
HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
if (FAILED(hr))
return hr;
DWORD d3dFormat = 0;
hr = qt_evr_getFourCC(type, &d3dFormat);
if (FAILED(hr))
return hr;
ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
pp->BackBufferWidth = width;
pp->BackBufferHeight = height;
pp->Windowed = TRUE;
pp->SwapEffect = D3DSWAPEFFECT_DISCARD;
pp->BackBufferFormat = (D3DFORMAT)d3dFormat;
pp->hDeviceWindow = ::GetShellWindow();
pp->Flags = D3DPRESENTFLAG_VIDEO;
pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
D3DDEVICE_CREATION_PARAMETERS params;
hr = m_device->GetCreationParameters(&params);
if (FAILED(hr))
return hr;
if (params.DeviceType != D3DDEVTYPE_HAL)
pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER;
return S_OK;
}
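For orientation, the frame path implemented by this file can be summarized as follows (editorial summary of the code above):

// createOffscreenTexture(): create an ANGLE pbuffer, query its
//     EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, and create m_texture from that
//     share handle so Direct3D and OpenGL address the same video memory.
// presentSample(): extract the IDirect3DSurface9 from the IMFSample via
//     MFGetService(mr_BUFFER_SERVICE, ...).
// updateTexture(): StretchRect() the sample surface into m_texture, then
//     eglBindTexImage() so m_glTexture picks up the new contents.
// presentSample(), continued: wrap m_glTexture in a TextureVideoBuffer and
//     present the QVideoFrame to the surface, converting the sample timestamps
//     from 100-nanosecond units to microseconds.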


@@ -0,0 +1,146 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef EVRD3DPRESENTENGINE_H
#define EVRD3DPRESENTENGINE_H
#include <QObject>
#include <EGL/egl.h>
#include <QMutex>
#include <d3d9types.h>
#include <QVideoSurfaceFormat>
struct IDirect3D9Ex;
struct IDirect3DDevice9;
struct IDirect3DDevice9Ex;
struct IDirect3DDeviceManager9;
struct IDirect3DSurface9;
struct IDirect3DTexture9;
struct IMFSample;
struct IMFMediaType;
struct IDirect3DSwapChain9;
// Randomly generated GUIDs
static const GUID MFSamplePresenter_SampleCounter =
{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
static const GUID MFSamplePresenter_SampleSwapChain =
{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } };
QT_USE_NAMESPACE
class QAbstractVideoSurface;
class QOpenGLContext;
class EGLWrapper
{
public:
EGLWrapper();
__eglMustCastToProperFunctionPointerType getProcAddress(const char *procname);
EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface);
EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
private:
typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname);
typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface);
typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
EglGetProcAddress m_eglGetProcAddress;
EglCreatePbufferSurface m_eglCreatePbufferSurface;
EglDestroySurface m_eglDestroySurface;
EglBindTexImage m_eglBindTexImage;
EglReleaseTexImage m_eglReleaseTexImage;
};
class D3DPresentEngine : public QObject
{
Q_OBJECT
public:
D3DPresentEngine();
virtual ~D3DPresentEngine();
void start();
void stop();
HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
HRESULT checkFormat(D3DFORMAT format);
HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
void releaseResources();
UINT refreshRate() const { return m_displayMode.RefreshRate; }
void setSurface(QAbstractVideoSurface *surface);
void setSurfaceFormat(const QVideoSurfaceFormat &format);
void createOffscreenTexture();
bool updateTexture(IDirect3DSurface9 *src);
public Q_SLOTS:
void presentSample(void* sample, qint64 llTarget);
private:
HRESULT initializeD3D();
HRESULT getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp);
HRESULT createD3DDevice();
HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample);
QMutex m_mutex;
UINT m_deviceResetToken;
D3DDISPLAYMODE m_displayMode;
IDirect3D9Ex *m_D3D9;
IDirect3DDevice9Ex *m_device;
IDirect3DDeviceManager9 *m_deviceManager;
QVideoSurfaceFormat m_surfaceFormat;
QAbstractVideoSurface *m_surface;
QOpenGLContext *m_glContext;
QWindow *m_offscreenSurface;
EGLDisplay *m_eglDisplay;
EGLConfig *m_eglConfig;
EGLSurface m_eglSurface;
unsigned int m_glTexture;
IDirect3DTexture9 *m_texture;
EGLWrapper *m_egl;
};
#endif // EVRD3DPRESENTENGINE_H


@@ -0,0 +1,42 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "evrdefs.h"
const CLSID clsid_EnhancedVideoRenderer = { 0xfa10746c, 0x9b63, 0x4b6c, {0xbc, 0x49, 0xfc, 0x30, 0xe, 0xa5, 0xf2, 0x56} };
const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} };
const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} };
const GUID mr_BUFFER_SERVICE = { 0xa562248c, 0x9ac6, 0x4ffc, {0x9f, 0xba, 0x3a, 0xf8, 0xf8, 0xad, 0x1a, 0x4d} };
const GUID video_ZOOM_RECT = { 0x7aaa1638, 0x1b7f, 0x4c93, {0xbd, 0x89, 0x5b, 0x9c, 0x9f, 0xb6, 0xfc, 0xf0} };
const GUID iid_IDirect3DDevice9 = { 0xd0223b96, 0xbf7a, 0x43fd, {0x92, 0xbd, 0xa4, 0x3b, 0xd, 0x82, 0xb9, 0xeb} };
const GUID iid_IDirect3DSurface9 = { 0xcfbaf3a, 0x9ff6, 0x429a, {0x99, 0xb3, 0xa2, 0x79, 0x6a, 0xf8, 0xb8, 0x9b} };


@@ -36,10 +36,47 @@
#include <d3d9.h>
#include <Evr9.h>
#include <evr.h>
#include <dxva2api.h>
#include <mfapi.h>
#include <mfidl.h>
#include <Mferror.h>
extern const CLSID clsid_EnhancedVideoRenderer;
extern const GUID mr_VIDEO_RENDER_SERVICE;
extern const GUID mr_VIDEO_MIXER_SERVICE;
extern const GUID mr_BUFFER_SERVICE;
extern const GUID video_ZOOM_RECT;
extern const GUID iid_IDirect3DDevice9;
extern const GUID iid_IDirect3DSurface9;
// The following is required to compile with MinGW
extern "C" {
HRESULT WINAPI MFCreateVideoSampleFromSurface(IUnknown *pUnkSurface, IMFSample **ppSample);
HRESULT WINAPI Direct3DCreate9Ex(UINT SDKVersion, IDirect3D9Ex**);
}
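// Editorial note: these prototypes are spelled out because some MinGW headers
// lack them; the functions themselves are resolved at link time from the
// libraries listed in evr.pri (evr.lib for MFCreateVideoSampleFromSurface,
// d3d9.lib for Direct3DCreate9Ex).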
#ifndef PRESENTATION_CURRENT_POSITION
#define PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff
#endif
#ifndef MF_E_SHUTDOWN
#define MF_E_SHUTDOWN ((HRESULT)0xC00D3E85L)
#endif
#ifndef MF_E_SAMPLEALLOCATOR_EMPTY
#define MF_E_SAMPLEALLOCATOR_EMPTY ((HRESULT)0xC00D4A3EL)
#endif
#ifndef MF_E_TRANSFORM_STREAM_CHANGE
#define MF_E_TRANSFORM_STREAM_CHANGE ((HRESULT)0xC00D6D61L)
#endif
#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT
#define MF_E_TRANSFORM_NEED_MORE_INPUT ((HRESULT)0xC00D6D72L)
#endif
#ifdef __GNUC__
typedef struct MFVideoNormalizedRect {
float left;
@@ -49,6 +86,8 @@ typedef struct MFVideoNormalizedRect {
} MFVideoNormalizedRect;
#endif
#include <initguid.h>
#ifndef __IMFGetService_INTERFACE_DEFINED__
#define __IMFGetService_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFGetService, 0xfa993888, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7);
@@ -123,5 +162,185 @@ __CRT_UUID_DECL(IMFVideoProcessor, 0x6AB0000C, 0xFECE, 0x4d1f, 0xA2,0xAC, 0xA9,0
#endif
#endif // __IMFVideoProcessor_INTERFACE_DEFINED__
#ifndef __IMFVideoDeviceID_INTERFACE_DEFINED__
#define __IMFVideoDeviceID_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA);
MIDL_INTERFACE("A38D9567-5A9C-4f3c-B293-8EB415B279BA")
IMFVideoDeviceID : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE GetDeviceID(IID *pDeviceID) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA)
#endif
#endif // __IMFVideoDeviceID_INTERFACE_DEFINED__
#ifndef __IMFClockStateSink_INTERFACE_DEFINED__
#define __IMFClockStateSink_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F);
MIDL_INTERFACE("F6696E82-74F7-4f3d-A178-8A5E09C3659F")
IMFClockStateSink : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) = 0;
virtual HRESULT STDMETHODCALLTYPE OnClockStop(MFTIME hnsSystemTime) = 0;
virtual HRESULT STDMETHODCALLTYPE OnClockPause(MFTIME hnsSystemTime) = 0;
virtual HRESULT STDMETHODCALLTYPE OnClockRestart(MFTIME hnsSystemTime) = 0;
virtual HRESULT STDMETHODCALLTYPE OnClockSetRate(MFTIME hnsSystemTime, float flRate) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F)
#endif
#endif // __IMFClockStateSink_INTERFACE_DEFINED__
#ifndef __IMFVideoPresenter_INTERFACE_DEFINED__
#define __IMFVideoPresenter_INTERFACE_DEFINED__
typedef enum MFVP_MESSAGE_TYPE
{
MFVP_MESSAGE_FLUSH = 0,
MFVP_MESSAGE_INVALIDATEMEDIATYPE = 0x1,
MFVP_MESSAGE_PROCESSINPUTNOTIFY = 0x2,
MFVP_MESSAGE_BEGINSTREAMING = 0x3,
MFVP_MESSAGE_ENDSTREAMING = 0x4,
MFVP_MESSAGE_ENDOFSTREAM = 0x5,
MFVP_MESSAGE_STEP = 0x6,
MFVP_MESSAGE_CANCELSTEP = 0x7
} MFVP_MESSAGE_TYPE;
DEFINE_GUID(IID_IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB);
MIDL_INTERFACE("29AFF080-182A-4a5d-AF3B-448F3A6346CB")
IMFVideoPresenter : public IMFClockStateSink
{
public:
virtual HRESULT STDMETHODCALLTYPE ProcessMessage(MFVP_MESSAGE_TYPE eMessage, ULONG_PTR ulParam) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentMediaType(IMFVideoMediaType **ppMediaType) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB)
#endif
#endif // __IMFVideoPresenter_INTERFACE_DEFINED__
#ifndef __IMFRateSupport_INTERFACE_DEFINED__
#define __IMFRateSupport_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d);
MIDL_INTERFACE("0a9ccdbc-d797-4563-9667-94ec5d79292d")
IMFRateSupport : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE GetSlowestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0;
virtual HRESULT STDMETHODCALLTYPE GetFastestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0;
virtual HRESULT STDMETHODCALLTYPE IsRateSupported(BOOL fThin, float flRate, float *pflNearestSupportedRate) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d)
#endif
#endif // __IMFRateSupport_INTERFACE_DEFINED__
#ifndef __IMFTopologyServiceLookup_INTERFACE_DEFINED__
#define __IMFTopologyServiceLookup_INTERFACE_DEFINED__
typedef enum _MF_SERVICE_LOOKUP_TYPE
{
MF_SERVICE_LOOKUP_UPSTREAM = 0,
MF_SERVICE_LOOKUP_UPSTREAM_DIRECT = (MF_SERVICE_LOOKUP_UPSTREAM + 1),
MF_SERVICE_LOOKUP_DOWNSTREAM = (MF_SERVICE_LOOKUP_UPSTREAM_DIRECT + 1),
MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT = (MF_SERVICE_LOOKUP_DOWNSTREAM + 1),
MF_SERVICE_LOOKUP_ALL = (MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT + 1),
MF_SERVICE_LOOKUP_GLOBAL = (MF_SERVICE_LOOKUP_ALL + 1)
} MF_SERVICE_LOOKUP_TYPE;
DEFINE_GUID(IID_IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7);
MIDL_INTERFACE("fa993889-4383-415a-a930-dd472a8cf6f7")
IMFTopologyServiceLookup : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE LookupService(MF_SERVICE_LOOKUP_TYPE Type,
DWORD dwIndex,
REFGUID guidService,
REFIID riid,
LPVOID *ppvObjects,
DWORD *pnObjects) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7)
#endif
#endif // __IMFTopologyServiceLookup_INTERFACE_DEFINED__
#ifndef __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__
#define __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7);
MIDL_INTERFACE("fa99388a-4383-415a-a930-dd472a8cf6f7")
IMFTopologyServiceLookupClient : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE InitServicePointers(IMFTopologyServiceLookup *pLookup) = 0;
virtual HRESULT STDMETHODCALLTYPE ReleaseServicePointers(void) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7)
#endif
#endif // __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__
#ifndef __IMediaEventSink_INTERFACE_DEFINED__
#define __IMediaEventSink_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70);
MIDL_INTERFACE("56a868a2-0ad4-11ce-b03a-0020af0ba770")
IMediaEventSink : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE Notify(long EventCode, LONG_PTR EventParam1, LONG_PTR EventParam2) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70)
#endif
#endif // __IMediaEventSink_INTERFACE_DEFINED__
#ifndef __IMFVideoRenderer_INTERFACE_DEFINED__
#define __IMFVideoRenderer_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD);
MIDL_INTERFACE("DFDFD197-A9CA-43d8-B341-6AF3503792CD")
IMFVideoRenderer : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE InitializeRenderer(IMFTransform *pVideoMixer,
IMFVideoPresenter *pVideoPresenter) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD)
#endif
#endif // __IMFVideoRenderer_INTERFACE_DEFINED__
#ifndef __IMFTrackedSample_INTERFACE_DEFINED__
#define __IMFTrackedSample_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17);
MIDL_INTERFACE("245BF8E9-0755-40f7-88A5-AE0F18D55E17")
IMFTrackedSample : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE SetAllocator(IMFAsyncCallback *pSampleAllocator, IUnknown *pUnkState) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17)
#endif
#endif // __IMFTrackedSample_INTERFACE_DEFINED__
#ifndef __IMFDesiredSample_INTERFACE_DEFINED__
#define __IMFDesiredSample_INTERFACE_DEFINED__
DEFINE_GUID(IID_IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54);
MIDL_INTERFACE("56C294D0-753E-4260-8D61-A3D8820B1D54")
IMFDesiredSample : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE GetDesiredSampleTimeAndDuration(LONGLONG *phnsSampleTime,
LONGLONG *phnsSampleDuration) = 0;
virtual void STDMETHODCALLTYPE SetDesiredSampleTimeAndDuration(LONGLONG hnsSampleTime,
LONGLONG hnsSampleDuration) = 0;
virtual void STDMETHODCALLTYPE Clear( void) = 0;
};
#ifdef __CRT_UUID_DECL
__CRT_UUID_DECL(IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54)
#endif
#endif
#endif // EVRDEFS_H


@@ -0,0 +1,103 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "evrhelpers.h"
HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
{
if (!fourCC)
return E_POINTER;
HRESULT hr = S_OK;
GUID guidSubType = GUID_NULL;
if (SUCCEEDED(hr))
hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
if (SUCCEEDED(hr))
*fourCC = guidSubType.Data1;
return hr;
}
bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
{
if (!type1 && !type2)
return true;
else if (!type1 || !type2)
return false;
DWORD dwFlags = 0;
HRESULT hr = type1->IsEqual(type2, &dwFlags);
return (hr == S_OK);
}
HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
{
float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX);
float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY);
if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
((LONG)fOffsetY + area.Area.cy > (LONG)height) )
return MF_E_INVALIDMEDIATYPE;
else
return S_OK;
}
bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
{
if (!sample || !clock)
return false;
HRESULT hr = S_OK;
MFTIME hnsTimeNow = 0;
MFTIME hnsSystemTime = 0;
MFTIME hnsSampleStart = 0;
MFTIME hnsSampleDuration = 0;
hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
if (SUCCEEDED(hr))
hr = sample->GetSampleTime(&hnsSampleStart);
if (SUCCEEDED(hr))
hr = sample->GetSampleDuration(&hnsSampleDuration);
if (SUCCEEDED(hr)) {
if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
return true;
}
return false;
}


@@ -0,0 +1,85 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef EVRHELPERS_H
#define EVRHELPERS_H
#include "evrdefs.h"
template<class T>
static inline void qt_evr_safe_release(T **unk)
{
if (*unk) {
(*unk)->Release();
*unk = NULL;
}
}
HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC);
bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
inline float qt_evr_MFOffsetToFloat(const MFOffset& offset)
{
return offset.value + (float(offset.fract) / 65536);
}
inline MFOffset qt_evr_makeMFOffset(float v)
{
MFOffset offset;
offset.value = short(v);
offset.fract = WORD(65536 * (v-offset.value));
return offset;
}
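// Worked example (editorial): qt_evr_makeMFOffset(0.25f) produces value = 0 and
// fract = 16384, and qt_evr_MFOffsetToFloat() maps that back to
// 0 + 16384 / 65536 = 0.25; MFOffset is effectively a 16.16 fixed-point offset.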
inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
{
MFVideoArea area;
area.OffsetX = qt_evr_makeMFOffset(x);
area.OffsetY = qt_evr_makeMFOffset(y);
area.Area.cx = width;
area.Area.cy = height;
return area;
}
inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
{
return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator);
}
#endif // EVRHELPERS_H


@@ -65,8 +65,6 @@ bool EvrVideoWindowControl::setEvr(IUnknown *evr)
     if (!evr)
         return true;
-    static const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} };
-    static const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} };
     IMFGetService *service = NULL;
     if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service)))