Add NV12/NV21 support to the SG video node.

Add new fragment shaders and update the declarative render backend (video node)
to support the NV12 and NV21 pixel formats.

Task-number: QTBUG-45021
Change-Id: I5d52007f0da56165752268d06efca156f7496b42
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Author: Timur Pocheptsov
Date:   2015-03-18 11:36:50 +01:00
Parent: 09afe9377d
Commit: 71fc289373
4 changed files with 178 additions and 63 deletions
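
Background on the formats (not part of the patch itself): NV12 and NV21 are biplanar 4:2:0 layouts, a full-resolution Y plane followed by one half-height plane of interleaved chroma pairs (U,V ordering for NV12; V,U for NV21). That is why the new code uploads the second plane as a single two-component GL_LUMINANCE_ALPHA texture and why the NV21 fragment shader differs from the NV12 one only in its .ar versus .ra swizzle. A minimal sketch of the plane arithmetic for a tightly packed frame; the names below are illustrative and do not appear in the patch:

#include <cassert>
#include <cstddef>

// Plane geometry of a tightly packed NV12/NV21 frame of size width x height.
// Uploaded as GL_LUMINANCE_ALPHA, each interleaved chroma pair becomes one
// texel, which matches the "bytesPerLine(uv) / 2" texture width in the patch.
struct BiplanarLayout {
    std::size_t ySize;       // bytes in the Y plane
    std::size_t uvOffset;    // byte offset of the interleaved chroma plane
    std::size_t uvTexWidth;  // texels per chroma row (one texel = one U/V pair)
    std::size_t uvTexHeight; // chroma rows
};

static BiplanarLayout biplanarLayout(std::size_t width, std::size_t height)
{
    BiplanarLayout l;
    l.ySize = width * height;
    l.uvOffset = l.ySize;        // chroma plane starts right after the Y plane
    l.uvTexWidth = width / 2;    // two bytes (U,V or V,U) per texel
    l.uvTexHeight = height / 2;  // 4:2:0 subsampling halves both dimensions
    return l;
}

int main()
{
    const BiplanarLayout l = biplanarLayout(640, 480);
    assert(l.uvOffset == 640 * 480);
    assert(l.uvTexWidth == 320 && l.uvTexHeight == 240);
    return 0;
}

For a 640x480 frame this gives a 320x240 chroma texture, assuming no row padding; with padding, bytesPerLine(uv) / 2 in the patch accounts for the stride.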

qdeclarativevideooutput_render_p.h

@@ -36,7 +36,7 @@
 #define QDECLARATIVEVIDEOOUTPUT_RENDER_P_H

 #include "qdeclarativevideooutput_backend_p.h"
-#include "qsgvideonode_i420.h"
+#include "qsgvideonode_yuv.h"
 #include "qsgvideonode_rgb.h"
 #include "qsgvideonode_texture.h"
@@ -86,7 +86,7 @@ private:
     QOpenGLContext *m_glContext;
     QVideoFrame m_frame;
     bool m_frameChanged;
-    QSGVideoNodeFactory_I420 m_i420Factory;
+    QSGVideoNodeFactory_YUV m_i420Factory;
     QSGVideoNodeFactory_RGB m_rgbFactory;
     QSGVideoNodeFactory_Texture m_textureFactory;
     QMutex m_frameMutex;

qsgvideonode_yuv.cpp (renamed from qsgvideonode_i420.cpp)

@@ -30,7 +30,7 @@
 ** $QT_END_LICENSE$
 **
 ****************************************************************************/
-#include "qsgvideonode_i420.h"
+#include "qsgvideonode_yuv.h"
 #include <QtCore/qmutex.h>
 #include <QtQuick/qsgtexturematerial.h>
 #include <QtQuick/qsgmaterial.h>
@@ -40,21 +40,23 @@
 QT_BEGIN_NAMESPACE

-QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_I420::supportedPixelFormats(
+QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
         QAbstractVideoBuffer::HandleType handleType) const
 {
     QList<QVideoFrame::PixelFormat> formats;

-    if (handleType == QAbstractVideoBuffer::NoHandle)
-        formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12;
+    if (handleType == QAbstractVideoBuffer::NoHandle) {
+        formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12
+                << QVideoFrame::Format_NV12 << QVideoFrame::Format_NV21;
+    }

     return formats;
 }

-QSGVideoNode *QSGVideoNodeFactory_I420::createNode(const QVideoSurfaceFormat &format)
+QSGVideoNode *QSGVideoNodeFactory_YUV::createNode(const QVideoSurfaceFormat &format)
 {
     if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
-        return new QSGVideoNode_I420(format);
+        return new QSGVideoNode_YUV(format);

     return 0;
 }
@@ -136,12 +138,85 @@ protected:
     int m_id_opacity;
 };

-class QSGVideoMaterial_YUV420 : public QSGMaterial
+class QSGVideoMaterialShader_NV_12_21 : public QSGVideoMaterialShader_YUV420
 {
 public:
-    QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format);
-    ~QSGVideoMaterial_YUV420();
+    QSGVideoMaterialShader_NV_12_21(bool isNV21) : m_isNV21(isNV21) {
+    }
+
+    virtual void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
+
+protected:
+    virtual const char *vertexShader() const {
+        const char *shader =
+            "uniform highp mat4 qt_Matrix;                      \n"
+            "uniform highp float yWidth;                        \n"
+            "attribute highp vec4 qt_VertexPosition;            \n"
+            "attribute highp vec2 qt_VertexTexCoord;            \n"
+            "varying highp vec2 yTexCoord;                      \n"
+            "void main() {                                      \n"
+            "    yTexCoord = qt_VertexTexCoord * vec2(yWidth, 1);\n"
+            "    gl_Position = qt_Matrix * qt_VertexPosition;    \n"
+            "}";
+        return shader;
+    }
+
+    virtual const char *fragmentShader() const {
+        static const char *shaderNV12 =
+            "uniform sampler2D yTexture;                              \n"
+            "uniform sampler2D uvTexture;                             \n"
+            "uniform mediump mat4 colorMatrix;                        \n"
+            "uniform lowp float opacity;                              \n"
+            "varying highp vec2 yTexCoord;                            \n"
+            "void main()                                              \n"
+            "{                                                        \n"
+            "    mediump float Y = texture2D(yTexture, yTexCoord).r;  \n"
+            "    mediump vec2 UV = texture2D(uvTexture, yTexCoord).ra;\n"
+            "    mediump vec4 color = vec4(Y, UV.x, UV.y, 1.);        \n"
+            "    gl_FragColor = colorMatrix * color * opacity;        \n"
+            "}";
+        static const char *shaderNV21 =
+            "uniform sampler2D yTexture;                              \n"
+            "uniform sampler2D uvTexture;                             \n"
+            "uniform mediump mat4 colorMatrix;                        \n"
+            "uniform lowp float opacity;                              \n"
+            "varying highp vec2 yTexCoord;                            \n"
+            "void main()                                              \n"
+            "{                                                        \n"
+            "    mediump float Y = texture2D(yTexture, yTexCoord).r;  \n"
+            "    mediump vec2 UV = texture2D(uvTexture, yTexCoord).ar;\n"
+            "    mediump vec4 color = vec4(Y, UV.x, UV.y, 1.);        \n"
+            "    gl_FragColor = colorMatrix * color * opacity;        \n"
+            "}";
+
+        return m_isNV21 ? shaderNV21 : shaderNV12;
+    }
+
+    virtual void initialize() {
+        m_id_yTexture = program()->uniformLocation("yTexture");
+        m_id_uTexture = program()->uniformLocation("uvTexture");
+        m_id_matrix = program()->uniformLocation("qt_Matrix");
+        m_id_yWidth = program()->uniformLocation("yWidth");
+        m_id_colorMatrix = program()->uniformLocation("colorMatrix");
+        m_id_opacity = program()->uniformLocation("opacity");
+    }
+
+private:
+    bool m_isNV21;
+};
+
+class QSGVideoMaterial_YUV : public QSGMaterial
+{
+public:
+    QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format);
+    ~QSGVideoMaterial_YUV();
+
+    bool isNV12_21() const {
+        const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
+        return pf == QVideoFrame::Format_NV12 || pf == QVideoFrame::Format_NV21;
+    }

     virtual QSGMaterialType *type() const {
         static QSGMaterialType theType;
@@ -149,18 +224,25 @@
     }

     virtual QSGMaterialShader *createShader() const {
+        const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
+        if (isNV12_21())
+            return new QSGVideoMaterialShader_NV_12_21(pf == QVideoFrame::Format_NV21);
+
         return new QSGVideoMaterialShader_YUV420;
     }

     virtual int compare(const QSGMaterial *other) const {
-        const QSGVideoMaterial_YUV420 *m = static_cast<const QSGVideoMaterial_YUV420 *>(other);
+        const QSGVideoMaterial_YUV *m = static_cast<const QSGVideoMaterial_YUV *>(other);
+
         int d = m_textureIds[0] - m->m_textureIds[0];
         if (d)
             return d;
-        else if ((d = m_textureIds[1] - m->m_textureIds[1]) != 0)
+
+        d = m_textureIds[1] - m->m_textureIds[1];
+        if (m_textureIds.size() == 2 || d != 0)
             return d;
-        else
-            return m_textureIds[2] - m->m_textureIds[2];
+
+        return m_textureIds[2] - m->m_textureIds[2];
     }

     void updateBlending() {
@@ -173,13 +255,12 @@ public:
     }

     void bind();
-    void bindTexture(int id, int w, int h, const uchar *bits);
+    void bindTexture(int id, int w, int h, const uchar *bits, GLenum format);

     QVideoSurfaceFormat m_format;
     QSize m_textureSize;

-    static const uint Num_Texture_IDs = 3;
-    GLuint m_textureIds[Num_Texture_IDs];
+    QVector<GLuint> m_textureIds;

     qreal m_opacity;
     GLfloat m_yWidth;
@@ -190,13 +271,13 @@ public:
     QMutex m_frameMutex;
 };

-QSGVideoMaterial_YUV420::QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format) :
+QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
     m_format(format),
     m_opacity(1.0),
     m_yWidth(1.0),
     m_uvWidth(1.0)
 {
-    memset(m_textureIds, 0, sizeof(m_textureIds));
+    m_textureIds.resize(isNV12_21() ? 2 : 3);

     switch (format.yCbCrColorSpace()) {
     case QVideoSurfaceFormat::YCbCr_JPEG:
@@ -225,20 +306,19 @@ QSGVideoMaterial_YUV420::QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &form
     setFlag(Blending, false);
 }

-QSGVideoMaterial_YUV420::~QSGVideoMaterial_YUV420()
+QSGVideoMaterial_YUV::~QSGVideoMaterial_YUV()
 {
     if (!m_textureSize.isEmpty()) {
         if (QOpenGLContext *current = QOpenGLContext::currentContext())
-            current->functions()->glDeleteTextures(Num_Texture_IDs, m_textureIds);
+            current->functions()->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
         else
-            qWarning() << "QSGVideoMaterial_YUV420: Cannot obtain GL context, unable to delete textures";
+            qWarning() << "QSGVideoMaterial_YUV: Cannot obtain GL context, unable to delete textures";
     }
 }

-void QSGVideoMaterial_YUV420::bind()
+void QSGVideoMaterial_YUV::bind()
 {
     QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();

     QMutexLocker lock(&m_frameMutex);
     if (m_frame.isValid()) {
         if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
@@ -248,31 +328,43 @@ void QSGVideoMaterial_YUV420::bind()
             // Frame has changed size, recreate textures...
             if (m_textureSize != m_frame.size()) {
                 if (!m_textureSize.isEmpty())
-                    functions->glDeleteTextures(Num_Texture_IDs, m_textureIds);
-                functions->glGenTextures(Num_Texture_IDs, m_textureIds);
+                    functions->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
+                functions->glGenTextures(m_textureIds.size(), &m_textureIds[0]);
                 m_textureSize = m_frame.size();
             }

-            const int y = 0;
-            const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
-            const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
-
-            m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
-            m_uvWidth = qreal(fw) / (2 * m_frame.bytesPerLine(u));
-
             GLint previousAlignment;
             functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
             functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

-            functions->glActiveTexture(GL_TEXTURE1);
-            bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u));
-            functions->glActiveTexture(GL_TEXTURE2);
-            bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v));
-            functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
-            bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y));
+            if (isNV12_21()) {
+                const int y = 0;
+                const int uv = 1;
+
+                m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
+                m_uvWidth = m_yWidth;
+
+                functions->glActiveTexture(GL_TEXTURE1);
+                bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), GL_LUMINANCE_ALPHA);
+                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
+                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
+            } else {
+                const int y = 0;
+                const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
+                const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
+
+                m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
+                m_uvWidth = qreal(fw) / (2 * m_frame.bytesPerLine(u));
+
+                functions->glActiveTexture(GL_TEXTURE1);
+                bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), GL_LUMINANCE);
+                functions->glActiveTexture(GL_TEXTURE2);
+                bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v), GL_LUMINANCE);
+                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
+                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
+            }

             functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);

             m_frame.unmap();
         }
@@ -280,51 +372,52 @@ void QSGVideoMaterial_YUV420::bind()
     } else {
         functions->glActiveTexture(GL_TEXTURE1);
         functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[1]);
-        functions->glActiveTexture(GL_TEXTURE2);
-        functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[2]);
+        if (!isNV12_21()) {
+            functions->glActiveTexture(GL_TEXTURE2);
+            functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[2]);
+        }
         functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
         functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[0]);
     }
 }

-void QSGVideoMaterial_YUV420::bindTexture(int id, int w, int h, const uchar *bits)
+void QSGVideoMaterial_YUV::bindTexture(int id, int w, int h, const uchar *bits, GLenum format)
 {
     QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();

     functions->glBindTexture(GL_TEXTURE_2D, id);
-    functions->glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, bits);
+    functions->glTexImage2D(GL_TEXTURE_2D, 0, format, w, h, 0, format, GL_UNSIGNED_BYTE, bits);
     functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
     functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
     functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
     functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
 }

-QSGVideoNode_I420::QSGVideoNode_I420(const QVideoSurfaceFormat &format) :
+QSGVideoNode_YUV::QSGVideoNode_YUV(const QVideoSurfaceFormat &format) :
     m_format(format)
 {
     setFlag(QSGNode::OwnsMaterial);
-    m_material = new QSGVideoMaterial_YUV420(format);
+    m_material = new QSGVideoMaterial_YUV(format);
     setMaterial(m_material);
 }

-QSGVideoNode_I420::~QSGVideoNode_I420()
+QSGVideoNode_YUV::~QSGVideoNode_YUV()
 {
 }

-void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
+void QSGVideoNode_YUV::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
 {
     m_material->setCurrentFrame(frame);
     markDirty(DirtyMaterial);
 }

 void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
                                                 QSGMaterial *newMaterial,
                                                 QSGMaterial *oldMaterial)
 {
     Q_UNUSED(oldMaterial);
-    QSGVideoMaterial_YUV420 *mat = static_cast<QSGVideoMaterial_YUV420 *>(newMaterial);
+    QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
     program()->setUniformValue(m_id_yTexture, 0);
     program()->setUniformValue(m_id_uTexture, 1);
     program()->setUniformValue(m_id_vTexture, 2);
@@ -342,4 +435,26 @@ void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
         program()->setUniformValue(m_id_matrix, state.combinedMatrix());
 }

+void QSGVideoMaterialShader_NV_12_21::updateState(const RenderState &state,
+                                                  QSGMaterial *newMaterial,
+                                                  QSGMaterial *oldMaterial)
+{
+    Q_UNUSED(oldMaterial);
+    QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
+
+    program()->setUniformValue(m_id_yTexture, 0);
+    program()->setUniformValue(m_id_uTexture, 1);
+
+    mat->bind();
+
+    program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
+    program()->setUniformValue(m_id_yWidth, mat->m_yWidth);
+
+    if (state.isOpacityDirty()) {
+        mat->m_opacity = state.opacity();
+        program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
+    }
+
+    if (state.isMatrixDirty())
+        program()->setUniformValue(m_id_matrix, state.combinedMatrix());
+}
+
 QT_END_NAMESPACE
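
Side note on the colorMatrix uniform used by both fragment shaders: the YUV-to-RGB conversion itself is unchanged, since the matrix is still selected from QVideoSurfaceFormat::yCbCrColorSpace() in the material constructor, so the NV12/NV21 path only changes how Y and UV are fetched, not how they are converted. As a rough CPU-side illustration of what the shader's colorMatrix * vec4(Y, U, V, 1) computes; the coefficients are approximate BT.601 limited-range values shown purely as an example and are not taken from this patch:

#include <algorithm>

// Approximate BT.601 limited-range YCbCr -> RGB, the kind of mapping the
// colorMatrix multiply performs on the GPU (the shader works on normalized
// 0..1 values; 8-bit integers are used here for readability).
static void yuvToRgb601(unsigned char y, unsigned char u, unsigned char v,
                        unsigned char *r, unsigned char *g, unsigned char *b)
{
    const double yf = 1.164 * (y - 16);
    const double uf = u - 128.0;
    const double vf = v - 128.0;

    const double rf = yf + 1.596 * vf;
    const double gf = yf - 0.391 * uf - 0.813 * vf;
    const double bf = yf + 2.018 * uf;

    *r = static_cast<unsigned char>(std::min(255.0, std::max(0.0, rf)));
    *g = static_cast<unsigned char>(std::min(255.0, std::max(0.0, gf)));
    *b = static_cast<unsigned char>(std::min(255.0, std::max(0.0, bf)));
}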

qsgvideonode_yuv.h (renamed from qsgvideonode_i420.h)

@@ -31,20 +31,20 @@
 **
 ****************************************************************************/

-#ifndef QSGVIDEONODE_I420_H
-#define QSGVIDEONODE_I420_H
+#ifndef QSGVIDEONODE_YUV_H
+#define QSGVIDEONODE_YUV_H

 #include <private/qsgvideonode_p.h>
 #include <QtMultimedia/qvideosurfaceformat.h>

 QT_BEGIN_NAMESPACE

-class QSGVideoMaterial_YUV420;
-class QSGVideoNode_I420 : public QSGVideoNode
+class QSGVideoMaterial_YUV;
+class QSGVideoNode_YUV : public QSGVideoNode
 {
 public:
-    QSGVideoNode_I420(const QVideoSurfaceFormat &format);
-    ~QSGVideoNode_I420();
+    QSGVideoNode_YUV(const QVideoSurfaceFormat &format);
+    ~QSGVideoNode_YUV();

     virtual QVideoFrame::PixelFormat pixelFormat() const {
         return m_format.pixelFormat();
@@ -58,10 +58,10 @@ private:
     void bindTexture(int id, int unit, int w, int h, const uchar *bits);

     QVideoSurfaceFormat m_format;
-    QSGVideoMaterial_YUV420 *m_material;
+    QSGVideoMaterial_YUV *m_material;
 };

-class QSGVideoNodeFactory_I420 : public QSGVideoNodeFactoryInterface {
+class QSGVideoNodeFactory_YUV : public QSGVideoNodeFactoryInterface {
 public:
     QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
     QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
@@ -69,4 +69,4 @@ public:
 QT_END_NAMESPACE

-#endif // QSGVIDEONODE_I420_H
+#endif // QSGVIDEONODE_YUV_H

Project file (.pro)

@@ -21,7 +21,7 @@ SOURCES += \
     qdeclarativevideooutput.cpp \
     qdeclarativevideooutput_render.cpp \
     qdeclarativevideooutput_window.cpp \
-    qsgvideonode_i420.cpp \
+    qsgvideonode_yuv.cpp \
     qsgvideonode_rgb.cpp \
     qsgvideonode_texture.cpp
@@ -29,6 +29,6 @@ HEADERS += \
     $$PRIVATE_HEADERS \
     qdeclarativevideooutput_render_p.h \
     qdeclarativevideooutput_window_p.h \
-    qsgvideonode_i420.h \
+    qsgvideonode_yuv.h \
     qsgvideonode_rgb.h \
     qsgvideonode_texture.h