Add video filtering support to VideoOutput

Add the QAbstractVideoFilter base class and integrate it with VideoOutput.

This can be used to perform arbitrary filtering or image processing
on the frames of the video stream shown by a VideoOutput element, right before
the OpenGL texture is provided to the scenegraph by the video node.

This opens up the possibility of integrating computer vision
frameworks or accelerated image processing with Qt Quick applications
that display video streams using Qt Multimedia.

Conceptually this is somewhat similar to QVideoProbe. This approach,
however, allows modifying the frame in real time, with tight integration
into the scenegraph node, and it targets Qt Quick: setting up the filter
and processing the results of the computations happen entirely in QML.
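
As a rough sketch of the resulting API (MyFilter and MyFilterRunnable are
hypothetical names, not part of this patch), a filter is implemented as a
pair of classes:

    class MyFilterRunnable : public QVideoFilterRunnable
    {
    public:
        QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat,
                        RunFlags flags) Q_DECL_OVERRIDE
        {
            // Inspect or modify *input here. Returning it unchanged passes the
            // frame through to the next filter or to the scenegraph.
            return *input;
        }
    };

    class MyFilter : public QAbstractVideoFilter
    {
    public:
        QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE
        { return new MyFilterRunnable; }
    };

After registering MyFilter with qmlRegisterType(), instances can be created in
QML and assigned to the filters list property of VideoOutput.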

[ChangeLog] Added QAbstractVideoFilter that serves as a base class for QML
video filtering elements that integrate compute, vision, and image processing
frameworks with VideoOutput.

Change-Id: Ice1483f8c2daec5a43536978627a7bbb64549480
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Author: Laszlo Agocs
Date: 2015-01-08 14:32:41 +01:00
Committed by: Yoann Lopes
Parent: 2f49444638
Commit: 3e94b7ce2d
33 changed files with 1539 additions and 68 deletions

@@ -0,0 +1,18 @@
This example performs some simple OpenCL operations on camera or video input, which
is assumed to be provided in RGB format. The OpenCL operation is done on an
OpenGL texture using CL-GL interop, without any further readbacks or copies
(except for the initial texture upload, when necessary).
Currently only OS X and Windows with desktop OpenGL (opengl32.dll) are supported.
On Windows you may need to edit qmlvideofilter_opencl.pro to specify the location of the
OpenCL headers and libraries.
Note that an OpenCL implementation with GPU support is required.
The platform and device selection logic supports NVIDIA and Intel.
Porting to other platforms is probably straightforward; see the clCreateContextFromType call.
Note, however, that YUV formats, which are commonly used for camera input
on some platforms, are not supported by this example.
Pass the name of a video file to perform video playback or launch without
arguments to use the camera.
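
As a rough porting sketch (untested, and not part of this example), a Linux
context with CL-GL sharing could be set up with GLX along these lines, using
the standard CL_GL_CONTEXT_KHR and CL_GLX_DISPLAY_KHR context properties:

    // Hypothetical Linux/GLX variant of the interop context setup in main.cpp.
    cl_context_properties contextProps[] = {
        CL_CONTEXT_PLATFORM, (cl_context_properties) platform,
        CL_GL_CONTEXT_KHR, (cl_context_properties) glXGetCurrentContext(),
        CL_GLX_DISPLAY_KHR, (cl_context_properties) glXGetCurrentDisplay(),
        0
    };
    cl_int err;
    cl_context clContext = clCreateContextFromType(contextProps, CL_DEVICE_TYPE_GPU, 0, 0, &err);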

@@ -0,0 +1,481 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the examples of the Qt Multimedia module.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QGuiApplication>
#include <QQuickView>
#include <QOpenGLContext>
#include <QOpenGLFunctions>
#include <QAbstractVideoFilter>
#include <QQmlContext>
#include <QFileInfo>
#ifdef Q_OS_OSX
#include <OpenCL/opencl.h>
#include <OpenGL/OpenGL.h>
#else
#include <CL/opencl.h>
#endif
#include "rgbframehelper.h"
static const char *openclSrc =
"__constant sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_NEAREST;\n"
"__kernel void Emboss(__read_only image2d_t imgIn, __write_only image2d_t imgOut, float factor) {\n"
" const int2 pos = { get_global_id(0), get_global_id(1) };\n"
" float4 diff = read_imagef(imgIn, sampler, pos + (int2)(1,1)) - read_imagef(imgIn, sampler, pos - (int2)(1,1));\n"
" float color = (diff.x + diff.y + diff.z) / factor + 0.5f;\n"
" write_imagef(imgOut, pos, (float4)(color, color, color, 1.0f));\n"
"}\n";
class CLFilter : public QAbstractVideoFilter
{
Q_OBJECT
Q_PROPERTY(qreal factor READ factor WRITE setFactor NOTIFY factorChanged)
public:
CLFilter() : m_factor(1) { }
qreal factor() const { return m_factor; }
void setFactor(qreal v);
QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE;
signals:
void factorChanged();
private:
qreal m_factor;
};
class CLFilterRunnable : public QVideoFilterRunnable
{
public:
CLFilterRunnable(CLFilter *filter);
~CLFilterRunnable();
QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) Q_DECL_OVERRIDE;
private:
void releaseTextures();
uint newTexture();
CLFilter *m_filter;
QSize m_size;
uint m_tempTexture;
uint m_outTexture;
uint m_lastInputTexture;
cl_context m_clContext;
cl_device_id m_clDeviceId;
cl_mem m_clImage[2];
cl_command_queue m_clQueue;
cl_program m_clProgram;
cl_kernel m_clKernel;
};
QVideoFilterRunnable *CLFilter::createFilterRunnable()
{
return new CLFilterRunnable(this);
}
CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
m_filter(filter),
m_tempTexture(0),
m_outTexture(0),
m_lastInputTexture(0),
m_clContext(0),
m_clQueue(0),
m_clProgram(0),
m_clKernel(0)
{
m_clImage[0] = m_clImage[1] = 0;
// Set up OpenCL.
QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
cl_int err;
cl_uint n;
if (clGetPlatformIDs(0, 0, &n) != CL_SUCCESS) {
qWarning("Failed to get platform ID count");
return;
}
if (n == 0) {
qWarning("No OpenCL platform found");
return;
}
QVector<cl_platform_id> platformIds;
platformIds.resize(n);
if (clGetPlatformIDs(n, platformIds.data(), 0) != CL_SUCCESS) {
qWarning("Failed to get platform IDs");
return;
}
cl_platform_id platform = platformIds[0];
const char *vendor = (const char *) f->glGetString(GL_VENDOR);
qDebug("GL_VENDOR: %s", vendor);
const bool isNV = vendor && strstr(vendor, "NVIDIA");
const bool isIntel = vendor && strstr(vendor, "Intel");
qDebug("Found %u OpenCL platforms:", n);
for (cl_uint i = 0; i < n; ++i) {
QByteArray name;
name.resize(1024);
clGetPlatformInfo(platformIds[i], CL_PLATFORM_NAME, name.size(), name.data(), 0);
qDebug("Platform %p: %s", platformIds[i], name.constData());
// Running with an OpenCL platform without GPU support is not going
// to cut it. In practice we want the platform for the GPU which we
// are using with OpenGL.
if (isNV && name.contains(QByteArrayLiteral("NVIDIA")))
platform = platformIds[i];
else if (isIntel && name.contains(QByteArrayLiteral("Intel")))
platform = platformIds[i];
}
qDebug("Using platform %p", platform);
// Set up the context with OpenCL/OpenGL interop.
#if defined (Q_OS_OSX)
cl_context_properties contextProps[] = { CL_CONTEXT_PROPERTY_USE_CGL_SHAREGROUP_APPLE,
(cl_context_properties) CGLGetShareGroup(CGLGetCurrentContext()),
0 };
#elif defined(Q_OS_WIN)
cl_context_properties contextProps[] = { CL_CONTEXT_PLATFORM, (cl_context_properties) platform,
CL_GL_CONTEXT_KHR, (cl_context_properties) wglGetCurrentContext(),
CL_WGL_HDC_KHR, (cl_context_properties) wglGetCurrentDC(),
0 };
#endif
m_clContext = clCreateContextFromType(contextProps, CL_DEVICE_TYPE_GPU, 0, 0, &err);
if (!m_clContext) {
qWarning("Failed to create OpenCL context: %d", err);
return;
}
// Get the GPU device id
#if defined(Q_OS_OSX)
// On OS X, get the "online" device/GPU. This is required for OpenCL/OpenGL context sharing.
    err = clGetGLContextInfoAPPLE(m_clContext, CGLGetCurrentContext(),
                                  CL_CGL_DEVICE_FOR_CURRENT_VIRTUAL_SCREEN_APPLE,
                                  sizeof(cl_device_id), &m_clDeviceId, 0);
    if (err != CL_SUCCESS) {
        qWarning("Failed to get OpenCL device for current screen: %d", err);
        return;
    }
#else
if (clGetDeviceIDs(platform, CL_DEVICE_TYPE_GPU, 1, &m_clDeviceId, 0) != CL_SUCCESS) {
qWarning("Failed to get OpenCL device");
return;
}
#endif
m_clQueue = clCreateCommandQueue(m_clContext, m_clDeviceId, 0, &err);
if (!m_clQueue) {
qWarning("Failed to create OpenCL command queue: %d", err);
return;
}
// Build the program.
m_clProgram = clCreateProgramWithSource(m_clContext, 1, &openclSrc, 0, &err);
if (!m_clProgram) {
qWarning("Failed to create OpenCL program: %d", err);
return;
}
if (clBuildProgram(m_clProgram, 1, &m_clDeviceId, 0, 0, 0) != CL_SUCCESS) {
qWarning("Failed to build OpenCL program");
QByteArray log;
log.resize(2048);
clGetProgramBuildInfo(m_clProgram, m_clDeviceId, CL_PROGRAM_BUILD_LOG, log.size(), log.data(), 0);
qDebug("Build log: %s", log.constData());
return;
}
m_clKernel = clCreateKernel(m_clProgram, "Emboss", &err);
if (!m_clKernel) {
qWarning("Failed to create emboss OpenCL kernel: %d", err);
return;
}
}
CLFilterRunnable::~CLFilterRunnable()
{
releaseTextures();
if (m_clKernel)
clReleaseKernel(m_clKernel);
if (m_clProgram)
clReleaseProgram(m_clProgram);
if (m_clQueue)
clReleaseCommandQueue(m_clQueue);
if (m_clContext)
clReleaseContext(m_clContext);
}
void CLFilterRunnable::releaseTextures()
{
QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
if (m_tempTexture)
f->glDeleteTextures(1, &m_tempTexture);
if (m_outTexture)
f->glDeleteTextures(1, &m_outTexture);
m_tempTexture = m_outTexture = m_lastInputTexture = 0;
if (m_clImage[0])
clReleaseMemObject(m_clImage[0]);
if (m_clImage[1])
clReleaseMemObject(m_clImage[1]);
m_clImage[0] = m_clImage[1] = 0;
}
uint CLFilterRunnable::newTexture()
{
QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
GLuint texture;
f->glGenTextures(1, &texture);
f->glBindTexture(GL_TEXTURE_2D, texture);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width(), m_size.height(),
0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
return texture;
}
QVideoFrame CLFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
{
Q_UNUSED(surfaceFormat);
Q_UNUSED(flags);
// This example supports RGB data only, either in system memory (typical with cameras on all
// platforms) or as an OpenGL texture (e.g. video playback on OS X or on Windows with ANGLE).
    // The latter is the fast path where everything happens on GPU. The former involves a texture upload.
// ANGLE is not compatible with this example since we only do CL-GL interop, not D3D9/11.
if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) {
qWarning("ANGLE is not supported");
return *input;
}
if (!input->isValid()
|| (input->handleType() != QAbstractVideoBuffer::NoHandle
&& input->handleType() != QAbstractVideoBuffer::GLTextureHandle)) {
qWarning("Invalid input format");
return *input;
}
if (input->pixelFormat() == QVideoFrame::Format_YUV420P
|| input->pixelFormat() == QVideoFrame::Format_YV12) {
qWarning("YUV data is not supported");
return *input;
}
if (m_size != input->size()) {
releaseTextures();
m_size = input->size();
}
// Create a texture from the image data.
QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
GLuint texture;
if (input->handleType() == QAbstractVideoBuffer::NoHandle) {
// Upload.
if (m_tempTexture)
f->glBindTexture(GL_TEXTURE_2D, m_tempTexture);
else
m_tempTexture = newTexture();
input->map(QAbstractVideoBuffer::ReadOnly);
        // Call glTexImage2D only once (inside newTexture()) and use glTexSubImage2D
        // afterwards. This avoids the need to recreate the CL image object on every frame.
f->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_size.width(), m_size.height(),
GL_RGBA, GL_UNSIGNED_BYTE, input->bits());
input->unmap();
texture = m_tempTexture;
} else {
// Already an OpenGL texture.
texture = input->handle().toUInt();
f->glBindTexture(GL_TEXTURE_2D, texture);
// Unlike on the other branch, the input texture may change, so m_clImage[0] may need to be recreated.
if (m_lastInputTexture && m_lastInputTexture != texture && m_clImage[0]) {
clReleaseMemObject(m_clImage[0]);
m_clImage[0] = 0;
}
m_lastInputTexture = texture;
}
// OpenCL image objects cannot be read and written at the same time. So use
// a separate texture for the result.
if (!m_outTexture)
m_outTexture = newTexture();
// Create the image objects if not yet done.
cl_int err;
if (!m_clImage[0]) {
m_clImage[0] = clCreateFromGLTexture2D(m_clContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texture, &err);
if (!m_clImage[0]) {
qWarning("Failed to create OpenGL image object from OpenGL texture: %d", err);
return *input;
}
cl_image_format fmt;
if (clGetImageInfo(m_clImage[0], CL_IMAGE_FORMAT, sizeof(fmt), &fmt, 0) != CL_SUCCESS) {
qWarning("Failed to query image format");
return *input;
}
if (fmt.image_channel_order != CL_RGBA)
qWarning("OpenCL image is not RGBA, expect errors");
}
if (!m_clImage[1]) {
m_clImage[1] = clCreateFromGLTexture2D(m_clContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, m_outTexture, &err);
if (!m_clImage[1]) {
qWarning("Failed to create output OpenGL image object from OpenGL texture: %d", err);
return *input;
}
}
// We are all set. Queue acquiring the image objects.
f->glFinish();
clEnqueueAcquireGLObjects(m_clQueue, 2, m_clImage, 0, 0, 0);
// Set up the kernel arguments.
clSetKernelArg(m_clKernel, 0, sizeof(cl_mem), &m_clImage[0]);
clSetKernelArg(m_clKernel, 1, sizeof(cl_mem), &m_clImage[1]);
// Accessing dynamic properties on the filter element is simple:
cl_float factor = m_filter->factor();
clSetKernelArg(m_clKernel, 2, sizeof(cl_float), &factor);
// And queue the kernel.
const size_t workSize[] = { size_t(m_size.width()), size_t(m_size.height()) };
err = clEnqueueNDRangeKernel(m_clQueue, m_clKernel, 2, 0, workSize, 0, 0, 0, 0);
if (err != CL_SUCCESS)
qWarning("Failed to enqueue kernel: %d", err);
// Return the texture from our output image object.
// We return a texture even when the original video frame had pixel data in system memory.
// Qt Multimedia is smart enough to handle this. Once the data is on the GPU, it stays there. No readbacks, no copies.
clEnqueueReleaseGLObjects(m_clQueue, 2, m_clImage, 0, 0, 0);
clFinish(m_clQueue);
return frameFromTexture(m_outTexture, m_size, input->pixelFormat());
}
// InfoFilter will just provide some information about the video frame, to demonstrate
// passing arbitrary data to QML via its finished() signal.
class InfoFilter : public QAbstractVideoFilter
{
Q_OBJECT
public:
QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE;
signals:
void finished(QObject *result);
private:
friend class InfoFilterRunnable;
};
class InfoFilterRunnable : public QVideoFilterRunnable
{
public:
InfoFilterRunnable(InfoFilter *filter) : m_filter(filter) { }
QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) Q_DECL_OVERRIDE;
private:
InfoFilter *m_filter;
};
class InfoFilterResult : public QObject
{
Q_OBJECT
Q_PROPERTY(QSize frameResolution READ frameResolution)
Q_PROPERTY(QString handleType READ handleType)
Q_PROPERTY(int pixelFormat READ pixelFormat)
public:
InfoFilterResult() : m_pixelFormat(0) { }
QSize frameResolution() const { return m_frameResolution; }
QString handleType() const { return m_handleType; }
int pixelFormat() const { return m_pixelFormat; }
private:
QSize m_frameResolution;
QString m_handleType;
int m_pixelFormat;
friend class InfoFilterRunnable;
};
void CLFilter::setFactor(qreal v)
{
if (m_factor != v) {
m_factor = v;
emit factorChanged();
}
}
QVideoFilterRunnable *InfoFilter::createFilterRunnable()
{
return new InfoFilterRunnable(this);
}
QVideoFrame InfoFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
{
Q_UNUSED(surfaceFormat);
Q_UNUSED(flags);
InfoFilterResult *result = new InfoFilterResult;
result->m_frameResolution = input->size();
switch (input->handleType()) {
case QAbstractVideoBuffer::NoHandle:
result->m_handleType = QLatin1String("pixel data");
result->m_pixelFormat = input->pixelFormat();
break;
case QAbstractVideoBuffer::GLTextureHandle:
result->m_handleType = QLatin1String("OpenGL texture");
break;
default:
result->m_handleType = QLatin1String("unknown");
break;
}
emit m_filter->finished(result); // parent-less QObject -> ownership transferred to the JS engine
return *input;
}
int main(int argc, char **argv)
{
QGuiApplication app(argc, argv);
qmlRegisterType<CLFilter>("qmlvideofilter.cl.test", 1, 0, "CLFilter");
qmlRegisterType<InfoFilter>("qmlvideofilter.cl.test", 1, 0, "InfoFilter");
QQuickView view;
QString fn;
if (argc > 1) {
fn = QUrl::fromLocalFile(QFileInfo(QString::fromUtf8(argv[1])).absoluteFilePath()).toString();
qDebug("Playing video %s", qPrintable(fn));
} else {
qDebug("No video file specified, using camera instead.");
}
view.rootContext()->setContextProperty("videoFilename", fn);
view.setSource(QUrl("qrc:///main.qml"));
view.show();
return app.exec();
}
#include "main.moc"

@@ -0,0 +1,114 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the examples of the Qt Multimedia module.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
import QtQuick 2.0
import QtMultimedia 5.5
import qmlvideofilter.cl.test 1.0
Item {
width: 1024
height: 768
Camera {
id: camera
}
MediaPlayer {
id: player
autoPlay: true
source: videoFilename
}
VideoOutput {
id: output
source: videoFilename !== "" ? player : camera
filters: [ infofilter, clfilter ]
anchors.fill: parent
}
CLFilter {
id: clfilter
// Animate a property which is passed to the OpenCL kernel.
SequentialAnimation on factor {
loops: Animation.Infinite
NumberAnimation {
from: 1
to: 20
duration: 6000
}
NumberAnimation {
from: 20
to: 1
duration: 3000
}
}
}
InfoFilter {
// This filter does not change the image. Instead, it provides some results calculated from the frame.
id: infofilter
onFinished: {
info.res = result.frameResolution.width + "x" + result.frameResolution.height;
info.type = result.handleType;
info.fmt = result.pixelFormat;
}
}
Column {
Text {
font.pointSize: 20
color: "green"
text: "Transformed with OpenCL on GPU\nClick to disable and enable the emboss filter"
}
Text {
font.pointSize: 12
color: "green"
text: "Emboss factor " + Math.round(clfilter.factor)
visible: clfilter.active
}
Text {
id: info
font.pointSize: 12
color: "green"
property string res
property string type
property int fmt
text: "Input resolution: " + res + " Input frame type: " + type + (fmt ? " Pixel format: " + fmt : "")
}
}
MouseArea {
anchors.fill: parent
onClicked: clfilter.active = !clfilter.active
}
}

@@ -0,0 +1,22 @@
TEMPLATE = app
TARGET = qmlvideofilter_opencl
QT += quick multimedia
SOURCES = main.cpp
HEADERS = rgbframehelper.h
RESOURCES = qmlvideofilter_opencl.qrc
OTHER_FILES = main.qml
target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/video/qmlvideofilter_opencl
INSTALLS += target
# Edit these as necessary
osx {
LIBS += -framework OpenCL
} else {
INCLUDEPATH += c:/cuda/include
LIBPATH += c:/cuda/lib/x64
LIBS += -lopengl32 -lOpenCL
}

@@ -0,0 +1,5 @@
<RCC>
<qresource prefix="/">
<file>main.qml</file>
</qresource>
</RCC>

@@ -0,0 +1,124 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the examples of the Qt Multimedia module.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef RGBFRAMEHELPER_H
#define RGBFRAMEHELPER_H
#include <QImage>
#include <QAbstractVideoBuffer>
#include <QOpenGLContext>
#include <QOpenGLFunctions>
#include <QOpenGLFramebufferObject>
/*
Returns a QImage that wraps the given video frame.
This is suitable only for QAbstractVideoBuffer::NoHandle frames with RGB (or BGR)
data. YUV is not supported here.
The QVideoFrame must be mapped and kept mapped as long as the wrapping QImage
exists.
As a convenience the function also supports frames with a handle type of
QAbstractVideoBuffer::GLTextureHandle. This allows creating a system memory backed
QVideoFrame containing the image data from an OpenGL texture. However, readback is a
slow operation that may stall the GPU pipeline, and it should be avoided in production code.
*/
QImage imageWrapper(const QVideoFrame &frame)
{
#ifndef QT_NO_OPENGL
if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
// Slow and inefficient path. Ideally what's on the GPU should remain on the GPU, instead of readbacks like this.
QImage img(frame.width(), frame.height(), QImage::Format_RGBA8888);
GLuint textureId = frame.handle().toUInt();
QOpenGLContext *ctx = QOpenGLContext::currentContext();
QOpenGLFunctions *f = ctx->functions();
GLuint fbo;
f->glGenFramebuffers(1, &fbo);
GLuint prevFbo;
f->glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint *) &prevFbo);
f->glBindFramebuffer(GL_FRAMEBUFFER, fbo);
f->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureId, 0);
f->glReadPixels(0, 0, frame.width(), frame.height(), GL_RGBA, GL_UNSIGNED_BYTE, img.bits());
f->glBindFramebuffer(GL_FRAMEBUFFER, prevFbo);
return img;
} else
#endif // QT_NO_OPENGL
{
if (!frame.isReadable()) {
qWarning("imageFromVideoFrame: No mapped image data available for read");
return QImage();
}
QImage::Format fmt = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
if (fmt != QImage::Format_Invalid)
return QImage(frame.bits(), frame.width(), frame.height(), fmt);
qWarning("imageFromVideoFrame: No matching QImage format");
}
return QImage();
}
#ifndef QT_NO_OPENGL
class TextureBuffer : public QAbstractVideoBuffer
{
public:
TextureBuffer(uint id) : QAbstractVideoBuffer(GLTextureHandle), m_id(id) { }
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int *, int *) { return 0; }
void unmap() { }
QVariant handle() const { return QVariant::fromValue<uint>(m_id); }
private:
GLuint m_id;
};
#endif // QT_NO_OPENGL
/*
Creates and returns a new video frame wrapping the OpenGL texture textureId. The size
must be passed in size, together with the format of the underlying image data in
format. When the texture originates from a QImage, use
QVideoFrame::imageFormatFromPixelFormat() to get a suitable format. Ownership is not
altered, the new QVideoFrame will not destroy the texture.
*/
QVideoFrame frameFromTexture(uint textureId, const QSize &size, QVideoFrame::PixelFormat format)
{
#ifndef QT_NO_OPENGL
return QVideoFrame(new TextureBuffer(textureId), size, format);
#else
return QVideoFrame();
#endif // QT_NO_OPENGL
}
#endif

@@ -38,6 +38,7 @@
#include "qsoundeffect.h"
#include <private/qdeclarativevideooutput_p.h>
#include "qabstractvideofilter.h"
#include "qdeclarativemultimediaglobal_p.h"
#include "qdeclarativemediametadata_p.h"
@@ -108,6 +109,7 @@ public:
qmlRegisterRevision<QDeclarativeCamera, 2>(uri, 5, 5);
qmlRegisterType<QDeclarativeMediaMetaData>();
qmlRegisterType<QAbstractVideoFilter>();
}
void initializeEngine(QQmlEngine *engine, const char *uri)

@@ -1225,6 +1225,7 @@ Module {
Property { name: "orientation"; type: "int" }
Property { name: "sourceRect"; type: "QRectF"; isReadonly: true }
Property { name: "contentRect"; type: "QRectF"; isReadonly: true }
Property { name: "filters"; isList: true; isReadonly: true }
Signal {
name: "fillModeChanged"
Parameter { type: "QDeclarativeVideoOutput::FillMode" }

@@ -46,6 +46,7 @@ QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
class QDeclarativeVideoOutput;
class QMediaService;
class QAbstractVideoFilter;
class Q_MULTIMEDIAQUICK_EXPORT QDeclarativeVideoBackend
{
@@ -70,6 +71,12 @@ public:
// The viewport, adjusted for the pixel aspect ratio
virtual QRectF adjustedViewport() const = 0;
virtual void appendFilter(QAbstractVideoFilter *filter) { Q_UNUSED(filter); }
virtual void clearFilters() { }
virtual void releaseResources() { }
virtual void invalidateSceneGraph() { }
protected:
QDeclarativeVideoOutput *q;
QPointer<QMediaService> m_service;

@@ -40,6 +40,7 @@
#include <QtQuick/qquickitem.h>
#include <QtCore/qpointer.h>
#include <QtMultimedia/qcamerainfo.h>
#include <QtMultimedia/qabstractvideofilter.h>
#include <private/qtmultimediaquickdefs_p.h>
@@ -60,6 +61,7 @@ class Q_MULTIMEDIAQUICK_EXPORT QDeclarativeVideoOutput : public QQuickItem
Q_PROPERTY(bool autoOrientation READ autoOrientation WRITE setAutoOrientation NOTIFY autoOrientationChanged REVISION 2)
Q_PROPERTY(QRectF sourceRect READ sourceRect NOTIFY sourceRectChanged)
Q_PROPERTY(QRectF contentRect READ contentRect NOTIFY contentRectChanged)
Q_PROPERTY(QQmlListProperty<QAbstractVideoFilter> filters READ filters);
Q_ENUMS(FillMode)
public:
@@ -104,6 +106,8 @@ public:
};
SourceType sourceType() const;
QQmlListProperty<QAbstractVideoFilter> filters();
Q_SIGNALS:
void sourceChanged();
void fillModeChanged(QDeclarativeVideoOutput::FillMode);
@@ -116,6 +120,7 @@ protected:
QSGNode *updatePaintNode(QSGNode *, UpdatePaintNodeData *);
void itemChange(ItemChange change, const ItemChangeData &changeData);
void geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry);
void releaseResources();
private Q_SLOTS:
void _q_updateMediaObject();
@@ -123,10 +128,16 @@ private Q_SLOTS:
void _q_updateNativeSize();
void _q_updateGeometry();
void _q_screenOrientationChanged(int);
void _q_invalidateSceneGraph();
private:
bool createBackend(QMediaService *service);
static void filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value);
static int filter_count(QQmlListProperty<QAbstractVideoFilter> *property);
static QAbstractVideoFilter *filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index);
static void filter_clear(QQmlListProperty<QAbstractVideoFilter> *property);
SourceType m_sourceType;
QPointer<QObject> m_source;
@@ -145,6 +156,8 @@ private:
QVideoOutputOrientationHandler *m_screenOrientationHandler;
QScopedPointer<QDeclarativeVideoBackend> m_backend;
QList<QAbstractVideoFilter *> m_filters;
};
QT_END_NAMESPACE

@@ -46,10 +46,16 @@ QT_BEGIN_NAMESPACE
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNode : public QSGGeometryNode
{
public:
enum FrameFlag {
FrameFiltered = 0x01
};
Q_DECLARE_FLAGS(FrameFlags, FrameFlag)
QSGVideoNode();
virtual void setCurrentFrame(const QVideoFrame &frame) = 0;
virtual void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) = 0;
virtual QVideoFrame::PixelFormat pixelFormat() const = 0;
virtual QAbstractVideoBuffer::HandleType handleType() const = 0;
void setTexturedRectGeometry(const QRectF &boundingRect, const QRectF &textureRect, int orientation);
@@ -59,6 +65,8 @@ private:
int m_orientation;
};
Q_DECLARE_OPERATORS_FOR_FLAGS(QSGVideoNode::FrameFlags)
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
{
public:

@@ -0,0 +1,319 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qabstractvideofilter.h"
QT_BEGIN_NAMESPACE
/*!
\class QAbstractVideoFilter
\since 5.5
\brief The QAbstractVideoFilter class represents a filter that is applied to the video frames
received by a VideoOutput type.
\inmodule QtMultimedia
\ingroup multimedia
\ingroup multimedia_video
QAbstractVideoFilter provides a convenient way for applications to run image
processing, computer vision algorithms or any generic transformation or
calculation on the output of a VideoOutput type, regardless of the source
(video or camera). By providing a simple interface it allows applications and
third parties to easily develop QML types that provide image processing
algorithms using popular frameworks like \l{http://opencv.org}{OpenCV}. Due to
the close integration with the final stages of the Qt Multimedia video
pipeline, accelerated and possibly zero-copy solutions are feasible too: for
instance, a plugin providing OpenCL-based algorithms can use OpenCL's OpenGL
interop to use the OpenGL textures created by a hardware accelerated video
decoder, without additional readbacks and copies.
\note QAbstractVideoFilter is not always the best choice. To apply effects or
transformations using OpenGL shaders to the image shown on screen, the
standard Qt Quick approach of using ShaderEffect items in combination with
VideoOutput should be used. QAbstractVideoFilter is not a replacement for this. It is
rather targeted for performing computations (that do not necessarily change
the image shown on screen) and computer vision algorithms provided by
external frameworks.
QAbstractVideoFilter is meant to be subclassed. The subclasses are then registered
with the QML engine, so they can be used as a QML type. The list of filters is
assigned to a VideoOutput type via its \l{QtMultimedia::VideoOutput::filters}{filters}
property.
A single filter represents one transformation or processing step on
a video frame. The output is a modified video frame, some arbitrary data or
both. For example, image transformations will result in a different image,
whereas an algorithm for detecting objects on an image will likely provide
a list of rectangles.
Arbitrary data can be represented as properties on the QAbstractVideoFilter subclass
and on the QObject or QJSValue instances passed to its signals. What exactly
these properties and signals are is up to the individual video
filters. Completion of the operations can be indicated by
signals. Computations that do not result in a modified image will pass the
input image through so that subsequent filters can be placed after them.
Properties set on QAbstractVideoFilter serve as input to the computation, similarly
to how uniform values are specified in ShaderEffect types. Changed
property values take effect when the next video frame is processed.
The typical usage is to subclass QAbstractVideoFilter and QVideoFilterRunnable:
\badcode
class MyFilterRunnable : public QVideoFilterRunnable {
public:
QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) { ... }
};
class MyFilter : public QAbstractVideoFilter {
public:
QVideoFilterRunnable *createFilterRunnable() { return new MyFilterRunnable; }
signals:
void finished(QObject *result);
};
int main(int argc, char **argv) {
...
qmlRegisterType<MyFilter>("my.uri", 1, 0, "MyFilter");
...
}
\endcode
MyFilter is thus accessible from QML:
\badcode
import my.uri 1.0
Camera {
id: camera
}
MyFilter {
id: filter
// set properties, they can also be animated
onFinished: console.log("results of the computation: " + result)
}
VideoOutput {
source: camera
filters: [ filter ]
anchors.fill: parent
}
\endcode
This also allows providing filters in QML plugins, separately from the application.
\sa VideoOutput, Camera, MediaPlayer, QVideoFilterRunnable
*/
/*!
\class QVideoFilterRunnable
\since 5.5
\brief The QVideoFilterRunnable class represents the implementation of a filter
that owns all graphics and computational resources, and performs the actual filtering
or calculations.
\inmodule QtMultimedia
\ingroup multimedia
\ingroup multimedia_video
Video filters are split into QAbstractVideoFilter and corresponding QVideoFilterRunnable
instances, similar to QQuickItem and QSGNode. This is necessary to support
threaded rendering scenarios. When using the threaded render loop of the Qt
Quick scene graph, all rendering happens on a dedicated thread.
QVideoFilterRunnable instances always live on this thread, and all their functions,
run(), the constructor, and the destructor, are guaranteed to be invoked on
that thread with the OpenGL context bound. QAbstractVideoFilter instances live on
the main (GUI) thread, like any other QObject and QQuickItem instances
created from QML.
Once created, QVideoFilterRunnable instances are managed by Qt Multimedia and
will be automatically destroyed and recreated when necessary, for example
when the scene graph is invalidated or the QQuickWindow changes or is closed.
Creation happens via the QAbstractVideoFilter::createFilterRunnable() factory function.
\sa QAbstractVideoFilter
*/
/*!
\fn QVideoFrame QVideoFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
Reimplement this function to perform filtering or computation on the \a
input video frame. Like the constructor and destructor, this function is
always called on the render thread with the OpenGL context bound.
Implementations that do not modify the video frame can simply return \a input.
It is safe to access properties of the associated QAbstractVideoFilter instance from
this function.
\a input will not be mapped; it is up to this function to call QVideoFrame::map()
and QVideoFrame::unmap() as necessary.
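For example, a runnable that only inspects the pixel data of a frame living in
system memory could be sketched like this (MyFilterRunnable is a hypothetical
subclass):
\badcode
QVideoFrame MyFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &, RunFlags)
{
    if (input->handleType() == QAbstractVideoBuffer::NoHandle
            && input->map(QAbstractVideoBuffer::ReadOnly)) {
        // ... read input->bits() here ...
        input->unmap();
    }
    return *input;
}
\endcode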
\a surfaceFormat provides additional information; for example, it can be used
to determine which way is up in the input image, which is important for
filters that need to operate correctly on multiple platforms with multiple cameras.
\a flags contains additional information about the filter's invocation. For
example the LastInChain flag indicates that the filter is the last in a
VideoOutput's associated filter list. This can be very useful in cases where
multiple filters are chained together and the work is performed on image data
in some custom format (for example a format specific to some computer vision
framework). To avoid conversion on every filter in the chain, all
intermediate filters can return a QVideoFrame hosting data in the custom
format. Only the last, where the flag is set, returns a QVideoFrame in a
format compatible with Qt.
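A sketch of such a last-in-chain check, with hypothetical helper functions for
the custom format, could look like this:
\badcode
QVideoFrame MyVisionRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &, RunFlags flags)
{
    QVideoFrame result = processInCustomFormat(*input); // hypothetical processing step
    if (flags.testFlag(LastInChain))
        result = convertToQtCompatibleFormat(result); // hypothetical conversion back to a Qt format
    return result;
}
\endcode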
Filters that want to expose the results of their computation to JavaScript
code in QML can declare their own custom signals in the QAbstractVideoFilter
subclass to indicate the completion of the operation. For filters that only
calculate some results and do not modify the video frame, it is also possible
to operate asynchronously. They can queue the necessary operations using the
compute API and return from this function without emitting any signals. The
signal indicating the completion is then emitted only when the compute API
indicates that the operations were done and the results are available. Note
that it is strongly recommended to represent the filter's output data as a
separate instance of QJSValue or a QObject-derived class, which is passed as a
parameter to the signal and becomes exposed to the JavaScript engine. In the
case of a QObject the ownership of this object is controlled by the standard QML
rules: if it has no parent, ownership is transferred to the JavaScript engine,
otherwise it stays with the emitter. Note that the signal connection may be
queued, for example when using the threaded render loop of Qt Quick, and so the
object must stay valid for a longer time; destroying it right after calling
this function is not safe. Using a dedicated results object is guaranteed to
be safe even when using threaded rendering. The same is not necessarily true
for properties on the QAbstractVideoFilter instance itself: properties can
safely be read in run() since the gui thread is blocked during that time but
writing may become problematic.
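As an illustration, handing over a result object could be sketched like this,
where MyResult is a hypothetical QObject-derived class, finished is a signal
declared on the associated QAbstractVideoFilter subclass, and m_filter is
assumed to point to that filter (with the runnable granted access to the
signal, for example via a friend declaration):
\badcode
QVideoFrame MyFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &, RunFlags)
{
    MyResult *result = new MyResult; // no parent: ownership goes to the JavaScript engine
    // ... fill result based on *input ...
    emit m_filter->finished(result);
    return *input;
}
\endcode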
\note Avoid time-consuming operations in this function as they block the
entire rendering of the application.
\note The handleType() and pixelFormat() of \a input are completely up to the
video decoding backend on the platform in use. On some platforms different
forms of input are used depending on the graphics stack. For example, when
playing back videos on Windows with the WMF backend, QVideoFrame contains
OpenGL-wrapped Direct3D textures in case of using ANGLE, but regular pixel
data when using desktop OpenGL (opengl32.dll). Similarly, the video file
format will often decide if the data is RGB or YUV, but this may also depend
on the decoder and the configuration in use. The returned video frame does
not have to be in the same format as the input, for example a filter with an
input of a QVideoFrame backed by system memory can output a QVideoFrame with
an OpenGL texture handle.
\sa QVideoFrame, QVideoSurfaceFormat
*/
/*!
\enum QVideoFilterRunnable::RunFlag
\value LastInChain Indicates that the filter runnable's associated QAbstractVideoFilter
is the last in the corresponding VideoOutput type's filters list, meaning
that the returned frame is the one that is going to be presented to the scene
graph without invoking any further filters.
*/
class QAbstractVideoFilterPrivate
{
public:
QAbstractVideoFilterPrivate() :
active(true)
{ }
bool active;
};
/*!
\internal
*/
QVideoFilterRunnable::~QVideoFilterRunnable()
{
}
/*!
Constructs a new QAbstractVideoFilter instance.
*/
QAbstractVideoFilter::QAbstractVideoFilter(QObject *parent) :
QObject(parent),
d_ptr(new QAbstractVideoFilterPrivate)
{
}
/*!
\internal
*/
QAbstractVideoFilter::~QAbstractVideoFilter()
{
delete d_ptr;
}
/*!
Returns \c true if the filter is active.
By default filters are active. When set to \c false, the filter will be
ignored by the VideoOutput type.
*/
bool QAbstractVideoFilter::isActive() const
{
Q_D(const QAbstractVideoFilter);
return d->active;
}
/*!
\internal
*/
void QAbstractVideoFilter::setActive(bool v)
{
Q_D(QAbstractVideoFilter);
if (d->active != v) {
d->active = v;
emit activeChanged();
}
}
/*!
\fn QVideoFilterRunnable *QAbstractVideoFilter::createFilterRunnable()
Factory function to create a new instance of a QVideoFilterRunnable subclass
corresponding to this filter.
This function is called on the thread on which the Qt Quick scene graph
performs rendering, with the OpenGL context bound. Ownership of the returned
instance is transferred: the returned instance will live on the render thread
and will be destroyed automatically when necessary.
Typically, implementations of the function will simply construct a new
QVideoFilterRunnable instance, passing \c this to the constructor as the
filter runnables must know their associated QAbstractVideoFilter instance to
access dynamic properties and optionally emit signals.
*/
QT_END_NAMESPACE

@@ -0,0 +1,85 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QABSTRACTVIDEOFILTER_H
#define QABSTRACTVIDEOFILTER_H
#include <QtCore/qobject.h>
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qvideosurfaceformat.h>
QT_BEGIN_NAMESPACE
class QAbstractVideoFilterPrivate;
class Q_MULTIMEDIA_EXPORT QVideoFilterRunnable
{
public:
enum RunFlag {
LastInChain = 0x01
};
Q_DECLARE_FLAGS(RunFlags, RunFlag)
virtual ~QVideoFilterRunnable();
virtual QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) = 0;
};
Q_DECLARE_OPERATORS_FOR_FLAGS(QVideoFilterRunnable::RunFlags)
class Q_MULTIMEDIA_EXPORT QAbstractVideoFilter : public QObject
{
Q_OBJECT
Q_PROPERTY(bool active READ isActive WRITE setActive NOTIFY activeChanged)
public:
QAbstractVideoFilter(QObject *parent = 0);
~QAbstractVideoFilter();
bool isActive() const;
void setActive(bool v);
virtual QVideoFilterRunnable *createFilterRunnable() = 0;
Q_SIGNALS:
void activeChanged();
private:
Q_DECLARE_PRIVATE(QAbstractVideoFilter)
Q_DISABLE_COPY(QAbstractVideoFilter)
QAbstractVideoFilterPrivate *d_ptr;
};
QT_END_NAMESPACE
#endif // QABSTRACTVIDEOFILTER_H

@@ -368,6 +368,23 @@ QVideoFrame &QVideoFrame::operator =(const QVideoFrame &other)
return *this;
}
/*!
Returns \c true if this QVideoFrame and \a other reflect the same frame.
*/
bool QVideoFrame::operator==(const QVideoFrame &other) const
{
// Due to explicit sharing we just compare the QSharedData which in turn compares the pointers.
return d == other.d;
}
/*!
Returns \c true if this QVideoFrame and \a other do not reflect the same frame.
*/
bool QVideoFrame::operator!=(const QVideoFrame &other) const
{
return d != other.d;
}
/*!
Destroys a video frame.
*/

@@ -107,6 +107,8 @@ public:
~QVideoFrame();
QVideoFrame &operator =(const QVideoFrame &other);
bool operator==(const QVideoFrame &other) const;
bool operator!=(const QVideoFrame &other) const;
bool isValid() const;

@@ -6,7 +6,8 @@ PUBLIC_HEADERS += \
video/qabstractvideosurface.h \
video/qvideoframe.h \
video/qvideosurfaceformat.h \
video/qvideoprobe.h
video/qvideoprobe.h \
video/qabstractvideofilter.h
PRIVATE_HEADERS += \
video/qabstractvideobuffer_p.h \
@@ -24,8 +25,5 @@ SOURCES += \
video/qvideooutputorientationhandler.cpp \
video/qvideosurfaceformat.cpp \
video/qvideosurfaceoutput.cpp \
video/qvideoprobe.cpp
video/qvideoprobe.cpp \
video/qabstractvideofilter.cpp

@@ -165,7 +165,7 @@ QAndroidSGVideoNode::~QAndroidSGVideoNode()
m_frame = QVideoFrame();
}
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
QMutexLocker lock(&m_frameMutex);
m_frame = frame;

@@ -47,8 +47,9 @@ public:
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
~QAndroidSGVideoNode();
void setCurrentFrame(const QVideoFrame &frame);
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
QVideoFrame::PixelFormat pixelFormat() const { return m_format.pixelFormat(); }
QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::GLTextureHandle; }
void preprocess();

@@ -187,7 +187,7 @@ QSGVideoNode_EGL::~QSGVideoNode_EGL()
{
}
void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame)
void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
EGLImageKHR image = frame.handle().value<void *>();
m_material.setImage(image);
@@ -199,6 +199,11 @@ QVideoFrame::PixelFormat QSGVideoNode_EGL::pixelFormat() const
return m_pixelFormat;
}
QAbstractVideoBuffer::HandleType QSGVideoNode_EGL::handleType() const
{
return QAbstractVideoBuffer::EGLImageHandle;
}
static bool isExtensionSupported()
{
static const bool supported = eglGetProcAddress("glEGLImageTargetTexture2DOES");

@@ -74,8 +74,9 @@ public:
QSGVideoNode_EGL(const QVideoSurfaceFormat &format);
~QSGVideoNode_EGL();
void setCurrentFrame(const QVideoFrame &frame);
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
QVideoFrame::PixelFormat pixelFormat() const;
QAbstractVideoBuffer::HandleType handleType() const;
private:
QSGVideoMaterial_EGL m_material;

@@ -51,7 +51,9 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
mWidth(0),
mHeight(0),
mFormat(QVideoFrame::Format_Invalid),
mCurrentTexture(0)
mCurrentTexture(0),
mMappable(true),
mTexDirectTexture(0)
{
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << Q_FUNC_INFO;
@@ -62,12 +64,7 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
QSGVivanteVideoMaterial::~QSGVivanteVideoMaterial()
{
Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << "delete texture: " << id;
#endif
glDeleteTextures(1, &id);
}
clearTextures();
}
QSGMaterialType *QSGVivanteVideoMaterial::type() const {
@@ -94,9 +91,11 @@ void QSGVivanteVideoMaterial::updateBlending() {
setFlag(Blending, qFuzzyCompare(mOpacity, qreal(1.0)) ? false : true);
}
void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame) {
void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags)
{
QMutexLocker lock(&mFrameMutex);
mNextFrame = frame;
mMappable = !flags.testFlag(QSGVideoNode::FrameFiltered);
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << Q_FUNC_INFO << " new frame: " << frame;
@@ -122,6 +121,22 @@ void QSGVivanteVideoMaterial::bind()
glBindTexture(GL_TEXTURE_2D, mCurrentTexture);
}
void QSGVivanteVideoMaterial::clearTextures()
{
Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << "delete texture: " << id;
#endif
glDeleteTextures(1, &id);
}
mBitsToTextureMap.clear();
if (mTexDirectTexture) {
glDeleteTextures(1, &mTexDirectTexture);
mTexDirectTexture = 0;
}
}
GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
{
QOpenGLContext *glcontext = QOpenGLContext::currentContext();
@@ -130,14 +145,16 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
return 0;
}
static PFNGLTEXDIRECTVIVPROC glTexDirectVIV_LOCAL = 0;
static PFNGLTEXDIRECTVIVMAPPROC glTexDirectVIVMap_LOCAL = 0;
static PFNGLTEXDIRECTINVALIDATEVIVPROC glTexDirectInvalidateVIV_LOCAL = 0;
if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
glTexDirectVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVPROC>(glcontext->getProcAddress("glTexDirectVIV"));
glTexDirectVIVMap_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVMAPPROC>(glcontext->getProcAddress("glTexDirectVIVMap"));
glTexDirectInvalidateVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTINVALIDATEVIVPROC>(glcontext->getProcAddress("glTexDirectInvalidateVIV"));
}
if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
qWarning() << Q_FUNC_INFO << "couldn't find \"glTexDirectVIVMap\" and/or \"glTexDirectInvalidateVIV\" => do nothing and return";
return 0;
}
@@ -146,49 +163,80 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
mWidth = vF.width();
mHeight = vF.height();
mFormat = vF.pixelFormat();
Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << "delete texture: " << id;
#endif
glDeleteTextures(1, &id);
}
mBitsToTextureMap.clear();
clearTextures();
}
if (vF.map(QAbstractVideoBuffer::ReadOnly)) {
if (!mBitsToTextureMap.contains(vF.bits())) {
GLuint tmpTexId;
glGenTextures(1, &tmpTexId);
mBitsToTextureMap.insert(vF.bits(), tmpTexId);
if (mMappable) {
if (!mBitsToTextureMap.contains(vF.bits())) {
// Haven't yet seen this logical address: map to texture.
GLuint tmpTexId;
glGenTextures(1, &tmpTexId);
mBitsToTextureMap.insert(vF.bits(), tmpTexId);
const uchar *constBits = vF.bits();
void *bits = (void*)constBits;
const uchar *constBits = vF.bits();
void *bits = (void*)constBits;
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
#endif
GLuint physical = ~0U;
GLuint physical = ~0U;
glBindTexture(GL_TEXTURE_2D, tmpTexId);
glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
vF.width(), vF.height(),
QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
&bits, &physical);
glBindTexture(GL_TEXTURE_2D, tmpTexId);
glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
vF.width(), vF.height(),
QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
&bits, &physical);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
return tmpTexId;
} else {
// Fastest path: already seen this logical address. Just
// indicate that the data belonging to the texture has changed.
glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
return mBitsToTextureMap.value(vF.bits());
}
} else {
// Cannot map. So copy.
if (!mTexDirectTexture) {
glGenTextures(1, &mTexDirectTexture);
glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexDirectVIV_LOCAL(GL_TEXTURE_2D, mCurrentFrame.width(), mCurrentFrame.height(),
QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(mCurrentFrame.pixelFormat()),
(GLvoid **) &mTexDirectPlanes);
} else {
glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
}
switch (mCurrentFrame.pixelFormat()) {
case QVideoFrame::Format_YUV420P:
case QVideoFrame::Format_YV12:
memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(1));
memcpy(mTexDirectPlanes[2], mCurrentFrame.bits(2), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(2));
break;
case QVideoFrame::Format_NV12:
case QVideoFrame::Format_NV21:
memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
break;
default:
memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(), mCurrentFrame.height() * mCurrentFrame.bytesPerLine());
break;
}
glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
return tmpTexId;
}
else {
glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
return mBitsToTextureMap.value(vF.bits());
return mTexDirectTexture;
}
}
else {

@@ -41,6 +41,7 @@
#include <QVideoFrame>
#include <QMutex>
#include <private/qsgvideonode_p.h>
class QSGVivanteVideoMaterial : public QSGMaterial
{
@@ -52,7 +53,7 @@ public:
virtual QSGMaterialShader *createShader() const;
virtual int compare(const QSGMaterial *other) const;
void updateBlending();
void setCurrentFrame(const QVideoFrame &frame);
void setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags);
void bind();
GLuint vivanteMapping(QVideoFrame texIdVideoFramePair);
@@ -60,6 +61,8 @@ public:
void setOpacity(float o) { mOpacity = o; }
private:
void clearTextures();
qreal mOpacity;
int mWidth;
@@ -69,8 +72,12 @@ private:
QMap<const uchar*, GLuint> mBitsToTextureMap;
QVideoFrame mCurrentFrame, mNextFrame;
GLuint mCurrentTexture;
bool mMappable;
QMutex mFrameMutex;
GLuint mTexDirectTexture;
GLvoid *mTexDirectPlanes[3];
};
#endif // QSGVIDEOMATERIAL_VIVMAP_H

@@ -52,9 +52,9 @@ QSGVivanteVideoNode::~QSGVivanteVideoNode()
{
}
void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame)
void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags flags)
{
mMaterial->setCurrentFrame(frame);
mMaterial->setCurrentFrame(frame, flags);
markDirty(DirtyMaterial);
}

@@ -44,8 +44,9 @@ public:
QSGVivanteVideoNode(const QVideoSurfaceFormat &format);
~QSGVivanteVideoNode();
virtual QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
void setCurrentFrame(const QVideoFrame &frame);
QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::NoHandle; }
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
static const QMap<QVideoFrame::PixelFormat, GLenum>& getVideoFormat2GLFormatMap();

View File

@@ -262,6 +262,12 @@ bool QDeclarativeVideoOutput::createBackend(QMediaService *service)
m_backend->updateGeometry();
}
if (m_backend) {
m_backend->clearFilters();
for (int i = 0; i < m_filters.count(); ++i)
m_backend->appendFilter(m_filters[i]);
}
return backendAvailable;
}
@@ -795,6 +801,12 @@ void QDeclarativeVideoOutput::itemChange(QQuickItem::ItemChange change,
m_backend->itemChange(change, changeData);
}
void QDeclarativeVideoOutput::releaseResources()
{
if (m_backend)
m_backend->releaseResources();
}
void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry)
{
Q_UNUSED(newGeometry);
@@ -809,4 +821,54 @@ void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const Q
_q_updateGeometry();
}
/*!
\qmlproperty list<object> QtMultimedia::VideoOutput::filters
    This property holds the list of video filters applied to the video frames.
    The filters are invoked in the order in which they appear in the list. Each
    object in the list must be an instance of a QAbstractVideoFilter subclass.
\sa QAbstractVideoFilter
*/
QQmlListProperty<QAbstractVideoFilter> QDeclarativeVideoOutput::filters()
{
return QQmlListProperty<QAbstractVideoFilter>(this, 0, filter_append, filter_count, filter_at, filter_clear);
}
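For reference, the matching declaration in the class header (not part of this excerpt, so treat the exact form as an assumption) would look roughly like:

    // Hypothetical sketch of the declaration in qdeclarativevideooutput_p.h
    Q_PROPERTY(QQmlListProperty<QAbstractVideoFilter> filters READ filters)

together with the four static list callbacks used below.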
void QDeclarativeVideoOutput::filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value)
{
QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
self->m_filters.append(value);
if (self->m_backend)
self->m_backend->appendFilter(value);
}
int QDeclarativeVideoOutput::filter_count(QQmlListProperty<QAbstractVideoFilter> *property)
{
QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
return self->m_filters.count();
}
QAbstractVideoFilter *QDeclarativeVideoOutput::filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index)
{
QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
return self->m_filters.at(index);
}
void QDeclarativeVideoOutput::filter_clear(QQmlListProperty<QAbstractVideoFilter> *property)
{
QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
self->m_filters.clear();
if (self->m_backend)
self->m_backend->clearFilters();
}
void QDeclarativeVideoOutput::_q_invalidateSceneGraph()
{
if (m_backend)
m_backend->invalidateSceneGraph();
}
QT_END_NAMESPACE
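
For context, here is a minimal sketch of a filter element that could be assigned to the filters property. MyFilter and MyFilterRunnable are illustrative names and the pass-through body is made up for the example; only the QAbstractVideoFilter/QVideoFilterRunnable API is the one introduced by this commit:

#include <QtMultimedia/qabstractvideofilter.h>

// Hypothetical pass-through filter element, for illustration only.
class MyFilterRunnable : public QVideoFilterRunnable
{
public:
    QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat,
                    RunFlags flags) Q_DECL_OVERRIDE
    {
        Q_UNUSED(surfaceFormat);
        Q_UNUSED(flags);
        // A real filter would map the frame, or use its GL texture handle, here.
        return *input;
    }
};

class MyFilter : public QAbstractVideoFilter
{
    Q_OBJECT
public:
    QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE
    { return new MyFilterRunnable; }
};

After registering it with qmlRegisterType<MyFilter>("MyElements", 1, 0, "MyFilter"), an instance can be listed in QML as VideoOutput { filters: [ myFilter ] }.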

View File

@@ -34,6 +34,7 @@
#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_p.h"
#include <QtMultimedia/qabstractvideofilter.h>
#include <QtMultimedia/qvideorenderercontrol.h>
#include <QtMultimedia/qmediaservice.h>
#include <QtCore/qloggingcategory.h>
@@ -41,6 +42,8 @@
#include <private/qsgvideonode_p.h>
#include <QtGui/QOpenGLContext>
#include <QtQuick/QQuickWindow>
#include <QtCore/QRunnable>
QT_BEGIN_NAMESPACE
@@ -103,11 +106,79 @@ bool QDeclarativeVideoRendererBackend::init(QMediaService *service)
return false;
}
void QDeclarativeVideoRendererBackend::appendFilter(QAbstractVideoFilter *filter)
{
QMutexLocker lock(&m_frameMutex);
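    // Note (annotation): m_frameMutex guards m_filters as well as the frame;
    // the render thread reads the list from updatePaintNode() under the same mutex.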
m_filters.append(Filter(filter));
}
void QDeclarativeVideoRendererBackend::clearFilters()
{
QMutexLocker lock(&m_frameMutex);
scheduleDeleteFilterResources();
m_filters.clear();
}
class FilterRunnableDeleter : public QRunnable
{
public:
FilterRunnableDeleter(const QList<QVideoFilterRunnable *> &runnables) : m_runnables(runnables) { }
void run() Q_DECL_OVERRIDE {
foreach (QVideoFilterRunnable *runnable, m_runnables)
delete runnable;
}
private:
QList<QVideoFilterRunnable *> m_runnables;
};
void QDeclarativeVideoRendererBackend::scheduleDeleteFilterResources()
{
if (!q->window())
return;
QList<QVideoFilterRunnable *> runnables;
for (int i = 0; i < m_filters.count(); ++i) {
if (m_filters[i].runnable) {
runnables.append(m_filters[i].runnable);
m_filters[i].runnable = 0;
}
}
if (!runnables.isEmpty()) {
// Request the scenegraph to run our cleanup job on the render thread.
        // Our QRunnable may execute after the QML tree, including the QAbstractVideoFilter
        // instance, has been destroyed on the main thread, so the cleanup must not
        // reference any of it.
q->window()->scheduleRenderJob(new FilterRunnableDeleter(runnables), QQuickWindow::BeforeSynchronizingStage);
}
}
void QDeclarativeVideoRendererBackend::releaseResources()
{
// Called on the gui thread when the window is closed or changed.
QMutexLocker lock(&m_frameMutex);
scheduleDeleteFilterResources();
}
void QDeclarativeVideoRendererBackend::invalidateSceneGraph()
{
// Called on the render thread, e.g. when the context is lost.
QMutexLocker lock(&m_frameMutex);
for (int i = 0; i < m_filters.count(); ++i) {
if (m_filters[i].runnable) {
delete m_filters[i].runnable;
m_filters[i].runnable = 0;
}
}
}
void QDeclarativeVideoRendererBackend::itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData)
{
if (change == QQuickItem::ItemSceneChange) {
if (changeData.window)
QObject::connect(changeData.window, SIGNAL(sceneGraphInvalidated()),
q, SLOT(_q_invalidateSceneGraph()), Qt::DirectConnection);
}
}
void QDeclarativeVideoRendererBackend::releaseSource()
@@ -216,8 +287,36 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
}
#endif
bool isFrameModified = false;
if (m_frameChanged) {
        // Run the video filters, if there are any. This must be done before
        // potentially changing the video node below.
if (m_frame.isValid() && !m_filters.isEmpty()) {
const QVideoSurfaceFormat surfaceFormat = videoSurface()->surfaceFormat();
for (int i = 0; i < m_filters.count(); ++i) {
QAbstractVideoFilter *filter = m_filters[i].filter;
QVideoFilterRunnable *&runnable = m_filters[i].runnable;
if (filter && filter->isActive()) {
                // Create the filter runnable on first use. Ownership is taken and is
                // tied to this thread, on which rendering happens.
if (!runnable)
runnable = filter->createFilterRunnable();
if (!runnable)
continue;
QVideoFilterRunnable::RunFlags flags = 0;
if (i == m_filters.count() - 1)
flags |= QVideoFilterRunnable::LastInChain;
QVideoFrame newFrame = runnable->run(&m_frame, surfaceFormat, flags);
if (newFrame.isValid() && newFrame != m_frame) {
isFrameModified = true;
m_frame = newFrame;
}
}
}
}
if (videoNode && (videoNode->pixelFormat() != m_frame.pixelFormat() || videoNode->handleType() != m_frame.handleType())) {
qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed";
delete videoNode;
videoNode = 0;
@@ -231,7 +330,9 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (!videoNode) {
foreach (QSGVideoNodeFactoryInterface* factory, m_videoNodeFactories) {
            // Get a node that supports our frame; the surface is irrelevant, as our
            // QSGVideoItemSurface logically supports anything.
            videoNode = factory->createNode(QVideoSurfaceFormat(m_frame.size(), m_frame.pixelFormat(), m_frame.handleType()));
if (videoNode) {
qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType()
<< " Supported formats for the handle by this node:"
@@ -252,7 +353,10 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
qNormalizedOrientation(q->orientation()));
if (m_frameChanged) {
QSGVideoNode::FrameFlags flags = 0;
if (isFrameModified)
flags |= QSGVideoNode::FrameFiltered;
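            // FrameFiltered signals that the frame was produced by a filter; a node
            // that direct-maps frame memory (e.g. the Vivante one) should then copy
            // rather than cache the frame by its bits pointer.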
videoNode->setCurrentFrame(m_frame, flags);
        // Don't keep the frame any longer than necessary.
m_frameChanged = false;
m_frame = QVideoFrame();
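
As a concrete example of what runnable->run() may do in the loop above, here is a hedged sketch of a runnable that inverts mappable RGB32 frames in place. InvertRunnable is an illustrative name, not part of this commit, and error handling is kept minimal:

#include <QtMultimedia/qabstractvideofilter.h>

class InvertRunnable : public QVideoFilterRunnable
{
public:
    QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat,
                    RunFlags flags) Q_DECL_OVERRIDE
    {
        Q_UNUSED(surfaceFormat);
        Q_UNUSED(flags);
        if (input->pixelFormat() == QVideoFrame::Format_RGB32
                && input->map(QAbstractVideoBuffer::ReadWrite)) {
            uchar *b = input->bits();
            const int n = input->mappedBytes();
            for (int i = 0; i + 3 < n; i += 4) { // BGRA byte order on little-endian
                b[i]     = 255 - b[i];
                b[i + 1] = 255 - b[i + 1];
                b[i + 2] = 255 - b[i + 2];       // leave the alpha byte untouched
            }
            input->unmap();
        }
        // Returning *input keeps the (modified) original frame; returning a
        // different valid frame would set isFrameModified in the loop above.
        return *input;
    }
};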

View File

@@ -48,6 +48,8 @@ QT_BEGIN_NAMESPACE
class QSGVideoItemSurface;
class QVideoRendererControl;
class QOpenGLContext;
class QAbstractVideoFilter;
class QVideoFilterRunnable;
class QDeclarativeVideoRendererBackend : public QDeclarativeVideoBackend
{
@@ -70,7 +72,14 @@ public:
void present(const QVideoFrame &frame);
void stop();
void appendFilter(QAbstractVideoFilter *filter) Q_DECL_OVERRIDE;
void clearFilters() Q_DECL_OVERRIDE;
void releaseResources() Q_DECL_OVERRIDE;
void invalidateSceneGraph() Q_DECL_OVERRIDE;
private:
void scheduleDeleteFilterResources();
QPointer<QVideoRendererControl> m_rendererControl;
QList<QSGVideoNodeFactoryInterface*> m_videoNodeFactories;
QSGVideoItemSurface *m_surface;
@@ -83,6 +92,14 @@ private:
QMutex m_frameMutex;
QRectF m_renderedRect; // Destination pixel coordinates, clipped
QRectF m_sourceTextureRect; // Source texture coordinates
struct Filter {
Filter() : filter(0), runnable(0) { }
Filter(QAbstractVideoFilter *filter) : filter(filter), runnable(0) { }
QAbstractVideoFilter *filter;
QVideoFilterRunnable *runnable;
};
QList<Filter> m_filters;
};
class QSGVideoItemSurface : public QAbstractVideoSurface

View File

@@ -311,7 +311,7 @@ QSGVideoNode_I420::~QSGVideoNode_I420()
{
}
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setCurrentFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -49,7 +49,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
QAbstractVideoBuffer::HandleType handleType() const {
return QAbstractVideoBuffer::NoHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
void bindTexture(int id, int unit, int w, int h, const uchar *bits);

View File

@@ -278,7 +278,7 @@ QSGVideoNode_RGB::~QSGVideoNode_RGB()
{
}
void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
QAbstractVideoBuffer::HandleType handleType() const {
return QAbstractVideoBuffer::NoHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;

View File

@@ -235,7 +235,7 @@ QSGVideoNode_Texture::~QSGVideoNode_Texture()
{
}
void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
QAbstractVideoBuffer::HandleType handleType() const {
return QAbstractVideoBuffer::GLTextureHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;
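
Returning GLTextureHandle in the texture node above is what lets updatePaintNode route GPU-resident frames to that node. For completeness, a filter that wants to hand back a GL texture would typically wrap it in a small QAbstractVideoBuffer subclass along these lines (a hedged sketch; TextureBuffer is an illustrative name, not part of this diff):

#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtCore/qvariant.h>

class TextureBuffer : public QAbstractVideoBuffer
{
public:
    TextureBuffer(uint id) : QAbstractVideoBuffer(GLTextureHandle), m_id(id) { }
    MapMode mapMode() const Q_DECL_OVERRIDE { return NotMapped; }
    uchar *map(MapMode, int *, int *) Q_DECL_OVERRIDE { return 0; }
    void unmap() Q_DECL_OVERRIDE { }
    QVariant handle() const Q_DECL_OVERRIDE { return QVariant::fromValue<uint>(m_id); }
private:
    uint m_id;
};

// A frame built as QVideoFrame(new TextureBuffer(textureId), size,
// QVideoFrame::Format_RGB32) then reports GLTextureHandle, so the node/frame
// mismatch check in updatePaintNode selects a texture-capable video node.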