Add video filtering support to VideoOutput

Add the QAbstractVideoFilter base class and integrate it with VideoOutput.

This can be used to perform arbitrary filtering or image processing
on the frames of a video stream of a VideoOutput element right before
the OpenGL texture is provided to the scenegraph by the video node.

This opens up the possibility to integrate computer vision
frameworks or accelerated image processing with Qt Quick applications
that display video streams using Qt Multimedia.

Conceptually it is somewhat similar to QVideoProbe; this approach, however,
allows modifying the frame in real time, with tight integration into the
scenegraph node, and targets Qt Quick, meaning that setting up the filter
and processing the results of the computations happen completely in QML.

[ChangeLog] Added QAbstractVideoFilter that serves as a base class for QML
video filtering elements that integrate compute, vision, and image processing
frameworks with VideoOutput.

Change-Id: Ice1483f8c2daec5a43536978627a7bbb64549480
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
This commit is contained in:
Laszlo Agocs
2015-01-08 14:32:41 +01:00
committed by Yoann Lopes
parent 2f49444638
commit 3e94b7ce2d
33 changed files with 1539 additions and 68 deletions

View File

@@ -262,6 +262,12 @@ bool QDeclarativeVideoOutput::createBackend(QMediaService *service)
m_backend->updateGeometry();
}
if (m_backend) {
m_backend->clearFilters();
for (int i = 0; i < m_filters.count(); ++i)
m_backend->appendFilter(m_filters[i]);
}
return backendAvailable;
}
@@ -795,6 +801,12 @@ void QDeclarativeVideoOutput::itemChange(QQuickItem::ItemChange change,
m_backend->itemChange(change, changeData);
}
void QDeclarativeVideoOutput::releaseResources()
{
    // Forward to the backend, if one has been created.
    if (m_backend) {
        m_backend->releaseResources();
    }
}
void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry)
{
Q_UNUSED(newGeometry);
@@ -809,4 +821,54 @@ void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const Q
_q_updateGeometry();
}
/*!
\qmlproperty list<object> QtMultimedia::VideoOutput::filters
This property holds the list of video filters that are run on the video
frames. The order of the filters in the list matches the order in which
they will be invoked on the video frames. The objects in the list must be
instances of a subclass of QAbstractVideoFilter.
\sa QAbstractVideoFilter
*/
QQmlListProperty<QAbstractVideoFilter> QDeclarativeVideoOutput::filters()
{
    // Expose m_filters to QML through the standard list-property callback set.
    return QQmlListProperty<QAbstractVideoFilter>(this, 0,
                                                  filter_append, filter_count,
                                                  filter_at, filter_clear);
}
void QDeclarativeVideoOutput::filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value)
{
    // Record the filter and, when a backend already exists, register it there as well.
    QDeclarativeVideoOutput *output = static_cast<QDeclarativeVideoOutput *>(property->object);
    output->m_filters.append(value);
    if (output->m_backend)
        output->m_backend->appendFilter(value);
}
int QDeclarativeVideoOutput::filter_count(QQmlListProperty<QAbstractVideoFilter> *property)
{
    // Number of filters currently registered on this VideoOutput.
    QDeclarativeVideoOutput *output = static_cast<QDeclarativeVideoOutput *>(property->object);
    return output->m_filters.count();
}
QAbstractVideoFilter *QDeclarativeVideoOutput::filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index)
{
    // Look up a filter by its position in the list.
    QDeclarativeVideoOutput *output = static_cast<QDeclarativeVideoOutput *>(property->object);
    return output->m_filters.at(index);
}
void QDeclarativeVideoOutput::filter_clear(QQmlListProperty<QAbstractVideoFilter> *property)
{
    // Drop all registered filters, both from this item and from the backend.
    QDeclarativeVideoOutput *output = static_cast<QDeclarativeVideoOutput *>(property->object);
    output->m_filters.clear();
    if (output->m_backend)
        output->m_backend->clearFilters();
}
void QDeclarativeVideoOutput::_q_invalidateSceneGraph()
{
    // Forward scenegraph invalidation to the backend, if one exists.
    if (m_backend) {
        m_backend->invalidateSceneGraph();
    }
}
QT_END_NAMESPACE

View File

@@ -34,6 +34,7 @@
#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_p.h"
#include <QtMultimedia/qabstractvideofilter.h>
#include <QtMultimedia/qvideorenderercontrol.h>
#include <QtMultimedia/qmediaservice.h>
#include <QtCore/qloggingcategory.h>
@@ -41,6 +42,8 @@
#include <private/qsgvideonode_p.h>
#include <QtGui/QOpenGLContext>
#include <QtQuick/QQuickWindow>
#include <QtCore/QRunnable>
QT_BEGIN_NAMESPACE
@@ -103,11 +106,79 @@ bool QDeclarativeVideoRendererBackend::init(QMediaService *service)
return false;
}
void QDeclarativeVideoRendererBackend::appendFilter(QAbstractVideoFilter *filter)
{
    // m_filters is touched from more than one thread, so serialize access.
    QMutexLocker locker(&m_frameMutex);
    m_filters.append(Filter(filter));
}
void QDeclarativeVideoRendererBackend::clearFilters()
{
    QMutexLocker locker(&m_frameMutex);
    // The runnables live on the render thread; queue their deletion there
    // before forgetting the filter list.
    scheduleDeleteFilterResources();
    m_filters.clear();
}
// Render-job that deletes filter runnables on the scenegraph's render thread
// (scheduled via QQuickWindow::scheduleRenderJob()). It owns the runnables it
// is handed and must not reference anything else, since it may execute after
// the QML scene has been torn down.
class FilterRunnableDeleter : public QRunnable
{
public:
    FilterRunnableDeleter(const QList<QVideoFilterRunnable *> &runnables) : m_runnables(runnables) { }
    void run() Q_DECL_OVERRIDE {
        // qDeleteAll() is the idiomatic replacement for a manual delete loop.
        qDeleteAll(m_runnables);
    }
private:
    QList<QVideoFilterRunnable *> m_runnables;
};
void QDeclarativeVideoRendererBackend::scheduleDeleteFilterResources()
{
if (!q->window())
return;
QList<QVideoFilterRunnable *> runnables;
for (int i = 0; i < m_filters.count(); ++i) {
if (m_filters[i].runnable) {
runnables.append(m_filters[i].runnable);
m_filters[i].runnable = 0;
}
}
if (!runnables.isEmpty()) {
// Request the scenegraph to run our cleanup job on the render thread.
// The execution of our QRunnable may happen after the QML tree including the QAbstractVideoFilter instance is
// destroyed on the main thread so no references to it must be used during cleanup.
q->window()->scheduleRenderJob(new FilterRunnableDeleter(runnables), QQuickWindow::BeforeSynchronizingStage);
}
}
void QDeclarativeVideoRendererBackend::releaseResources()
{
    // Invoked on the gui thread when the window is closed or changed; queue
    // the render-thread cleanup of the filter runnables.
    QMutexLocker locker(&m_frameMutex);
    scheduleDeleteFilterResources();
}
void QDeclarativeVideoRendererBackend::invalidateSceneGraph()
{
// Called on the render thread, e.g. when the context is lost.
QMutexLocker lock(&m_frameMutex);
for (int i = 0; i < m_filters.count(); ++i) {
if (m_filters[i].runnable) {
delete m_filters[i].runnable;
m_filters[i].runnable = 0;
}
}
}
void QDeclarativeVideoRendererBackend::itemChange(QQuickItem::ItemChange change,
                                                  const QQuickItem::ItemChangeData &changeData)
{
    // NOTE: the original marked both parameters Q_UNUSED even though both are
    // read below; the misleading macros are removed here.
    // When the item enters a scene, listen for scenegraph invalidation on that
    // window so render-thread filter resources can be dropped. DirectConnection
    // is used so _q_invalidateSceneGraph() runs on the render thread.
    if (change == QQuickItem::ItemSceneChange) {
        if (changeData.window)
            QObject::connect(changeData.window, SIGNAL(sceneGraphInvalidated()),
                             q, SLOT(_q_invalidateSceneGraph()), Qt::DirectConnection);
    }
}
void QDeclarativeVideoRendererBackend::releaseSource()
@@ -216,8 +287,36 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
}
#endif
bool isFrameModified = false;
if (m_frameChanged) {
if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
// Run the VideoFilter if there is one. This must be done before potentially changing the videonode below.
if (m_frame.isValid() && !m_filters.isEmpty()) {
const QVideoSurfaceFormat surfaceFormat = videoSurface()->surfaceFormat();
for (int i = 0; i < m_filters.count(); ++i) {
QAbstractVideoFilter *filter = m_filters[i].filter;
QVideoFilterRunnable *&runnable = m_filters[i].runnable;
if (filter && filter->isActive()) {
// Create the filter runnable if not yet done. Ownership is taken and is tied to this thread, on which rendering happens.
if (!runnable)
runnable = filter->createFilterRunnable();
if (!runnable)
continue;
QVideoFilterRunnable::RunFlags flags = 0;
if (i == m_filters.count() - 1)
flags |= QVideoFilterRunnable::LastInChain;
QVideoFrame newFrame = runnable->run(&m_frame, surfaceFormat, flags);
if (newFrame.isValid() && newFrame != m_frame) {
isFrameModified = true;
m_frame = newFrame;
}
}
}
}
if (videoNode && (videoNode->pixelFormat() != m_frame.pixelFormat() || videoNode->handleType() != m_frame.handleType())) {
qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed";
delete videoNode;
videoNode = 0;
@@ -231,7 +330,9 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (!videoNode) {
foreach (QSGVideoNodeFactoryInterface* factory, m_videoNodeFactories) {
videoNode = factory->createNode(m_surface->surfaceFormat());
// Get a node that supports our frame. The surface is irrelevant, our
// QSGVideoItemSurface supports (logically) anything.
videoNode = factory->createNode(QVideoSurfaceFormat(m_frame.size(), m_frame.pixelFormat(), m_frame.handleType()));
if (videoNode) {
qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType()
<< " Supported formats for the handle by this node:"
@@ -252,7 +353,10 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
qNormalizedOrientation(q->orientation()));
if (m_frameChanged) {
videoNode->setCurrentFrame(m_frame);
QSGVideoNode::FrameFlags flags = 0;
if (isFrameModified)
flags |= QSGVideoNode::FrameFiltered;
videoNode->setCurrentFrame(m_frame, flags);
//don't keep the frame for more than really necessary
m_frameChanged = false;
m_frame = QVideoFrame();

View File

@@ -48,6 +48,8 @@ QT_BEGIN_NAMESPACE
class QSGVideoItemSurface;
class QVideoRendererControl;
class QOpenGLContext;
class QAbstractVideoFilter;
class QVideoFilterRunnable;
class QDeclarativeVideoRendererBackend : public QDeclarativeVideoBackend
{
@@ -70,7 +72,14 @@ public:
void present(const QVideoFrame &frame);
void stop();
void appendFilter(QAbstractVideoFilter *filter) Q_DECL_OVERRIDE;
void clearFilters() Q_DECL_OVERRIDE;
void releaseResources() Q_DECL_OVERRIDE;
void invalidateSceneGraph() Q_DECL_OVERRIDE;
private:
void scheduleDeleteFilterResources();
QPointer<QVideoRendererControl> m_rendererControl;
QList<QSGVideoNodeFactoryInterface*> m_videoNodeFactories;
QSGVideoItemSurface *m_surface;
@@ -83,6 +92,14 @@ private:
QMutex m_frameMutex;
QRectF m_renderedRect; // Destination pixel coordinates, clipped
QRectF m_sourceTextureRect; // Source texture coordinates
// Pairs a QAbstractVideoFilter with the QVideoFilterRunnable created from it.
// The runnable is 0 until it is lazily created on the thread where rendering
// happens, and it is owned by that thread.
struct Filter {
    Filter() : filter(0), runnable(0) { }
    Filter(QAbstractVideoFilter *filter) : filter(filter), runnable(0) { }
    QAbstractVideoFilter *filter;   // not owned; comes from the QML filter list
    QVideoFilterRunnable *runnable; // owned by the render thread once created
};
};
class QSGVideoItemSurface : public QAbstractVideoSurface

View File

@@ -311,7 +311,7 @@ QSGVideoNode_I420::~QSGVideoNode_I420()
{
}
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame)
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setCurrentFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -49,7 +49,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
// Frames consumed by this node carry raw data with no native buffer handle.
QAbstractVideoBuffer::HandleType handleType() const {
    return QAbstractVideoBuffer::NoHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
void bindTexture(int id, int unit, int w, int h, const uchar *bits);

View File

@@ -278,7 +278,7 @@ QSGVideoNode_RGB::~QSGVideoNode_RGB()
{
}
void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame)
void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
// Frames consumed by this node carry raw data with no native buffer handle.
QAbstractVideoBuffer::HandleType handleType() const {
    return QAbstractVideoBuffer::NoHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;

View File

@@ -235,7 +235,7 @@ QSGVideoNode_Texture::~QSGVideoNode_Texture()
{
}
void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame)
void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);

View File

@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
// Frames consumed by this node are backed by an OpenGL texture handle.
QAbstractVideoBuffer::HandleType handleType() const {
    return QAbstractVideoBuffer::GLTextureHandle;
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;