Allow plugins to override the QML VideoOutput type.

Move QDeclarativeVideoOutput to the private QtMultimediaQuickTools
library to make the QDeclarativeVideoOutputBackend interface
implementable by a plugin.

Change-Id: I763c483a1fc9ec56dc7b8be0bc71523f029a36ee
Reviewed-by: Yoann Lopes <yoann.lopes@digia.com>
Author: Andrew den Exter
Date: 2013-12-05 15:38:14 +10:00
Committed by: The Qt Project
Parent: a52f552d42
Commit: 60fb11d9a2
21 changed files with 76 additions and 30 deletions


@@ -45,9 +45,10 @@
#include <QtQml/qqmlcomponent.h>
#include "qsoundeffect.h"
#include <private/qdeclarativevideooutput_p.h>
#include "qdeclarativemediametadata_p.h"
#include "qdeclarativeaudio_p.h"
#include "qdeclarativevideooutput_p.h"
#include "qdeclarativeradio_p.h"
#include "qdeclarativeradiodata_p.h"
#include "qdeclarativecamera_p.h"


@@ -3,13 +3,6 @@ QT += qml quick network multimedia-private qtmultimediaquicktools-private
HEADERS += \
qdeclarativeaudio_p.h \
qdeclarativemediametadata_p.h \
qdeclarativevideooutput_p.h \
qdeclarativevideooutput_backend_p.h \
qdeclarativevideooutput_render_p.h \
qdeclarativevideooutput_window_p.h \
qsgvideonode_i420.h \
qsgvideonode_rgb.h \
qsgvideonode_texture.h \
qdeclarativeradio_p.h \
qdeclarativeradiodata_p.h \
qdeclarativecamera_p.h \
@@ -25,12 +18,6 @@ HEADERS += \
SOURCES += \
multimedia.cpp \
qdeclarativeaudio.cpp \
qdeclarativevideooutput.cpp \
qdeclarativevideooutput_render.cpp \
qdeclarativevideooutput_window.cpp \
qsgvideonode_i420.cpp \
qsgvideonode_rgb.cpp \
qsgvideonode_texture.cpp \
qdeclarativeradio.cpp \
qdeclarativeradiodata.cpp \
qdeclarativecamera.cpp \


@@ -1,743 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdeclarativevideooutput_p.h"
#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_window_p.h"
#include <private/qvideooutputorientationhandler_p.h>
#include <QtMultimedia/qmediaobject.h>
#include <QtMultimedia/qmediaservice.h>
//#define DEBUG_VIDEOITEM
QT_BEGIN_NAMESPACE
/*!
\qmltype VideoOutput
\instantiates QDeclarativeVideoOutput
\brief Render video or camera viewfinder.
\ingroup multimedia_qml
\ingroup multimedia_video_qml
\inqmlmodule QtMultimedia
\c VideoOutput is part of the \b{QtMultimedia 5.0} module.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Rectangle {
width: 800
height: 600
color: "black"
MediaPlayer {
id: player
source: "file://video.webm"
autoPlay: true
}
VideoOutput {
id: videoOutput
source: player
anchors.fill: parent
}
}
\endqml
The VideoOutput item supports untransformed, stretched, and uniformly scaled video presentation.
For a description of the stretched and uniformly scaled presentation modes, see the \l fillMode property
description.
The VideoOutput item works with backends that support either QVideoRendererControl or
QVideoWindowControl. If the backend only supports QVideoWindowControl, the video is rendered
onto an overlay window that is layered on top of the QtQuick window. Due to the nature of the
video overlays, certain features are not available for these kinds of backends:
\list
\li Some transformations like rotations
\li Having other QtQuick items on top of the VideoOutput item
\endlist
Most backends, however, do support QVideoRendererControl and therefore don't have the limitations
listed above.
\sa MediaPlayer, Camera
\section1 Screen Saver
If it is likely that an application will be playing video for an extended
period of time without user interaction, it may be necessary to disable
the platform's screen saver. The \l ScreenSaver (from \l QtSystemInfo)
may be used to disable the screensaver in this fashion:
\qml
import QtSystemInfo 5.0
ScreenSaver { screenSaverEnabled: false }
\endqml
*/
/*!
\internal
\class QDeclarativeVideoOutput
\brief The QDeclarativeVideoOutput class provides a video output item.
*/
QDeclarativeVideoOutput::QDeclarativeVideoOutput(QQuickItem *parent) :
QQuickItem(parent),
m_sourceType(NoSource),
m_fillMode(PreserveAspectFit),
m_geometryDirty(true),
m_orientation(0),
m_autoOrientation(false),
m_screenOrientationHandler(0)
{
setFlag(ItemHasContents, true);
}
QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
{
m_backend.reset();
m_source.clear();
_q_updateMediaObject();
}
/*!
\qmlproperty variant QtMultimedia::VideoOutput::source
This property holds the source item providing the video frames, such as MediaPlayer or Camera.
If you are extending your own C++ classes to interoperate with VideoOutput, you can
either provide a QObject-based class with a \c mediaObject property that exposes a
QMediaObject-derived class with a QVideoRendererControl available, or you can
provide a QObject-based class with a writable \c videoSurface property that accepts
a QAbstractVideoSurface-based class and follows the correct protocol to
deliver QVideoFrames to it.
*/
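// Illustrative sketch, not part of this file: the documentation above describes a second
// interop protocol where the source object exposes a writable "videoSurface" property.
// A minimal, hypothetical source written against that protocol could look like the class
// below; the FrameSource name and its deliverFrame() helper are assumptions for illustration.
#include <QtCore/qpointer.h>
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <QtMultimedia/qvideoframe.h>
class FrameSource : public QObject
{
    Q_OBJECT
    Q_PROPERTY(QAbstractVideoSurface *videoSurface READ videoSurface WRITE setVideoSurface)
public:
    QAbstractVideoSurface *videoSurface() const { return m_surface.data(); }
    // VideoOutput assigns its internal surface through this setter (the VideoSurfaceSource path).
    void setVideoSurface(QAbstractVideoSurface *surface)
    {
        if (m_surface && m_surface != surface && m_surface->isActive())
            m_surface->stop();
        m_surface = surface;
    }
    // Deliver a frame, starting the surface with a matching format on first use.
    void deliverFrame(const QVideoFrame &frame)
    {
        if (!m_surface)
            return;
        if (!m_surface->isActive()) {
            QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), frame.handleType());
            m_surface->start(format);
        }
        m_surface->present(frame);
    }
private:
    QPointer<QAbstractVideoSurface> m_surface;
};
// End of illustrative sketch.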
void QDeclarativeVideoOutput::setSource(QObject *source)
{
#ifdef DEBUG_VIDEOITEM
qDebug() << Q_FUNC_INFO << source;
#endif
if (source == m_source.data())
return;
if (m_source && m_sourceType == MediaObjectSource)
disconnect(m_source.data(), 0, this, SLOT(_q_updateMediaObject()));
if (m_backend)
m_backend->releaseSource();
m_source = source;
if (m_source) {
const QMetaObject *metaObject = m_source.data()->metaObject();
int mediaObjectPropertyIndex = metaObject->indexOfProperty("mediaObject");
if (mediaObjectPropertyIndex != -1) {
const QMetaProperty mediaObjectProperty = metaObject->property(mediaObjectPropertyIndex);
if (mediaObjectProperty.hasNotifySignal()) {
QMetaMethod method = mediaObjectProperty.notifySignal();
QMetaObject::connect(m_source.data(), method.methodIndex(),
this, this->metaObject()->indexOfSlot("_q_updateMediaObject()"),
Qt::DirectConnection, 0);
}
m_sourceType = MediaObjectSource;
} else if (metaObject->indexOfProperty("videoSurface") != -1) {
// Make sure our backend is a QDeclarativeVideoRendererBackend
m_backend.reset();
createBackend(0);
Q_ASSERT(m_backend);
#ifndef QT_NO_DYNAMIC_CAST
Q_ASSERT(dynamic_cast<QDeclarativeVideoRendererBackend *>(m_backend.data()));
#endif
QAbstractVideoSurface * const surface = m_backend->videoSurface();
Q_ASSERT(surface);
m_source.data()->setProperty("videoSurface",
QVariant::fromValue<QAbstractVideoSurface*>(surface));
m_sourceType = VideoSurfaceSource;
} else {
m_sourceType = NoSource;
}
} else {
m_sourceType = NoSource;
}
_q_updateMediaObject();
emit sourceChanged();
}
bool QDeclarativeVideoOutput::createBackend(QMediaService *service)
{
bool backendAvailable = false;
m_backend.reset(new QDeclarativeVideoRendererBackend(this));
if (m_backend->init(service))
backendAvailable = true;
// QDeclarativeVideoWindowBackend only works when there is a service with a QVideoWindowControl.
// Without service, the QDeclarativeVideoRendererBackend should always work.
if (!backendAvailable) {
Q_ASSERT(service);
m_backend.reset(new QDeclarativeVideoWindowBackend(this));
if (m_backend->init(service))
backendAvailable = true;
}
if (!backendAvailable) {
qWarning() << Q_FUNC_INFO << "Media service has neither renderer nor window control available.";
m_backend.reset();
}
return backendAvailable;
}
void QDeclarativeVideoOutput::_q_updateMediaObject()
{
QMediaObject *mediaObject = 0;
if (m_source)
mediaObject = qobject_cast<QMediaObject*>(m_source.data()->property("mediaObject").value<QObject*>());
#ifdef DEBUG_VIDEOITEM
qDebug() << Q_FUNC_INFO << mediaObject;
#endif
if (m_mediaObject.data() == mediaObject)
return;
if (m_sourceType != VideoSurfaceSource)
m_backend.reset();
m_mediaObject.clear();
m_service.clear();
if (mediaObject) {
if (QMediaService *service = mediaObject->service()) {
if (createBackend(service)) {
m_service = service;
m_mediaObject = mediaObject;
}
}
}
}
/*!
\qmlproperty enumeration QtMultimedia::VideoOutput::fillMode
Set this property to define how the video is scaled to fit the target area.
\list
\li Stretch - the video is scaled to fit
\li PreserveAspectFit - the video is scaled uniformly to fit without cropping
\li PreserveAspectCrop - the video is scaled uniformly to fill, cropping if necessary
\endlist
The default fill mode is PreserveAspectFit.
*/
QDeclarativeVideoOutput::FillMode QDeclarativeVideoOutput::fillMode() const
{
return m_fillMode;
}
void QDeclarativeVideoOutput::setFillMode(FillMode mode)
{
if (mode == m_fillMode)
return;
m_fillMode = mode;
m_geometryDirty = true;
update();
emit fillModeChanged(mode);
}
void QDeclarativeVideoOutput::_q_updateNativeSize()
{
if (!m_backend)
return;
QSize size = m_backend->nativeSize();
if (!qIsDefaultAspect(m_orientation)) {
size.transpose();
}
if (m_nativeSize != size) {
m_nativeSize = size;
m_geometryDirty = true;
setImplicitWidth(size.width());
setImplicitHeight(size.height());
emit sourceRectChanged();
}
}
/* Based on fill mode and our size, figure out the source/dest rects */
void QDeclarativeVideoOutput::_q_updateGeometry()
{
const QRectF rect(0, 0, width(), height());
const QRectF absoluteRect(x(), y(), width(), height());
if (!m_geometryDirty && m_lastRect == absoluteRect)
return;
QRectF oldContentRect(m_contentRect);
m_geometryDirty = false;
m_lastRect = absoluteRect;
if (m_nativeSize.isEmpty()) {
// This is necessary for the item to receive the
// first paint event and configure the video surface.
m_contentRect = rect;
} else if (m_fillMode == Stretch) {
m_contentRect = rect;
} else if (m_fillMode == PreserveAspectFit || m_fillMode == PreserveAspectCrop) {
QSizeF scaled = m_nativeSize;
scaled.scale(rect.size(), m_fillMode == PreserveAspectFit ?
Qt::KeepAspectRatio : Qt::KeepAspectRatioByExpanding);
m_contentRect = QRectF(QPointF(), scaled);
m_contentRect.moveCenter(rect.center());
}
if (m_backend)
m_backend->updateGeometry();
if (m_contentRect != oldContentRect)
emit contentRectChanged();
}
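// Worked example (not in the original source): with a native video size of 640x360, an item
// size of 800x600 and fillMode PreserveAspectFit, the frame is scaled by min(800/640, 600/360)
// = 1.25 to 800x450 and centred vertically, so m_contentRect becomes QRectF(0, 75, 800, 450);
// with PreserveAspectCrop the factor is max(1.25, 1.667) and the rect extends past the item.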
void QDeclarativeVideoOutput::_q_screenOrientationChanged(int orientation)
{
setOrientation(orientation);
}
/*!
\qmlproperty int QtMultimedia::VideoOutput::orientation
In some cases the source video stream requires a certain
orientation to be correct. This includes
sources like a camera viewfinder, where the displayed
viewfinder should match reality, no matter what rotation
the rest of the user interface has.
This property allows you to apply a rotation (in steps
of 90 degrees) to compensate for any user interface
rotation, with positive values in the anti-clockwise direction.
The orientation change will also affect the mapping
of coordinates from source to viewport.
*/
int QDeclarativeVideoOutput::orientation() const
{
return m_orientation;
}
void QDeclarativeVideoOutput::setOrientation(int orientation)
{
// Make sure it's a multiple of 90.
if (orientation % 90)
return;
// If there's no actual change, return
if (m_orientation == orientation)
return;
// If the new orientation is the same effect
// as the old one, don't update the video node stuff
if ((m_orientation % 360) == (orientation % 360)) {
m_orientation = orientation;
emit orientationChanged();
return;
}
m_geometryDirty = true;
// Otherwise, a new orientation
// See if we need to change aspect ratio orientation too
bool oldAspect = qIsDefaultAspect(m_orientation);
bool newAspect = qIsDefaultAspect(orientation);
m_orientation = orientation;
if (oldAspect != newAspect) {
m_nativeSize.transpose();
setImplicitWidth(m_nativeSize.width());
setImplicitHeight(m_nativeSize.height());
// Source rectangle does not change for orientation
}
update();
emit orientationChanged();
}
/*!
\qmlproperty bool QtMultimedia::VideoOutput::autoOrientation
This property allows you to enable and disable auto orientation
of the video stream, so that its orientation always matches
the orientation of the screen. If \c autoOrientation is enabled,
the \c orientation property is overwritten.
By default \c autoOrientation is disabled.
\since QtMultimedia 5.2
*/
bool QDeclarativeVideoOutput::autoOrientation() const
{
return m_autoOrientation;
}
void QDeclarativeVideoOutput::setAutoOrientation(bool autoOrientation)
{
if (autoOrientation == m_autoOrientation)
return;
m_autoOrientation = autoOrientation;
if (m_autoOrientation) {
m_screenOrientationHandler = new QVideoOutputOrientationHandler(this);
connect(m_screenOrientationHandler, SIGNAL(orientationChanged(int)),
this, SLOT(_q_screenOrientationChanged(int)));
_q_screenOrientationChanged(m_screenOrientationHandler->currentOrientation());
} else {
disconnect(m_screenOrientationHandler, SIGNAL(orientationChanged(int)),
this, SLOT(_q_screenOrientationChanged(int)));
m_screenOrientationHandler->deleteLater();
m_screenOrientationHandler = 0;
}
emit autoOrientationChanged();
}
/*!
\qmlproperty rectangle QtMultimedia::VideoOutput::contentRect
This property holds the item coordinates of the area that
would contain video to render. With certain fill modes,
this rectangle will be larger than the visible area of the
\c VideoOutput.
This property is useful when other coordinates are specified
in terms of the source dimensions - this applies to relative
(normalized) frame coordinates in the range of 0 to 1.0.
\sa mapRectToItem(), mapPointToItem()
Areas outside this will be transparent.
*/
QRectF QDeclarativeVideoOutput::contentRect() const
{
return m_contentRect;
}
/*!
\qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect
This property holds the area of the source video
content that is considered for rendering. The
values are in source pixel coordinates, adjusted for
the source's pixel aspect ratio.
Note that typically the top left corner of this rectangle
will be \c {0,0} while the width and height will be the
width and height of the input content. These values only
differ when the video source has a viewport set.
The orientation setting does not affect this rectangle.
\sa QVideoSurfaceFormat::pixelAspectRatio()
\sa QVideoSurfaceFormat::viewport()
*/
QRectF QDeclarativeVideoOutput::sourceRect() const
{
// We might have to transpose back
QSizeF size = m_nativeSize;
if (!qIsDefaultAspect(m_orientation)) {
size.transpose();
}
// No backend? Just assume no viewport.
if (!m_nativeSize.isValid() || !m_backend) {
return QRectF(QPointF(), size);
}
// Take the viewport into account for the top left position.
// m_nativeSize is already adjusted to the viewport, as it originates
// from QVideoSurfaceFormat::sizeHint(), which includes pixel aspect
// ratio and viewport.
const QRectF viewport = m_backend->adjustedViewport();
Q_ASSERT(viewport.size() == size);
return QRectF(viewport.topLeft(), size);
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapNormalizedPointToItem (const QPointF &point) const
Given normalized coordinates \a point (that is, each
component in the range of 0 to 1.0), return the mapped point
that it corresponds to (in item coordinates).
This mapping is affected by the orientation.
Depending on the fill mode, this point may lie outside the rendered
rectangle.
*/
QPointF QDeclarativeVideoOutput::mapNormalizedPointToItem(const QPointF &point) const
{
qreal dx = point.x();
qreal dy = point.y();
if (qIsDefaultAspect(m_orientation)) {
dx *= m_contentRect.width();
dy *= m_contentRect.height();
} else {
dx *= m_contentRect.height();
dy *= m_contentRect.width();
}
switch (qNormalizedOrientation(m_orientation)) {
case 0:
default:
return m_contentRect.topLeft() + QPointF(dx, dy);
case 90:
return m_contentRect.bottomLeft() + QPointF(dy, -dx);
case 180:
return m_contentRect.bottomRight() + QPointF(-dx, -dy);
case 270:
return m_contentRect.topRight() + QPointF(-dy, dx);
}
}
/*!
\qmlmethod QRectF QtMultimedia::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
Given a rectangle \a rectangle in normalized
coordinates (that is, each component in the range of 0 to 1.0),
return the mapped rectangle that it corresponds to (in item coordinates).
This mapping is affected by the orientation.
Depending on the fill mode, this rectangle may extend outside the rendered
rectangle.
*/
QRectF QDeclarativeVideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
{
return QRectF(mapNormalizedPointToItem(rectangle.topLeft()),
mapNormalizedPointToItem(rectangle.bottomRight())).normalized();
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSource(const QPointF &point) const
Given a point \a point in item coordinates, return the
corresponding point in source coordinates. This mapping is
affected by the orientation.
If the supplied point lies outside the rendered area, the returned
point will be outside the source rectangle.
*/
QPointF QDeclarativeVideoOutput::mapPointToSource(const QPointF &point) const
{
QPointF norm = mapPointToSourceNormalized(point);
if (qIsDefaultAspect(m_orientation))
return QPointF(norm.x() * m_nativeSize.width(), norm.y() * m_nativeSize.height());
else
return QPointF(norm.x() * m_nativeSize.height(), norm.y() * m_nativeSize.width());
}
/*!
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSource(const QRectF &rectangle) const
Given a rectangle \a rectangle in item coordinates, return the
corresponding rectangle in source coordinates. This mapping is
affected by the orientation.
If the supplied rectangle lies outside the rendered area, the returned
rectangle will be outside the source rectangle.
*/
QRectF QDeclarativeVideoOutput::mapRectToSource(const QRectF &rectangle) const
{
return QRectF(mapPointToSource(rectangle.topLeft()),
mapPointToSource(rectangle.bottomRight())).normalized();
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSourceNormalized(const QPointF &point) const
Given a point \a point in item coordinates, return the
corresponding point in normalized source coordinates. This mapping is
affected by the orientation.
If the supplied point lies outside the rendered area, the returned
point will be outside the source rectangle. No clamping is performed.
*/
QPointF QDeclarativeVideoOutput::mapPointToSourceNormalized(const QPointF &point) const
{
if (m_contentRect.isEmpty())
return QPointF();
// Normalize the item source point
qreal nx = (point.x() - m_contentRect.left()) / m_contentRect.width();
qreal ny = (point.y() - m_contentRect.top()) / m_contentRect.height();
const qreal one(1.0f);
// For now, the origin of the source rectangle is 0,0
switch (qNormalizedOrientation(m_orientation)) {
case 0:
default:
return QPointF(nx, ny);
case 90:
return QPointF(one - ny, nx);
case 180:
return QPointF(one - nx, one - ny);
case 270:
return QPointF(ny, one - nx);
}
}
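// Worked example (not in the original source): with orientation 90 and the content filling
// m_contentRect, the point at m_contentRect.topLeft() normalizes to (nx, ny) = (0, 0) and is
// returned as (1 - ny, nx) = (1, 0), i.e. the item's top-left corner corresponds to the
// top-right corner of the unrotated source frame.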
/*!
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
Given a rectangle \a rectangle in item coordinates, return the
corresponding rectangle in normalized source coordinates. This mapping is
affected by the orientation.
If the supplied rectangle lies outside the rendered area, the returned
rectangle will be outside the source rectangle. No clamping is performed.
*/
QRectF QDeclarativeVideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
{
return QRectF(mapPointToSourceNormalized(rectangle.topLeft()),
mapPointToSourceNormalized(rectangle.bottomRight())).normalized();
}
QDeclarativeVideoOutput::SourceType QDeclarativeVideoOutput::sourceType() const
{
return m_sourceType;
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
Given a point \a point in source coordinates, return the
corresponding point in item coordinates. This mapping is
affected by the orientation.
Depending on the fill mode, this point may lie outside the rendered
rectangle.
*/
QPointF QDeclarativeVideoOutput::mapPointToItem(const QPointF &point) const
{
if (m_nativeSize.isEmpty())
return QPointF();
// Just normalize and use that function
// m_nativeSize is transposed in some orientations
if (qIsDefaultAspect(m_orientation))
return mapNormalizedPointToItem(QPointF(point.x() / m_nativeSize.width(), point.y() / m_nativeSize.height()));
else
return mapNormalizedPointToItem(QPointF(point.x() / m_nativeSize.height(), point.y() / m_nativeSize.width()));
}
/*!
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToItem(const QRectF &rectangle) const
Given a rectangle \a rectangle in source coordinates, return the
corresponding rectangle in item coordinates. This mapping is
affected by the orientation.
Depending on the fill mode, this rectangle may extend outside the rendered
rectangle.
*/
QRectF QDeclarativeVideoOutput::mapRectToItem(const QRectF &rectangle) const
{
return QRectF(mapPointToItem(rectangle.topLeft()),
mapPointToItem(rectangle.bottomRight())).normalized();
}
QSGNode *QDeclarativeVideoOutput::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *data)
{
_q_updateGeometry();
if (!m_backend)
return 0;
return m_backend->updatePaintNode(oldNode, data);
}
void QDeclarativeVideoOutput::itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData)
{
if (m_backend)
m_backend->itemChange(change, changeData);
}
void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry)
{
Q_UNUSED(newGeometry);
Q_UNUSED(oldGeometry);
QQuickItem::geometryChanged(newGeometry, oldGeometry);
// Explicitly listen to geometry changes here. This is needed since changing the position does
// not trigger a call to updatePaintNode().
// We need to react to position changes though, as the window backend's display rect gets
// changed in that situation.
_q_updateGeometry();
}
QT_END_NAMESPACE


@@ -1,111 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QDECLARATIVEVIDEOOUTPUT_BACKEND_P_H
#define QDECLARATIVEVIDEOOUTPUT_BACKEND_P_H
#include <QtCore/qpointer.h>
#include <QtCore/qsize.h>
#include <QtQuick/qquickitem.h>
#include <QtQuick/qsgnode.h>
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
class QDeclarativeVideoOutput;
class QMediaService;
class QDeclarativeVideoBackend
{
public:
explicit QDeclarativeVideoBackend(QDeclarativeVideoOutput *parent)
: q(parent)
{}
virtual ~QDeclarativeVideoBackend()
{}
virtual bool init(QMediaService *service) = 0;
virtual void releaseSource() = 0;
virtual void releaseControl() = 0;
virtual void itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData) = 0;
virtual QSize nativeSize() const = 0;
virtual void updateGeometry() = 0;
virtual QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data) = 0;
virtual QAbstractVideoSurface *videoSurface() const = 0;
// The viewport, adjusted for the pixel aspect ratio
virtual QRectF adjustedViewport() const = 0;
protected:
QDeclarativeVideoOutput *q;
QPointer<QMediaService> m_service;
};
/*
* Helper - returns true if the given orientation has the same aspect as the default (e.g. 180*n)
*/
namespace {
inline bool qIsDefaultAspect(int o)
{
return (o % 180) == 0;
}
/*
* Return the orientation normalized to 0-359
*/
inline int qNormalizedOrientation(int o)
{
// Negative orientations give negative results
int o2 = o % 360;
if (o2 < 0)
o2 += 360;
return o2;
}
}
QT_END_NAMESPACE
#endif
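// Illustrative sketch, not part of this header: once QDeclarativeVideoBackend is exported from
// the private QtMultimediaQuickTools library, a plugin can subclass it to back VideoOutput with
// a platform-specific presentation path. The class below is hypothetical and only shows the
// shape such an override might take; a real backend would drive an actual video sink.
class MyPlatformVideoBackend : public QDeclarativeVideoBackend
{
public:
    explicit MyPlatformVideoBackend(QDeclarativeVideoOutput *parent)
        : QDeclarativeVideoBackend(parent) {}
    bool init(QMediaService *service) { m_service = service; return service != 0; }
    void releaseSource() {}
    void releaseControl() { m_service.clear(); }
    void itemChange(QQuickItem::ItemChange, const QQuickItem::ItemChangeData &) {}
    QSize nativeSize() const { return QSize(); }      // e.g. query the platform video sink here
    void updateGeometry() {}                          // e.g. reposition a platform overlay here
    QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *)
    { Q_UNUSED(oldNode); return 0; }                  // overlay-style backends paint no node
    QAbstractVideoSurface *videoSurface() const { return 0; }
    QRectF adjustedViewport() const { return QRectF(QPointF(), nativeSize()); }
};
// End of illustrative sketch.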


@@ -1,155 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QDECLARATIVEVIDEOOUTPUT_P_H
#define QDECLARATIVEVIDEOOUTPUT_P_H
#include <QtCore/qrect.h>
#include <QtCore/qsharedpointer.h>
#include <QtQuick/qquickitem.h>
#include <QtCore/qpointer.h>
QT_BEGIN_NAMESPACE
class QMediaObject;
class QMediaService;
class QDeclarativeVideoBackend;
class QVideoOutputOrientationHandler;
class QDeclarativeVideoOutput : public QQuickItem
{
Q_OBJECT
Q_DISABLE_COPY(QDeclarativeVideoOutput)
Q_PROPERTY(QObject* source READ source WRITE setSource NOTIFY sourceChanged)
Q_PROPERTY(FillMode fillMode READ fillMode WRITE setFillMode NOTIFY fillModeChanged)
Q_PROPERTY(int orientation READ orientation WRITE setOrientation NOTIFY orientationChanged)
Q_PROPERTY(bool autoOrientation READ autoOrientation WRITE setAutoOrientation NOTIFY autoOrientationChanged REVISION 2)
Q_PROPERTY(QRectF sourceRect READ sourceRect NOTIFY sourceRectChanged)
Q_PROPERTY(QRectF contentRect READ contentRect NOTIFY contentRectChanged)
Q_ENUMS(FillMode)
public:
enum FillMode
{
Stretch = Qt::IgnoreAspectRatio,
PreserveAspectFit = Qt::KeepAspectRatio,
PreserveAspectCrop = Qt::KeepAspectRatioByExpanding
};
QDeclarativeVideoOutput(QQuickItem *parent = 0);
~QDeclarativeVideoOutput();
QObject *source() const { return m_source.data(); }
void setSource(QObject *source);
FillMode fillMode() const;
void setFillMode(FillMode mode);
int orientation() const;
void setOrientation(int);
bool autoOrientation() const;
void setAutoOrientation(bool);
QRectF sourceRect() const;
QRectF contentRect() const;
Q_INVOKABLE QPointF mapPointToItem(const QPointF &point) const;
Q_INVOKABLE QRectF mapRectToItem(const QRectF &rectangle) const;
Q_INVOKABLE QPointF mapNormalizedPointToItem(const QPointF &point) const;
Q_INVOKABLE QRectF mapNormalizedRectToItem(const QRectF &rectangle) const;
Q_INVOKABLE QPointF mapPointToSource(const QPointF &point) const;
Q_INVOKABLE QRectF mapRectToSource(const QRectF &rectangle) const;
Q_INVOKABLE QPointF mapPointToSourceNormalized(const QPointF &point) const;
Q_INVOKABLE QRectF mapRectToSourceNormalized(const QRectF &rectangle) const;
enum SourceType {
NoSource,
MediaObjectSource,
VideoSurfaceSource
};
SourceType sourceType() const;
Q_SIGNALS:
void sourceChanged();
void fillModeChanged(QDeclarativeVideoOutput::FillMode);
void orientationChanged();
void autoOrientationChanged();
void sourceRectChanged();
void contentRectChanged();
protected:
QSGNode *updatePaintNode(QSGNode *, UpdatePaintNodeData *);
void itemChange(ItemChange change, const ItemChangeData &changeData);
void geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry);
private Q_SLOTS:
void _q_updateMediaObject();
void _q_updateNativeSize();
void _q_updateGeometry();
void _q_screenOrientationChanged(int);
private:
bool createBackend(QMediaService *service);
SourceType m_sourceType;
QPointer<QObject> m_source;
QPointer<QMediaObject> m_mediaObject;
QPointer<QMediaService> m_service;
FillMode m_fillMode;
QSize m_nativeSize;
bool m_geometryDirty;
QRectF m_lastRect; // Cache of last rect to avoid recalculating geometry
QRectF m_contentRect; // Destination pixel coordinates, unclipped
int m_orientation;
bool m_autoOrientation;
QVideoOutputOrientationHandler *m_screenOrientationHandler;
QScopedPointer<QDeclarativeVideoBackend> m_backend;
};
QT_END_NAMESPACE
#endif // QDECLARATIVEVIDEOOUTPUT_P_H


@@ -1,351 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_p.h"
#include <QtMultimedia/qvideorenderercontrol.h>
#include <QtMultimedia/qmediaservice.h>
#include <private/qmediapluginloader_p.h>
#include <private/qsgvideonode_p.h>
#include <QtGui/QOpenGLContext>
QT_BEGIN_NAMESPACE
Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, videoNodeFactoryLoader,
(QSGVideoNodeFactoryInterface_iid, QLatin1String("video/videonode"), Qt::CaseInsensitive))
QDeclarativeVideoRendererBackend::QDeclarativeVideoRendererBackend(QDeclarativeVideoOutput *parent)
: QDeclarativeVideoBackend(parent),
m_glContext(0),
m_frameChanged(false)
{
m_surface = new QSGVideoItemSurface(this);
QObject::connect(m_surface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
q, SLOT(_q_updateNativeSize()), Qt::QueuedConnection);
foreach (QObject *instance, videoNodeFactoryLoader()->instances(QSGVideoNodeFactoryPluginKey)) {
QSGVideoNodeFactoryInterface* plugin = qobject_cast<QSGVideoNodeFactoryInterface*>(instance);
if (plugin)
m_videoNodeFactories.append(plugin);
}
// Append existing node factories as fallback if we have no plugins
m_videoNodeFactories.append(&m_i420Factory);
m_videoNodeFactories.append(&m_rgbFactory);
m_videoNodeFactories.append(&m_textureFactory);
}
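// Illustrative sketch, not part of this file: the plugin loop above accepts any object that
// implements QSGVideoNodeFactoryInterface under the "video/videonode" key. A hypothetical
// factory follows the same shape as the built-in ones (compare QSGVideoNodeFactory_I420
// further down); the MyVideoNodeFactory name is an assumption and the plugin registration
// boilerplate is omitted here.
class MyVideoNodeFactory : public QSGVideoNodeFactoryInterface
{
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
            QAbstractVideoBuffer::HandleType handleType) const
    {
        QList<QVideoFrame::PixelFormat> formats;
        if (handleType == QAbstractVideoBuffer::NoHandle)
            formats << QVideoFrame::Format_ARGB32;
        return formats;
    }
    QSGVideoNode *createNode(const QVideoSurfaceFormat &format)
    {
        // A real factory would return a QSGVideoNode subclass able to render the given format;
        // returning 0 simply lets the next factory in the list be tried.
        Q_UNUSED(format);
        return 0;
    }
};
// End of illustrative sketch.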
QDeclarativeVideoRendererBackend::~QDeclarativeVideoRendererBackend()
{
releaseSource();
releaseControl();
delete m_surface;
}
bool QDeclarativeVideoRendererBackend::init(QMediaService *service)
{
// When there is no service, the source is an object with a "videoSurface" property, which
// doesn't require a QVideoRendererControl and therefore always works
if (!service)
return true;
if (QMediaControl *control = service->requestControl(QVideoRendererControl_iid)) {
if ((m_rendererControl = qobject_cast<QVideoRendererControl *>(control))) {
m_rendererControl->setSurface(m_surface);
m_service = service;
return true;
}
}
return false;
}
void QDeclarativeVideoRendererBackend::itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData)
{
Q_UNUSED(change);
Q_UNUSED(changeData);
}
void QDeclarativeVideoRendererBackend::releaseSource()
{
if (q->source() && q->sourceType() == QDeclarativeVideoOutput::VideoSurfaceSource) {
if (q->source()->property("videoSurface").value<QAbstractVideoSurface*>() == m_surface)
q->source()->setProperty("videoSurface", QVariant::fromValue<QAbstractVideoSurface*>(0));
}
m_surface->stop();
}
void QDeclarativeVideoRendererBackend::releaseControl()
{
if (m_rendererControl) {
m_rendererControl->setSurface(0);
if (m_service)
m_service->releaseControl(m_rendererControl);
m_rendererControl = 0;
}
}
QSize QDeclarativeVideoRendererBackend::nativeSize() const
{
return m_surface->surfaceFormat().sizeHint();
}
void QDeclarativeVideoRendererBackend::updateGeometry()
{
const QRectF viewport = videoSurface()->surfaceFormat().viewport();
const QSizeF frameSize = videoSurface()->surfaceFormat().frameSize();
const QRectF normalizedViewport(viewport.x() / frameSize.width(),
viewport.y() / frameSize.height(),
viewport.width() / frameSize.width(),
viewport.height() / frameSize.height());
const QRectF rect(0, 0, q->width(), q->height());
if (nativeSize().isEmpty()) {
m_renderedRect = rect;
m_sourceTextureRect = normalizedViewport;
} else if (q->fillMode() == QDeclarativeVideoOutput::Stretch) {
m_renderedRect = rect;
m_sourceTextureRect = normalizedViewport;
} else if (q->fillMode() == QDeclarativeVideoOutput::PreserveAspectFit) {
m_sourceTextureRect = normalizedViewport;
m_renderedRect = q->contentRect();
} else if (q->fillMode() == QDeclarativeVideoOutput::PreserveAspectCrop) {
m_renderedRect = rect;
const qreal contentHeight = q->contentRect().height();
const qreal contentWidth = q->contentRect().width();
// Calculate the size of the source rectangle without taking the viewport into account
const qreal relativeOffsetLeft = -q->contentRect().left() / contentWidth;
const qreal relativeOffsetTop = -q->contentRect().top() / contentHeight;
const qreal relativeWidth = rect.width() / contentWidth;
const qreal relativeHeight = rect.height() / contentHeight;
// Now take the viewport size into account
const qreal totalOffsetLeft = normalizedViewport.x() + relativeOffsetLeft * normalizedViewport.width();
const qreal totalOffsetTop = normalizedViewport.y() + relativeOffsetTop * normalizedViewport.height();
const qreal totalWidth = normalizedViewport.width() * relativeWidth;
const qreal totalHeight = normalizedViewport.height() * relativeHeight;
if (qIsDefaultAspect(q->orientation())) {
m_sourceTextureRect = QRectF(totalOffsetLeft, totalOffsetTop,
totalWidth, totalHeight);
} else {
m_sourceTextureRect = QRectF(totalOffsetTop, totalOffsetLeft,
totalHeight, totalWidth);
}
}
if (videoSurface()->surfaceFormat().scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
qreal top = m_sourceTextureRect.top();
m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
m_sourceTextureRect.setBottom(top);
}
}
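// Worked example (not in the original source): for a 640x360 frame shown PreserveAspectCrop in
// an 800x600 item with a full-frame viewport, contentRect is roughly (-133.3, 0, 1066.7, 600),
// so relativeOffsetLeft = 0.125 and relativeWidth = 0.75, giving a source texture rect of
// (0.125, 0, 0.75, 1): the middle three quarters of the frame's width fill the item.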
QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
QQuickItem::UpdatePaintNodeData *data)
{
Q_UNUSED(data);
QSGVideoNode *videoNode = static_cast<QSGVideoNode *>(oldNode);
QMutexLocker lock(&m_frameMutex);
if (!m_glContext) {
m_glContext = QOpenGLContext::currentContext();
m_surface->scheduleOpenGLContextUpdate();
// Internal mechanism to call back the surface renderer from the QtQuick render thread
QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
if (obj) {
QEvent ev(QEvent::User);
obj->event(&ev);
}
}
if (m_frameChanged) {
if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
#ifdef DEBUG_VIDEOITEM
qDebug() << "updatePaintNode: deleting old video node because frame format changed...";
#endif
delete videoNode;
videoNode = 0;
}
if (!m_frame.isValid()) {
#ifdef DEBUG_VIDEOITEM
qDebug() << "updatePaintNode: no frames yet... aborting...";
#endif
m_frameChanged = false;
return 0;
}
if (!videoNode) {
foreach (QSGVideoNodeFactoryInterface* factory, m_videoNodeFactories) {
videoNode = factory->createNode(m_surface->surfaceFormat());
if (videoNode)
break;
}
}
}
if (!videoNode) {
m_frameChanged = false;
m_frame = QVideoFrame();
return 0;
}
// Negative rotations need lots of %360
videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
qNormalizedOrientation(q->orientation()));
if (m_frameChanged) {
videoNode->setCurrentFrame(m_frame);
// Don't keep the frame longer than really necessary
m_frameChanged = false;
m_frame = QVideoFrame();
}
return videoNode;
}
QAbstractVideoSurface *QDeclarativeVideoRendererBackend::videoSurface() const
{
return m_surface;
}
QRectF QDeclarativeVideoRendererBackend::adjustedViewport() const
{
const QRectF viewport = m_surface->surfaceFormat().viewport();
const QSize pixelAspectRatio = m_surface->surfaceFormat().pixelAspectRatio();
if (pixelAspectRatio.height() != 0) {
const qreal ratio = pixelAspectRatio.width() / pixelAspectRatio.height();
QRectF result = viewport;
result.setX(result.x() * ratio);
result.setWidth(result.width() * ratio);
return result;
}
return viewport;
}
QOpenGLContext *QDeclarativeVideoRendererBackend::glContext() const
{
return m_glContext;
}
void QDeclarativeVideoRendererBackend::present(const QVideoFrame &frame)
{
m_frameMutex.lock();
m_frame = frame;
m_frameChanged = true;
m_frameMutex.unlock();
q->update();
}
void QDeclarativeVideoRendererBackend::stop()
{
present(QVideoFrame());
}
QSGVideoItemSurface::QSGVideoItemSurface(QDeclarativeVideoRendererBackend *backend, QObject *parent)
: QAbstractVideoSurface(parent),
m_backend(backend)
{
}
QSGVideoItemSurface::~QSGVideoItemSurface()
{
}
QList<QVideoFrame::PixelFormat> QSGVideoItemSurface::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> formats;
foreach (QSGVideoNodeFactoryInterface* factory, m_backend->m_videoNodeFactories)
formats.append(factory->supportedPixelFormats(handleType));
return formats;
}
bool QSGVideoItemSurface::start(const QVideoSurfaceFormat &format)
{
#ifdef DEBUG_VIDEOITEM
qDebug() << Q_FUNC_INFO << format;
#endif
if (!supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return false;
return QAbstractVideoSurface::start(format);
}
void QSGVideoItemSurface::stop()
{
m_backend->stop();
QAbstractVideoSurface::stop();
}
bool QSGVideoItemSurface::present(const QVideoFrame &frame)
{
if (!frame.isValid()) {
qWarning() << Q_FUNC_INFO << "I'm getting bad frames here...";
return false;
}
m_backend->present(frame);
return true;
}
void QSGVideoItemSurface::scheduleOpenGLContextUpdate()
{
//This method is called from render thread
QMetaObject::invokeMethod(this, "updateOpenGLContext");
}
void QSGVideoItemSurface::updateOpenGLContext()
{
//Set a dynamic property to access the OpenGL context in Qt Quick render thread.
this->setProperty("GLContext", QVariant::fromValue<QObject*>(m_backend->glContext()));
}
QT_END_NAMESPACE


@@ -1,117 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QDECLARATIVEVIDEOOUTPUT_RENDER_P_H
#define QDECLARATIVEVIDEOOUTPUT_RENDER_P_H
#include "qdeclarativevideooutput_backend_p.h"
#include "qsgvideonode_i420.h"
#include "qsgvideonode_rgb.h"
#include "qsgvideonode_texture.h"
#include <QtCore/qmutex.h>
#include <QtMultimedia/qabstractvideosurface.h>
QT_BEGIN_NAMESPACE
class QSGVideoItemSurface;
class QVideoRendererControl;
class QOpenGLContext;
class QDeclarativeVideoRendererBackend : public QDeclarativeVideoBackend
{
public:
QDeclarativeVideoRendererBackend(QDeclarativeVideoOutput *parent);
~QDeclarativeVideoRendererBackend();
bool init(QMediaService *service);
void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData);
void releaseSource();
void releaseControl();
QSize nativeSize() const;
void updateGeometry();
QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data);
QAbstractVideoSurface *videoSurface() const;
QRectF adjustedViewport() const Q_DECL_OVERRIDE;
QOpenGLContext *glContext() const;
friend class QSGVideoItemSurface;
void present(const QVideoFrame &frame);
void stop();
private:
QPointer<QVideoRendererControl> m_rendererControl;
QList<QSGVideoNodeFactoryInterface*> m_videoNodeFactories;
QSGVideoItemSurface *m_surface;
QOpenGLContext *m_glContext;
QVideoFrame m_frame;
bool m_frameChanged;
QSGVideoNodeFactory_I420 m_i420Factory;
QSGVideoNodeFactory_RGB m_rgbFactory;
QSGVideoNodeFactory_Texture m_textureFactory;
QMutex m_frameMutex;
QRectF m_renderedRect; // Destination pixel coordinates, clipped
QRectF m_sourceTextureRect; // Source texture coordinates
};
class QSGVideoItemSurface : public QAbstractVideoSurface
{
Q_OBJECT
public:
explicit QSGVideoItemSurface(QDeclarativeVideoRendererBackend *backend, QObject *parent = 0);
~QSGVideoItemSurface();
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
bool start(const QVideoSurfaceFormat &format);
void stop();
bool present(const QVideoFrame &frame);
void scheduleOpenGLContextUpdate();
private slots:
void updateOpenGLContext();
private:
QDeclarativeVideoRendererBackend *m_backend;
};
QT_END_NAMESPACE
#endif


@@ -1,153 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdeclarativevideooutput_window_p.h"
#include "qdeclarativevideooutput_p.h"
#include <QtQuick/qquickwindow.h>
#include <QtMultimedia/qmediaservice.h>
#include <QtMultimedia/qvideowindowcontrol.h>
QT_BEGIN_NAMESPACE
QDeclarativeVideoWindowBackend::QDeclarativeVideoWindowBackend(QDeclarativeVideoOutput *parent)
: QDeclarativeVideoBackend(parent),
m_visible(true)
{
}
QDeclarativeVideoWindowBackend::~QDeclarativeVideoWindowBackend()
{
releaseSource();
releaseControl();
}
bool QDeclarativeVideoWindowBackend::init(QMediaService *service)
{
if (QMediaControl *control = service->requestControl(QVideoWindowControl_iid)) {
if ((m_videoWindowControl = qobject_cast<QVideoWindowControl *>(control))) {
if (q->window())
m_videoWindowControl->setWinId(q->window()->winId());
m_service = service;
QObject::connect(m_videoWindowControl.data(), SIGNAL(nativeSizeChanged()),
q, SLOT(_q_updateNativeSize()));
return true;
}
}
return false;
}
void QDeclarativeVideoWindowBackend::itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData)
{
if (!m_videoWindowControl)
return;
switch (change) {
case QQuickItem::ItemVisibleHasChanged:
m_visible = changeData.boolValue;
updateGeometry();
break;
case QQuickItem::ItemSceneChange:
if (changeData.window)
m_videoWindowControl->setWinId(changeData.window->winId());
else
m_videoWindowControl->setWinId(0);
break;
default: break;
}
}
void QDeclarativeVideoWindowBackend::releaseSource()
{
}
void QDeclarativeVideoWindowBackend::releaseControl()
{
if (m_videoWindowControl) {
m_videoWindowControl->setWinId(0);
if (m_service)
m_service->releaseControl(m_videoWindowControl);
m_videoWindowControl = 0;
}
}
QSize QDeclarativeVideoWindowBackend::nativeSize() const
{
return m_videoWindowControl->nativeSize();
}
void QDeclarativeVideoWindowBackend::updateGeometry()
{
switch (q->fillMode()) {
case QDeclarativeVideoOutput::PreserveAspectFit:
m_videoWindowControl->setAspectRatioMode(Qt::KeepAspectRatio); break;
case QDeclarativeVideoOutput::PreserveAspectCrop:
m_videoWindowControl->setAspectRatioMode(Qt::KeepAspectRatioByExpanding); break;
case QDeclarativeVideoOutput::Stretch:
m_videoWindowControl->setAspectRatioMode(Qt::IgnoreAspectRatio); break;
};
const QRectF canvasRect = q->mapRectToScene(QRectF(0, 0, q->width(), q->height()));
m_videoWindowControl->setDisplayRect(m_visible ? canvasRect.toAlignedRect() : QRect());
}
QSGNode *QDeclarativeVideoWindowBackend::updatePaintNode(QSGNode *oldNode,
QQuickItem::UpdatePaintNodeData *data)
{
Q_UNUSED(oldNode);
Q_UNUSED(data);
m_videoWindowControl->repaint();
return 0;
}
QAbstractVideoSurface *QDeclarativeVideoWindowBackend::videoSurface() const
{
return 0;
}
QRectF QDeclarativeVideoWindowBackend::adjustedViewport() const
{
// No viewport supported by QVideoWindowControl, so make the viewport the same size
// as the source
return QRectF(QPointF(0, 0), nativeSize());
}
QT_END_NAMESPACE


@@ -1,74 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2012 Research In Motion
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QDECLARATIVEVIDEOOUTPUT_WINDOW_P_H
#define QDECLARATIVEVIDEOOUTPUT_WINDOW_P_H
#include "qdeclarativevideooutput_backend_p.h"
QT_BEGIN_NAMESPACE
class QVideoWindowControl;
class QDeclarativeVideoWindowBackend : public QDeclarativeVideoBackend
{
public:
QDeclarativeVideoWindowBackend(QDeclarativeVideoOutput *parent);
~QDeclarativeVideoWindowBackend();
bool init(QMediaService *service);
void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData);
void releaseSource();
void releaseControl();
QSize nativeSize() const;
void updateGeometry();
QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data);
QAbstractVideoSurface *videoSurface() const;
QRectF adjustedViewport() const Q_DECL_OVERRIDE;
private:
QPointer<QVideoWindowControl> m_videoWindowControl;
bool m_visible;
};
QT_END_NAMESPACE
#endif


@@ -1,326 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_i420.h"
#include <QtCore/qmutex.h>
#include <QtQuick/qsgtexturematerial.h>
#include <QtQuick/qsgmaterial.h>
#include <QtGui/QOpenGLContext>
#include <QtGui/QOpenGLFunctions>
#include <QtGui/QOpenGLShaderProgram>
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_I420::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> formats;
if (handleType == QAbstractVideoBuffer::NoHandle)
formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12;
return formats;
}
QSGVideoNode *QSGVideoNodeFactory_I420::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QSGVideoNode_I420(format);
return 0;
}
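// Shader for planar YUV 4:2:0 frames: the Y, U and V planes are bound as
// three separate single-channel textures and converted to RGB in the
// fragment shader using the material's color-space matrix.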
class QSGVideoMaterialShader_YUV420 : public QSGMaterialShader
{
public:
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
virtual char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = qt_VertexTexCoord; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D yTexture;"
"uniform sampler2D uTexture;"
"uniform sampler2D vTexture;"
"uniform mediump mat4 colorMatrix;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" mediump float Y = texture2D(yTexture, qt_TexCoord).r;"
" mediump float U = texture2D(uTexture, qt_TexCoord).r;"
" mediump float V = texture2D(vTexture, qt_TexCoord).r;"
" mediump vec4 color = vec4(Y, U, V, 1.);"
" gl_FragColor = colorMatrix * color * opacity;"
"}";
return shader;
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_yTexture = program()->uniformLocation("yTexture");
m_id_uTexture = program()->uniformLocation("uTexture");
m_id_vTexture = program()->uniformLocation("vTexture");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_yTexture;
int m_id_uTexture;
int m_id_vTexture;
int m_id_colorMatrix;
int m_id_opacity;
};
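// Material owning one GL texture per plane. setCurrentFrame() stores the
// pending frame under a mutex; bind() uploads it on the render thread and
// recreates the textures whenever the frame size changes.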
class QSGVideoMaterial_YUV420 : public QSGMaterial
{
public:
QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format);
~QSGVideoMaterial_YUV420();
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
virtual QSGMaterialShader *createShader() const {
return new QSGVideoMaterialShader_YUV420;
}
virtual int compare(const QSGMaterial *other) const {
const QSGVideoMaterial_YUV420 *m = static_cast<const QSGVideoMaterial_YUV420 *>(other);
int d = m_textureIds[0] - m->m_textureIds[0];
if (d)
return d;
else if ((d = m_textureIds[1] - m->m_textureIds[1]) != 0)
return d;
else
return m_textureIds[2] - m->m_textureIds[2];
}
void updateBlending() {
setFlag(Blending, !qFuzzyCompare(m_opacity, qreal(1.0)));
}
void setCurrentFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
void bind();
void bindTexture(int id, int w, int h, const uchar *bits);
QVideoSurfaceFormat m_format;
QSize m_textureSize;
static const uint Num_Texture_IDs = 3;
GLuint m_textureIds[Num_Texture_IDs];
qreal m_opacity;
QMatrix4x4 m_colorMatrix;
QVideoFrame m_frame;
QMutex m_frameMutex;
};
QSGVideoMaterial_YUV420::QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format) :
m_format(format),
m_opacity(1.0)
{
memset(m_textureIds, 0, sizeof(m_textureIds));
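// The 4x4 matrices below map (Y, Cb, Cr, 1) to (R, G, B, 1). The BT.601 and
// BT.709 variants include the 16-235 studio-range expansion (hence the 1.164
// scale and the offsets in the last column); the JPEG case is full range.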
switch (format.yCbCrColorSpace()) {
case QVideoSurfaceFormat::YCbCr_JPEG:
m_colorMatrix = QMatrix4x4(
1.0f, 0.000f, 1.402f, -0.701f,
1.0f, -0.344f, -0.714f, 0.529f,
1.0f, 1.772f, 0.000f, -0.886f,
0.0f, 0.000f, 0.000f, 1.0000f);
break;
case QVideoSurfaceFormat::YCbCr_BT709:
case QVideoSurfaceFormat::YCbCr_xvYCC709:
m_colorMatrix = QMatrix4x4(
1.164f, 0.000f, 1.793f, -0.5727f,
1.164f, -0.534f, -0.213f, 0.3007f,
1.164f, 2.115f, 0.000f, -1.1302f,
0.0f, 0.000f, 0.000f, 1.0000f);
break;
default: //BT 601:
m_colorMatrix = QMatrix4x4(
1.164f, 0.000f, 1.596f, -0.8708f,
1.164f, -0.392f, -0.813f, 0.5296f,
1.164f, 2.017f, 0.000f, -1.081f,
0.0f, 0.000f, 0.000f, 1.0000f);
}
setFlag(Blending, false);
}
QSGVideoMaterial_YUV420::~QSGVideoMaterial_YUV420()
{
if (!m_textureSize.isEmpty())
glDeleteTextures(Num_Texture_IDs, m_textureIds);
}
void QSGVideoMaterial_YUV420::bind()
{
QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) {
if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
int fw = m_frame.width();
int fh = m_frame.height();
// Frame has changed size, recreate textures...
if (m_textureSize != m_frame.size()) {
if (!m_textureSize.isEmpty())
glDeleteTextures(Num_Texture_IDs, m_textureIds);
glGenTextures(Num_Texture_IDs, m_textureIds);
m_textureSize = m_frame.size();
}
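// I420/YV12 layout: a full-resolution Y plane followed by two half-resolution
// chroma planes. Chroma lines are padded to a 4-byte boundary (bpl2), and
// YV12 stores the V plane before the U plane, hence the swap below.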
const uchar *bits = m_frame.bits();
int bpl = m_frame.bytesPerLine();
int bpl2 = (bpl / 2 + 3) & ~3;
int offsetU = bpl * fh;
int offsetV = bpl * fh + bpl2 * fh / 2;
if (m_frame.pixelFormat() == QVideoFrame::Format_YV12)
qSwap(offsetU, offsetV);
functions->glActiveTexture(GL_TEXTURE1);
bindTexture(m_textureIds[1], fw / 2, fh / 2, bits + offsetU);
functions->glActiveTexture(GL_TEXTURE2);
bindTexture(m_textureIds[2], fw / 2, fh / 2, bits + offsetV);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], fw, fh, bits);
m_frame.unmap();
}
m_frame = QVideoFrame();
} else {
functions->glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_textureIds[1]);
functions->glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, m_textureIds[2]);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
glBindTexture(GL_TEXTURE_2D, m_textureIds[0]);
}
}
void QSGVideoMaterial_YUV420::bindTexture(int id, int w, int h, const uchar *bits)
{
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, bits);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
QSGVideoNode_I420::QSGVideoNode_I420(const QVideoSurfaceFormat &format) :
m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QSGVideoMaterial_YUV420(format);
setMaterial(m_material);
}
QSGVideoNode_I420::~QSGVideoNode_I420()
{
}
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setCurrentFrame(frame);
markDirty(DirtyMaterial);
}
void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV420 *mat = static_cast<QSGVideoMaterial_YUV420 *>(newMaterial);
program()->setUniformValue(m_id_yTexture, 0);
program()->setUniformValue(m_id_uTexture, 1);
program()->setUniformValue(m_id_vTexture, 2);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

View File

@@ -1,74 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_I420_H
#define QSGVIDEONODE_I420_H
#include <private/qsgvideonode_p.h>
#include <QtMultimedia/qvideosurfaceformat.h>
class QSGVideoMaterial_YUV420;
class QSGVideoNode_I420 : public QSGVideoNode
{
public:
QSGVideoNode_I420(const QVideoSurfaceFormat &format);
~QSGVideoNode_I420();
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
private:
void bindTexture(int id, int unit, int w, int h, const uchar *bits);
QVideoSurfaceFormat m_format;
QSGVideoMaterial_YUV420 *m_material;
};
class QSGVideoNodeFactory_I420 : public QSGVideoNodeFactoryInterface {
public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
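// Typical use by a VideoOutput backend (illustrative only, not part of this
// file; surfaceFormat and frame are assumed to come from the caller):
//
//     QSGVideoNodeFactory_I420 factory;
//     if (QSGVideoNode *node = factory.createNode(surfaceFormat))
//         node->setCurrentFrame(frame);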
#endif // QSGVIDEONODE_I420_H

View File

@@ -1,285 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_rgb.h"
#include <QtQuick/qsgtexturematerial.h>
#include <QtQuick/qsgmaterial.h>
#include <QtCore/qmutex.h>
#include <QtGui/QOpenGLContext>
#include <QtGui/QOpenGLFunctions>
#include <QtGui/QOpenGLShaderProgram>
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_RGB::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == QAbstractVideoBuffer::NoHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32);
pixelFormats.append(QVideoFrame::Format_ARGB32);
pixelFormats.append(QVideoFrame::Format_BGR32);
pixelFormats.append(QVideoFrame::Format_BGRA32);
}
return pixelFormats;
}
QSGVideoNode *QSGVideoNodeFactory_RGB::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QSGVideoNode_RGB(format);
return 0;
}
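// Shader for packed RGB frames. Format_RGB32/Format_ARGB32 pixels are stored
// as 0xAARRGGBB, which on little-endian hosts is B,G,R,A in memory; since the
// data is uploaded as GL_RGBA, those formats use the .bgr swizzle shader.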
class QSGVideoMaterialShader_RGB : public QSGMaterialShader
{
public:
QSGVideoMaterialShader_RGB(QVideoFrame::PixelFormat pixelFormat)
: QSGMaterialShader(),
m_id_matrix(-1),
m_id_rgbTexture(-1),
m_id_opacity(-1),
m_pixelFormat(pixelFormat)
{
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
virtual char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = qt_VertexTexCoord; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
"}";
static const char *colorsSwapShader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = vec4(texture2D(rgbTexture, qt_TexCoord).bgr, 1.0) * opacity;"
"}";
switch (m_pixelFormat) {
case QVideoFrame::Format_RGB32:
case QVideoFrame::Format_ARGB32:
return colorsSwapShader;
default:
return shader;
}
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_rgbTexture = program()->uniformLocation("rgbTexture");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_rgbTexture;
int m_id_opacity;
QVideoFrame::PixelFormat m_pixelFormat;
};
class QSGVideoMaterial_RGB : public QSGMaterial
{
public:
QSGVideoMaterial_RGB(const QVideoSurfaceFormat &format) :
m_format(format),
m_textureId(0),
m_opacity(1.0)
{
setFlag(Blending, false);
}
~QSGVideoMaterial_RGB()
{
if (m_textureId)
glDeleteTextures(1, &m_textureId);
}
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
virtual QSGMaterialShader *createShader() const {
return new QSGVideoMaterialShader_RGB(m_format.pixelFormat());
}
virtual int compare(const QSGMaterial *other) const {
const QSGVideoMaterial_RGB *m = static_cast<const QSGVideoMaterial_RGB *>(other);
return m_textureId - m->m_textureId;
}
void updateBlending() {
setFlag(Blending, !qFuzzyCompare(m_opacity, qreal(1.0)));
}
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
void bind()
{
QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) {
if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
if (m_textureSize != m_frame.size()) {
if (!m_textureSize.isEmpty())
glDeleteTextures(1, &m_textureId);
glGenTextures(1, &m_textureId);
m_textureSize = m_frame.size();
}
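// 16-bit RGB565 frames are uploaded as GL_RGB with 5-6-5 packing; the other
// supported formats are uploaded as four 8-bit channels (GL_RGBA).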
GLint dataType = GL_UNSIGNED_BYTE;
GLint dataFormat = GL_RGBA;
if (m_frame.pixelFormat() == QVideoFrame::Format_RGB565) {
dataType = GL_UNSIGNED_SHORT_5_6_5;
dataFormat = GL_RGB;
}
functions->glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_textureId);
glTexImage2D(GL_TEXTURE_2D, 0, dataFormat,
m_textureSize.width(), m_textureSize.height(),
0, dataFormat, dataType, m_frame.bits());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
m_frame.unmap();
}
m_frame = QVideoFrame();
} else {
functions->glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_textureId);
}
}
QVideoFrame m_frame;
QMutex m_frameMutex;
QSize m_textureSize;
QVideoSurfaceFormat m_format;
GLuint m_textureId;
qreal m_opacity;
};
QSGVideoNode_RGB::QSGVideoNode_RGB(const QVideoSurfaceFormat &format) :
m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QSGVideoMaterial_RGB(format);
setMaterial(m_material);
}
QSGVideoNode_RGB::~QSGVideoNode_RGB()
{
}
void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
void QSGVideoMaterialShader_RGB::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterial_RGB *mat = static_cast<QSGVideoMaterial_RGB *>(newMaterial);
program()->setUniformValue(m_id_rgbTexture, 0);
mat->bind();
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
mat->updateBlending();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

View File

@@ -1,74 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_RGB_H
#define QSGVIDEONODE_RGB_H
#include <private/qsgvideonode_p.h>
#include <QtMultimedia/qvideosurfaceformat.h>
class QSGVideoMaterial_RGB;
class QSGVideoNode_RGB : public QSGVideoNode
{
public:
QSGVideoNode_RGB(const QVideoSurfaceFormat &format);
~QSGVideoNode_RGB();
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
private:
QVideoSurfaceFormat m_format;
QSGVideoMaterial_RGB *m_material;
QVideoFrame m_frame;
};
class QSGVideoNodeFactory_RGB : public QSGVideoNodeFactoryInterface {
public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
#endif // QSGVIDEONODE_RGB_H

View File

@@ -1,267 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_texture.h"
#include <QtQuick/qsgtexturematerial.h>
#include <QtQuick/qsgmaterial.h>
#include <QtCore/qmutex.h>
#include <QtGui/QOpenGLContext>
#include <QtGui/QOpenGLFunctions>
#include <QtGui/QOpenGLShaderProgram>
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == QAbstractVideoBuffer::GLTextureHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32);
pixelFormats.append(QVideoFrame::Format_ARGB32);
pixelFormats.append(QVideoFrame::Format_BGR32);
pixelFormats.append(QVideoFrame::Format_BGRA32);
}
return pixelFormats;
}
QSGVideoNode *QSGVideoNodeFactory_Texture::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QSGVideoNode_Texture(format);
return 0;
}
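// Node for frames that already carry an OpenGL texture handle
// (QAbstractVideoBuffer::GLTextureHandle): no pixel upload takes place,
// bind() simply binds the texture id stored in the frame's handle().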
class QSGVideoMaterialShader_Texture : public QSGMaterialShader
{
public:
QSGVideoMaterialShader_Texture(QVideoFrame::PixelFormat pixelFormat)
: QSGMaterialShader(),
m_pixelFormat(pixelFormat)
{
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
virtual char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = qt_VertexTexCoord; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
"}";
#ifndef QT_OPENGL_ES_2_ANGLE
static const char *colorsSwapShader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = vec4(texture2D(rgbTexture, qt_TexCoord).bgr, 1.0) * opacity;"
"}";
switch (m_pixelFormat) {
case QVideoFrame::Format_RGB32:
case QVideoFrame::Format_ARGB32:
return colorsSwapShader;
default:
return shader;
}
#else
return shader;
#endif
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_Texture = program()->uniformLocation("rgbTexture");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_Texture;
int m_id_opacity;
QVideoFrame::PixelFormat m_pixelFormat;
};
class QSGVideoMaterial_Texture : public QSGMaterial
{
public:
QSGVideoMaterial_Texture(const QVideoSurfaceFormat &format) :
m_format(format),
m_textureId(0),
m_opacity(1.0)
{
setFlag(Blending, false);
}
~QSGVideoMaterial_Texture()
{
m_frame = QVideoFrame();
}
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
virtual QSGMaterialShader *createShader() const {
return new QSGVideoMaterialShader_Texture(m_format.pixelFormat());
}
virtual int compare(const QSGMaterial *other) const {
const QSGVideoMaterial_Texture *m = static_cast<const QSGVideoMaterial_Texture *>(other);
int diff = m_textureId - m->m_textureId;
if (diff)
return diff;
diff = m_format.pixelFormat() - m->m_format.pixelFormat();
if (diff)
return diff;
return (m_opacity > m->m_opacity) ? 1 : -1;
}
void updateBlending() {
setFlag(Blending, !qFuzzyCompare(m_opacity, qreal(1.0)));
}
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
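// bind() takes the GL texture id from the frame's handle(); ownership of the
// texture stays with the video backend, so nothing is deleted here.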
void bind()
{
QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) {
m_textureId = m_frame.handle().toUInt();
glBindTexture(GL_TEXTURE_2D, m_textureId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
} else {
m_textureId = 0;
}
}
QVideoFrame m_frame;
QMutex m_frameMutex;
QSize m_textureSize;
QVideoSurfaceFormat m_format;
GLuint m_textureId;
qreal m_opacity;
};
QSGVideoNode_Texture::QSGVideoNode_Texture(const QVideoSurfaceFormat &format) :
m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QSGVideoMaterial_Texture(format);
setMaterial(m_material);
}
QSGVideoNode_Texture::~QSGVideoNode_Texture()
{
}
void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
void QSGVideoMaterialShader_Texture::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterial_Texture *mat = static_cast<QSGVideoMaterial_Texture *>(newMaterial);
program()->setUniformValue(m_id_Texture, 0);
mat->bind();
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
mat->updateBlending();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

View File

@@ -1,74 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_TEXTURE_H
#define QSGVIDEONODE_TEXTURE_H
#include <private/qsgvideonode_p.h>
#include <QtMultimedia/qvideosurfaceformat.h>
class QSGVideoMaterial_Texture;
class QSGVideoNode_Texture : public QSGVideoNode
{
public:
QSGVideoNode_Texture(const QVideoSurfaceFormat &format);
~QSGVideoNode_Texture();
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame);
private:
QVideoSurfaceFormat m_format;
QSGVideoMaterial_Texture *m_material;
QVideoFrame m_frame;
};
class QSGVideoNodeFactory_Texture : public QSGVideoNodeFactoryInterface {
public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
#endif // QSGVIDEONODE_TEXTURE_H