Merge branch 'camera' of git://scm.dev.nokia.troll.no/qt/qtmultimediakit

Change-Id: Iec6f96e3713257d25f7b1e06c6da9c153b23800b
This commit is contained in:
Michael Goddard
2011-08-30 14:42:21 +10:00
58 changed files with 2121 additions and 977 deletions

View File

@@ -47,8 +47,8 @@
#include "qdeclarativemediametadata_p.h"
#include "qdeclarativeaudio_p.h"
#include "qdeclarativevideooutput_p.h"
#if 0
#include "qdeclarativevideo_p.h"
#include "qdeclarativecamera_p.h"
#include "qdeclarativecamerapreviewprovider_p.h"
#endif
@@ -67,9 +67,10 @@ public:
qmlRegisterType<QSoundEffect>(uri, 4, 0, "SoundEffect");
qmlRegisterType<QDeclarativeAudio>(uri, 4, 0, "Audio");
qmlRegisterType<QDeclarativeAudio>(uri, 4, 0, "MediaPlayer");
qmlRegisterType<QDeclarativeVideoOutput>(uri, 4, 0, "VideoOutput");
/* Disabled until ported to scenegraph */
#if 0
qmlRegisterType<QDeclarativeVideo>(uri, 4, 0, "Video");
qmlRegisterType<QDeclarativeCamera>(uri, 4, 0, "Camera");
#endif
qmlRegisterType<QDeclarativeMediaMetaData>();

View File

@@ -12,11 +12,20 @@ HEADERS += \
qdeclarativeaudio_p.h \
qdeclarativemediabase_p.h \
qdeclarativemediametadata_p.h \
qdeclarativevideooutput_p.h \
qsgvideonode_p.h \
qsgvideonode_i420.h \
qsgvideonode_rgb32.h \
SOURCES += \
multimedia.cpp \
qdeclarativeaudio.cpp \
qdeclarativemediabase.cpp \
qdeclarativevideooutput.cpp \
qsgvideonode.cpp \
qsgvideonode_i420.cpp \
qsgvideonode_rgb32.cpp \
disabled {
HEADERS += \

View File

@@ -305,6 +305,7 @@ QDeclarativeAudio::Error QDeclarativeAudio::error() const
// QDeclarativeParserStatus hook, called before QML property values are
// assigned. Initialises the media backend via setObject() and announces the
// mediaObject property so bindings made at creation time (e.g. a
// VideoOutput source) pick it up.
void QDeclarativeAudio::classBegin()
{
    setObject(this);
    emit mediaObjectChanged();
}
void QDeclarativeAudio::componentComplete()

View File

@@ -84,6 +84,7 @@ class QDeclarativeAudio : public QObject, public QDeclarativeMediaBase, public Q
Q_PROPERTY(Error error READ error NOTIFY errorChanged)
Q_PROPERTY(QString errorString READ errorString NOTIFY errorChanged)
Q_PROPERTY(QDeclarativeMediaMetaData *metaData READ metaData CONSTANT)
Q_PROPERTY(QObject *mediaObject READ mediaObject NOTIFY mediaObjectChanged SCRIPTABLE false DESIGNABLE false)
Q_ENUMS(Status)
Q_ENUMS(Error)
Q_ENUMS(Loop)
@@ -126,6 +127,8 @@ public:
void classBegin();
void componentComplete();
QObject *mediaObject() { return m_mediaObject; }
public Q_SLOTS:
void play();
void pause();
@@ -159,6 +162,8 @@ Q_SIGNALS:
void errorChanged();
void error(QDeclarativeAudio::Error error, const QString &errorString);
void mediaObjectChanged();
private Q_SLOTS:
void _q_error(int, const QString &);

View File

@@ -0,0 +1,368 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdeclarativevideooutput_p.h"
#include "qsgvideonode_p.h"
#include "qsgvideonode_i420.h"
#include "qsgvideonode_rgb32.h"
#include <QtDeclarative/qsgitem.h>
#include <QtMultimediaKit/QAbstractVideoSurface>
#include <QtMultimediaKit/qmediaservice.h>
#include <QtMultimediaKit/qvideorenderercontrol.h>
#include <QtMultimediaKit/qvideosurfaceformat.h>
#include <QtCore/qmetaobject.h>
//#define DEBUG_VIDEOITEM
// Internal video surface bridging QtMultimediaKit's renderer control to the
// scene-graph item: frames presented here are forwarded to the owning
// QDeclarativeVideoOutput for painting.
class QSGVideoItemSurface : public QAbstractVideoSurface
{
public:
    QSGVideoItemSurface(QDeclarativeVideoOutput *item, QObject *parent = 0) :
        QAbstractVideoSurface(parent),
        m_item(item)
    {
    }

    ~QSGVideoItemSurface()
    {
    }

    // Union of the pixel formats supported by all of the item's registered
    // video node factories.
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
    {
        QList<QVideoFrame::PixelFormat> formats;
        foreach (QSGVideoNodeFactory* factory, m_item->m_videoNodeFactories) {
            formats.append(factory->supportedPixelFormats(handleType));
        }
        return formats;
    }

    // Accept the surface format only if some factory can build a node for it.
    bool start(const QVideoSurfaceFormat &format)
    {
#ifdef DEBUG_VIDEOITEM
        qDebug() << Q_FUNC_INFO << format;
#endif
        if (!supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
            return false;
        return QAbstractVideoSurface::start(format);
    }

    // Hand each delivered frame to the item; invalid frames are rejected.
    virtual bool present(const QVideoFrame &frame)
    {
        if (!frame.isValid()) {
            qWarning() << Q_FUNC_INFO << "I'm getting bad frames here...";
            return false;
        }
        m_item->present(frame);
        return true;
    }

private:
    QDeclarativeVideoOutput *m_item;   // not owned
};
/*!
\qmlclass VideoOutput QDeclarativeVideoOutput
\brief The VideoOutput element allows you to render video or camera viewfinder.
\ingroup qml-multimedia
This element is part of the \bold{QtMultimediaKit 4.0} module.
\qml
import QtQuick 2.0
import Qt.multimediakit 4.0
Rectangle {
width: 800
height: 600
color: "black"
MediaPlayer {
id: player
source: "file://video.webm"
playing: true
}
VideoOutput {
id: videoOutput
source: player
anchors.fill: parent
}
}
\endqml
The VideoOutput item supports untransformed, stretched, and uniformly scaled video presentation.
For a description of stretched uniformly scaled presentation, see the \l fillMode property
description.
\sa MediaPlayer, Camera
*/
/*!
\internal
\class QDeclarativeVideoOutput
\brief The QDeclarativeVideoOutput class provides a video output item.
*/
QDeclarativeVideoOutput::QDeclarativeVideoOutput(QSGItem *parent) :
    QSGItem(parent),
    m_fillMode(PreserveAspectFit)
{
    setFlag(ItemHasContents, true);
    m_surface = new QSGVideoItemSurface(this);
    // Queued so the native-size update is processed on the item's thread
    // after the surface format change has been delivered.
    connect(m_surface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
            this, SLOT(_q_updateNativeSize(QVideoSurfaceFormat)), Qt::QueuedConnection);
    m_videoNodeFactories.append(new QSGVideoNodeFactory_I420);
#ifndef QT_OPENGL_ES
    // The RGB32 path uploads with GL_BGRA, which is unavailable on GL ES
    // (see QSGVideoTexture_RGB32::bind()).
    m_videoNodeFactories.append(new QSGVideoNodeFactory_RGB32);
#endif
}
QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
{
    // Clear the source first so _q_updateMediaObject() releases the renderer
    // control and detaches our surface before the surface is deleted.
    m_source.clear();
    _q_updateMediaObject();
    delete m_surface;
    qDeleteAll(m_videoNodeFactories);
}
/*!
    \qmlproperty variant VideoOutput::source
    This property holds the source item providing the video frames like MediaPlayer or Camera.
*/
void QDeclarativeVideoOutput::setSource(QObject *source)
{
#ifdef DEBUG_VIDEOITEM
    qDebug() << Q_FUNC_INFO << source;
#endif
    if (source == m_source.data())
        return;

    // Stop listening to the old source's mediaObject notifications.
    // BUG FIX: the original call was disconnect(0, m_source.data(), ...),
    // with sender and receiver swapped, so the old connection was never
    // removed.
    if (m_source)
        disconnect(m_source.data(), 0, this, SLOT(_q_updateMediaObject()));

    m_source = source;

    if (m_source) {
        // The source is an arbitrary QObject exposing a "mediaObject"
        // property, so the connection has to go through the meta-object
        // system rather than a compile-time SIGNAL/SLOT pair.
        const QMetaObject *metaObject = m_source.data()->metaObject();
        const QMetaProperty mediaObjectProperty = metaObject->property(
                metaObject->indexOfProperty("mediaObject"));

        if (mediaObjectProperty.hasNotifySignal()) {
            QMetaMethod method = mediaObjectProperty.notifySignal();
            // BUG FIX: the slot is named _q_updateMediaObject(); the original
            // lookup of "updateMediaObject()" returned -1 and the connection
            // silently failed, so media-object changes were never tracked.
            QMetaObject::connect(m_source.data(), method.methodIndex(),
                                 this, this->metaObject()->indexOfSlot("_q_updateMediaObject()"),
                                 Qt::DirectConnection, 0);
        }
    }

    _q_updateMediaObject();
    emit sourceChanged();
}
// Re-resolves the QMediaObject exposed by the current source, releasing the
// previously acquired renderer control and requesting a new one from the
// new media object's service.
void QDeclarativeVideoOutput::_q_updateMediaObject()
{
    QMediaObject *mediaObject = 0;
    if (m_source)
        mediaObject = qobject_cast<QMediaObject*>(m_source.data()->property("mediaObject").value<QObject*>());
#ifdef DEBUG_VIDEOITEM
    qDebug() << Q_FUNC_INFO << mediaObject;
#endif
    if (m_mediaObject.data() == mediaObject)
        return;

    // Detach our surface and hand the renderer control back to the service
    // that provided it.
    if (m_rendererControl) {
        m_rendererControl.data()->setSurface(0);
        m_service.data()->releaseControl(m_rendererControl.data());
    }

    // Drop stale references; they are re-established below on success.
    // (The original code had a dead store "m_mediaObject = mediaObject;"
    // immediately before clearing it — removed.)
    m_mediaObject.clear();
    m_service.clear();
    m_rendererControl.clear();

    if (mediaObject) {
        if (QMediaService *service = mediaObject->service()) {
            if (QMediaControl *control = service->requestControl(QVideoRendererControl_iid)) {
                if ((m_rendererControl = qobject_cast<QVideoRendererControl *>(control))) {
                    // Success: keep the service/object and start feeding our surface.
                    m_service = service;
                    m_mediaObject = mediaObject;
                    m_rendererControl.data()->setSurface(m_surface);
                } else {
                    qWarning() << Q_FUNC_INFO << "Media service has no renderer control available";
                    service->releaseControl(control);
                }
            }
        }
    }
}
// Called by the surface for every delivered frame: store it and schedule a
// scene-graph repaint; updatePaintNode() picks the frame up.
void QDeclarativeVideoOutput::present(const QVideoFrame &frame)
{
    m_frame = frame;
    update();
}
/*!
\qmlproperty enumeration VideoOutput::fillMode
Set this property to define how the video is scaled to fit the target area.
\list
\o Stretch - the video is scaled to fit.
\o PreserveAspectFit - the video is scaled uniformly to fit without cropping
\o PreserveAspectCrop - the video is scaled uniformly to fill, cropping if necessary
\endlist
The default fill mode is PreserveAspectFit.
*/
// Returns the current scaling mode (see the fillMode QML property above).
QDeclarativeVideoOutput::FillMode QDeclarativeVideoOutput::fillMode() const
{
    return m_fillMode;
}
// Sets the scaling mode; repaints and notifies only on an actual change.
void QDeclarativeVideoOutput::setFillMode(FillMode mode)
{
    if (m_fillMode != mode) {
        m_fillMode = mode;
        update();
        emit fillModeChanged(mode);
    }
}
void QDeclarativeVideoOutput::_q_updateNativeSize(const QVideoSurfaceFormat &format)
{
const QSize &size = format.sizeHint();
if (m_nativeSize != size) {
m_nativeSize = size;
setImplicitWidth(size.width());
setImplicitHeight(size.height());
}
}
// Recomputes the destination rectangle (m_boundingRect, item coordinates)
// and the normalized source rectangle (m_sourceRect, texture coordinates
// in 0..1) from the current fill mode and the video's native size.
void QDeclarativeVideoOutput::_q_updateGeometry()
{
    QRectF rect(0, 0, width(), height());
    if (m_nativeSize.isEmpty()) {
        //this is necessary for item to receive the
        //first paint event and configure video surface.
        m_boundingRect = rect;
        m_sourceRect = QRectF(0, 0, 1, 1);
    } else if (m_fillMode == Stretch) {
        // Fill the whole item; aspect ratio may change.
        m_boundingRect = rect;
        m_sourceRect = QRectF(0, 0, 1, 1);
    } else if (m_fillMode == PreserveAspectFit) {
        // Scale uniformly to fit inside the item, centred, no cropping.
        QSizeF size = m_nativeSize;
        size.scale(rect.size(), Qt::KeepAspectRatio);
        m_boundingRect = QRectF(0, 0, size.width(), size.height());
        m_boundingRect.moveCenter(rect.center());
        m_sourceRect = QRectF(0, 0, 1, 1);
    } else if (m_fillMode == PreserveAspectCrop) {
        // Fill the item and crop: shrink the centred source rectangle
        // instead of the target rectangle.
        m_boundingRect = rect;
        QSizeF size = rect.size();
        size.scale(m_nativeSize, Qt::KeepAspectRatio);
        m_sourceRect = QRectF(
                0, 0, size.width() / m_nativeSize.width(), size.height() / m_nativeSize.height());
        m_sourceRect.moveCenter(QPointF(0.5, 0.5));
    }
}
// Scene-graph update: (re)builds a QSGVideoNode matching the current frame's
// pixel format and pushes the latest frame and geometry into it.
QSGNode *QDeclarativeVideoOutput::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *)
{
    QSGVideoNode *videoNode = static_cast<QSGVideoNode *>(oldNode);

    // An existing node can only be reused while the pixel format is
    // unchanged; otherwise drop it and create a fresh one below.
    if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
#ifdef DEBUG_VIDEOITEM
        qDebug() << "updatePaintNode: deleting old video node because frame format changed...";
#endif
        delete videoNode;
        videoNode = 0;
    }

    // Nothing to paint until the first frame arrives.
    if (!m_frame.isValid()) {
#ifdef DEBUG_VIDEOITEM
        qDebug() << "updatePaintNode: no frames yet... aborting...";
#endif
        return 0;
    }

    // Ask each factory in turn; the first one that accepts the surface
    // format supplies the node.
    if (videoNode == 0) {
        foreach (QSGVideoNodeFactory* factory, m_videoNodeFactories) {
            videoNode = factory->createNode(m_surface->surfaceFormat());
            if (videoNode)
                break;
        }
    }

    if (videoNode == 0)
        return 0;

    _q_updateGeometry();
    videoNode->setTexturedRectGeometry(m_boundingRect, m_sourceRect);
    videoNode->setCurrentFrame(m_frame);
    return videoNode;
}

View File

@@ -0,0 +1,115 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QDECLARATIVEVIDEOOUTPUT_P_H
#define QDECLARATIVEVIDEOOUTPUT_P_H
#include <qsgitem.h>
#include <QtMultimediaKit/qvideoframe.h>
#include <QtMultimediaKit/qmediaobject.h>
#include <QtCore/qsharedpointer.h>
#include "qsgvideonode_p.h"
class QSGVideoItemSurface;
class QVideoRendererControl;
class QMediaService;
class QVideoSurfaceFormat;
// QML "VideoOutput" item: renders video frames provided by a source object
// (e.g. MediaPlayer or Camera) using scene-graph video nodes.
class QDeclarativeVideoOutput : public QSGItem
{
    Q_OBJECT
    Q_DISABLE_COPY(QDeclarativeVideoOutput)
    Q_PROPERTY(QObject* source READ source WRITE setSource NOTIFY sourceChanged)
    Q_PROPERTY(FillMode fillMode READ fillMode WRITE setFillMode NOTIFY fillModeChanged)
    Q_ENUMS(FillMode)
public:
    // Values mirror Qt::AspectRatioMode so they can be passed straight to
    // QSizeF::scale() in the geometry computation.
    enum FillMode
    {
        Stretch            = Qt::IgnoreAspectRatio,
        PreserveAspectFit  = Qt::KeepAspectRatio,
        PreserveAspectCrop = Qt::KeepAspectRatioByExpanding
    };

    QDeclarativeVideoOutput(QSGItem *parent = 0);
    ~QDeclarativeVideoOutput();

    // Source object exposing a "mediaObject" property (MediaPlayer, Camera).
    QObject *source() const { return m_source.data(); }
    void setSource(QObject *source);

    FillMode fillMode() const;
    void setFillMode(FillMode mode);

Q_SIGNALS:
    void sourceChanged();
    void fillModeChanged(QDeclarativeVideoOutput::FillMode);

protected:
    QSGNode *updatePaintNode(QSGNode *, UpdatePaintNodeData *);

private Q_SLOTS:
    void _q_updateMediaObject();
    void _q_updateNativeSize(const QVideoSurfaceFormat&);
    void _q_updateGeometry();

private:
    // Called by the surface with each new frame.
    void present(const QVideoFrame &frame);
    friend class QSGVideoItemSurface;

    QWeakPointer<QObject> m_source;                      // current source item
    QWeakPointer<QMediaObject> m_mediaObject;            // resolved from source
    QWeakPointer<QMediaService> m_service;               // service providing the control
    QWeakPointer<QVideoRendererControl> m_rendererControl;
    QList<QSGVideoNodeFactory*> m_videoNodeFactories;    // owned (qDeleteAll in dtor)
    QSGVideoItemSurface *m_surface;                      // owned
    QVideoFrame m_frame;                                 // latest presented frame
    FillMode m_fillMode;
    QSize m_nativeSize;                                  // from surface format hint
    QRectF m_boundingRect;                               // destination rect (item coords)
    QRectF m_sourceRect;                                 // normalized texture rect
};
#endif // QDECLARATIVEVIDEOOUTPUT_P_H

View File

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_p.h"
// Geometry is created lazily in setTexturedRectGeometry(); materials are
// supplied by the concrete subclasses.
QSGVideoNode::QSGVideoNode()
{
}
// Updates the node's quad geometry to cover \a rect with texture
// coordinates from \a textureRect. The geometry is created lazily on first
// use; the call is a no-op when neither rectangle changed.
void QSGVideoNode::setTexturedRectGeometry(const QRectF &rect, const QRectF &textureRect)
{
    if (rect == m_rect && textureRect == m_textureRect)
        return;

    m_rect = rect;
    m_textureRect = textureRect;

    QSGGeometry *g = geometry();
    if (g == 0) {
        g = new QSGGeometry(QSGGeometry::defaultAttributes_TexturedPoint2D(), 4);
        setGeometry(g);
    }
    // Single update call; the original duplicated it in both branches.
    QSGGeometry::updateTexturedRectGeometry(g, rect, textureRect);

    markDirty(DirtyGeometry);
}

View File

@@ -0,0 +1,295 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_i420.h"
#include <QtDeclarative/qsgtexturematerial.h>
#include <QtDeclarative/qsgmaterial.h>
#include <QtOpenGL/qglshaderprogram.h>
// Planar 4:2:0 YUV formats handled by this factory; only CPU-memory
// (NoHandle) frames are supported.
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_I420::supportedPixelFormats(
        QAbstractVideoBuffer::HandleType handleType) const
{
    QList<QVideoFrame::PixelFormat> pixelFormats;
    if (handleType == QAbstractVideoBuffer::NoHandle) {
        pixelFormats.append(QVideoFrame::Format_YUV420P);
        pixelFormats.append(QVideoFrame::Format_YV12);
    }
    return pixelFormats;
}
// Builds an I420 video node for the given surface format, or returns 0 when
// the format's pixel layout is not one we can render.
QSGVideoNode *QSGVideoNodeFactory_I420::createNode(const QVideoSurfaceFormat &format)
{
    const bool supported =
            supportedPixelFormats(format.handleType()).contains(format.pixelFormat());
    return supported ? new QSGVideoNode_I420(format) : 0;
}
// Shader for planar YUV 4:2:0 frames: samples the Y, U and V planes from
// three single-channel textures and converts to RGB via a colour-space
// matrix uniform.
class QSGVideoMaterialShader_YUV420 : public QSGMaterialShader
{
public:
    void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);

    virtual char const *const *attributeNames() const {
        static const char *names[] = {
            "qt_VertexPosition",
            "qt_VertexTexCoord",
            0
        };
        return names;
    }

protected:
    // Standard textured-quad vertex shader: passes texture coordinates
    // through and applies the combined matrix.
    virtual const char *vertexShader() const {
        const char *shader =
        "uniform highp mat4 qt_Matrix; \n"
        "attribute highp vec4 qt_VertexPosition; \n"
        "attribute highp vec2 qt_VertexTexCoord; \n"
        "varying highp vec2 qt_TexCoord; \n"
        "void main() { \n"
        " qt_TexCoord = qt_VertexTexCoord; \n"
        " gl_Position = qt_Matrix * qt_VertexPosition; \n"
        "}";
        return shader;
    }

    // Builds (Y, U, V, 1) and multiplies by colorMatrix to obtain RGBA,
    // modulated by the item opacity.
    virtual const char *fragmentShader() const {
        static const char *shader =
        "uniform sampler2D yTexture;"
        "uniform sampler2D uTexture;"
        "uniform sampler2D vTexture;"
        "uniform mediump mat4 colorMatrix;"
        "uniform lowp float opacity;"
        ""
        "varying highp vec2 qt_TexCoord;"
        ""
        "void main()"
        "{"
        " mediump float Y = texture2D(yTexture, qt_TexCoord).r;"
        " mediump float U = texture2D(uTexture, qt_TexCoord).r;"
        " mediump float V = texture2D(vTexture, qt_TexCoord).r;"
        " mediump vec4 color = vec4(Y, U, V, 1.);"
        " gl_FragColor = colorMatrix * color * opacity;"
        "}";
        return shader;
    }

    // Cache the uniform locations once the program is linked.
    virtual void initialize() {
        m_id_matrix = program()->uniformLocation("qt_Matrix");
        m_id_yTexture = program()->uniformLocation("yTexture");
        m_id_uTexture = program()->uniformLocation("uTexture");
        m_id_vTexture = program()->uniformLocation("vTexture");
        m_id_colorMatrix = program()->uniformLocation("colorMatrix");
        m_id_opacity = program()->uniformLocation("opacity");
    }

    // Cached uniform locations.
    int m_id_matrix;
    int m_id_yTexture;
    int m_id_uTexture;
    int m_id_vTexture;
    int m_id_colorMatrix;
    int m_id_opacity;
};
// Scene-graph material holding the three plane texture ids and the
// YUV->RGB conversion matrix chosen from the frame's colour space.
class QSGVideoMaterial_YUV420 : public QSGMaterial
{
public:
    QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format)
    {
        // Pick the conversion matrix matching the source colour space.
        switch (format.yCbCrColorSpace()) {
        case QVideoSurfaceFormat::YCbCr_JPEG:
            // Full-range JPEG YCbCr.
            colorMatrix = QMatrix4x4(
                        1.0, 0.000, 1.402, -0.701,
                        1.0, -0.344, -0.714, 0.529,
                        1.0, 1.772, 0.000, -0.886,
                        0.0, 0.000, 0.000, 1.0000);
            break;
        case QVideoSurfaceFormat::YCbCr_BT709:
        case QVideoSurfaceFormat::YCbCr_xvYCC709:
            // HD (BT.709) limited-range coefficients.
            colorMatrix = QMatrix4x4(
                        1.164, 0.000, 1.793, -0.5727,
                        1.164, -0.534, -0.213, 0.3007,
                        1.164, 2.115, 0.000, -1.1302,
                        0.0, 0.000, 0.000, 1.0000);
            break;
        default: //BT 601:
            colorMatrix = QMatrix4x4(
                        1.164, 0.000, 1.596, -0.8708,
                        1.164, -0.392, -0.813, 0.5296,
                        1.164, 2.017, 0.000, -1.081,
                        0.0, 0.000, 0.000, 1.0000);
        }

        setFlag(Blending, false);
    }

    virtual QSGMaterialType *type() const {
        static QSGMaterialType theType;
        return &theType;
    }

    virtual QSGMaterialShader *createShader() const {
        return new QSGVideoMaterialShader_YUV420;
    }

    // Order materials by their texture ids so that nodes sharing textures
    // can be batched together by the renderer.
    virtual int compare(const QSGMaterial *other) const {
        const QSGVideoMaterial_YUV420 *m = static_cast<const QSGVideoMaterial_YUV420 *>(other);
        int d = idY - m->idY;
        if (d)
            return d;
        else if ((d = idU - m->idU) != 0)
            return d;
        else
            return idV - m->idV;
    }

    // Enable alpha blending only when the material is not fully opaque.
    void updateBlending() {
        setFlag(Blending, qFuzzyCompare(opacity, 1.0) ? false : true);
    }

    GLuint idY;              // Y plane texture id
    GLuint idU;              // U plane texture id
    GLuint idV;              // V plane texture id
    qreal opacity;
    QMatrix4x4 colorMatrix;  // YUV -> RGB conversion
};
QSGVideoNode_I420::QSGVideoNode_I420(const QVideoSurfaceFormat &format) :
    m_width(0),
    m_height(0),
    m_format(format)
{
    // The material is created up front; the plane textures are generated
    // lazily on the first frame in setCurrentFrame().
    m_material = new QSGVideoMaterial_YUV420(format);
    setMaterial(m_material);
    m_material->opacity = 1;
}
QSGVideoNode_I420::~QSGVideoNode_I420()
{
    // Textures only exist after at least one frame was uploaded
    // (m_width/m_height are set together with glGenTextures).
    if (m_width != 0 && m_height != 0)
        glDeleteTextures(3, m_id);
}
// Uploads the Y, U and V planes of the new frame into the three textures
// and marks the material dirty so the renderer re-binds them.
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame)
{
    m_frame = frame;
    // NOTE(review): the return value of map() is not checked; bits() below
    // would read an unmapped frame on failure -- confirm upstream contract.
    m_frame.map(QAbstractVideoBuffer::ReadOnly);

    int fw = frame.width();
    int fh = frame.height();

    // Frame has changed size, recreate textures...
    if (fw != m_width || fh != m_height) {
        if (m_width != 0 && m_height != 0)
            glDeleteTextures(3, m_id);
        glGenTextures(3, m_id);
        m_width = fw;
        m_height = fh;
        m_material->idY = m_id[0];
        m_material->idU = m_id[1];
        m_material->idV = m_id[2];
    }

    const uchar *bits = frame.bits();
    int bpl = frame.bytesPerLine();
    // Chroma pitch: half the luma pitch rounded up to a 4-byte boundary
    // (assumed layout of the planar buffer -- TODO confirm for all sources).
    int bpl2 = (bpl / 2 + 3) & ~3;
    int offsetU = bpl * fh;                  // U plane follows the Y plane
    int offsetV = bpl * fh + bpl2 * fh / 2;  // V plane follows the U plane
    // YV12 stores V before U, so swap the plane offsets.
    if (m_frame.pixelFormat() == QVideoFrame::Format_YV12)
        qSwap(offsetU, offsetV);

    bindTexture(m_id[0], GL_TEXTURE0, fw, fh, bits);
    bindTexture(m_id[1], GL_TEXTURE1, fw/2, fh / 2, bits + offsetU);
    bindTexture(m_id[2], GL_TEXTURE2, fw/2, fh / 2, bits + offsetV);

    m_frame.unmap();

    markDirty(DirtyMaterial);
}
// Uploads one plane as a GL_LUMINANCE texture on the given texture unit,
// with linear filtering and edge clamping.
void QSGVideoNode_I420::bindTexture(int id, int unit, int w, int h, const uchar *bits)
{
    QGLFunctions *functions = QGLContext::currentContext()->functions();
    functions->glActiveTexture(unit);
    glBindTexture(GL_TEXTURE_2D, id);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, bits);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
// Per-draw state update: binds the three plane textures to units 0-2 and
// refreshes the colour-matrix, opacity and transform uniforms.
void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
                                                QSGMaterial *newMaterial,
                                                QSGMaterial *oldMaterial)
{
    Q_UNUSED(oldMaterial);

    QGLFunctions *functions = state.context()->functions();
    QSGVideoMaterial_YUV420 *mat = static_cast<QSGVideoMaterial_YUV420 *>(newMaterial);
    // Samplers are fixed to texture units 0/1/2.
    program()->setUniformValue(m_id_yTexture, 0);
    program()->setUniformValue(m_id_uTexture, 1);
    program()->setUniformValue(m_id_vTexture, 2);

    functions->glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, mat->idY);
    functions->glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, mat->idU);
    functions->glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, mat->idV);

    program()->setUniformValue(m_id_colorMatrix, mat->colorMatrix);

    // Opacity and the combined matrix are only re-uploaded when the
    // renderer reports them dirty.
    if (state.isOpacityDirty()) {
        mat->opacity = state.opacity();
        program()->setUniformValue(m_id_opacity, GLfloat(mat->opacity));
    }

    if (state.isMatrixDirty())
        program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

View File

@@ -0,0 +1,79 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_I420_H
#define QSGVIDEONODE_I420_H
#include "qsgvideonode_p.h"
#include <QtMultimediaKit/qvideosurfaceformat.h>
class QSGVideoMaterial_YUV420;
// Scene-graph node that renders planar YUV 4:2:0 frames (YUV420P / YV12)
// via three per-plane GL textures.
class QSGVideoNode_I420 : public QSGVideoNode
{
public:
    QSGVideoNode_I420(const QVideoSurfaceFormat &format);
    ~QSGVideoNode_I420();

    virtual QVideoFrame::PixelFormat pixelFormat() const {
        return m_format.pixelFormat();
    }
    void setCurrentFrame(const QVideoFrame &frame);

private:
    // Upload one plane to the given texture unit.
    void bindTexture(int id, int unit, int w, int h, const uchar *bits);

    int m_width;                          // current texture width (0 = none yet)
    int m_height;                         // current texture height
    GLuint m_id[3];                       // Y, U, V plane texture ids
    QVideoSurfaceFormat m_format;
    QSGVideoMaterial_YUV420 *m_material;  // allocated in ctor; NOTE(review): deletion responsibility unclear from here
    QVideoFrame m_frame;
};
// Factory producing QSGVideoNode_I420 nodes for supported YUV formats.
class QSGVideoNodeFactory_I420 : public QSGVideoNodeFactory {
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
    QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
#endif // QSGVIDEONODE_I420_H

View File

@@ -0,0 +1,72 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_P_H
#define QSGVIDEONODE_P_H
#include <QtDeclarative/qsgnode.h>
#include <QtMultimediaKit/qvideoframe.h>
#include <QtMultimediaKit/qvideosurfaceformat.h>
#include <QtOpenGL/qglfunctions.h>
// Base class for scene-graph nodes that render a video frame as a textured
// quad; concrete subclasses implement per-pixel-format frame upload.
class QSGVideoNode : public QSGGeometryNode
{
public:
    QSGVideoNode();

    // Attach/upload the frame to be shown at the next render pass.
    virtual void setCurrentFrame(const QVideoFrame &frame) = 0;
    // Pixel format this node was built for (used to detect format changes).
    virtual QVideoFrame::PixelFormat pixelFormat() const = 0;

    // Lazily creates the quad geometry covering boundingRect with the given
    // normalized texture coordinates; no-op when nothing changed.
    void setTexturedRectGeometry(const QRectF &boundingRect, const QRectF &textureRect);

private:
    QRectF m_rect;         // last destination rect
    QRectF m_textureRect;  // last source (texture) rect
};
// Abstract factory: advertises the pixel formats it can render and creates
// matching QSGVideoNode instances for a surface format.
class QSGVideoNodeFactory {
public:
    // Virtual destructor added: factories are deleted through this base
    // pointer (qDeleteAll(m_videoNodeFactories) in QDeclarativeVideoOutput's
    // destructor), which is undefined behaviour without one.
    virtual ~QSGVideoNodeFactory() {}

    virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const = 0;
    virtual QSGVideoNode *createNode(const QVideoSurfaceFormat &format) = 0;
};
#endif // QSGVIDEONODE_P_H

View File

@@ -0,0 +1,142 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_rgb32.h"
#include <QtDeclarative/qsgtexture.h>
// Report the pixel formats this factory can render for the given buffer
// handle type: only CPU-mapped (NoHandle) RGB32 frames are supported.
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_RGB32::supportedPixelFormats(
        QAbstractVideoBuffer::HandleType handleType) const
{
    QList<QVideoFrame::PixelFormat> pixelFormats;

    // Frames backed by GPU handles cannot be uploaded by this node.
    if (handleType != QAbstractVideoBuffer::NoHandle)
        return pixelFormats;

    pixelFormats << QVideoFrame::Format_RGB32;
    return pixelFormats;
}
// Construct an RGB32 video node for the given surface format, or return 0
// when the factory does not support the format/handle-type combination.
QSGVideoNode *QSGVideoNodeFactory_RGB32::createNode(const QVideoSurfaceFormat &format)
{
    const bool supported =
            supportedPixelFormats(format.handleType()).contains(format.pixelFormat());

    return supported ? new QSGVideoNode_RGB32 : 0;
}
class QSGVideoTexture_RGB32 : public QSGTexture
{
public:
QSGVideoTexture_RGB32();
int textureId() const { return m_textureId; }
QSize textureSize() const { return m_size; }
bool hasAlphaChannel() const { return false; }
bool hasMipmaps() const { return false; }
void setCurrentFrame(const QVideoFrame &frame) { m_frame = frame; }
//QRectF textureSubRect() const;
void bind();
private:
QVideoFrame m_frame;
GLuint m_textureId;
QSize m_size;
};
// Start with no GL texture allocated; bind() creates one when the first
// valid frame arrives.
QSGVideoTexture_RGB32::QSGVideoTexture_RGB32()
    : QSGTexture()
    , m_textureId(0)
{
}
// Upload the pending frame (if any) into the GL texture and bind it for
// rendering. Called by the scene graph with a current GL context.
void QSGVideoTexture_RGB32::bind()
{
    if (m_frame.isValid()) {
        // Frame size changed: throw away the old texture object and
        // allocate a fresh id so glTexImage2D below defines new storage.
        if (m_size != m_frame.size()) {
            if (m_textureId)
                glDeleteTextures(1, &m_textureId);
            glGenTextures(1, &m_textureId);
            m_size = m_frame.size();
        }
        if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
            QGLFunctions *functions = QGLContext::currentContext()->functions();
            const uchar *bits = m_frame.bits();
            functions->glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, m_textureId);
#ifdef QT_OPENGL_ES
            // NOTE(review): on GL ES only a warning is printed and the
            // texture is left without pixel data (GL_BGRA upload below is
            // desktop-GL only) — the bound texture stays undefined here.
            qWarning() << "RGB video doesn't work on GL ES\n";
#else
            // RGB32 frames are BGRA in memory on little-endian hosts, hence
            // the GL_BGRA source format with GL_RGBA internal storage.
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                         m_size.width(), m_size.height(),
                         0, GL_BGRA, GL_UNSIGNED_BYTE, bits);
#endif
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            m_frame.unmap();
        }
        // Drop our reference to the frame data; the pixels now live in the
        // GL texture, so repeated binds take the cheap branch below.
        m_frame = QVideoFrame();
        updateBindOptions(true);
    } else {
        // No new frame: just rebind the previously uploaded texture.
        glBindTexture(GL_TEXTURE_2D, m_textureId);
        updateBindOptions(false);
    }
}
// Wire up the node's texture material with a fresh RGB32 upload texture.
QSGVideoNode_RGB32::QSGVideoNode_RGB32()
{
    setMaterial(&m_material);
    // NOTE(review): m_texture is heap-allocated here and no matching delete
    // is visible in this file — confirm whether the material or scene graph
    // takes ownership; otherwise this leaks one texture per node.
    m_texture = new QSGVideoTexture_RGB32();
    m_material.setTexture(m_texture);
    m_material.setFiltering(QSGTexture::Linear);
}
// Hand the frame to the texture (uploaded lazily on the next bind()) and
// mark the material dirty so the scene graph repaints this node.
void QSGVideoNode_RGB32::setCurrentFrame(const QVideoFrame &frame)
{
    m_texture->setCurrentFrame(frame);
    markDirty(DirtyMaterial);
}

View File

@@ -0,0 +1,71 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QSGVIDEONODE_RGB32_H
#define QSGVIDEONODE_RGB32_H
#include "qsgvideonode_p.h"
#include <QtDeclarative/qsgtexturematerial.h>
class QSGVideoTexture_RGB32;
// Scene graph node that renders CPU-mapped RGB32 video frames through a
// texture material.
class QSGVideoNode_RGB32 : public QSGVideoNode
{
public:
    QSGVideoNode_RGB32();

    // Stores the frame for upload on the next material bind.
    void setCurrentFrame(const QVideoFrame &frame);
    QVideoFrame::PixelFormat pixelFormat() const { return QVideoFrame::Format_RGB32; }

private:
    QSGTextureMaterial m_material;
    // NOTE(review): allocated in the constructor; no destructor frees it —
    // confirm whether the material/scene graph assumes ownership.
    QSGVideoTexture_RGB32 *m_texture;
};
// Factory producing QSGVideoNode_RGB32 nodes for CPU-mapped RGB32 frames.
class QSGVideoNodeFactory_RGB32 : public QSGVideoNodeFactory {
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
    QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
#endif // QSGVIDEONODE_RGB32_H

View File

@@ -109,6 +109,7 @@ public:
void _q_error(int id, int error, const QString &errorString);
void _q_readyChanged(bool);
void _q_serviceDestroyed();
void unsetError() { error = QCameraImageCapture::NoError; errorString.clear(); }
@@ -141,6 +142,14 @@ void QCameraImageCapturePrivate::_q_readyChanged(bool ready)
emit q->readyForCaptureChanged(ready);
}
// The media service backing this capture object is being destroyed: drop
// every cached control pointer so later calls see null controls instead of
// dereferencing dangling pointers.
void QCameraImageCapturePrivate::_q_serviceDestroyed()
{
    mediaObject = 0;
    control = 0;
    encoderControl = 0;
    captureDestinationControl = 0;
    bufferFormatControl = 0;
}
/*!
Constructs a media recorder which records the media produced by \a mediaObject.
@@ -225,6 +234,8 @@ bool QCameraImageCapture::setMediaObject(QMediaObject *mediaObject)
service->releaseControl(d->captureDestinationControl);
if (d->bufferFormatControl)
service->releaseControl(d->bufferFormatControl);
disconnect(service, SIGNAL(destroyed()), this, SLOT(_q_serviceDestroyed()));
}
}
@@ -269,6 +280,8 @@ bool QCameraImageCapture::setMediaObject(QMediaObject *mediaObject)
this, SIGNAL(bufferFormatChanged(QVideoFrame::PixelFormat)));
}
connect(service, SIGNAL(destroyed()), this, SLOT(_q_serviceDestroyed()));
return true;
}
}

View File

@@ -145,6 +145,7 @@ private:
Q_DECLARE_PRIVATE(QCameraImageCapture)
Q_PRIVATE_SLOT(d_func(), void _q_error(int, int, const QString &))
Q_PRIVATE_SLOT(d_func(), void _q_readyChanged(bool))
Q_PRIVATE_SLOT(d_func(), void _q_serviceDestroyed())
};
Q_DECLARE_OPERATORS_FOR_FLAGS(QCameraImageCapture::CaptureDestinations)

View File

@@ -343,7 +343,6 @@ void QCameraImageProcessing::setDenoisingLevel(int level)
\value WhiteBalanceShade Shade white balance mode.
\value WhiteBalanceTungsten Tungsten white balance mode.
\value WhiteBalanceFluorescent Fluorescent white balance mode.
\value WhiteBalanceIncandescent Incandescent white balance mode.
\value WhiteBalanceFlash Flash white balance mode.
\value WhiteBalanceSunset Sunset white balance mode.
\value WhiteBalanceVendor Vendor defined white balance mode.

View File

@@ -71,9 +71,8 @@ public:
WhiteBalanceShade = 4,
WhiteBalanceTungsten = 5,
WhiteBalanceFluorescent = 6,
WhiteBalanceIncandescent = 7,
WhiteBalanceFlash = 8,
WhiteBalanceSunset = 9,
WhiteBalanceFlash = 7,
WhiteBalanceSunset = 8,
WhiteBalanceVendor = 1000
};

View File

@@ -168,7 +168,12 @@ void QMediaRecorderPrivate::_q_error(int error, const QString &errorString)
void QMediaRecorderPrivate::_q_serviceDestroyed()
{
q_func()->setMediaObject(0);
mediaObject = 0;
control = 0;
formatControl = 0;
audioControl = 0;
videoControl = 0;
metaDataControl = 0;
}
void QMediaRecorderPrivate::_q_notify()

View File

@@ -41,68 +41,14 @@
#include "camerabinaudioencoder.h"
#include "camerabincontainer.h"
#include "qgstcodecsinfo.h"
#include <QtCore/qdebug.h>
CameraBinAudioEncoder::CameraBinAudioEncoder(QObject *parent)
:QAudioEncoderControl(parent)
:QAudioEncoderControl(parent),
m_codecs(QGstCodecsInfo::AudioEncoder)
{
QList<QByteArray> codecCandidates;
#if defined(Q_WS_MAEMO_6)
codecCandidates << "audio/AAC" << "audio/PCM" << "audio/AMR" << "audio/AMR-WB" << "audio/speex"
<< "audio/ADPCM" << "audio/iLBC" << "audio/vorbis" << "audio/mpeg" << "audio/FLAC";
m_elementNames["audio/AAC"] = "nokiaaacenc";
m_elementNames["audio/speex"] = "speexenc";
m_elementNames["audio/PCM"] = "audioresample";
m_elementNames["audio/AMR"] = "nokiaamrnbenc";
m_elementNames["audio/AMR-WB"] = "nokiaamrwbenc";
m_elementNames["audio/ADPCM"] = "nokiaadpcmenc";
m_elementNames["audio/iLBC"] = "nokiailbcenc";
m_elementNames["audio/vorbis"] = "vorbisenc";
m_elementNames["audio/FLAC"] = "flacenc";
m_elementNames["audio/mpeg"] = "ffenc_mp2";
#else
codecCandidates << "audio/mpeg" << "audio/vorbis" << "audio/speex" << "audio/GSM"
<< "audio/PCM" << "audio/AMR" << "audio/AMR-WB";
m_elementNames["audio/mpeg"] = "lamemp3enc";
m_elementNames["audio/vorbis"] = "vorbisenc";
m_elementNames["audio/speex"] = "speexenc";
m_elementNames["audio/GSM"] = "gsmenc";
m_elementNames["audio/PCM"] = "audioresample";
m_elementNames["audio/AMR"] = "amrnbenc";
m_elementNames["audio/AMR-WB"] = "amrwbenc";
m_codecOptions["audio/vorbis"] = QStringList() << "min-bitrate" << "max-bitrate";
m_codecOptions["audio/mpeg"] = QStringList() << "mode";
m_codecOptions["audio/speex"] = QStringList() << "mode" << "vbr" << "vad" << "dtx";
m_codecOptions["audio/GSM"] = QStringList();
m_codecOptions["audio/PCM"] = QStringList();
m_codecOptions["audio/AMR"] = QStringList();
m_codecOptions["audio/AMR-WB"] = QStringList();
#endif
foreach( const QByteArray& codecName, codecCandidates ) {
QByteArray elementName = m_elementNames[codecName];
GstElementFactory *factory = gst_element_factory_find(elementName.constData());
if (factory) {
m_codecs.append(codecName);
const gchar *descr = gst_element_factory_get_description(factory);
if (codecName == QByteArray("audio/PCM"))
m_codecDescriptions.insert(codecName, tr("Raw PCM audio"));
else
m_codecDescriptions.insert(codecName, QString::fromUtf8(descr));
m_streamTypes.insert(codecName,
CameraBinContainer::supportedStreamTypes(factory, GST_PAD_SRC));
gst_object_unref(GST_OBJECT(factory));
}
}
}
CameraBinAudioEncoder::~CameraBinAudioEncoder()
@@ -111,12 +57,12 @@ CameraBinAudioEncoder::~CameraBinAudioEncoder()
QStringList CameraBinAudioEncoder::supportedAudioCodecs() const
{
return m_codecs;
return m_codecs.supportedCodecs();
}
QString CameraBinAudioEncoder::codecDescription(const QString &codecName) const
{
return m_codecDescriptions.value(codecName);
return m_codecs.codecDescription(codecName);
}
QStringList CameraBinAudioEncoder::supportedEncodingOptions(const QString &codec) const
@@ -165,129 +111,19 @@ void CameraBinAudioEncoder::resetActualSettings()
m_audioSettings = m_userSettings;
}
GstElement *CameraBinAudioEncoder::createEncoder()
GstEncodingProfile *CameraBinAudioEncoder::createProfile()
{
QString codec = m_audioSettings.codec();
QByteArray encoderElementName = m_elementNames.value(codec);
GstElement *encoderElement = gst_element_factory_make(encoderElementName.constData(), NULL);
if (!encoderElement)
return 0;
GstCaps *caps;
GstBin * encoderBin = GST_BIN(gst_bin_new("audio-encoder-bin"));
GstElement *capsFilter = gst_element_factory_make("capsfilter", NULL);
gst_bin_add(encoderBin, capsFilter);
gst_bin_add(encoderBin, encoderElement);
gst_element_link(capsFilter, encoderElement);
// add ghostpads
GstPad *pad = gst_element_get_static_pad(capsFilter, "sink");
gst_element_add_pad(GST_ELEMENT(encoderBin), gst_ghost_pad_new("sink", pad));
gst_object_unref(GST_OBJECT(pad));
pad = gst_element_get_static_pad(encoderElement, "src");
gst_element_add_pad(GST_ELEMENT(encoderBin), gst_ghost_pad_new("src", pad));
gst_object_unref(GST_OBJECT(pad));
if (m_audioSettings.sampleRate() > 0 || m_audioSettings.channelCount() > 0) {
GstCaps *caps = gst_caps_new_empty();
GstStructure *structure = gst_structure_new("audio/x-raw-int", NULL);
if (m_audioSettings.sampleRate() > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, m_audioSettings.sampleRate(), NULL );
if (m_audioSettings.channelCount() > 0)
gst_structure_set(structure, "channels", G_TYPE_INT, m_audioSettings.channelCount(), NULL );
gst_caps_append_structure(caps,structure);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
}
if (encoderElement) {
if (m_audioSettings.encodingMode() == QtMultimediaKit::ConstantQualityEncoding) {
QtMultimediaKit::EncodingQuality qualityValue = m_audioSettings.quality();
if (encoderElementName == "lamemp3enc") {
g_object_set(G_OBJECT(encoderElement), "target", 0, NULL); //constant quality mode
qreal quality[] = {
10.0, //VeryLow
6.0, //Low
4.0, //Normal
2.0, //High
0.0 //VeryHigh
};
g_object_set(G_OBJECT(encoderElement), "quality", quality[qualityValue], NULL);
} else if (encoderElementName == "ffenc_mp2") {
int quality[] = {
8000, //VeryLow
64000, //Low
128000, //Normal
192000, //High
320000 //VeryHigh
};
g_object_set(G_OBJECT(encoderElement), "bitrate", quality[qualityValue], NULL);
} else if (codec == QLatin1String("audio/speex")) {
//0-10 range with default 8
double qualityTable[] = {
2, //VeryLow
5, //Low
8, //Normal
9, //High
10 //VeryHigh
};
g_object_set(G_OBJECT(encoderElement), "quality", qualityTable[qualityValue], NULL);
} else if (codec.startsWith("audio/AMR")) {
int band[] = {
0, //VeryLow
2, //Low
4, //Normal
6, //High
7 //VeryHigh
};
g_object_set(G_OBJECT(encoderElement), "band-mode", band[qualityValue], NULL);
}
} else {
int bitrate = m_audioSettings.bitRate();
if (bitrate > 0) {
g_object_set(G_OBJECT(encoderElement), "bitrate", bitrate, NULL);
}
}
QMap<QString, QVariant> options = m_options.value(codec);
QMapIterator<QString,QVariant> it(options);
while (it.hasNext()) {
it.next();
QString option = it.key();
QVariant value = it.value();
switch (value.type()) {
case QVariant::Int:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toInt(), NULL);
break;
case QVariant::Bool:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toBool(), NULL);
break;
case QVariant::Double:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toDouble(), NULL);
break;
case QVariant::String:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toString().toUtf8().constData(), NULL);
break;
default:
qWarning() << "unsupported option type:" << option << value;
break;
}
}
}
return GST_ELEMENT(encoderBin);
if (codec.isEmpty())
caps = gst_caps_new_any();
else
caps = gst_caps_from_string(codec.toLatin1());
}
QSet<QString> CameraBinAudioEncoder::supportedStreamTypes(const QString &codecName) const
{
return m_streamTypes.value(codecName);
return (GstEncodingProfile *)gst_encoding_audio_profile_new(
caps,
NULL, //preset
NULL, //restriction
0); //presence
}

View File

@@ -50,8 +50,11 @@ class CameraBinSession;
#include <QtCore/qset.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include <gst/pbutils/encoding-profile.h>
#include <qaudioformat.h>
#include "qgstcodecsinfo.h"
QT_USE_NAMESPACE
@@ -77,27 +80,19 @@ public:
QAudioEncoderSettings audioSettings() const;
void setAudioSettings(const QAudioEncoderSettings&);
GstElement *createEncoder();
QSet<QString> supportedStreamTypes(const QString &codecName) const;
void setActualAudioSettings(const QAudioEncoderSettings&);
void resetActualSettings();
GstEncodingProfile *createProfile();
Q_SIGNALS:
void settingsChanged();
private:
QStringList m_codecs;
QMap<QString,QByteArray> m_elementNames;
QMap<QString,QString> m_codecDescriptions;
QGstCodecsInfo m_codecs;
QMap<QString,QStringList> m_codecOptions;
QMap<QString, QMap<QString, QVariant> > m_options;
QMap<QString, QSet<QString> > m_streamTypes;
QAudioEncoderSettings m_audioSettings;
QAudioEncoderSettings m_userSettings;
};

View File

@@ -40,83 +40,90 @@
****************************************************************************/
#include "camerabincontainer.h"
#include <QtCore/qregexp.h>
#include <QtCore/qdebug.h>
CameraBinContainer::CameraBinContainer(QObject *parent)
:QMediaContainerControl(parent)
:QMediaContainerControl(parent),
m_supportedContainers(QGstCodecsInfo::Muxer)
{
QList<QByteArray> formatCandidates;
formatCandidates << "mp4" << "ogg" << "wav" << "amr" << "mkv"
<< "avi" << "3gp" << "3gp2" << "webm" << "mjpeg" << "asf" << "mov";
//extension for containers hard to guess from mimetype
m_fileExtensions["video/x-matroska"] = "mkv";
m_fileExtensions["video/quicktime"] = "mov";
m_fileExtensions["video/x-msvideo"] = "avi";
m_fileExtensions["video/msvideo"] = "avi";
m_fileExtensions["audio/mpeg"] = "mp3";
m_fileExtensions["application/x-shockwave-flash"] = "swf";
m_fileExtensions["application/x-pn-realmedia"] = "rm";
}
QMap<QString,QByteArray> elementNames;
QStringList CameraBinContainer::supportedContainers() const
{
return m_supportedContainers.supportedCodecs();
}
elementNames.insertMulti("mp4", "ffmux_mp4");
elementNames.insertMulti("mp4", "hantromp4mux");
elementNames.insertMulti("mp4", "mp4mux");
elementNames.insert("ogg", "oggmux");
elementNames["wav"] = "wavenc";
elementNames["amr"] = "ffmux_amr";
elementNames["mkv"] = "matroskamux";
elementNames["avi"] = "avimux";
elementNames["3gp"] = "ffmux_3gp";
elementNames["3gp2"] = "ffmux_3g2";
elementNames["webm"] = "webmmux";
elementNames["mjpeg"] = "ffmux_mjpeg";
elementNames["asf"] = "ffmux_asf";
elementNames["mov"] = "qtmux";
QString CameraBinContainer::containerDescription(const QString &formatMimeType) const
{
return m_supportedContainers.codecDescription(formatMimeType);
}
QSet<QString> allTypes;
QString CameraBinContainer::containerMimeType() const
{
return m_format;
}
foreach(const QByteArray &formatName, formatCandidates) {
foreach(const QByteArray &elementName, elementNames.values(formatName)) {
GstElementFactory *factory = gst_element_factory_find(elementName.constData());
if (factory) {
m_supportedContainers.append(formatName);
const gchar *descr = gst_element_factory_get_description(factory);
m_containerDescriptions.insert(formatName, QString::fromUtf8(descr));
void CameraBinContainer::setContainerMimeType(const QString &formatMimeType)
{
m_format = formatMimeType;
if (formatName == QByteArray("raw")) {
m_streamTypes.insert(formatName, allTypes);
} else {
QSet<QString> types = supportedStreamTypes(factory, GST_PAD_SINK);
m_streamTypes.insert(formatName, types);
allTypes.unite(types);
}
gst_object_unref(GST_OBJECT(factory));
m_elementNames.insert(formatName, elementName);
break;
}
}
if (m_userFormat != formatMimeType) {
m_userFormat = formatMimeType;
emit settingsChanged();
}
}
QSet<QString> CameraBinContainer::supportedStreamTypes(GstElementFactory *factory, GstPadDirection direction)
void CameraBinContainer::setActualContainer(const QString &formatMimeType)
{
QSet<QString> types;
const GList *pads = gst_element_factory_get_static_pad_templates(factory);
for (const GList *pad = pads; pad; pad = g_list_next(pad)) {
GstStaticPadTemplate *templ = (GstStaticPadTemplate*)pad->data;
if (templ->direction == direction) {
GstCaps *caps = gst_static_caps_get(&templ->static_caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
types.insert( QString::fromUtf8(gst_structure_get_name(structure)) );
}
gst_caps_unref(caps);
}
}
return types;
m_format = formatMimeType;
}
QSet<QString> CameraBinContainer::supportedStreamTypes(const QString &container) const
void CameraBinContainer::resetActualContainer()
{
return m_streamTypes.value(container);
m_format = m_userFormat;
}
// Build a GStreamer encoding container profile for the currently selected
// container mime type (m_format).
GstEncodingContainerProfile *CameraBinContainer::createProfile()
{
    GstCaps *caps;

    // An empty mime type means "no preference": match any container format.
    if (m_format.isEmpty())
        caps = gst_caps_new_any();
    else
        caps = gst_caps_from_string(m_format.toLatin1());

    // NOTE(review): C-style cast of the new profile — presumably the
    // function's return already is a container profile; verify against the
    // gst_encoding_container_profile_new signature in the installed
    // gst-plugins-base version.
    return (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
                "camerabin2_profile",
                (gchar *)"custom camera profile",
                caps,
                NULL); //preset
}
/*!
  Suggest file extension for current container mimetype.
*/
QString CameraBinContainer::suggestedFileExtension() const
{
    // Only the part before the first comma identifies the container type.
    const QString mimeType = m_format.left(m_format.indexOf(','));

    // First try the explicit lookup table of hard-to-guess extensions.
    QString ext = m_fileExtensions.value(mimeType);

    // Otherwise derive the extension from the last word of the mime type
    // (e.g. "video/webm" -> "webm"); an empty mime type yields no suggestion.
    if (ext.isEmpty() && !mimeType.isEmpty()) {
        QRegExp rx("[-/]([\\w]+)$");
        if (rx.indexIn(mimeType) != -1)
            ext = rx.cap(1);
    }

    return ext;
}

View File

@@ -48,6 +48,10 @@
#include <QtCore/qset.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include <gst/pbutils/encoding-profile.h>
#include "qgstcodecsinfo.h"
QT_USE_NAMESPACE
@@ -58,35 +62,18 @@ public:
CameraBinContainer(QObject *parent);
virtual ~CameraBinContainer() {}
virtual QStringList supportedContainers() const { return m_supportedContainers; }
virtual QString containerMimeType() const { return m_format; }
virtual void setContainerMimeType(const QString &formatMimeType)
{
m_format = formatMimeType;
virtual QStringList supportedContainers() const;
virtual QString containerDescription(const QString &formatMimeType) const;
if (m_userFormat != formatMimeType) {
m_userFormat = formatMimeType;
emit settingsChanged();
}
}
virtual QString containerMimeType() const;
virtual void setContainerMimeType(const QString &formatMimeType);
void setActualContainer(const QString &formatMimeType)
{
m_format = formatMimeType;
}
void setActualContainer(const QString &formatMimeType);
void resetActualContainer();
void resetActualContainer()
{
m_format = m_userFormat;
}
QString suggestedFileExtension() const;
virtual QString containerDescription(const QString &formatMimeType) const { return m_containerDescriptions.value(formatMimeType); }
QByteArray formatElementName() const { return m_elementNames.value(containerMimeType()); }
QSet<QString> supportedStreamTypes(const QString &container) const;
static QSet<QString> supportedStreamTypes(GstElementFactory *factory, GstPadDirection direction);
GstEncodingContainerProfile *createProfile();
Q_SIGNALS:
void settingsChanged();
@@ -94,10 +81,9 @@ Q_SIGNALS:
private:
QString m_format; // backend selected format, using m_userFormat
QString m_userFormat;
QStringList m_supportedContainers;
QMap<QString,QByteArray> m_elementNames;
QMap<QString, QString> m_containerDescriptions;
QMap<QString, QSet<QString> > m_streamTypes;
QMap<QString, QString> m_fileExtensions;
QGstCodecsInfo m_supportedContainers;
};
#endif // CAMERABINMEDIACONTAINERCONTROL_H

View File

@@ -75,14 +75,6 @@ CameraBinControl::CameraBinControl(CameraBinSession *session)
connect(m_session, SIGNAL(stateChanged(QCamera::State)),
this, SLOT(updateStatus()));
connect(m_session->audioEncodeControl(), SIGNAL(settingsChanged()),
SLOT(reloadLater()));
connect(m_session->videoEncodeControl(), SIGNAL(settingsChanged()),
SLOT(reloadLater()));
connect(m_session->mediaContainerControl(), SIGNAL(settingsChanged()),
SLOT(reloadLater()));
connect(m_session->imageEncodeControl(), SIGNAL(settingsChanged()),
SLOT(reloadLater()));
connect(m_session, SIGNAL(viewfinderChanged()),
SLOT(reloadLater()));
connect(m_session, SIGNAL(readyChanged(bool)),
@@ -115,7 +107,6 @@ void CameraBinControl::setCaptureMode(QCamera::CaptureMode mode)
{
if (m_session->captureMode() != mode) {
m_session->setCaptureMode(mode);
reloadLater();
if (m_state == QCamera::ActiveState) {
m_resourcePolicy->setResourceSet(

View File

@@ -48,7 +48,8 @@
#include <QtCore/qmetaobject.h>
//#define CAMERABIN_DEBUG 1
#define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v)))
#define ZOOM_PROPERTY "zoom"
#define MAX_ZOOM_PROPERTY "max-zoom"
CameraBinFocus::CameraBinFocus(CameraBinSession *session)
:QCameraFocusControl(session),
@@ -59,8 +60,6 @@ CameraBinFocus::CameraBinFocus(CameraBinSession *session)
{
connect(m_session, SIGNAL(stateChanged(QCamera::State)),
this, SLOT(_q_handleCameraStateChange(QCamera::State)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)),
this, SLOT(_q_handleCapturedImage()));
}
CameraBinFocus::~CameraBinFocus()
@@ -91,7 +90,9 @@ qreal CameraBinFocus::maximumOpticalZoom() const
qreal CameraBinFocus::maximumDigitalZoom() const
{
return 10;
gfloat zoomFactor = 1.0;
g_object_get(GST_BIN(m_session->cameraBin()), MAX_ZOOM_PROPERTY, &zoomFactor, NULL);
return zoomFactor;
}
qreal CameraBinFocus::opticalZoom() const
@@ -102,15 +103,15 @@ qreal CameraBinFocus::opticalZoom() const
qreal CameraBinFocus::digitalZoom() const
{
gfloat zoomFactor = 1.0;
g_object_get(GST_BIN(m_session->cameraBin()), "zoom", &zoomFactor, NULL);
g_object_get(GST_BIN(m_session->cameraBin()), ZOOM_PROPERTY, &zoomFactor, NULL);
return zoomFactor;
}
void CameraBinFocus::zoomTo(qreal optical, qreal digital)
{
Q_UNUSED(optical);
digital = qBound(qreal(1.0), digital, qreal(10.0));
g_object_set(GST_BIN(m_session->cameraBin()), "zoom", digital, NULL);
digital = qBound(qreal(1.0), digital, maximumDigitalZoom());
g_object_set(GST_BIN(m_session->cameraBin()), ZOOM_PROPERTY, digital, NULL);
emit digitalZoomChanged(digital);
}
@@ -184,9 +185,9 @@ void CameraBinFocus::_q_setFocusStatus(QCamera::LockStatus status, QCamera::Lock
{
#ifdef CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "Current:"
<< ENUM_NAME(QCamera, "LockStatus", m_focusStatus)
<< m_focusStatus
<< "New:"
<< ENUM_NAME(QCamera, "LockStatus", status) << ENUM_NAME(QCamera, "LockChangeReason", reason);
<< status << reason;
#endif
if (m_focusStatus != status) {
@@ -211,10 +212,6 @@ void CameraBinFocus::_q_handleCameraStateChange(QCamera::State state)
_q_setFocusStatus(QCamera::Unlocked, QCamera::LockLost);
}
void CameraBinFocus::_q_handleCapturedImage()
{
}
void CameraBinFocus::_q_startFocusing()
{
_q_setFocusStatus(QCamera::Searching, QCamera::UserRequest);

View File

@@ -92,7 +92,6 @@ public Q_SLOTS:
private Q_SLOTS:
void _q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason);
void _q_handleCameraStateChange(QCamera::State state);
void _q_handleCapturedImage();
private:
CameraBinSession *m_session;

View File

@@ -82,9 +82,8 @@ CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session)
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)), this, SIGNAL(imageCaptured(int,QImage)));
connect(m_session, SIGNAL(busMessage(QGstreamerMessage)), SLOT(handleBusMessage(QGstreamerMessage)));
g_signal_connect(G_OBJECT(m_session->cameraBin()), IMAGE_DONE_SIGNAL, G_CALLBACK(handleImageSaved), this);
m_session->bus()->installMessageFilter(this);
}
CameraBinImageCapture::~CameraBinImageCapture()
@@ -127,39 +126,9 @@ void CameraBinImageCapture::updateState()
}
}
gboolean CameraBinImageCapture::handleImageSaved(GstElement *camera,
const gchar *filename,
CameraBinImageCapture *self)
{
#ifdef DEBUG_CAPTURE
qDebug() << "Image saved" << filename;
#endif
Q_UNUSED(camera);
if (self->m_session->captureDestinationControl()->captureDestination() & QCameraImageCapture::CaptureToFile) {
QMetaObject::invokeMethod(self, "imageSaved",
Qt::QueuedConnection,
Q_ARG(int, self->m_requestId),
Q_ARG(QString, QString::fromUtf8(filename)));
} else {
#ifdef DEBUG_CAPTURE
qDebug() << Q_FUNC_INFO << "Dropped saving file" << filename;
#endif
//camerabin creates an empty file when captured buffer is dropped,
//let's remove it
QFileInfo info(QString::fromUtf8(filename));
if (info.isFile() &&
info.filePath().startsWith("/home") &&
info.size() == 0) {
QFile(info.absoluteFilePath()).remove();
}
}
return true;
}
gboolean CameraBinImageCapture::metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *self)
{
Q_UNUSED(pad);
if (GST_EVENT_TYPE(event) == GST_EVENT_TAG) {
GstTagList *gstTags;
@@ -281,7 +250,7 @@ gboolean CameraBinImageCapture::jpegBufferProbe(GstPad *pad, GstBuffer *buffer,
return destination & QCameraImageCapture::CaptureToFile;
}
void CameraBinImageCapture::handleBusMessage(const QGstreamerMessage &message)
bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
{
//Install metadata event and buffer probes
@@ -298,7 +267,7 @@ void CameraBinImageCapture::handleBusMessage(const QGstreamerMessage &message)
if (newState == GST_STATE_READY) {
GstElement *element = GST_ELEMENT(GST_MESSAGE_SRC(gm));
if (!element)
return;
return false;
QString elementName = QString::fromLatin1(gst_element_get_name(element));
if (elementName.contains("jpegenc") && element != m_jpegEncoderElement) {
@@ -338,5 +307,33 @@ void CameraBinImageCapture::handleBusMessage(const QGstreamerMessage &message)
gst_object_unref(srcpad);
}
}
} else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
if (GST_MESSAGE_SRC(gm) == (GstObject *)m_session->cameraBin()) {
const GstStructure *structure = gst_message_get_structure(gm);
if (gst_structure_has_name (structure, "image-done")) {
const gchar *fileName = gst_structure_get_string (structure, "filename");
#ifdef DEBUG_CAPTURE
qDebug() << "Image saved" << fileName;
#endif
if (m_session->captureDestinationControl()->captureDestination() & QCameraImageCapture::CaptureToFile) {
emit imageSaved(m_requestId, QString::fromUtf8(fileName));
} else {
#ifdef DEBUG_CAPTURE
qDebug() << Q_FUNC_INFO << "Dropped saving file" << fileName;
#endif
//camerabin creates an empty file when captured buffer is dropped,
//let's remove it
QFileInfo info(QString::fromUtf8(fileName));
if (info.exists() && info.isFile() && info.size() == 0) {
QFile(info.absoluteFilePath()).remove();
}
}
}
}
}
return false;
}

View File

@@ -48,9 +48,10 @@
QT_USE_NAMESPACE
class CameraBinImageCapture : public QCameraImageCaptureControl
class CameraBinImageCapture : public QCameraImageCaptureControl, public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerBusMessageFilter)
public:
CameraBinImageCapture(CameraBinSession *session);
virtual ~CameraBinImageCapture();
@@ -62,15 +63,15 @@ public:
int capture(const QString &fileName);
void cancelCapture();
bool processBusMessage(const QGstreamerMessage &message);
private slots:
void updateState();
void handleBusMessage(const QGstreamerMessage &message);
private:
static gboolean metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *);
static gboolean uncompressedBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *);
static gboolean jpegBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *);
static gboolean handleImageSaved(GstElement *camera, const gchar *filename, CameraBinImageCapture *);
CameraBinSession *m_session;
bool m_ready;

View File

@@ -45,6 +45,8 @@
#include "camerabincontainer.h"
#include <QtCore/QDebug>
#include <gst/pbutils/encoding-profile.h>
CameraBinRecorder::CameraBinRecorder(CameraBinSession *session)
:QMediaRecorderControl(session),
m_session(session),
@@ -92,10 +94,7 @@ qint64 CameraBinRecorder::duration() const
void CameraBinRecorder::record()
{
if (m_session->state() == QCamera::ActiveState) {
if (m_state == QMediaRecorder::PausedState)
m_session->resumeVideoRecording();
else
m_session->recordVideo();
m_session->recordVideo();
emit stateChanged(m_state = QMediaRecorder::RecordingState);
} else
emit error(QMediaRecorder::ResourceError, tr("Service has not been started"));
@@ -103,11 +102,7 @@ void CameraBinRecorder::record()
void CameraBinRecorder::pause()
{
if (m_session->state() == QCamera::ActiveState) {
m_session->pauseVideoRecording();
emit stateChanged(m_state = QMediaRecorder::PausedState);
} else
emit error(QMediaRecorder::ResourceError, tr("Service has not been started"));
emit error(QMediaRecorder::ResourceError, tr("QMediaRecorder::pause() is not supported by camerabin2."));
}
void CameraBinRecorder::stop()
@@ -118,100 +113,19 @@ void CameraBinRecorder::stop()
}
}
bool CameraBinRecorder::findCodecs()
{
//Check the codecs are compatible with container,
//and choose the compatible codecs/container if omitted
CameraBinAudioEncoder *audioEncodeControl = m_session->audioEncodeControl();
CameraBinVideoEncoder *videoEncodeControl = m_session->videoEncodeControl();
CameraBinContainer *mediaContainerControl = m_session->mediaContainerControl();
audioEncodeControl->resetActualSettings();
videoEncodeControl->resetActualSettings();
mediaContainerControl->resetActualContainer();
QStringList containerCandidates;
if (mediaContainerControl->containerMimeType().isEmpty())
containerCandidates = mediaContainerControl->supportedContainers();
else
containerCandidates << mediaContainerControl->containerMimeType();
QStringList audioCandidates;
QAudioEncoderSettings audioSettings = audioEncodeControl->audioSettings();
if (audioSettings.codec().isEmpty())
audioCandidates = audioEncodeControl->supportedAudioCodecs();
else
audioCandidates << audioSettings.codec();
QStringList videoCandidates;
QVideoEncoderSettings videoSettings = videoEncodeControl->videoSettings();
if (videoSettings.codec().isEmpty())
videoCandidates = videoEncodeControl->supportedVideoCodecs();
else
videoCandidates << videoSettings.codec();
QString container;
QString audioCodec;
QString videoCodec;
foreach (const QString &containerCandidate, containerCandidates) {
QSet<QString> supportedTypes = mediaContainerControl->supportedStreamTypes(containerCandidate);
audioCodec.clear();
videoCodec.clear();
bool found = false;
foreach (const QString &audioCandidate, audioCandidates) {
QSet<QString> audioTypes = audioEncodeControl->supportedStreamTypes(audioCandidate);
if (!audioTypes.intersect(supportedTypes).isEmpty()) {
found = true;
audioCodec = audioCandidate;
break;
}
}
if (!found)
continue;
found = false;
foreach (const QString &videoCandidate, videoCandidates) {
QSet<QString> videoTypes = videoEncodeControl->supportedStreamTypes(videoCandidate);
if (!videoTypes.intersect(supportedTypes).isEmpty()) {
found = true;
videoCodec = videoCandidate;
break;
}
}
if (!found)
continue;
container = containerCandidate;
break;
}
if (container.isEmpty()) {
qWarning() << "Camera error: Not compatible codecs and container format.";
emit error(QMediaRecorder::FormatError, tr("Not compatible codecs and container format."));
return false;
} else {
mediaContainerControl->setActualContainer(container);
QAudioEncoderSettings audioSettings = audioEncodeControl->audioSettings();
audioSettings.setCodec(audioCodec);
audioEncodeControl->setActualAudioSettings(audioSettings);
QVideoEncoderSettings videoSettings = videoEncodeControl->videoSettings();
videoSettings.setCodec(videoCodec);
videoEncodeControl->setActualVideoSettings(videoSettings);
}
return true;
}
void CameraBinRecorder::applySettings()
{
findCodecs();
GstEncodingContainerProfile *containerProfile = m_session->mediaContainerControl()->createProfile();
if (containerProfile) {
GstEncodingProfile *audioProfile = m_session->audioEncodeControl()->createProfile();
GstEncodingProfile *videoProfile = m_session->videoEncodeControl()->createProfile();
gst_encoding_container_profile_add_profile(containerProfile, audioProfile);
gst_encoding_container_profile_add_profile(containerProfile, videoProfile);
}
g_object_set (G_OBJECT(m_session->cameraBin()), "video-profile", containerProfile, NULL);
}
bool CameraBinRecorder::isMuted() const

View File

@@ -64,8 +64,6 @@ public:
bool isMuted() const;
bool findCodecs();
void applySettings();
public slots:

View File

@@ -160,17 +160,16 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (!m_videoOutput) {
if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
m_videoOutput = m_videoRenderer;
m_captureSession->setViewfinder(m_videoRenderer);
} else if (qstrcmp(name, QVideoWindowControl_iid) == 0) {
m_videoOutput = m_videoWindow;
m_captureSession->setViewfinder(m_videoWindow);
} else if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
m_captureSession->setViewfinder(m_videoWidgetControl);
m_videoOutput = m_videoWidgetControl;
}
if (m_videoOutput)
if (m_videoOutput) {
m_captureSession->setViewfinder(m_videoOutput);
return m_videoOutput;
}
}
if (qstrcmp(name,QAudioEndpointSelector_iid) == 0)
@@ -238,7 +237,7 @@ void CameraBinService::releaseControl(QMediaControl *control)
bool CameraBinService::isCameraBinAvailable()
{
GstElementFactory *factory = gst_element_factory_find("camerabin");
GstElementFactory *factory = gst_element_factory_find("camerabin2");
if (factory) {
gst_object_unref(GST_OBJECT(factory));
return true;

View File

@@ -66,34 +66,30 @@
#include <QtGui/qimage.h>
//#define CAMERABIN_DEBUG 1
//#define CAMERABIN_DEBUG_DUMP_BIN 1
#define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v)))
#define FILENAME_PROPERTY "filename"
#define FILENAME_PROPERTY "location"
#define MODE_PROPERTY "mode"
#define MUTE_PROPERTY "mute"
#define ZOOM_PROPERTY "zoom"
#define IMAGE_PP_PROPERTY "image-post-processing"
#define IMAGE_ENCODER_PROPERTY "image-encoder"
#define VIDEO_PP_PROPERTY "video-post-processing"
#define VIDEO_ENCODER_PROPERTY "video-encoder"
#define AUDIO_ENCODER_PROPERTY "audio-encoder"
#define VIDEO_MUXER_PROPERTY "video-muxer"
#define VIEWFINDER_SINK_PROPERTY "viewfinder-sink"
#define VIDEO_SOURCE_PROPERTY "video-source"
#define CAMERA_SOURCE_PROPERTY "camera-source"
#define AUDIO_SOURCE_PROPERTY "audio-source"
#define VIDEO_SOURCE_CAPS_PROPERTY "video-source-caps"
#define SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-supported-caps"
#define SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-supported-caps"
#define FILTER_CAPS_PROPERTY "filter-caps"
#define PREVIEW_CAPS_PROPERTY "preview-caps"
#define IMAGE_DONE_SIGNAL "image-done"
#define CAPTURE_START "capture-start"
#define CAPTURE_STOP "capture-stop"
#define CAPTURE_PAUSE "capture-pause"
#define CAPTURE_START "start-capture"
#define CAPTURE_STOP "stop-capture"
#define SET_VIDEO_RESOLUTION_FPS "set-video-resolution-fps"
#define SET_IMAGE_RESOLUTION "set-image-resolution"
#define CAMERABIN_IMAGE_MODE 0
#define CAMERABIN_VIDEO_MODE 1
#define CAMERABIN_IMAGE_MODE 1
#define CAMERABIN_VIDEO_MODE 2
#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
@@ -106,16 +102,16 @@
#define VIEWFINDER_RESOLUTION_16x9 QSize(800, 450)
//using GST_STATE_READY for QCamera::LoadedState
//doesn't work reliably at least with some webcams.
#if defined(Q_WS_MAEMO_6)
#define USE_READY_STATE_ON_LOADED
#endif
//may not work reliably at least with some webcams.
//#define USE_READY_STATE_ON_LOADED
CameraBinSession::CameraBinSession(QObject *parent)
:QObject(parent),
m_recordingActive(false),
m_state(QCamera::UnloadedState),
m_pendingState(QCamera::UnloadedState),
m_recordingActive(false),
m_pendingResolutionUpdate(false),
m_muted(false),
m_busy(false),
@@ -124,12 +120,10 @@ CameraBinSession::CameraBinSession(QObject *parent)
m_videoInputFactory(0),
m_viewfinder(0),
m_viewfinderInterface(0),
m_pipeline(0),
m_videoSrc(0),
m_viewfinderElement(0),
m_viewfinderHasChanged(true),
m_videoInputHasChanged(true),
m_sourceCaps(0),
m_audioSrc(0),
m_audioConvert(0),
m_capsFilter(0),
@@ -137,16 +131,15 @@ CameraBinSession::CameraBinSession(QObject *parent)
m_audioEncoder(0),
m_muxer(0)
{
m_pipeline = gst_element_factory_make("camerabin", "camerabin");
g_signal_connect(G_OBJECT(m_pipeline), "notify::idle", G_CALLBACK(updateBusyStatus), this);
m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
gstRef(m_camerabin);
gstRef(m_pipeline);
m_bus = gst_element_get_bus(m_pipeline);
m_bus = gst_element_get_bus(m_camerabin);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
m_busHelper->installSyncEventFilter(this);
connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(handleBusMessage(QGstreamerMessage)));
m_busHelper->installMessageFilter(this);
m_audioEncodeControl = new CameraBinAudioEncoder(this);
m_videoEncodeControl = new CameraBinVideoEncoder(this);
m_imageEncodeControl = new CameraBinImageEncoder(this);
@@ -159,36 +152,40 @@ CameraBinSession::CameraBinSession(QObject *parent)
m_cameraLocksControl = new CameraBinLocks(this);
m_captureDestinationControl = new CameraBinCaptureDestination(this);
m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this);
//post image preview in RGB format
GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb");
g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps);
}
CameraBinSession::~CameraBinSession()
{
if (m_pipeline) {
if (m_camerabin) {
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_element_get_state(m_pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
gstUnref(m_pipeline);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
gst_element_get_state(m_camerabin, NULL, NULL, GST_CLOCK_TIME_NONE);
gstUnref(m_camerabin);
gstUnref(m_viewfinderElement);
}
}
GstPhotography *CameraBinSession::photography()
{
if (GST_IS_PHOTOGRAPHY(m_pipeline)) {
return GST_PHOTOGRAPHY(m_pipeline);
if (GST_IS_PHOTOGRAPHY(m_camerabin)) {
return GST_PHOTOGRAPHY(m_camerabin);
}
if (!m_videoSrc) {
m_videoSrc = buildVideoSrc();
m_videoSrc = buildCameraSource();
if (m_videoSrc)
g_object_set(m_pipeline, VIDEO_SOURCE_PROPERTY, m_videoSrc, NULL);
g_object_set(m_camerabin, CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
else
g_object_get(m_pipeline, VIDEO_SOURCE_PROPERTY, &m_videoSrc, NULL);
g_object_get(m_camerabin, CAMERA_SOURCE_PROPERTY, &m_videoSrc, NULL);
updateVideoSourceCaps();
m_videoInputHasChanged = false;
}
@@ -203,33 +200,19 @@ CameraBinSession::CameraRole CameraBinSession::cameraRole() const
return BackCamera;
}
/*
Configure camera during Loaded->Active states stansition.
*/
bool CameraBinSession::setupCameraBin()
{
if (m_captureMode == QCamera::CaptureStillImage) {
g_object_set(m_pipeline, MODE_PROPERTY, CAMERABIN_IMAGE_MODE, NULL);
}
if (m_captureMode == QCamera::CaptureVideo) {
g_object_set(m_pipeline, MODE_PROPERTY, CAMERABIN_VIDEO_MODE, NULL);
if (!m_recorderControl->findCodecs())
return false;
g_object_set(m_pipeline, VIDEO_ENCODER_PROPERTY, m_videoEncodeControl->createEncoder(), NULL);
g_object_set(m_pipeline, AUDIO_ENCODER_PROPERTY, m_audioEncodeControl->createEncoder(), NULL);
g_object_set(m_pipeline, VIDEO_MUXER_PROPERTY,
gst_element_factory_make(m_mediaContainerControl->formatElementName().constData(), NULL), NULL);
}
if (m_videoInputHasChanged) {
m_videoSrc = buildVideoSrc();
m_videoSrc = buildCameraSource();
if (m_videoSrc)
g_object_set(m_pipeline, VIDEO_SOURCE_PROPERTY, m_videoSrc, NULL);
g_object_set(m_camerabin, CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
else
g_object_get(m_pipeline, VIDEO_SOURCE_PROPERTY, &m_videoSrc, NULL);
g_object_get(m_camerabin, CAMERA_SOURCE_PROPERTY, &m_videoSrc, NULL);
updateVideoSourceCaps();
m_videoInputHasChanged = false;
}
@@ -248,25 +231,41 @@ bool CameraBinSession::setupCameraBin()
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
gst_object_ref(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_pipeline, GST_STATE_NULL);
g_object_set(G_OBJECT(m_pipeline), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
}
GstCaps *previewCaps = gst_caps_from_string(PREVIEW_CAPS_4_3);
g_object_set(G_OBJECT(m_pipeline), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps);
return true;
}
void CameraBinSession::updateVideoSourceCaps()
static GstCaps *resolutionToCaps(const QSize &resolution,
const QPair<int, int> &rate = qMakePair<int,int>(0,0))
{
if (m_sourceCaps) {
gst_caps_unref(m_sourceCaps);
m_sourceCaps = 0;
}
if (resolution.isEmpty())
return gst_caps_new_any();
g_object_get(G_OBJECT(m_pipeline), VIDEO_SOURCE_CAPS_PROPERTY, &m_sourceCaps, NULL);
GstCaps *caps = 0;
if (rate.second > 0) {
caps = gst_caps_new_full(gst_structure_new("video/x-raw-yuv",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL), NULL);
} else {
caps = gst_caps_new_full (gst_structure_new ("video/x-raw-yuv",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(), NULL), NULL);
}
return caps;
}
void CameraBinSession::setupCaptureResolution()
@@ -276,7 +275,6 @@ void CameraBinSession::setupCaptureResolution()
//by default select the maximum supported resolution
if (resolution.isEmpty()) {
updateVideoSourceCaps();
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
@@ -285,17 +283,15 @@ void CameraBinSession::setupCaptureResolution()
resolution = resolutions.last();
}
QString previewCapsString = PREVIEW_CAPS_4_3;
QSize viewfinderResolution = VIEWFINDER_RESOLUTION_4x3;
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution;
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << gst_caps_to_string(caps);
#endif
g_signal_emit_by_name(G_OBJECT(m_pipeline), SET_IMAGE_RESOLUTION, resolution.width(), resolution.height(), NULL);
previewCapsString = QString("video/x-raw-rgb, width = (int) %1, height = (int) 480")
.arg(resolution.width()*480/resolution.height());
g_object_set(m_camerabin, "image-capture-caps", caps, NULL);
gst_caps_unref(caps);
if (!resolution.isEmpty()) {
qreal aspectRatio = qreal(resolution.width()) / resolution.height();
@@ -308,35 +304,25 @@ void CameraBinSession::setupCaptureResolution()
}
}
GstCaps *previewCaps = gst_caps_from_string(previewCapsString.toLatin1());
g_object_set(G_OBJECT(m_pipeline), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps);
//on low res cameras the viewfinder resolution should not be bigger
//then capture resolution
if (viewfinderResolution.width() > resolution.width())
if (viewfinderResolution.width() > resolution.width() && !resolution.isEmpty())
viewfinderResolution = resolution;
GstCaps *viewfinderCaps = resolutionToCaps(viewfinderResolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << viewfinderResolution;
qDebug() << "Set viewfinder resolution" << viewfinderResolution <<gst_caps_to_string(viewfinderCaps);
#endif
g_signal_emit_by_name(G_OBJECT(m_pipeline),
SET_VIDEO_RESOLUTION_FPS,
viewfinderResolution.width(),
viewfinderResolution.height(),
0, // maximum framerate
1, // framerate denom
NULL);
g_object_set(m_camerabin, "viewfinder-caps", viewfinderCaps, NULL);
gst_caps_unref(viewfinderCaps);
}
if (m_captureMode == QCamera::CaptureVideo) {
QSize resolution = m_videoEncodeControl->videoSettings().resolution();
qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
//qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
if (resolution.isEmpty()) {
//select the hightest supported resolution
updateVideoSourceCaps();
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
@@ -345,32 +331,27 @@ void CameraBinSession::setupCaptureResolution()
resolution = resolutions.last();
}
if (!resolution.isEmpty() || framerate > 0) {
GstCaps *caps = resolutionToCaps(resolution /*, framerate*/); //convert to rational
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution;
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << gst_caps_to_string(caps);
#endif
g_signal_emit_by_name(G_OBJECT(m_pipeline),
SET_VIDEO_RESOLUTION_FPS,
resolution.width(),
resolution.height(),
0, //framerate nom == max rate
1, // framerate denom == max rate
NULL);
}
g_object_set(m_camerabin, "video-capture-caps", caps, NULL);
gst_caps_unref(caps);
}
}
GstElement *CameraBinSession::buildVideoSrc()
GstElement *CameraBinSession::buildCameraSource()
{
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO;
#endif
GstElement *videoSrc = 0;
if (m_videoInputFactory) {
videoSrc = m_videoInputFactory->buildElement();
} else {
QList<QByteArray> candidates;
candidates << "subdevsrc"
<< "v4l2camsrc"
<< "v4l2src"
<< "autovideosrc";
candidates << "wrappercamerabinsrc";
QByteArray sourceElementName;
foreach(sourceElementName, candidates) {
@@ -389,7 +370,11 @@ GstElement *CameraBinSession::buildVideoSrc()
else
g_object_set(G_OBJECT(videoSrc), "camera-device", 0, NULL);
} else {
g_object_set(G_OBJECT(videoSrc), "device", m_inputDevice.toLocal8Bit().constData(), NULL);
if (g_object_class_find_property(G_OBJECT_GET_CLASS(videoSrc), "device"))
g_object_set(G_OBJECT(videoSrc),
"device",
m_inputDevice.toLocal8Bit().constData(),
NULL);
}
}
}
@@ -405,9 +390,13 @@ void CameraBinSession::captureImage(int requestId, const QString &fileName)
m_requestId = requestId;
g_object_set(G_OBJECT(m_pipeline), FILENAME_PROPERTY, actualFileName.toLocal8Bit().constData(), NULL);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << m_requestId << fileName << "actual file name:" << actualFileName;
#endif
g_signal_emit_by_name(G_OBJECT(m_pipeline), CAPTURE_START, NULL);
g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, actualFileName.toLocal8Bit().constData(), NULL);
g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
m_imageFileName = actualFileName;
}
@@ -418,10 +407,10 @@ void CameraBinSession::setCaptureMode(QCamera::CaptureMode mode)
switch (m_captureMode) {
case QCamera::CaptureStillImage:
g_object_set(m_pipeline, MODE_PROPERTY, CAMERABIN_IMAGE_MODE, NULL);
g_object_set(m_camerabin, MODE_PROPERTY, CAMERABIN_IMAGE_MODE, NULL);
break;
case QCamera::CaptureVideo:
g_object_set(m_pipeline, MODE_PROPERTY, CAMERABIN_VIDEO_MODE, NULL);
g_object_set(m_camerabin, MODE_PROPERTY, CAMERABIN_VIDEO_MODE, NULL);
break;
}
}
@@ -534,6 +523,8 @@ void CameraBinSession::setViewfinder(QObject *viewfinder)
this, SLOT(handleViewfinderChange()));
disconnect(m_viewfinder, SIGNAL(readyChanged(bool)),
this, SIGNAL(readyChanged(bool)));
m_busHelper->removeMessageFilter(m_viewfinder);
}
m_viewfinder = viewfinder;
@@ -544,6 +535,8 @@ void CameraBinSession::setViewfinder(QObject *viewfinder)
this, SLOT(handleViewfinderChange()));
connect(m_viewfinder, SIGNAL(readyChanged(bool)),
this, SIGNAL(readyChanged(bool)));
m_busHelper->installMessageFilter(m_viewfinder);
}
emit viewfinderChanged();
@@ -573,7 +566,7 @@ void CameraBinSession::setState(QCamera::State newState)
m_pendingState = newState;
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << ENUM_NAME(QCamera, "State", newState);
qDebug() << Q_FUNC_INFO << newState;
#endif
switch (newState) {
@@ -584,7 +577,7 @@ void CameraBinSession::setState(QCamera::State newState)
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
m_state = newState;
if (m_busy)
emit busyChanged(m_busy = false);
@@ -599,19 +592,18 @@ void CameraBinSession::setState(QCamera::State newState)
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_pipeline, GST_STATE_NULL);
m_videoSrc = buildVideoSrc();
g_object_set(m_pipeline, VIDEO_SOURCE_PROPERTY, m_videoSrc, NULL);
updateVideoSourceCaps();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
m_videoSrc = buildCameraSource();
g_object_set(m_camerabin, CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
m_videoInputHasChanged = false;
}
#ifdef USE_READY_STATE_ON_LOADED
gst_element_set_state(m_pipeline, GST_STATE_READY);
gst_element_set_state(m_camerabin, GST_STATE_READY);
#else
m_state = QCamera::LoadedState;
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
emit stateChanged(m_state);
#endif
break;
@@ -619,15 +611,15 @@ void CameraBinSession::setState(QCamera::State newState)
if (setupCameraBin()) {
GstState binState = GST_STATE_NULL;
GstState pending = GST_STATE_NULL;
gst_element_get_state(m_pipeline, &binState, &pending, 0);
gst_element_get_state(m_camerabin, &binState, &pending, 0);
if (pending == GST_STATE_VOID_PENDING && binState == GST_STATE_READY) {
m_pendingResolutionUpdate = false;
setupCaptureResolution();
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
} else {
m_pendingResolutionUpdate = true;
gst_element_set_state(m_pipeline, GST_STATE_READY);
gst_element_set_state(m_camerabin, GST_STATE_READY);
}
}
}
@@ -660,7 +652,7 @@ qint64 CameraBinSession::duration() const
GstFormat format = GST_FORMAT_TIME;
gint64 duration = 0;
if ( m_pipeline && gst_element_query_position(m_pipeline, &format, &duration))
if ( m_camerabin && gst_element_query_position(m_camerabin, &format, &duration))
return duration / 1000000;
else
return 0;
@@ -676,8 +668,8 @@ void CameraBinSession::setMuted(bool muted)
if (m_muted != muted) {
m_muted = muted;
if (m_pipeline)
g_object_set(G_OBJECT(m_pipeline), MUTE_PROPERTY, m_muted, NULL);
if (m_camerabin)
g_object_set(G_OBJECT(m_camerabin), MUTE_PROPERTY, m_muted, NULL);
emit mutedChanged(m_muted);
}
}
@@ -691,8 +683,8 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
{
m_metaData = data;
if (m_pipeline) {
GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_pipeline), GST_TYPE_TAG_SETTER);
if (m_camerabin) {
GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_camerabin), GST_TYPE_TAG_SETTER);
GstElement *element = 0;
while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
QMapIterator<QByteArray, QVariant> it(data);
@@ -743,6 +735,7 @@ bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
if (m_captureMode == QCamera::CaptureStillImage &&
gst_structure_has_name(gm->structure, "preview-image")) {
st = gst_message_get_structure(gm);
if (gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)) {
image = gst_structure_get_value(st, "buffer");
if (image) {
@@ -755,6 +748,9 @@ bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
GstStructure *structure = gst_caps_get_structure(caps, 0);
gint width = 0;
gint height = 0;
#if CAMERABIN_DEBUG
qDebug() << "Preview caps:" << gst_structure_to_string(structure);
#endif
if (structure &&
gst_structure_get_int(structure, "width", &width) &&
@@ -795,24 +791,14 @@ bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
}
}
if (gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
if (m_viewfinderInterface)
m_viewfinderInterface->precessNewStream();
return true;
}
if (gst_structure_has_name(gm->structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE))
m_cameraFocusControl->handleFocusMessage(gm);
if (m_viewfinderInterface && GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_viewfinderElement))
m_viewfinderInterface->handleSyncMessage(gm);
}
return false;
}
void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
@@ -830,13 +816,20 @@ void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
}
//only report error messager from camerabin
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_camerabin)) {
if (message.isEmpty())
message = tr("Camera error");
emit error(int(QMediaRecorder::ResourceError), message);
}
#ifdef CAMERABIN_DEBUG_DUMP_BIN
_gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_camerabin),
GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
"camerabin_error");
#endif
if (err)
g_error_free (err);
@@ -858,7 +851,7 @@ void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
g_free (debug);
}
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_camerabin)) {
switch (GST_MESSAGE_TYPE(gm)) {
case GST_MESSAGE_DURATION:
break;
@@ -884,6 +877,12 @@ void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
.arg(states[pending]);
#endif
#ifdef CAMERABIN_DEBUG_DUMP_BIN
_gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_camerabin),
GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /*GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
"camerabin");
#endif
switch (newState) {
case GST_STATE_VOID_PENDING:
case GST_STATE_NULL:
@@ -894,7 +893,7 @@ void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
if (m_pendingResolutionUpdate) {
m_pendingResolutionUpdate = false;
setupCaptureResolution();
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
}
if (m_state != QCamera::LoadedState)
emit stateChanged(m_state = QCamera::LoadedState);
@@ -911,12 +910,9 @@ void CameraBinSession::handleBusMessage(const QGstreamerMessage &message)
}
//qDebug() << "New session state:" << ENUM_NAME(CameraBinSession,"State",m_state);
}
if (m_viewfinderInterface && GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_viewfinderElement))
m_viewfinderInterface->handleBusMessage(gm);
emit busMessage(message);
}
return false;
}
void CameraBinSession::recordVideo()
@@ -924,31 +920,19 @@ void CameraBinSession::recordVideo()
m_recordingActive = true;
m_actualSink = m_sink;
if (m_actualSink.isEmpty()) {
QString ext = m_mediaContainerControl->containerMimeType();
QString ext = m_mediaContainerControl->suggestedFileExtension();
m_actualSink = generateFileName("clip_", defaultDir(QCamera::CaptureVideo), ext);
}
g_object_set(G_OBJECT(m_pipeline), FILENAME_PROPERTY, m_actualSink.toEncoded().constData(), NULL);
g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, m_actualSink.toEncoded().constData(), NULL);
g_signal_emit_by_name(G_OBJECT(m_pipeline), CAPTURE_START, NULL);
}
void CameraBinSession::resumeVideoRecording()
{
m_recordingActive = true;
g_signal_emit_by_name(G_OBJECT(m_pipeline), CAPTURE_START, NULL);
}
void CameraBinSession::pauseVideoRecording()
{
g_signal_emit_by_name(G_OBJECT(m_pipeline), CAPTURE_PAUSE, NULL);
g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
}
void CameraBinSession::stopVideoRecording()
{
m_recordingActive = false;
g_signal_emit_by_name(G_OBJECT(m_pipeline), CAPTURE_STOP, NULL);
g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_STOP, NULL);
}
//internal, only used by CameraBinSession::supportedFrameRates.
@@ -985,13 +969,18 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
{
QList< QPair<int,int> > res;
if (!m_sourceCaps)
GstCaps *supportedCaps = 0;
g_object_get(G_OBJECT(m_camerabin),
SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
if (!supportedCaps)
return res;
GstCaps *caps = 0;
if (frameSize.isEmpty()) {
caps = gst_caps_copy(m_sourceCaps);
caps = gst_caps_copy(supportedCaps);
} else {
GstCaps *filter = gst_caps_new_full(
gst_structure_new(
@@ -1008,12 +997,13 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
"height" , G_TYPE_INT, frameSize.height(), NULL),
NULL);
caps = gst_caps_intersect(m_sourceCaps, filter);
caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter);
}
gst_caps_unref(supportedCaps);
//simplify to the list of rates only:
gst_caps_make_writable(caps);
caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw-yuv");
@@ -1090,18 +1080,24 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
if (continuous)
*continuous = false;
if (!m_sourceCaps)
GstCaps *supportedCaps = 0;
g_object_get(G_OBJECT(m_camerabin),
(mode == QCamera::CaptureStillImage) ?
SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
if (!supportedCaps)
return res;
#if CAMERABIN_DEBUG
qDebug() << "Source caps:" << gst_caps_to_string(m_sourceCaps);
qDebug() << "Source caps:" << gst_caps_to_string(supportedCaps);
#endif
GstCaps *caps = 0;
bool isContinuous = false;
if (rate.first <= 0 || rate.second <= 0) {
caps = gst_caps_copy(m_sourceCaps);
caps = gst_caps_copy(supportedCaps);
} else {
GstCaps *filter = gst_caps_new_full(
gst_structure_new(
@@ -1115,12 +1111,13 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
"framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
NULL);
caps = gst_caps_intersect(m_sourceCaps, filter);
caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter);
}
gst_caps_unref(supportedCaps);
//simplify to the list of resolutions only:
gst_caps_make_writable(caps);
caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw-yuv");
@@ -1184,16 +1181,6 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
<< QSize(2580, 1936);
QSize minSize = res.first();
QSize maxSize = res.last();
#if defined(Q_WS_MAEMO_6)
if (cameraRole() == FrontCamera && maxSize.width() > 640)
maxSize = QSize(640, 480);
else if (mode == QCamera::CaptureVideo && maxSize.width() > 1280)
maxSize = QSize(1280, 720);
#else
Q_UNUSED(mode);
#endif
res.clear();
foreach (const QSize &candidate, commonSizes) {

View File

@@ -76,10 +76,13 @@ public:
virtual GstElement *buildElement() = 0;
};
class CameraBinSession : public QObject, public QGstreamerSyncEventFilter
class CameraBinSession : public QObject,
public QGstreamerBusMessageFilter,
public QGstreamerSyncMessageFilter
{
Q_OBJECT
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
Q_INTERFACES(QGstreamerBusMessageFilter QGstreamerSyncMessageFilter)
public:
enum CameraRole {
FrontCamera, // Secondary camera
@@ -90,7 +93,8 @@ public:
~CameraBinSession();
GstPhotography *photography();
GstElement *cameraBin() { return m_pipeline; }
GstElement *cameraBin() { return m_camerabin; }
QGstreamerBusHelper *bus() { return m_busHelper; }
CameraRole cameraRole() const;
@@ -139,13 +143,12 @@ public:
qint64 duration() const;
void recordVideo();
void pauseVideoRecording();
void resumeVideoRecording();
void stopVideoRecording();
bool isMuted() const;
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
signals:
void stateChanged(QCamera::State state);
@@ -157,7 +160,6 @@ signals:
void viewfinderChanged();
void readyChanged(bool);
void busyChanged(bool);
void busMessage(const QGstreamerMessage &message);
public slots:
void setDevice(const QString &device);
@@ -167,14 +169,12 @@ public slots:
void setMuted(bool);
private slots:
void handleBusMessage(const QGstreamerMessage &message);
void handleViewfinderChange();
private:
bool setupCameraBin();
void setupCaptureResolution();
void updateVideoSourceCaps();
GstElement *buildVideoSrc();
GstElement *buildCameraSource();
static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d);
QUrl m_sink;
@@ -211,14 +211,12 @@ private:
QGstreamerBusHelper *m_busHelper;
GstBus* m_bus;
GstElement *m_pipeline;
GstElement *m_camerabin;
GstElement *m_videoSrc;
GstElement *m_viewfinderElement;
bool m_viewfinderHasChanged;
bool m_videoInputHasChanged;
GstCaps *m_sourceCaps;
GstElement *m_audioSrc;
GstElement *m_audioConvert;
GstElement *m_capsFilter;

View File

@@ -46,58 +46,10 @@
#include <QtCore/qdebug.h>
CameraBinVideoEncoder::CameraBinVideoEncoder(CameraBinSession *session)
:QVideoEncoderControl(session), m_session(session)
:QVideoEncoderControl(session),
m_session(session),
m_codecs(QGstCodecsInfo::VideoEncoder)
{
QList<QByteArray> codecCandidates;
#if defined(Q_WS_MAEMO_6)
codecCandidates << "video/mpeg4" << "video/h264" << "video/h263";
m_elementNames["video/h264"] = "dsph264enc";
m_elementNames["video/mpeg4"] = "dsphdmp4venc";
m_elementNames["video/h263"] = "dsph263enc";
QStringList options = QStringList() << "mode" << "keyframe-interval" << "max-bitrate" << "intra-refresh";
m_codecOptions["video/h264"] = options;
m_codecOptions["video/mpeg4"] = options;
m_codecOptions["video/h263"] = options;
#else
codecCandidates << "video/h264" << "video/xvid" << "video/mpeg4"
<< "video/mpeg1" << "video/mpeg2" << "video/theora"
<< "video/VP8" << "video/h261" << "video/mjpeg";
m_elementNames["video/h264"] = "x264enc";
m_elementNames["video/xvid"] = "xvidenc";
m_elementNames["video/mpeg4"] = "ffenc_mpeg4";
m_elementNames["video/mpeg1"] = "ffenc_mpeg1video";
m_elementNames["video/mpeg2"] = "ffenc_mpeg2video";
m_elementNames["video/theora"] = "theoraenc";
m_elementNames["video/mjpeg"] = "ffenc_mjpeg";
m_elementNames["video/VP8"] = "vp8enc";
m_elementNames["video/h261"] = "ffenc_h261";
m_codecOptions["video/h264"] = QStringList() << "quantizer";
m_codecOptions["video/xvid"] = QStringList() << "quantizer" << "profile";
m_codecOptions["video/mpeg4"] = QStringList() << "quantizer";
m_codecOptions["video/mpeg1"] = QStringList() << "quantizer";
m_codecOptions["video/mpeg2"] = QStringList() << "quantizer";
m_codecOptions["video/theora"] = QStringList();
#endif
foreach( const QByteArray& codecName, codecCandidates ) {
QByteArray elementName = m_elementNames[codecName];
GstElementFactory *factory = gst_element_factory_find(elementName.constData());
if (factory) {
m_codecs.append(codecName);
const gchar *descr = gst_element_factory_get_description(factory);
m_codecDescriptions.insert(codecName, QString::fromUtf8(descr));
m_streamTypes.insert(codecName,
CameraBinContainer::supportedStreamTypes(factory, GST_PAD_SRC));
gst_object_unref(GST_OBJECT(factory));
}
}
}
CameraBinVideoEncoder::~CameraBinVideoEncoder()
@@ -134,12 +86,12 @@ QList< qreal > CameraBinVideoEncoder::supportedFrameRates(const QVideoEncoderSet
QStringList CameraBinVideoEncoder::supportedVideoCodecs() const
{
return m_codecs;
return m_codecs.supportedCodecs();
}
QString CameraBinVideoEncoder::videoCodecDescription(const QString &codecName) const
{
return m_codecDescriptions.value(codecName);
return m_codecs.codecDescription(codecName);
}
QStringList CameraBinVideoEncoder::supportedEncodingOptions(const QString &codec) const
@@ -180,118 +132,6 @@ void CameraBinVideoEncoder::resetActualSettings()
m_videoSettings = m_userSettings;
}
GstElement *CameraBinVideoEncoder::createEncoder()
{
QString codec = m_videoSettings.codec();
QByteArray elementName = m_elementNames.value(codec);
GstElement *encoderElement = gst_element_factory_make( elementName.constData(), "video-encoder");
if (encoderElement) {
if (m_videoSettings.encodingMode() == QtMultimediaKit::ConstantQualityEncoding) {
QtMultimediaKit::EncodingQuality qualityValue = m_videoSettings.quality();
if (elementName == "x264enc") {
//constant quantizer mode
g_object_set(G_OBJECT(encoderElement), "pass", 4, NULL);
int qualityTable[] = {
50, //VeryLow
35, //Low
21, //Normal
15, //High
8 //VeryHigh
};
g_object_set(G_OBJECT(encoderElement), "quantizer", qualityTable[qualityValue], NULL);
} else if (elementName == "xvidenc") {
//constant quantizer mode
g_object_set(G_OBJECT(encoderElement), "pass", 3, NULL);
int qualityTable[] = {
32, //VeryLow
12, //Low
5, //Normal
3, //High
2 //VeryHigh
};
int quant = qualityTable[qualityValue];
g_object_set(G_OBJECT(encoderElement), "quantizer", quant, NULL);
} else if (elementName == "ffenc_mpeg4" ||
elementName == "ffenc_mpeg1video" ||
elementName == "ffenc_mpeg2video" ) {
//constant quantizer mode
g_object_set(G_OBJECT(encoderElement), "pass", 2, NULL);
//quant from 1 to 30, default ~3
double qualityTable[] = {
20, //VeryLow
8.0, //Low
3.0, //Normal
2.5, //High
2.0 //VeryHigh
};
double quant = qualityTable[qualityValue];
g_object_set(G_OBJECT(encoderElement), "quantizer", quant, NULL);
} else if (elementName == "theoraenc") {
int qualityTable[] = {
8, //VeryLow
16, //Low
32, //Normal
45, //High
60 //VeryHigh
};
//quality from 0 to 63
int quality = qualityTable[qualityValue];
g_object_set(G_OBJECT(encoderElement), "quality", quality, NULL);
} else if (elementName == "dsph264enc" ||
elementName == "dspmp4venc" ||
elementName == "dsphdmp4venc" ||
elementName == "dsph263enc") {
//only bitrate parameter is supported
int qualityTable[] = {
1000000, //VeryLow
2000000, //Low
4000000, //Normal
8000000, //High
16000000 //VeryHigh
};
int bitrate = qualityTable[qualityValue];
g_object_set(G_OBJECT(encoderElement), "bitrate", bitrate, NULL);
}
} else {
int bitrate = m_videoSettings.bitRate();
if (bitrate > 0) {
g_object_set(G_OBJECT(encoderElement), "bitrate", bitrate, NULL);
}
}
QMap<QString,QVariant> options = m_options.value(codec);
QMapIterator<QString,QVariant> it(options);
while (it.hasNext()) {
it.next();
QString option = it.key();
QVariant value = it.value();
switch (value.type()) {
case QVariant::Int:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toInt(), NULL);
break;
case QVariant::Bool:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toBool(), NULL);
break;
case QVariant::Double:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toDouble(), NULL);
break;
case QVariant::String:
g_object_set(G_OBJECT(encoderElement), option.toAscii(), value.toString().toUtf8().constData(), NULL);
break;
default:
qWarning() << "unsupported option type:" << option << value;
break;
}
}
}
return encoderElement;
}
QPair<int,int> CameraBinVideoEncoder::rateAsRational(qreal frameRate) const
{
@@ -324,8 +164,19 @@ QPair<int,int> CameraBinVideoEncoder::rateAsRational(qreal frameRate) const
return QPair<int,int>();
}
QSet<QString> CameraBinVideoEncoder::supportedStreamTypes(const QString &codecName) const
GstEncodingProfile *CameraBinVideoEncoder::createProfile()
{
return m_streamTypes.value(codecName);
QString codec = m_videoSettings.codec();
GstCaps *caps;
if (codec.isEmpty())
caps = gst_caps_new_any();
else
caps = gst_caps_from_string(codec.toLatin1());
return (GstEncodingProfile *)gst_encoding_video_profile_new(
caps,
NULL, //preset
NULL, //restriction
0); //presence
}

View File

@@ -50,6 +50,9 @@ class CameraBinSession;
#include <QtCore/qset.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include <gst/pbutils/encoding-profile.h>
#include "qgstcodecsinfo.h"
QT_USE_NAMESPACE
@@ -78,29 +81,23 @@ public:
QVariant encodingOption(const QString &codec, const QString &name) const;
void setEncodingOption(const QString &codec, const QString &name, const QVariant &value);
GstElement *createEncoder();
QSet<QString> supportedStreamTypes(const QString &codecName) const;
void setActualVideoSettings(const QVideoEncoderSettings&);
void resetActualSettings();
GstEncodingProfile *createProfile();
Q_SIGNALS:
void settingsChanged();
private:
CameraBinSession *m_session;
QStringList m_codecs;
QMap<QString,QString> m_codecDescriptions;
QMap<QString,QByteArray> m_elementNames;
QGstCodecsInfo m_codecs;
QMap<QString,QStringList> m_codecOptions;
QMap<QString, QMap<QString, QVariant> > m_options;
QVideoEncoderSettings m_videoSettings; // backend selected settings, using m_userSettings
QVideoEncoderSettings m_userSettings;
QMap<QString, QMap<QString, QVariant> > m_options;
QMap<QString, QSet<QString> > m_streamTypes;
};
#endif

View File

@@ -21,7 +21,8 @@ PKGCONFIG += \
gstreamer-base-0.10 \
gstreamer-interfaces-0.10 \
gstreamer-audio-0.10 \
gstreamer-video-0.10
gstreamer-video-0.10 \
gstreamer-pbutils-0.10
maemo*:PKGCONFIG +=gstreamer-plugins-bad-0.10
contains(config_test_gstreamer_appsrc, yes): PKGCONFIG += gstreamer-app-0.10
@@ -53,6 +54,7 @@ HEADERS += \
qgstreamervideoinputdevicecontrol.h \
gstvideoconnector.h \
qabstractgstbufferpool.h \
qgstcodecsinfo.h \
qgstutils.h
SOURCES += \
@@ -65,6 +67,7 @@ SOURCES += \
qgstvideobuffer.cpp \
qvideosurfacegstsink.cpp \
qgstreamervideoinputdevicecontrol.cpp \
qgstcodecsinfo.cpp \
gstvideoconnector.c \
qgstutils.cpp

View File

@@ -160,17 +160,16 @@ QMediaControl *QGstreamerCaptureService::requestControl(const char *name)
if (!m_videoOutput) {
if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
m_videoOutput = m_videoRenderer;
m_captureSession->setVideoPreview(m_videoRenderer);
} else if (qstrcmp(name, QVideoWindowControl_iid) == 0) {
m_videoOutput = m_videoWindow;
m_captureSession->setVideoPreview(m_videoWindow);
} else if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
m_captureSession->setVideoPreview(m_videoWidgetControl);
m_videoOutput = m_videoWidgetControl;
}
if (m_videoOutput)
if (m_videoOutput) {
m_captureSession->setVideoPreview(m_videoOutput);
return m_videoOutput;
}
}
return 0;

View File

@@ -96,8 +96,8 @@ QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::Cap
m_bus = gst_element_get_bus(m_pipeline);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
m_busHelper->installSyncEventFilter(this);
connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage)));
m_busHelper->installMessageFilter(this);
m_audioEncodeControl = new QGstreamerAudioEncode(this);
m_videoEncodeControl = new QGstreamerVideoEncode(this);
m_imageEncodeControl = new QGstreamerImageEncode(this);
@@ -735,6 +735,8 @@ void QGstreamerCaptureSession::setVideoPreview(QObject *viewfinder)
this, SIGNAL(viewfinderChanged()));
disconnect(m_viewfinder, SIGNAL(readyChanged(bool)),
this, SIGNAL(readyChanged(bool)));
m_busHelper->removeMessageFilter(m_viewfinder);
}
m_viewfinder = viewfinder;
@@ -745,6 +747,8 @@ void QGstreamerCaptureSession::setVideoPreview(QObject *viewfinder)
this, SIGNAL(viewfinderChanged()));
connect(m_viewfinder, SIGNAL(readyChanged(bool)),
this, SIGNAL(readyChanged(bool)));
m_busHelper->installMessageFilter(m_viewfinder);
}
emit viewfinderChanged();
@@ -917,29 +921,7 @@ void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &dat
}
}
bool QGstreamerCaptureSession::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoPreview))
m_viewfinderInterface->handleSyncMessage(gm);
if (gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
if (m_audioPreviewFactory)
m_audioPreviewFactory->prepareWinId();
if (m_viewfinderInterface)
m_viewfinderInterface->precessNewStream();
return true;
}
}
return false;
}
void QGstreamerCaptureSession::busMessage(const QGstreamerMessage &message)
bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
@@ -1027,11 +1009,8 @@ void QGstreamerCaptureSession::busMessage(const QGstreamerMessage &message)
}
//qDebug() << "New session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
}
if (m_videoPreview && m_viewfinderInterface &&
GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoPreview))
m_viewfinderInterface->handleBusMessage(gm);
}
return false;
}
void QGstreamerCaptureSession::setMuted(bool muted)

View File

@@ -76,12 +76,13 @@ public:
virtual QList<QSize> supportedResolutions(qreal frameRate = -1) const = 0;
};
class QGstreamerCaptureSession : public QObject, public QGstreamerSyncEventFilter
class QGstreamerCaptureSession : public QObject, public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
Q_ENUMS(State)
Q_ENUMS(CaptureMode)
Q_INTERFACES(QGstreamerBusMessageFilter)
public:
enum CaptureMode { Audio = 1, Video = 2, Image=4, AudioAndVideo = Audio | Video };
enum State { StoppedState, PreviewState, PausedState, RecordingState };
@@ -89,6 +90,8 @@ public:
QGstreamerCaptureSession(CaptureMode captureMode, QObject *parent);
~QGstreamerCaptureSession();
QGstreamerBusHelper *bus() { return m_busHelper; }
CaptureMode captureMode() const { return m_captureMode; }
void setCaptureMode(CaptureMode);
@@ -122,7 +125,7 @@ public:
bool isReady() const;
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
signals:
void stateChanged(QGstreamerCaptureSession::State state);
@@ -144,9 +147,6 @@ public slots:
void setMetaData(const QMap<QByteArray, QVariant>&);
void setMuted(bool);
private slots:
void busMessage(const QGstreamerMessage &message);
private:
enum PipelineMode { EmptyPipeline, PreviewPipeline, RecordingPipeline, PreviewAndRecordingPipeline };

View File

@@ -155,8 +155,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
// Sort out messages
m_bus = gst_element_get_bus(m_playbin);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage)));
m_busHelper->installSyncEventFilter(this);
m_busHelper->installMessageFilter(this);
g_object_set(G_OBJECT(m_playbin), "video-sink", m_videoOutputBin, NULL);
@@ -188,6 +187,11 @@ QGstreamerPlayerSession::~QGstreamerPlayerSession()
}
}
GstElement *QGstreamerPlayerSession::playbin() const
{
return m_playbin;
}
#if defined(HAVE_GST_APPSRC)
void QGstreamerPlayerSession::configureAppSrcElement(GObject* object, GObject *orig, GParamSpec *pspec, QGstreamerPlayerSession* self)
{
@@ -444,16 +448,20 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
this, SLOT(updateVideoRenderer()));
disconnect(m_videoOutput, SIGNAL(readyChanged(bool)),
this, SLOT(updateVideoRenderer()));
}
if (videoOutput) {
connect(videoOutput, SIGNAL(sinkChanged()),
this, SLOT(updateVideoRenderer()));
connect(videoOutput, SIGNAL(readyChanged(bool)),
this, SLOT(updateVideoRenderer()));
m_busHelper->removeMessageFilter(m_videoOutput);
}
m_videoOutput = videoOutput;
if (m_videoOutput) {
connect(m_videoOutput, SIGNAL(sinkChanged()),
this, SLOT(updateVideoRenderer()));
connect(m_videoOutput, SIGNAL(readyChanged(bool)),
this, SLOT(updateVideoRenderer()));
m_busHelper->installMessageFilter(m_videoOutput);
}
}
QGstreamerVideoRendererInterface* renderer = qobject_cast<QGstreamerVideoRendererInterface*>(videoOutput);
@@ -877,29 +885,9 @@ void QGstreamerPlayerSession::setSeekable(bool seekable)
}
}
bool QGstreamerPlayerSession::processSyncMessage(const QGstreamerMessage &message)
bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
if (m_renderer) {
if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink))
m_renderer->handleSyncMessage(gm);
if (gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
m_renderer->precessNewStream();
return true;
}
}
}
return false;
}
void QGstreamerPlayerSession::busMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if (gm) {
//tag message comes from elements inside playbin, not from playbin itself
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_TAG) {
@@ -1111,19 +1099,6 @@ void QGstreamerPlayerSession::busMessage(const QGstreamerMessage &message)
default:
break;
}
} else if (m_videoSink
&& m_renderer
&& GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
m_renderer->handleBusMessage(gm);
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED) {
GstState oldState;
GstState newState;
gst_message_parse_state_changed(gm, &oldState, &newState, 0);
if (oldState == GST_STATE_READY && newState == GST_STATE_PAUSED)
m_renderer->precessNewStream();
}
} else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
GError *err;
gchar *debug;
@@ -1196,6 +1171,8 @@ void QGstreamerPlayerSession::busMessage(const QGstreamerMessage &message)
}
}
}
return false;
}
void QGstreamerPlayerSession::getStreamsInfo()

View File

@@ -62,14 +62,19 @@ class QGstreamerVideoRendererInterface;
QT_USE_NAMESPACE
class QGstreamerPlayerSession : public QObject, public QGstreamerSyncEventFilter
class QGstreamerPlayerSession : public QObject,
public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerBusMessageFilter)
public:
QGstreamerPlayerSession(QObject *parent);
virtual ~QGstreamerPlayerSession();
GstElement *playbin() const;
QGstreamerBusHelper *bus() const { return m_busHelper; }
QNetworkRequest request() const;
QMediaPlayer::State state() const { return m_state; }
@@ -105,7 +110,7 @@ public:
int activeStream(QMediaStreamsControl::StreamType streamType) const;
void setActiveStream(QMediaStreamsControl::StreamType streamType, int streamNumber);
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
#if defined(HAVE_GST_APPSRC)
QGstAppSrc *appsrc() const { return m_appSrc; }
@@ -145,7 +150,6 @@ signals:
void playbackRateChanged(qreal);
private slots:
void busMessage(const QGstreamerMessage &message);
void getStreamsInfo();
void setSeekable(bool);
void finishVideoOutputChange();

View File

@@ -0,0 +1,182 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstcodecsinfo.h"
#include <QtCore/qset.h>
#ifdef QMEDIA_GSTREAMER_CAMERABIN
#include <gst/pbutils/pbutils.h>
#include <gst/pbutils/encoding-profile.h>
#endif
/*
    Builds the list of codecs (expressed as simplified caps strings) supported
    by the installed GStreamer elements of the given \a elementType, together
    with human-readable descriptions keyed by the same caps string.

    Requires the GStreamer element-factory list API (>= 0.10.31); on older
    versions both the codec list and the description map are left empty.
*/
QGstCodecsInfo::QGstCodecsInfo(QGstCodecsInfo::ElementType elementType)
{
#if GST_CHECK_VERSION(0,10,31)
    GstElementFactoryListType gstElementType = 0;
    switch (elementType) {
    case AudioEncoder:
        gstElementType = GST_ELEMENT_FACTORY_TYPE_AUDIO_ENCODER;
        break;
    case VideoEncoder:
        gstElementType = GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER;
        break;
    case Muxer:
        gstElementType = GST_ELEMENT_FACTORY_TYPE_MUXER;
        break;
    }

    GstCaps *allCaps = supportedElementCaps(gstElementType);

    // Scratch caps holding exactly one structure at a time, so that
    // gst_caps_to_string()/gst_pb_utils_get_codec_description() see a
    // single, fixed codec description.
    GstCaps *caps = gst_caps_new_empty();

    uint codecsCount = gst_caps_get_size(allCaps);
    for (uint i=0; i<codecsCount; i++) {
        // Move the next structure out of allCaps into the scratch caps;
        // stealing transfers ownership, so allCaps shrinks as we go.
        gst_caps_append_structure(caps, gst_caps_steal_structure(allCaps, 0));

        gchar *capsString = gst_caps_to_string(caps);
        QString codec = QLatin1String(capsString);
        m_codecs.append(codec);

#ifdef QMEDIA_GSTREAMER_CAMERABIN
        // pbutils provides a user-friendly name, e.g. "MPEG-4 video".
        gchar *description = gst_pb_utils_get_codec_description(caps);
        m_codecDescriptions.insert(codec, QString::fromUtf8(description));

        if (description)
            g_free(description);
#else
        // Without pbutils fall back to the caps string itself.
        m_codecDescriptions.insert(codec, codec);
#endif

        if (capsString)
            g_free(capsString);

        // Drop (and free) the structure before processing the next codec,
        // keeping the scratch caps at a single structure per iteration.
        gst_caps_remove_structure(caps, 0);
    }

    // Both caps containers are empty by now but still owned by us;
    // unref them to avoid leaking one pair of GstCaps per construction.
    gst_caps_unref(caps);
    gst_caps_unref(allCaps);
#endif // GST_CHECK_VERSION(0,10,31)
}
/*
    Returns the caps strings of all codecs detected at construction time,
    in the order they were discovered.
*/
QStringList QGstCodecsInfo::supportedCodecs() const
{
    QStringList codecs = m_codecs;
    return codecs;
}
/*
    Returns the human-readable description recorded for \a codec,
    or a null QString if the codec is unknown.
*/
QString QGstCodecsInfo::codecDescription(const QString &codec) const
{
    QMap<QString,QString>::const_iterator it = m_codecDescriptions.constFind(codec);
    if (it == m_codecDescriptions.constEnd())
        return QString();
    return it.value();
}
#if GST_CHECK_VERSION(0,10,31)
/*!
    List all supported caps for all installed elements of type \a elementType
    with rank at least \a minimumRank, considering only pad templates in the
    \a padDirection direction.

    Caps are simplified to the mime type plus the few fields necessary to
    distinguish different codecs sharing a mime type (like mpegversion or
    layer for audio/mpeg).

    Returns a newly allocated GstCaps; ownership passes to the caller,
    who must gst_caps_unref() it.
*/
GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementType,
                                              GstRank minimumRank,
                                              GstPadDirection padDirection)
{
    GList *elements = gst_element_factory_list_get_elements(elementType, minimumRank);
    GstCaps *res = gst_caps_new_empty();

    // Mime types advertised by pass-through/"fake" encoders (raw formats);
    // these are not real codecs and are filtered out below.
    QSet<QByteArray> fakeEncoderMimeTypes;
    fakeEncoderMimeTypes << "unknown/unknown"
                << "audio/x-raw-int" << "audio/x-raw-float"
                << "video/x-raw-yuv" << "video/x-raw-rgb";

    // Only these structure fields are copied into the simplified caps;
    // they are the ones that distinguish codecs with identical mime types.
    QSet<QByteArray> fieldsToAdd;
    fieldsToAdd << "mpegversion" << "layer" << "layout" << "raversion"
                << "wmaversion" << "wmvversion" << "variant";

    GList *element = elements;
    while (element) {
        GstElementFactory *factory = (GstElementFactory *)element->data;
        element = element->next;

        const GList *padTemplates = gst_element_factory_get_static_pad_templates(factory);
        while (padTemplates) {
            GstStaticPadTemplate *padTemplate = (GstStaticPadTemplate *)padTemplates->data;
            padTemplates = padTemplates->next;

            if (padTemplate->direction == padDirection) {
                // NOTE(review): gst_static_caps_get() returns a reference that
                // is not unreffed here — presumably acceptable because static
                // caps are cached for the process lifetime; confirm.
                const GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);

                for (uint i=0; i<gst_caps_get_size(caps); i++) {
                    const GstStructure *structure = gst_caps_get_structure(caps, i);

                    //skip "fake" encoders
                    if (fakeEncoderMimeTypes.contains(gst_structure_get_name(structure)))
                        continue;

                    GstStructure *newStructure = gst_structure_new(gst_structure_get_name(structure), NULL);

                    //add structure fields to distinguish between formats with similar mime types,
                    //like audio/mpeg
                    for (int j=0; j<gst_structure_n_fields(structure); j++) {
                        const gchar* fieldName = gst_structure_nth_field_name(structure, j);
                        if (fieldsToAdd.contains(fieldName)) {
                            const GValue *value = gst_structure_get_value(structure, fieldName);
                            GType valueType = G_VALUE_TYPE(value);

                            //don't add values of range type,
                            //gst_pb_utils_get_codec_description complains about not fixed caps
                            if (valueType != GST_TYPE_INT_RANGE && valueType != GST_TYPE_DOUBLE_RANGE &&
                                valueType != GST_TYPE_FRACTION_RANGE && valueType != GST_TYPE_LIST &&
                                valueType != GST_TYPE_ARRAY)
                                gst_structure_set_value(newStructure, fieldName, value);
                        }
                    }

                    // merge_structure takes ownership of newStructure and
                    // also deduplicates identical structures in res.
                    gst_caps_merge_structure(res, newStructure);
                }
            }
        }
    }
    gst_plugin_feature_list_free(elements);

    return res;
}
#endif //GST_CHECK_VERSION(0,10,31)

View File

@@ -0,0 +1,72 @@
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTCODECSINFO_H
#define QGSTCODECSINFO_H
#include <QtCore/qmap.h>
#include <QtCore/qstringlist.h>
#include <gst/gst.h>
// Enumerates the codecs (or container formats) supported by the installed
// GStreamer elements of a given kind, exposing each as a simplified caps
// string plus a human-readable description.
class QGstCodecsInfo
{
public:
    // Which element class to enumerate: audio encoders, video encoders,
    // or muxers (container formats).
    enum ElementType { AudioEncoder, VideoEncoder, Muxer };

    QGstCodecsInfo(ElementType elementType);

    // Caps strings of all detected codecs, in discovery order.
    QStringList supportedCodecs() const;
    // Human-readable description for a codec caps string; null if unknown.
    QString codecDescription(const QString &codec) const;

#if GST_CHECK_VERSION(0,10,31)
    // Lists all caps supported by installed elements of elementType,
    // simplified to mime type plus the fields that distinguish codecs.
    // Caller owns the returned GstCaps and must gst_caps_unref() it.
    static GstCaps* supportedElementCaps(GstElementFactoryListType elementType,
                                         GstRank minimumRank = GST_RANK_MARGINAL,
                                         GstPadDirection padDirection = GST_PAD_SRC);
#endif

private:
    QStringList m_codecs;                       // caps strings, discovery order
    QMap<QString,QString> m_codecDescriptions;  // caps string -> description
};
#endif

View File

@@ -39,9 +39,10 @@
**
****************************************************************************/
#include <QMap>
#include <QTimer>
#include <QMutex>
#include <QtCore/qmap.h>
#include <QtCore/qtimer.h>
#include <QtCore/qmutex.h>
#include <QtCore/qlist.h>
#include "qgstreamerbushelper.h"
@@ -57,7 +58,6 @@ public:
setParent(helper);
m_tag = gst_bus_add_watch_full(bus, 0, busCallback, this, NULL);
m_helper = helper;
filter = 0;
}
void removeWatch(QGstreamerBusHelper* helper)
@@ -75,7 +75,12 @@ private:
void processMessage(GstBus* bus, GstMessage* message)
{
Q_UNUSED(bus);
emit m_helper->message(message);
QGstreamerMessage msg(message);
foreach (QGstreamerBusMessageFilter *filter, busFilters) {
if (filter->processBusMessage(msg))
break;
}
emit m_helper->message(msg);
}
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data)
@@ -89,8 +94,9 @@ private:
public:
GstBus* bus;
QGstreamerSyncEventFilter *filter;
QMutex filterMutex;
QList<QGstreamerSyncMessageFilter*> syncFilters;
QList<QGstreamerBusMessageFilter*> busFilters;
};
#else
@@ -131,7 +137,13 @@ private slots:
GstMessage* message;
while ((message = gst_bus_poll(it.value(), GST_MESSAGE_ANY, 0)) != 0) {
emit it.key()->message(message);
QGstreamerMessage msg(message);
foreach (QGstreamerBusMessageFilter *filter, busFilters) {
if (filter->processBusMessage(msg))
break;
}
emit it.key()->message(msg);
gst_message_unref(message);
}
@@ -153,8 +165,9 @@ private:
public:
GstBus* bus;
QGstreamerSyncEventFilter *filter;
QMutex filterMutex;
QList<QGstreamerSyncMessageFilter*> syncFilters;
QList<QGstreamerBusMessageFilter*> busFilters;
};
#endif
@@ -164,12 +177,12 @@ static GstBusSyncReply syncGstBusFilter(GstBus* bus, GstMessage* message, QGstre
Q_UNUSED(bus);
QMutexLocker lock(&d->filterMutex);
bool res = false;
foreach (QGstreamerSyncMessageFilter *filter, d->syncFilters) {
if (filter->processSyncMessage(QGstreamerMessage(message)))
return GST_BUS_DROP;
}
if (d->filter)
res = d->filter->processSyncMessage(QGstreamerMessage(message));
return res ? GST_BUS_DROP : GST_BUS_PASS;
return GST_BUS_PASS;
}
@@ -194,10 +207,31 @@ QGstreamerBusHelper::~QGstreamerBusHelper()
gst_bus_set_sync_handler(d->bus,0,0);
}
void QGstreamerBusHelper::installSyncEventFilter(QGstreamerSyncEventFilter *filter)
void QGstreamerBusHelper::installMessageFilter(QObject *filter)
{
QMutexLocker lock(&d->filterMutex);
d->filter = filter;
QGstreamerSyncMessageFilter *syncFilter = qobject_cast<QGstreamerSyncMessageFilter*>(filter);
if (syncFilter) {
QMutexLocker lock(&d->filterMutex);
if (!d->syncFilters.contains(syncFilter))
d->syncFilters.append(syncFilter);
}
QGstreamerBusMessageFilter *busFilter = qobject_cast<QGstreamerBusMessageFilter*>(filter);
if (busFilter && !d->busFilters.contains(busFilter))
d->busFilters.append(busFilter);
}
void QGstreamerBusHelper::removeMessageFilter(QObject *filter)
{
QGstreamerSyncMessageFilter *syncFilter = qobject_cast<QGstreamerSyncMessageFilter*>(filter);
if (syncFilter) {
QMutexLocker lock(&d->filterMutex);
d->syncFilters.removeAll(syncFilter);
}
QGstreamerBusMessageFilter *busFilter = qobject_cast<QGstreamerBusMessageFilter*>(filter);
if (busFilter)
d->busFilters.removeAll(busFilter);
}
#include "qgstreamerbushelper.moc"

View File

@@ -47,11 +47,23 @@
#include <qgstreamermessage.h>
#include <gst/gst.h>
class QGstreamerSyncEventFilter {
class QGstreamerSyncMessageFilter {
public:
//returns true if message was processed and should be dropped, false otherwise
virtual bool processSyncMessage(const QGstreamerMessage &message) = 0;
};
#define QGstreamerSyncMessageFilter_iid "com.nokia.Qt.QGstreamerSyncMessageFilter/1.0"
Q_DECLARE_INTERFACE(QGstreamerSyncMessageFilter, QGstreamerSyncMessageFilter_iid)
class QGstreamerBusMessageFilter {
public:
//returns true if message was processed and should be dropped, false otherwise
virtual bool processBusMessage(const QGstreamerMessage &message) = 0;
};
#define QGstreamerBusMessageFilter_iid "com.nokia.Qt.QGstreamerBusMessageFilter/1.0"
Q_DECLARE_INTERFACE(QGstreamerBusMessageFilter, QGstreamerBusMessageFilter_iid)
class QGstreamerBusHelperPrivate;
@@ -64,12 +76,12 @@ public:
QGstreamerBusHelper(GstBus* bus, QObject* parent = 0);
~QGstreamerBusHelper();
void installSyncEventFilter(QGstreamerSyncEventFilter *filter);
void installMessageFilter(QObject *filter);
void removeMessageFilter(QObject *filter);
signals:
void message(QGstreamerMessage const& message);
private:
QGstreamerBusHelperPrivate* d;
};

View File

@@ -364,13 +364,16 @@ bool QGstreamerGLTextureRenderer::isReady() const
return m_surface->supportedPixelFormats(EGLImageTextureHandle).isEmpty();
}
void QGstreamerGLTextureRenderer::handleBusMessage(GstMessage* gm)
bool QGstreamerGLTextureRenderer::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
#ifdef GL_TEXTURE_SINK_DEBUG
qDebug() << Q_FUNC_INFO << GST_MESSAGE_TYPE_NAME(gm);
#endif
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED) {
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED &&
GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
GstState oldState;
GstState newState;
gst_message_parse_state_changed(gm, &oldState, &newState, 0);
@@ -387,22 +390,20 @@ void QGstreamerGLTextureRenderer::handleBusMessage(GstMessage* gm)
updateNativeVideoSize();
}
}
return false;
}
void QGstreamerGLTextureRenderer::handleSyncMessage(GstMessage* gm)
bool QGstreamerGLTextureRenderer::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
#ifdef GL_TEXTURE_SINK_DEBUG
qDebug() << Q_FUNC_INFO;
#endif
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id"))
precessNewStream();
}
void QGstreamerGLTextureRenderer::precessNewStream()
{
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
GstXOverlay *overlay = GST_X_OVERLAY(m_videoSink);
gst_x_overlay_set_xwindow_id(overlay, m_winId);
@@ -417,7 +418,11 @@ void QGstreamerGLTextureRenderer::precessNewStream()
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
return true;
}
return false;
}
void QGstreamerGLTextureRenderer::stopRenderer()

View File

@@ -44,6 +44,7 @@
#include <qvideorenderercontrol.h>
#include "qvideosurfacegstsink.h"
#include "qgstreamerbushelper.h"
#include "qgstreamervideorendererinterface.h"
#include <QtGui/qcolor.h>
@@ -54,10 +55,13 @@ QT_USE_NAMESPACE
class QGLContext;
class QGstreamerGLTextureRenderer : public QVideoRendererControl, public QGstreamerVideoRendererInterface
class QGstreamerGLTextureRenderer : public QVideoRendererControl,
public QGstreamerVideoRendererInterface,
public QGstreamerSyncMessageFilter,
public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface)
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter QGstreamerBusMessageFilter)
Q_PROPERTY(bool overlayEnabled READ overlayEnabled WRITE setOverlayEnabled)
Q_PROPERTY(qulonglong winId READ winId WRITE setWinId)
@@ -75,9 +79,8 @@ public:
GstElement *videoSink();
bool isReady() const;
void handleBusMessage(GstMessage* gm);
void handleSyncMessage(GstMessage* gm);
void precessNewStream();
bool processBusMessage(const QGstreamerMessage &message);
bool processSyncMessage(const QGstreamerMessage &message);
void stopRenderer();
int framebufferNumber() const;

View File

@@ -61,7 +61,6 @@ public:
void setSurface(QAbstractVideoSurface *surface);
GstElement *videoSink();
void precessNewStream() {}
bool isReady() const { return m_surface != 0; }

View File

@@ -51,7 +51,6 @@ class QGstreamerVideoRendererInterface
public:
virtual ~QGstreamerVideoRendererInterface();
virtual GstElement *videoSink() = 0;
virtual void precessNewStream() {}
//stopRenderer() is called when the renderer element is stopped.
//it can be reimplemented when video renderer can't detect
@@ -62,10 +61,6 @@ public:
//(winId is known,
virtual bool isReady() const { return true; }
//video renderer may handle video sink specific gstreamer messages.
virtual void handleBusMessage(GstMessage*) {};
virtual void handleSyncMessage(GstMessage*) {};
//signals:
//void sinkChanged();
//void readyChanged(bool);

View File

@@ -179,10 +179,36 @@ bool QGstreamerVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
return false;
}
void QGstreamerVideoWidgetControl::precessNewStream()
bool QGstreamerVideoWidgetControl::processSyncMessage(const QGstreamerMessage &message)
{
setOverlay();
QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
GstMessage* gm = message.rawMessage();
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
setOverlay();
QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
return true;
}
return false;
}
bool QGstreamerVideoWidgetControl::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED &&
GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
GstState oldState;
GstState newState;
gst_message_parse_state_changed(gm, &oldState, &newState, 0);
if (oldState == GST_STATE_READY && newState == GST_STATE_PAUSED)
updateNativeVideoSize();
}
return false;
}
void QGstreamerVideoWidgetControl::setOverlay()

View File

@@ -45,6 +45,7 @@
#include <qvideowidgetcontrol.h>
#include "qgstreamervideorendererinterface.h"
#include "qgstreamerbushelper.h"
QT_USE_NAMESPACE
@@ -53,15 +54,16 @@ class QGstreamerVideoWidget;
class QGstreamerVideoWidgetControl
: public QVideoWidgetControl
, public QGstreamerVideoRendererInterface
, public QGstreamerSyncMessageFilter
, public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface)
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter QGstreamerBusMessageFilter)
public:
QGstreamerVideoWidgetControl(QObject *parent = 0);
virtual ~QGstreamerVideoWidgetControl();
GstElement *videoSink();
void precessNewStream();
QWidget *videoWidget();
@@ -86,6 +88,8 @@ public:
void setOverlay();
bool eventFilter(QObject *object, QEvent *event);
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
public slots:
void updateNativeVideoSize();

View File

@@ -115,14 +115,23 @@ void QGstreamerVideoWindow::setWinId(WId id)
emit readyChanged(false);
}
void QGstreamerVideoWindow::precessNewStream()
bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
{
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
GstMessage* gm = message.rawMessage();
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
return true;
}
return false;
}
QRect QGstreamerVideoWindow::displayRect() const

View File

@@ -45,6 +45,7 @@
#include <qvideowindowcontrol.h>
#include "qgstreamervideorendererinterface.h"
#include "qgstreamerbushelper.h"
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
@@ -55,10 +56,12 @@ class QX11VideoSurface;
QT_USE_NAMESPACE
class QGstreamerVideoWindow : public QVideoWindowControl, public QGstreamerVideoRendererInterface
class QGstreamerVideoWindow : public QVideoWindowControl,
public QGstreamerVideoRendererInterface,
public QGstreamerSyncMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface)
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter)
Q_PROPERTY(QColor colorKey READ colorKey WRITE setColorKey)
Q_PROPERTY(bool autopaintColorKey READ autopaintColorKey WRITE setAutopaintColorKey)
public:
@@ -103,7 +106,7 @@ public:
GstElement *videoSink();
void precessNewStream();
bool processSyncMessage(const QGstreamerMessage &message);
bool isReady() const { return m_windowId != 0; }
signals: