GStreamer: port to 1.0.

0.10 is still used by default.
To enable GStreamer 1.0, pass GST_VERSION=1.0 to qmake
for qtmultimedia.pro.

Contributions from:
Andrew den Exter <andrew.den.exter@qinetic.com.au>
Ilya Smelykh <ilya@videoexpertsgroup.com>
Jim Hodapp <jim.hodapp@canonical.com>
Sergio Schvezov <sergio.schvezov@canonical.com>

Change-Id: I72a46d1170a8794a149bdb5e20767afcc5b7587c
Reviewed-by: Andrew den Exter <andrew.den.exter@qinetic.com.au>
This commit is contained in:
Yoann Lopes
2014-11-20 17:54:18 +01:00
committed by Andrew den Exter
parent 7e3d69668e
commit 108dda7a90
71 changed files with 3669 additions and 1382 deletions

View File

@@ -2,6 +2,7 @@ TEMPLATE = lib
TARGET = qgsttools_p
QPRO_PWD = $$PWD
QT = core-private multimedia-private gui-private
!static:DEFINES += QT_MAKEDLL
@@ -15,15 +16,17 @@ LIBS_PRIVATE += \
CONFIG += link_pkgconfig
PKGCONFIG_PRIVATE += \
gstreamer-0.10 \
gstreamer-base-0.10 \
gstreamer-interfaces-0.10 \
gstreamer-audio-0.10 \
gstreamer-video-0.10 \
gstreamer-pbutils-0.10
PKGCONFIG += \
gstreamer-$$GST_VERSION \
gstreamer-base-$$GST_VERSION \
gstreamer-audio-$$GST_VERSION \
gstreamer-video-$$GST_VERSION \
gstreamer-pbutils-$$GST_VERSION
maemo*: PKGCONFIG_PRIVATE +=gstreamer-plugins-bad-0.10
equals(GST_VERSION,"0.10") {
PKGCONFIG_PRIVATE += gstreamer-interfaces-0.10
maemo*: PKGCONFIG_PRIVATE +=gstreamer-plugins-bad-0.10
}
config_resourcepolicy {
DEFINES += HAVE_RESOURCE_POLICY
@@ -33,38 +36,36 @@ config_resourcepolicy {
# Header files must go inside source directory of a module
# to be installed by syncqt.
INCLUDEPATH += ../multimedia/gsttools_headers/
INCLUDEPATH += ../plugins/gstreamer/mediaplayer/
VPATH += ../multimedia/gsttools_headers/
PRIVATE_HEADERS += \
qgstbufferpoolinterface_p.h \
qgstreamerbushelper_p.h \
qgstreamermessage_p.h \
qgstutils_p.h \
qgstvideobuffer_p.h \
qvideosurfacegstsink_p.h \
qgstreamerbufferprobe_p.h \
qgstreamervideorendererinterface_p.h \
qgstreameraudioinputselector_p.h \
qgstreamervideorenderer_p.h \
qgstreamervideoinputdevicecontrol_p.h \
gstvideoconnector_p.h \
qgstcodecsinfo_p.h \
qgstreamervideoprobecontrol_p.h \
qgstreameraudioprobecontrol_p.h \
qgstreamervideowindow_p.h
SOURCES += \
qgstbufferpoolinterface.cpp \
qgstreamerbushelper.cpp \
qgstreamermessage.cpp \
qgstutils.cpp \
qgstvideobuffer.cpp \
qvideosurfacegstsink.cpp \
qgstreamerbufferprobe.cpp \
qgstreamervideorendererinterface.cpp \
qgstreameraudioinputselector.cpp \
qgstreamervideorenderer.cpp \
qgstreamervideoinputdevicecontrol.cpp \
qgstcodecsinfo.cpp \
gstvideoconnector.c \
qgstreamervideoprobecontrol.cpp \
qgstreameraudioprobecontrol.cpp \
qgstreamervideowindow.cpp
@@ -79,25 +80,54 @@ qtHaveModule(widgets) {
qgstreamervideowidget.cpp
}
maemo6 {
PKGCONFIG_PRIVATE += qmsystem2
equals(GST_VERSION,"0.10") {
PRIVATE_HEADERS += \
qgstbufferpoolinterface_p.h \
gstvideoconnector_p.h \
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) {
PRIVATE_HEADERS += qgstreamergltexturerenderer_p.h
SOURCES += qgstreamergltexturerenderer.cpp
QT += opengl
LIBS_PRIVATE += -lEGL -lgstmeegointerfaces-0.10
SOURCES += \
qgstbufferpoolinterface.cpp \
qvideosurfacegstsink.cpp \
gstvideoconnector.c
maemo6 {
PKGCONFIG_PRIVATE += qmsystem2
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) {
PRIVATE_HEADERS += qgstreamergltexturerenderer_p.h
SOURCES += qgstreamergltexturerenderer.cpp
QT += opengl
LIBS_PRIVATE += -lEGL -lgstmeegointerfaces-0.10
}
}
} else {
PRIVATE_HEADERS += \
qgstvideorendererplugin_p.h \
qgstvideorenderersink_p.h
SOURCES += \
qgstvideorendererplugin.cpp \
qgstvideorenderersink.cpp
}
mir: {
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) {
PRIVATE_HEADERS += qgstreamermirtexturerenderer_p.h
SOURCES += qgstreamermirtexturerenderer.cpp
QT += opengl quick
LIBS += -lEGL
}
DEFINES += HAVE_MIR
}
config_gstreamer_appsrc {
PKGCONFIG_PRIVATE += gstreamer-app-0.10
PKGCONFIG_PRIVATE += gstreamer-app-$$GST_VERSION
PRIVATE_HEADERS += qgstappsrc_p.h
SOURCES += qgstappsrc.cpp
DEFINES += HAVE_GST_APPSRC
LIBS_PRIVATE += -lgstapp-0.10
LIBS_PRIVATE += -lgstapp-$$GST_VERSION
}
config_linux_v4l: DEFINES += USE_V4L

View File

@@ -147,23 +147,44 @@ void QGstAppSrc::pushDataToAppSrc()
size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize);
if (size) {
void *data = g_malloc(size);
GstBuffer* buffer = gst_app_buffer_new(data, size, g_free, data);
GstBuffer* buffer = gst_buffer_new_and_alloc(size);
#if GST_CHECK_VERSION(1,0,0)
GstMapInfo mapInfo;
gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE);
void* bufferData = mapInfo.data;
#else
void* bufferData = GST_BUFFER_DATA(buffer);
#endif
buffer->offset = m_stream->pos();
qint64 bytesRead = m_stream->read((char*)GST_BUFFER_DATA(buffer), size);
qint64 bytesRead = m_stream->read((char*)bufferData, size);
buffer->offset_end = buffer->offset + bytesRead - 1;
#if GST_CHECK_VERSION(1,0,0)
gst_buffer_unmap(buffer, &mapInfo);
#endif
if (bytesRead > 0) {
m_dataRequested = false;
m_enoughData = false;
GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
if (ret == GST_FLOW_ERROR) {
qWarning()<<"appsrc: push buffer error";
#if GST_CHECK_VERSION(1,0,0)
} else if (ret == GST_FLOW_FLUSHING) {
qWarning()<<"appsrc: push buffer wrong state";
}
#else
} else if (ret == GST_FLOW_WRONG_STATE) {
qWarning()<<"appsrc: push buffer wrong state";
} else if (ret == GST_FLOW_RESEND) {
}
#endif
#if GST_VERSION_MAJOR < 1
else if (ret == GST_FLOW_RESEND) {
qWarning()<<"appsrc: push buffer resend";
}
#endif
}
} else {
sendEOS();

View File

@@ -32,7 +32,7 @@
****************************************************************************/
#include "qgstcodecsinfo_p.h"
#include "qgstutils_p.h"
#include <QtCore/qset.h>
#ifdef QMEDIA_GSTREAMER_CAMERABIN
@@ -146,7 +146,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
if (fakeEncoderMimeTypes.contains(gst_structure_get_name(structure)))
continue;
GstStructure *newStructure = gst_structure_new(gst_structure_get_name(structure), NULL);
GstStructure *newStructure = qt_gst_structure_new_empty(gst_structure_get_name(structure));
//add structure fields to distinguish between formats with similar mime types,
//like audio/mpeg
@@ -166,7 +166,11 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
}
}
#if GST_CHECK_VERSION(1,0,0)
res =
#endif
gst_caps_merge_structure(res, newStructure);
}
gst_caps_unref(caps);
}

View File

@@ -37,32 +37,48 @@
QGstreamerAudioProbeControl::QGstreamerAudioProbeControl(QObject *parent)
: QMediaAudioProbeControl(parent)
{
}
QGstreamerAudioProbeControl::~QGstreamerAudioProbeControl()
{
}
void QGstreamerAudioProbeControl::bufferProbed(GstBuffer* buffer)
void QGstreamerAudioProbeControl::probeCaps(GstCaps *caps)
{
GstCaps* caps = gst_buffer_get_caps(buffer);
if (!caps)
return;
QAudioFormat format = QGstUtils::audioFormatForCaps(caps);
gst_caps_unref(caps);
if (!format.isValid())
return;
QAudioBuffer audioBuffer = QAudioBuffer(QByteArray((const char*)buffer->data, buffer->size), format);
QMutexLocker locker(&m_bufferMutex);
m_format = format;
}
{
QMutexLocker locker(&m_bufferMutex);
m_pendingBuffer = audioBuffer;
QMetaObject::invokeMethod(this, "bufferProbed", Qt::QueuedConnection);
bool QGstreamerAudioProbeControl::probeBuffer(GstBuffer *buffer)
{
qint64 position = GST_BUFFER_TIMESTAMP(buffer);
position = position >= 0
? position / G_GINT64_CONSTANT(1000) // microseconds
: -1;
QByteArray data;
#if GST_CHECK_VERSION(1,0,0)
GstMapInfo info;
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
data = QByteArray(reinterpret_cast<const char *>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
} else {
return true;
}
#else
data = QByteArray(reinterpret_cast<const char *>(buffer->data), buffer->size);
#endif
QMutexLocker locker(&m_bufferMutex);
if (m_format.isValid()) {
if (!m_pendingBuffer.isValid())
QMetaObject::invokeMethod(this, "bufferProbed", Qt::QueuedConnection);
m_pendingBuffer = QAudioBuffer(data, m_format, position);
}
return true;
}
void QGstreamerAudioProbeControl::bufferProbed()
@@ -73,6 +89,7 @@ void QGstreamerAudioProbeControl::bufferProbed()
if (!m_pendingBuffer.isValid())
return;
audioBuffer = m_pendingBuffer;
m_pendingBuffer = QAudioBuffer();
}
emit audioBufferProbed(audioBuffer);
}

View File

@@ -0,0 +1,174 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstreamerbufferprobe_p.h"
#include "qgstutils_p.h"
QT_BEGIN_NAMESPACE
// Constructs a buffer probe; which pad probes are installed later by
// addProbeToPad() is controlled by 'flags' (ProbeCaps and/or ProbeBuffers).
QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
#if GST_CHECK_VERSION(1,0,0)
    : m_capsProbeId(-1)     // no caps probe installed yet
#else
    : m_caps(0)             // last caps reported on the 0.10 code path
#endif
    , m_bufferProbeId(-1)   // no buffer probe installed yet
    , m_flags(flags)
{
}
QGstreamerBufferProbe::~QGstreamerBufferProbe()
{
#if !GST_CHECK_VERSION(1,0,0)
    // On 0.10 we keep a reference to the most recently probed caps; drop it.
    if (m_caps)
        gst_caps_unref(m_caps);
#endif
}
// Installs the probes requested by m_flags on 'pad'.  'downstream' selects
// the direction of the event stream watched for caps changes (1.0 only).
void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
{
    // If the pad already has negotiated caps, report them right away so the
    // subclass does not have to wait for the first event/buffer.
    if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
        probeCaps(caps);
        gst_caps_unref(caps);
    }
#if GST_CHECK_VERSION(1,0,0)
    if (m_flags & ProbeCaps) {
        // On 1.0 caps changes arrive as CAPS events, so watch the event stream.
        m_capsProbeId = gst_pad_add_probe(
                pad,
                downstream
                        ? GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM
                        : GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                capsProbe,
                this,
                NULL);
    }
    if (m_flags & ProbeBuffers) {
        m_bufferProbeId = gst_pad_add_probe(
                pad, GST_PAD_PROBE_TYPE_BUFFER, bufferProbe, this, NULL);
    }
#else
    // 0.10 only has a buffer-probe API; caps are read from each buffer instead.
    Q_UNUSED(downstream);

    m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(bufferProbe), this);
#endif
}
// Removes any probes previously installed by addProbeToPad() from 'pad'.
void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
    if (m_capsProbeId != -1) {
        gst_pad_remove_probe(pad, m_capsProbeId);
        m_capsProbeId = -1;
    }
    if (m_bufferProbeId != -1) {
        gst_pad_remove_probe(pad, m_bufferProbeId);
        m_bufferProbeId = -1;
    }
#else
    if (m_bufferProbeId != -1) {
        gst_pad_remove_buffer_probe(pad, m_bufferProbeId);
        m_bufferProbeId = -1;
        // Forget the cached caps so a re-added probe reports them again.
        if (m_caps) {
            gst_caps_unref(m_caps);
            m_caps = 0;
        }
    }
#endif
}
// Default implementation ignores caps; subclasses override to inspect them.
void QGstreamerBufferProbe::probeCaps(GstCaps *)
{
}
// Default implementation keeps every buffer; on 1.0 returning false causes
// the buffer to be dropped (see bufferProbe below).
bool QGstreamerBufferProbe::probeBuffer(GstBuffer *)
{
    return true;
}
#if GST_CHECK_VERSION(1,0,0)
// GStreamer 1.0 pad-probe callback for events; forwards CAPS events to
// probeCaps().  'user_data' is the owning QGstreamerBufferProbe.
GstPadProbeReturn QGstreamerBufferProbe::capsProbe(
        GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (GstEvent * const event = gst_pad_probe_info_get_event(info)) {
        if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) {
            GstCaps *caps;
            gst_event_parse_caps(event, &caps);
            control->probeCaps(caps);
        }
    }
    return GST_PAD_PROBE_OK;
}
// GStreamer 1.0 pad-probe callback for buffers; lets probeBuffer() decide
// whether the buffer passes (OK) or is dropped (DROP).
GstPadProbeReturn QGstreamerBufferProbe::bufferProbe(
        GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (GstBuffer * const buffer = gst_pad_probe_info_get_buffer(info))
        return control->probeBuffer(buffer) ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
    return GST_PAD_PROBE_OK;
}
#else
// GStreamer 0.10 buffer-probe callback.  Emulates the 1.0 caps probe by
// comparing each buffer's caps against the last ones seen, then delegates
// to probeBuffer() (FALSE drops the buffer).
gboolean QGstreamerBufferProbe::bufferProbe(GstElement *, GstBuffer *buffer, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (control->m_flags & ProbeCaps) {
        GstCaps *caps = gst_buffer_get_caps(buffer);
        // Only report caps when they differ from the cached ones; qSwap leaves
        // the old caps in 'caps' so they are unreffed below.
        if (caps && (!control->m_caps || !gst_caps_is_equal(control->m_caps, caps))) {
            qSwap(caps, control->m_caps);
            control->probeCaps(control->m_caps);
        }
        if (caps)
            gst_caps_unref(caps);
    }

    if (control->m_flags & ProbeBuffers) {
        return control->probeBuffer(buffer) ? TRUE : FALSE;
    } else {
        return TRUE;
    }
}
#endif
QT_END_NAMESPACE

View File

@@ -154,13 +154,21 @@ QGstreamerBusHelper::QGstreamerBusHelper(GstBus* bus, QObject* parent):
QObject(parent)
{
d = new QGstreamerBusHelperPrivate(this, bus);
#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d, 0);
#else
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d);
#endif
gst_object_ref(GST_OBJECT(bus));
}
QGstreamerBusHelper::~QGstreamerBusHelper()
{
#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(d->bus(), 0, 0, 0);
#else
gst_bus_set_sync_handler(d->bus(),0,0);
#endif
gst_object_unref(GST_OBJECT(d->bus()));
}

View File

@@ -0,0 +1,351 @@
/****************************************************************************
**
** Copyright (C) 2014 Canonical Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstreamermirtexturerenderer_p.h"
#include <qgstreamerplayersession.h>
#include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QAbstractVideoBuffer>
#include <QGuiApplication>
#include <QDebug>
#include <QtQuick/QQuickWindow>
#include <QOpenGLContext>
#include <QGLContext>
#include <QGuiApplication>
#include <qgl.h>
#include <gst/gst.h>
static QGstreamerMirTextureRenderer *rendererInstance = NULL;
// Video buffer backed by a GL texture that the mirsink element renders into.
// Frames carry only the texture handle; CPU mapping is not supported.
class QGstreamerMirTextureBuffer : public QAbstractVideoBuffer
{
public:
    QGstreamerMirTextureBuffer(GLuint textureId) :
        QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle),
        m_textureId(textureId)
    {
    }

    MapMode mapMode() const { return NotMapped; }

    // Mapping is unsupported: there is no CPU-accessible pixel data.
    uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
    {
        qDebug() << Q_FUNC_INFO;
        Q_UNUSED(mode);
        Q_UNUSED(numBytes);
        Q_UNUSED(bytesPerLine);

        return NULL;
    }

    void unmap() { qDebug() << Q_FUNC_INFO; }

    // Consumers read the texture id through the generic handle() API.
    QVariant handle() const { return QVariant::fromValue<unsigned int>(m_textureId); }

    GLuint textureId() { return m_textureId; }

private:
    GLuint m_textureId;
};
// Constructs the renderer and attaches it to 'playerSession'.
// NOTE(review): 'parent' is deliberately not forwarded to the base class
// (QVideoRendererControl(0)); presumably lifetime is managed elsewhere.
QGstreamerMirTextureRenderer::QGstreamerMirTextureRenderer(QObject *parent
        , const QGstreamerPlayerSession *playerSession)
    : QVideoRendererControl(0), m_videoSink(0), m_surface(0),
      m_glSurface(0),
      m_context(0),
      m_glContext(0),
      m_textureId(0),
      m_offscreenSurface(0),
      m_textureBuffer(0)
{
    Q_UNUSED(parent);
    setPlayerSession(playerSession);
}
QGstreamerMirTextureRenderer::~QGstreamerMirTextureRenderer()
{
    // Release the sink reference taken via gst_object_ref_sink() in videoSink().
    if (m_videoSink)
        gst_object_unref(GST_OBJECT(m_videoSink));

    delete m_glContext;
    delete m_offscreenSurface;
}
// Lazily creates the mirsink element (once a surface is set), takes ownership
// of it and installs a one-shot buffer probe used to pick up the native video
// size.  Returns the sink, or 0 if it could not be created yet.
GstElement *QGstreamerMirTextureRenderer::videoSink()
{
    qDebug() << Q_FUNC_INFO;
    // FIXME: Ugly hack until I figure out why passing this segfaults in the g_signal handler
    rendererInstance = const_cast<QGstreamerMirTextureRenderer*>(this);

    if (!m_videoSink && m_surface) {
        qDebug() << Q_FUNC_INFO << ": using mirsink, (this: " << this << ")";

        m_videoSink = gst_element_factory_make("mirsink", "video-output");

        connect(QGuiApplication::instance(), SIGNAL(focusWindowChanged(QWindow*)),
                this, SLOT(handleFocusWindowChanged(QWindow*)), Qt::QueuedConnection);

        g_signal_connect(G_OBJECT(m_videoSink), "frame-ready", G_CALLBACK(handleFrameReady),
                (gpointer)this);
    }

    if (m_videoSink) {
        gst_object_ref_sink(GST_OBJECT(m_videoSink));

        GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
        if (pad) {
            gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
                    padBufferProbe, this, NULL);
            // gst_element_get_static_pad() returns a new reference; drop it,
            // otherwise the pad (and the sink) is leaked.
            gst_object_unref(GST_OBJECT(pad));
        }
    }

    return m_videoSink;
}
// Creates a tiny hidden OpenGL-capable window that serves as an offscreen
// surface for the shared GL context (pre-QOffscreenSurface workaround).
QWindow *QGstreamerMirTextureRenderer::createOffscreenWindow(const QSurfaceFormat &format)
{
    QWindow *offscreen = new QWindow;
    offscreen->setSurfaceType(QWindow::OpenGLSurface);
    offscreen->setFormat(format);
    offscreen->setGeometry(0, 0, 1, 1);
    // The helper window must never intercept user input.
    const Qt::WindowFlags passThrough = offscreen->flags() | Qt::WindowTransparentForInput;
    offscreen->setFlags(passThrough);
    offscreen->create();

    return offscreen;
}
// "frame-ready" signal handler from mirsink (runs on a GStreamer thread).
// Queues renderFrame() onto the renderer's thread.
void QGstreamerMirTextureRenderer::handleFrameReady(gpointer userData)
{
    QGstreamerMirTextureRenderer *renderer = reinterpret_cast<QGstreamerMirTextureRenderer*>(userData);
#if 1
    // See the FIXME in videoSink(): the global instance is used instead of
    // 'renderer' until the userData crash is understood.
    Q_UNUSED(renderer);
    QMutexLocker locker(&rendererInstance->m_mutex);
    QMetaObject::invokeMethod(rendererInstance, "renderFrame", Qt::QueuedConnection);
#else
    // FIXME!
    //QMutexLocker locker(&renderer->m_mutex);
    QMetaObject::invokeMethod(renderer, "renderFrame", Qt::QueuedConnection);
#endif
}
void QGstreamerMirTextureRenderer::renderFrame()
{
//qDebug() << Q_FUNC_INFO;
if (m_context)
m_context->makeCurrent();
GstState pendingState = GST_STATE_NULL;
GstState newState = GST_STATE_NULL;
// Don't block and return immediately:
GstStateChangeReturn ret = gst_element_get_state(m_videoSink, &newState,
&pendingState, 0);
if (ret == GST_STATE_CHANGE_FAILURE || newState == GST_STATE_NULL||
pendingState == GST_STATE_NULL) {
qWarning() << "Invalid state change for renderer, aborting";
stopRenderer();
return;
}
if (!m_surface->isActive()) {
qDebug() << "m_surface is not active";
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
GstCaps *caps = gst_pad_get_current_caps(pad);
if (caps) {
// Get the native video size from the video sink
QSize newNativeSize = QGstUtils::capsCorrectedResolution(caps);
if (m_nativeSize != newNativeSize) {
m_nativeSize = newNativeSize;
emit nativeSizeChanged();
}
gst_caps_unref(caps);
}
// Start the surface
QVideoSurfaceFormat format(m_nativeSize, QVideoFrame::Format_RGB32, QAbstractVideoBuffer::GLTextureHandle);
qDebug() << "m_nativeSize: " << m_nativeSize;
qDebug() << "format: " << format;
if (!m_surface->start(format)) {
qWarning() << Q_FUNC_INFO << ": failed to start the video surface " << format;
return;
}
}
QGstreamerMirTextureBuffer *buffer = new QGstreamerMirTextureBuffer(m_textureId);
//qDebug() << "frameSize: " << m_surface->surfaceFormat().frameSize();
QVideoFrame frame(buffer, m_surface->surfaceFormat().frameSize(),
m_surface->surfaceFormat().pixelFormat());
frame.setMetaData("TextureId", m_textureId);
// Display the video frame on the surface:
m_surface->present(frame);
}
// One-shot buffer probe on the sink pad: once the first buffer flows, queue a
// native-size update and remove the probe (GST_PAD_PROBE_REMOVE).
GstPadProbeReturn QGstreamerMirTextureRenderer::padBufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer userData)
{
    Q_UNUSED(pad);
    Q_UNUSED(info);

    QGstreamerMirTextureRenderer *control = reinterpret_cast<QGstreamerMirTextureRenderer*>(userData);
    QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection);

    return GST_PAD_PROBE_REMOVE;
}
// Deactivates the video surface, if one is set.
void QGstreamerMirTextureRenderer::stopRenderer()
{
    if (!m_surface)
        return;
    m_surface->stop();
}
// Returns the video surface frames are presented on (may be null).
QAbstractVideoSurface *QGstreamerMirTextureRenderer::surface() const
{
    return m_surface;
}
// Sets the surface frames are rendered to.  Drops the current sink so it is
// recreated for the new surface, rewires format-change notifications, and
// emits readyChanged()/sinkChanged() as appropriate.
void QGstreamerMirTextureRenderer::setSurface(QAbstractVideoSurface *surface)
{
    qDebug() << Q_FUNC_INFO;

    if (m_surface != surface) {
        qDebug() << "Saving current QGLContext";
        m_context = const_cast<QGLContext*>(QGLContext::currentContext());

        // The sink is tied to the old surface; release it so videoSink()
        // creates a fresh one.
        if (m_videoSink)
            gst_object_unref(GST_OBJECT(m_videoSink));

        m_videoSink = 0;

        if (m_surface) {
            disconnect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
                       this, SLOT(handleFormatChange()));
        }

        // Capture readiness before the swap so we only signal real changes.
        bool wasReady = isReady();

        m_surface = surface;

        if (m_surface) {
            connect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
                    this, SLOT(handleFormatChange()));
        }

        if (wasReady != isReady())
            emit readyChanged(isReady());

        emit sinkChanged();
    }
}
// Stores the player session this renderer belongs to.  The const_cast mirrors
// the constructor's const parameter; the session is kept as a mutable pointer.
void QGstreamerMirTextureRenderer::setPlayerSession(const QGstreamerPlayerSession *playerSession)
{
    m_playerSession = const_cast<QGstreamerPlayerSession*>(playerSession);
}
void QGstreamerMirTextureRenderer::handleFormatChange()
{
qDebug() << "Supported formats list has changed, reload video output";
if (m_videoSink)
gst_object_unref(GST_OBJECT(m_videoSink));
m_videoSink = 0;
emit sinkChanged();
}
void QGstreamerMirTextureRenderer::updateNativeVideoSize()
{
//qDebug() << Q_FUNC_INFO;
const QSize oldSize = m_nativeSize;
if (m_videoSink) {
// Find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_current_caps(pad);
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
gst_caps_unref(caps);
}
} else {
m_nativeSize = QSize();
}
qDebug() << Q_FUNC_INFO << oldSize << m_nativeSize << m_videoSink;
if (m_nativeSize != oldSize)
emit nativeSizeChanged();
}
// Reacts to application focus-window changes: lazily creates a GL context
// shared with the Quick render thread's context, makes it current on an
// offscreen window, and allocates the texture mirsink renders into.
void QGstreamerMirTextureRenderer::handleFocusWindowChanged(QWindow *window)
{
    qDebug() << Q_FUNC_INFO;

    QOpenGLContext *currContext = QOpenGLContext::currentContext();

    QQuickWindow *w = dynamic_cast<QQuickWindow*>(window);

    // If we don't have a GL context in the current thread, create one and share it
    // with the render thread GL context
    if (!currContext && !m_glContext) {
        // The focused window may be null or not a QQuickWindow; without a
        // Quick window there is no render-thread context to share with.
        if (!w || !w->openglContext())
            return;

        // This emulates the new QOffscreenWindow class with Qt5.1
        m_offscreenSurface = createOffscreenWindow(w->openglContext()->surface()->format());
        m_offscreenSurface->setParent(window);
        QOpenGLContext *shareContext = 0;
        if (m_surface)
            shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
        m_glContext = new QOpenGLContext;
        m_glContext->setFormat(m_offscreenSurface->requestedFormat());

        if (shareContext)
            m_glContext->setShareContext(shareContext);

        if (!m_glContext->create())
        {
            qWarning() << "Failed to create new shared context.";
            return;
        }
    }

    if (m_glContext)
        m_glContext->makeCurrent(m_offscreenSurface);

    if (m_textureId == 0) {
        glGenTextures(1, &m_textureId);
        qDebug() << "texture_id (handleFocusWindowChanged): " << m_textureId << endl;
        // The sink may not exist yet; it picks the texture up once created.
        if (m_videoSink)
            g_object_set(G_OBJECT(m_videoSink), "texture-id", m_textureId, (char*)NULL);
    }
}

View File

@@ -32,7 +32,8 @@
****************************************************************************/
#include "qgstreamervideoprobecontrol_p.h"
#include <private/qvideosurfacegstsink_p.h>
#include "qgstutils_p.h"
#include <private/qgstvideobuffer_p.h>
QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent)
@@ -40,12 +41,10 @@ QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent)
, m_flushing(false)
, m_frameProbed(false)
{
}
QGstreamerVideoProbeControl::~QGstreamerVideoProbeControl()
{
}
void QGstreamerVideoProbeControl::startFlushing()
@@ -67,33 +66,49 @@ void QGstreamerVideoProbeControl::stopFlushing()
m_flushing = false;
}
void QGstreamerVideoProbeControl::bufferProbed(GstBuffer* buffer)
void QGstreamerVideoProbeControl::probeCaps(GstCaps *caps)
{
if (m_flushing)
return;
GstCaps* caps = gst_buffer_get_caps(buffer);
if (!caps)
return;
#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo videoInfo;
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &videoInfo);
QMutexLocker locker(&m_frameMutex);
m_videoInfo = videoInfo;
#else
int bytesPerLine = 0;
QVideoSurfaceFormat format = QVideoSurfaceGstSink::formatForCaps(caps, &bytesPerLine);
gst_caps_unref(caps);
if (!format.isValid() || !bytesPerLine)
return;
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
QVideoFrame frame = QVideoFrame(new QGstVideoBuffer(buffer, bytesPerLine),
format.frameSize(), format.pixelFormat());
QMutexLocker locker(&m_frameMutex);
m_bytesPerLine = bytesPerLine;
#endif
m_format = format;
}
QVideoSurfaceGstSink::setFrameTimeStamps(&frame, buffer);
bool QGstreamerVideoProbeControl::probeBuffer(GstBuffer *buffer)
{
QMutexLocker locker(&m_frameMutex);
if (m_flushing || !m_format.isValid())
return true;
QVideoFrame frame(
#if GST_CHECK_VERSION(1,0,0)
new QGstVideoBuffer(buffer, m_videoInfo),
#else
new QGstVideoBuffer(buffer, m_bytesPerLine),
#endif
m_format.frameSize(),
m_format.pixelFormat());
QGstUtils::setFrameTimeStamps(&frame, buffer);
m_frameProbed = true;
{
QMutexLocker locker(&m_frameMutex);
m_pendingFrame = frame;
if (!m_pendingFrame.isValid())
QMetaObject::invokeMethod(this, "frameProbed", Qt::QueuedConnection);
}
m_pendingFrame = frame;
return true;
}
void QGstreamerVideoProbeControl::frameProbed()
@@ -104,6 +119,7 @@ void QGstreamerVideoProbeControl::frameProbed()
if (!m_pendingFrame.isValid())
return;
frame = m_pendingFrame;
m_pendingFrame = QVideoFrame();
}
emit videoFrameProbed(frame);
}

View File

@@ -35,8 +35,7 @@
#include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QDebug>
#include <QtCore/qdebug.h>
#include <gst/gst.h>

View File

@@ -40,8 +40,13 @@
#include <QtGui/qpainter.h>
#include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QT_BEGIN_NAMESPACE
@@ -130,8 +135,6 @@ void QGstreamerVideoWidgetControl::createVideoWidget()
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
}
GstElement *QGstreamerVideoWidgetControl::videoSink()
@@ -169,9 +172,13 @@ bool QGstreamerVideoWidgetControl::processSyncMessage(const QGstreamerMessage &m
{
GstMessage* gm = message.rawMessage();
#if !GST_CHECK_VERSION(1,0,0)
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
#else
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gst_message_get_structure(gm), "prepare-window-handle")) {
#endif
setOverlay();
QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
return true;
@@ -199,17 +206,24 @@ bool QGstreamerVideoWidgetControl::processBusMessage(const QGstreamerMessage &me
void QGstreamerVideoWidgetControl::setOverlay()
{
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
}
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#endif
}
void QGstreamerVideoWidgetControl::updateNativeVideoSize()
{
if (m_videoSink) {
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
GstCaps *caps = qt_gst_pad_get_current_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
@@ -225,8 +239,13 @@ void QGstreamerVideoWidgetControl::updateNativeVideoSize()
void QGstreamerVideoWidgetControl::windowExposed()
{
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
#endif
}
QWidget *QGstreamerVideoWidgetControl::videoWidget()

View File

@@ -37,36 +37,49 @@
#include <QtCore/qdebug.h>
#include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName)
: QVideoWindowControl(parent)
, QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
, m_videoSink(0)
, m_windowId(0)
, m_aspectRatioMode(Qt::KeepAspectRatio)
, m_fullScreen(false)
, m_colorKey(QColor::Invalid)
{
if (elementName)
if (elementName) {
m_videoSink = gst_element_factory_make(elementName, NULL);
else
} else {
m_videoSink = gst_element_factory_make("xvimagesink", NULL);
}
if (m_videoSink) {
qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
else
qDebug() << "No m_videoSink available!";
}
QGstreamerVideoWindow::~QGstreamerVideoWindow()
{
if (m_videoSink)
if (m_videoSink) {
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
gst_object_unref(GST_OBJECT(m_videoSink));
}
}
WId QGstreamerVideoWindow::winId() const
@@ -82,11 +95,15 @@ void QGstreamerVideoWindow::setWinId(WId id)
WId oldId = m_windowId;
m_windowId = id;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
}
#endif
if (!oldId)
emit readyChanged(true);
@@ -97,20 +114,26 @@ void QGstreamerVideoWindow::setWinId(WId id)
bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
#if GST_CHECK_VERSION(1,0,0)
const GstStructure *s = gst_message_get_structure(gm);
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(s, "prepare-window-handle") &&
m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
return true;
}
#else
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
gst_object_unref(GST_OBJECT(pad));
return true;
}
#endif
return false;
}
@@ -122,7 +145,19 @@ QRect QGstreamerVideoWindow::displayRect() const
void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
{
m_displayRect = rect;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
if (m_displayRect.isEmpty())
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink), -1, -1, -1, -1);
else
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink),
m_displayRect.x(),
m_displayRect.y(),
m_displayRect.width(),
m_displayRect.height());
repaint();
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
#if GST_VERSION_MICRO >= 29
if (m_displayRect.isEmpty())
@@ -136,6 +171,7 @@ void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
repaint();
#endif
}
#endif
}
Qt::AspectRatioMode QGstreamerVideoWindow::aspectRatioMode() const
@@ -157,6 +193,16 @@ void QGstreamerVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
void QGstreamerVideoWindow::repaint()
{
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL;
GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
}
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL;
@@ -165,6 +211,7 @@ void QGstreamerVideoWindow::repaint()
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
}
}
#endif
}
QColor QGstreamerVideoWindow::colorKey() const
@@ -296,32 +343,22 @@ QSize QGstreamerVideoWindow::nativeSize() const
return m_nativeSize;
}
void QGstreamerVideoWindow::padBufferProbe(GstPad *pad, GstBuffer * /* buffer */, gpointer user_data)
void QGstreamerVideoWindow::probeCaps(GstCaps *caps)
{
QGstreamerVideoWindow *control = reinterpret_cast<QGstreamerVideoWindow*>(user_data);
QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection);
gst_pad_remove_buffer_probe(pad, control->m_bufferProbeId);
QSize resolution = QGstUtils::capsCorrectedResolution(caps);
QMetaObject::invokeMethod(
this,
"updateNativeVideoSize",
Qt::QueuedConnection,
Q_ARG(QSize, resolution));
}
void QGstreamerVideoWindow::updateNativeVideoSize()
void QGstreamerVideoWindow::updateNativeVideoSize(const QSize &size)
{
const QSize oldSize = m_nativeSize;
m_nativeSize = QSize();
if (m_videoSink) {
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
gst_caps_unref(caps);
}
}
if (m_nativeSize != oldSize)
if (m_nativeSize != size) {
m_nativeSize = size;
emit nativeSizeChanged();
}
}
GstElement *QGstreamerVideoWindow::videoSink()

View File

@@ -40,7 +40,14 @@
#include <QtCore/qsize.h>
#include <QtCore/qset.h>
#include <QtCore/qstringlist.h>
#include <QtGui/qimage.h>
#include <qaudioformat.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <gst/audio/audio.h>
#include <gst/video/video.h>
// Compile-time element count of a fixed-size C array (the array itself is
// never touched, only its type is inspected).
template<typename T, int N>
static int lengthOf(const T (&)[N])
{
    return N;
}
#ifdef USE_V4L
# include <private/qcore_unix_p.h>
@@ -82,15 +89,24 @@ static void addTagToMap(const GstTagList *list,
map->insert(QByteArray(tag), g_value_get_boolean(&val));
break;
case G_TYPE_CHAR:
#if GLIB_CHECK_VERSION(2,32,0)
map->insert(QByteArray(tag), g_value_get_schar(&val));
#else
map->insert(QByteArray(tag), g_value_get_char(&val));
#endif
break;
case G_TYPE_DOUBLE:
map->insert(QByteArray(tag), g_value_get_double(&val));
break;
default:
// GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
#if GST_CHECK_VERSION(1,0,0)
if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
const GDate *date = (const GDate *)g_value_get_boxed(&val);
#else
if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) {
const GDate *date = gst_value_get_date(&val);
#endif
if (g_date_valid(date)) {
int year = g_date_get_year(date);
int month = g_date_get_month(date);
@@ -169,6 +185,42 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
return size;
}
#if GST_CHECK_VERSION(1,0,0)
namespace {

// Maps each GStreamer 1.0 raw audio format onto the QAudioFormat triple
// (sample type, byte order, sample size in bits) that describes the same
// PCM layout. Used by audioFormatForCaps()/capsForAudioFormat().
struct AudioFormat
{
    GstAudioFormat format;
    QAudioFormat::SampleType sampleType;
    QAudioFormat::Endian byteOrder;
    int sampleSize;   // bits per sample
};
static const AudioFormat qt_audioLookup[] =
{
    { GST_AUDIO_FORMAT_S8   , QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 8  },
    { GST_AUDIO_FORMAT_U8   , QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 8  },
    { GST_AUDIO_FORMAT_S16LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 16 },
    { GST_AUDIO_FORMAT_S16BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 16 },
    { GST_AUDIO_FORMAT_U16LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 16 },
    { GST_AUDIO_FORMAT_U16BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 16 },
    { GST_AUDIO_FORMAT_S32LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_S32BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_U32LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_U32BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_S24LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 24 },
    { GST_AUDIO_FORMAT_S24BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 24 },
    { GST_AUDIO_FORMAT_U24LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 24 },
    { GST_AUDIO_FORMAT_U24BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 24 },
    { GST_AUDIO_FORMAT_F32LE, QAudioFormat::Float      , QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_F32BE, QAudioFormat::Float      , QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_F64LE, QAudioFormat::Float      , QAudioFormat::LittleEndian, 64 },
    { GST_AUDIO_FORMAT_F64BE, QAudioFormat::Float      , QAudioFormat::BigEndian   , 64 }
};

}
#endif
/*!
Returns audio format for caps.
If caps doesn't have a valid audio format, an empty QAudioFormat is returned.
@@ -176,9 +228,26 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
{
const GstStructure *structure = gst_caps_get_structure(caps, 0);
QAudioFormat format;
#if GST_CHECK_VERSION(1,0,0)
GstAudioInfo info;
if (gst_audio_info_from_caps(&info, caps)) {
for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
if (qt_audioLookup[i].format != info.finfo->format)
continue;
format.setSampleType(qt_audioLookup[i].sampleType);
format.setByteOrder(qt_audioLookup[i].byteOrder);
format.setSampleSize(qt_audioLookup[i].sampleSize);
format.setSampleRate(info.rate);
format.setChannelCount(info.channels);
format.setCodec(QStringLiteral("audio/pcm"));
return format;
}
}
#else
const GstStructure *structure = gst_caps_get_structure(caps, 0);
if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-int") == 0) {
@@ -249,16 +318,28 @@ QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
} else {
return QAudioFormat();
}
#endif
return format;
}
#if GST_CHECK_VERSION(1,0,0)
/*!
Returns audio format for a sample.
If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
{
    // Delegate to the caps-based overload; a sample that carries no caps
    // yields an invalid (default-constructed) format.
    GstCaps *sampleCaps = gst_sample_get_caps(sample);
    return sampleCaps ? QGstUtils::audioFormatForCaps(sampleCaps) : QAudioFormat();
}
#else
/*!
Returns audio format for a buffer.
If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
{
GstCaps* caps = gst_buffer_get_caps(buffer);
@@ -269,7 +350,7 @@ QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
gst_caps_unref(caps);
return format;
}
#endif
/*!
Builds GstCaps for an audio format.
@@ -277,8 +358,32 @@ QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
Caller must unref GstCaps.
*/
GstCaps *QGstUtils::capsForAudioFormat(QAudioFormat format)
GstCaps *QGstUtils::capsForAudioFormat(const QAudioFormat &format)
{
if (!format.isValid())
return 0;
#if GST_CHECK_VERSION(1,0,0)
const QAudioFormat::SampleType sampleType = format.sampleType();
const QAudioFormat::Endian byteOrder = format.byteOrder();
const int sampleSize = format.sampleSize();
for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
if (qt_audioLookup[i].sampleType != sampleType
|| qt_audioLookup[i].byteOrder != byteOrder
|| qt_audioLookup[i].sampleSize != sampleSize) {
continue;
}
return gst_caps_new_simple(
"audio/x-raw",
"format" , G_TYPE_STRING, gst_audio_format_to_string(qt_audioLookup[i].format),
"rate" , G_TYPE_INT , format.sampleRate(),
"channels", G_TYPE_INT , format.channelCount(),
NULL);
}
return 0;
#else
GstStructure *structure = 0;
if (format.isValid()) {
@@ -313,6 +418,7 @@ GstCaps *QGstUtils::capsForAudioFormat(QAudioFormat format)
}
return caps;
#endif
}
void QGstUtils::initializeGst()
@@ -576,10 +682,629 @@ QByteArray QGstUtils::cameraDriver(const QString &device, GstElementFactory *fac
return QByteArray();
}
// Walks the GStreamer plugin registry and collects the lower-cased mime types
// of every sink pad template of every element factory accepted by
// \a isValidFactory, plus the names of all type-find factories that look like
// mime types. For "mpeg" caps the "mpegversion" field is expanded so e.g.
// "video/mpeg4" can be matched as well.
QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFactory *factory))
{
    QSet<QString> supportedMimeTypes;

    //enumerate supported mime types
    gst_init(NULL, NULL);

#if GST_CHECK_VERSION(1,0,0)
    GstRegistry *registry = gst_registry_get();
    GList *orig_plugins = gst_registry_get_plugin_list(registry);
#else
    GstRegistry *registry = gst_registry_get_default();
    GList *orig_plugins = gst_default_registry_get_plugin_list ();
#endif
    for (GList *plugins = orig_plugins; plugins; plugins = g_list_next(plugins)) {
        GstPlugin *plugin = (GstPlugin *) (plugins->data);
        // Skip blacklisted plugins; 0.10 has no public accessor for the flag.
#if GST_CHECK_VERSION(1,0,0)
        if (GST_OBJECT_FLAG_IS_SET(GST_OBJECT(plugin), GST_PLUGIN_FLAG_BLACKLISTED))
            continue;
#else
        if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
            continue;
#endif

        GList *orig_features = gst_registry_get_feature_list_by_plugin(
                    registry, gst_plugin_get_name(plugin));
        for (GList *features = orig_features; features; features = g_list_next(features)) {
            if (G_UNLIKELY(features->data == NULL))
                continue;

            GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
            GstElementFactory *factory;

            if (GST_IS_TYPE_FIND_FACTORY(feature)) {
                // Type-find factory names double as mime types.
                // Note: both `continue` paths below run before `factory` is
                // assigned, so the unref at the bottom is only reached when a
                // factory was actually loaded.
                QString name(gst_plugin_feature_get_name(feature));
                if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
                    supportedMimeTypes.insert(name.toLower());
                continue;
            } else if (!GST_IS_ELEMENT_FACTORY (feature)
                       || !(factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature)))) {
                continue;
            } else if (!isValidFactory(factory)) {
                // Do nothing
            } else for (const GList *pads = gst_element_factory_get_static_pad_templates(factory);
                        pads;
                        pads = g_list_next(pads)) {
                GstStaticPadTemplate *padtemplate = static_cast<GstStaticPadTemplate *>(pads->data);

                // Only sink pads describe what the element can consume.
                if (padtemplate->direction == GST_PAD_SINK && padtemplate->static_caps.string) {
                    GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
                    // NOTE(review): `caps` is never unreffed here — confirm
                    // against gst_static_caps_get() ownership rules.
                    if (gst_caps_is_any(caps) || gst_caps_is_empty(caps)) {
                    } else for (guint i = 0; i < gst_caps_get_size(caps); i++) {
                        GstStructure *structure = gst_caps_get_structure(caps, i);
                        QString nameLowcase = QString(gst_structure_get_name(structure)).toLower();

                        supportedMimeTypes.insert(nameLowcase);
                        if (nameLowcase.contains("mpeg")) {
                            //Because mpeg version number is only included in the detail
                            //description, it is necessary to manually extract this information
                            //in order to match the mime type of mpeg4.
                            const GValue *value = gst_structure_get_value(structure, "mpegversion");
                            if (value) {
                                gchar *str = gst_value_serialize(value);
                                QString versions(str);
                                QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
                                foreach (const QString &e, elements)
                                    supportedMimeTypes.insert(nameLowcase + e);
                                g_free(str);
                            }
                        }
                    }
                }
            }
            gst_object_unref(factory);
        }
        gst_plugin_feature_list_free(orig_features);
    }
    gst_plugin_list_free (orig_plugins);

#if defined QT_SUPPORTEDMIMETYPES_DEBUG
    // Optional debug dump of everything that was discovered.
    QStringList list = supportedMimeTypes.toList();
    list.sort();
    if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
        foreach (const QString &type, list)
            qDebug() << type;
    }
#endif

    return supportedMimeTypes;
}
namespace {

// Maps QImage pixel formats to the GStreamer raw video formats used by
// bufferToImage() for direct wrapping of RGB frame data.
struct ColorFormat { QImage::Format imageFormat; GstVideoFormat gstFormat; };
static const ColorFormat qt_colorLookup[] =
{
    { QImage::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx  },
    { QImage::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA  },
    { QImage::Format_RGB888  , GST_VIDEO_FORMAT_RGB   },
    { QImage::Format_RGB16   , GST_VIDEO_FORMAT_RGB16 }
};

}
// Converts a GStreamer buffer into a QImage.
// I420 content is converted to an RGB32 image at half resolution (one output
// pixel per 2x2 input block); the RGB layouts in qt_colorLookup are wrapped
// and deep-copied. Returns a null QImage if the buffer cannot be mapped or
// describes an unsupported format.
#if GST_CHECK_VERSION(1,0,0)
QImage QGstUtils::bufferToImage(GstBuffer *buffer, const GstVideoInfo &videoInfo)
#else
QImage QGstUtils::bufferToImage(GstBuffer *buffer)
#endif
{
    QImage img;

#if GST_CHECK_VERSION(1,0,0)
    // 1.0: map the buffer as a video frame to obtain per-plane pointers.
    GstVideoInfo info = videoInfo;
    GstVideoFrame frame;
    if (!gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ))
        return img;
#else
    // 0.10: read width/height from the buffer's caps.
    GstCaps *caps = gst_buffer_get_caps(buffer);
    if (!caps)
        return img;

    GstStructure *structure = gst_caps_get_structure (caps, 0);
    gint width = 0;
    gint height = 0;

    if (!structure
        || !gst_structure_get_int(structure, "width", &width)
        || !gst_structure_get_int(structure, "height", &height)
        || width <= 0
        || height <= 0) {
        gst_caps_unref(caps);
        return img;
    }
    gst_caps_unref(caps);
    // NOTE(review): `structure` is used below after the caps are unreffed —
    // presumably kept alive by the buffer's own caps reference; confirm.
#endif

#if GST_CHECK_VERSION(1,0,0)
    if (videoInfo.finfo->format == GST_VIDEO_FORMAT_I420) {
        const int width = videoInfo.width;
        const int height = videoInfo.height;

        const int stride[] = { frame.info.stride[0], frame.info.stride[1], frame.info.stride[2] };
        const uchar *data[] = {
            static_cast<const uchar *>(frame.data[0]),
            static_cast<const uchar *>(frame.data[1]),
            static_cast<const uchar *>(frame.data[2])
        };
#else
    if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        // 0.10: assume tightly packed I420 planes following each other.
        const int stride[] = { width, width / 2, width / 2 };
        const uchar *data[] = {
            (const uchar *)buffer->data,
            (const uchar *)buffer->data + width * height,
            (const uchar *)buffer->data + width * height * 5 / 4
        };
#endif
        // Software YUV -> RGB conversion, sampling every second pixel of
        // every second row, hence the half-size output image.
        img = QImage(width/2, height/2, QImage::Format_RGB32);

        for (int y=0; y<height; y+=2) {
            const uchar *yLine = data[0] + (y * stride[0]);
            const uchar *uLine = data[1] + (y * stride[1] / 2);
            const uchar *vLine = data[2] + (y * stride[2] / 2);

            for (int x=0; x<width; x+=2) {
                const qreal Y = 1.164*(yLine[x]-16);
                const int U = uLine[x/2]-128;
                const int V = vLine[x/2]-128;

                int b = qBound(0, int(Y + 2.018*U), 255);
                int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
                int r = qBound(0, int(Y + 1.596*V), 255);

                img.setPixel(x/2,y/2,qRgb(r,g,b));
            }
        }
#if GST_CHECK_VERSION(1,0,0)
    } else for (int i = 0; i < lengthOf(qt_colorLookup); ++i) {
        // RGB layouts: wrap the mapped plane and detach into a deep copy so
        // the image survives the unmap below.
        if (qt_colorLookup[i].gstFormat != videoInfo.finfo->format)
            continue;

        const QImage image(
                    static_cast<const uchar *>(frame.data[0]),
                    videoInfo.width,
                    videoInfo.height,
                    frame.info.stride[0],
                    qt_colorLookup[i].imageFormat);
        img = image;
        img.detach();

        break;
    }

    gst_video_frame_unmap(&frame);
#else
    } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
        // 0.10 RGB: pick the QImage format from the bits-per-pixel field.
        QImage::Format format = QImage::Format_Invalid;
        int bpp = 0;
        gst_structure_get_int(structure, "bpp", &bpp);

        if (bpp == 24)
            format = QImage::Format_RGB888;
        else if (bpp == 32)
            format = QImage::Format_RGB32;

        if (format != QImage::Format_Invalid) {
            img = QImage((const uchar *)buffer->data,
                         width,
                         height,
                         format);
            img.bits(); //detach
        }
    }
#endif
    return img;
}
namespace {

#if GST_CHECK_VERSION(1,0,0)

// GStreamer 1.0 identifies raw video by a GstVideoFormat enum; this table
// pairs each supported enum value with Qt's pixel format. The packed 32-bit
// entries depend on host endianness because the Qt names describe in-memory
// integer layout while the GStreamer names describe byte order.
struct VideoFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    GstVideoFormat gstFormat;
};

static const VideoFormat qt_videoFormatLookup[] =
{
    { QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
    { QVideoFrame::Format_YV12   , GST_VIDEO_FORMAT_YV12 },
    { QVideoFrame::Format_UYVY   , GST_VIDEO_FORMAT_UYVY },
    { QVideoFrame::Format_YUYV   , GST_VIDEO_FORMAT_YUY2 },
    { QVideoFrame::Format_NV12   , GST_VIDEO_FORMAT_NV12 },
    { QVideoFrame::Format_NV21   , GST_VIDEO_FORMAT_NV21 },
    { QVideoFrame::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    { QVideoFrame::Format_RGB32 ,  GST_VIDEO_FORMAT_BGRx },
    { QVideoFrame::Format_BGR32 ,  GST_VIDEO_FORMAT_RGBx },
    { QVideoFrame::Format_ARGB32,  GST_VIDEO_FORMAT_BGRA },
    { QVideoFrame::Format_BGRA32,  GST_VIDEO_FORMAT_ARGB },
#else
    { QVideoFrame::Format_RGB32 ,  GST_VIDEO_FORMAT_xRGB },
    { QVideoFrame::Format_BGR32 ,  GST_VIDEO_FORMAT_xBGR },
    { QVideoFrame::Format_ARGB32,  GST_VIDEO_FORMAT_ARGB },
    { QVideoFrame::Format_BGRA32,  GST_VIDEO_FORMAT_BGRA },
#endif
    { QVideoFrame::Format_RGB24 ,  GST_VIDEO_FORMAT_RGB },
    { QVideoFrame::Format_BGR24 ,  GST_VIDEO_FORMAT_BGR },
    { QVideoFrame::Format_RGB565,  GST_VIDEO_FORMAT_RGB16 }
};

// Index into qt_videoFormatLookup for a Qt pixel format, or -1 if unmapped.
static int indexOfVideoFormat(QVideoFrame::PixelFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].pixelFormat == format)
            return i;

    return -1;
}

// Index into qt_videoFormatLookup for a GStreamer format, or -1 if unmapped.
static int indexOfVideoFormat(GstVideoFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].gstFormat == format)
            return i;

    return -1;
}

#else

// GStreamer 0.10 identifies raw YUV video by fourcc code.
struct YuvFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    guint32 fourcc;
    int bitsPerPixel;
};

static const YuvFormat qt_yuvColorLookup[] =
{
    { QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8  },
    { QVideoFrame::Format_YV12,    GST_MAKE_FOURCC('Y','V','1','2'), 8  },
    { QVideoFrame::Format_UYVY,    GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
    { QVideoFrame::Format_YUYV,    GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
    { QVideoFrame::Format_NV12,    GST_MAKE_FOURCC('N','V','1','2'), 8  },
    { QVideoFrame::Format_NV21,    GST_MAKE_FOURCC('N','V','2','1'), 8  },
    { QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};

// Index into qt_yuvColorLookup for a Qt pixel format, or -1 if unmapped.
static int indexOfYuvColor(QVideoFrame::PixelFormat format)
{
    const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);

    for (int i = 0; i < count; ++i)
        if (qt_yuvColorLookup[i].pixelFormat == format)
            return i;

    return -1;
}

// Index into qt_yuvColorLookup for a fourcc code, or -1 if unmapped.
static int indexOfYuvColor(guint32 fourcc)
{
    const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);

    for (int i = 0; i < count; ++i)
        if (qt_yuvColorLookup[i].fourcc == fourcc)
            return i;

    return -1;
}

// GStreamer 0.10 identifies raw RGB video by bpp/depth/endianness and
// per-channel bit masks; rows must match all seven fields exactly.
struct RgbFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    int bitsPerPixel;
    int depth;
    int endianness;
    int red;
    int green;
    int blue;
    int alpha;
};

static const RgbFormat qt_rgbColorLookup[] =
{
    { QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
    { QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
    { QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
    { QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};

// Index into qt_rgbColorLookup matching all mask fields, or -1 if unmapped.
static int indexOfRgbColor(
        int bits, int depth, int endianness, int red, int green, int blue, int alpha)
{
    const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);

    for (int i = 0; i < count; ++i) {
        if (qt_rgbColorLookup[i].bitsPerPixel == bits
            && qt_rgbColorLookup[i].depth == depth
            && qt_rgbColorLookup[i].endianness == endianness
            && qt_rgbColorLookup[i].red == red
            && qt_rgbColorLookup[i].green == green
            && qt_rgbColorLookup[i].blue == blue
            && qt_rgbColorLookup[i].alpha == alpha) {
            return i;
        }
    }
    return -1;
}
#endif

}
#if GST_CHECK_VERSION(1,0,0)

/*!
  Builds a QVideoSurfaceFormat from \a caps.

  On success \a info receives the parsed GstVideoInfo and the returned format
  carries the matching pixel format, frame size, frame rate and pixel aspect
  ratio. Returns a default-constructed QVideoSurfaceFormat when the caps
  cannot be parsed or the pixel format has no Qt equivalent.
*/
QVideoSurfaceFormat QGstUtils::formatForCaps(
        GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{
    if (gst_video_info_from_caps(info, caps)) {
        int index = indexOfVideoFormat(info->finfo->format);

        if (index != -1) {
            QVideoSurfaceFormat format(
                        QSize(info->width, info->height),
                        qt_videoFormatLookup[index].pixelFormat,
                        handleType);

            // Fix: frame rate is numerator/denominator (fps_n/fps_d); the
            // previous code had the fraction inverted (fps_d/fps_n), matching
            // neither GstVideoInfo's field semantics nor the 0.10 path below.
            if (info->fps_d > 0)
                format.setFrameRate(qreal(info->fps_n) / info->fps_d);

            if (info->par_d > 0)
                format.setPixelAspectRatio(info->par_n, info->par_d);

            return format;
        }
    }
    return QVideoSurfaceFormat();
}

#else

/*!
  Builds a QVideoSurfaceFormat from 0.10-style \a caps, using the fourcc and
  RGB-mask lookup tables above. On success \a bytesPerLine (if non-null)
  receives the 4-byte-aligned scanline width. Returns a default-constructed
  QVideoSurfaceFormat for unrecognized caps.
*/
QVideoSurfaceFormat QGstUtils::formatForCaps(
        GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
{
    const GstStructure *structure = gst_caps_get_structure(caps, 0);

    QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
    int bitsPerPixel = 0;

    QSize size;
    gst_structure_get_int(structure, "width", &size.rwidth());
    gst_structure_get_int(structure, "height", &size.rheight());

    if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        // YUV caps carry the pixel layout as a fourcc code.
        guint32 fourcc = 0;
        gst_structure_get_fourcc(structure, "format", &fourcc);

        int index = indexOfYuvColor(fourcc);
        if (index != -1) {
            pixelFormat = qt_yuvColorLookup[index].pixelFormat;
            bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
        }
    } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
        // RGB caps carry the layout as bpp/depth/endianness plus bit masks.
        int depth = 0;
        int endianness = 0;
        int red = 0;
        int green = 0;
        int blue = 0;
        int alpha = 0;

        gst_structure_get_int(structure, "bpp", &bitsPerPixel);
        gst_structure_get_int(structure, "depth", &depth);
        gst_structure_get_int(structure, "endianness", &endianness);
        gst_structure_get_int(structure, "red_mask", &red);
        gst_structure_get_int(structure, "green_mask", &green);
        gst_structure_get_int(structure, "blue_mask", &blue);
        gst_structure_get_int(structure, "alpha_mask", &alpha);

        int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
        if (index != -1)
            pixelFormat = qt_rgbColorLookup[index].pixelFormat;
    }

    if (pixelFormat != QVideoFrame::Format_Invalid) {
        QVideoSurfaceFormat format(size, pixelFormat, handleType);

        QPair<int, int> rate;
        gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
        if (rate.second)
            format.setFrameRate(qreal(rate.first)/rate.second);

        gint aspectNum = 0;
        gint aspectDenum = 0;
        if (gst_structure_get_fraction(
                structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                format.setPixelAspectRatio(aspectNum, aspectDenum);
        }

        // Round the scanline width up to a 4-byte boundary.
        if (bytesPerLine)
            *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;

        return format;
    }
    return QVideoSurfaceFormat();
}
#endif
// Builds caps advertising every requested pixel format that has a known
// GStreamer equivalent, constrained to positive widths/heights and any
// non-negative frame rate. Caller takes ownership of the returned caps.
GstCaps *QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
{
    GstCaps *caps = gst_caps_new_empty();

#if GST_CHECK_VERSION(1,0,0)
    // 1.0: one "video/x-raw" structure per supported format name.
    foreach (QVideoFrame::PixelFormat format, formats) {
        int index = indexOfVideoFormat(format);

        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw",
                        "format"   , G_TYPE_STRING, gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat),
                        NULL));
        }
    }
#else
    foreach (QVideoFrame::PixelFormat format, formats) {
        // 0.10: YUV formats are expressed as fourcc codes...
        int index = indexOfYuvColor(format);
        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw-yuv",
                        "format", GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
                        NULL));
            continue;
        }

        // ...and RGB formats as bpp/depth/endianness plus channel masks; the
        // alpha mask is only set when the format actually has an alpha channel.
        const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
        for (int i = 0; i < count; ++i) {
            if (qt_rgbColorLookup[i].pixelFormat == format) {
                GstStructure *structure = gst_structure_new(
                            "video/x-raw-rgb",
                            "bpp"       , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
                            "depth"     , G_TYPE_INT, qt_rgbColorLookup[i].depth,
                            "endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
                            "red_mask"  , G_TYPE_INT, qt_rgbColorLookup[i].red,
                            "green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
                            "blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
                            NULL);

                if (qt_rgbColorLookup[i].alpha != 0) {
                    gst_structure_set(
                                structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, NULL);
                }
                gst_caps_append_structure(caps, structure);
            }
        }
    }
#endif

    // Common constraints shared by all appended structures.
    gst_caps_set_simple(
                caps,
                "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                "width"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                "height"   , GST_TYPE_INT_RANGE, 1, INT_MAX,
                NULL);

    return caps;
}
// Copies the buffer's timestamp and duration onto the frame.
// GStreamer timestamps are in nanoseconds; Qt expects microseconds, hence
// the division by 1000. Nothing is set when the buffer has no timestamp.
void QGstUtils::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
    const qint64 bufferStart = GST_BUFFER_TIMESTAMP(buffer);
    if (bufferStart < 0)
        return;

    frame->setStartTime(bufferStart / G_GINT64_CONSTANT(1000));

    const qint64 bufferDuration = GST_BUFFER_DURATION(buffer);
    if (bufferDuration >= 0)
        frame->setEndTime((bufferStart + bufferDuration) / G_GINT64_CONSTANT(1000));
}
// Replaces all tags on \a element with the entries of \a data.
// Elements that do not implement GstTagSetter are left untouched.
void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant> &data)
{
    if (!GST_IS_TAG_SETTER(element))
        return;

    gst_tag_setter_reset_tags(GST_TAG_SETTER(element));

    QMapIterator<QByteArray, QVariant> it(data);
    while (it.hasNext()) {
        it.next();
        const QString tagName = it.key();
        const QVariant tagValue = it.value();

        // Only the variant types below are forwarded; other types are
        // silently skipped.
        switch (tagValue.type()) {
        case QVariant::String:
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                GST_TAG_MERGE_REPLACE,
                tagName.toUtf8().constData(),
                tagValue.toString().toUtf8().constData(),
                NULL);
            break;
        case QVariant::Int:
        case QVariant::LongLong:
            // NOTE(review): LongLong values are narrowed through toInt() —
            // confirm this is intended for tags that can exceed 32 bits.
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                GST_TAG_MERGE_REPLACE,
                tagName.toUtf8().constData(),
                tagValue.toInt(),
                NULL);
            break;
        case QVariant::Double:
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                GST_TAG_MERGE_REPLACE,
                tagName.toUtf8().constData(),
                tagValue.toDouble(),
                NULL);
            break;
        case QVariant::DateTime: {
            // Dates are converted to local time before being handed over.
            QDateTime date = tagValue.toDateTime().toLocalTime();
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                GST_TAG_MERGE_REPLACE,
                tagName.toUtf8().constData(),
                gst_date_time_new_local_time(
                            date.date().year(), date.date().month(), date.date().day(),
                            date.time().hour(), date.time().minute(), date.time().second()),
                NULL);
            break;
        }
        default:
            break;
        }
    }
}
// Applies \a data to every element inside \a bin that implements the
// GstTagSetter interface, via the element overload above.
void QGstUtils::setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data)
{
    GstIterator *elements = gst_bin_iterate_all_by_interface(bin, GST_TYPE_TAG_SETTER);
#if GST_CHECK_VERSION(1,0,0)
    // 1.0 iterators hand out GValue items holding the object.
    GValue item = G_VALUE_INIT;
    while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
        GstElement * const element = GST_ELEMENT(g_value_get_object(&item));
#else
    // 0.10 iterators write the raw pointer directly.
    GstElement *element = 0;
    while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
#endif
        setMetaData(element, data);
    }
    gst_iterator_free(elements);
}
// Returns the caps accepted by Qt's video handling: the raw video formats of
// the linked GStreamer version plus JPEG and H.264 streams. The returned
// caps are made writable so callers can append further restrictions.
GstCaps *QGstUtils::videoFilterCaps()
{
    static GstStaticCaps staticCaps = GST_STATIC_CAPS(
#if GST_CHECK_VERSION(1,2,0)
        "video/x-raw(ANY);"
#elif GST_CHECK_VERSION(1,0,0)
        "video/x-raw;"
#else
        "video/x-raw-yuv;"
        "video/x-raw-rgb;"
        "video/x-raw-data;"
        "video/x-android-buffer;"
#endif
        "image/jpeg;"
        "video/x-h264");

    return gst_caps_make_writable(gst_static_caps_get(&staticCaps));
}
void qt_gst_object_ref_sink(gpointer object)
{
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 24)
#if GST_CHECK_VERSION(0,10,24)
gst_object_ref_sink(object);
#else
g_return_if_fail (GST_IS_OBJECT(object));
@@ -595,4 +1320,50 @@ void qt_gst_object_ref_sink(gpointer object)
#endif
}
// Version-neutral wrapper: returns the caps currently negotiated on \a pad.
// The 0.10 API names this operation "negotiated caps".
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_pad_get_current_caps(pad);
#else
    return gst_pad_get_negotiated_caps(pad);
#endif
}
// Version-neutral wrapper: creates a structure with the given name and no
// fields. 0.10 has no dedicated "empty" constructor, so a NULL-terminated
// gst_structure_new() call is used instead.
GstStructure *qt_gst_structure_new_empty(const char *name)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_structure_new_empty(name);
#else
    return gst_structure_new(name, NULL);
#endif
}
// Version-neutral position query: 0.10 passes the format by pointer
// (in/out), 1.0 passes it by value.
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_element_query_position(element, format, cur);
#else
    return gst_element_query_position(element, &format, cur);
#endif
}
// Version-neutral duration query: 0.10 passes the format by pointer
// (in/out), 1.0 passes it by value.
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_element_query_duration(element, format, cur);
#else
    return gst_element_query_duration(element, &format, cur);
#endif
}
// Streams the caps' canonical string representation into a QDebug stream.
// A null caps pointer leaves the stream untouched.
QDebug operator <<(QDebug debug, GstCaps *caps)
{
    if (!caps)
        return debug;

    gchar *description = gst_caps_to_string(caps);
    debug = debug << description;
    g_free(description);
    return debug;
}
QT_END_NAMESPACE

View File

@@ -35,21 +35,35 @@
QT_BEGIN_NAMESPACE
#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info)
: QAbstractPlanarVideoBuffer(NoHandle)
, m_videoInfo(info)
#else
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine)
: QAbstractVideoBuffer(NoHandle)
, m_buffer(buffer)
, m_bytesPerLine(bytesPerLine)
#endif
, m_buffer(buffer)
, m_mode(NotMapped)
{
gst_buffer_ref(m_buffer);
}
#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info,
QGstVideoBuffer::HandleType handleType,
const QVariant &handle)
: QAbstractPlanarVideoBuffer(handleType)
, m_videoInfo(info)
#else
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
QGstVideoBuffer::HandleType handleType,
const QVariant &handle)
: QAbstractVideoBuffer(handleType)
, m_buffer(buffer)
, m_bytesPerLine(bytesPerLine)
#endif
, m_buffer(buffer)
, m_mode(NotMapped)
, m_handle(handle)
{
@@ -58,6 +72,8 @@ QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
QGstVideoBuffer::~QGstVideoBuffer()
{
unmap();
gst_buffer_unref(m_buffer);
}
@@ -67,12 +83,49 @@ QAbstractVideoBuffer::MapMode QGstVideoBuffer::mapMode() const
return m_mode;
}
#if GST_CHECK_VERSION(1,0,0)
// Maps the underlying GstBuffer for CPU access and fills in the per-plane
// byte counts, strides and data pointers. Returns the number of planes
// mapped, or 0 when the request is invalid or the buffer is already mapped.
// Encoded content (no planes in the video info) is mapped as a single flat
// region with a stride of -1.
int QGstVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
{
    // Translate the Qt map mode into GStreamer map flags.
    const GstMapFlags flags = GstMapFlags(((mode & ReadOnly) ? GST_MAP_READ : 0)
            | ((mode & WriteOnly) ? GST_MAP_WRITE : 0));

    if (mode == NotMapped || m_mode != NotMapped) {
        return 0;
    } else if (m_videoInfo.finfo->n_planes == 0) {         // Encoded
        if (gst_buffer_map(m_buffer, &m_frame.map[0], flags)) {
            if (numBytes)
                *numBytes = m_frame.map[0].size;
            bytesPerLine[0] = -1;
            data[0] = static_cast<uchar *>(m_frame.map[0].data);

            m_mode = mode;

            return 1;
        }
    } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer, flags)) {
        // Raw video: expose each plane with its own stride and pointer.
        if (numBytes)
            *numBytes = m_frame.info.size;

        for (guint i = 0; i < m_frame.info.finfo->n_planes; ++i) {
            bytesPerLine[i] = m_frame.info.stride[i];
            data[i] = static_cast<uchar *>(m_frame.data[i]);
        }

        m_mode = mode;

        return m_frame.info.finfo->n_planes;
    }
    return 0;
}
#else
uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
{
if (mode != NotMapped && m_mode == NotMapped) {
if (numBytes)
*numBytes = m_buffer->size;
if (bytesPerLine)
*bytesPerLine = m_bytesPerLine;
@@ -83,8 +136,19 @@ uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
return 0;
}
}
#endif
// Releases any mapping established by map(); safe to call when not mapped.
void QGstVideoBuffer::unmap()
{
#if GST_CHECK_VERSION(1,0,0)
    if (m_mode != NotMapped) {
        // Encoded buffers (no planes) were mapped with gst_buffer_map(),
        // raw video frames with gst_video_frame_map() — undo accordingly.
        if (m_videoInfo.finfo->n_planes == 0)
            gst_buffer_unmap(m_buffer, &m_frame.map[0]);
        else
            gst_video_frame_unmap(&m_frame);
    }
#endif
    m_mode = NotMapped;
}

View File

@@ -0,0 +1,53 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstvideorendererplugin_p.h"
QT_BEGIN_NAMESPACE
// Base class constructor for renderer plugins; simply forwards the parent
// to QObject so plugin instances participate in normal QObject ownership.
QGstVideoRendererPlugin::QGstVideoRendererPlugin(QObject *parent) :
    QObject(parent)
{
}
QT_END_NAMESPACE
#include "moc_qgstvideorendererplugin_p.cpp"

View File

@@ -0,0 +1,605 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <qabstractvideosurface.h>
#include <qvideoframe.h>
#include <QDebug>
#include <QMap>
#include <QThread>
#include <QEvent>
#include <QCoreApplication>
#include <private/qmediapluginloader_p.h>
#include "qgstvideobuffer_p.h"
#include "qgstvideorenderersink_p.h"
#include <gst/video/video.h>
#include "qgstutils_p.h"
//#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE
// Starts in the flushed state: no frame has been presented yet, so flush()
// has nothing to clear.
QGstDefaultVideoRenderer::QGstDefaultVideoRenderer()
    : m_flushed(true)
{
}
// No resources are owned beyond the members' own destructors.
QGstDefaultVideoRenderer::~QGstDefaultVideoRenderer()
{
}
// Builds GStreamer caps advertising every pixel format the surface can
// present.  Ownership of the returned caps passes to the caller.
GstCaps *QGstDefaultVideoRenderer::getCaps(QAbstractVideoSurface *surface)
{
    return QGstUtils::capsForFormats(surface->supportedPixelFormats());
}
// Translates the negotiated caps into a QVideoSurfaceFormat (capturing the
// GstVideoInfo used later to wrap buffers) and starts the surface with it.
// Returns false if the caps do not map to a valid format or the surface
// refuses to start.
bool QGstDefaultVideoRenderer::start(QAbstractVideoSurface *surface, GstCaps *caps)
{
    m_flushed = true;
    m_format = QGstUtils::formatForCaps(caps, &m_videoInfo);

    return m_format.isValid() && surface->start(m_format);
}
// Stops the surface (when one is set).  After stopping there is no pending
// frame left to flush, so the renderer returns to the flushed state.
void QGstDefaultVideoRenderer::stop(QAbstractVideoSurface *surface)
{
    if (surface)
        surface->stop();

    m_flushed = true;
}
// Wraps the GstBuffer in a QVideoFrame (QGstVideoBuffer takes its own
// reference) and hands it to the surface.  Returns the surface's verdict.
bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer *buffer)
{
    m_flushed = false;

    QVideoFrame frame(
            new QGstVideoBuffer(buffer, m_videoInfo),
            m_format.frameSize(),
            m_format.pixelFormat());
    // Copy the buffer's timestamp/duration onto the frame (ns -> us).
    QGstUtils::setFrameTimeStamps(&frame, buffer);

    return surface->present(frame);
}
// Clears the last presented frame by pushing a null QVideoFrame to the
// surface, but only once per presented frame (m_flushed debounces it).
void QGstDefaultVideoRenderer::flush(QAbstractVideoSurface *surface)
{
    const bool needsFlush = surface && !m_flushed;

    m_flushed = true;

    if (needsFlush)
        surface->present(QVideoFrame());
}
// The default renderer has no special allocator/pool to offer; accept the
// query unchanged.
bool QGstDefaultVideoRenderer::proposeAllocation(GstQuery *)
{
    return true;
}
// Lazily constructed loader for external QGstVideoRendererInterface plugins
// registered under the "video/gstvideorenderer" plugin key.
Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, rendererLoader,
        (QGstVideoRendererInterface_iid, QLatin1String("video/gstvideorenderer"), Qt::CaseInsensitive))
// Mediates between the GStreamer streaming thread and the QAbstractVideoSurface,
// which must be driven from the Qt (main) thread.
QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface)
    : m_surface(surface)
    , m_renderer(0)
    , m_activeRenderer(0)
    , m_surfaceCaps(0)
    , m_startCaps(0)
    , m_lastBuffer(0)
    , m_notified(false)
    , m_stop(false)
    , m_render(false)
    , m_flush(false)
{
    // Plugin-provided renderers are collected first so they take precedence
    // over the built-in default renderer appended last.
    foreach (QObject *instance, rendererLoader()->instances(QGstVideoRendererPluginKey)) {
        QGstVideoRendererInterface* plugin = qobject_cast<QGstVideoRendererInterface*>(instance);

        if (QGstVideoRenderer *renderer = plugin ? plugin->createRenderer() : 0)
            m_renderers.append(renderer);
    }

    m_renderers.append(new QGstDefaultVideoRenderer);

    // Compute the initially advertised caps and keep them in sync with the
    // surface's supported formats.
    updateSupportedFormats();
    connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
}
// Releases the renderers and every GStreamer object a reference is still
// held on.  The original only unreffed m_surfaceCaps, leaking m_startCaps
// (set and reffed in start() when the main thread never consumed it) and
// m_lastBuffer (reffed in render()).
QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
{
    qDeleteAll(m_renderers);

    if (m_surfaceCaps)
        gst_caps_unref(m_surfaceCaps);
    if (m_startCaps)
        gst_caps_unref(m_startCaps);
    if (m_lastBuffer)
        gst_buffer_unref(m_lastBuffer);
}
// Returns the caps currently advertised for the surface, with an extra
// reference owned by the caller.  updateSupportedFormats() can leave
// m_surfaceCaps null when no renderer produced usable caps, and
// gst_caps_ref(NULL) triggers a GLib critical warning, so guard the ref.
GstCaps *QVideoSurfaceGstDelegate::caps()
{
    QMutexLocker locker(&m_mutex);

    if (m_surfaceCaps)
        gst_caps_ref(m_surfaceCaps);

    return m_surfaceCaps;
}
// Called from the streaming thread when new caps are negotiated.  Queues a
// (stop-and-)start request for the main thread and waits for it to be
// serviced.  Returns true if a renderer was successfully activated.
bool QVideoSurfaceGstDelegate::start(GstCaps *caps)
{
    QMutexLocker locker(&m_mutex);

    // If a renderer is already active, schedule it to be flushed and
    // stopped before the new caps are applied.
    if (m_activeRenderer) {
        m_flush = true;
        m_stop = true;
    }

    m_render = false;

    // Any pending frame belongs to the previous format; drop it.
    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }

    // Replace any not-yet-consumed start request with the new caps (we keep
    // our own reference until handleEvent() consumes it).
    if (m_startCaps)
        gst_caps_unref(m_startCaps);

    m_startCaps = caps;
    gst_caps_ref(m_startCaps);

    /*
    Waiting for start() to be invoked in the main thread may block
    if gstreamer blocks the main thread until this call is finished.
    This situation is rare and usually caused by setState(Null)
    while pipeline is being prerolled.

    The proper solution to this involves controlling gstreamer pipeline from
    other thread than video surface.

    Currently start() fails if wait() timed out.
    */
    if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && m_startCaps) {
        qWarning() << "Failed to start video surface due to main thread blocked.";
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }

    return m_activeRenderer != 0;
}
// Called from the streaming thread.  Queues a flush+stop request for the
// main thread, discards any pending start request or buffered frame, and
// waits (bounded) for the request to be serviced.
void QVideoSurfaceGstDelegate::stop()
{
    QMutexLocker locker(&m_mutex);

    if (!m_activeRenderer)
        return;

    m_flush = true;
    m_stop = true;

    // A queued start request is now obsolete; release our caps reference.
    if (m_startCaps) {
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }

    // Drop the retained last frame so it cannot be re-presented.
    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }

    waitForAsyncEvent(&locker, &m_setupCondition, 500);
}
// Forwards an allocation query to the active renderer.  The renderer
// pointer is captured under the lock, but the call itself is made with the
// mutex released so the renderer may block without stalling the delegate.
bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
{
    QMutexLocker locker(&m_mutex);

    QGstVideoRenderer * const renderer = m_activeRenderer;
    if (!renderer)
        return false;

    locker.unlock();

    return renderer->proposeAllocation(query);
}
// Called from the streaming thread.  Queues a flush (cancelling any pending
// render), drops the retained frame, and wakes the main thread; unlike
// stop() it does not wait for completion.
void QVideoSurfaceGstDelegate::flush()
{
    QMutexLocker locker(&m_mutex);

    m_flush = true;
    m_render = false;

    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }

    notify();
}
// Called from the streaming thread for every buffer.  Always retains the
// buffer as m_lastBuffer (so a later preroll toggle can re-present it); if
// `show` is set, additionally queues a render request and blocks (up to
// 300 ms) for the main thread to present it.
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer, bool show)
{
    QMutexLocker locker(&m_mutex);

    // Replace the retained frame with the newest one.
    if (m_lastBuffer)
        gst_buffer_unref(m_lastBuffer);

    m_lastBuffer = buffer;
    gst_buffer_ref(m_lastBuffer);

    if (show) {
        m_render = true;

        // Timeout maps to GST_FLOW_ERROR so the pipeline notices the stall.
        return waitForAsyncEvent(&locker, &m_renderCondition, 300)
                ? m_renderReturn
                : GST_FLOW_ERROR;
    } else {
        return GST_FLOW_OK;
    }
}
// GObject notify callback for the sink's "show-preroll-frame" property.
// While the element is paused (or heading there), toggling the property
// either flushes the currently shown frame or re-presents the retained one.
void QVideoSurfaceGstDelegate::handleShowPrerollChange(GObject *object, GParamSpec *, gpointer d)
{
    QVideoSurfaceGstDelegate * const delegate = static_cast<QVideoSurfaceGstDelegate *>(d);

    gboolean showPreroll = true; // "show-preroll-frame" property is true by default
    g_object_get(object, "show-preroll-frame", &showPreroll, NULL);

    GstState state = GST_STATE_NULL;
    GstState pendingState = GST_STATE_NULL;
    gst_element_get_state(GST_ELEMENT(object), &state, &pendingState, 0);

    // "Paused" means either settled in PAUSED or transitioning towards it.
    const bool paused
            = (pendingState == GST_STATE_VOID_PENDING && state == GST_STATE_PAUSED)
            || pendingState == GST_STATE_PAUSED;

    if (paused) {
        QMutexLocker locker(&delegate->m_mutex);

        if (!showPreroll && delegate->m_lastBuffer) {
            // Preroll display switched off: clear the visible frame.
            delegate->m_render = false;
            delegate->m_flush = true;
            delegate->notify();
        } else if (delegate->m_lastBuffer) {
            // Preroll display switched on: show the retained frame again.
            delegate->m_render = true;
            delegate->notify();
        }
    }
}
// Services the wake-up event posted by notify() on the main thread: drains
// every queued streaming-thread request, then re-arms notify().  All other
// events go to the QObject base implementation.
bool QVideoSurfaceGstDelegate::event(QEvent *event)
{
    if (event->type() != QEvent::UpdateRequest)
        return QObject::event(event);

    QMutexLocker locker(&m_mutex);

    if (m_notified) {
        while (handleEvent(&locker)) {}
        m_notified = false;
    }

    return true;
}
// Services at most one pending request from the streaming thread, in fixed
// priority order: flush, stop, start (m_startCaps), render.  Surface calls
// are made with the mutex released so the streaming thread is not blocked
// behind them.  Returns true while there may be more work queued; returning
// false wakes any thread blocked in waitForAsyncEvent() on setup.
bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
{
    if (m_flush) {
        m_flush = false;
        if (m_activeRenderer) {
            locker->unlock();

            m_activeRenderer->flush(m_surface);
        }
    } else if (m_stop) {
        m_stop = false;

        if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();

            activePool->stop(m_surface);

            locker->relock();
        }
    } else if (m_startCaps) {
        // A stop request is always queued together with (and serviced
        // before) a start while a renderer is active.
        Q_ASSERT(!m_activeRenderer);

        // Take ownership of the queued caps before dropping the lock.
        GstCaps * const startCaps = m_startCaps;
        m_startCaps = 0;

        if (m_renderer && m_surface) {
            locker->unlock();

            const bool started = m_renderer->start(m_surface, startCaps);

            locker->relock();

            m_activeRenderer = started
                    ? m_renderer
                    : 0;
        } else if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();

            activePool->stop(m_surface);

            locker->relock();
        }

        gst_caps_unref(startCaps);
    } else if (m_render) {
        m_render = false;

        if (m_activeRenderer && m_surface && m_lastBuffer) {
            // Hold an extra ref while the lock is released: flush()/stop()
            // may drop m_lastBuffer concurrently.
            GstBuffer *buffer = m_lastBuffer;
            gst_buffer_ref(buffer);

            locker->unlock();

            const bool rendered = m_activeRenderer->present(m_surface, buffer);

            gst_buffer_unref(buffer);

            locker->relock();

            m_renderReturn = rendered
                    ? GST_FLOW_OK
                    : GST_FLOW_ERROR;

            // Unblock the streaming thread waiting in render().
            m_renderCondition.wakeAll();
        } else {
            m_renderReturn = GST_FLOW_ERROR;
            m_renderCondition.wakeAll();
        }
    } else {
        // Queue drained; wake setup waiters (start()/stop()).
        m_setupCondition.wakeAll();

        return false;
    }
    return true;
}
// Posts a single wake-up event to the delegate's thread; m_notified
// coalesces repeated requests until event() has drained the queue.
// Caller must hold m_mutex.
void QVideoSurfaceGstDelegate::notify()
{
    if (!m_notified) {
        m_notified = true;
        QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
    }
}
// Waits for the queued requests to be serviced.  If already on the
// delegate's own thread the queue is drained synchronously (no deadlock);
// otherwise the delegate's thread is notified and we block on `condition`
// for at most `time` ms.  Returns false only on timeout.
bool QVideoSurfaceGstDelegate::waitForAsyncEvent(
        QMutexLocker *locker, QWaitCondition *condition, unsigned long time)
{
    if (QThread::currentThread() == thread()) {
        while (handleEvent(locker)) {}
        m_notified = false;

        return true;
    } else {
        notify();

        return condition->wait(&m_mutex, time);
    }
}
// Recomputes the advertised caps by picking the first renderer that
// produces non-empty caps for the surface.
//
// Fixes from the original: the `else` branch called gst_caps_unref(caps)
// when caps was necessarily null (it was declared in the `if` condition),
// which provokes a GLib critical warning; and the in-loop
// `if (m_surfaceCaps) gst_caps_unref(m_surfaceCaps)` was dead code, since
// m_surfaceCaps is cleared before the loop and the loop breaks right after
// assigning it.
void QVideoSurfaceGstDelegate::updateSupportedFormats()
{
    if (m_surfaceCaps) {
        gst_caps_unref(m_surfaceCaps);
        m_surfaceCaps = 0;
    }

    foreach (QGstVideoRenderer *pool, m_renderers) {
        if (GstCaps *caps = pool->getCaps(m_surface)) {
            if (gst_caps_is_empty(caps)) {
                gst_caps_unref(caps);
                continue;
            }

            m_renderer = pool;
            m_surfaceCaps = caps;
            break;
        }
    }
}
// Parent class vtable, captured in class_init() so overrides can chain up.
static GstVideoSinkClass *sink_parent_class;

// Casts a GObject instance pointer back to our sink type, binding it to a
// local named `sink`.
#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
// Factory: instantiates the GObject sink, attaches a delegate bridging to
// the given surface, and watches "show-preroll-frame" so paused-state
// frame visibility tracks the property.
QGstVideoRendererSink *QGstVideoRendererSink::createSink(QAbstractVideoSurface *surface)
{
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(
            g_object_new(QGstVideoRendererSink::get_type(), 0));

    // Owned by the sink; released in finalize().
    sink->delegate = new QVideoSurfaceGstDelegate(surface);

    g_signal_connect(
                G_OBJECT(sink),
                "notify::show-preroll-frame",
                G_CALLBACK(QVideoSurfaceGstDelegate::handleShowPrerollChange),
                sink->delegate);

    return sink;
}
// Registers (once) and returns the GType for the sink, derived from
// GstVideoSink.
// NOTE(review): the `type == 0` check is not thread-safe; g_once_init_enter/
// leave would make concurrent first calls safe — confirm whether callers
// guarantee single-threaded first use.
GType QGstVideoRendererSink::get_type()
{
    static GType type = 0;

    if (type == 0) {
        static const GTypeInfo info =
        {
            sizeof(QGstVideoRendererSinkClass),                    // class_size
            base_init,                                             // base_init
            NULL,                                                  // base_finalize
            class_init,                                            // class_init
            NULL,                                                  // class_finalize
            NULL,                                                  // class_data
            sizeof(QGstVideoRendererSink),                         // instance_size
            0,                                                     // n_preallocs
            instance_init,                                         // instance_init
            0                                                      // value_table
        };

        type = g_type_register_static(
                GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink", &info, GTypeFlags(0));
    }

    return type;
}
// GObject class initializer: records the parent vtable for chaining, then
// installs the sink's overrides on the base-sink, element and object
// class vtables.
void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);

    sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->preroll = QGstVideoRendererSink::preroll;
    base_sink_class->render = QGstVideoRendererSink::render;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
}
// Installs the always-present sink pad template accepting any raw video
// (GStreamer 1.0 "video/x-raw" syntax); concrete formats are narrowed at
// runtime via get_caps().
void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}
// GObject instance initializer.  The delegate is assigned later by
// createSink(); start from a well-defined null.
void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    VO_SINK(instance);

    Q_UNUSED(g_class);

    sink->delegate = 0;
}
// GObject finalizer: destroys the delegate created in createSink(), then
// chains up so the parent class can finalize too.
void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->delegate;

    // Chain up
    G_OBJECT_CLASS(sink_parent_class)->finalize(object);
}
// Nothing sink-specific happens on state changes; chain straight up to the
// parent class.  (The original marked `element` with Q_UNUSED even though
// it is passed through below — that contradictory annotation is removed.)
GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    return GST_ELEMENT_CLASS(sink_parent_class)->change_state(
            element, transition);
}
// GstBaseSink::get_caps override: returns the delegate's caps, intersected
// with the optional upstream filter.  The delegate hands us a reference;
// when filtering, that reference is exchanged for the intersection.
GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
{
    VO_SINK(base);

    GstCaps *caps = sink->delegate->caps();
    GstCaps *unfiltered = caps;
    if (filter) {
        caps = gst_caps_intersect(unfiltered, filter);
        gst_caps_unref(unfiltered);
    }

    return caps;
}
// GstBaseSink::set_caps override.  Null caps signals stream teardown and
// stops the surface; otherwise the surface is (re)started with the newly
// negotiated format, and failure to start rejects the caps.
gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *caps)
{
    VO_SINK(base);

#ifdef DEBUG_VIDEO_SURFACE_SINK
    qDebug() << "set_caps:";
    qDebug() << caps;
#endif

    if (!caps) {
        sink->delegate->stop();

        return TRUE;
    }

    return sink->delegate->start(caps) ? TRUE : FALSE;
}
// GstBaseSink::propose_allocation override: defers entirely to the active
// renderer via the delegate.
gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->delegate->proposeAllocation(query);
}
// GstBaseSink::preroll override: shows the preroll buffer only when the
// sink's "show-preroll-frame" property allows it; the delegate retains the
// buffer either way so it can be shown later.
GstFlowReturn QGstVideoRendererSink::preroll(GstBaseSink *base, GstBuffer *buffer)
{
    VO_SINK(base);

    gboolean showPreroll = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(base), "show-preroll-frame", &showPreroll, NULL);

    return sink->delegate->render(buffer, showPreroll); // display frame
}
// GstBaseSink::render override: every playing-state buffer is presented.
GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->delegate->render(buffer, true);
}
QT_END_NAMESPACE

View File

@@ -41,8 +41,13 @@
#include <private/qmediapluginloader_p.h>
#include "qgstvideobuffer_p.h"
#include "qgstutils_p.h"
#include "qvideosurfacegstsink_p.h"
#if GST_VERSION_MAJOR >=1
#include <gst/video/video.h>
#endif
//#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE
@@ -62,10 +67,12 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(
if (m_surface) {
foreach (QObject *instance, bufferPoolLoader()->instances(QGstBufferPoolPluginKey)) {
QGstBufferPoolInterface* plugin = qobject_cast<QGstBufferPoolInterface*>(instance);
if (plugin) {
m_pools.append(plugin);
}
}
updateSupportedFormats();
connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
}
@@ -191,13 +198,15 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
m_format.frameSize(),
m_format.pixelFormat());
QVideoSurfaceGstSink::setFrameTimeStamps(&m_frame, buffer);
QGstUtils::setFrameTimeStamps(&m_frame, buffer);
m_renderReturn = GST_FLOW_OK;
if (QThread::currentThread() == thread()) {
if (!m_surface.isNull())
m_surface->present(m_frame);
else
qWarning() << "m_surface.isNull().";
} else {
QMetaObject::invokeMethod(this, "queuedRender", Qt::QueuedConnection);
m_renderCondition.wait(&m_mutex, 300);
@@ -283,90 +292,6 @@ void QVideoSurfaceGstDelegate::updateSupportedFormats()
}
}
struct YuvFormat
{
QVideoFrame::PixelFormat pixelFormat;
guint32 fourcc;
int bitsPerPixel;
};
static const YuvFormat qt_yuvColorLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
{ QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
{ QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
{ QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
{ QVideoFrame::Format_NV12, GST_MAKE_FOURCC('N','V','1','2'), 8 },
{ QVideoFrame::Format_NV21, GST_MAKE_FOURCC('N','V','2','1'), 8 },
{ QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};
static int indexOfYuvColor(QVideoFrame::PixelFormat format)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].pixelFormat == format)
return i;
return -1;
}
static int indexOfYuvColor(guint32 fourcc)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].fourcc == fourcc)
return i;
return -1;
}
struct RgbFormat
{
QVideoFrame::PixelFormat pixelFormat;
int bitsPerPixel;
int depth;
int endianness;
int red;
int green;
int blue;
int alpha;
};
static const RgbFormat qt_rgbColorLookup[] =
{
{ QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
{ QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
{ QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
{ QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};
static int indexOfRgbColor(
int bits, int depth, int endianness, int red, int green, int blue, int alpha)
{
const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
for (int i = 0; i < count; ++i) {
if (qt_rgbColorLookup[i].bitsPerPixel == bits
&& qt_rgbColorLookup[i].depth == depth
&& qt_rgbColorLookup[i].endianness == endianness
&& qt_rgbColorLookup[i].red == red
&& qt_rgbColorLookup[i].green == green
&& qt_rgbColorLookup[i].blue == blue
&& qt_rgbColorLookup[i].alpha == alpha) {
return i;
}
}
return -1;
}
static GstVideoSinkClass *sink_parent_class;
#define VO_SINK(s) QVideoSurfaceGstSink *sink(reinterpret_cast<QVideoSurfaceGstSink *>(s))
@@ -494,8 +419,6 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
{
VO_SINK(base);
GstCaps *caps = gst_caps_new_empty();
// Find the supported pixel formats
// with buffer pool specific formats listed first
QList<QVideoFrame::PixelFormat> supportedFormats;
@@ -503,6 +426,7 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
QList<QVideoFrame::PixelFormat> poolHandleFormats;
sink->delegate->poolMutex()->lock();
QGstBufferPoolInterface *pool = sink->delegate->pool();
if (pool)
poolHandleFormats = sink->delegate->supportedPixelFormats(pool->handleType());
sink->delegate->poolMutex()->unlock();
@@ -513,47 +437,7 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
supportedFormats.append(format);
}
foreach (QVideoFrame::PixelFormat format, supportedFormats) {
int index = indexOfYuvColor(format);
if (index != -1) {
gst_caps_append_structure(caps, gst_structure_new(
"video/x-raw-yuv",
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
"width" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"height" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"format" , GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
NULL));
continue;
}
const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
for (int i = 0; i < count; ++i) {
if (qt_rgbColorLookup[i].pixelFormat == format) {
GstStructure *structure = gst_structure_new(
"video/x-raw-rgb",
"framerate" , GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
"width" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"height" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"bpp" , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
"depth" , G_TYPE_INT, qt_rgbColorLookup[i].depth,
"endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
"red_mask" , G_TYPE_INT, qt_rgbColorLookup[i].red,
"green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
"blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
NULL);
if (qt_rgbColorLookup[i].alpha != 0) {
gst_structure_set(
structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, NULL);
}
gst_caps_append_structure(caps, structure);
}
}
}
return caps;
return QGstUtils::capsForFormats(supportedFormats);
}
gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
@@ -575,7 +459,7 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
QAbstractVideoBuffer::HandleType handleType =
pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
QVideoSurfaceFormat format = formatForCaps(caps, &bytesPerLine, handleType);
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine, handleType);
if (sink->delegate->isActive()) {
QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat();
@@ -592,7 +476,7 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
sink->lastRequestedCaps = 0;
#ifdef DEBUG_VIDEO_SURFACE_SINK
qDebug() << "Staring video surface, format:";
qDebug() << "Starting video surface, format:";
qDebug() << format;
qDebug() << "bytesPerLine:" << bytesPerLine;
#endif
@@ -606,87 +490,6 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
return FALSE;
}
QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
{
const GstStructure *structure = gst_caps_get_structure(caps, 0);
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
int bitsPerPixel = 0;
QSize size;
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1)
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
}
if (pixelFormat != QVideoFrame::Format_Invalid) {
QVideoSurfaceFormat format(size, pixelFormat, handleType);
QPair<int, int> rate;
gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
if (rate.second)
format.setFrameRate(qreal(rate.first)/rate.second);
gint aspectNum = 0;
gint aspectDenum = 0;
if (gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0)
format.setPixelAspectRatio(aspectNum, aspectDenum);
}
if (bytesPerLine)
*bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
return format;
}
return QVideoSurfaceFormat();
}
void QVideoSurfaceGstSink::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
// GStreamer uses nanoseconds, Qt uses microseconds
qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
if (startTime >= 0) {
frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));
qint64 duration = GST_BUFFER_DURATION(buffer);
if (duration >= 0)
frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
}
}
GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer)
{
@@ -731,7 +534,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
if (sink->delegate->isActive()) {
//if format was changed, restart the surface
QVideoSurfaceFormat format = formatForCaps(intersection);
QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection);
QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
if (format.pixelFormat() != surfaceFormat.pixelFormat() ||
@@ -749,7 +552,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
QAbstractVideoBuffer::HandleType handleType =
pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
QVideoSurfaceFormat format = formatForCaps(intersection, &bytesPerLine, handleType);
QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection, &bytesPerLine, handleType);
if (!sink->delegate->start(format, bytesPerLine)) {
qWarning() << "failed to start video surface";
@@ -763,7 +566,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
if (!pool->isFormatSupported(surfaceFormat)) {
//qDebug() << "sink doesn't support native pool format, skip custom buffers allocation";
qDebug() << "sink doesn't support native pool format, skip custom buffers allocation";
return GST_FLOW_OK;
}
@@ -787,7 +590,6 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
gboolean QVideoSurfaceGstSink::start(GstBaseSink *base)
{
Q_UNUSED(base);
return TRUE;
}