GStreamer: port to 1.0.

0.10 is still used by default.
To enable GStreamer 1.0, pass GST_VERSION=1.0 to qmake
for qtmultimedia.pro.

Contributions from:
Andrew den Exter <andrew.den.exter@qinetic.com.au>
Ilya Smelykh <ilya@videoexpertsgroup.com>
Jim Hodapp <jim.hodapp@canonical.com>
Sergio Schvezov <sergio.schvezov@canonical.com>

Change-Id: I72a46d1170a8794a149bdb5e20767afcc5b7587c
Reviewed-by: Andrew den Exter <andrew.den.exter@qinetic.com.au>
This commit is contained in:
Yoann Lopes
2014-11-20 17:54:18 +01:00
committed by Andrew den Exter
parent 7e3d69668e
commit 108dda7a90
71 changed files with 3669 additions and 1382 deletions

View File

@@ -3,11 +3,10 @@ SOURCES += main.cpp
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG += \ PKGCONFIG += \
gstreamer-0.10 \ gstreamer-$$GST_VERSION \
gstreamer-base-0.10 \ gstreamer-base-$$GST_VERSION \
gstreamer-interfaces-0.10 \ gstreamer-audio-$$GST_VERSION \
gstreamer-audio-0.10 \ gstreamer-video-$$GST_VERSION \
gstreamer-video-0.10 \ gstreamer-pbutils-$$GST_VERSION
gstreamer-pbutils-0.10

View File

@@ -3,11 +3,8 @@ SOURCES += main.cpp
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG += \ PKGCONFIG += \
gstreamer-0.10 \ gstreamer-$$GST_VERSION \
gstreamer-base-0.10 \ gstreamer-base-$$GST_VERSION \
gstreamer-interfaces-0.10 \ gstreamer-audio-$$GST_VERSION \
gstreamer-audio-0.10 \ gstreamer-video-$$GST_VERSION \
gstreamer-video-0.10 \ gstreamer-pbutils-$$GST_VERSION
gstreamer-app-0.10

View File

@@ -2,11 +2,10 @@ SOURCES += main.cpp
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG += \
gstreamer-0.10 \
gstreamer-base-0.10 \
gstreamer-interfaces-0.10 \
gstreamer-audio-0.10 \
gstreamer-video-0.10 \
gstreamer-pbutils-0.10
PKGCONFIG += \
gstreamer-$$GST_VERSION \
gstreamer-base-$$GST_VERSION \
gstreamer-audio-$$GST_VERSION \
gstreamer-video-$$GST_VERSION \
gstreamer-pbutils-$$GST_VERSION

View File

@@ -3,12 +3,11 @@ SOURCES += main.cpp
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG += \ PKGCONFIG += \
gstreamer-0.10 \ gstreamer-$$GST_VERSION \
gstreamer-base-0.10 \ gstreamer-base-$$GST_VERSION \
gstreamer-interfaces-0.10 \ gstreamer-audio-$$GST_VERSION \
gstreamer-audio-0.10 \ gstreamer-video-$$GST_VERSION \
gstreamer-video-0.10 \ gstreamer-pbutils-$$GST_VERSION
gstreamer-pbutils-0.10
LIBS += -lgstphotography-0.10 LIBS += -lgstphotography-$$GST_VERSION

View File

@@ -96,7 +96,7 @@ Rectangle {
videoRecorder { videoRecorder {
resolution: "640x480" resolution: "640x480"
frameRate: 15 frameRate: 30
} }
} }

View File

@@ -17,11 +17,27 @@ win32 {
} else { } else {
qtCompileTest(alsa) qtCompileTest(alsa)
qtCompileTest(pulseaudio) qtCompileTest(pulseaudio)
qtCompileTest(gstreamer) { !done_config_gstreamer {
qtCompileTest(gstreamer_photography) gstver=0.10
qtCompileTest(gstreamer_encodingprofiles) !isEmpty(GST_VERSION): gstver=$$GST_VERSION
qtCompileTest(gstreamer_appsrc) cache(GST_VERSION, set, gstver);
qtCompileTest(linux_v4l) qtCompileTest(gstreamer) {
qtCompileTest(gstreamer_photography)
qtCompileTest(gstreamer_encodingprofiles)
qtCompileTest(gstreamer_appsrc)
qtCompileTest(linux_v4l)
} else {
gstver=1.0
cache(GST_VERSION, set, gstver);
# Force a re-run of the test
CONFIG -= done_config_gstreamer
qtCompileTest(gstreamer) {
qtCompileTest(gstreamer_photography)
qtCompileTest(gstreamer_encodingprofiles)
qtCompileTest(gstreamer_appsrc)
qtCompileTest(linux_v4l)
}
}
} }
qtCompileTest(resourcepolicy) qtCompileTest(resourcepolicy)
qtCompileTest(gpu_vivante) qtCompileTest(gpu_vivante)

View File

@@ -2,6 +2,7 @@ TEMPLATE = lib
TARGET = qgsttools_p TARGET = qgsttools_p
QPRO_PWD = $$PWD QPRO_PWD = $$PWD
QT = core-private multimedia-private gui-private QT = core-private multimedia-private gui-private
!static:DEFINES += QT_MAKEDLL !static:DEFINES += QT_MAKEDLL
@@ -15,15 +16,17 @@ LIBS_PRIVATE += \
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG_PRIVATE += \ PKGCONFIG += \
gstreamer-0.10 \ gstreamer-$$GST_VERSION \
gstreamer-base-0.10 \ gstreamer-base-$$GST_VERSION \
gstreamer-interfaces-0.10 \ gstreamer-audio-$$GST_VERSION \
gstreamer-audio-0.10 \ gstreamer-video-$$GST_VERSION \
gstreamer-video-0.10 \ gstreamer-pbutils-$$GST_VERSION
gstreamer-pbutils-0.10
maemo*: PKGCONFIG_PRIVATE +=gstreamer-plugins-bad-0.10 equals(GST_VERSION,"0.10") {
PKGCONFIG_PRIVATE += gstreamer-interfaces-0.10
maemo*: PKGCONFIG_PRIVATE +=gstreamer-plugins-bad-0.10
}
config_resourcepolicy { config_resourcepolicy {
DEFINES += HAVE_RESOURCE_POLICY DEFINES += HAVE_RESOURCE_POLICY
@@ -33,38 +36,36 @@ config_resourcepolicy {
# Header files must go inside source directory of a module # Header files must go inside source directory of a module
# to be installed by syncqt. # to be installed by syncqt.
INCLUDEPATH += ../multimedia/gsttools_headers/ INCLUDEPATH += ../multimedia/gsttools_headers/
INCLUDEPATH += ../plugins/gstreamer/mediaplayer/
VPATH += ../multimedia/gsttools_headers/ VPATH += ../multimedia/gsttools_headers/
PRIVATE_HEADERS += \ PRIVATE_HEADERS += \
qgstbufferpoolinterface_p.h \
qgstreamerbushelper_p.h \ qgstreamerbushelper_p.h \
qgstreamermessage_p.h \ qgstreamermessage_p.h \
qgstutils_p.h \ qgstutils_p.h \
qgstvideobuffer_p.h \ qgstvideobuffer_p.h \
qvideosurfacegstsink_p.h \ qvideosurfacegstsink_p.h \
qgstreamerbufferprobe_p.h \
qgstreamervideorendererinterface_p.h \ qgstreamervideorendererinterface_p.h \
qgstreameraudioinputselector_p.h \ qgstreameraudioinputselector_p.h \
qgstreamervideorenderer_p.h \ qgstreamervideorenderer_p.h \
qgstreamervideoinputdevicecontrol_p.h \ qgstreamervideoinputdevicecontrol_p.h \
gstvideoconnector_p.h \
qgstcodecsinfo_p.h \ qgstcodecsinfo_p.h \
qgstreamervideoprobecontrol_p.h \ qgstreamervideoprobecontrol_p.h \
qgstreameraudioprobecontrol_p.h \ qgstreameraudioprobecontrol_p.h \
qgstreamervideowindow_p.h qgstreamervideowindow_p.h
SOURCES += \ SOURCES += \
qgstbufferpoolinterface.cpp \
qgstreamerbushelper.cpp \ qgstreamerbushelper.cpp \
qgstreamermessage.cpp \ qgstreamermessage.cpp \
qgstutils.cpp \ qgstutils.cpp \
qgstvideobuffer.cpp \ qgstvideobuffer.cpp \
qvideosurfacegstsink.cpp \ qgstreamerbufferprobe.cpp \
qgstreamervideorendererinterface.cpp \ qgstreamervideorendererinterface.cpp \
qgstreameraudioinputselector.cpp \ qgstreameraudioinputselector.cpp \
qgstreamervideorenderer.cpp \ qgstreamervideorenderer.cpp \
qgstreamervideoinputdevicecontrol.cpp \ qgstreamervideoinputdevicecontrol.cpp \
qgstcodecsinfo.cpp \ qgstcodecsinfo.cpp \
gstvideoconnector.c \
qgstreamervideoprobecontrol.cpp \ qgstreamervideoprobecontrol.cpp \
qgstreameraudioprobecontrol.cpp \ qgstreameraudioprobecontrol.cpp \
qgstreamervideowindow.cpp qgstreamervideowindow.cpp
@@ -79,25 +80,54 @@ qtHaveModule(widgets) {
qgstreamervideowidget.cpp qgstreamervideowidget.cpp
} }
maemo6 { equals(GST_VERSION,"0.10") {
PKGCONFIG_PRIVATE += qmsystem2 PRIVATE_HEADERS += \
qgstbufferpoolinterface_p.h \
gstvideoconnector_p.h \
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) { SOURCES += \
PRIVATE_HEADERS += qgstreamergltexturerenderer_p.h qgstbufferpoolinterface.cpp \
SOURCES += qgstreamergltexturerenderer.cpp qvideosurfacegstsink.cpp \
QT += opengl gstvideoconnector.c
LIBS_PRIVATE += -lEGL -lgstmeegointerfaces-0.10
maemo6 {
PKGCONFIG_PRIVATE += qmsystem2
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) {
PRIVATE_HEADERS += qgstreamergltexturerenderer_p.h
SOURCES += qgstreamergltexturerenderer.cpp
QT += opengl
LIBS_PRIVATE += -lEGL -lgstmeegointerfaces-0.10
}
} }
} else {
PRIVATE_HEADERS += \
qgstvideorendererplugin_p.h \
qgstvideorenderersink_p.h
SOURCES += \
qgstvideorendererplugin.cpp \
qgstvideorenderersink.cpp
}
mir: {
contains(QT_CONFIG, opengles2):qtHaveModule(widgets) {
PRIVATE_HEADERS += qgstreamermirtexturerenderer_p.h
SOURCES += qgstreamermirtexturerenderer.cpp
QT += opengl quick
LIBS += -lEGL
}
DEFINES += HAVE_MIR
} }
config_gstreamer_appsrc { config_gstreamer_appsrc {
PKGCONFIG_PRIVATE += gstreamer-app-0.10 PKGCONFIG_PRIVATE += gstreamer-app-$$GST_VERSION
PRIVATE_HEADERS += qgstappsrc_p.h PRIVATE_HEADERS += qgstappsrc_p.h
SOURCES += qgstappsrc.cpp SOURCES += qgstappsrc.cpp
DEFINES += HAVE_GST_APPSRC DEFINES += HAVE_GST_APPSRC
LIBS_PRIVATE += -lgstapp-0.10 LIBS_PRIVATE += -lgstapp-$$GST_VERSION
} }
config_linux_v4l: DEFINES += USE_V4L config_linux_v4l: DEFINES += USE_V4L

View File

@@ -147,23 +147,44 @@ void QGstAppSrc::pushDataToAppSrc()
size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize); size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize);
if (size) { if (size) {
void *data = g_malloc(size); GstBuffer* buffer = gst_buffer_new_and_alloc(size);
GstBuffer* buffer = gst_app_buffer_new(data, size, g_free, data);
#if GST_CHECK_VERSION(1,0,0)
GstMapInfo mapInfo;
gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE);
void* bufferData = mapInfo.data;
#else
void* bufferData = GST_BUFFER_DATA(buffer);
#endif
buffer->offset = m_stream->pos(); buffer->offset = m_stream->pos();
qint64 bytesRead = m_stream->read((char*)GST_BUFFER_DATA(buffer), size); qint64 bytesRead = m_stream->read((char*)bufferData, size);
buffer->offset_end = buffer->offset + bytesRead - 1; buffer->offset_end = buffer->offset + bytesRead - 1;
#if GST_CHECK_VERSION(1,0,0)
gst_buffer_unmap(buffer, &mapInfo);
#endif
if (bytesRead > 0) { if (bytesRead > 0) {
m_dataRequested = false; m_dataRequested = false;
m_enoughData = false; m_enoughData = false;
GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer); GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
if (ret == GST_FLOW_ERROR) { if (ret == GST_FLOW_ERROR) {
qWarning()<<"appsrc: push buffer error"; qWarning()<<"appsrc: push buffer error";
#if GST_CHECK_VERSION(1,0,0)
} else if (ret == GST_FLOW_FLUSHING) {
qWarning()<<"appsrc: push buffer wrong state";
}
#else
} else if (ret == GST_FLOW_WRONG_STATE) { } else if (ret == GST_FLOW_WRONG_STATE) {
qWarning()<<"appsrc: push buffer wrong state"; qWarning()<<"appsrc: push buffer wrong state";
} else if (ret == GST_FLOW_RESEND) { }
#endif
#if GST_VERSION_MAJOR < 1
else if (ret == GST_FLOW_RESEND) {
qWarning()<<"appsrc: push buffer resend"; qWarning()<<"appsrc: push buffer resend";
} }
#endif
} }
} else { } else {
sendEOS(); sendEOS();

View File

@@ -32,7 +32,7 @@
****************************************************************************/ ****************************************************************************/
#include "qgstcodecsinfo_p.h" #include "qgstcodecsinfo_p.h"
#include "qgstutils_p.h"
#include <QtCore/qset.h> #include <QtCore/qset.h>
#ifdef QMEDIA_GSTREAMER_CAMERABIN #ifdef QMEDIA_GSTREAMER_CAMERABIN
@@ -146,7 +146,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
if (fakeEncoderMimeTypes.contains(gst_structure_get_name(structure))) if (fakeEncoderMimeTypes.contains(gst_structure_get_name(structure)))
continue; continue;
GstStructure *newStructure = gst_structure_new(gst_structure_get_name(structure), NULL); GstStructure *newStructure = qt_gst_structure_new_empty(gst_structure_get_name(structure));
//add structure fields to distinguish between formats with similar mime types, //add structure fields to distinguish between formats with similar mime types,
//like audio/mpeg //like audio/mpeg
@@ -166,7 +166,11 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
} }
} }
#if GST_CHECK_VERSION(1,0,0)
res =
#endif
gst_caps_merge_structure(res, newStructure); gst_caps_merge_structure(res, newStructure);
} }
gst_caps_unref(caps); gst_caps_unref(caps);
} }

View File

@@ -37,32 +37,48 @@
QGstreamerAudioProbeControl::QGstreamerAudioProbeControl(QObject *parent) QGstreamerAudioProbeControl::QGstreamerAudioProbeControl(QObject *parent)
: QMediaAudioProbeControl(parent) : QMediaAudioProbeControl(parent)
{ {
} }
QGstreamerAudioProbeControl::~QGstreamerAudioProbeControl() QGstreamerAudioProbeControl::~QGstreamerAudioProbeControl()
{ {
} }
void QGstreamerAudioProbeControl::bufferProbed(GstBuffer* buffer) void QGstreamerAudioProbeControl::probeCaps(GstCaps *caps)
{ {
GstCaps* caps = gst_buffer_get_caps(buffer);
if (!caps)
return;
QAudioFormat format = QGstUtils::audioFormatForCaps(caps); QAudioFormat format = QGstUtils::audioFormatForCaps(caps);
gst_caps_unref(caps);
if (!format.isValid())
return;
QAudioBuffer audioBuffer = QAudioBuffer(QByteArray((const char*)buffer->data, buffer->size), format); QMutexLocker locker(&m_bufferMutex);
m_format = format;
}
{ bool QGstreamerAudioProbeControl::probeBuffer(GstBuffer *buffer)
QMutexLocker locker(&m_bufferMutex); {
m_pendingBuffer = audioBuffer; qint64 position = GST_BUFFER_TIMESTAMP(buffer);
QMetaObject::invokeMethod(this, "bufferProbed", Qt::QueuedConnection); position = position >= 0
? position / G_GINT64_CONSTANT(1000) // microseconds
: -1;
QByteArray data;
#if GST_CHECK_VERSION(1,0,0)
GstMapInfo info;
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
data = QByteArray(reinterpret_cast<const char *>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
} else {
return true;
} }
#else
data = QByteArray(reinterpret_cast<const char *>(buffer->data), buffer->size);
#endif
QMutexLocker locker(&m_bufferMutex);
if (m_format.isValid()) {
if (!m_pendingBuffer.isValid())
QMetaObject::invokeMethod(this, "bufferProbed", Qt::QueuedConnection);
m_pendingBuffer = QAudioBuffer(data, m_format, position);
}
return true;
} }
void QGstreamerAudioProbeControl::bufferProbed() void QGstreamerAudioProbeControl::bufferProbed()
@@ -73,6 +89,7 @@ void QGstreamerAudioProbeControl::bufferProbed()
if (!m_pendingBuffer.isValid()) if (!m_pendingBuffer.isValid())
return; return;
audioBuffer = m_pendingBuffer; audioBuffer = m_pendingBuffer;
m_pendingBuffer = QAudioBuffer();
} }
emit audioBufferProbed(audioBuffer); emit audioBufferProbed(audioBuffer);
} }

View File

@@ -0,0 +1,174 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstreamerbufferprobe_p.h"
#include "qgstutils_p.h"
QT_BEGIN_NAMESPACE
// Buffer/caps probe helper shared by the audio and video probe controls.
// Member layout differs per GStreamer major version: 1.0 tracks a separate
// caps-probe id, while 0.10 caches the last caps seen per-buffer.
QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
#if GST_CHECK_VERSION(1,0,0)
    : m_capsProbeId(-1) // -1 == no caps probe installed
#else
    : m_caps(0)         // last caps observed (0.10 has no caps events)
#endif
    , m_bufferProbeId(-1)
    , m_flags(flags)    // which of caps/buffers the subclass wants delivered
{
}
QGstreamerBufferProbe::~QGstreamerBufferProbe()
{
#if !GST_CHECK_VERSION(1,0,0)
    // The 0.10 path keeps a cached reference to the last caps; drop it here.
    if (m_caps)
        gst_caps_unref(m_caps);
#endif
}
// Attach this probe to a pad. Reports the pad's current caps immediately
// (if any), then installs version-appropriate pad probes.
void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
{
    if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
        probeCaps(caps);
        gst_caps_unref(caps);
    }
#if GST_CHECK_VERSION(1,0,0)
    if (m_flags & ProbeCaps) {
        // Caps changes arrive as events in 1.0; 'downstream' picks which
        // direction of event traffic to watch.
        m_capsProbeId = gst_pad_add_probe(
                    pad,
                    downstream
                        ? GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM
                        : GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                    capsProbe,
                    this,
                    NULL);
    }
    if (m_flags & ProbeBuffers) {
        m_bufferProbeId = gst_pad_add_probe(
                    pad, GST_PAD_PROBE_TYPE_BUFFER, bufferProbe, this, NULL);
    }
#else
    // 0.10 has no caps events; caps are sampled per-buffer in bufferProbe().
    Q_UNUSED(downstream);

    m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(bufferProbe), this);
#endif
}
// Detach previously installed probes; safe to call when none are installed.
void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
    if (m_capsProbeId != -1) {
        gst_pad_remove_probe(pad, m_capsProbeId);
        m_capsProbeId = -1;
    }
    if (m_bufferProbeId != -1) {
        gst_pad_remove_probe(pad, m_bufferProbeId);
        m_bufferProbeId = -1;
    }
#else
    if (m_bufferProbeId != -1) {
        gst_pad_remove_buffer_probe(pad, m_bufferProbeId);
        m_bufferProbeId = -1;
        // Forget the cached caps so a re-attached probe re-reports them.
        if (m_caps) {
            gst_caps_unref(m_caps);
            m_caps = 0;
        }
    }
#endif
}
// Default no-op; subclasses override to inspect negotiated caps.
void QGstreamerBufferProbe::probeCaps(GstCaps *)
{
}
// Default implementation keeps every buffer; subclasses return false to
// have the buffer dropped (1.0 path only — see bufferProbe()).
bool QGstreamerBufferProbe::probeBuffer(GstBuffer *)
{
    return true;
}
#if GST_CHECK_VERSION(1,0,0)
// GStreamer 1.0 pad probe: watches for CAPS events on the pad and forwards
// the parsed caps to the virtual probeCaps() hook.
GstPadProbeReturn QGstreamerBufferProbe::capsProbe(
        GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (GstEvent * const event = gst_pad_probe_info_get_event(info)) {
        if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) {
            GstCaps *caps; // owned by the event; not unreffed here
            gst_event_parse_caps(event, &caps);

            control->probeCaps(caps);
        }
    }
    return GST_PAD_PROBE_OK;
}
// GStreamer 1.0 pad probe: hands each buffer to probeBuffer(), dropping the
// buffer when the subclass returns false.
GstPadProbeReturn QGstreamerBufferProbe::bufferProbe(
        GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
    if (GstBuffer * const buffer = gst_pad_probe_info_get_buffer(info))
        return control->probeBuffer(buffer) ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
    return GST_PAD_PROBE_OK;
}
#else
// GStreamer 0.10 buffer probe. 0.10 has no caps events, so caps changes are
// detected here by comparing each buffer's caps against the cached ones.
gboolean QGstreamerBufferProbe::bufferProbe(GstElement *, GstBuffer *buffer, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (control->m_flags & ProbeCaps) {
        GstCaps *caps = gst_buffer_get_caps(buffer);
        if (caps && (!control->m_caps || !gst_caps_is_equal(control->m_caps, caps))) {
            // Swap the new caps into the cache; 'caps' now holds the old
            // (cached) reference, released below.
            qSwap(caps, control->m_caps);
            control->probeCaps(control->m_caps);
        }
        if (caps)
            gst_caps_unref(caps);
    }

    if (control->m_flags & ProbeBuffers) {
        // FALSE drops the buffer in the 0.10 probe API.
        return control->probeBuffer(buffer) ? TRUE : FALSE;
    } else {
        return TRUE;
    }
}
#endif
QT_END_NAMESPACE

View File

@@ -154,13 +154,21 @@ QGstreamerBusHelper::QGstreamerBusHelper(GstBus* bus, QObject* parent):
QObject(parent) QObject(parent)
{ {
d = new QGstreamerBusHelperPrivate(this, bus); d = new QGstreamerBusHelperPrivate(this, bus);
#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d, 0);
#else
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d); gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d);
#endif
gst_object_ref(GST_OBJECT(bus)); gst_object_ref(GST_OBJECT(bus));
} }
QGstreamerBusHelper::~QGstreamerBusHelper() QGstreamerBusHelper::~QGstreamerBusHelper()
{ {
#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(d->bus(), 0, 0, 0);
#else
gst_bus_set_sync_handler(d->bus(),0,0); gst_bus_set_sync_handler(d->bus(),0,0);
#endif
gst_object_unref(GST_OBJECT(d->bus())); gst_object_unref(GST_OBJECT(d->bus()));
} }

View File

@@ -0,0 +1,351 @@
/****************************************************************************
**
** Copyright (C) 2014 Canonical Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstreamermirtexturerenderer_p.h"
#include <qgstreamerplayersession.h>
#include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QAbstractVideoBuffer>
#include <QGuiApplication>
#include <QDebug>
#include <QtQuick/QQuickWindow>
#include <QOpenGLContext>
#include <QGLContext>
#include <QGuiApplication>
#include <qgl.h>
#include <gst/gst.h>
static QGstreamerMirTextureRenderer *rendererInstance = NULL;
// Zero-copy video buffer exposing a GL texture id to the video surface;
// the pixel data stays on the GPU, so CPU-side mapping is unsupported.
class QGstreamerMirTextureBuffer : public QAbstractVideoBuffer
{
public:
    QGstreamerMirTextureBuffer(GLuint textureId) :
        QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle),
        m_textureId(textureId)
    {
    }

    MapMode mapMode() const { return NotMapped; }

    // No CPU access to the texture: always returns NULL and leaves the
    // out-parameters untouched.
    uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
    {
        qDebug() << Q_FUNC_INFO;
        Q_UNUSED(mode);
        Q_UNUSED(numBytes);
        Q_UNUSED(bytesPerLine);
        return NULL;
    }

    void unmap() { qDebug() << Q_FUNC_INFO; }

    // Handle consumed by the video surface to bind the texture.
    QVariant handle() const { return QVariant::fromValue<unsigned int>(m_textureId); }

    GLuint textureId() { return m_textureId; }

private:
    GLuint m_textureId;
};
QGstreamerMirTextureRenderer::QGstreamerMirTextureRenderer(QObject *parent
        , const QGstreamerPlayerSession *playerSession)
    : QVideoRendererControl(0), m_videoSink(0), m_surface(0),
      m_glSurface(0),
      m_context(0),
      m_glContext(0),
      m_textureId(0),
      m_offscreenSurface(0),
      m_textureBuffer(0)
{
    // NOTE(review): 'parent' is not forwarded to the base class (base gets 0),
    // so this object is not owned by 'parent' — confirm this is intentional.
    Q_UNUSED(parent);
    setPlayerSession(playerSession);
}
QGstreamerMirTextureRenderer::~QGstreamerMirTextureRenderer()
{
    // Release the sink reference taken via gst_object_ref_sink() in videoSink().
    if (m_videoSink)
        gst_object_unref(GST_OBJECT(m_videoSink));

    delete m_glContext;
    delete m_offscreenSurface;
}
// Lazily creates the mirsink element, wires up its frame-ready signal and a
// one-shot buffer probe, and returns it (may be 0 if no surface is set yet).
GstElement *QGstreamerMirTextureRenderer::videoSink()
{
    qDebug() << Q_FUNC_INFO;
    // FIXME: Ugly hack until I figure out why passing this segfaults in the g_signal handler
    rendererInstance = const_cast<QGstreamerMirTextureRenderer*>(this);

    if (!m_videoSink && m_surface) {
        qDebug() << Q_FUNC_INFO << ": using mirsink, (this: " << this << ")";

        m_videoSink = gst_element_factory_make("mirsink", "video-output");

        connect(QGuiApplication::instance(), SIGNAL(focusWindowChanged(QWindow*)),
                this, SLOT(handleFocusWindowChanged(QWindow*)), Qt::QueuedConnection);

        g_signal_connect(G_OBJECT(m_videoSink), "frame-ready", G_CALLBACK(handleFrameReady),
                         (gpointer)this);
    }

    if (m_videoSink) {
        gst_object_ref_sink(GST_OBJECT(m_videoSink));

        GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
                          padBufferProbe, this, NULL);
        // gst_element_get_static_pad() returns a new reference; release it
        // now that the probe is installed (fixes a GstPad leak).
        gst_object_unref(GST_OBJECT(pad));
    }

    return m_videoSink;
}
// Builds a tiny (1x1), input-transparent GL-capable window used purely as an
// offscreen surface for the shared GL context.
QWindow *QGstreamerMirTextureRenderer::createOffscreenWindow(const QSurfaceFormat &format)
{
    QWindow *offscreen = new QWindow;
    offscreen->setSurfaceType(QWindow::OpenGLSurface);
    offscreen->setFormat(format);
    offscreen->setGeometry(0, 0, 1, 1);
    offscreen->setFlags(offscreen->flags() | Qt::WindowTransparentForInput);
    offscreen->create();

    return offscreen;
}
// "frame-ready" signal handler (GStreamer streaming thread): queues a
// renderFrame() call onto the GUI thread.
void QGstreamerMirTextureRenderer::handleFrameReady(gpointer userData)
{
    QGstreamerMirTextureRenderer *renderer = reinterpret_cast<QGstreamerMirTextureRenderer*>(userData);
#if 1
    // Workaround (see FIXME at rendererInstance): goes through the file-static
    // instance instead of 'renderer'; 'renderer' is intentionally unused here.
    QMutexLocker locker(&rendererInstance->m_mutex);
    QMetaObject::invokeMethod(rendererInstance, "renderFrame", Qt::QueuedConnection);
#else
    // FIXME!
    //QMutexLocker locker(&renderer->m_mutex);
    QMetaObject::invokeMethod(renderer, "renderFrame", Qt::QueuedConnection);
#endif
}
void QGstreamerMirTextureRenderer::renderFrame()
{
//qDebug() << Q_FUNC_INFO;
if (m_context)
m_context->makeCurrent();
GstState pendingState = GST_STATE_NULL;
GstState newState = GST_STATE_NULL;
// Don't block and return immediately:
GstStateChangeReturn ret = gst_element_get_state(m_videoSink, &newState,
&pendingState, 0);
if (ret == GST_STATE_CHANGE_FAILURE || newState == GST_STATE_NULL||
pendingState == GST_STATE_NULL) {
qWarning() << "Invalid state change for renderer, aborting";
stopRenderer();
return;
}
if (!m_surface->isActive()) {
qDebug() << "m_surface is not active";
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
GstCaps *caps = gst_pad_get_current_caps(pad);
if (caps) {
// Get the native video size from the video sink
QSize newNativeSize = QGstUtils::capsCorrectedResolution(caps);
if (m_nativeSize != newNativeSize) {
m_nativeSize = newNativeSize;
emit nativeSizeChanged();
}
gst_caps_unref(caps);
}
// Start the surface
QVideoSurfaceFormat format(m_nativeSize, QVideoFrame::Format_RGB32, QAbstractVideoBuffer::GLTextureHandle);
qDebug() << "m_nativeSize: " << m_nativeSize;
qDebug() << "format: " << format;
if (!m_surface->start(format)) {
qWarning() << Q_FUNC_INFO << ": failed to start the video surface " << format;
return;
}
}
QGstreamerMirTextureBuffer *buffer = new QGstreamerMirTextureBuffer(m_textureId);
//qDebug() << "frameSize: " << m_surface->surfaceFormat().frameSize();
QVideoFrame frame(buffer, m_surface->surfaceFormat().frameSize(),
m_surface->surfaceFormat().pixelFormat());
frame.setMetaData("TextureId", m_textureId);
// Display the video frame on the surface:
m_surface->present(frame);
}
// One-shot buffer probe on the sink pad: the first buffer queues a native
// size update on the GUI thread, then the probe removes itself.
GstPadProbeReturn QGstreamerMirTextureRenderer::padBufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer userData)
{
    Q_UNUSED(pad);
    Q_UNUSED(info);

    QGstreamerMirTextureRenderer * const renderer =
            reinterpret_cast<QGstreamerMirTextureRenderer *>(userData);
    QMetaObject::invokeMethod(renderer, "updateNativeVideoSize", Qt::QueuedConnection);

    return GST_PAD_PROBE_REMOVE;
}
// Halt presentation on the attached video surface, if any.
void QGstreamerMirTextureRenderer::stopRenderer()
{
    if (!m_surface)
        return;

    m_surface->stop();
}
// Returns the video surface frames are presented on (may be null).
QAbstractVideoSurface *QGstreamerMirTextureRenderer::surface() const
{
    return m_surface;
}
// Swaps the presentation surface: drops the current sink (it must be rebuilt
// for the new surface), re-routes format-change notifications, and emits
// readyChanged()/sinkChanged() as appropriate. Statement order matters here
// (disconnect old before assigning, connect new after) — do not reorder.
void QGstreamerMirTextureRenderer::setSurface(QAbstractVideoSurface *surface)
{
    qDebug() << Q_FUNC_INFO;

    if (m_surface != surface) {
        qDebug() << "Saving current QGLContext";
        m_context = const_cast<QGLContext*>(QGLContext::currentContext());

        if (m_videoSink)
            gst_object_unref(GST_OBJECT(m_videoSink));

        m_videoSink = 0;

        if (m_surface) {
            disconnect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
                       this, SLOT(handleFormatChange()));
        }

        bool wasReady = isReady();

        m_surface = surface;

        if (m_surface) {
            connect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
                    this, SLOT(handleFormatChange()));
        }

        // Only announce a readiness transition, not every surface swap.
        if (wasReady != isReady())
            emit readyChanged(isReady());

        emit sinkChanged();
    }
}
// Stores the (non-owned) player session; const_cast because a mutable
// pointer is kept internally while the API hands in a const one.
void QGstreamerMirTextureRenderer::setPlayerSession(const QGstreamerPlayerSession *playerSession)
{
    m_playerSession = const_cast<QGstreamerPlayerSession*>(playerSession);
}
// The surface's supported-formats list changed: drop the current sink so it
// gets rebuilt against the new formats, and tell listeners to re-query it.
void QGstreamerMirTextureRenderer::handleFormatChange()
{
    qDebug() << "Supported formats list has changed, reload video output";

    if (m_videoSink != 0)
        gst_object_unref(GST_OBJECT(m_videoSink));
    m_videoSink = 0;

    emit sinkChanged();
}
void QGstreamerMirTextureRenderer::updateNativeVideoSize()
{
//qDebug() << Q_FUNC_INFO;
const QSize oldSize = m_nativeSize;
if (m_videoSink) {
// Find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_current_caps(pad);
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
gst_caps_unref(caps);
}
} else {
m_nativeSize = QSize();
}
qDebug() << Q_FUNC_INFO << oldSize << m_nativeSize << m_videoSink;
if (m_nativeSize != oldSize)
emit nativeSizeChanged();
}
// Focus-window change: on first call with no current-thread GL context,
// creates an offscreen window plus a GL context shared with the render
// thread's context, makes it current, and allocates the sink texture.
void QGstreamerMirTextureRenderer::handleFocusWindowChanged(QWindow *window)
{
    qDebug() << Q_FUNC_INFO;

    QOpenGLContext *currContext = QOpenGLContext::currentContext();

    QQuickWindow *w = dynamic_cast<QQuickWindow*>(window);
    // If we don't have a GL context in the current thread, create one and share it
    // with the render thread GL context
    if (!currContext && !m_glContext) {
        // Guard: the focused window may not be a QQuickWindow (e.g. a plain
        // top-level QWindow); previously this dereferenced a null pointer.
        if (!w)
            return;
        // This emulates the new QOffscreenWindow class with Qt5.1
        m_offscreenSurface = createOffscreenWindow(w->openglContext()->surface()->format());
        m_offscreenSurface->setParent(window);
        QOpenGLContext *shareContext = 0;
        if (m_surface)
            shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
        m_glContext = new QOpenGLContext;
        m_glContext->setFormat(m_offscreenSurface->requestedFormat());

        if (shareContext)
            m_glContext->setShareContext(shareContext);

        if (!m_glContext->create())
        {
            qWarning() << "Failed to create new shared context.";
            return;
        }
    }

    if (m_glContext)
        m_glContext->makeCurrent(m_offscreenSurface);

    if (m_textureId == 0) {
        glGenTextures(1, &m_textureId);
        qDebug() << "texture_id (handleFocusWindowChanged): " << m_textureId << endl;
        g_object_set(G_OBJECT(m_videoSink), "texture-id", m_textureId, (char*)NULL);
    }
}

View File

@@ -32,7 +32,8 @@
****************************************************************************/ ****************************************************************************/
#include "qgstreamervideoprobecontrol_p.h" #include "qgstreamervideoprobecontrol_p.h"
#include <private/qvideosurfacegstsink_p.h>
#include "qgstutils_p.h"
#include <private/qgstvideobuffer_p.h> #include <private/qgstvideobuffer_p.h>
QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent) QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent)
@@ -40,12 +41,10 @@ QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent)
, m_flushing(false) , m_flushing(false)
, m_frameProbed(false) , m_frameProbed(false)
{ {
} }
QGstreamerVideoProbeControl::~QGstreamerVideoProbeControl() QGstreamerVideoProbeControl::~QGstreamerVideoProbeControl()
{ {
} }
void QGstreamerVideoProbeControl::startFlushing() void QGstreamerVideoProbeControl::startFlushing()
@@ -67,33 +66,49 @@ void QGstreamerVideoProbeControl::stopFlushing()
m_flushing = false; m_flushing = false;
} }
void QGstreamerVideoProbeControl::bufferProbed(GstBuffer* buffer) void QGstreamerVideoProbeControl::probeCaps(GstCaps *caps)
{ {
if (m_flushing) #if GST_CHECK_VERSION(1,0,0)
return; GstVideoInfo videoInfo;
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &videoInfo);
GstCaps* caps = gst_buffer_get_caps(buffer);
if (!caps)
return;
QMutexLocker locker(&m_frameMutex);
m_videoInfo = videoInfo;
#else
int bytesPerLine = 0; int bytesPerLine = 0;
QVideoSurfaceFormat format = QVideoSurfaceGstSink::formatForCaps(caps, &bytesPerLine); QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
gst_caps_unref(caps);
if (!format.isValid() || !bytesPerLine)
return;
QVideoFrame frame = QVideoFrame(new QGstVideoBuffer(buffer, bytesPerLine), QMutexLocker locker(&m_frameMutex);
format.frameSize(), format.pixelFormat()); m_bytesPerLine = bytesPerLine;
#endif
m_format = format;
}
QVideoSurfaceGstSink::setFrameTimeStamps(&frame, buffer); bool QGstreamerVideoProbeControl::probeBuffer(GstBuffer *buffer)
{
QMutexLocker locker(&m_frameMutex);
if (m_flushing || !m_format.isValid())
return true;
QVideoFrame frame(
#if GST_CHECK_VERSION(1,0,0)
new QGstVideoBuffer(buffer, m_videoInfo),
#else
new QGstVideoBuffer(buffer, m_bytesPerLine),
#endif
m_format.frameSize(),
m_format.pixelFormat());
QGstUtils::setFrameTimeStamps(&frame, buffer);
m_frameProbed = true; m_frameProbed = true;
{ if (!m_pendingFrame.isValid())
QMutexLocker locker(&m_frameMutex);
m_pendingFrame = frame;
QMetaObject::invokeMethod(this, "frameProbed", Qt::QueuedConnection); QMetaObject::invokeMethod(this, "frameProbed", Qt::QueuedConnection);
} m_pendingFrame = frame;
return true;
} }
void QGstreamerVideoProbeControl::frameProbed() void QGstreamerVideoProbeControl::frameProbed()
@@ -104,6 +119,7 @@ void QGstreamerVideoProbeControl::frameProbed()
if (!m_pendingFrame.isValid()) if (!m_pendingFrame.isValid())
return; return;
frame = m_pendingFrame; frame = m_pendingFrame;
m_pendingFrame = QVideoFrame();
} }
emit videoFrameProbed(frame); emit videoFrameProbed(frame);
} }

View File

@@ -35,8 +35,7 @@
#include <private/qvideosurfacegstsink_p.h> #include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h> #include <private/qgstutils_p.h>
#include <qabstractvideosurface.h> #include <qabstractvideosurface.h>
#include <QtCore/qdebug.h>
#include <QDebug>
#include <gst/gst.h> #include <gst/gst.h>

View File

@@ -40,8 +40,13 @@
#include <QtGui/qpainter.h> #include <QtGui/qpainter.h>
#include <gst/gst.h> #include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h> #include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h> #include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -130,8 +135,6 @@ void QGstreamerVideoWidgetControl::createVideoWidget()
m_videoSink = gst_element_factory_make ("ximagesink", NULL); m_videoSink = gst_element_factory_make ("ximagesink", NULL);
qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
} }
GstElement *QGstreamerVideoWidgetControl::videoSink() GstElement *QGstreamerVideoWidgetControl::videoSink()
@@ -169,9 +172,13 @@ bool QGstreamerVideoWidgetControl::processSyncMessage(const QGstreamerMessage &m
{ {
GstMessage* gm = message.rawMessage(); GstMessage* gm = message.rawMessage();
#if !GST_CHECK_VERSION(1,0,0)
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) && if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id")) { gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
#else
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gst_message_get_structure(gm), "prepare-window-handle")) {
#endif
setOverlay(); setOverlay();
QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection); QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
return true; return true;
@@ -199,17 +206,24 @@ bool QGstreamerVideoWidgetControl::processBusMessage(const QGstreamerMessage &me
void QGstreamerVideoWidgetControl::setOverlay() void QGstreamerVideoWidgetControl::setOverlay()
{ {
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId); gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
} }
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#endif
} }
void QGstreamerVideoWidgetControl::updateNativeVideoSize() void QGstreamerVideoWidgetControl::updateNativeVideoSize()
{ {
if (m_videoSink) { if (m_videoSink) {
//find video native size to update video widget size hint //find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink"); GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad); GstCaps *caps = qt_gst_pad_get_current_caps(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
if (caps) { if (caps) {
@@ -225,8 +239,13 @@ void QGstreamerVideoWidgetControl::updateNativeVideoSize()
void QGstreamerVideoWidgetControl::windowExposed() void QGstreamerVideoWidgetControl::windowExposed()
{ {
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink)); gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
#endif
} }
QWidget *QGstreamerVideoWidgetControl::videoWidget() QWidget *QGstreamerVideoWidgetControl::videoWidget()

View File

@@ -37,36 +37,49 @@
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <gst/gst.h> #include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h> #include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h> #include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName) QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName)
: QVideoWindowControl(parent) : QVideoWindowControl(parent)
, QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
, m_videoSink(0) , m_videoSink(0)
, m_windowId(0) , m_windowId(0)
, m_aspectRatioMode(Qt::KeepAspectRatio) , m_aspectRatioMode(Qt::KeepAspectRatio)
, m_fullScreen(false) , m_fullScreen(false)
, m_colorKey(QColor::Invalid) , m_colorKey(QColor::Invalid)
{ {
if (elementName) if (elementName) {
m_videoSink = gst_element_factory_make(elementName, NULL); m_videoSink = gst_element_factory_make(elementName, NULL);
else } else {
m_videoSink = gst_element_factory_make("xvimagesink", NULL); m_videoSink = gst_element_factory_make("xvimagesink", NULL);
}
if (m_videoSink) { if (m_videoSink) {
qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink"); GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this); addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
else
qDebug() << "No m_videoSink available!";
} }
QGstreamerVideoWindow::~QGstreamerVideoWindow() QGstreamerVideoWindow::~QGstreamerVideoWindow()
{ {
if (m_videoSink) if (m_videoSink) {
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
gst_object_unref(GST_OBJECT(m_videoSink)); gst_object_unref(GST_OBJECT(m_videoSink));
}
} }
WId QGstreamerVideoWindow::winId() const WId QGstreamerVideoWindow::winId() const
@@ -82,11 +95,15 @@ void QGstreamerVideoWindow::setWinId(WId id)
WId oldId = m_windowId; WId oldId = m_windowId;
m_windowId = id; m_windowId = id;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId); gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
} }
#endif
if (!oldId) if (!oldId)
emit readyChanged(true); emit readyChanged(true);
@@ -97,20 +114,26 @@ void QGstreamerVideoWindow::setWinId(WId id)
bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message) bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
{ {
GstMessage* gm = message.rawMessage(); GstMessage* gm = message.rawMessage();
#if GST_CHECK_VERSION(1,0,0)
const GstStructure *s = gst_message_get_structure(gm);
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(s, "prepare-window-handle") &&
m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
return true;
}
#else
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) && if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id") && gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId); gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
gst_object_unref(GST_OBJECT(pad));
return true; return true;
} }
#endif
return false; return false;
} }
@@ -122,7 +145,19 @@ QRect QGstreamerVideoWindow::displayRect() const
void QGstreamerVideoWindow::setDisplayRect(const QRect &rect) void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
{ {
m_displayRect = rect; m_displayRect = rect;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
if (m_displayRect.isEmpty())
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink), -1, -1, -1, -1);
else
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink),
m_displayRect.x(),
m_displayRect.y(),
m_displayRect.width(),
m_displayRect.height());
repaint();
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
#if GST_VERSION_MICRO >= 29 #if GST_VERSION_MICRO >= 29
if (m_displayRect.isEmpty()) if (m_displayRect.isEmpty())
@@ -136,6 +171,7 @@ void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
repaint(); repaint();
#endif #endif
} }
#endif
} }
Qt::AspectRatioMode QGstreamerVideoWindow::aspectRatioMode() const Qt::AspectRatioMode QGstreamerVideoWindow::aspectRatioMode() const
@@ -157,6 +193,16 @@ void QGstreamerVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
void QGstreamerVideoWindow::repaint() void QGstreamerVideoWindow::repaint()
{ {
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL;
GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
}
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state //don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL; GstState state = GST_STATE_NULL;
@@ -165,6 +211,7 @@ void QGstreamerVideoWindow::repaint()
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink)); gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
} }
} }
#endif
} }
QColor QGstreamerVideoWindow::colorKey() const QColor QGstreamerVideoWindow::colorKey() const
@@ -296,32 +343,22 @@ QSize QGstreamerVideoWindow::nativeSize() const
return m_nativeSize; return m_nativeSize;
} }
void QGstreamerVideoWindow::padBufferProbe(GstPad *pad, GstBuffer * /* buffer */, gpointer user_data) void QGstreamerVideoWindow::probeCaps(GstCaps *caps)
{ {
QGstreamerVideoWindow *control = reinterpret_cast<QGstreamerVideoWindow*>(user_data); QSize resolution = QGstUtils::capsCorrectedResolution(caps);
QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection); QMetaObject::invokeMethod(
gst_pad_remove_buffer_probe(pad, control->m_bufferProbeId); this,
"updateNativeVideoSize",
Qt::QueuedConnection,
Q_ARG(QSize, resolution));
} }
void QGstreamerVideoWindow::updateNativeVideoSize() void QGstreamerVideoWindow::updateNativeVideoSize(const QSize &size)
{ {
const QSize oldSize = m_nativeSize; if (m_nativeSize != size) {
m_nativeSize = QSize(); m_nativeSize = size;
if (m_videoSink) {
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
gst_caps_unref(caps);
}
}
if (m_nativeSize != oldSize)
emit nativeSizeChanged(); emit nativeSizeChanged();
}
} }
GstElement *QGstreamerVideoWindow::videoSink() GstElement *QGstreamerVideoWindow::videoSink()

View File

@@ -40,7 +40,14 @@
#include <QtCore/qsize.h> #include <QtCore/qsize.h>
#include <QtCore/qset.h> #include <QtCore/qset.h>
#include <QtCore/qstringlist.h> #include <QtCore/qstringlist.h>
#include <QtGui/qimage.h>
#include <qaudioformat.h> #include <qaudioformat.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <gst/audio/audio.h>
#include <gst/video/video.h>
template<typename T, int N> static int lengthOf(const T (&)[N]) { return N; }
#ifdef USE_V4L #ifdef USE_V4L
# include <private/qcore_unix_p.h> # include <private/qcore_unix_p.h>
@@ -82,15 +89,24 @@ static void addTagToMap(const GstTagList *list,
map->insert(QByteArray(tag), g_value_get_boolean(&val)); map->insert(QByteArray(tag), g_value_get_boolean(&val));
break; break;
case G_TYPE_CHAR: case G_TYPE_CHAR:
#if GLIB_CHECK_VERSION(2,32,0)
map->insert(QByteArray(tag), g_value_get_schar(&val));
#else
map->insert(QByteArray(tag), g_value_get_char(&val)); map->insert(QByteArray(tag), g_value_get_char(&val));
#endif
break; break;
case G_TYPE_DOUBLE: case G_TYPE_DOUBLE:
map->insert(QByteArray(tag), g_value_get_double(&val)); map->insert(QByteArray(tag), g_value_get_double(&val));
break; break;
default: default:
// GST_TYPE_DATE is a function, not a constant, so pull it out of the switch // GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
#if GST_CHECK_VERSION(1,0,0)
if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
const GDate *date = (const GDate *)g_value_get_boxed(&val);
#else
if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) { if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) {
const GDate *date = gst_value_get_date(&val); const GDate *date = gst_value_get_date(&val);
#endif
if (g_date_valid(date)) { if (g_date_valid(date)) {
int year = g_date_get_year(date); int year = g_date_get_year(date);
int month = g_date_get_month(date); int month = g_date_get_month(date);
@@ -169,6 +185,42 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
return size; return size;
} }
#if GST_CHECK_VERSION(1,0,0)
namespace {
struct AudioFormat
{
GstAudioFormat format;
QAudioFormat::SampleType sampleType;
QAudioFormat::Endian byteOrder;
int sampleSize;
};
static const AudioFormat qt_audioLookup[] =
{
{ GST_AUDIO_FORMAT_S8 , QAudioFormat::SignedInt , QAudioFormat::LittleEndian, 8 },
{ GST_AUDIO_FORMAT_U8 , QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 8 },
{ GST_AUDIO_FORMAT_S16LE, QAudioFormat::SignedInt , QAudioFormat::LittleEndian, 16 },
{ GST_AUDIO_FORMAT_S16BE, QAudioFormat::SignedInt , QAudioFormat::BigEndian , 16 },
{ GST_AUDIO_FORMAT_U16LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 16 },
{ GST_AUDIO_FORMAT_U16BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian , 16 },
{ GST_AUDIO_FORMAT_S32LE, QAudioFormat::SignedInt , QAudioFormat::LittleEndian, 32 },
{ GST_AUDIO_FORMAT_S32BE, QAudioFormat::SignedInt , QAudioFormat::BigEndian , 32 },
{ GST_AUDIO_FORMAT_U32LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 32 },
{ GST_AUDIO_FORMAT_U32BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian , 32 },
{ GST_AUDIO_FORMAT_S24LE, QAudioFormat::SignedInt , QAudioFormat::LittleEndian, 24 },
{ GST_AUDIO_FORMAT_S24BE, QAudioFormat::SignedInt , QAudioFormat::BigEndian , 24 },
{ GST_AUDIO_FORMAT_U24LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 24 },
{ GST_AUDIO_FORMAT_U24BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian , 24 },
{ GST_AUDIO_FORMAT_F32LE, QAudioFormat::Float , QAudioFormat::LittleEndian, 32 },
{ GST_AUDIO_FORMAT_F32BE, QAudioFormat::Float , QAudioFormat::BigEndian , 32 },
{ GST_AUDIO_FORMAT_F64LE, QAudioFormat::Float , QAudioFormat::LittleEndian, 64 },
{ GST_AUDIO_FORMAT_F64BE, QAudioFormat::Float , QAudioFormat::BigEndian , 64 }
};
}
#endif
/*! /*!
Returns audio format for caps. Returns audio format for caps.
If caps doesn't have a valid audio format, an empty QAudioFormat is returned. If caps doesn't have a valid audio format, an empty QAudioFormat is returned.
@@ -176,9 +228,26 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps) QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
{ {
const GstStructure *structure = gst_caps_get_structure(caps, 0);
QAudioFormat format; QAudioFormat format;
#if GST_CHECK_VERSION(1,0,0)
GstAudioInfo info;
if (gst_audio_info_from_caps(&info, caps)) {
for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
if (qt_audioLookup[i].format != info.finfo->format)
continue;
format.setSampleType(qt_audioLookup[i].sampleType);
format.setByteOrder(qt_audioLookup[i].byteOrder);
format.setSampleSize(qt_audioLookup[i].sampleSize);
format.setSampleRate(info.rate);
format.setChannelCount(info.channels);
format.setCodec(QStringLiteral("audio/pcm"));
return format;
}
}
#else
const GstStructure *structure = gst_caps_get_structure(caps, 0);
if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-int") == 0) { if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-int") == 0) {
@@ -249,16 +318,28 @@ QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
} else { } else {
return QAudioFormat(); return QAudioFormat();
} }
#endif
return format; return format;
} }
#if GST_CHECK_VERSION(1,0,0)
/*!
Returns audio format for a sample.
If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
{
GstCaps* caps = gst_sample_get_caps(sample);
if (!caps)
return QAudioFormat();
return QGstUtils::audioFormatForCaps(caps);
}
#else
/*! /*!
Returns audio format for a buffer. Returns audio format for a buffer.
If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned. If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
*/ */
QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer) QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
{ {
GstCaps* caps = gst_buffer_get_caps(buffer); GstCaps* caps = gst_buffer_get_caps(buffer);
@@ -269,7 +350,7 @@ QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
gst_caps_unref(caps); gst_caps_unref(caps);
return format; return format;
} }
#endif
/*! /*!
Builds GstCaps for an audio format. Builds GstCaps for an audio format.
@@ -277,8 +358,32 @@ QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
Caller must unref GstCaps. Caller must unref GstCaps.
*/ */
GstCaps *QGstUtils::capsForAudioFormat(QAudioFormat format) GstCaps *QGstUtils::capsForAudioFormat(const QAudioFormat &format)
{ {
if (!format.isValid())
return 0;
#if GST_CHECK_VERSION(1,0,0)
const QAudioFormat::SampleType sampleType = format.sampleType();
const QAudioFormat::Endian byteOrder = format.byteOrder();
const int sampleSize = format.sampleSize();
for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
if (qt_audioLookup[i].sampleType != sampleType
|| qt_audioLookup[i].byteOrder != byteOrder
|| qt_audioLookup[i].sampleSize != sampleSize) {
continue;
}
return gst_caps_new_simple(
"audio/x-raw",
"format" , G_TYPE_STRING, gst_audio_format_to_string(qt_audioLookup[i].format),
"rate" , G_TYPE_INT , format.sampleRate(),
"channels", G_TYPE_INT , format.channelCount(),
NULL);
}
return 0;
#else
GstStructure *structure = 0; GstStructure *structure = 0;
if (format.isValid()) { if (format.isValid()) {
@@ -313,6 +418,7 @@ GstCaps *QGstUtils::capsForAudioFormat(QAudioFormat format)
} }
return caps; return caps;
#endif
} }
void QGstUtils::initializeGst() void QGstUtils::initializeGst()
@@ -576,10 +682,629 @@ QByteArray QGstUtils::cameraDriver(const QString &device, GstElementFactory *fac
return QByteArray(); return QByteArray();
} }
QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFactory *factory))
{
QSet<QString> supportedMimeTypes;
//enumerate supported mime types
gst_init(NULL, NULL);
#if GST_CHECK_VERSION(1,0,0)
GstRegistry *registry = gst_registry_get();
GList *orig_plugins = gst_registry_get_plugin_list(registry);
#else
GstRegistry *registry = gst_registry_get_default();
GList *orig_plugins = gst_default_registry_get_plugin_list ();
#endif
for (GList *plugins = orig_plugins; plugins; plugins = g_list_next(plugins)) {
GstPlugin *plugin = (GstPlugin *) (plugins->data);
#if GST_CHECK_VERSION(1,0,0)
if (GST_OBJECT_FLAG_IS_SET(GST_OBJECT(plugin), GST_PLUGIN_FLAG_BLACKLISTED))
continue;
#else
if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
continue;
#endif
GList *orig_features = gst_registry_get_feature_list_by_plugin(
registry, gst_plugin_get_name(plugin));
for (GList *features = orig_features; features; features = g_list_next(features)) {
if (G_UNLIKELY(features->data == NULL))
continue;
GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
GstElementFactory *factory;
if (GST_IS_TYPE_FIND_FACTORY(feature)) {
QString name(gst_plugin_feature_get_name(feature));
if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
supportedMimeTypes.insert(name.toLower());
continue;
} else if (!GST_IS_ELEMENT_FACTORY (feature)
|| !(factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature)))) {
continue;
} else if (!isValidFactory(factory)) {
// Do nothing
} else for (const GList *pads = gst_element_factory_get_static_pad_templates(factory);
pads;
pads = g_list_next(pads)) {
GstStaticPadTemplate *padtemplate = static_cast<GstStaticPadTemplate *>(pads->data);
if (padtemplate->direction == GST_PAD_SINK && padtemplate->static_caps.string) {
GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
if (gst_caps_is_any(caps) || gst_caps_is_empty(caps)) {
} else for (guint i = 0; i < gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
QString nameLowcase = QString(gst_structure_get_name(structure)).toLower();
supportedMimeTypes.insert(nameLowcase);
if (nameLowcase.contains("mpeg")) {
//Because mpeg version number is only included in the detail
//description, it is necessary to manually extract this information
//in order to match the mime type of mpeg4.
const GValue *value = gst_structure_get_value(structure, "mpegversion");
if (value) {
gchar *str = gst_value_serialize(value);
QString versions(str);
QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
foreach (const QString &e, elements)
supportedMimeTypes.insert(nameLowcase + e);
g_free(str);
}
}
}
}
}
gst_object_unref(factory);
}
gst_plugin_feature_list_free(orig_features);
}
gst_plugin_list_free (orig_plugins);
#if defined QT_SUPPORTEDMIMETYPES_DEBUG
QStringList list = supportedMimeTypes.toList();
list.sort();
if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
foreach (const QString &type, list)
qDebug() << type;
}
#endif
return supportedMimeTypes;
}
namespace {
struct ColorFormat { QImage::Format imageFormat; GstVideoFormat gstFormat; };
static const ColorFormat qt_colorLookup[] =
{
{ QImage::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx },
{ QImage::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA },
{ QImage::Format_RGB888 , GST_VIDEO_FORMAT_RGB },
{ QImage::Format_RGB16 , GST_VIDEO_FORMAT_RGB16 }
};
}
#if GST_CHECK_VERSION(1,0,0)
QImage QGstUtils::bufferToImage(GstBuffer *buffer, const GstVideoInfo &videoInfo)
#else
QImage QGstUtils::bufferToImage(GstBuffer *buffer)
#endif
{
QImage img;
#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo info = videoInfo;
GstVideoFrame frame;
if (!gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ))
return img;
#else
GstCaps *caps = gst_buffer_get_caps(buffer);
if (!caps)
return img;
GstStructure *structure = gst_caps_get_structure (caps, 0);
gint width = 0;
gint height = 0;
if (!structure
|| !gst_structure_get_int(structure, "width", &width)
|| !gst_structure_get_int(structure, "height", &height)
|| width <= 0
|| height <= 0) {
gst_caps_unref(caps);
return img;
}
gst_caps_unref(caps);
#endif
#if GST_CHECK_VERSION(1,0,0)
if (videoInfo.finfo->format == GST_VIDEO_FORMAT_I420) {
const int width = videoInfo.width;
const int height = videoInfo.height;
const int stride[] = { frame.info.stride[0], frame.info.stride[1], frame.info.stride[2] };
const uchar *data[] = {
static_cast<const uchar *>(frame.data[0]),
static_cast<const uchar *>(frame.data[1]),
static_cast<const uchar *>(frame.data[2])
};
#else
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
const int stride[] = { width, width / 2, width / 2 };
const uchar *data[] = {
(const uchar *)buffer->data,
(const uchar *)buffer->data + width * height,
(const uchar *)buffer->data + width * height * 5 / 4
};
#endif
img = QImage(width/2, height/2, QImage::Format_RGB32);
for (int y=0; y<height; y+=2) {
const uchar *yLine = data[0] + (y * stride[0]);
const uchar *uLine = data[1] + (y * stride[1] / 2);
const uchar *vLine = data[2] + (y * stride[2] / 2);
for (int x=0; x<width; x+=2) {
const qreal Y = 1.164*(yLine[x]-16);
const int U = uLine[x/2]-128;
const int V = vLine[x/2]-128;
int b = qBound(0, int(Y + 2.018*U), 255);
int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
int r = qBound(0, int(Y + 1.596*V), 255);
img.setPixel(x/2,y/2,qRgb(r,g,b));
}
}
#if GST_CHECK_VERSION(1,0,0)
} else for (int i = 0; i < lengthOf(qt_colorLookup); ++i) {
if (qt_colorLookup[i].gstFormat != videoInfo.finfo->format)
continue;
const QImage image(
static_cast<const uchar *>(frame.data[0]),
videoInfo.width,
videoInfo.height,
frame.info.stride[0],
qt_colorLookup[i].imageFormat);
img = image;
img.detach();
break;
}
gst_video_frame_unmap(&frame);
#else
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
QImage::Format format = QImage::Format_Invalid;
int bpp = 0;
gst_structure_get_int(structure, "bpp", &bpp);
if (bpp == 24)
format = QImage::Format_RGB888;
else if (bpp == 32)
format = QImage::Format_RGB32;
if (format != QImage::Format_Invalid) {
img = QImage((const uchar *)buffer->data,
width,
height,
format);
img.bits(); //detach
}
}
#endif
return img;
}
namespace {
#if GST_CHECK_VERSION(1,0,0)
struct VideoFormat
{
QVideoFrame::PixelFormat pixelFormat;
GstVideoFormat gstFormat;
};
static const VideoFormat qt_videoFormatLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
{ QVideoFrame::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
{ QVideoFrame::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
{ QVideoFrame::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
{ QVideoFrame::Format_NV12 , GST_VIDEO_FORMAT_NV12 },
{ QVideoFrame::Format_NV21 , GST_VIDEO_FORMAT_NV21 },
{ QVideoFrame::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
{ QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
{ QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
{ QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
{ QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
#else
{ QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
{ QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
{ QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
{ QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
#endif
{ QVideoFrame::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
{ QVideoFrame::Format_BGR24 , GST_VIDEO_FORMAT_BGR },
{ QVideoFrame::Format_RGB565, GST_VIDEO_FORMAT_RGB16 }
};
static int indexOfVideoFormat(QVideoFrame::PixelFormat format)
{
for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
if (qt_videoFormatLookup[i].pixelFormat == format)
return i;
return -1;
}
static int indexOfVideoFormat(GstVideoFormat format)
{
for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
if (qt_videoFormatLookup[i].gstFormat == format)
return i;
return -1;
}
#else
struct YuvFormat
{
QVideoFrame::PixelFormat pixelFormat;
guint32 fourcc;
int bitsPerPixel;
};
static const YuvFormat qt_yuvColorLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
{ QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
{ QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
{ QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
{ QVideoFrame::Format_NV12, GST_MAKE_FOURCC('N','V','1','2'), 8 },
{ QVideoFrame::Format_NV21, GST_MAKE_FOURCC('N','V','2','1'), 8 },
{ QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};
static int indexOfYuvColor(QVideoFrame::PixelFormat format)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].pixelFormat == format)
return i;
return -1;
}
static int indexOfYuvColor(guint32 fourcc)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].fourcc == fourcc)
return i;
return -1;
}
struct RgbFormat
{
QVideoFrame::PixelFormat pixelFormat;
int bitsPerPixel;
int depth;
int endianness;
int red;
int green;
int blue;
int alpha;
};
static const RgbFormat qt_rgbColorLookup[] =
{
{ QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
{ QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
{ QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
{ QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};
static int indexOfRgbColor(
int bits, int depth, int endianness, int red, int green, int blue, int alpha)
{
const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
for (int i = 0; i < count; ++i) {
if (qt_rgbColorLookup[i].bitsPerPixel == bits
&& qt_rgbColorLookup[i].depth == depth
&& qt_rgbColorLookup[i].endianness == endianness
&& qt_rgbColorLookup[i].red == red
&& qt_rgbColorLookup[i].green == green
&& qt_rgbColorLookup[i].blue == blue
&& qt_rgbColorLookup[i].alpha == alpha) {
return i;
}
}
return -1;
}
#endif
}
#if GST_CHECK_VERSION(1,0,0)
QVideoSurfaceFormat QGstUtils::formatForCaps(
GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{
if (gst_video_info_from_caps(info, caps)) {
int index = indexOfVideoFormat(info->finfo->format);
if (index != -1) {
QVideoSurfaceFormat format(
QSize(info->width, info->height),
qt_videoFormatLookup[index].pixelFormat,
handleType);
if (info->fps_d > 0)
format.setFrameRate(qreal(info->fps_d) / info->fps_n);
if (info->par_d > 0)
format.setPixelAspectRatio(info->par_n, info->par_d);
return format;
}
}
return QVideoSurfaceFormat();
}
#else
// GStreamer 0.10 variant: derive a QVideoSurfaceFormat (and optionally the
// stride in bytes) by parsing the first structure of the caps by hand.
QVideoSurfaceFormat QGstUtils::formatForCaps(
    GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
{
    const GstStructure *structure = gst_caps_get_structure(caps, 0);
    QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
    int bitsPerPixel = 0;
    QSize size;
    gst_structure_get_int(structure, "width", &size.rwidth());
    gst_structure_get_int(structure, "height", &size.rheight());
    if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        // YUV formats are identified by their fourcc code.
        guint32 fourcc = 0;
        gst_structure_get_fourcc(structure, "format", &fourcc);
        int index = indexOfYuvColor(fourcc);
        if (index != -1) {
            pixelFormat = qt_yuvColorLookup[index].pixelFormat;
            bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
        }
    } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
        // RGB formats are identified by bpp/depth/endianness plus the
        // individual channel masks; all must match a lookup-table entry.
        int depth = 0;
        int endianness = 0;
        int red = 0;
        int green = 0;
        int blue = 0;
        int alpha = 0;
        gst_structure_get_int(structure, "bpp", &bitsPerPixel);
        gst_structure_get_int(structure, "depth", &depth);
        gst_structure_get_int(structure, "endianness", &endianness);
        gst_structure_get_int(structure, "red_mask", &red);
        gst_structure_get_int(structure, "green_mask", &green);
        gst_structure_get_int(structure, "blue_mask", &blue);
        gst_structure_get_int(structure, "alpha_mask", &alpha);
        int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
        if (index != -1)
            pixelFormat = qt_rgbColorLookup[index].pixelFormat;
    }
    if (pixelFormat != QVideoFrame::Format_Invalid) {
        QVideoSurfaceFormat format(size, pixelFormat, handleType);
        QPair<int, int> rate;
        gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
        if (rate.second)
            format.setFrameRate(qreal(rate.first)/rate.second);
        gint aspectNum = 0;
        gint aspectDenum = 0;
        if (gst_structure_get_fraction(
                    structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                format.setPixelAspectRatio(aspectNum, aspectDenum);
        }
        // Stride rounded up to a 4-byte boundary, matching typical
        // GStreamer buffer layout for packed raw video.
        if (bytesPerLine)
            *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
        return format;
    }
    return QVideoSurfaceFormat();
}
#endif
// Build a caps object advertising every pixel format in 'formats' that has
// a GStreamer equivalent, with unconstrained size/frame-rate ranges.
// Caller owns the returned caps.
GstCaps *QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
{
    GstCaps *caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1,0,0)
    // 1.0: one "video/x-raw" structure per supported format string.
    foreach (QVideoFrame::PixelFormat format, formats) {
        int index = indexOfVideoFormat(format);
        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw",
                        "format"   , G_TYPE_STRING, gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat),
                        NULL));
        }
    }
#else
    // 0.10: YUV formats become "video/x-raw-yuv" with a fourcc; RGB formats
    // become "video/x-raw-rgb" with explicit bpp/depth/masks.
    foreach (QVideoFrame::PixelFormat format, formats) {
        int index = indexOfYuvColor(format);
        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw-yuv",
                        "format", GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
                        NULL));
            continue;
        }
        const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
        for (int i = 0; i < count; ++i) {
            if (qt_rgbColorLookup[i].pixelFormat == format) {
                GstStructure *structure = gst_structure_new(
                            "video/x-raw-rgb",
                            "bpp"       , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
                            "depth"     , G_TYPE_INT, qt_rgbColorLookup[i].depth,
                            "endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
                            "red_mask"  , G_TYPE_INT, qt_rgbColorLookup[i].red,
                            "green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
                            "blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
                            NULL);
                // Only formats with an alpha channel carry an alpha_mask field.
                if (qt_rgbColorLookup[i].alpha != 0) {
                    gst_structure_set(
                                structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, NULL);
                }
                gst_caps_append_structure(caps, structure);
            }
        }
    }
#endif
    // Common range constraints applied to every structure appended above.
    gst_caps_set_simple(
                caps,
                "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                "width"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                "height"   , GST_TYPE_INT_RANGE, 1, INT_MAX,
                NULL);
    return caps;
}
// Copy the buffer's timestamp/duration onto the frame, converting from
// GStreamer nanoseconds to Qt microseconds.
void QGstUtils::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
    // GStreamer uses nanoseconds, Qt uses microseconds
    // An unset timestamp (GST_CLOCK_TIME_NONE, i.e. all bits set) becomes -1
    // once stored in a signed qint64, so the >= 0 checks skip unset values.
    qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
    if (startTime >= 0) {
        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));
        qint64 duration = GST_BUFFER_DURATION(buffer);
        if (duration >= 0)
            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
    }
}
// Replace the element's tag list with the given key/value metadata, mapping
// QVariant types to the matching GStreamer tag value types. Elements that do
// not implement GstTagSetter are ignored.
void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant> &data)
{
    if (!GST_IS_TAG_SETTER(element))
        return;
    // Start from a clean slate so stale tags from a previous recording
    // are not carried over.
    gst_tag_setter_reset_tags(GST_TAG_SETTER(element));
    QMapIterator<QByteArray, QVariant> it(data);
    while (it.hasNext()) {
        it.next();
        const QString tagName = it.key();
        const QVariant tagValue = it.value();
        switch (tagValue.type()) {
        case QVariant::String:
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                                    GST_TAG_MERGE_REPLACE,
                                    tagName.toUtf8().constData(),
                                    tagValue.toString().toUtf8().constData(),
                                    NULL);
            break;
        case QVariant::Int:
        case QVariant::LongLong:
            // NOTE(review): both Int and LongLong are narrowed via toInt();
            // a LongLong outside int range is truncated. Confirm the
            // registered tag types here really are G_TYPE_INT.
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                                    GST_TAG_MERGE_REPLACE,
                                    tagName.toUtf8().constData(),
                                    tagValue.toInt(),
                                    NULL);
            break;
        case QVariant::Double:
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                                    GST_TAG_MERGE_REPLACE,
                                    tagName.toUtf8().constData(),
                                    tagValue.toDouble(),
                                    NULL);
            break;
        case QVariant::DateTime: {
            QDateTime date = tagValue.toDateTime().toLocalTime();
            gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                                    GST_TAG_MERGE_REPLACE,
                                    tagName.toUtf8().constData(),
                                    gst_date_time_new_local_time(
                                        date.date().year(), date.date().month(), date.date().day(),
                                        date.time().hour(), date.time().minute(), date.time().second()),
                                    NULL);
            break;
        }
        default:
            // Unsupported variant types are silently skipped.
            break;
        }
    }
}
// Apply the metadata to every element in the bin that implements
// GstTagSetter, using version-specific iterator APIs.
void QGstUtils::setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data)
{
    GstIterator *elements = gst_bin_iterate_all_by_interface(bin, GST_TYPE_TAG_SETTER);
#if GST_CHECK_VERSION(1,0,0)
    // 1.0 iterators yield GValues holding the element.
    // NOTE(review): the GValue is never g_value_unset() and
    // GST_ITERATOR_RESYNC is not handled -- confirm this is acceptable here.
    GValue item = G_VALUE_INIT;
    while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
        GstElement * const element = GST_ELEMENT(g_value_get_object(&item));
#else
    // 0.10 iterators yield raw element pointers.
    GstElement *element = 0;
    while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
#endif
        setMetaData(element, data);
    }
    gst_iterator_free(elements);
}
// Return a writable copy of the static caps describing every raw/compressed
// video media type Qt's video filters accept, per GStreamer version.
GstCaps *QGstUtils::videoFilterCaps()
{
    static GstStaticCaps staticCaps = GST_STATIC_CAPS(
#if GST_CHECK_VERSION(1,2,0)
        // (ANY) also matches caps with custom caps features (e.g. GL memory).
        "video/x-raw(ANY);"
#elif GST_CHECK_VERSION(1,0,0)
        "video/x-raw;"
#else
        "video/x-raw-yuv;"
        "video/x-raw-rgb;"
        "video/x-raw-data;"
        "video/x-android-buffer;"
#endif
        "image/jpeg;"
        "video/x-h264");

    // make_writable so callers may modify the returned caps freely.
    return gst_caps_make_writable(gst_static_caps_get(&staticCaps));
}
void qt_gst_object_ref_sink(gpointer object) void qt_gst_object_ref_sink(gpointer object)
{ {
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 24) #if GST_CHECK_VERSION(0,10,24)
gst_object_ref_sink(object); gst_object_ref_sink(object);
#else #else
g_return_if_fail (GST_IS_OBJECT(object)); g_return_if_fail (GST_IS_OBJECT(object));
@@ -595,4 +1320,50 @@ void qt_gst_object_ref_sink(gpointer object)
#endif #endif
} }
// Version shim: the 0.10 API called this "negotiated caps".
// Caller owns the returned reference (may be NULL if not negotiated).
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_pad_get_current_caps(pad);
#else
    return gst_pad_get_negotiated_caps(pad);
#endif
}
// Version shim: create a named structure with no fields; 0.10 lacked the
// dedicated _new_empty() constructor.
GstStructure *qt_gst_structure_new_empty(const char *name)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_structure_new_empty(name);
#else
    return gst_structure_new(name, NULL);
#endif
}
// Version shim: in 0.10 the format argument was in-out (passed by pointer);
// in 1.0 it is a plain value.
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_element_query_position(element, format, cur);
#else
    return gst_element_query_position(element, &format, cur);
#endif
}
// Version shim: same in-out vs by-value format difference as
// qt_gst_element_query_position().
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_element_query_duration(element, format, cur);
#else
    return gst_element_query_duration(element, &format, cur);
#endif
}
QDebug operator <<(QDebug debug, GstCaps *caps)
{
    // Stream a human-readable serialization of the caps; a null pointer
    // leaves the debug stream untouched.
    if (!caps)
        return debug;

    gchar *serialized = gst_caps_to_string(caps);
    debug = debug << serialized;
    g_free(serialized);
    return debug;
}
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -35,21 +35,35 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info)
: QAbstractPlanarVideoBuffer(NoHandle)
, m_videoInfo(info)
#else
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine) QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine)
: QAbstractVideoBuffer(NoHandle) : QAbstractVideoBuffer(NoHandle)
, m_buffer(buffer)
, m_bytesPerLine(bytesPerLine) , m_bytesPerLine(bytesPerLine)
#endif
, m_buffer(buffer)
, m_mode(NotMapped) , m_mode(NotMapped)
{ {
gst_buffer_ref(m_buffer); gst_buffer_ref(m_buffer);
} }
#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info,
QGstVideoBuffer::HandleType handleType,
const QVariant &handle)
: QAbstractPlanarVideoBuffer(handleType)
, m_videoInfo(info)
#else
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine, QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
QGstVideoBuffer::HandleType handleType, QGstVideoBuffer::HandleType handleType,
const QVariant &handle) const QVariant &handle)
: QAbstractVideoBuffer(handleType) : QAbstractVideoBuffer(handleType)
, m_buffer(buffer)
, m_bytesPerLine(bytesPerLine) , m_bytesPerLine(bytesPerLine)
#endif
, m_buffer(buffer)
, m_mode(NotMapped) , m_mode(NotMapped)
, m_handle(handle) , m_handle(handle)
{ {
@@ -58,6 +72,8 @@ QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
QGstVideoBuffer::~QGstVideoBuffer() QGstVideoBuffer::~QGstVideoBuffer()
{ {
unmap();
gst_buffer_unref(m_buffer); gst_buffer_unref(m_buffer);
} }
@@ -67,12 +83,49 @@ QAbstractVideoBuffer::MapMode QGstVideoBuffer::mapMode() const
return m_mode; return m_mode;
} }
#if GST_CHECK_VERSION(1,0,0)
int QGstVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
{
const GstMapFlags flags = GstMapFlags(((mode & ReadOnly) ? GST_MAP_READ : 0)
| ((mode & WriteOnly) ? GST_MAP_WRITE : 0));
if (mode == NotMapped || m_mode != NotMapped) {
return 0;
} else if (m_videoInfo.finfo->n_planes == 0) { // Encoded
if (gst_buffer_map(m_buffer, &m_frame.map[0], flags)) {
if (numBytes)
*numBytes = m_frame.map[0].size;
bytesPerLine[0] = -1;
data[0] = static_cast<uchar *>(m_frame.map[0].data);
m_mode = mode;
return 1;
}
} else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer, flags)) {
if (numBytes)
*numBytes = m_frame.info.size;
for (guint i = 0; i < m_frame.info.finfo->n_planes; ++i) {
bytesPerLine[i] = m_frame.info.stride[i];
data[i] = static_cast<uchar *>(m_frame.data[i]);
}
m_mode = mode;
return m_frame.info.finfo->n_planes;
}
return 0;
}
#else
uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine) uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
{ {
if (mode != NotMapped && m_mode == NotMapped) { if (mode != NotMapped && m_mode == NotMapped) {
if (numBytes) if (numBytes)
*numBytes = m_buffer->size; *numBytes = m_buffer->size;
if (bytesPerLine) if (bytesPerLine)
*bytesPerLine = m_bytesPerLine; *bytesPerLine = m_bytesPerLine;
@@ -83,8 +136,19 @@ uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
return 0; return 0;
} }
} }
#endif
void QGstVideoBuffer::unmap() void QGstVideoBuffer::unmap()
{ {
#if GST_CHECK_VERSION(1,0,0)
if (m_mode != NotMapped) {
if (m_videoInfo.finfo->n_planes == 0)
gst_buffer_unmap(m_buffer, &m_frame.map[0]);
else
gst_video_frame_unmap(&m_frame);
}
#endif
m_mode = NotMapped; m_mode = NotMapped;
} }

View File

@@ -0,0 +1,53 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstvideorendererplugin_p.h"
QT_BEGIN_NAMESPACE
// Trivial QObject-parented constructor; the plugin interface's real work
// happens in createRenderer() implemented by subclasses.
QGstVideoRendererPlugin::QGstVideoRendererPlugin(QObject *parent) :
    QObject(parent)
{
}
QT_END_NAMESPACE
#include "moc_qgstvideorendererplugin_p.cpp"

View File

@@ -0,0 +1,605 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <qabstractvideosurface.h>
#include <qvideoframe.h>
#include <QDebug>
#include <QMap>
#include <QThread>
#include <QEvent>
#include <QCoreApplication>
#include <private/qmediapluginloader_p.h>
#include "qgstvideobuffer_p.h"
#include "qgstvideorenderersink_p.h"
#include <gst/video/video.h>
#include "qgstutils_p.h"
//#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE
// Starts in the "flushed" state: no frame has been presented yet.
QGstDefaultVideoRenderer::QGstDefaultVideoRenderer()
    : m_flushed(true)
{
}
// No owned resources; members clean themselves up.
QGstDefaultVideoRenderer::~QGstDefaultVideoRenderer()
{
}
// Advertise whatever pixel formats the surface supports, translated to caps.
GstCaps *QGstDefaultVideoRenderer::getCaps(QAbstractVideoSurface *surface)
{
    return QGstUtils::capsForFormats(surface->supportedPixelFormats());
}
// Derive the surface format from the negotiated caps (filling m_videoInfo
// for later buffer mapping) and start the surface with it.
bool QGstDefaultVideoRenderer::start(QAbstractVideoSurface *surface, GstCaps *caps)
{
    m_flushed = true;
    m_format = QGstUtils::formatForCaps(caps, &m_videoInfo);
    return m_format.isValid() && surface->start(m_format);
}
// Stop the surface (if any) and reset to the flushed state.
void QGstDefaultVideoRenderer::stop(QAbstractVideoSurface *surface)
{
    m_flushed = true;
    if (surface)
        surface->stop();
}
// Wrap the GstBuffer in a QGstVideoBuffer (zero-copy; QVideoFrame takes
// ownership of the wrapper) and hand it to the surface.
bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer *buffer)
{
    m_flushed = false;
    QVideoFrame frame(
                new QGstVideoBuffer(buffer, m_videoInfo),
                m_format.frameSize(),
                m_format.pixelFormat());
    QGstUtils::setFrameTimeStamps(&frame, buffer);
    return surface->present(frame);
}
// Present an empty frame to clear the surface, but only once per flush
// cycle (m_flushed guards against repeated empty frames).
void QGstDefaultVideoRenderer::flush(QAbstractVideoSurface *surface)
{
    if (surface && !m_flushed)
        surface->present(QVideoFrame());
    m_flushed = true;
}
// The default renderer imposes no allocation requirements; accept any query.
bool QGstDefaultVideoRenderer::proposeAllocation(GstQuery *)
{
    return true;
}
Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, rendererLoader,
(QGstVideoRendererInterface_iid, QLatin1String("video/gstvideorenderer"), Qt::CaseInsensitive))
// Build the renderer list: plugin-provided renderers first (higher
// priority), then the built-in default renderer as a fallback, and compute
// the initial set of supported caps.
QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface)
    : m_surface(surface)
    , m_renderer(0)
    , m_activeRenderer(0)
    , m_surfaceCaps(0)
    , m_startCaps(0)
    , m_lastBuffer(0)
    , m_notified(false)
    , m_stop(false)
    , m_render(false)
    , m_flush(false)
{
    foreach (QObject *instance, rendererLoader()->instances(QGstVideoRendererPluginKey)) {
        QGstVideoRendererInterface* plugin = qobject_cast<QGstVideoRendererInterface*>(instance);
        // Plugins that fail to cast or return no renderer are skipped.
        if (QGstVideoRenderer *renderer = plugin ? plugin->createRenderer() : 0)
            m_renderers.append(renderer);
    }
    m_renderers.append(new QGstDefaultVideoRenderer);
    updateSupportedFormats();
    // Re-query renderer caps whenever the surface's format support changes.
    connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
}
// Release owned renderers and the cached surface caps.
// NOTE(review): m_startCaps and m_lastBuffer are not released here --
// confirm they are always cleared via stop()/flush() before destruction.
QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
{
    qDeleteAll(m_renderers);
    if (m_surfaceCaps)
        gst_caps_unref(m_surfaceCaps);
}
// Return a new reference to the cached surface caps (caller must unref).
// NOTE(review): m_surfaceCaps is dereferenced without a null check; if no
// renderer produced caps in updateSupportedFormats() this passes NULL to
// gst_caps_ref() -- confirm that case cannot occur in practice.
GstCaps *QVideoSurfaceGstDelegate::caps()
{
    QMutexLocker locker(&m_mutex);
    gst_caps_ref(m_surfaceCaps);
    return m_surfaceCaps;
}
// Request the surface to (re)start with the given caps. Called from a
// GStreamer streaming thread; the actual start happens asynchronously in
// the delegate's (main) thread via handleEvent().
bool QVideoSurfaceGstDelegate::start(GstCaps *caps)
{
    QMutexLocker locker(&m_mutex);
    // A currently active renderer must be flushed and stopped first.
    if (m_activeRenderer) {
        m_flush = true;
        m_stop = true;
    }
    m_render = false;
    // Drop any pending frame; it belongs to the previous format.
    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }
    if (m_startCaps)
        gst_caps_unref(m_startCaps);
    // Keep our own reference to the caps until handleEvent() consumes them.
    m_startCaps = caps;
    gst_caps_ref(m_startCaps);
    /*
    Waiting for start() to be invoked in the main thread may block
    if gstreamer blocks the main thread until this call is finished.
    This situation is rare and usually caused by setState(Null)
    while pipeline is being prerolled.
    The proper solution to this involves controlling gstreamer pipeline from
    other thread than video surface.
    Currently start() fails if wait() timed out.
    */
    if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && m_startCaps) {
        qWarning() << "Failed to start video surface due to main thread blocked.";
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }
    return m_activeRenderer != 0;
}
// Request an asynchronous flush+stop of the active renderer and discard any
// pending start request or buffered frame. No-op if nothing is active.
void QVideoSurfaceGstDelegate::stop()
{
    QMutexLocker locker(&m_mutex);
    if (!m_activeRenderer)
        return;
    m_flush = true;
    m_stop = true;
    // Cancel a pending (not yet handled) start request.
    if (m_startCaps) {
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }
    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }
    // Best effort: give the main thread 500 ms to process the stop.
    waitForAsyncEvent(&locker, &m_setupCondition, 500);
}
bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
{
    // Snapshot the active renderer under the lock, then forward the query
    // outside of it so the renderer may block without holding m_mutex.
    QMutexLocker locker(&m_mutex);
    QGstVideoRenderer * const renderer = m_activeRenderer;

    if (!renderer)
        return false;

    locker.unlock();
    return renderer->proposeAllocation(query);
}
// Schedule an asynchronous flush: cancel any pending render, drop the
// buffered frame, and wake the main thread.
void QVideoSurfaceGstDelegate::flush()
{
    QMutexLocker locker(&m_mutex);
    m_flush = true;
    m_render = false;
    if (m_lastBuffer) {
        gst_buffer_unref(m_lastBuffer);
        m_lastBuffer = 0;
    }
    notify();
}
// Called from the streaming thread for every buffer. Keeps a reference to
// the latest buffer; when 'show' is set, asks the main thread to present it
// and blocks (up to 300 ms) for the result.
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer, bool show)
{
    QMutexLocker locker(&m_mutex);
    // Replace the retained "last" buffer with the new one.
    if (m_lastBuffer)
        gst_buffer_unref(m_lastBuffer);
    m_lastBuffer = buffer;
    gst_buffer_ref(m_lastBuffer);
    if (show) {
        m_render = true;
        // Timeout counts as an error so the pipeline does not stall forever.
        return waitForAsyncEvent(&locker, &m_renderCondition, 300)
                ? m_renderReturn
                : GST_FLOW_ERROR;
    } else {
        return GST_FLOW_OK;
    }
}
// GObject notify callback for the sink's "show-preroll-frame" property.
// While paused, toggling the property either flushes the displayed frame
// (property turned off) or re-presents the retained last buffer (turned on).
void QVideoSurfaceGstDelegate::handleShowPrerollChange(GObject *object, GParamSpec *, gpointer d)
{
    QVideoSurfaceGstDelegate * const delegate = static_cast<QVideoSurfaceGstDelegate *>(d);
    gboolean showPreroll = true; // "show-preroll-frame" property is true by default
    g_object_get(object, "show-preroll-frame", &showPreroll, NULL);
    GstState state = GST_STATE_NULL;
    GstState pendingState = GST_STATE_NULL;
    // Timeout 0: query current/pending state without blocking.
    gst_element_get_state(GST_ELEMENT(object), &state, &pendingState, 0);
    // Paused either settled (no pending change) or as the pending target.
    const bool paused
            = (pendingState == GST_STATE_VOID_PENDING && state == GST_STATE_PAUSED)
            || pendingState == GST_STATE_PAUSED;
    if (paused) {
        QMutexLocker locker(&delegate->m_mutex);
        if (!showPreroll && delegate->m_lastBuffer) {
            delegate->m_render = false;
            delegate->m_flush = true;
            delegate->notify();
        } else if (delegate->m_lastBuffer) {
            delegate->m_render = true;
            delegate->notify();
        }
    }
}
bool QVideoSurfaceGstDelegate::event(QEvent *event)
{
    // UpdateRequest is the wake-up posted by notify(): drain every pending
    // asynchronous action under the mutex, then clear the pending flag.
    if (event->type() != QEvent::UpdateRequest)
        return QObject::event(event);

    QMutexLocker locker(&m_mutex);
    if (m_notified) {
        while (handleEvent(&locker)) {}
        m_notified = false;
    }
    return true;
}
// Process exactly one pending action, in strict priority order:
// flush > stop > start > render. Called with the mutex held; the lock is
// released around surface/renderer callbacks to avoid deadlocks with the
// streaming thread. Returns true while more actions may be pending, false
// once idle (which also wakes waiters on m_setupCondition).
bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
{
    if (m_flush) {
        m_flush = false;
        if (m_activeRenderer) {
            locker->unlock();
            m_activeRenderer->flush(m_surface);
        }
    } else if (m_stop) {
        m_stop = false;
        if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();
            activePool->stop(m_surface);
            locker->relock();
        }
    } else if (m_startCaps) {
        Q_ASSERT(!m_activeRenderer);
        // Take ownership of the pending caps before dropping the lock so a
        // concurrent start()/stop() cannot consume them twice.
        GstCaps * const startCaps = m_startCaps;
        m_startCaps = 0;
        if (m_renderer && m_surface) {
            locker->unlock();
            const bool started = m_renderer->start(m_surface, startCaps);
            locker->relock();
            m_activeRenderer = started
                    ? m_renderer
                    : 0;
        } else if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();
            activePool->stop(m_surface);
            locker->relock();
        }
        gst_caps_unref(startCaps);
    } else if (m_render) {
        m_render = false;
        if (m_activeRenderer && m_surface && m_lastBuffer) {
            // Hold an extra reference across the unlocked present() call in
            // case m_lastBuffer is replaced concurrently.
            GstBuffer *buffer = m_lastBuffer;
            gst_buffer_ref(buffer);
            locker->unlock();
            const bool rendered = m_activeRenderer->present(m_surface, buffer);
            gst_buffer_unref(buffer);
            locker->relock();
            m_renderReturn = rendered
                    ? GST_FLOW_OK
                    : GST_FLOW_ERROR;
            m_renderCondition.wakeAll();
        } else {
            m_renderReturn = GST_FLOW_ERROR;
            m_renderCondition.wakeAll();
        }
    } else {
        // Nothing pending: release any thread blocked in waitForAsyncEvent().
        m_setupCondition.wakeAll();
        return false;
    }
    return true;
}
// Post a single coalesced wake-up event to the delegate's thread;
// m_notified prevents flooding the event queue.
void QVideoSurfaceGstDelegate::notify()
{
    if (!m_notified) {
        m_notified = true;
        QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
    }
}
// Wait for the pending asynchronous actions to be handled. When already on
// the delegate's thread, process them inline (waiting would deadlock);
// otherwise wake the delegate thread and block on the condition with a
// timeout. Returns false only on timeout.
bool QVideoSurfaceGstDelegate::waitForAsyncEvent(
        QMutexLocker *locker, QWaitCondition *condition, unsigned long time)
{
    if (QThread::currentThread() == thread()) {
        while (handleEvent(locker)) {}
        m_notified = false;
        return true;
    } else {
        notify();
        return condition->wait(&m_mutex, time);
    }
}
void QVideoSurfaceGstDelegate::updateSupportedFormats()
{
    // Rebuild the advertised surface caps by querying each renderer in
    // priority order; the first renderer returning non-empty caps becomes
    // the preferred renderer (m_renderer) and supplies m_surfaceCaps.
    if (m_surfaceCaps) {
        gst_caps_unref(m_surfaceCaps);
        m_surfaceCaps = 0;
    }
    foreach (QGstVideoRenderer *pool, m_renderers) {
        GstCaps *caps = pool->getCaps(m_surface);

        // A null return means the renderer produced no caps object at all;
        // there is nothing to unref. (The previous code called
        // gst_caps_unref(caps) in this branch, passing NULL, which
        // gst_caps_unref does not accept.)
        if (!caps)
            continue;

        // Empty caps are treated the same as no caps: release and move on.
        if (gst_caps_is_empty(caps)) {
            gst_caps_unref(caps);
            continue;
        }

        if (m_surfaceCaps)
            gst_caps_unref(m_surfaceCaps);

        m_renderer = pool;
        m_surfaceCaps = caps; // take ownership of the renderer's reference
        break;
    }
}
static GstVideoSinkClass *sink_parent_class;
#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
// Factory: instantiate the GObject-based sink, attach a delegate bound to
// the given surface, and subscribe to "show-preroll-frame" changes so the
// delegate can react to preroll visibility toggles.
QGstVideoRendererSink *QGstVideoRendererSink::createSink(QAbstractVideoSurface *surface)
{
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(
                g_object_new(QGstVideoRendererSink::get_type(), 0));
    sink->delegate = new QVideoSurfaceGstDelegate(surface);
    g_signal_connect(
                G_OBJECT(sink),
                "notify::show-preroll-frame",
                G_CALLBACK(QVideoSurfaceGstDelegate::handleShowPrerollChange),
                sink->delegate);
    return sink;
}
// Lazily register the sink's GType (subclass of GstVideoSink).
// NOTE(review): the static 'type' initialization is not guarded for
// concurrent first calls (no g_once) -- confirm first use is single-threaded.
GType QGstVideoRendererSink::get_type()
{
    static GType type = 0;
    if (type == 0) {
        static const GTypeInfo info =
        {
            sizeof(QGstVideoRendererSinkClass),                    // class_size
            base_init,                                             // base_init
            NULL,                                                  // base_finalize
            class_init,                                            // class_init
            NULL,                                                  // class_finalize
            NULL,                                                  // class_data
            sizeof(QGstVideoRendererSink),                         // instance_size
            0,                                                     // n_preallocs
            instance_init,                                         // instance_init
            0                                                      // value_table
        };
        type = g_type_register_static(
                    GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink", &info, GTypeFlags(0));
    }
    return type;
}
// GObject class initializer: remember the parent class for chaining and
// install the sink's virtual-method overrides on the base-sink, element and
// object class vtables.
void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);
    sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->preroll = QGstVideoRendererSink::preroll;
    base_sink_class->render = QGstVideoRendererSink::render;
    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;
    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
}
// Register the static sink pad template accepting any raw video size and
// frame rate; actual format negotiation happens via get_caps/set_caps.
void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
                "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                        "video/x-raw, "
                        "framerate = (fraction) [ 0, MAX ], "
                        "width = (int) [ 1, MAX ], "
                        "height = (int) [ 1, MAX ]"));
    gst_element_class_add_pad_template(
                GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}
// Per-instance initializer: the delegate is created later in createSink(),
// so start with a null pointer.
void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    VO_SINK(instance);
    Q_UNUSED(g_class);
    sink->delegate = 0;
}
// GObject finalizer: release the Qt-side delegate, then chain up so the
// parent class can finish tearing the object down.
void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);
    delete sink->delegate;
    // Chain up
    G_OBJECT_CLASS(sink_parent_class)->finalize(object);
}
GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    // Pure pass-through to the parent class implementation. The original
    // marked 'element' with Q_UNUSED even though it is forwarded below,
    // which was misleading; the macro is removed.
    return GST_ELEMENT_CLASS(sink_parent_class)->change_state(
                element, transition);
}
// Report the caps the attached surface supports, optionally intersected
// with the upstream-provided filter. Returns a new reference.
GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
{
    VO_SINK(base);
    GstCaps *caps = sink->delegate->caps();
    GstCaps *unfiltered = caps;
    if (filter) {
        // gst_caps_intersect returns a new caps; drop our unfiltered ref.
        caps = gst_caps_intersect(unfiltered, filter);
        gst_caps_unref(unfiltered);
    }
    return caps;
}
gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *caps)
{
    VO_SINK(base);

#ifdef DEBUG_VIDEO_SURFACE_SINK
    qDebug() << "set_caps:";
    qDebug() << caps;
#endif

    // Null caps signal the end of streaming: stop the delegate. Otherwise
    // attempt to (re)start the delegate with the newly negotiated format.
    if (!caps) {
        sink->delegate->stop();
        return TRUE;
    }

    return sink->delegate->start(caps) ? TRUE : FALSE;
}
// Forward upstream allocation queries to the delegate's active renderer.
gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->delegate->proposeAllocation(query);
}
// Handle the preroll buffer: display it only when the sink's
// "show-preroll-frame" property allows it.
GstFlowReturn QGstVideoRendererSink::preroll(GstBaseSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    gboolean showPreroll = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(base), "show-preroll-frame", &showPreroll, NULL);
    return sink->delegate->render(buffer, showPreroll); // display frame
}
// Regular playback path: every buffer is shown.
GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->delegate->render(buffer, true);
}
QT_END_NAMESPACE

View File

@@ -41,8 +41,13 @@
#include <private/qmediapluginloader_p.h> #include <private/qmediapluginloader_p.h>
#include "qgstvideobuffer_p.h" #include "qgstvideobuffer_p.h"
#include "qgstutils_p.h"
#include "qvideosurfacegstsink_p.h" #include "qvideosurfacegstsink_p.h"
#if GST_VERSION_MAJOR >=1
#include <gst/video/video.h>
#endif
//#define DEBUG_VIDEO_SURFACE_SINK //#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -62,10 +67,12 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(
if (m_surface) { if (m_surface) {
foreach (QObject *instance, bufferPoolLoader()->instances(QGstBufferPoolPluginKey)) { foreach (QObject *instance, bufferPoolLoader()->instances(QGstBufferPoolPluginKey)) {
QGstBufferPoolInterface* plugin = qobject_cast<QGstBufferPoolInterface*>(instance); QGstBufferPoolInterface* plugin = qobject_cast<QGstBufferPoolInterface*>(instance);
if (plugin) { if (plugin) {
m_pools.append(plugin); m_pools.append(plugin);
} }
} }
updateSupportedFormats(); updateSupportedFormats();
connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats())); connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
} }
@@ -191,13 +198,15 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
m_format.frameSize(), m_format.frameSize(),
m_format.pixelFormat()); m_format.pixelFormat());
QVideoSurfaceGstSink::setFrameTimeStamps(&m_frame, buffer); QGstUtils::setFrameTimeStamps(&m_frame, buffer);
m_renderReturn = GST_FLOW_OK; m_renderReturn = GST_FLOW_OK;
if (QThread::currentThread() == thread()) { if (QThread::currentThread() == thread()) {
if (!m_surface.isNull()) if (!m_surface.isNull())
m_surface->present(m_frame); m_surface->present(m_frame);
else
qWarning() << "m_surface.isNull().";
} else { } else {
QMetaObject::invokeMethod(this, "queuedRender", Qt::QueuedConnection); QMetaObject::invokeMethod(this, "queuedRender", Qt::QueuedConnection);
m_renderCondition.wait(&m_mutex, 300); m_renderCondition.wait(&m_mutex, 300);
@@ -283,90 +292,6 @@ void QVideoSurfaceGstDelegate::updateSupportedFormats()
} }
} }
struct YuvFormat
{
QVideoFrame::PixelFormat pixelFormat;
guint32 fourcc;
int bitsPerPixel;
};
static const YuvFormat qt_yuvColorLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
{ QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
{ QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
{ QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
{ QVideoFrame::Format_NV12, GST_MAKE_FOURCC('N','V','1','2'), 8 },
{ QVideoFrame::Format_NV21, GST_MAKE_FOURCC('N','V','2','1'), 8 },
{ QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};
static int indexOfYuvColor(QVideoFrame::PixelFormat format)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].pixelFormat == format)
return i;
return -1;
}
static int indexOfYuvColor(guint32 fourcc)
{
const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
for (int i = 0; i < count; ++i)
if (qt_yuvColorLookup[i].fourcc == fourcc)
return i;
return -1;
}
struct RgbFormat
{
QVideoFrame::PixelFormat pixelFormat;
int bitsPerPixel;
int depth;
int endianness;
int red;
int green;
int blue;
int alpha;
};
static const RgbFormat qt_rgbColorLookup[] =
{
{ QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
{ QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
{ QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
{ QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
{ QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
{ QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
{ QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};
static int indexOfRgbColor(
int bits, int depth, int endianness, int red, int green, int blue, int alpha)
{
const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
for (int i = 0; i < count; ++i) {
if (qt_rgbColorLookup[i].bitsPerPixel == bits
&& qt_rgbColorLookup[i].depth == depth
&& qt_rgbColorLookup[i].endianness == endianness
&& qt_rgbColorLookup[i].red == red
&& qt_rgbColorLookup[i].green == green
&& qt_rgbColorLookup[i].blue == blue
&& qt_rgbColorLookup[i].alpha == alpha) {
return i;
}
}
return -1;
}
static GstVideoSinkClass *sink_parent_class; static GstVideoSinkClass *sink_parent_class;
#define VO_SINK(s) QVideoSurfaceGstSink *sink(reinterpret_cast<QVideoSurfaceGstSink *>(s)) #define VO_SINK(s) QVideoSurfaceGstSink *sink(reinterpret_cast<QVideoSurfaceGstSink *>(s))
@@ -494,8 +419,6 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
{ {
VO_SINK(base); VO_SINK(base);
GstCaps *caps = gst_caps_new_empty();
// Find the supported pixel formats // Find the supported pixel formats
// with buffer pool specific formats listed first // with buffer pool specific formats listed first
QList<QVideoFrame::PixelFormat> supportedFormats; QList<QVideoFrame::PixelFormat> supportedFormats;
@@ -503,6 +426,7 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
QList<QVideoFrame::PixelFormat> poolHandleFormats; QList<QVideoFrame::PixelFormat> poolHandleFormats;
sink->delegate->poolMutex()->lock(); sink->delegate->poolMutex()->lock();
QGstBufferPoolInterface *pool = sink->delegate->pool(); QGstBufferPoolInterface *pool = sink->delegate->pool();
if (pool) if (pool)
poolHandleFormats = sink->delegate->supportedPixelFormats(pool->handleType()); poolHandleFormats = sink->delegate->supportedPixelFormats(pool->handleType());
sink->delegate->poolMutex()->unlock(); sink->delegate->poolMutex()->unlock();
@@ -513,47 +437,7 @@ GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
supportedFormats.append(format); supportedFormats.append(format);
} }
foreach (QVideoFrame::PixelFormat format, supportedFormats) { return QGstUtils::capsForFormats(supportedFormats);
int index = indexOfYuvColor(format);
if (index != -1) {
gst_caps_append_structure(caps, gst_structure_new(
"video/x-raw-yuv",
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
"width" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"height" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"format" , GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
NULL));
continue;
}
const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
for (int i = 0; i < count; ++i) {
if (qt_rgbColorLookup[i].pixelFormat == format) {
GstStructure *structure = gst_structure_new(
"video/x-raw-rgb",
"framerate" , GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
"width" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"height" , GST_TYPE_INT_RANGE, 1, INT_MAX,
"bpp" , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
"depth" , G_TYPE_INT, qt_rgbColorLookup[i].depth,
"endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
"red_mask" , G_TYPE_INT, qt_rgbColorLookup[i].red,
"green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
"blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
NULL);
if (qt_rgbColorLookup[i].alpha != 0) {
gst_structure_set(
structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, NULL);
}
gst_caps_append_structure(caps, structure);
}
}
}
return caps;
} }
gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps) gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
@@ -575,7 +459,7 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::HandleType handleType =
pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle; pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
QVideoSurfaceFormat format = formatForCaps(caps, &bytesPerLine, handleType); QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine, handleType);
if (sink->delegate->isActive()) { if (sink->delegate->isActive()) {
QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat(); QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat();
@@ -592,7 +476,7 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
sink->lastRequestedCaps = 0; sink->lastRequestedCaps = 0;
#ifdef DEBUG_VIDEO_SURFACE_SINK #ifdef DEBUG_VIDEO_SURFACE_SINK
qDebug() << "Staring video surface, format:"; qDebug() << "Starting video surface, format:";
qDebug() << format; qDebug() << format;
qDebug() << "bytesPerLine:" << bytesPerLine; qDebug() << "bytesPerLine:" << bytesPerLine;
#endif #endif
@@ -606,87 +490,6 @@ gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
return FALSE; return FALSE;
} }
QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
{
const GstStructure *structure = gst_caps_get_structure(caps, 0);
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
int bitsPerPixel = 0;
QSize size;
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1)
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
}
if (pixelFormat != QVideoFrame::Format_Invalid) {
QVideoSurfaceFormat format(size, pixelFormat, handleType);
QPair<int, int> rate;
gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
if (rate.second)
format.setFrameRate(qreal(rate.first)/rate.second);
gint aspectNum = 0;
gint aspectDenum = 0;
if (gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0)
format.setPixelAspectRatio(aspectNum, aspectDenum);
}
if (bytesPerLine)
*bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
return format;
}
return QVideoSurfaceFormat();
}
void QVideoSurfaceGstSink::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
// GStreamer uses nanoseconds, Qt uses microseconds
qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
if (startTime >= 0) {
frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));
qint64 duration = GST_BUFFER_DURATION(buffer);
if (duration >= 0)
frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
}
}
GstFlowReturn QVideoSurfaceGstSink::buffer_alloc( GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer) GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer)
{ {
@@ -731,7 +534,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
if (sink->delegate->isActive()) { if (sink->delegate->isActive()) {
//if format was changed, restart the surface //if format was changed, restart the surface
QVideoSurfaceFormat format = formatForCaps(intersection); QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection);
QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat(); QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
if (format.pixelFormat() != surfaceFormat.pixelFormat() || if (format.pixelFormat() != surfaceFormat.pixelFormat() ||
@@ -749,7 +552,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::HandleType handleType =
pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle; pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
QVideoSurfaceFormat format = formatForCaps(intersection, &bytesPerLine, handleType); QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection, &bytesPerLine, handleType);
if (!sink->delegate->start(format, bytesPerLine)) { if (!sink->delegate->start(format, bytesPerLine)) {
qWarning() << "failed to start video surface"; qWarning() << "failed to start video surface";
@@ -763,7 +566,7 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat(); QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
if (!pool->isFormatSupported(surfaceFormat)) { if (!pool->isFormatSupported(surfaceFormat)) {
//qDebug() << "sink doesn't support native pool format, skip custom buffers allocation"; qDebug() << "sink doesn't support native pool format, skip custom buffers allocation";
return GST_FLOW_OK; return GST_FLOW_OK;
} }
@@ -787,7 +590,6 @@ GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
gboolean QVideoSurfaceGstSink::start(GstBaseSink *base) gboolean QVideoSurfaceGstSink::start(GstBaseSink *base)
{ {
Q_UNUSED(base); Q_UNUSED(base);
return TRUE; return TRUE;
} }

View File

@@ -39,7 +39,10 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/app/gstappsrc.h> #include <gst/app/gstappsrc.h>
#if GST_VERSION_MAJOR < 1
#include <gst/app/gstappbuffer.h> #include <gst/app/gstappbuffer.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE

View File

@@ -38,23 +38,32 @@
#include <qmediaaudioprobecontrol.h> #include <qmediaaudioprobecontrol.h>
#include <QtCore/qmutex.h> #include <QtCore/qmutex.h>
#include <qaudiobuffer.h> #include <qaudiobuffer.h>
#include <qshareddata.h>
#include <private/qgstreamerbufferprobe_p.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QGstreamerAudioProbeControl : public QMediaAudioProbeControl class QGstreamerAudioProbeControl
: public QMediaAudioProbeControl
, public QGstreamerBufferProbe
, public QSharedData
{ {
Q_OBJECT Q_OBJECT
public: public:
explicit QGstreamerAudioProbeControl(QObject *parent); explicit QGstreamerAudioProbeControl(QObject *parent);
virtual ~QGstreamerAudioProbeControl(); virtual ~QGstreamerAudioProbeControl();
void bufferProbed(GstBuffer* buffer); protected:
void probeCaps(GstCaps *caps);
bool probeBuffer(GstBuffer *buffer);
private slots: private slots:
void bufferProbed(); void bufferProbed();
private: private:
QAudioBuffer m_pendingBuffer; QAudioBuffer m_pendingBuffer;
QAudioFormat m_format;
QMutex m_bufferMutex; QMutex m_bufferMutex;
}; };

View File

@@ -0,0 +1,86 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTREAMERBUFFERPROBE_H
#define QGSTREAMERBUFFERPROBE_H
#include <gst/gst.h>
#include <QtCore/qglobal.h>
QT_BEGIN_NAMESPACE
class QGstreamerBufferProbe
{
public:
enum Flags
{
ProbeCaps = 0x01,
ProbeBuffers = 0x02,
ProbeAll = ProbeCaps | ProbeBuffers
};
explicit QGstreamerBufferProbe(Flags flags = ProbeAll);
virtual ~QGstreamerBufferProbe();
void addProbeToPad(GstPad *pad, bool downstream = true);
void removeProbeFromPad(GstPad *pad);
protected:
virtual void probeCaps(GstCaps *caps);
virtual bool probeBuffer(GstBuffer *buffer);
private:
#if GST_CHECK_VERSION(1,0,0)
static GstPadProbeReturn capsProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
static GstPadProbeReturn bufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
int m_capsProbeId;
#else
static gboolean bufferProbe(GstElement *element, GstBuffer *buffer, gpointer user_data);
GstCaps *m_caps;
#endif
int m_bufferProbeId;
const Flags m_flags;
};
QT_END_NAMESPACE
#endif // QGSTREAMERAUDIOPROBECONTROL_H

View File

@@ -0,0 +1,102 @@
/****************************************************************************
**
** Copyright (C) 2014 Canonical Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTREAMERMIRTEXTURERENDERER_H
#define QGSTREAMERMIRTEXTURERENDERER_H
#include <qmediaplayer.h>
#include <qvideorenderercontrol.h>
#include <private/qvideosurfacegstsink_p.h>
#include <qabstractvideosurface.h>
#include "qgstreamervideorendererinterface_p.h"
QT_BEGIN_NAMESPACE
class QGstreamerMirTextureBuffer;
class QGstreamerPlayerSession;
class QGLContext;
class QOpenGLContext;
class QSurfaceFormat;
class QGstreamerMirTextureRenderer : public QVideoRendererControl, public QGstreamerVideoRendererInterface
{
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface)
public:
QGstreamerMirTextureRenderer(QObject *parent = 0, const QGstreamerPlayerSession *playerSession = 0);
virtual ~QGstreamerMirTextureRenderer();
QAbstractVideoSurface *surface() const;
void setSurface(QAbstractVideoSurface *surface);
void setPlayerSession(const QGstreamerPlayerSession *playerSession);
GstElement *videoSink();
void stopRenderer();
bool isReady() const { return m_surface != 0; }
signals:
void sinkChanged();
void readyChanged(bool);
void nativeSizeChanged();
private slots:
void handleFormatChange();
void updateNativeVideoSize();
void handleFocusWindowChanged(QWindow *window);
void renderFrame();
private:
QWindow *createOffscreenWindow(const QSurfaceFormat &format);
static void handleFrameReady(gpointer userData);
static GstPadProbeReturn padBufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer userData);
GstElement *m_videoSink;
QPointer<QAbstractVideoSurface> m_surface;
QPointer<QAbstractVideoSurface> m_glSurface;
QGLContext *m_context;
QOpenGLContext *m_glContext;
unsigned int m_textureId;
QWindow *m_offscreenSurface;
QGstreamerPlayerSession *m_playerSession;
QGstreamerMirTextureBuffer *m_textureBuffer;
QSize m_nativeSize;
QMutex m_mutex;
};
QT_END_NAMESPACE
#endif // QGSTREAMERMIRTEXTURERENDRER_H

View File

@@ -35,20 +35,29 @@
#define QGSTREAMERVIDEOPROBECONTROL_H #define QGSTREAMERVIDEOPROBECONTROL_H
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h>
#include <qmediavideoprobecontrol.h> #include <qmediavideoprobecontrol.h>
#include <QtCore/qmutex.h> #include <QtCore/qmutex.h>
#include <qvideoframe.h> #include <qvideoframe.h>
#include <qvideosurfaceformat.h>
#include <private/qgstreamerbufferprobe_p.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QGstreamerVideoProbeControl : public QMediaVideoProbeControl class QGstreamerVideoProbeControl
: public QMediaVideoProbeControl
, public QGstreamerBufferProbe
, public QSharedData
{ {
Q_OBJECT Q_OBJECT
public: public:
explicit QGstreamerVideoProbeControl(QObject *parent); explicit QGstreamerVideoProbeControl(QObject *parent);
virtual ~QGstreamerVideoProbeControl(); virtual ~QGstreamerVideoProbeControl();
void bufferProbed(GstBuffer* buffer); void probeCaps(GstCaps *caps);
bool probeBuffer(GstBuffer *buffer);
void startFlushing(); void startFlushing();
void stopFlushing(); void stopFlushing();
@@ -56,10 +65,16 @@ private slots:
void frameProbed(); void frameProbed();
private: private:
bool m_flushing; QVideoSurfaceFormat m_format;
bool m_frameProbed; // true if at least one frame was probed
QVideoFrame m_pendingFrame; QVideoFrame m_pendingFrame;
QMutex m_frameMutex; QMutex m_frameMutex;
#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
#else
int m_bytesPerLine;
#endif
bool m_flushing;
bool m_frameProbed; // true if at least one frame was probed
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -38,6 +38,7 @@
#include "qgstreamervideorendererinterface_p.h" #include "qgstreamervideorendererinterface_p.h"
#include <private/qgstreamerbushelper_p.h> #include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
#include <QtGui/qcolor.h> #include <QtGui/qcolor.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -45,7 +46,8 @@ class QAbstractVideoSurface;
class QGstreamerVideoWindow : public QVideoWindowControl, class QGstreamerVideoWindow : public QVideoWindowControl,
public QGstreamerVideoRendererInterface, public QGstreamerVideoRendererInterface,
public QGstreamerSyncMessageFilter public QGstreamerSyncMessageFilter,
private QGstreamerBufferProbe
{ {
Q_OBJECT Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter) Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter)
@@ -101,10 +103,10 @@ signals:
void readyChanged(bool); void readyChanged(bool);
private slots: private slots:
void updateNativeVideoSize(); void updateNativeVideoSize(const QSize &size);
private: private:
static void padBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data); void probeCaps(GstCaps *caps);
GstElement *m_videoSink; GstElement *m_videoSink;
WId m_windowId; WId m_windowId;
@@ -113,7 +115,6 @@ private:
bool m_fullScreen; bool m_fullScreen;
QSize m_nativeSize; QSize m_nativeSize;
mutable QColor m_colorKey; mutable QColor m_colorKey;
int m_bufferProbeId;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -49,14 +49,32 @@
#include <QtCore/qset.h> #include <QtCore/qset.h>
#include <QtCore/qvector.h> #include <QtCore/qvector.h>
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h>
#include <qaudioformat.h> #include <qaudioformat.h>
#include <qcamera.h> #include <qcamera.h>
#include <qabstractvideobuffer.h>
#include <qvideoframe.h>
#include <QDebug>
#if GST_CHECK_VERSION(1,0,0)
# define QT_GSTREAMER_PLAYBIN_ELEMENT_NAME "playbin"
# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin"
# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "videoconvert"
# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw"
#else
# define QT_GSTREAMER_PLAYBIN_ELEMENT_NAME "playbin2"
# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin2"
# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "ffmpegcolorspace"
# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw-int"
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QSize; class QSize;
class QVariant; class QVariant;
class QByteArray; class QByteArray;
class QImage;
class QVideoSurfaceFormat;
namespace QGstUtils { namespace QGstUtils {
struct CameraInfo struct CameraInfo
@@ -73,8 +91,12 @@ namespace QGstUtils {
QSize capsResolution(const GstCaps *caps); QSize capsResolution(const GstCaps *caps);
QSize capsCorrectedResolution(const GstCaps *caps); QSize capsCorrectedResolution(const GstCaps *caps);
QAudioFormat audioFormatForCaps(const GstCaps *caps); QAudioFormat audioFormatForCaps(const GstCaps *caps);
#if GST_CHECK_VERSION(1,0,0)
QAudioFormat audioFormatForSample(GstSample *sample);
#else
QAudioFormat audioFormatForBuffer(GstBuffer *buffer); QAudioFormat audioFormatForBuffer(GstBuffer *buffer);
GstCaps *capsForAudioFormat(QAudioFormat format); #endif
GstCaps *capsForAudioFormat(const QAudioFormat &format);
void initializeGst(); void initializeGst();
QMultimedia::SupportEstimate hasSupport(const QString &mimeType, QMultimedia::SupportEstimate hasSupport(const QString &mimeType,
const QStringList &codecs, const QStringList &codecs,
@@ -86,9 +108,40 @@ namespace QGstUtils {
QCamera::Position cameraPosition(const QString &device, GstElementFactory * factory = 0); QCamera::Position cameraPosition(const QString &device, GstElementFactory * factory = 0);
int cameraOrientation(const QString &device, GstElementFactory * factory = 0); int cameraOrientation(const QString &device, GstElementFactory * factory = 0);
QByteArray cameraDriver(const QString &device, GstElementFactory * factory = 0); QByteArray cameraDriver(const QString &device, GstElementFactory * factory = 0);
QSet<QString> supportedMimeTypes(bool (*isValidFactory)(GstElementFactory *factory));
#if GST_CHECK_VERSION(1,0,0)
QImage bufferToImage(GstBuffer *buffer, const GstVideoInfo &info);
QVideoSurfaceFormat formatForCaps(
GstCaps *caps,
GstVideoInfo *info,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
#else
QImage bufferToImage(GstBuffer *buffer);
QVideoSurfaceFormat formatForCaps(
GstCaps *caps,
int *bytesPerLine = 0,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
#endif
GstCaps *capsForFormats(const QList<QVideoFrame::PixelFormat> &formats);
void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
void setMetaData(GstElement *element, const QMap<QByteArray, QVariant> &data);
void setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data);
GstCaps *videoFilterCaps();
} }
void qt_gst_object_ref_sink(gpointer object); void qt_gst_object_ref_sink(gpointer object);
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad);
GstStructure *qt_gst_structure_new_empty(const char *name);
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur);
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur);
QDebug operator <<(QDebug debug, GstCaps *caps);
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -49,26 +49,47 @@
#include <QtCore/qvariant.h> #include <QtCore/qvariant.h>
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
#if GST_CHECK_VERSION(1,0,0)
class QGstVideoBuffer : public QAbstractPlanarVideoBuffer
{
public:
QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info);
QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info,
HandleType handleType, const QVariant &handle);
#else
class QGstVideoBuffer : public QAbstractVideoBuffer class QGstVideoBuffer : public QAbstractVideoBuffer
{ {
public: public:
QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine); QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine);
QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine, QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
HandleType handleType, const QVariant &handle); HandleType handleType, const QVariant &handle);
#endif
~QGstVideoBuffer(); ~QGstVideoBuffer();
MapMode mapMode() const; MapMode mapMode() const;
#if GST_CHECK_VERSION(1,0,0)
int map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4]);
#else
uchar *map(MapMode mode, int *numBytes, int *bytesPerLine); uchar *map(MapMode mode, int *numBytes, int *bytesPerLine);
#endif
void unmap(); void unmap();
QVariant handle() const { return m_handle; } QVariant handle() const { return m_handle; }
private: private:
GstBuffer *m_buffer; #if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
GstVideoFrame m_frame;
#else
int m_bytesPerLine; int m_bytesPerLine;
#endif
GstBuffer *m_buffer;
MapMode m_mode; MapMode m_mode;
QVariant m_handle; QVariant m_handle;
}; };

View File

@@ -0,0 +1,111 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTVIDEORENDERERPLUGIN_P_H
#define QGSTVIDEORENDERERPLUGIN_P_H
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//
#include <qabstractvideobuffer.h>
#include <qvideosurfaceformat.h>
#include <QtCore/qobject.h>
#include <QtCore/qplugin.h>
#include <gst/gst.h>
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
const QLatin1String QGstVideoRendererPluginKey("gstvideorenderer");
class QGstVideoRenderer
{
public:
virtual ~QGstVideoRenderer() {}
virtual GstCaps *getCaps(QAbstractVideoSurface *surface) = 0;
virtual bool start(QAbstractVideoSurface *surface, GstCaps *caps) = 0;
virtual void stop(QAbstractVideoSurface *surface) = 0; // surface may be null if unexpectedly deleted.
virtual bool proposeAllocation(GstQuery *query) = 0; // may be called from a thread.
virtual bool present(QAbstractVideoSurface *surface, GstBuffer *buffer) = 0;
virtual void flush(QAbstractVideoSurface *surface) = 0; // surface may be null if unexpectedly deleted.
};
/*
Abstract interface for video buffers allocation.
*/
class QGstVideoRendererInterface
{
public:
virtual ~QGstVideoRendererInterface() {}
virtual QGstVideoRenderer *createRenderer() = 0;
};
#define QGstVideoRendererInterface_iid "org.qt-project.qt.gstvideorenderer/5.4"
Q_DECLARE_INTERFACE(QGstVideoRendererInterface, QGstVideoRendererInterface_iid)
class QGstVideoRendererPlugin : public QObject, public QGstVideoRendererInterface
{
Q_OBJECT
Q_INTERFACES(QGstVideoRendererInterface)
public:
explicit QGstVideoRendererPlugin(QObject *parent = 0);
virtual ~QGstVideoRendererPlugin() {}
virtual QGstVideoRenderer *createRenderer() = 0;
};
QT_END_NAMESPACE
#endif

View File

@@ -0,0 +1,183 @@
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTVIDEORENDERERSINK_P_H
#define QGSTVIDEORENDERERSINK_P_H
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
#include <QtCore/qlist.h>
#include <QtCore/qmutex.h>
#include <QtCore/qqueue.h>
#include <QtCore/qpointer.h>
#include <QtCore/qwaitcondition.h>
#include <qvideosurfaceformat.h>
#include <qvideoframe.h>
#include <qabstractvideobuffer.h>
#include "qgstvideorendererplugin_p.h"
#include "qgstvideorendererplugin_p.h"
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
class QGstDefaultVideoRenderer : public QGstVideoRenderer
{
public:
QGstDefaultVideoRenderer();
~QGstDefaultVideoRenderer();
GstCaps *getCaps(QAbstractVideoSurface *surface);
bool start(QAbstractVideoSurface *surface, GstCaps *caps);
void stop(QAbstractVideoSurface *surface);
bool proposeAllocation(GstQuery *query);
bool present(QAbstractVideoSurface *surface, GstBuffer *buffer);
void flush(QAbstractVideoSurface *surface);
private:
QVideoSurfaceFormat m_format;
GstVideoInfo m_videoInfo;
bool m_flushed;
};
// Mediates between the GStreamer sink callbacks and a QAbstractVideoSurface.
// The mutex/wait-condition members and waitForAsyncEvent() indicate that
// render/start/stop may be called from a GStreamer streaming thread and
// synchronised with the surface's (GUI) thread — TODO confirm threading
// contract against the implementation.
class QVideoSurfaceGstDelegate : public QObject
{
Q_OBJECT
public:
QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface);
~QVideoSurfaceGstDelegate();

// Caps supported by the surface (m_surfaceCaps).
GstCaps *caps();

// Start/stop the surface for the given negotiated caps.
bool start(GstCaps *caps);
void stop();

// Forwards a GStreamer ALLOCATION query to the active renderer.
bool proposeAllocation(GstQuery *query);

// Discards any pending frame.
void flush();

// Renders 'buffer'; 'show' presumably distinguishes preroll-only buffers
// from buffers that must be displayed — confirm against callers.
GstFlowReturn render(GstBuffer *buffer, bool show);

// QObject::event override; used together with notify()/handleEvent() to
// process requests posted across threads.
bool event(QEvent *event);

// GObject notify handler for the sink's "show-preroll-frame" property change.
static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);

private slots:
// Processes one queued request; 'locker' holds m_mutex on entry.
bool handleEvent(QMutexLocker *locker);
// Re-queries the surface for its supported formats (updates m_surfaceCaps).
void updateSupportedFormats();

private:
void notify();
// Blocks on 'condition' until the GUI thread services the request or
// 'time' (ms) elapses; 'locker' is released while waiting.
bool waitForAsyncEvent(QMutexLocker *locker, QWaitCondition *condition, unsigned long time);

QPointer<QAbstractVideoSurface> m_surface;   // guarded: surface may be deleted externally

QMutex m_mutex;                    // protects all state below
QWaitCondition m_setupCondition;   // signalled when start/stop/flush completes
QWaitCondition m_renderCondition;  // signalled when a render request completes
GstFlowReturn m_renderReturn;      // result handed back to the streaming thread
QList<QGstVideoRenderer *> m_renderers;  // available renderer back-ends
QGstVideoRenderer *m_renderer;           // renderer selected for current caps
QGstVideoRenderer *m_activeRenderer;     // renderer currently started on the surface

GstCaps *m_surfaceCaps;   // caps advertised by the surface
GstCaps *m_startCaps;     // caps of a pending start request
GstBuffer *m_lastBuffer;  // last buffer rendered (kept for re-display/flush)

// Flags for the cross-thread request protocol:
bool m_notified;  // an event has been posted to the GUI thread
bool m_stop;      // stop requested
bool m_render;    // render requested
bool m_flush;     // flush requested
};
// GObject-style instance struct for the custom video sink element.
// Instances are created through the GObject type system (get_type /
// instance_init), not with C++ new; 'parent' must remain the first member
// so the struct can be cast to its GStreamer base types.
class QGstVideoRendererSink
{
public:
GstVideoSink parent;

// Creates a sink instance bound to 'surface' via a QVideoSurfaceGstDelegate.
static QGstVideoRendererSink *createSink(QAbstractVideoSurface *surface);

private:
// GObject type registration and lifecycle boilerplate.
static GType get_type();
static void class_init(gpointer g_class, gpointer class_data);
static void base_init(gpointer g_class);
static void instance_init(GTypeInstance *instance, gpointer g_class);

static void finalize(GObject *object);

static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);

// GstBaseSink virtual-function implementations (installed in class_init).
static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter);
static gboolean set_caps(GstBaseSink *sink, GstCaps *caps);

static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query);

static GstFlowReturn preroll(GstBaseSink *sink, GstBuffer *buffer);
static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);

private:
QVideoSurfaceGstDelegate *delegate;  // forwards sink callbacks to the surface
};
// GObject class struct paired with QGstVideoRendererSink; the base class
// struct must remain the first (and here only) member for GObject casts.
class QGstVideoRendererSinkClass
{
public:
GstVideoSinkClass parent_class;
};
QT_END_NAMESPACE
#endif

View File

@@ -45,6 +45,18 @@
// We mean it. // We mean it.
// //
#include <gst/gst.h>
#if GST_CHECK_VERSION(1,0,0)
#include "qgstvideorenderersink_p.h"
QT_BEGIN_NAMESPACE
typedef QGstVideoRendererSink QVideoSurfaceGstSink;
QT_END_NAMESPACE
#else
#include <gst/video/gstvideosink.h> #include <gst/video/gstvideosink.h>
#include <QtCore/qlist.h> #include <QtCore/qlist.h>
@@ -116,10 +128,6 @@ public:
GstVideoSink parent; GstVideoSink parent;
static QVideoSurfaceGstSink *createSink(QAbstractVideoSurface *surface); static QVideoSurfaceGstSink *createSink(QAbstractVideoSurface *surface);
static QVideoSurfaceFormat formatForCaps(GstCaps *caps,
int *bytesPerLine = 0,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
static void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
private: private:
static GType get_type(); static GType get_type();
@@ -150,7 +158,6 @@ private:
QVideoSurfaceFormat *lastSurfaceFormat; QVideoSurfaceFormat *lastSurfaceFormat;
}; };
class QVideoSurfaceGstSinkClass class QVideoSurfaceGstSinkClass
{ {
public: public:
@@ -160,3 +167,5 @@ public:
QT_END_NAMESPACE QT_END_NAMESPACE
#endif #endif
#endif

View File

@@ -4,6 +4,8 @@ QT = core-private network gui-private
MODULE_PLUGIN_TYPES = \ MODULE_PLUGIN_TYPES = \
mediaservice \ mediaservice \
audio \ audio \
video/bufferpool \
video/gstvideorenderer \
video/videonode \ video/videonode \
playlistformats playlistformats

View File

@@ -68,89 +68,16 @@ QMultimedia::SupportEstimate QGstreamerAudioDecoderServicePlugin::hasSupport(con
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet); return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
} }
static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER
| GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO);
}
void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
{ {
//enumerate supported mime types m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isDecoderOrDemuxer);
gst_init(NULL, NULL);
GList *plugins, *orig_plugins;
orig_plugins = plugins = gst_default_registry_get_plugin_list ();
while (plugins) {
GList *features, *orig_features;
GstPlugin *plugin = (GstPlugin *) (plugins->data);
plugins = g_list_next (plugins);
if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
continue;
orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
plugin->desc.name);
while (features) {
if (!G_UNLIKELY(features->data == NULL)) {
GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
if (GST_IS_ELEMENT_FACTORY (feature)) {
GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
if (factory
&& factory->numpadtemplates > 0
&& (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
|| qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
const GList *pads = factory->staticpadtemplates;
while (pads) {
GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
pads = g_list_next (pads);
if (padtemplate->direction != GST_PAD_SINK)
continue;
if (padtemplate->static_caps.string) {
GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
for (guint i = 0; i < gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
m_supportedMimeTypeSet.insert(nameLowcase);
if (nameLowcase.contains("mpeg")) {
//Because mpeg version number is only included in the detail
//description, it is necessary to manually extract this information
//in order to match the mime type of mpeg4.
const GValue *value = gst_structure_get_value(structure, "mpegversion");
if (value) {
gchar *str = gst_value_serialize (value);
QString versions(str);
QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
foreach (const QString &e, elements)
m_supportedMimeTypeSet.insert(nameLowcase + e);
g_free (str);
}
}
}
}
gst_caps_unref(caps);
}
}
gst_object_unref (factory);
}
} else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
QString name(gst_plugin_feature_get_name(feature));
if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
m_supportedMimeTypeSet.insert(name.toLower());
}
}
features = g_list_next (features);
}
gst_plugin_feature_list_free (orig_features);
}
gst_plugin_list_free (orig_plugins);
#if defined QT_SUPPORTEDMIMETYPES_DEBUG
QStringList list = m_supportedMimeTypeSet.toList();
list.sort();
if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
foreach (const QString &type, list)
qDebug() << type;
}
#endif
} }
QStringList QGstreamerAudioDecoderServicePlugin::supportedMimeTypes() const QStringList QGstreamerAudioDecoderServicePlugin::supportedMimeTypes() const

View File

@@ -85,7 +85,7 @@ QGstreamerAudioDecoderSession::QGstreamerAudioDecoderSession(QObject *parent)
m_durationQueries(0) m_durationQueries(0)
{ {
// Create pipeline here // Create pipeline here
m_playbin = gst_element_factory_make("playbin2", NULL); m_playbin = gst_element_factory_make(QT_GSTREAMER_PLAYBIN_ELEMENT_NAME, NULL);
if (m_playbin != 0) { if (m_playbin != 0) {
// Sort out messages // Sort out messages
@@ -446,21 +446,40 @@ QAudioBuffer QGstreamerAudioDecoderSession::read()
if (buffersAvailable == 1) if (buffersAvailable == 1)
emit bufferAvailableChanged(false); emit bufferAvailableChanged(false);
GstBuffer *buffer = gst_app_sink_pull_buffer(m_appSink); const char* bufferData = 0;
int bufferSize = 0;
#if GST_CHECK_VERSION(1,0,0)
GstSample *sample = gst_app_sink_pull_sample(m_appSink);
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo mapInfo;
gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
bufferData = (const char*)mapInfo.data;
bufferSize = mapInfo.size;
QAudioFormat format = QGstUtils::audioFormatForSample(sample);
#else
GstBuffer *buffer = gst_app_sink_pull_buffer(m_appSink);
bufferData = (const char*)buffer->data;
bufferSize = buffer->size;
QAudioFormat format = QGstUtils::audioFormatForBuffer(buffer); QAudioFormat format = QGstUtils::audioFormatForBuffer(buffer);
#endif
if (format.isValid()) { if (format.isValid()) {
// XXX At the moment we have to copy data from GstBuffer into QAudioBuffer. // XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
// We could improve performance by implementing QAbstractAudioBuffer for GstBuffer. // We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
qint64 position = getPositionFromBuffer(buffer); qint64 position = getPositionFromBuffer(buffer);
audioBuffer = QAudioBuffer(QByteArray((const char*)buffer->data, buffer->size), format, position); audioBuffer = QAudioBuffer(QByteArray((const char*)bufferData, bufferSize), format, position);
position /= 1000; // convert to milliseconds position /= 1000; // convert to milliseconds
if (position != m_position) { if (position != m_position) {
m_position = position; m_position = position;
emit positionChanged(m_position); emit positionChanged(m_position);
} }
} }
#if GST_CHECK_VERSION(1,0,0)
gst_sample_unref(sample);
#else
gst_buffer_unref(buffer); gst_buffer_unref(buffer);
#endif
} }
return audioBuffer; return audioBuffer;
@@ -488,7 +507,7 @@ void QGstreamerAudioDecoderSession::processInvalidMedia(QAudioDecoder::Error err
emit error(int(errorCode), errorString); emit error(int(errorCode), errorString);
} }
GstFlowReturn QGstreamerAudioDecoderSession::new_buffer(GstAppSink *, gpointer user_data) GstFlowReturn QGstreamerAudioDecoderSession::new_sample(GstAppSink *, gpointer user_data)
{ {
// "Note that the preroll buffer will also be returned as the first buffer when calling gst_app_sink_pull_buffer()." // "Note that the preroll buffer will also be returned as the first buffer when calling gst_app_sink_pull_buffer()."
QGstreamerAudioDecoderSession *session = reinterpret_cast<QGstreamerAudioDecoderSession*>(user_data); QGstreamerAudioDecoderSession *session = reinterpret_cast<QGstreamerAudioDecoderSession*>(user_data);
@@ -531,7 +550,11 @@ void QGstreamerAudioDecoderSession::addAppSink()
GstAppSinkCallbacks callbacks; GstAppSinkCallbacks callbacks;
memset(&callbacks, 0, sizeof(callbacks)); memset(&callbacks, 0, sizeof(callbacks));
callbacks.new_buffer = &new_buffer; #if GST_CHECK_VERSION(1,0,0)
callbacks.new_sample = &new_sample;
#else
callbacks.new_buffer = &new_sample;
#endif
gst_app_sink_set_callbacks(m_appSink, &callbacks, this, NULL); gst_app_sink_set_callbacks(m_appSink, &callbacks, this, NULL);
gst_app_sink_set_max_buffers(m_appSink, MAX_BUFFERS_IN_QUEUE); gst_app_sink_set_max_buffers(m_appSink, MAX_BUFFERS_IN_QUEUE);
gst_base_sink_set_sync(GST_BASE_SINK(m_appSink), FALSE); gst_base_sink_set_sync(GST_BASE_SINK(m_appSink), FALSE);
@@ -553,11 +576,10 @@ void QGstreamerAudioDecoderSession::removeAppSink()
void QGstreamerAudioDecoderSession::updateDuration() void QGstreamerAudioDecoderSession::updateDuration()
{ {
GstFormat format = GST_FORMAT_TIME;
gint64 gstDuration = 0; gint64 gstDuration = 0;
int duration = -1; int duration = -1;
if (m_playbin && gst_element_query_duration(m_playbin, &format, &gstDuration)) if (m_playbin && qt_gst_element_query_duration(m_playbin, GST_FORMAT_TIME, &gstDuration))
duration = gstDuration / 1000000; duration = gstDuration / 1000000;
if (m_duration != duration) { if (m_duration != duration) {

View File

@@ -92,7 +92,7 @@ public:
qint64 position() const; qint64 position() const;
qint64 duration() const; qint64 duration() const;
static GstFlowReturn new_buffer(GstAppSink *sink, gpointer user_data); static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data);
signals: signals:
void stateChanged(QAudioDecoder::State newState); void stateChanged(QAudioDecoder::State newState);

View File

@@ -79,7 +79,7 @@ config_gstreamer_photography {
$$PWD/camerabinlocks.cpp \ $$PWD/camerabinlocks.cpp \
$$PWD/camerabinzoom.cpp $$PWD/camerabinzoom.cpp
LIBS += -lgstphotography-0.10 LIBS += -lgstphotography-$$GST_VERSION
DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API
} }

View File

@@ -96,7 +96,7 @@ GstEncodingContainerProfile *CameraBinContainer::createProfile()
GstCaps *caps; GstCaps *caps;
if (m_actualFormat.isEmpty()) { if (m_actualFormat.isEmpty()) {
caps = gst_caps_new_any(); return 0;
} else { } else {
QString format = m_actualFormat; QString format = m_actualFormat;
QStringList supportedFormats = m_supportedContainers.supportedCodecs(); QStringList supportedFormats = m_supportedContainers.supportedCodecs();

View File

@@ -95,11 +95,6 @@ void CameraBinControl::setCaptureMode(QCamera::CaptureModes mode)
captureMode() == QCamera::CaptureStillImage ? captureMode() == QCamera::CaptureStillImage ?
CamerabinResourcePolicy::ImageCaptureResources : CamerabinResourcePolicy::ImageCaptureResources :
CamerabinResourcePolicy::VideoCaptureResources); CamerabinResourcePolicy::VideoCaptureResources);
#if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 23))
//due to bug in v4l2src, it's necessary to reload camera on video caps changes
//https://bugzilla.gnome.org/show_bug.cgi?id=649832
reloadLater();
#endif
} }
emit captureModeChanged(mode); emit captureModeChanged(mode);
} }
@@ -299,6 +294,8 @@ bool CameraBinControl::canChangeProperty(PropertyChangeType changeType, QCamera:
switch (changeType) { switch (changeType) {
case QCameraControl::CaptureMode: case QCameraControl::CaptureMode:
return status != QCamera::ActiveStatus;
break;
case QCameraControl::ImageEncodingSettings: case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings: case QCameraControl::VideoEncodingSettings:
case QCameraControl::Viewfinder: case QCameraControl::Viewfinder:

View File

@@ -37,6 +37,10 @@
#include <QDebug> #include <QDebug>
#if !GST_CHECK_VERSION(1,0,0)
typedef GstSceneMode GstPhotographySceneMode;
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
CameraBinExposure::CameraBinExposure(CameraBinSession *session) CameraBinExposure::CameraBinExposure(CameraBinSession *session)
@@ -119,7 +123,7 @@ QVariant CameraBinExposure::actualValue(ExposureParameter parameter) const
} }
case QCameraExposureControl::ExposureMode: case QCameraExposureControl::ExposureMode:
{ {
GstSceneMode sceneMode; GstPhotographySceneMode sceneMode;
gst_photography_get_scene_mode(m_session->photography(), &sceneMode); gst_photography_get_scene_mode(m_session->photography(), &sceneMode);
switch (sceneMode) { switch (sceneMode) {
@@ -167,7 +171,7 @@ bool CameraBinExposure::setValue(ExposureParameter parameter, const QVariant& va
case QCameraExposureControl::ExposureMode: case QCameraExposureControl::ExposureMode:
{ {
QCameraExposure::ExposureMode mode = QCameraExposure::ExposureMode(value.toInt()); QCameraExposure::ExposureMode mode = QCameraExposure::ExposureMode(value.toInt());
GstSceneMode sceneMode; GstPhotographySceneMode sceneMode;
gst_photography_get_scene_mode(m_session->photography(), &sceneMode); gst_photography_get_scene_mode(m_session->photography(), &sceneMode);
switch (mode) { switch (mode) {

View File

@@ -37,6 +37,10 @@
#include <QDebug> #include <QDebug>
#if !GST_CHECK_VERSION(1,0,0)
typedef GstFlashMode GstPhotographyFlashMode;
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
CameraBinFlash::CameraBinFlash(CameraBinSession *session) CameraBinFlash::CameraBinFlash(CameraBinSession *session)
@@ -51,7 +55,7 @@ CameraBinFlash::~CameraBinFlash()
QCameraExposure::FlashModes CameraBinFlash::flashMode() const QCameraExposure::FlashModes CameraBinFlash::flashMode() const
{ {
GstFlashMode flashMode; GstPhotographyFlashMode flashMode;
gst_photography_get_flash_mode(m_session->photography(), &flashMode); gst_photography_get_flash_mode(m_session->photography(), &flashMode);
QCameraExposure::FlashModes modes; QCameraExposure::FlashModes modes;
@@ -70,7 +74,7 @@ QCameraExposure::FlashModes CameraBinFlash::flashMode() const
void CameraBinFlash::setFlashMode(QCameraExposure::FlashModes mode) void CameraBinFlash::setFlashMode(QCameraExposure::FlashModes mode)
{ {
GstFlashMode flashMode; GstPhotographyFlashMode flashMode;
gst_photography_get_flash_mode(m_session->photography(), &flashMode); gst_photography_get_flash_mode(m_session->photography(), &flashMode);
if (mode.testFlag(QCameraExposure::FlashAuto)) flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO; if (mode.testFlag(QCameraExposure::FlashAuto)) flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO;

View File

@@ -39,6 +39,12 @@
#include <QDebug> #include <QDebug>
#include <QtCore/qmetaobject.h> #include <QtCore/qmetaobject.h>
#include <private/qgstutils_p.h>
#if !GST_CHECK_VERSION(1,0,0)
typedef GstFocusMode GstPhotographyFocusMode;
#endif
//#define CAMERABIN_DEBUG 1 //#define CAMERABIN_DEBUG 1
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -73,7 +79,7 @@ QCameraFocus::FocusModes CameraBinFocus::focusMode() const
void CameraBinFocus::setFocusMode(QCameraFocus::FocusModes mode) void CameraBinFocus::setFocusMode(QCameraFocus::FocusModes mode)
{ {
GstFocusMode photographyMode; GstPhotographyFocusMode photographyMode;
switch (mode) { switch (mode) {
case QCameraFocus::AutoFocus: case QCameraFocus::AutoFocus:
@@ -181,9 +187,10 @@ QCameraFocusZoneList CameraBinFocus::focusZones() const
void CameraBinFocus::handleFocusMessage(GstMessage *gm) void CameraBinFocus::handleFocusMessage(GstMessage *gm)
{ {
//it's a sync message, so it's called from non main thread //it's a sync message, so it's called from non main thread
if (gst_structure_has_name(gm->structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) { const GstStructure *structure = gst_message_get_structure(gm);
if (gst_structure_has_name(structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) {
gint status = GST_PHOTOGRAPHY_FOCUS_STATUS_NONE; gint status = GST_PHOTOGRAPHY_FOCUS_STATUS_NONE;
gst_structure_get_int (gm->structure, "status", &status); gst_structure_get_int (structure, "status", &status);
QCamera::LockStatus focusStatus = m_focusStatus; QCamera::LockStatus focusStatus = m_focusStatus;
QCamera::LockChangeReason reason = QCamera::UserRequest; QCamera::LockChangeReason reason = QCamera::UserRequest;
@@ -243,7 +250,7 @@ void CameraBinFocus::_q_handleCameraStateChange(QCamera::State state)
m_cameraState = state; m_cameraState = state;
if (state == QCamera::ActiveState) { if (state == QCamera::ActiveState) {
if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) { if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) {
if (GstCaps *caps = gst_pad_get_negotiated_caps(pad)) { if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
if (GstStructure *structure = gst_caps_get_structure(caps, 0)) { if (GstStructure *structure = gst_caps_get_structure(caps, 0)) {
int width = 0; int width = 0;
int height = 0; int height = 0;

View File

@@ -53,11 +53,13 @@ QT_BEGIN_NAMESPACE
CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session) CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session)
:QCameraImageCaptureControl(session) :QCameraImageCaptureControl(session)
, m_encoderProbe(this)
, m_muxerProbe(this)
, m_session(session) , m_session(session)
, m_ready(false)
, m_requestId(0)
, m_jpegEncoderElement(0) , m_jpegEncoderElement(0)
, m_metadataMuxerElement(0) , m_metadataMuxerElement(0)
, m_requestId(0)
, m_ready(false)
{ {
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState())); connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int))); connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int)));
@@ -108,11 +110,18 @@ void CameraBinImageCapture::updateState()
} }
} }
gboolean CameraBinImageCapture::metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *self) #if GST_CHECK_VERSION(1,0,0)
GstPadProbeReturn CameraBinImageCapture::encoderEventProbe(
GstPad *, GstPadProbeInfo *info, gpointer user_data)
{ {
Q_UNUSED(pad); GstEvent * const event = gst_pad_probe_info_get_event(info);
#else
if (GST_EVENT_TYPE(event) == GST_EVENT_TAG) { gboolean CameraBinImageCapture::encoderEventProbe(
GstElement *, GstEvent *event, gpointer user_data)
{
#endif
CameraBinImageCapture * const self = static_cast<CameraBinImageCapture *>(user_data);
if (event && GST_EVENT_TYPE(event) == GST_EVENT_TAG) {
GstTagList *gstTags; GstTagList *gstTags;
gst_event_parse_tag(event, &gstTags); gst_event_parse_tag(event, &gstTags);
QMap<QByteArray, QVariant> extendedTags = QGstUtils::gstTagListToMap(gstTags); QMap<QByteArray, QVariant> extendedTags = QGstUtils::gstTagListToMap(gstTags);
@@ -146,17 +155,31 @@ gboolean CameraBinImageCapture::metadataEventProbe(GstPad *pad, GstEvent *event,
} }
} }
} }
#if GST_CHECK_VERSION(1,0,0)
return true; return GST_PAD_PROBE_OK;
#else
return TRUE;
#endif
} }
gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *self) void CameraBinImageCapture::EncoderProbe::probeCaps(GstCaps *caps)
{ {
Q_UNUSED(pad); #if GST_CHECK_VERSION(1,0,0)
CameraBinSession *session = self->m_session; capture->m_bufferFormat = QGstUtils::formatForCaps(caps, &capture->m_videoInfo);
#else
int bytesPerLine = 0;
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
capture->m_bytesPerLine = bytesPerLine;
capture->m_bufferFormat = format;
#endif
}
bool CameraBinImageCapture::EncoderProbe::probeBuffer(GstBuffer *buffer)
{
CameraBinSession * const session = capture->m_session;
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE
qDebug() << "Uncompressed buffer probe" << gst_caps_to_string(GST_BUFFER_CAPS(buffer)); qDebug() << "Uncompressed buffer probe";
#endif #endif
QCameraImageCapture::CaptureDestinations destination = QCameraImageCapture::CaptureDestinations destination =
@@ -165,21 +188,23 @@ gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *
if (destination & QCameraImageCapture::CaptureToBuffer) { if (destination & QCameraImageCapture::CaptureToBuffer) {
if (format != QVideoFrame::Format_Jpeg) { if (format != QVideoFrame::Format_Jpeg) {
GstCaps *caps = GST_BUFFER_CAPS(buffer);
int bytesPerLine = -1;
QVideoSurfaceFormat format = QVideoSurfaceGstSink::formatForCaps(caps, &bytesPerLine);
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE
qDebug() << "imageAvailable(uncompressed):" << format; qDebug() << "imageAvailable(uncompressed):" << format;
#endif #endif
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, bytesPerLine); #if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_videoInfo);
#else
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_bytesPerLine);
#endif
QVideoFrame frame(videoBuffer, QVideoFrame frame(
format.frameSize(), videoBuffer,
format.pixelFormat()); capture->m_bufferFormat.frameSize(),
capture->m_bufferFormat.pixelFormat());
QMetaObject::invokeMethod(self, "imageAvailable", QMetaObject::invokeMethod(capture, "imageAvailable",
Qt::QueuedConnection, Qt::QueuedConnection,
Q_ARG(int, self->m_requestId), Q_ARG(int, capture->m_requestId),
Q_ARG(QVideoFrame, frame)); Q_ARG(QVideoFrame, frame));
} }
} }
@@ -192,25 +217,40 @@ gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *
return keepBuffer; return keepBuffer;
} }
gboolean CameraBinImageCapture::jpegBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *self) void CameraBinImageCapture::MuxerProbe::probeCaps(GstCaps *caps)
{ {
Q_UNUSED(pad); capture->m_jpegResolution = QGstUtils::capsCorrectedResolution(caps);
CameraBinSession *session = self->m_session; }
#ifdef DEBUG_CAPTURE bool CameraBinImageCapture::MuxerProbe::probeBuffer(GstBuffer *buffer)
qDebug() << "Jpeg buffer probe" << gst_caps_to_string(GST_BUFFER_CAPS(buffer)); {
#endif CameraBinSession * const session = capture->m_session;
QCameraImageCapture::CaptureDestinations destination = QCameraImageCapture::CaptureDestinations destination =
session->captureDestinationControl()->captureDestination(); session->captureDestinationControl()->captureDestination();
if ((destination & QCameraImageCapture::CaptureToBuffer) && if ((destination & QCameraImageCapture::CaptureToBuffer) &&
session->captureBufferFormatControl()->bufferFormat() == QVideoFrame::Format_Jpeg) { session->captureBufferFormatControl()->bufferFormat() == QVideoFrame::Format_Jpeg) {
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer,
-1); //bytesPerLine is not available for jpegs
QSize resolution = QGstUtils::capsCorrectedResolution(GST_BUFFER_CAPS(buffer)); QSize resolution = capture->m_jpegResolution;
//if resolution is not presented in caps, try to find it from encoded jpeg data: //if resolution is not presented in caps, try to find it from encoded jpeg data:
#if GST_CHECK_VERSION(1,0,0)
GstMapInfo mapInfo;
if (resolution.isEmpty() && gst_buffer_map(buffer, &mapInfo, GST_MAP_READ)) {
QBuffer data;
data.setData(reinterpret_cast<const char*>(mapInfo.data), mapInfo.size);
QImageReader reader(&data, "JPEG");
resolution = reader.size();
gst_buffer_unmap(buffer, &mapInfo);
}
GstVideoInfo info;
gst_video_info_set_format(
&info, GST_VIDEO_FORMAT_ENCODED, resolution.width(), resolution.height());
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, info);
#else
if (resolution.isEmpty()) { if (resolution.isEmpty()) {
QBuffer data; QBuffer data;
data.setData(reinterpret_cast<const char*>(GST_BUFFER_DATA(buffer)), GST_BUFFER_SIZE(buffer)); data.setData(reinterpret_cast<const char*>(GST_BUFFER_DATA(buffer)), GST_BUFFER_SIZE(buffer));
@@ -218,20 +258,28 @@ gboolean CameraBinImageCapture::jpegBufferProbe(GstPad *pad, GstBuffer *buffer,
resolution = reader.size(); resolution = reader.size();
} }
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer,
-1); //bytesPerLine is not available for jpegs
#endif
QVideoFrame frame(videoBuffer, QVideoFrame frame(videoBuffer,
resolution, resolution,
QVideoFrame::Format_Jpeg); QVideoFrame::Format_Jpeg);
QMetaObject::invokeMethod(capture, "imageAvailable",
QMetaObject::invokeMethod(self, "imageAvailable",
Qt::QueuedConnection, Qt::QueuedConnection,
Q_ARG(int, self->m_requestId), Q_ARG(int, capture->m_requestId),
Q_ARG(QVideoFrame, frame)); Q_ARG(QVideoFrame, frame));
} }
//drop the buffer if capture to file was disabled
return destination & QCameraImageCapture::CaptureToFile; // Theoretically we could drop the buffer here when don't want to capture to file but that
// prevents camerabin from recognizing that capture has been completed and returning
// to its idle state.
return true;
} }
bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message) bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
{ {
//Install metadata event and buffer probes //Install metadata event and buffer probes
@@ -252,9 +300,10 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
return false; return false;
QString elementName = QString::fromLatin1(gst_element_get_name(element)); QString elementName = QString::fromLatin1(gst_element_get_name(element));
#if !GST_CHECK_VERSION(1,0,0)
GstElementClass *elementClass = GST_ELEMENT_GET_CLASS(element); GstElementClass *elementClass = GST_ELEMENT_GET_CLASS(element);
QString elementLongName = elementClass->details.longname; QString elementLongName = elementClass->details.longname;
#endif
if (elementName.contains("jpegenc") && element != m_jpegEncoderElement) { if (elementName.contains("jpegenc") && element != m_jpegEncoderElement) {
m_jpegEncoderElement = element; m_jpegEncoderElement = element;
GstPad *sinkpad = gst_element_get_static_pad(element, "sink"); GstPad *sinkpad = gst_element_get_static_pad(element, "sink");
@@ -264,21 +313,23 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE
qDebug() << "install metadata probe"; qDebug() << "install metadata probe";
#endif #endif
gst_pad_add_event_probe(sinkpad, #if GST_CHECK_VERSION(1,0,0)
G_CALLBACK(CameraBinImageCapture::metadataEventProbe), gst_pad_add_probe(
this); sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoderEventProbe, this, NULL);
#else
gst_pad_add_event_probe(sinkpad, G_CALLBACK(encoderEventProbe), this);
#endif
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE
qDebug() << "install uncompressed buffer probe"; qDebug() << "install uncompressed buffer probe";
#endif #endif
gst_pad_add_buffer_probe(sinkpad, m_encoderProbe.addProbeToPad(sinkpad, true);
G_CALLBACK(CameraBinImageCapture::uncompressedBufferProbe),
this);
gst_object_unref(sinkpad); gst_object_unref(sinkpad);
} else if ((elementName.contains("jifmux") || } else if ((elementName.contains("jifmux")
elementName.startsWith("metadatamux") || #if !GST_CHECK_VERSION(1,0,0)
elementLongName == QLatin1String("JPEG stream muxer")) || elementLongName == QLatin1String("JPEG stream muxer")
#endif
|| elementName.startsWith("metadatamux"))
&& element != m_metadataMuxerElement) { && element != m_metadataMuxerElement) {
//Jpeg encoded buffer probe is added after jifmux/metadatamux //Jpeg encoded buffer probe is added after jifmux/metadatamux
//element to ensure the resulting jpeg buffer contains capture metadata //element to ensure the resulting jpeg buffer contains capture metadata
@@ -288,9 +339,8 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE
qDebug() << "install jpeg buffer probe"; qDebug() << "install jpeg buffer probe";
#endif #endif
gst_pad_add_buffer_probe(srcpad, m_muxerProbe.addProbeToPad(srcpad);
G_CALLBACK(CameraBinImageCapture::jpegBufferProbe),
this);
gst_object_unref(srcpad); gst_object_unref(srcpad);
} }
} }

View File

@@ -38,6 +38,14 @@
#include <qcameraimagecapturecontrol.h> #include <qcameraimagecapturecontrol.h>
#include "camerabinsession.h" #include "camerabinsession.h"
#include <qvideosurfaceformat.h>
#include <private/qgstreamerbufferprobe_p.h>
#if GST_CHECK_VERSION(1,0,0)
#include <gst/video/video.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class CameraBinImageCapture : public QCameraImageCaptureControl, public QGstreamerBusMessageFilter class CameraBinImageCapture : public QCameraImageCaptureControl, public QGstreamerBusMessageFilter
@@ -61,15 +69,47 @@ private slots:
void updateState(); void updateState();
private: private:
static gboolean metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *); #if GST_CHECK_VERSION(1,0,0)
static gboolean uncompressedBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *); static GstPadProbeReturn encoderEventProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data);
static gboolean jpegBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *); #else
static gboolean encoderEventProbe(GstElement *, GstEvent *event, gpointer user_data);
#endif
class EncoderProbe : public QGstreamerBufferProbe
{
public:
EncoderProbe(CameraBinImageCapture *capture) : capture(capture) {}
void probeCaps(GstCaps *caps);
bool probeBuffer(GstBuffer *buffer);
private:
CameraBinImageCapture * const capture;
} m_encoderProbe;
class MuxerProbe : public QGstreamerBufferProbe
{
public:
MuxerProbe(CameraBinImageCapture *capture) : capture(capture) {}
void probeCaps(GstCaps *caps);
bool probeBuffer(GstBuffer *buffer);
private:
CameraBinImageCapture * const capture;
} m_muxerProbe;
QVideoSurfaceFormat m_bufferFormat;
QSize m_jpegResolution;
CameraBinSession *m_session; CameraBinSession *m_session;
bool m_ready;
int m_requestId;
GstElement *m_jpegEncoderElement; GstElement *m_jpegEncoderElement;
GstElement *m_metadataMuxerElement; GstElement *m_metadataMuxerElement;
#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
#else
int m_bytesPerLine;
#endif
int m_requestId;
bool m_ready;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -49,7 +49,6 @@ CameraBinImageEncoder::~CameraBinImageEncoder()
QList<QSize> CameraBinImageEncoder::supportedResolutions(const QImageEncoderSettings &, bool *continuous) const QList<QSize> CameraBinImageEncoder::supportedResolutions(const QImageEncoderSettings &, bool *continuous) const
{ {
qDebug() << "CameraBinImageEncoder::supportedResolutions()";
if (continuous) if (continuous)
*continuous = false; *continuous = false;

View File

@@ -34,7 +34,11 @@
#include "camerabinimageprocessing.h" #include "camerabinimageprocessing.h"
#include "camerabinsession.h" #include "camerabinsession.h"
#include <gst/interfaces/colorbalance.h> #if GST_CHECK_VERSION(1,0,0)
# include <gst/video/colorbalance.h>
#else
# include <gst/interfaces/colorbalance.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -126,7 +130,7 @@ bool CameraBinImageProcessing::setColorBalanceValue(const QString& channel, qrea
QCameraImageProcessing::WhiteBalanceMode CameraBinImageProcessing::whiteBalanceMode() const QCameraImageProcessing::WhiteBalanceMode CameraBinImageProcessing::whiteBalanceMode() const
{ {
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
GstWhiteBalanceMode wbMode; GstPhotographyWhiteBalanceMode wbMode;
gst_photography_get_white_balance_mode(m_session->photography(), &wbMode); gst_photography_get_white_balance_mode(m_session->photography(), &wbMode);
return m_mappedWbValues[wbMode]; return m_mappedWbValues[wbMode];
#else #else

View File

@@ -41,7 +41,10 @@
#include <glib.h> #include <glib.h>
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
#include <gst/interfaces/photography.h> # include <gst/interfaces/photography.h>
# if !GST_CHECK_VERSION(1,0,0)
typedef GstWhiteBalanceMode GstPhotographyWhiteBalanceMode;
# endif
#endif #endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -73,7 +76,7 @@ private:
CameraBinSession *m_session; CameraBinSession *m_session;
QMap<QCameraImageProcessingControl::ProcessingParameter, int> m_values; QMap<QCameraImageProcessingControl::ProcessingParameter, int> m_values;
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
QMap<GstWhiteBalanceMode, QCameraImageProcessing::WhiteBalanceMode> m_mappedWbValues; QMap<GstPhotographyWhiteBalanceMode, QCameraImageProcessing::WhiteBalanceMode> m_mappedWbValues;
#endif #endif
}; };

View File

@@ -126,7 +126,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String));
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19) #if GST_CHECK_VERSION(0,10,19)
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String));
#endif #endif
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Conductor, 0, QVariant::String)); //metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Conductor, 0, QVariant::String));
@@ -153,8 +153,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Director, 0, QVariant::String)); //metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Director, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Writer, 0, QVariant::String)); //metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Writer, 0, QVariant::String));
#if GST_CHECK_VERSION(0,10,30)
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 30)
// Photos // Photos
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String)); metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String));

View File

@@ -110,9 +110,10 @@ void CameraBinRecorder::updateStatus()
m_state = QMediaRecorder::StoppedState; m_state = QMediaRecorder::StoppedState;
m_session->stopVideoRecording(); m_session->stopVideoRecording();
} }
m_status = m_session->pendingState() == QCamera::ActiveState ? m_status = m_session->pendingState() == QCamera::ActiveState
QMediaRecorder::LoadingStatus : && m_session->captureMode().testFlag(QCamera::CaptureVideo)
QMediaRecorder::UnloadedStatus; ? QMediaRecorder::LoadingStatus
: QMediaRecorder::UnloadedStatus;
} }
if (m_state != oldState) if (m_state != oldState)
@@ -161,8 +162,6 @@ void CameraBinRecorder::applySettings()
QVideoEncoderSettings videoSettings = videoEncoderControl->videoSettings(); QVideoEncoderSettings videoSettings = videoEncoderControl->videoSettings();
videoSettings.setCodec(candidate[1]); videoSettings.setCodec(candidate[1]);
if (videoSettings.resolution().isEmpty())
videoSettings.setResolution(640, 480);
videoEncoderControl->setActualVideoSettings(videoSettings); videoEncoderControl->setActualVideoSettings(videoSettings);
QAudioEncoderSettings audioSettings = audioEncoderControl->audioSettings(); QAudioEncoderSettings audioSettings = audioEncoderControl->audioSettings();

View File

@@ -56,11 +56,11 @@
#include "camerabincapturedestination.h" #include "camerabincapturedestination.h"
#include "camerabinviewfindersettings.h" #include "camerabinviewfindersettings.h"
#include <private/qgstreamerbushelper_p.h> #include <private/qgstreamerbushelper_p.h>
#include <private/qgstutils_p.h>
#include <private/qgstreameraudioinputselector_p.h> #include <private/qgstreameraudioinputselector_p.h>
#include <private/qgstreamervideoinputdevicecontrol_p.h> #include <private/qgstreamervideoinputdevicecontrol_p.h>
#if defined(HAVE_WIDGETS) #if defined(HAVE_WIDGETS)
#include <private/qgstreamervideowidget_p.h> #include <private/qgstreamervideowidget_p.h>
#endif #endif
@@ -121,7 +121,6 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#else #else
m_videoWindow = new QGstreamerVideoWindow(this); m_videoWindow = new QGstreamerVideoWindow(this);
#endif #endif
#if defined(HAVE_WIDGETS) #if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this); m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
#endif #endif
@@ -150,8 +149,6 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (!m_captureSession) if (!m_captureSession)
return 0; return 0;
//qDebug() << "Request control" << name;
if (!m_videoOutput) { if (!m_videoOutput) {
if (qstrcmp(name, QVideoRendererControl_iid) == 0) { if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
m_videoOutput = m_videoRenderer; m_videoOutput = m_videoRenderer;
@@ -249,7 +246,7 @@ void CameraBinService::releaseControl(QMediaControl *control)
bool CameraBinService::isCameraBinAvailable() bool CameraBinService::isCameraBinAvailable()
{ {
GstElementFactory *factory = gst_element_factory_find("camerabin2"); GstElementFactory *factory = gst_element_factory_find(QT_GSTREAMER_CAMERABIN_ELEMENT_NAME);
if (factory) { if (factory) {
gst_object_unref(GST_OBJECT(factory)); gst_object_unref(GST_OBJECT(factory));
return true; return true;

View File

@@ -140,8 +140,8 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
{ {
if (m_sourceFactory) if (m_sourceFactory)
gst_object_ref(GST_OBJECT(m_sourceFactory)); gst_object_ref(GST_OBJECT(m_sourceFactory));
m_camerabin = gst_element_factory_make(QT_GSTREAMER_CAMERABIN_ELEMENT_NAME, "camerabin");
m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this); g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
g_signal_connect(G_OBJECT(m_camerabin), "element-added", G_CALLBACK(elementAdded), this); g_signal_connect(G_OBJECT(m_camerabin), "element-added", G_CALLBACK(elementAdded), this);
g_signal_connect(G_OBJECT(m_camerabin), "element-removed", G_CALLBACK(elementRemoved), this); g_signal_connect(G_OBJECT(m_camerabin), "element-removed", G_CALLBACK(elementRemoved), this);
@@ -178,7 +178,15 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
//post image preview in RGB format //post image preview in RGB format
g_object_set(G_OBJECT(m_camerabin), POST_PREVIEWS_PROPERTY, TRUE, NULL); g_object_set(G_OBJECT(m_camerabin), POST_PREVIEWS_PROPERTY, TRUE, NULL);
#if GST_CHECK_VERSION(1,0,0)
GstCaps *previewCaps = gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "RGBx",
NULL);
#else
GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb"); GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb");
#endif
g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL); g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps); gst_caps_unref(previewCaps);
} }
@@ -243,6 +251,7 @@ bool CameraBinSession::setupCameraBin()
qWarning() << "Staring camera without viewfinder available"; qWarning() << "Staring camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL); m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
} }
g_object_set(G_OBJECT(m_viewfinderElement), "sync", FALSE, NULL);
qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement)); qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL); gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL); g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
@@ -251,61 +260,27 @@ bool CameraBinSession::setupCameraBin()
return true; return true;
} }
static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int> &rate = qMakePair<int,int>(0,0)) static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0)
{ {
if (resolution.isEmpty()) GstCaps *caps = QGstUtils::videoFilterCaps();
return gst_caps_new_any();
GstCaps *caps = 0; if (!resolution.isEmpty()) {
if (rate.second > 0) { gst_caps_set_simple(
caps = gst_caps_new_full(gst_structure_new("video/x-raw-yuv", caps,
"width", G_TYPE_INT, resolution.width(), "width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(), "height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
NULL), }
gst_structure_new("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(), if (frameRate > 0.0) {
"height", G_TYPE_INT, resolution.height(), gint numerator;
"framerate", GST_TYPE_FRACTION, rate.first, rate.second, gint denominator;
NULL), gst_util_double_to_fraction(frameRate, &numerator, &denominator);
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(), gst_caps_set_simple(
"height", G_TYPE_INT, resolution.height(), caps,
"framerate", GST_TYPE_FRACTION, rate.first, rate.second, "framerate", GST_TYPE_FRACTION, numerator, denominator,
NULL), NULL);
gst_structure_new("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
NULL);
} else {
caps = gst_caps_new_full (gst_structure_new ("video/x-raw-yuv",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
NULL);
} }
return caps; return caps;
@@ -314,40 +289,40 @@ static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int>
void CameraBinSession::setupCaptureResolution() void CameraBinSession::setupCaptureResolution()
{ {
QSize resolution = m_imageEncodeControl->imageSettings().resolution(); QSize resolution = m_imageEncodeControl->imageSettings().resolution();
if (!resolution.isEmpty()) { {
GstCaps *caps = resolutionToCaps(resolution); GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << gst_caps_to_string(caps); qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << caps;
#endif #endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL); g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps); if (caps)
} else { gst_caps_unref(caps);
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, NULL, NULL);
} }
const QSize viewfinderResolution = m_viewfinderSettingsControl->resolution();
resolution = m_videoEncodeControl->actualVideoSettings().resolution(); resolution = m_videoEncodeControl->actualVideoSettings().resolution();
//qreal framerate = m_videoEncodeControl->videoSettings().frameRate(); qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
if (!resolution.isEmpty()) { {
GstCaps *caps = resolutionToCaps(resolution /*, framerate*/); //convert to rational GstCaps *caps = resolutionToCaps(
!resolution.isEmpty() ? resolution : viewfinderResolution, framerate);
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << gst_caps_to_string(caps); qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << caps;
#endif #endif
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL); g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps); if (caps)
} else { gst_caps_unref(caps);
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, NULL, NULL);
} }
resolution = m_viewfinderSettingsControl->resolution(); if (!viewfinderResolution.isEmpty())
if (!resolution.isEmpty()) { resolution = viewfinderResolution;
{
GstCaps *caps = resolutionToCaps(resolution); GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << gst_caps_to_string(caps); qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << caps;
#endif #endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL); g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps); if (caps)
} else { gst_caps_unref(caps);
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, NULL, NULL);
} }
if (m_videoEncoder) if (m_videoEncoder)
@@ -363,13 +338,17 @@ void CameraBinSession::setAudioCaptureCaps()
if (sampleRate == -1 && channelCount == -1) if (sampleRate == -1 && channelCount == -1)
return; return;
#if GST_CHECK_VERSION(1,0,0)
GstStructure *structure = gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME);
#else
GstStructure *structure = gst_structure_new( GstStructure *structure = gst_structure_new(
"audio/x-raw-int", QT_GSTREAMER_RAW_AUDIO_MIME,
"endianness", G_TYPE_INT, 1234, "endianness", G_TYPE_INT, 1234,
"signed", G_TYPE_BOOLEAN, TRUE, "signed", G_TYPE_BOOLEAN, TRUE,
"width", G_TYPE_INT, 16, "width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
NULL); NULL);
#endif
if (sampleRate != -1) if (sampleRate != -1)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL); gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1) if (channelCount != -1)
@@ -760,7 +739,7 @@ qint64 CameraBinSession::duration() const
if (fileSink) { if (fileSink) {
GstFormat format = GST_FORMAT_TIME; GstFormat format = GST_FORMAT_TIME;
gint64 duration = 0; gint64 duration = 0;
bool ret = gst_element_query_position(fileSink, &format, &duration); bool ret = qt_gst_element_query_position(fileSink, format, &duration);
gst_object_unref(GST_OBJECT(fileSink)); gst_object_unref(GST_OBJECT(fileSink));
if (ret) if (ret)
return duration / 1000000; return duration / 1000000;
@@ -795,129 +774,57 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
{ {
m_metaData = data; m_metaData = data;
if (m_camerabin) { if (m_camerabin)
GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_camerabin), GST_TYPE_TAG_SETTER); QGstUtils::setMetaData(m_camerabin, data);
GstElement *element = 0;
while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
gst_tag_setter_reset_tags(GST_TAG_SETTER(element));
QMapIterator<QByteArray, QVariant> it(data);
while (it.hasNext()) {
it.next();
const QString tagName = it.key();
const QVariant tagValue = it.value();
switch(tagValue.type()) {
case QVariant::String:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toString().toUtf8().constData(),
NULL);
break;
case QVariant::Int:
case QVariant::LongLong:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toInt(),
NULL);
break;
case QVariant::Double:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toDouble(),
NULL);
break;
case QVariant::DateTime: {
QDateTime date = tagValue.toDateTime().toLocalTime();
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
gst_date_time_new_local_time(
date.date().year(), date.date().month(), date.date().day(),
date.time().hour(), date.time().minute(), date.time().second()),
NULL);
break;
}
default:
break;
}
}
}
gst_iterator_free(elements);
}
} }
bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message) bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
{ {
GstMessage* gm = message.rawMessage(); GstMessage* gm = message.rawMessage();
const GstStructure *st;
const GValue *image;
GstBuffer *buffer = NULL;
if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) { if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
if (m_captureMode == QCamera::CaptureStillImage && const GstStructure *st = gst_message_get_structure(gm);
gst_structure_has_name(gm->structure, "preview-image")) { const GValue *sampleValue = 0;
st = gst_message_get_structure(gm); if (m_captureMode == QCamera::CaptureStillImage
&& gst_structure_has_name(st, "preview-image")
if (gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)) { #if GST_CHECK_VERSION(1,0,0)
image = gst_structure_get_value(st, "buffer"); && gst_structure_has_field_typed(st, "sample", GST_TYPE_SAMPLE)
if (image) { && (sampleValue = gst_structure_get_value(st, "sample"))) {
buffer = gst_value_get_buffer(image); GstSample * const sample = gst_value_get_sample(sampleValue);
GstCaps * const previewCaps = gst_sample_get_caps(sample);
QImage img; GstBuffer * const buffer = gst_sample_get_buffer(sample);
#else
GstCaps *caps = gst_buffer_get_caps(buffer); && gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)
if (caps) { && (sampleValue = gst_structure_get_value(st, "buffer"))) {
GstStructure *structure = gst_caps_get_structure(caps, 0); GstBuffer * const buffer = gst_value_get_buffer(sampleValue);
gint width = 0;
gint height = 0;
#if CAMERABIN_DEBUG
qDebug() << "Preview caps:" << gst_structure_to_string(structure);
#endif #endif
if (structure && QImage image;
gst_structure_get_int(structure, "width", &width) && #if GST_CHECK_VERSION(1,0,0)
gst_structure_get_int(structure, "height", &height) && GstVideoInfo previewInfo;
width > 0 && height > 0) { if (gst_video_info_from_caps(&previewInfo, previewCaps))
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) { image = QGstUtils::bufferToImage(buffer, previewInfo);
QImage::Format format = QImage::Format_Invalid; gst_sample_unref(sample);
int bpp = 0; #else
gst_structure_get_int(structure, "bpp", &bpp); image = QGstUtils::bufferToImage(buffer);
gst_buffer_unref(buffer);
#endif
if (!image.isNull()) {
static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageExposed);
exposedSignal.invoke(this,
Qt::QueuedConnection,
Q_ARG(int,m_requestId));
if (bpp == 24) static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageCaptured);
format = QImage::Format_RGB888; capturedSignal.invoke(this,
else if (bpp == 32) Qt::QueuedConnection,
format = QImage::Format_RGB32; Q_ARG(int,m_requestId),
Q_ARG(QImage,image));
if (format != QImage::Format_Invalid) {
img = QImage((const uchar *)buffer->data, width, height, format);
img.bits(); //detach
}
}
}
gst_caps_unref(caps);
static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageExposed);
exposedSignal.invoke(this,
Qt::QueuedConnection,
Q_ARG(int,m_requestId));
static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageCaptured);
capturedSignal.invoke(this,
Qt::QueuedConnection,
Q_ARG(int,m_requestId),
Q_ARG(QImage,img));
}
}
return true;
} }
return true;
} }
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
if (gst_structure_has_name(gm->structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) if (gst_structure_has_name(st, GST_PHOTOGRAPHY_AUTOFOCUS_DONE))
m_cameraFocusControl->handleFocusMessage(gm); m_cameraFocusControl->handleFocusMessage(gm);
#endif #endif
} }
@@ -1109,20 +1016,12 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
if (frameSize.isEmpty()) { if (frameSize.isEmpty()) {
caps = gst_caps_copy(supportedCaps); caps = gst_caps_copy(supportedCaps);
} else { } else {
GstCaps *filter = gst_caps_new_full( GstCaps *filter = QGstUtils::videoFilterCaps();
gst_structure_new( gst_caps_set_simple(
"video/x-raw-rgb", filter,
"width" , G_TYPE_INT , frameSize.width(), "width", G_TYPE_INT, frameSize.width(),
"height" , G_TYPE_INT, frameSize.height(), NULL), "height", G_TYPE_INT, frameSize.height(),
gst_structure_new( NULL);
"video/x-raw-yuv",
"width" , G_TYPE_INT, frameSize.width(),
"height" , G_TYPE_INT, frameSize.height(), NULL),
gst_structure_new(
"image/jpeg",
"width" , G_TYPE_INT, frameSize.width(),
"height" , G_TYPE_INT, frameSize.height(), NULL),
NULL);
caps = gst_caps_intersect(supportedCaps, filter); caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter); gst_caps_unref(filter);
@@ -1133,7 +1032,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
caps = gst_caps_make_writable(caps); caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) { for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i); GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw-yuv"); gst_structure_set_name(structure, "video/x-raw");
const GValue *oldRate = gst_structure_get_value(structure, "framerate"); const GValue *oldRate = gst_structure_get_value(structure, "framerate");
GValue rate; GValue rate;
memset(&rate, 0, sizeof(rate)); memset(&rate, 0, sizeof(rate));
@@ -1142,8 +1041,11 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
gst_structure_remove_all_fields(structure); gst_structure_remove_all_fields(structure);
gst_structure_set_value(structure, "framerate", &rate); gst_structure_set_value(structure, "framerate", &rate);
} }
#if GST_CHECK_VERSION(1,0,0)
caps = gst_caps_simplify(caps);
#else
gst_caps_do_simplify(caps); gst_caps_do_simplify(caps);
#endif
for (uint i=0; i<gst_caps_get_size(caps); i++) { for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i); GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -1154,7 +1056,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
qSort(res.begin(), res.end(), rateLessThan); qSort(res.begin(), res.end(), rateLessThan);
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << "Supported rates:" << gst_caps_to_string(caps); qDebug() << "Supported rates:" << caps;
qDebug() << res; qDebug() << res;
#endif #endif
@@ -1213,31 +1115,24 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY, SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL); &supportedCaps, NULL);
#if CAMERABIN_DEBUG
qDebug() << "Source caps:" << supportedCaps;
#endif
if (!supportedCaps) if (!supportedCaps)
return res; return res;
#if CAMERABIN_DEBUG
qDebug() << "Source caps:" << gst_caps_to_string(supportedCaps);
#endif
GstCaps *caps = 0; GstCaps *caps = 0;
bool isContinuous = false; bool isContinuous = false;
if (rate.first <= 0 || rate.second <= 0) { if (rate.first <= 0 || rate.second <= 0) {
caps = gst_caps_copy(supportedCaps); caps = gst_caps_copy(supportedCaps);
} else { } else {
GstCaps *filter = gst_caps_new_full( GstCaps *filter = QGstUtils::videoFilterCaps();
gst_structure_new( gst_caps_set_simple(
"video/x-raw-rgb", filter,
"framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL), "framerate" , GST_TYPE_FRACTION , rate.first, rate.second,
gst_structure_new( NULL);
"video/x-raw-yuv",
"framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
gst_structure_new(
"image/jpeg",
"framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
NULL);
caps = gst_caps_intersect(supportedCaps, filter); caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter); gst_caps_unref(filter);
} }
@@ -1247,7 +1142,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
caps = gst_caps_make_writable(caps); caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) { for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i); GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw-yuv"); gst_structure_set_name(structure, "video/x-raw");
const GValue *oldW = gst_structure_get_value(structure, "width"); const GValue *oldW = gst_structure_get_value(structure, "width");
const GValue *oldH = gst_structure_get_value(structure, "height"); const GValue *oldH = gst_structure_get_value(structure, "height");
GValue w; GValue w;
@@ -1262,7 +1157,13 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
gst_structure_set_value(structure, "width", &w); gst_structure_set_value(structure, "width", &w);
gst_structure_set_value(structure, "height", &h); gst_structure_set_value(structure, "height", &h);
} }
#if GST_CHECK_VERSION(1,0,0)
caps = gst_caps_simplify(caps);
#else
gst_caps_do_simplify(caps); gst_caps_do_simplify(caps);
#endif
for (uint i=0; i<gst_caps_get_size(caps); i++) { for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i); GstStructure *structure = gst_caps_get_structure(caps, i);

View File

@@ -12,14 +12,18 @@ LIBS += -lqgsttools_p
CONFIG += link_pkgconfig CONFIG += link_pkgconfig
PKGCONFIG += \ PKGCONFIG += \
gstreamer-0.10 \ gstreamer-$$GST_VERSION \
gstreamer-base-0.10 \ gstreamer-base-$$GST_VERSION \
gstreamer-interfaces-0.10 \ gstreamer-audio-$$GST_VERSION \
gstreamer-audio-0.10 \ gstreamer-video-$$GST_VERSION \
gstreamer-video-0.10 \ gstreamer-pbutils-$$GST_VERSION
gstreamer-pbutils-0.10
maemo*:PKGCONFIG +=gstreamer-plugins-bad-$$GST_VERSION
mir: {
DEFINES += HAVE_MIR
}
maemo*:PKGCONFIG +=gstreamer-plugins-bad-0.10
config_resourcepolicy { config_resourcepolicy {
DEFINES += HAVE_RESOURCE_POLICY DEFINES += HAVE_RESOURCE_POLICY
@@ -27,8 +31,8 @@ config_resourcepolicy {
} }
config_gstreamer_appsrc { config_gstreamer_appsrc {
PKGCONFIG += gstreamer-app-0.10 PKGCONFIG += gstreamer-app-$$GST_VERSION
DEFINES += HAVE_GST_APPSRC DEFINES += HAVE_GST_APPSRC
LIBS += -lgstapp-0.10 LIBS += -lgstapp-$$GST_VERSION
} }

View File

@@ -2,8 +2,8 @@ TEMPLATE = subdirs
SUBDIRS += \ SUBDIRS += \
audiodecoder \ audiodecoder \
mediacapture \ mediaplayer \
mediaplayer mediacapture
config_gstreamer_encodingprofiles { config_gstreamer_encodingprofiles {
SUBDIRS += camerabin SUBDIRS += camerabin

View File

@@ -1,4 +1,4 @@
{ {
"Keys": ["gstreamermediacapture"] "Keys": ["gstreamermediacapture"],
"Services": ["org.qt-project.qt.audiosource", "org.qt-project.qt.camera"] "Services": ["org.qt-project.qt.audiosource", "org.qt-project.qt.camera"]
} }

View File

@@ -34,6 +34,7 @@
#include "qgstreameraudioencode.h" #include "qgstreameraudioencode.h"
#include "qgstreamercapturesession.h" #include "qgstreamercapturesession.h"
#include "qgstreamermediacontainercontrol.h" #include "qgstreamermediacontainercontrol.h"
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
@@ -175,7 +176,7 @@ GstElement *QGstreamerAudioEncode::createEncoder()
if (m_audioSettings.sampleRate() > 0 || m_audioSettings.channelCount() > 0) { if (m_audioSettings.sampleRate() > 0 || m_audioSettings.channelCount() > 0) {
GstCaps *caps = gst_caps_new_empty(); GstCaps *caps = gst_caps_new_empty();
GstStructure *structure = gst_structure_new("audio/x-raw-int", NULL); GstStructure *structure = qt_gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME);
if (m_audioSettings.sampleRate() > 0) if (m_audioSettings.sampleRate() > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, m_audioSettings.sampleRate(), NULL ); gst_structure_set(structure, "rate", G_TYPE_INT, m_audioSettings.sampleRate(), NULL );

View File

@@ -62,27 +62,25 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObject *parent): QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObject *parent)
QMediaService(parent) : QMediaService(parent)
{ , m_captureSession(0)
m_captureSession = 0; , m_cameraControl(0)
m_cameraControl = 0;
m_metaDataControl = 0;
#if defined(USE_GSTREAMER_CAMERA) #if defined(USE_GSTREAMER_CAMERA)
m_videoInput = 0; , m_videoInput(0)
#endif #endif
m_audioInputSelector = 0; , m_metaDataControl(0)
m_videoInputDevice = 0; , m_audioInputSelector(0)
, m_videoInputDevice(0)
m_videoOutput = 0; , m_videoOutput(0)
m_videoRenderer = 0; , m_videoRenderer(0)
m_videoWindow = 0; , m_videoWindow(0)
#if defined(HAVE_WIDGETS) #if defined(HAVE_WIDGETS)
m_videoWidgetControl = 0; , m_videoWidgetControl(0)
#endif #endif
m_imageCaptureControl = 0; , m_imageCaptureControl(0)
, m_audioProbeControl(0)
{
if (service == Q_MEDIASERVICE_AUDIOSOURCE) { if (service == Q_MEDIASERVICE_AUDIOSOURCE) {
m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::Audio, this); m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::Audio, this);
} }
@@ -163,12 +161,12 @@ QMediaControl *QGstreamerCaptureService::requestControl(const char *name)
return m_imageCaptureControl; return m_imageCaptureControl;
if (qstrcmp(name,QMediaAudioProbeControl_iid) == 0) { if (qstrcmp(name,QMediaAudioProbeControl_iid) == 0) {
if (m_captureSession) { if (!m_audioProbeControl) {
QGstreamerAudioProbeControl *probe = new QGstreamerAudioProbeControl(this); m_audioProbeControl = new QGstreamerAudioProbeControl(this);
m_captureSession->addProbe(probe); m_captureSession->addProbe(m_audioProbeControl);
return probe;
} }
return 0; m_audioProbeControl->ref.ref();
return m_audioProbeControl;
} }
if (!m_videoOutput) { if (!m_videoOutput) {
@@ -194,17 +192,15 @@ QMediaControl *QGstreamerCaptureService::requestControl(const char *name)
void QGstreamerCaptureService::releaseControl(QMediaControl *control) void QGstreamerCaptureService::releaseControl(QMediaControl *control)
{ {
if (control && control == m_videoOutput) { if (!control) {
return;
} else if (control == m_videoOutput) {
m_videoOutput = 0; m_videoOutput = 0;
m_captureSession->setVideoPreview(0); m_captureSession->setVideoPreview(0);
} } else if (control == m_audioProbeControl && !m_audioProbeControl->ref.deref()) {
m_captureSession->removeProbe(m_audioProbeControl);
QGstreamerAudioProbeControl* audioProbe = qobject_cast<QGstreamerAudioProbeControl*>(control); delete m_audioProbeControl;
if (audioProbe) { m_audioProbeControl = 0;
if (m_captureSession)
m_captureSession->removeProbe(audioProbe);
delete audioProbe;
return;
} }
} }

View File

@@ -43,6 +43,7 @@ QT_BEGIN_NAMESPACE
class QAudioInputSelectorControl; class QAudioInputSelectorControl;
class QVideoDeviceSelectorControl; class QVideoDeviceSelectorControl;
class QGstreamerAudioProbeControl;
class QGstreamerCaptureSession; class QGstreamerCaptureSession;
class QGstreamerCameraControl; class QGstreamerCameraControl;
class QGstreamerMessage; class QGstreamerMessage;
@@ -86,6 +87,8 @@ private:
QMediaControl *m_videoWidgetControl; QMediaControl *m_videoWidgetControl;
#endif #endif
QGstreamerImageCaptureControl *m_imageCaptureControl; QGstreamerImageCaptureControl *m_imageCaptureControl;
QGstreamerAudioProbeControl *m_audioProbeControl;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -110,90 +110,16 @@ QMultimedia::SupportEstimate QGstreamerCaptureServicePlugin::hasSupport(const QS
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet); return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
} }
static bool isEncoderOrMuxer(GstElementFactory *factory)
{
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_MUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_ENCODER);
}
void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const
{ {
//enumerate supported mime types m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isEncoderOrMuxer);
gst_init(NULL, NULL);
GList *plugins, *orig_plugins;
orig_plugins = plugins = gst_default_registry_get_plugin_list ();
while (plugins) {
GList *features, *orig_features;
GstPlugin *plugin = (GstPlugin *) (plugins->data);
plugins = g_list_next (plugins);
if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
continue;
orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
plugin->desc.name);
while (features) {
if (!G_UNLIKELY(features->data == NULL)) {
GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
if (GST_IS_ELEMENT_FACTORY (feature)) {
GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
if (factory
&& factory->numpadtemplates > 0
&& (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
|| qstrcmp(factory->details.klass, "Codec/Decoder/Video") == 0
|| qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
const GList *pads = factory->staticpadtemplates;
while (pads) {
GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
pads = g_list_next (pads);
if (padtemplate->direction != GST_PAD_SINK)
continue;
if (padtemplate->static_caps.string) {
GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
for (guint i = 0; i < gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
m_supportedMimeTypeSet.insert(nameLowcase);
if (nameLowcase.contains("mpeg")) {
//Because mpeg version number is only included in the detail
//description, it is necessary to manually extract this information
//in order to match the mime type of mpeg4.
const GValue *value = gst_structure_get_value(structure, "mpegversion");
if (value) {
gchar *str = gst_value_serialize (value);
QString versions(str);
QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
foreach (const QString &e, elements)
m_supportedMimeTypeSet.insert(nameLowcase + e);
g_free (str);
}
}
}
}
gst_caps_unref(caps);
}
}
gst_object_unref (factory);
}
} else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
QString name(gst_plugin_feature_get_name(feature));
if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
m_supportedMimeTypeSet.insert(name.toLower());
}
}
features = g_list_next (features);
}
gst_plugin_feature_list_free (orig_features);
}
gst_plugin_list_free (orig_plugins);
#if defined QT_SUPPORTEDMIMETYPES_DEBUG
QStringList list = m_supportedMimeTypeSet.toList();
list.sort();
if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
foreach (const QString &type, list)
qDebug() << type;
}
#endif
} }
QStringList QGstreamerCaptureServicePlugin::supportedMimeTypes() const QStringList QGstreamerCaptureServicePlugin::supportedMimeTypes() const

View File

@@ -45,6 +45,7 @@
#include <gst/gsttagsetter.h> #include <gst/gsttagsetter.h>
#include <gst/gstversion.h> #include <gst/gstversion.h>
#include <gst/video/video.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <QtCore/qurl.h> #include <QtCore/qurl.h>
@@ -52,7 +53,6 @@
#include <QCoreApplication> #include <QCoreApplication>
#include <QtCore/qmetaobject.h> #include <QtCore/qmetaobject.h>
#include <QtCore/qfile.h> #include <QtCore/qfile.h>
#include <QtGui/qimage.h> #include <QtGui/qimage.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -64,7 +64,7 @@ QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::Cap
m_waitingForEos(false), m_waitingForEos(false),
m_pipelineMode(EmptyPipeline), m_pipelineMode(EmptyPipeline),
m_captureMode(captureMode), m_captureMode(captureMode),
m_audioBufferProbeId(-1), m_audioProbe(0),
m_audioInputFactory(0), m_audioInputFactory(0),
m_audioPreviewFactory(0), m_audioPreviewFactory(0),
m_videoInputFactory(0), m_videoInputFactory(0),
@@ -169,7 +169,7 @@ GstElement *QGstreamerCaptureSession::buildEncodeBin()
if (m_captureMode & Video) { if (m_captureMode & Video) {
GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue"); GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue");
GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-encoder"); GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-encoder");
GstElement *videoscale = gst_element_factory_make("videoscale","videoscale-encoder"); GstElement *videoscale = gst_element_factory_make("videoscale","videoscale-encoder");
gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, NULL); gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, NULL);
@@ -280,7 +280,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
if (m_viewfinderInterface) { if (m_viewfinderInterface) {
GstElement *bin = gst_bin_new("video-preview-bin"); GstElement *bin = gst_bin_new("video-preview-bin");
GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview"); GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview"); GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
GstElement *preview = m_viewfinderInterface->videoSink(); GstElement *preview = m_viewfinderInterface->videoSink();
@@ -299,36 +299,25 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
resolution = m_imageEncodeControl->imageSettings().resolution(); resolution = m_imageEncodeControl->imageSettings().resolution();
} }
if (!resolution.isEmpty() || frameRate > 0.001) { GstCaps *caps = QGstUtils::videoFilterCaps();
GstCaps *caps = gst_caps_new_empty();
QStringList structureTypes;
structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";
foreach(const QString &structureType, structureTypes) { if (!resolution.isEmpty()) {
GstStructure *structure = gst_structure_new(structureType.toLatin1().constData(), NULL); gst_caps_set_simple(caps, "width", G_TYPE_INT, resolution.width(), NULL);
gst_caps_set_simple(caps, "height", G_TYPE_INT, resolution.height(), NULL);
if (!resolution.isEmpty()) {
gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL);
gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL);
}
if (frameRate > 0.001) {
QPair<int,int> rate = m_videoEncodeControl->rateAsRational();
//qDebug() << "frame rate:" << num << denum;
gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
}
gst_caps_append_structure(caps,structure);
}
//qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
gst_caps_unref(caps);
} }
if (frameRate > 0.001) {
QPair<int,int> rate = m_videoEncodeControl->rateAsRational();
//qDebug() << "frame rate:" << num << denum;
gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
}
//qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
gst_caps_unref(caps);
// add ghostpads // add ghostpads
GstPad *pad = gst_element_get_static_pad(colorspace, "sink"); GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
@@ -342,7 +331,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
previewElement = gst_element_factory_make("fakesink", "video-preview"); previewElement = gst_element_factory_make("fakesink", "video-preview");
#else #else
GstElement *bin = gst_bin_new("video-preview-bin"); GstElement *bin = gst_bin_new("video-preview-bin");
GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview"); GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
GstElement *preview = gst_element_factory_make("ximagesink", "video-preview"); GstElement *preview = gst_element_factory_make("ximagesink", "video-preview");
gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL); gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL);
gst_element_link(colorspace,preview); gst_element_link(colorspace,preview);
@@ -360,101 +349,49 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
return previewElement; return previewElement;
} }
void QGstreamerCaptureSession::probeCaps(GstCaps *caps)
static gboolean passImageFilter(GstElement *element,
GstBuffer *buffer,
void *appdata)
{ {
Q_UNUSED(element); #if GST_CHECK_VERSION(1,0,0)
Q_UNUSED(buffer); gst_video_info_from_caps(&m_previewInfo, caps);
#else
Q_UNUSED(caps);
#endif
}
QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata; bool QGstreamerCaptureSession::probeBuffer(GstBuffer *buffer)
if (session->m_passImage || session->m_passPrerollImage) { {
session->m_passImage = false; if (m_passPrerollImage) {
m_passImage = false;
m_passPrerollImage = false;
if (session->m_passPrerollImage) { return true;
session->m_passPrerollImage = false; } else if (!m_passImage) {
return TRUE; return false;
}
session->m_passPrerollImage = false;
QImage img;
GstCaps *caps = gst_buffer_get_caps(buffer);
if (caps) {
GstStructure *structure = gst_caps_get_structure (caps, 0);
gint width = 0;
gint height = 0;
if (structure &&
gst_structure_get_int(structure, "width", &width) &&
gst_structure_get_int(structure, "height", &height) &&
width > 0 && height > 0) {
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
if (fourcc == GST_MAKE_FOURCC('I','4','2','0')) {
img = QImage(width/2, height/2, QImage::Format_RGB32);
const uchar *data = (const uchar *)buffer->data;
for (int y=0; y<height; y+=2) {
const uchar *yLine = data + y*width;
const uchar *uLine = data + width*height + y*width/4;
const uchar *vLine = data + width*height*5/4 + y*width/4;
for (int x=0; x<width; x+=2) {
const qreal Y = 1.164*(yLine[x]-16);
const int U = uLine[x/2]-128;
const int V = vLine[x/2]-128;
int b = qBound(0, int(Y + 2.018*U), 255);
int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
int r = qBound(0, int(Y + 1.596*V), 255);
img.setPixel(x/2,y/2,qRgb(r,g,b));
}
}
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
QImage::Format format = QImage::Format_Invalid;
int bpp = 0;
gst_structure_get_int(structure, "bpp", &bpp);
if (bpp == 24)
format = QImage::Format_RGB888;
else if (bpp == 32)
format = QImage::Format_RGB32;
if (format != QImage::Format_Invalid) {
img = QImage((const uchar *)buffer->data,
width,
height,
format);
img.bits(); //detach
}
}
}
gst_caps_unref(caps);
}
static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageExposed);
exposedSignal.invoke(session,
Qt::QueuedConnection,
Q_ARG(int,session->m_imageRequestId));
static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageCaptured);
capturedSignal.invoke(session,
Qt::QueuedConnection,
Q_ARG(int,session->m_imageRequestId),
Q_ARG(QImage,img));
return TRUE;
} else {
return FALSE;
} }
m_passImage = false;
#if GST_CHECK_VERSION(1,0,0)
QImage img = QGstUtils::bufferToImage(buffer, m_previewInfo);
#else
QImage img = QGstUtils::bufferToImage(buffer);
#endif
if (img.isNull())
return true;
static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageExposed);
exposedSignal.invoke(this,
Qt::QueuedConnection,
Q_ARG(int,m_imageRequestId));
static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageCaptured);
capturedSignal.invoke(this,
Qt::QueuedConnection,
Q_ARG(int,m_imageRequestId),
Q_ARG(QImage,img));
return true;
} }
static gboolean saveImageFilter(GstElement *element, static gboolean saveImageFilter(GstElement *element,
@@ -471,7 +408,15 @@ static gboolean saveImageFilter(GstElement *element,
if (!fileName.isEmpty()) { if (!fileName.isEmpty()) {
QFile f(fileName); QFile f(fileName);
if (f.open(QFile::WriteOnly)) { if (f.open(QFile::WriteOnly)) {
f.write((const char *)buffer->data, buffer->size); #if GST_CHECK_VERSION(1,0,0)
GstMapInfo info;
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
f.write(reinterpret_cast<const char *>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
}
#else
f.write(reinterpret_cast<const char *>(buffer->data), buffer->size);
#endif
f.close(); f.close();
static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageSaved); static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageSaved);
@@ -489,18 +434,19 @@ GstElement *QGstreamerCaptureSession::buildImageCapture()
{ {
GstElement *bin = gst_bin_new("image-capture-bin"); GstElement *bin = gst_bin_new("image-capture-bin");
GstElement *queue = gst_element_factory_make("queue", "queue-image-capture"); GstElement *queue = gst_element_factory_make("queue", "queue-image-capture");
GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-image-capture"); GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-image-capture");
GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder"); GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder");
GstElement *sink = gst_element_factory_make("fakesink","sink-image-capture"); GstElement *sink = gst_element_factory_make("fakesink","sink-image-capture");
GstPad *pad = gst_element_get_static_pad(queue, "src"); GstPad *pad = gst_element_get_static_pad(queue, "src");
Q_ASSERT(pad); Q_ASSERT(pad);
gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
addProbeToPad(pad, false);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL); g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
g_signal_connect(G_OBJECT(sink), "handoff", g_signal_connect(G_OBJECT(sink), "handoff", G_CALLBACK(saveImageFilter), this);
G_CALLBACK(saveImageFilter), this);
gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL); gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL);
gst_element_link_many(queue, colorspace, encoder, sink, NULL); gst_element_link_many(queue, colorspace, encoder, sink, NULL);
@@ -715,6 +661,8 @@ void QGstreamerCaptureSession::dumpGraph(const QString &fileName)
_gst_debug_bin_to_dot_file(GST_BIN(m_pipeline), _gst_debug_bin_to_dot_file(GST_BIN(m_pipeline),
GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES), GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
fileName.toLatin1()); fileName.toLatin1());
#else
Q_UNUSED(fileName);
#endif #endif
} }
@@ -877,10 +825,8 @@ void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState
qint64 QGstreamerCaptureSession::duration() const qint64 QGstreamerCaptureSession::duration() const
{ {
GstFormat format = GST_FORMAT_TIME; gint64 duration = 0;
gint64 duration = 0; if (m_encodeBin && qt_gst_element_query_position(m_encodeBin, GST_FORMAT_TIME, &duration))
if ( m_encodeBin && gst_element_query_position(m_encodeBin, &format, &duration))
return duration / 1000000; return duration / 1000000;
else else
return 0; return 0;
@@ -896,50 +842,8 @@ void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &dat
//qDebug() << "QGstreamerCaptureSession::setMetaData" << data; //qDebug() << "QGstreamerCaptureSession::setMetaData" << data;
m_metaData = data; m_metaData = data;
if (m_encodeBin) { if (m_encodeBin)
GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_encodeBin), GST_TYPE_TAG_SETTER); QGstUtils::setMetaData(GST_BIN(m_encodeBin), data);
GstElement *element = 0;
while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
//qDebug() << "found element with tag setter interface:" << gst_element_get_name(element);
QMapIterator<QByteArray, QVariant> it(data);
while (it.hasNext()) {
it.next();
const QString tagName = it.key();
const QVariant tagValue = it.value();
switch(tagValue.type()) {
case QVariant::String:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
tagName.toUtf8().constData(),
tagValue.toString().toUtf8().constData(),
NULL);
break;
case QVariant::Int:
case QVariant::LongLong:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
tagName.toUtf8().constData(),
tagValue.toInt(),
NULL);
break;
case QVariant::Double:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
tagName.toUtf8().constData(),
tagValue.toDouble(),
NULL);
break;
default:
break;
}
}
}
gst_iterator_free(elements);
}
} }
bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message) bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message)
@@ -1058,34 +962,16 @@ void QGstreamerCaptureSession::setVolume(qreal volume)
void QGstreamerCaptureSession::addProbe(QGstreamerAudioProbeControl* probe) void QGstreamerCaptureSession::addProbe(QGstreamerAudioProbeControl* probe)
{ {
QMutexLocker locker(&m_audioProbeMutex); Q_ASSERT(!m_audioProbe);
m_audioProbe = probe;
if (m_audioProbes.contains(probe)) addAudioBufferProbe();
return;
m_audioProbes.append(probe);
} }
void QGstreamerCaptureSession::removeProbe(QGstreamerAudioProbeControl* probe) void QGstreamerCaptureSession::removeProbe(QGstreamerAudioProbeControl* probe)
{ {
QMutexLocker locker(&m_audioProbeMutex); Q_ASSERT(m_audioProbe == probe);
m_audioProbes.removeOne(probe); removeAudioBufferProbe();
} m_audioProbe = 0;
gboolean QGstreamerCaptureSession::padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
Q_UNUSED(pad);
QGstreamerCaptureSession *session = reinterpret_cast<QGstreamerCaptureSession*>(user_data);
QMutexLocker locker(&session->m_audioProbeMutex);
if (session->m_audioProbes.isEmpty())
return TRUE;
foreach (QGstreamerAudioProbeControl* probe, session->m_audioProbes)
probe->bufferProbed(buffer);
return TRUE;
} }
GstPad *QGstreamerCaptureSession::getAudioProbePad() GstPad *QGstreamerCaptureSession::getAudioProbePad()
@@ -1114,26 +1000,25 @@ GstPad *QGstreamerCaptureSession::getAudioProbePad()
void QGstreamerCaptureSession::removeAudioBufferProbe() void QGstreamerCaptureSession::removeAudioBufferProbe()
{ {
if (m_audioBufferProbeId == -1) if (!m_audioProbe)
return; return;
GstPad *pad = getAudioProbePad(); GstPad *pad = getAudioProbePad();
if (pad) { if (pad) {
gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId); m_audioProbe->removeProbeFromPad(pad);
gst_object_unref(G_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
m_audioBufferProbeId = -1;
} }
void QGstreamerCaptureSession::addAudioBufferProbe() void QGstreamerCaptureSession::addAudioBufferProbe()
{ {
Q_ASSERT(m_audioBufferProbeId == -1); if (!m_audioProbe)
return;
GstPad *pad = getAudioProbePad(); GstPad *pad = getAudioProbePad();
if (pad) { if (pad) {
m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this); m_audioProbe->addProbeToPad(pad);
gst_object_unref(G_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
} }

View File

@@ -41,8 +41,10 @@
#include <QtCore/qurl.h> #include <QtCore/qurl.h>
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h>
#include <private/qgstreamerbushelper_p.h> #include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -70,7 +72,10 @@ public:
virtual QList<QSize> supportedResolutions(qreal frameRate = -1) const = 0; virtual QList<QSize> supportedResolutions(qreal frameRate = -1) const = 0;
}; };
class QGstreamerCaptureSession : public QObject, public QGstreamerBusMessageFilter class QGstreamerCaptureSession
: public QObject
, public QGstreamerBusMessageFilter
, private QGstreamerBufferProbe
{ {
Q_OBJECT Q_OBJECT
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged) Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
@@ -131,7 +136,6 @@ public:
void addProbe(QGstreamerAudioProbeControl* probe); void addProbe(QGstreamerAudioProbeControl* probe);
void removeProbe(QGstreamerAudioProbeControl* probe); void removeProbe(QGstreamerAudioProbeControl* probe);
static gboolean padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
signals: signals:
void stateChanged(QGstreamerCaptureSession::State state); void stateChanged(QGstreamerCaptureSession::State state);
@@ -156,6 +160,9 @@ public slots:
void setVolume(qreal volume); void setVolume(qreal volume);
private: private:
void probeCaps(GstCaps *caps);
bool probeBuffer(GstBuffer *buffer);
enum PipelineMode { EmptyPipeline, PreviewPipeline, RecordingPipeline, PreviewAndRecordingPipeline }; enum PipelineMode { EmptyPipeline, PreviewPipeline, RecordingPipeline, PreviewAndRecordingPipeline };
GstElement *buildEncodeBin(); GstElement *buildEncodeBin();
@@ -180,9 +187,7 @@ private:
QGstreamerCaptureSession::CaptureMode m_captureMode; QGstreamerCaptureSession::CaptureMode m_captureMode;
QMap<QByteArray, QVariant> m_metaData; QMap<QByteArray, QVariant> m_metaData;
QList<QGstreamerAudioProbeControl*> m_audioProbes; QGstreamerAudioProbeControl *m_audioProbe;
QMutex m_audioProbeMutex;
int m_audioBufferProbeId;
QGstreamerElementFactory *m_audioInputFactory; QGstreamerElementFactory *m_audioInputFactory;
QGstreamerElementFactory *m_audioPreviewFactory; QGstreamerElementFactory *m_audioPreviewFactory;
@@ -217,6 +222,10 @@ private:
GstElement *m_encodeBin; GstElement *m_encodeBin;
#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_previewInfo;
#endif
public: public:
bool m_passImage; bool m_passImage;
bool m_passPrerollImage; bool m_passPrerollImage;

View File

@@ -34,7 +34,7 @@
#include "qgstreamervideoencode.h" #include "qgstreamervideoencode.h"
#include "qgstreamercapturesession.h" #include "qgstreamercapturesession.h"
#include "qgstreamermediacontainercontrol.h" #include "qgstreamermediacontainercontrol.h"
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <math.h> #include <math.h>
@@ -147,7 +147,7 @@ GstElement *QGstreamerVideoEncode::createEncoder()
GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video"); GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video");
gst_bin_add(encoderBin, capsFilter); gst_bin_add(encoderBin, capsFilter);
GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", NULL); GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, NULL);
gst_bin_add(encoderBin, colorspace); gst_bin_add(encoderBin, colorspace);
gst_bin_add(encoderBin, encoderElement); gst_bin_add(encoderBin, encoderElement);
@@ -252,27 +252,22 @@ GstElement *QGstreamerVideoEncode::createEncoder()
} }
if (!m_videoSettings.resolution().isEmpty() || m_videoSettings.frameRate() > 0.001) { if (!m_videoSettings.resolution().isEmpty() || m_videoSettings.frameRate() > 0.001) {
GstCaps *caps = gst_caps_new_empty(); GstCaps *caps = QGstUtils::videoFilterCaps();
QStringList structureTypes;
structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";
foreach(const QString &structureType, structureTypes) { if (!m_videoSettings.resolution().isEmpty()) {
GstStructure *structure = gst_structure_new(structureType.toLatin1().constData(), NULL); gst_caps_set_simple(
caps,
"width", G_TYPE_INT, m_videoSettings.resolution().width(),
"height", G_TYPE_INT, m_videoSettings.resolution().height(),
NULL);
}
if (!m_videoSettings.resolution().isEmpty()) { if (m_videoSettings.frameRate() > 0.001) {
gst_structure_set(structure, "width", G_TYPE_INT, m_videoSettings.resolution().width(), NULL); QPair<int,int> rate = rateAsRational();
gst_structure_set(structure, "height", G_TYPE_INT, m_videoSettings.resolution().height(), NULL); gst_caps_set_simple(
} caps,
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
if (m_videoSettings.frameRate() > 0.001) { NULL);
QPair<int,int> rate = rateAsRational();
//qDebug() << "frame rate:" << num << denum;
gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
}
gst_caps_append_structure(caps,structure);
} }
//qDebug() << "set video caps filter:" << gst_caps_to_string(caps); //qDebug() << "set video caps filter:" << gst_caps_to_string(caps);

View File

@@ -28,4 +28,3 @@ SOURCES += \
OTHER_FILES += \ OTHER_FILES += \
mediaplayer.json mediaplayer.json

View File

@@ -425,7 +425,6 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
m_session->loadFromUri(request); m_session->loadFromUri(request);
#endif #endif
#if defined(HAVE_GST_APPSRC) #if defined(HAVE_GST_APPSRC)
if (!request.url().isEmpty() || userStreamValid) { if (!request.url().isEmpty() || userStreamValid) {
#else #else

View File

@@ -51,7 +51,11 @@
#include <private/qgstreamervideorenderer_p.h> #include <private/qgstreamervideorenderer_p.h>
#if defined(Q_WS_MAEMO_6) && defined(__arm__) #if defined(Q_WS_MAEMO_6) && defined(__arm__)
#include "qgstreamergltexturerenderer.h" #include "private/qgstreamergltexturerenderer.h"
#endif
#if defined(HAVE_MIR) && defined (__arm__)
#include "private/qgstreamermirtexturerenderer_p.h"
#endif #endif
#include "qgstreamerstreamscontrol.h" #include "qgstreamerstreamscontrol.h"
@@ -66,6 +70,8 @@ QT_BEGIN_NAMESPACE
QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent): QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
QMediaService(parent) QMediaService(parent)
, m_audioProbeControl(0)
, m_videoProbeControl(0)
, m_videoOutput(0) , m_videoOutput(0)
, m_videoRenderer(0) , m_videoRenderer(0)
, m_videoWindow(0) , m_videoWindow(0)
@@ -82,6 +88,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#if defined(Q_WS_MAEMO_6) && defined(__arm__) #if defined(Q_WS_MAEMO_6) && defined(__arm__)
m_videoRenderer = new QGstreamerGLTextureRenderer(this); m_videoRenderer = new QGstreamerGLTextureRenderer(this);
#elif defined(HAVE_MIR) && defined (__arm__)
m_videoRenderer = new QGstreamerMirTextureRenderer(this, m_session);
#else #else
m_videoRenderer = new QGstreamerVideoRenderer(this); m_videoRenderer = new QGstreamerVideoRenderer(this);
#endif #endif
@@ -115,23 +123,23 @@ QMediaControl *QGstreamerPlayerService::requestControl(const char *name)
if (qstrcmp(name, QMediaAvailabilityControl_iid) == 0) if (qstrcmp(name, QMediaAvailabilityControl_iid) == 0)
return m_availabilityControl; return m_availabilityControl;
if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) { if (qstrcmp(name, QMediaVideoProbeControl_iid) == 0) {
if (m_session) { if (!m_videoProbeControl) {
QGstreamerVideoProbeControl *probe = new QGstreamerVideoProbeControl(this);
increaseVideoRef(); increaseVideoRef();
m_session->addProbe(probe); m_videoProbeControl = new QGstreamerVideoProbeControl(this);
return probe; m_session->addProbe(m_videoProbeControl);
} }
return 0; m_videoProbeControl->ref.ref();
return m_videoProbeControl;
} }
if (qstrcmp(name,QMediaAudioProbeControl_iid) == 0) { if (qstrcmp(name, QMediaAudioProbeControl_iid) == 0) {
if (m_session) { if (!m_audioProbeControl) {
QGstreamerAudioProbeControl *probe = new QGstreamerAudioProbeControl(this); m_audioProbeControl = new QGstreamerAudioProbeControl(this);
m_session->addProbe(probe); m_session->addProbe(m_audioProbeControl);
return probe;
} }
return 0; m_audioProbeControl->ref.ref();
return m_audioProbeControl;
} }
if (!m_videoOutput) { if (!m_videoOutput) {
@@ -156,28 +164,21 @@ QMediaControl *QGstreamerPlayerService::requestControl(const char *name)
void QGstreamerPlayerService::releaseControl(QMediaControl *control) void QGstreamerPlayerService::releaseControl(QMediaControl *control)
{ {
if (control == m_videoOutput) { if (!control) {
return;
} else if (control == m_videoOutput) {
m_videoOutput = 0; m_videoOutput = 0;
m_control->setVideoOutput(0); m_control->setVideoOutput(0);
decreaseVideoRef(); decreaseVideoRef();
} } else if (control == m_videoProbeControl && !m_videoProbeControl->ref.deref()) {
m_session->removeProbe(m_videoProbeControl);
QGstreamerVideoProbeControl* videoProbe = qobject_cast<QGstreamerVideoProbeControl*>(control); delete m_videoProbeControl;
if (videoProbe) { m_videoProbeControl = 0;
if (m_session) { decreaseVideoRef();
m_session->removeProbe(videoProbe); } else if (control == m_audioProbeControl && !m_audioProbeControl->ref.deref()) {
decreaseVideoRef(); m_session->removeProbe(m_audioProbeControl);
} delete m_audioProbeControl;
delete videoProbe; m_audioProbeControl = 0;
return;
}
QGstreamerAudioProbeControl* audioProbe = qobject_cast<QGstreamerAudioProbeControl*>(control);
if (audioProbe) {
if (m_session)
m_session->removeProbe(audioProbe);
delete audioProbe;
return;
} }
} }

View File

@@ -52,6 +52,8 @@ class QGstreamerStreamsControl;
class QGstreamerVideoRenderer; class QGstreamerVideoRenderer;
class QGstreamerVideoWidgetControl; class QGstreamerVideoWidgetControl;
class QGStreamerAvailabilityControl; class QGStreamerAvailabilityControl;
class QGstreamerAudioProbeControl;
class QGstreamerVideoProbeControl;
class QGstreamerPlayerService : public QMediaService class QGstreamerPlayerService : public QMediaService
{ {
@@ -70,6 +72,9 @@ private:
QGstreamerStreamsControl *m_streamsControl; QGstreamerStreamsControl *m_streamsControl;
QGStreamerAvailabilityControl *m_availabilityControl; QGStreamerAvailabilityControl *m_availabilityControl;
QGstreamerAudioProbeControl *m_audioProbeControl;
QGstreamerVideoProbeControl *m_videoProbeControl;
QMediaControl *m_videoOutput; QMediaControl *m_videoOutput;
QMediaControl *m_videoRenderer; QMediaControl *m_videoRenderer;
QMediaControl *m_videoWindow; QMediaControl *m_videoWindow;

View File

@@ -81,89 +81,15 @@ QMultimedia::SupportEstimate QGstreamerPlayerServicePlugin::hasSupport(const QSt
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet); return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
} }
static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER);
}
void QGstreamerPlayerServicePlugin::updateSupportedMimeTypes() const void QGstreamerPlayerServicePlugin::updateSupportedMimeTypes() const
{ {
//enumerate supported mime types m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isDecoderOrDemuxer);
gst_init(NULL, NULL);
GList *plugins, *orig_plugins;
orig_plugins = plugins = gst_default_registry_get_plugin_list ();
while (plugins) {
GList *features, *orig_features;
GstPlugin *plugin = (GstPlugin *) (plugins->data);
plugins = g_list_next (plugins);
if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
continue;
orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
plugin->desc.name);
while (features) {
if (!G_UNLIKELY(features->data == NULL)) {
GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
if (GST_IS_ELEMENT_FACTORY (feature)) {
GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
if (factory
&& factory->numpadtemplates > 0
&& (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
|| qstrcmp(factory->details.klass, "Codec/Decoder/Video") == 0
|| qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
const GList *pads = factory->staticpadtemplates;
while (pads) {
GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
pads = g_list_next (pads);
if (padtemplate->direction != GST_PAD_SINK)
continue;
if (padtemplate->static_caps.string) {
GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
for (guint i = 0; i < gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
m_supportedMimeTypeSet.insert(nameLowcase);
if (nameLowcase.contains("mpeg")) {
//Because mpeg version number is only included in the detail
//description, it is necessary to manually extract this information
//in order to match the mime type of mpeg4.
const GValue *value = gst_structure_get_value(structure, "mpegversion");
if (value) {
gchar *str = gst_value_serialize (value);
QString versions(str);
QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
foreach (const QString &e, elements)
m_supportedMimeTypeSet.insert(nameLowcase + e);
g_free (str);
}
}
}
}
}
}
gst_object_unref (factory);
}
} else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
QString name(gst_plugin_feature_get_name(feature));
if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
m_supportedMimeTypeSet.insert(name.toLower());
}
}
features = g_list_next (features);
}
gst_plugin_feature_list_free (orig_features);
}
gst_plugin_list_free (orig_plugins);
#if defined QT_SUPPORTEDMIMETYPES_DEBUG
QStringList list = m_supportedMimeTypeSet.toList();
list.sort();
if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
foreach (const QString &type, list)
qDebug() << type;
}
#endif
} }
QStringList QGstreamerPlayerServicePlugin::supportedMimeTypes() const QStringList QGstreamerPlayerServicePlugin::supportedMimeTypes() const

View File

@@ -37,7 +37,9 @@
#include <private/qgstreameraudioprobecontrol_p.h> #include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamervideoprobecontrol_p.h> #include <private/qgstreamervideoprobecontrol_p.h>
#include <private/qgstreamervideorendererinterface_p.h> #include <private/qgstreamervideorendererinterface_p.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <private/gstvideoconnector_p.h> #include <private/gstvideoconnector_p.h>
#endif
#include <private/qgstutils_p.h> #include <private/qgstutils_p.h>
#include <private/playlistfileparser_p.h> #include <private/playlistfileparser_p.h>
#include <private/qgstutils_p.h> #include <private/qgstutils_p.h>
@@ -85,6 +87,7 @@ typedef enum {
GST_PLAY_FLAG_BUFFERING = 0x000000100 GST_PLAY_FLAG_BUFFERING = 0x000000100
} GstPlayFlags; } GstPlayFlags;
#if !GST_CHECK_VERSION(1,0,0)
#define DEFAULT_RAW_CAPS \ #define DEFAULT_RAW_CAPS \
"video/x-raw-yuv; " \ "video/x-raw-yuv; " \
"video/x-raw-rgb; " \ "video/x-raw-rgb; " \
@@ -97,7 +100,9 @@ typedef enum {
"text/x-pango-markup; " \ "text/x-pango-markup; " \
"video/x-dvd-subpicture; " \ "video/x-dvd-subpicture; " \
"subpicture/x-pgs" "subpicture/x-pgs"
static GstStaticCaps static_RawCaps = GST_STATIC_CAPS(DEFAULT_RAW_CAPS); static GstStaticCaps static_RawCaps = GST_STATIC_CAPS(DEFAULT_RAW_CAPS);
#endif
QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent) QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
:QObject(parent), :QObject(parent),
@@ -105,7 +110,9 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
m_pendingState(QMediaPlayer::StoppedState), m_pendingState(QMediaPlayer::StoppedState),
m_busHelper(0), m_busHelper(0),
m_playbin(0), m_playbin(0),
#if !GST_CHECK_VERSION(1,0,0)
m_usingColorspaceElement(false), m_usingColorspaceElement(false),
#endif
m_videoSink(0), m_videoSink(0),
m_pendingVideoSink(0), m_pendingVideoSink(0),
m_nullVideoSink(0), m_nullVideoSink(0),
@@ -117,8 +124,8 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
#if defined(HAVE_GST_APPSRC) #if defined(HAVE_GST_APPSRC)
m_appSrc(0), m_appSrc(0),
#endif #endif
m_videoBufferProbeId(-1), m_videoProbe(0),
m_audioBufferProbeId(-1), m_audioProbe(0),
m_volume(100), m_volume(100),
m_playbackRate(1.0), m_playbackRate(1.0),
m_muted(false), m_muted(false),
@@ -138,8 +145,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
Q_ASSERT(result == TRUE); Q_ASSERT(result == TRUE);
Q_UNUSED(result); Q_UNUSED(result);
m_playbin = gst_element_factory_make("playbin2", NULL); m_playbin = gst_element_factory_make(QT_GSTREAMER_PLAYBIN_ELEMENT_NAME, NULL);
if (m_playbin) { if (m_playbin) {
//GST_PLAY_FLAG_NATIVE_VIDEO omits configuration of ffmpegcolorspace and videoscale, //GST_PLAY_FLAG_NATIVE_VIDEO omits configuration of ffmpegcolorspace and videoscale,
//since those elements are included in the video output bin when necessary. //since those elements are included in the video output bin when necessary.
@@ -147,13 +153,14 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO |
GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_NATIVE_AUDIO; GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_NATIVE_AUDIO;
#else #else
int flags = 0; int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
g_object_get(G_OBJECT(m_playbin), "flags", &flags, NULL);
QByteArray envFlags = qgetenv("QT_GSTREAMER_PLAYBIN_FLAGS"); QByteArray envFlags = qgetenv("QT_GSTREAMER_PLAYBIN_FLAGS");
if (!envFlags.isEmpty()) { if (!envFlags.isEmpty()) {
flags |= envFlags.toInt(); flags |= envFlags.toInt();
#if !GST_CHECK_VERSION(1,0,0)
} else { } else {
flags |= GST_PLAY_FLAG_NATIVE_VIDEO; flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
#endif
} }
#endif #endif
g_object_set(G_OBJECT(m_playbin), "flags", flags, NULL); g_object_set(G_OBJECT(m_playbin), "flags", flags, NULL);
@@ -185,12 +192,16 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
} }
} }
#if GST_CHECK_VERSION(1,0,0)
m_videoIdentity = gst_element_factory_make("identity", NULL); // floating ref
#else
m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this); g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
m_colorSpace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "ffmpegcolorspace-vo");
m_colorSpace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-vo");
// might not get a parent, take ownership to avoid leak // might not get a parent, take ownership to avoid leak
qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace)); qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
#endif
m_nullVideoSink = gst_element_factory_make("fakesink", NULL); m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL); g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL);
@@ -206,7 +217,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
// add ghostpads // add ghostpads
GstPad *pad = gst_element_get_static_pad(m_videoIdentity,"sink"); GstPad *pad = gst_element_get_static_pad(m_videoIdentity,"sink");
gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("videosink", pad)); gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("sink", pad));
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
if (m_playbin != 0) { if (m_playbin != 0) {
@@ -244,7 +255,9 @@ QGstreamerPlayerSession::~QGstreamerPlayerSession()
delete m_busHelper; delete m_busHelper;
gst_object_unref(GST_OBJECT(m_bus)); gst_object_unref(GST_OBJECT(m_bus));
gst_object_unref(GST_OBJECT(m_playbin)); gst_object_unref(GST_OBJECT(m_playbin));
#if !GST_CHECK_VERSION(1,0,0)
gst_object_unref(GST_OBJECT(m_colorSpace)); gst_object_unref(GST_OBJECT(m_colorSpace));
#endif
gst_object_unref(GST_OBJECT(m_nullVideoSink)); gst_object_unref(GST_OBJECT(m_nullVideoSink));
gst_object_unref(GST_OBJECT(m_videoOutputBin)); gst_object_unref(GST_OBJECT(m_videoOutputBin));
} }
@@ -339,12 +352,10 @@ qint64 QGstreamerPlayerSession::duration() const
qint64 QGstreamerPlayerSession::position() const qint64 QGstreamerPlayerSession::position() const
{ {
GstFormat format = GST_FORMAT_TIME;
gint64 position = 0; gint64 position = 0;
if ( m_playbin && gst_element_query_position(m_playbin, &format, &position)) if (m_playbin && qt_gst_element_query_position(m_playbin, GST_FORMAT_TIME, &position))
m_lastPosition = position / 1000000; m_lastPosition = position / 1000000;
return m_lastPosition; return m_lastPosition;
} }
@@ -474,17 +485,26 @@ bool QGstreamerPlayerSession::isAudioAvailable() const
return m_audioAvailable; return m_audioAvailable;
} }
#if GST_CHECK_VERSION(1,0,0)
static GstPadProbeReturn block_pad_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
#else
static void block_pad_cb(GstPad *pad, gboolean blocked, gpointer user_data) static void block_pad_cb(GstPad *pad, gboolean blocked, gpointer user_data)
#endif
{ {
Q_UNUSED(pad); Q_UNUSED(pad);
#if GST_CHECK_VERSION(1,0,0)
Q_UNUSED(info);
Q_UNUSED(user_data);
return GST_PAD_PROBE_OK;
#else
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
qDebug() << "block_pad_cb, blocked:" << blocked; qDebug() << "block_pad_cb, blocked:" << blocked;
#endif #endif
if (blocked && user_data) { if (blocked && user_data) {
QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data); QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
QMetaObject::invokeMethod(session, "finishVideoOutputChange", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "finishVideoOutputChange", Qt::QueuedConnection);
} }
#endif
} }
void QGstreamerPlayerSession::updateVideoRenderer() void QGstreamerPlayerSession::updateVideoRenderer()
@@ -529,7 +549,7 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
m_renderer = renderer; m_renderer = renderer;
#ifdef DEBUG_VO_BIN_DUMP #ifdef DEBUG_VO_BIN_DUMP
_gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin), gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/), GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
"playbin_set"); "playbin_set");
#endif #endif
@@ -570,12 +590,14 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_element_set_state(m_videoSink, GST_STATE_NULL); gst_element_set_state(m_videoSink, GST_STATE_NULL);
gst_element_set_state(m_playbin, GST_STATE_NULL); gst_element_set_state(m_playbin, GST_STATE_NULL);
#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement) { if (m_usingColorspaceElement) {
gst_element_unlink(m_colorSpace, m_videoSink); gst_element_unlink(m_colorSpace, m_videoSink);
gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace); gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
} else { } else {
gst_element_unlink(m_videoIdentity, m_videoSink); gst_element_unlink(m_videoIdentity, m_videoSink);
} }
#endif
removeVideoBufferProbe(); removeVideoBufferProbe();
@@ -585,8 +607,9 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink); gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
m_usingColorspaceElement = false;
bool linked = gst_element_link(m_videoIdentity, m_videoSink); bool linked = gst_element_link(m_videoIdentity, m_videoSink);
#if !GST_CHECK_VERSION(1,0,0)
m_usingColorspaceElement = false;
if (!linked) { if (!linked) {
m_usingColorspaceElement = true; m_usingColorspaceElement = true;
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
@@ -595,6 +618,10 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace); gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL); linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL);
} }
#endif
if (!linked)
qWarning() << "Linking video output element failed";
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "show-preroll-frame") != 0) { if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "show-preroll-frame") != 0) {
gboolean value = m_displayPrerolledFrame; gboolean value = m_displayPrerolledFrame;
@@ -633,7 +660,11 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
//block pads, async to avoid locking in paused state //block pads, async to avoid locking in paused state
GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src"); GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
#if GST_CHECK_VERSION(1,0,0)
this->pad_probe_id = gst_pad_add_probe(srcPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCKING), block_pad_cb, this, NULL);
#else
gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this); gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
#endif
gst_object_unref(GST_OBJECT(srcPad)); gst_object_unref(GST_OBJECT(srcPad));
//Unpause the sink to avoid waiting until the buffer is processed //Unpause the sink to avoid waiting until the buffer is processed
@@ -671,16 +702,22 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
} }
if (m_pendingVideoSink == m_videoSink) { if (m_pendingVideoSink == m_videoSink) {
qDebug() << "Abort, no change";
//video output was change back to the current one, //video output was change back to the current one,
//no need to torment the pipeline, just unblock the pad //no need to torment the pipeline, just unblock the pad
if (gst_pad_is_blocked(srcPad)) if (gst_pad_is_blocked(srcPad))
#if GST_CHECK_VERSION(1,0,0)
gst_pad_remove_probe(srcPad, this->pad_probe_id);
#else
gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0); gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
#endif
m_pendingVideoSink = 0; m_pendingVideoSink = 0;
gst_object_unref(GST_OBJECT(srcPad)); gst_object_unref(GST_OBJECT(srcPad));
return; return;
} }
#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement) { if (m_usingColorspaceElement) {
gst_element_set_state(m_colorSpace, GST_STATE_NULL); gst_element_set_state(m_colorSpace, GST_STATE_NULL);
gst_element_set_state(m_videoSink, GST_STATE_NULL); gst_element_set_state(m_videoSink, GST_STATE_NULL);
@@ -688,6 +725,9 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
gst_element_unlink(m_colorSpace, m_videoSink); gst_element_unlink(m_colorSpace, m_videoSink);
gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace); gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
} else { } else {
#else
{
#endif
gst_element_set_state(m_videoSink, GST_STATE_NULL); gst_element_set_state(m_videoSink, GST_STATE_NULL);
gst_element_unlink(m_videoIdentity, m_videoSink); gst_element_unlink(m_videoIdentity, m_videoSink);
} }
@@ -703,8 +743,9 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
addVideoBufferProbe(); addVideoBufferProbe();
m_usingColorspaceElement = false;
bool linked = gst_element_link(m_videoIdentity, m_videoSink); bool linked = gst_element_link(m_videoIdentity, m_videoSink);
#if !GST_CHECK_VERSION(1,0,0)
m_usingColorspaceElement = false;
if (!linked) { if (!linked) {
m_usingColorspaceElement = true; m_usingColorspaceElement = true;
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
@@ -713,6 +754,7 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace); gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL); linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL);
} }
#endif
if (!linked) if (!linked)
qWarning() << "Linking video output element failed"; qWarning() << "Linking video output element failed";
@@ -720,6 +762,8 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
qDebug() << "notify the video connector it has to emit a new segment message..."; qDebug() << "notify the video connector it has to emit a new segment message...";
#endif #endif
#if !GST_CHECK_VERSION(1,0,0)
//it's necessary to send a new segment event just before //it's necessary to send a new segment event just before
//the first buffer pushed to the new sink //the first buffer pushed to the new sink
g_signal_emit_by_name(m_videoIdentity, g_signal_emit_by_name(m_videoIdentity,
@@ -727,7 +771,7 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
true //emit connection-failed signal true //emit connection-failed signal
//to have a chance to insert colorspace element //to have a chance to insert colorspace element
); );
#endif
GstState state = GST_STATE_VOID_PENDING; GstState state = GST_STATE_VOID_PENDING;
@@ -743,8 +787,10 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
break; break;
} }
#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement) if (m_usingColorspaceElement)
gst_element_set_state(m_colorSpace, state); gst_element_set_state(m_colorSpace, state);
#endif
gst_element_set_state(m_videoSink, state); gst_element_set_state(m_videoSink, state);
@@ -760,16 +806,23 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
//don't have to wait here, it will unblock eventually //don't have to wait here, it will unblock eventually
if (gst_pad_is_blocked(srcPad)) if (gst_pad_is_blocked(srcPad))
gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0); #if GST_CHECK_VERSION(1,0,0)
gst_pad_remove_probe(srcPad, this->pad_probe_id);
#else
gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
#endif
gst_object_unref(GST_OBJECT(srcPad)); gst_object_unref(GST_OBJECT(srcPad));
#ifdef DEBUG_VO_BIN_DUMP #ifdef DEBUG_VO_BIN_DUMP
_gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin), gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/), GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* | GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES */),
"playbin_finish"); "playbin_finish");
#endif #endif
} }
#if !GST_CHECK_VERSION(1,0,0)
void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpointer data) void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpointer data)
{ {
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
@@ -814,6 +867,7 @@ void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpoin
gst_element_set_state(session->m_colorSpace, state); gst_element_set_state(session->m_colorSpace, state);
} }
#endif
bool QGstreamerPlayerSession::isVideoAvailable() const bool QGstreamerPlayerSession::isVideoAvailable() const
{ {
@@ -830,6 +884,7 @@ bool QGstreamerPlayerSession::play()
#ifdef DEBUG_PLAYBIN #ifdef DEBUG_PLAYBIN
qDebug() << Q_FUNC_INFO; qDebug() << Q_FUNC_INFO;
#endif #endif
m_everPlayed = false; m_everPlayed = false;
if (m_playbin) { if (m_playbin) {
m_pendingState = QMediaPlayer::PlayingState; m_pendingState = QMediaPlayer::PlayingState;
@@ -1161,21 +1216,20 @@ bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message
case GST_MESSAGE_SEGMENT_DONE: case GST_MESSAGE_SEGMENT_DONE:
break; break;
case GST_MESSAGE_LATENCY: case GST_MESSAGE_LATENCY:
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 13) #if GST_CHECK_VERSION(0,10,13)
case GST_MESSAGE_ASYNC_START: case GST_MESSAGE_ASYNC_START:
break; break;
case GST_MESSAGE_ASYNC_DONE: case GST_MESSAGE_ASYNC_DONE:
{ {
GstFormat format = GST_FORMAT_TIME;
gint64 position = 0; gint64 position = 0;
if (gst_element_query_position(m_playbin, &format, &position)) { if (qt_gst_element_query_position(m_playbin, GST_FORMAT_TIME, &position)) {
position /= 1000000; position /= 1000000;
m_lastPosition = position; m_lastPosition = position;
emit positionChanged(position); emit positionChanged(position);
} }
break; break;
} }
#if GST_VERSION_MICRO >= 23 #if GST_CHECK_VERSION(0,10,23)
case GST_MESSAGE_REQUEST_STATE: case GST_MESSAGE_REQUEST_STATE:
#endif #endif
#endif #endif
@@ -1327,8 +1381,11 @@ void QGstreamerPlayerSession::getStreamsInfo()
default: default:
break; break;
} }
#if GST_CHECK_VERSION(1,0,0)
if (tags && GST_IS_TAG_LIST(tags)) {
#else
if (tags && gst_is_tag_list(tags)) { if (tags && gst_is_tag_list(tags)) {
#endif
gchar *languageCode = 0; gchar *languageCode = 0;
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &languageCode)) if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &languageCode))
streamProperties[QMediaMetaData::Language] = QString::fromUtf8(languageCode); streamProperties[QMediaMetaData::Language] = QString::fromUtf8(languageCode);
@@ -1365,9 +1422,8 @@ void QGstreamerPlayerSession::updateVideoResolutionTag()
#endif #endif
QSize size; QSize size;
QSize aspectRatio; QSize aspectRatio;
GstPad *pad = gst_element_get_static_pad(m_videoIdentity, "src"); GstPad *pad = gst_element_get_static_pad(m_videoIdentity, "src");
GstCaps *caps = gst_pad_get_negotiated_caps(pad); GstCaps *caps = qt_gst_pad_get_current_caps(pad);
if (caps) { if (caps) {
const GstStructure *structure = gst_caps_get_structure(caps, 0); const GstStructure *structure = gst_caps_get_structure(caps, 0);
@@ -1407,11 +1463,10 @@ void QGstreamerPlayerSession::updateVideoResolutionTag()
void QGstreamerPlayerSession::updateDuration() void QGstreamerPlayerSession::updateDuration()
{ {
GstFormat format = GST_FORMAT_TIME;
gint64 gstDuration = 0; gint64 gstDuration = 0;
int duration = -1; int duration = -1;
if (m_playbin && gst_element_query_duration(m_playbin, &format, &gstDuration)) if (m_playbin && qt_gst_element_query_duration(m_playbin, GST_FORMAT_TIME, &gstDuration))
duration = gstDuration / 1000000; duration = gstDuration / 1000000;
if (m_duration != duration) { if (m_duration != duration) {
@@ -1467,7 +1522,7 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
// The rest // The rest
if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0) { if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0) {
GstStructure *extras = gst_structure_empty_new("extras"); GstStructure *extras = qt_gst_structure_new_empty("extras");
foreach (const QByteArray &rawHeader, self->m_request.rawHeaderList()) { foreach (const QByteArray &rawHeader, self->m_request.rawHeaderList()) {
if (rawHeader == userAgentString) // Filter User-Agent if (rawHeader == userAgentString) // Filter User-Agent
@@ -1528,7 +1583,8 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
qDebug() << "Current source is a non-live source"; qDebug() << "Current source is a non-live source";
#endif #endif
g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, NULL); if (self->m_videoSink)
g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, NULL);
gst_object_unref(source); gst_object_unref(source);
} }
@@ -1623,7 +1679,11 @@ GstAutoplugSelectResult QGstreamerPlayerSession::handleAutoplugSelect(GstBin *bi
const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)); const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
if (g_str_has_prefix(factoryName, "vaapi")) { if (g_str_has_prefix(factoryName, "vaapi")) {
GstPad *sinkPad = gst_element_get_static_pad(session->m_videoSink, "sink"); GstPad *sinkPad = gst_element_get_static_pad(session->m_videoSink, "sink");
#if GST_CHECK_VERSION(1,0,0)
GstCaps *sinkCaps = gst_pad_query_caps(sinkPad, NULL);
#else
GstCaps *sinkCaps = gst_pad_get_caps(sinkPad); GstCaps *sinkCaps = gst_pad_get_caps(sinkPad);
#endif
#if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 33)) #if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 33))
if (!factory_can_src_any_caps(factory, sinkCaps)) if (!factory_can_src_any_caps(factory, sinkCaps))
@@ -1652,8 +1712,10 @@ void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *elemen
// Disable on-disk buffering. // Disable on-disk buffering.
g_object_set(G_OBJECT(element), "temp-template", NULL, NULL); g_object_set(G_OBJECT(element), "temp-template", NULL, NULL);
} else if (g_str_has_prefix(elementName, "uridecodebin") || } else if (g_str_has_prefix(elementName, "uridecodebin") ||
g_str_has_prefix(elementName, "decodebin2")) { #if GST_CHECK_VERSION(1,0,0)
g_str_has_prefix(elementName, "decodebin")) {
#else
g_str_has_prefix(elementName, "decodebin2")) {
if (g_str_has_prefix(elementName, "uridecodebin")) { if (g_str_has_prefix(elementName, "uridecodebin")) {
// Add video/x-surface (VAAPI) to default raw formats // Add video/x-surface (VAAPI) to default raw formats
g_object_set(G_OBJECT(element), "caps", gst_static_caps_get(&static_RawCaps), NULL); g_object_set(G_OBJECT(element), "caps", gst_static_caps_get(&static_RawCaps), NULL);
@@ -1661,7 +1723,7 @@ void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *elemen
// video sink doesn't support it // video sink doesn't support it
g_signal_connect(element, "autoplug-select", G_CALLBACK(handleAutoplugSelect), session); g_signal_connect(element, "autoplug-select", G_CALLBACK(handleAutoplugSelect), session);
} }
#endif
//listen for queue2 element added to uridecodebin/decodebin2 as well. //listen for queue2 element added to uridecodebin/decodebin2 as well.
//Don't touch other bins since they may have unrelated queues //Don't touch other bins since they may have unrelated queues
g_signal_connect(element, "element-added", g_signal_connect(element, "element-added",
@@ -1711,68 +1773,30 @@ void QGstreamerPlayerSession::showPrerollFrames(bool enabled)
void QGstreamerPlayerSession::addProbe(QGstreamerVideoProbeControl* probe) void QGstreamerPlayerSession::addProbe(QGstreamerVideoProbeControl* probe)
{ {
QMutexLocker locker(&m_videoProbeMutex); Q_ASSERT(!m_videoProbe);
m_videoProbe = probe;
if (m_videoProbes.contains(probe)) addVideoBufferProbe();
return;
m_videoProbes.append(probe);
} }
void QGstreamerPlayerSession::removeProbe(QGstreamerVideoProbeControl* probe) void QGstreamerPlayerSession::removeProbe(QGstreamerVideoProbeControl* probe)
{ {
QMutexLocker locker(&m_videoProbeMutex); Q_ASSERT(m_videoProbe == probe);
m_videoProbes.removeOne(probe); removeVideoBufferProbe();
// Do not emit flush signal in this case. m_videoProbe = 0;
// Assume user releases any outstanding references to video frames.
}
gboolean QGstreamerPlayerSession::padVideoBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
Q_UNUSED(pad);
QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
QMutexLocker locker(&session->m_videoProbeMutex);
if (session->m_videoProbes.isEmpty())
return TRUE;
foreach (QGstreamerVideoProbeControl* probe, session->m_videoProbes)
probe->bufferProbed(buffer);
return TRUE;
} }
void QGstreamerPlayerSession::addProbe(QGstreamerAudioProbeControl* probe) void QGstreamerPlayerSession::addProbe(QGstreamerAudioProbeControl* probe)
{ {
QMutexLocker locker(&m_audioProbeMutex); Q_ASSERT(!m_audioProbe);
m_audioProbe = probe;
if (m_audioProbes.contains(probe)) addAudioBufferProbe();
return;
m_audioProbes.append(probe);
} }
void QGstreamerPlayerSession::removeProbe(QGstreamerAudioProbeControl* probe) void QGstreamerPlayerSession::removeProbe(QGstreamerAudioProbeControl* probe)
{ {
QMutexLocker locker(&m_audioProbeMutex); Q_ASSERT(m_audioProbe == probe);
m_audioProbes.removeOne(probe); removeAudioBufferProbe();
} m_audioProbe = 0;
gboolean QGstreamerPlayerSession::padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
Q_UNUSED(pad);
QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
QMutexLocker locker(&session->m_audioProbeMutex);
if (session->m_audioProbes.isEmpty())
return TRUE;
foreach (QGstreamerAudioProbeControl* probe, session->m_audioProbes)
probe->bufferProbed(buffer);
return TRUE;
} }
// This function is similar to stop(), // This function is similar to stop(),
@@ -1797,80 +1821,62 @@ void QGstreamerPlayerSession::endOfMediaReset()
void QGstreamerPlayerSession::removeVideoBufferProbe() void QGstreamerPlayerSession::removeVideoBufferProbe()
{ {
if (m_videoBufferProbeId == -1) if (!m_videoProbe)
return; return;
if (!m_videoSink) {
m_videoBufferProbeId = -1;
return;
}
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink"); GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
if (pad) { if (pad) {
gst_pad_remove_buffer_probe(pad, m_videoBufferProbeId); m_videoProbe->removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
m_videoBufferProbeId = -1;
} }
void QGstreamerPlayerSession::addVideoBufferProbe() void QGstreamerPlayerSession::addVideoBufferProbe()
{ {
Q_ASSERT(m_videoBufferProbeId == -1); if (!m_videoProbe)
if (!m_videoSink)
return; return;
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink"); GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
if (pad) { if (pad) {
m_videoBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padVideoBufferProbe), this); m_videoProbe->addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
} }
void QGstreamerPlayerSession::removeAudioBufferProbe() void QGstreamerPlayerSession::removeAudioBufferProbe()
{ {
if (m_audioBufferProbeId == -1) if (!m_audioProbe)
return; return;
if (!m_audioSink) {
m_audioBufferProbeId = -1;
return;
}
GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink"); GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
if (pad) { if (pad) {
gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId); m_audioProbe->removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
m_audioBufferProbeId = -1;
} }
void QGstreamerPlayerSession::addAudioBufferProbe() void QGstreamerPlayerSession::addAudioBufferProbe()
{ {
Q_ASSERT(m_audioBufferProbeId == -1); if (!m_audioProbe)
if (!m_audioSink)
return; return;
GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink"); GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
if (pad) { if (pad) {
m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this); m_audioProbe->addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad)); gst_object_unref(GST_OBJECT(pad));
} }
} }
void QGstreamerPlayerSession::flushVideoProbes() void QGstreamerPlayerSession::flushVideoProbes()
{ {
QMutexLocker locker(&m_videoProbeMutex); if (m_videoProbe)
foreach (QGstreamerVideoProbeControl* probe, m_videoProbes) m_videoProbe->startFlushing();
probe->startFlushing();
} }
void QGstreamerPlayerSession::resumeVideoProbes() void QGstreamerPlayerSession::resumeVideoProbes()
{ {
QMutexLocker locker(&m_videoProbeMutex); if (m_videoProbe)
foreach (QGstreamerVideoProbeControl* probe, m_videoProbes) m_videoProbe->stopFlushing();
probe->stopFlushing();
} }
void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpointer userData) void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpointer userData)
@@ -1878,7 +1884,11 @@ void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpoint
QGstreamerPlayerSession* session = (QGstreamerPlayerSession*)userData; QGstreamerPlayerSession* session = (QGstreamerPlayerSession*)userData;
const gchar *uri = 0; const gchar *uri = 0;
#if GST_CHECK_VERSION(1,0,0)
g_object_get(G_OBJECT(session->m_playbin), "current-uri", &uri, NULL);
#else
g_object_get(G_OBJECT(session->m_playbin), "uri", &uri, NULL); g_object_get(G_OBJECT(session->m_playbin), "uri", &uri, NULL);
#endif
guint64 length = gst_type_find_get_length(find); guint64 length = gst_type_find_get_length(find);
if (!length) if (!length)
@@ -1887,7 +1897,7 @@ void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpoint
length = qMin(length, guint64(1024)); length = qMin(length, guint64(1024));
while (length > 0) { while (length > 0) {
guint8 *data = gst_type_find_peek(find, 0, length); const guint8 *data = gst_type_find_peek(find, 0, length);
if (data) { if (data) {
session->m_isPlaylist = (QPlaylistFileParser::findPlaylistType(QString::fromUtf8(uri), 0, data, length) != QPlaylistFileParser::UNKNOWN); session->m_isPlaylist = (QPlaylistFileParser::findPlaylistType(QString::fromUtf8(uri), 0, data, length) != QPlaylistFileParser::UNKNOWN);
return; return;

View File

@@ -119,11 +119,9 @@ public:
void addProbe(QGstreamerVideoProbeControl* probe); void addProbe(QGstreamerVideoProbeControl* probe);
void removeProbe(QGstreamerVideoProbeControl* probe); void removeProbe(QGstreamerVideoProbeControl* probe);
static gboolean padVideoBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
void addProbe(QGstreamerAudioProbeControl* probe); void addProbe(QGstreamerAudioProbeControl* probe);
void removeProbe(QGstreamerAudioProbeControl* probe); void removeProbe(QGstreamerAudioProbeControl* probe);
static gboolean padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
void endOfMediaReset(); void endOfMediaReset();
@@ -172,7 +170,9 @@ private:
static void playbinNotifySource(GObject *o, GParamSpec *p, gpointer d); static void playbinNotifySource(GObject *o, GParamSpec *p, gpointer d);
static void handleVolumeChange(GObject *o, GParamSpec *p, gpointer d); static void handleVolumeChange(GObject *o, GParamSpec *p, gpointer d);
static void handleMutedChange(GObject *o, GParamSpec *p, gpointer d); static void handleMutedChange(GObject *o, GParamSpec *p, gpointer d);
#if !GST_CHECK_VERSION(1,0,0)
static void insertColorSpaceElement(GstElement *element, gpointer data); static void insertColorSpaceElement(GstElement *element, gpointer data);
#endif
static void handleElementAdded(GstBin *bin, GstElement *element, QGstreamerPlayerSession *session); static void handleElementAdded(GstBin *bin, GstElement *element, QGstreamerPlayerSession *session);
static void handleStreamsChange(GstBin *bin, gpointer user_data); static void handleStreamsChange(GstBin *bin, gpointer user_data);
static GstAutoplugSelectResult handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session); static GstAutoplugSelectResult handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session);
@@ -194,11 +194,14 @@ private:
QGstreamerBusHelper* m_busHelper; QGstreamerBusHelper* m_busHelper;
GstElement* m_playbin; GstElement* m_playbin;
GstElement* m_videoSink;
GstElement* m_videoOutputBin; GstElement* m_videoOutputBin;
GstElement* m_videoIdentity; GstElement* m_videoIdentity;
#if !GST_CHECK_VERSION(1,0,0)
GstElement* m_colorSpace; GstElement* m_colorSpace;
bool m_usingColorspaceElement; bool m_usingColorspaceElement;
GstElement* m_videoSink; #endif
GstElement* m_pendingVideoSink; GstElement* m_pendingVideoSink;
GstElement* m_nullVideoSink; GstElement* m_nullVideoSink;
@@ -218,13 +221,8 @@ private:
QList<QMediaStreamsControl::StreamType> m_streamTypes; QList<QMediaStreamsControl::StreamType> m_streamTypes;
QMap<QMediaStreamsControl::StreamType, int> m_playbin2StreamOffset; QMap<QMediaStreamsControl::StreamType, int> m_playbin2StreamOffset;
QList<QGstreamerVideoProbeControl*> m_videoProbes; QGstreamerVideoProbeControl *m_videoProbe;
QMutex m_videoProbeMutex; QGstreamerAudioProbeControl *m_audioProbe;
int m_videoBufferProbeId;
QList<QGstreamerAudioProbeControl*> m_audioProbes;
QMutex m_audioProbeMutex;
int m_audioBufferProbeId;
int m_volume; int m_volume;
qreal m_playbackRate; qreal m_playbackRate;
@@ -252,6 +250,7 @@ private:
bool m_isLiveSource; bool m_isLiveSource;
bool m_isPlaylist; bool m_isPlaylist;
gulong pad_probe_id;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -495,6 +495,8 @@ void tst_QCameraBackend::testCaptureToBuffer()
QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg); QCOMPARE(imageCapture.bufferFormat(), QVideoFrame::Format_Jpeg);
} }
QTRY_VERIFY(imageCapture.isReadyForCapture());
//Try to capture to both buffer and file //Try to capture to both buffer and file
#ifdef Q_WS_MAEMO_6 #ifdef Q_WS_MAEMO_6
QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile)); QVERIFY(imageCapture.isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer | QCameraImageCapture::CaptureToFile));
@@ -651,11 +653,11 @@ void tst_QCameraBackend::testVideoRecording()
{ {
QFETCH(QByteArray, device); QFETCH(QByteArray, device);
QCamera *camera = device.isEmpty() ? new QCamera : new QCamera(device); QScopedPointer<QCamera> camera(device.isEmpty() ? new QCamera : new QCamera(device));
QMediaRecorder recorder(camera); QMediaRecorder recorder(camera.data());
QSignalSpy errorSignal(camera, SIGNAL(error(QCamera::Error))); QSignalSpy errorSignal(camera.data(), SIGNAL(error(QCamera::Error)));
QSignalSpy recorderErrorSignal(&recorder, SIGNAL(error(QMediaRecorder::Error))); QSignalSpy recorderErrorSignal(&recorder, SIGNAL(error(QMediaRecorder::Error)));
QSignalSpy recorderStatusSignal(&recorder, SIGNAL(statusChanged(QMediaRecorder::Status))); QSignalSpy recorderStatusSignal(&recorder, SIGNAL(statusChanged(QMediaRecorder::Status)));
@@ -702,8 +704,6 @@ void tst_QCameraBackend::testVideoRecording()
camera->setCaptureMode(QCamera::CaptureStillImage); camera->setCaptureMode(QCamera::CaptureStillImage);
QTRY_COMPARE(recorder.status(), QMediaRecorder::UnloadedStatus); QTRY_COMPARE(recorder.status(), QMediaRecorder::UnloadedStatus);
QCOMPARE(recorderStatusSignal.last().first().value<QMediaRecorder::Status>(), recorder.status()); QCOMPARE(recorderStatusSignal.last().first().value<QMediaRecorder::Status>(), recorder.status());
delete camera;
} }
QTEST_MAIN(tst_QCameraBackend) QTEST_MAIN(tst_QCameraBackend)

View File

@@ -724,7 +724,7 @@ void tst_QMediaPlayerBackend::seekPauseSeek()
{ {
QVideoFrame frame = surface->m_frameList.back(); QVideoFrame frame = surface->m_frameList.back();
const qint64 elapsed = frame.startTime() - position; const qint64 elapsed = (frame.startTime() / 1000) - position; // frame.startTime() is microsecond, position is milliseconds.
QVERIFY2(qAbs(elapsed) < (qint64)500, QByteArray::number(elapsed).constData()); QVERIFY2(qAbs(elapsed) < (qint64)500, QByteArray::number(elapsed).constData());
QCOMPARE(frame.width(), 160); QCOMPARE(frame.width(), 160);
QCOMPARE(frame.height(), 120); QCOMPARE(frame.height(), 120);
@@ -748,7 +748,7 @@ void tst_QMediaPlayerBackend::seekPauseSeek()
{ {
QVideoFrame frame = surface->m_frameList.back(); QVideoFrame frame = surface->m_frameList.back();
const qint64 elapsed = frame.startTime() - position; const qint64 elapsed = (frame.startTime() / 1000) - position;
QVERIFY2(qAbs(elapsed) < (qint64)500, QByteArray::number(elapsed).constData()); QVERIFY2(qAbs(elapsed) < (qint64)500, QByteArray::number(elapsed).constData());
QCOMPARE(frame.width(), 160); QCOMPARE(frame.width(), 160);
QCOMPARE(frame.height(), 120); QCOMPARE(frame.height(), 120);