GStreamer: fix memory leaks.

Many GStreamer objects were not properly managed or were never released.

Change-Id: I38b3854e8b9e2264b5b647f331d3bb16b886e2d6
Reviewed-by: Andrew den Exter <andrew.den.exter@qinetic.com.au>
Author: Yoann Lopes
Date: 2014-03-20 19:20:24 +01:00
Committed by: The Qt Project
Parent: 60ba0afbde
Commit: 023c6ebcb9
20 changed files with 134 additions and 41 deletions
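
Background for reviewers, not part of the change itself: almost every hunk below applies one of two GStreamer ownership rules. First, objects handed out by get/new style calls (gst_element_get_static_pad, gst_element_get_bus, g_object_get on an object property, gst_caps_from_string, and so on) come with a reference the caller has to drop. Second, a newly created GstObject starts with a floating reference that is only taken over once the object gets a parent, typically via gst_bin_add(); an object that never gets a parent must be released by the code that created it. A minimal sketch of both rules against the plain 0.10 API (assumes gst_init() has been called):

    #include <gst/gst.h>

    static void ownership_rules_sketch(void)
    {
        GstElement *pipeline = gst_pipeline_new(NULL);

        GstBus *bus = gst_element_get_bus(pipeline);   // rule 1: a new reference
        gst_object_unref(GST_OBJECT(bus));             // the caller must release it

        GstElement *sink = gst_element_factory_make("fakesink", NULL); // rule 2: floating
        gst_bin_add(GST_BIN(pipeline), sink);          // the pipeline sinks the ref and owns it

        gst_object_unref(GST_OBJECT(pipeline));        // releases the pipeline and its children
    }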

@@ -72,7 +72,11 @@ bool QGstAppSrc::setup(GstElement* appsrc)
if (m_setup || m_stream == 0 || appsrc == 0)
return false;
if (m_appSrc)
gst_object_unref(G_OBJECT(m_appSrc));
m_appSrc = GST_APP_SRC(appsrc);
gst_object_ref(G_OBJECT(m_appSrc));
gst_app_src_set_callbacks(m_appSrc, (GstAppSrcCallbacks*)&m_callbacks, this, (GDestroyNotify)&QGstAppSrc::destroy_notify);
g_object_get(G_OBJECT(m_appSrc), "max-bytes", &m_maxBytes, NULL);
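
The hunk above makes setup() safe to call more than once: any previously held appsrc is released before the new one is stored and referenced. The helper below is not from the patch; it sketches the same replace-what-you-hold idiom with hypothetical names:

    #include <gst/gst.h>

    static void replace_held_object(GstObject **slot, GstObject *replacement)
    {
        if (*slot)
            gst_object_unref(*slot);   // drop the old reference so it cannot leak
        *slot = replacement;
        if (*slot)
            gst_object_ref(*slot);     // hold our own reference to the new object
    }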

@@ -93,6 +93,9 @@ QGstCodecsInfo::QGstCodecsInfo(QGstCodecsInfo::ElementType elementType)
gst_caps_remove_structure(caps, 0);
}
gst_caps_unref(caps);
gst_caps_unref(allCaps);
#else
Q_UNUSED(elementType);
#endif // GST_CHECK_VERSION(0,10,31)
@@ -143,7 +146,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
padTemplates = padTemplates->next;
if (padTemplate->direction == padDirection) {
-const GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
+GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
const GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -173,6 +176,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
gst_caps_merge_structure(res, newStructure);
}
gst_caps_unref(caps);
}
}
}
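
Context for the const change above: gst_static_caps_get() returns reference-counted caps that the caller must release, which the new gst_caps_unref() calls now do. A short sketch of the corrected pattern (illustrative, not from the patch):

    #include <gst/gst.h>

    static void list_template_caps(GstStaticPadTemplate *tmpl)
    {
        GstCaps *caps = gst_static_caps_get(&tmpl->static_caps);
        for (guint i = 0; i < gst_caps_get_size(caps); ++i) {
            const GstStructure *s = gst_caps_get_structure(caps, i);
            g_print("%s\n", gst_structure_get_name(s));
        }
        gst_caps_unref(caps);   // release the reference returned by gst_static_caps_get()
    }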

@@ -216,6 +216,7 @@ GstElement *QGstreamerGLTextureRenderer::videoSink()
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
gst_object_unref(GST_OBJECT(pad));
}
}
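
The pad returned by gst_element_get_static_pad() is a new reference, so it has to be released once the buffer probe is installed; the probe itself stays active. Sketch of the probe-installation pattern (not from the patch):

    #include <gst/gst.h>

    static gulong install_sink_probe(GstElement *sink, GCallback probe, gpointer user_data)
    {
        gulong id = 0;
        GstPad *pad = gst_element_get_static_pad(sink, "sink");
        if (pad) {
            id = gst_pad_add_buffer_probe(pad, probe, user_data);  // 0.10 API
            gst_object_unref(GST_OBJECT(pad));                     // drop our pad reference
        }
        return id;
    }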

@@ -41,6 +41,7 @@
#include "qgstreamervideorenderer_p.h"
#include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QDebug>
@@ -62,8 +63,7 @@ GstElement *QGstreamerVideoRenderer::videoSink()
{
if (!m_videoSink && m_surface) {
m_videoSink = QVideoSurfaceGstSink::createSink(m_surface);
-gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
-gst_object_sink(GST_OBJECT(m_videoSink));
+qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
}
return reinterpret_cast<GstElement*>(m_videoSink);

@@ -137,8 +137,7 @@ void QGstreamerVideoWidgetControl::createVideoWidget()
if (!m_videoSink)
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
-gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
-gst_object_sink (GST_OBJECT (m_videoSink));
+qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
}
@@ -219,6 +218,7 @@ void QGstreamerVideoWidgetControl::updateNativeVideoSize()
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_widget->setNativeSize(QGstUtils::capsCorrectedResolution(caps));
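
Same rule for caps: gst_pad_get_negotiated_caps() (0.10 API) also returns a reference, and may return NULL before the pad has negotiated; the hunk above adds the missing release of the pad itself. Illustrative sketch, not from the patch:

    #include <gst/gst.h>

    static gboolean negotiated_resolution(GstElement *sink, gint *width, gint *height)
    {
        GstPad *pad = gst_element_get_static_pad(sink, "sink");
        if (!pad)
            return FALSE;

        GstCaps *caps = gst_pad_get_negotiated_caps(pad);  // NULL until negotiation happened
        gst_object_unref(GST_OBJECT(pad));
        if (!caps)
            return FALSE;

        const GstStructure *s = gst_caps_get_structure(caps, 0);
        gboolean ok = gst_structure_get_int(s, "width", width)
                   && gst_structure_get_int(s, "height", height);
        gst_caps_unref(caps);
        return ok;
    }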

@@ -63,11 +63,11 @@ QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elemen
m_videoSink = gst_element_factory_make("xvimagesink", NULL);
if (m_videoSink) {
-gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
-gst_object_sink(GST_OBJECT(m_videoSink));
+qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+gst_object_unref(GST_OBJECT(pad));
}
}
@@ -114,6 +114,7 @@ bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
gst_object_unref(GST_OBJECT(pad));
return true;
}
@@ -319,6 +320,7 @@ void QGstreamerVideoWindow::updateNativeVideoSize()
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);

@@ -401,4 +401,22 @@ QMultimedia::SupportEstimate QGstUtils::hasSupport(const QString &mimeType,
return QMultimedia::MaybeSupported;
}
void qt_gst_object_ref_sink(gpointer object)
{
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 24)
gst_object_ref_sink(object);
#else
g_return_if_fail (GST_IS_OBJECT(object));
GST_OBJECT_LOCK(object);
if (G_LIKELY(GST_OBJECT_IS_FLOATING(object))) {
GST_OBJECT_FLAG_UNSET(object, GST_OBJECT_FLOATING);
GST_OBJECT_UNLOCK(object);
} else {
GST_OBJECT_UNLOCK(object);
gst_object_ref(object);
}
#endif
}
QT_END_NAMESPACE
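
The new helper replaces the old two-call idiom (gst_object_ref() followed by gst_object_sink()) used throughout the plugins and forwards to gst_object_ref_sink() where it exists; GST_CHECK_VERSION(0, 10, 24) would be the more conventional spelling of that version guard. Usage sketch (make_owned_sink is illustrative, qt_gst_object_ref_sink is the helper declared in qgstutils_p.h below):

    // Create a sink element and take ownership immediately, so it does not
    // leak even if it is never added to a bin.
    static GstElement *make_owned_sink(void)
    {
        GstElement *sink = gst_element_factory_make("ximagesink", NULL);
        if (sink)
            qt_gst_object_ref_sink(GST_OBJECT(sink));  // sink the floating ref; we hold the owning ref
        return sink;  // caller releases it later with gst_object_unref()
    }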

@@ -78,6 +78,8 @@ namespace QGstUtils {
const QSet<QString> &supportedMimeTypeSet);
}
void qt_gst_object_ref_sink(gpointer object);
QT_END_NAMESPACE
#endif

@@ -147,6 +147,7 @@ void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
}
}
}
gst_caps_unref(caps);
}
}
gst_object_unref (factory);

@@ -158,6 +158,8 @@ void QGstreamerAudioDecoderSession::configureAppSrcElement(GObject* object, GObj
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
g_object_unref(G_OBJECT(appsrc));
}
#endif
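
Object-typed values fetched with g_object_get() arrive with an added reference, which is why the appsrc handled in this callback can (and must) be dropped once setup() has taken its own reference. Sketch of that rule with an illustrative property name, not taken from the patch:

    #include <gst/gst.h>

    static void configure_source(GObject *decoder)
    {
        GstElement *src = NULL;
        g_object_get(decoder, "source", &src, NULL);  // "source" is illustrative; the ref is ours now
        if (src) {
            /* ... configure the source element ... */
            g_object_unref(src);                      // balance the ref added by g_object_get()
        }
    }
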
@@ -372,7 +374,8 @@ void QGstreamerAudioDecoderSession::start()
if (mFormat.isValid()) {
setAudioFlags(false);
GstCaps *caps = QGstUtils::capsForAudioFormat(mFormat);
-gst_app_sink_set_caps(m_appSink, caps); // appsink unrefs caps
+gst_app_sink_set_caps(m_appSink, caps);
+gst_caps_unref(caps);
} else {
// We want whatever the native audio format is
setAudioFlags(true);
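
The removed comment was misleading: gst_app_sink_set_caps() does not take ownership of the caps, it keeps its own reference, so the caller still has to drop the reference it created. Minimal sketch of the corrected pattern (caps fields are illustrative):

    #include <gst/gst.h>
    #include <gst/app/gstappsink.h>

    static void constrain_appsink(GstAppSink *appsink)
    {
        GstCaps *caps = gst_caps_new_simple("audio/x-raw-int",
                                            "channels", G_TYPE_INT, 2,
                                            NULL);
        gst_app_sink_set_caps(appsink, caps);  // appsink takes its own reference internally
        gst_caps_unref(caps);                  // release the reference we created
    }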

@@ -114,11 +114,15 @@ GstEncodingProfile *CameraBinAudioEncoder::createProfile()
else
caps = gst_caps_from_string(codec.toLatin1());
-return (GstEncodingProfile *)gst_encoding_audio_profile_new(
+GstEncodingProfile *profile = (GstEncodingProfile *)gst_encoding_audio_profile_new(
caps,
!preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
NULL, //restriction
0); //presence
+gst_caps_unref(caps);
+return profile;
}
QT_END_NAMESPACE
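
gst_encoding_audio_profile_new() takes its own reference to the caps it is given, so the caps built from the codec string leaked on every call; the profile is now stored in a local so the caps can be released before returning. Sketch of the corrected shape (codec string is illustrative, not from the patch):

    #include <gst/gst.h>
    #include <gst/pbutils/encoding-profile.h>

    static GstEncodingProfile *make_audio_profile(void)
    {
        GstCaps *caps = gst_caps_from_string("audio/x-vorbis");
        GstEncodingProfile *profile = (GstEncodingProfile *)gst_encoding_audio_profile_new(
                    caps,
                    NULL,   /* preset */
                    NULL,   /* restriction */
                    0);     /* presence */
        gst_caps_unref(caps);   // the profile holds its own reference
        return profile;
    }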

@@ -124,11 +124,15 @@ GstEncodingContainerProfile *CameraBinContainer::createProfile()
caps = gst_caps_from_string(format.toLatin1());
}
-return (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
+GstEncodingContainerProfile *profile = (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
"camerabin2_profile",
(gchar *)"custom camera profile",
caps,
NULL); //preset
+gst_caps_unref(caps);
+return profile;
}
/*!

@@ -191,10 +191,14 @@ GstEncodingContainerProfile *CameraBinRecorder::videoProfile()
GstEncodingProfile *audioProfile = m_session->audioEncodeControl()->createProfile();
GstEncodingProfile *videoProfile = m_session->videoEncodeControl()->createProfile();
-if (audioProfile)
-gst_encoding_container_profile_add_profile(containerProfile, audioProfile);
-if (videoProfile)
-gst_encoding_container_profile_add_profile(containerProfile, videoProfile);
+if (audioProfile) {
+if (!gst_encoding_container_profile_add_profile(containerProfile, audioProfile))
+gst_encoding_profile_unref(audioProfile);
+}
+if (videoProfile) {
+if (!gst_encoding_container_profile_add_profile(containerProfile, videoProfile))
+gst_encoding_profile_unref(videoProfile);
+}
}
return containerProfile;
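
gst_encoding_container_profile_add_profile() does not copy the child profile: on success the container takes ownership, and on failure the caller is still the owner, which is what the new unref-on-failure branches handle. Sketch of that contract, not from the patch:

    #include <gst/pbutils/encoding-profile.h>

    static void add_or_release(GstEncodingContainerProfile *container, GstEncodingProfile *child)
    {
        if (!child)
            return;
        if (!gst_encoding_container_profile_add_profile(container, child))
            gst_encoding_profile_unref(child);  // the container refused it; drop our reference
    }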

@@ -61,6 +61,7 @@
#include "camerabincapturebufferformat.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstutils_p.h>
#include <qmediarecorder.h>
#ifdef HAVE_GST_PHOTOGRAPHY
@@ -108,9 +109,6 @@
#define CAMERABIN_IMAGE_MODE 1
#define CAMERABIN_VIDEO_MODE 2
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"
@@ -146,7 +144,7 @@ CameraBinSession::CameraBinSession(QObject *parent)
{
m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
-gstRef(m_camerabin);
+qt_gst_object_ref_sink(m_camerabin);
m_bus = gst_element_get_bus(m_camerabin);
@@ -192,9 +190,11 @@ CameraBinSession::~CameraBinSession()
gst_element_set_state(m_camerabin, GST_STATE_NULL);
gst_element_get_state(m_camerabin, NULL, NULL, GST_CLOCK_TIME_NONE);
-gstUnref(m_camerabin);
-gstUnref(m_viewfinderElement);
+gst_object_unref(GST_OBJECT(m_bus));
+gst_object_unref(GST_OBJECT(m_camerabin));
}
+if (m_viewfinderElement)
+gst_object_unref(GST_OBJECT(m_viewfinderElement));
}
#ifdef HAVE_GST_PHOTOGRAPHY
@@ -239,7 +239,7 @@ bool CameraBinSession::setupCameraBin()
qWarning() << "Staring camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
-gst_object_ref(GST_OBJECT(m_viewfinderElement));
+qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
}
@@ -438,6 +438,9 @@ GstElement *CameraBinSession::buildCameraSource()
if (m_videoSrc != videoSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
if (videoSrc)
gst_object_unref(GST_OBJECT(videoSrc));
return m_videoSrc;
}
@@ -680,10 +683,12 @@ void CameraBinSession::setState(QCamera::State newState)
m_recorderControl->applySettings();
+GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
-m_recorderControl->videoProfile(),
+profile,
NULL);
+gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
@@ -803,6 +808,7 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
}
}
}
gst_iterator_free(elements);
}
}
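
The camerabin hunks combine several of the rules above: the bus and the camerabin element are now released in the destructor, the viewfinder element is ref_sink'ed instead of plain ref'ed, the camera source element is released again after it has been handed to camerabin, and the iterator from the metadata walk is freed. The "video-profile" change deserves a separate note: g_object_set() makes camerabin take its own reference, so the profile created for the call still has to be released. Sketch mirroring that hunk:

    #include <gst/gst.h>
    #include <gst/pbutils/encoding-profile.h>

    static void apply_video_profile(GstElement *camerabin, GstEncodingContainerProfile *profile)
    {
        g_object_set(G_OBJECT(camerabin), "video-profile", profile, NULL);  // camerabin refs it
        gst_encoding_profile_unref(profile);                                // drop the caller's reference
    }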

@@ -175,6 +175,8 @@ GstEncodingProfile *CameraBinVideoEncoder::createProfile()
NULL, //restriction
1); //presence
gst_caps_unref(caps);
gst_encoding_video_profile_set_pass(profile, 0);
gst_encoding_video_profile_set_variableframerate(profile, TRUE);

@@ -196,6 +196,8 @@ GstElement *QGstreamerAudioEncode::createEncoder()
//qDebug() << "set caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
gst_caps_unref(caps);
}
if (encoderElement) {

@@ -261,6 +261,7 @@ void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const
}
}
}
gst_caps_unref(caps);
}
}
gst_object_unref (factory);

@@ -49,6 +49,7 @@
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstutils_p.h>
#include <gst/gsttagsetter.h>
#include <gst/gstversion.h>
@@ -64,9 +65,6 @@
QT_BEGIN_NAMESPACE
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
:QObject(parent),
m_state(StoppedState),
@@ -97,7 +95,7 @@ QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::Cap
m_passPrerollImage(false)
{
m_pipeline = gst_pipeline_new("media-capture-pipeline");
-gstRef(m_pipeline);
+qt_gst_object_ref_sink(m_pipeline);
m_bus = gst_element_get_bus(m_pipeline);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
@@ -116,6 +114,7 @@ QGstreamerCaptureSession::~QGstreamerCaptureSession()
{
setState(StoppedState);
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(m_bus));
gst_object_unref(GST_OBJECT(m_pipeline));
}
@@ -160,6 +159,7 @@ GstElement *QGstreamerCaptureSession::buildEncodeBin()
gst_bin_add(GST_BIN(encodeBin), audioEncoder);
if (!gst_element_link_many(audioConvert, audioQueue, m_audioVolume, audioEncoder, muxer, NULL)) {
m_audioVolume = 0;
gst_object_unref(encodeBin);
return 0;
}
@@ -333,6 +333,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
gst_caps_unref(caps);
}
// add ghostpads
@@ -501,6 +502,7 @@ GstElement *QGstreamerCaptureSession::buildImageCapture()
GstPad *pad = gst_element_get_static_pad(queue, "src");
Q_ASSERT(pad);
gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
g_signal_connect(G_OBJECT(sink), "handoff",
@@ -531,6 +533,7 @@ void QGstreamerCaptureSession::captureImage(int requestId, const QString &fileNa
#define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} }
#define UNREF_ELEMENT(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
{
@@ -562,6 +565,9 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
if (ok) {
gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
ok &= gst_element_link(m_audioSrc, m_audioPreview);
} else {
UNREF_ELEMENT(m_audioSrc);
UNREF_ELEMENT(m_audioPreview);
}
}
if (m_captureMode & Video || m_captureMode & Image) {
@@ -582,6 +588,12 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
ok &= gst_element_link(m_videoTee, m_imageCaptureBin);
} else {
UNREF_ELEMENT(m_videoSrc);
UNREF_ELEMENT(m_videoTee);
UNREF_ELEMENT(m_videoPreviewQueue);
UNREF_ELEMENT(m_videoPreview);
UNREF_ELEMENT(m_imageCaptureBin);
}
}
break;
@@ -631,6 +643,11 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
ok &= gst_element_link(m_audioTee, m_encodeBin);
} else {
UNREF_ELEMENT(m_audioSrc);
UNREF_ELEMENT(m_audioPreview);
UNREF_ELEMENT(m_audioTee);
UNREF_ELEMENT(m_audioPreviewQueue);
}
}
@@ -648,6 +665,11 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_videoSrc, m_videoTee);
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
} else {
UNREF_ELEMENT(m_videoSrc);
UNREF_ELEMENT(m_videoTee);
UNREF_ELEMENT(m_videoPreviewQueue);
UNREF_ELEMENT(m_videoPreview);
}
if (ok && (m_captureMode & Video))
@@ -917,6 +939,7 @@ void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &dat
}
}
gst_iterator_free(elements);
}
}
@@ -1096,8 +1119,10 @@ void QGstreamerCaptureSession::removeAudioBufferProbe()
return;
GstPad *pad = getAudioProbePad();
-if (pad)
+if (pad) {
gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId);
+gst_object_unref(G_OBJECT(pad));
+}
m_audioBufferProbeId = -1;
}
@@ -1107,8 +1132,10 @@ void QGstreamerCaptureSession::addAudioBufferProbe()
Q_ASSERT(m_audioBufferProbeId == -1);
GstPad *pad = getAudioProbePad();
-if (pad)
+if (pad) {
m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this);
+gst_object_unref(G_OBJECT(pad));
+}
}
QT_END_NAMESPACE
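
The UNREF_ELEMENT branches cover the case where part of the capture graph could not be built: elements that were created but never added to the pipeline are still owned, via their initial floating reference, by this code and would otherwise leak silently, while elements that did make it into the pipeline are released by the pipeline itself. Sketch of the distinction with illustrative element names:

    #include <gst/gst.h>

    static gboolean build_audio_preview(GstElement *pipeline)
    {
        GstElement *src     = gst_element_factory_make("autoaudiosrc", NULL);
        GstElement *preview = gst_element_factory_make("autoaudiosink", NULL);

        if (src && preview) {
            gst_bin_add_many(GST_BIN(pipeline), src, preview, NULL);  // pipeline owns them now
            return gst_element_link(src, preview);
        }

        // One of the factories failed: whatever was created has no owner yet,
        // so its floating reference has to be dropped here.
        if (src)
            gst_object_unref(GST_OBJECT(src));
        if (preview)
            gst_object_unref(GST_OBJECT(preview));
        return FALSE;
    }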

@@ -286,6 +286,8 @@ GstElement *QGstreamerVideoEncode::createEncoder()
//qDebug() << "set video caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
gst_caps_unref(caps);
}
return GST_ELEMENT(encoderBin);

@@ -48,6 +48,7 @@
#include <private/gstvideoconnector_p.h>
#include <private/qgstutils_p.h>
#include <private/playlistfileparser_p.h>
#include <private/qgstutils_p.h>
#include <gst/gstvalue.h>
#include <gst/base/gstbasesrc.h>
@@ -157,17 +158,20 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
}
}
-m_videoOutputBin = gst_bin_new("video-output-bin");
-gst_object_ref(GST_OBJECT(m_videoOutputBin));
-m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0));
+m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
m_colorSpace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-vo");
-gst_object_ref(GST_OBJECT(m_colorSpace));
+// might not get a parent, take ownership to avoid leak
+qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL);
gst_object_ref(GST_OBJECT(m_nullVideoSink));
+m_videoOutputBin = gst_bin_new("video-output-bin");
+// might not get a parent, take ownership to avoid leak
+qt_gst_object_ref_sink(GST_OBJECT(m_videoOutputBin));
gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
gst_element_link(m_videoIdentity, m_nullVideoSink);
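
The constructor rework distinguishes two ownership cases: m_colorSpace and m_videoOutputBin may never be given a parent, so they are ref_sink'ed and this code owns them outright, while m_nullVideoSink is added to the output bin, which sinks its floating reference, and keeps an extra strong reference on top, presumably because it is swapped in and out of the bin at runtime. Sketch of the second case (plain 0.10 calls, element names illustrative):

    #include <gst/gst.h>

    static void swap_safe_sink(GstElement *bin)
    {
        GstElement *sink = gst_element_factory_make("fakesink", NULL);
        gst_object_ref(GST_OBJECT(sink));      // our own strong ref, survives bin removal
        gst_bin_add(GST_BIN(bin), sink);       // the bin sinks the floating ref and also owns it

        gst_bin_remove(GST_BIN(bin), sink);    // the bin drops its ref; ours keeps the element alive
        gst_object_unref(GST_OBJECT(sink));    // final release once we are done with it
    }
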
@@ -238,6 +242,8 @@ void QGstreamerPlayerSession::configureAppSrcElement(GObject* object, GObject *o
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
g_object_unref(G_OBJECT(appsrc));
}
#endif