Merge remote-tracking branch 'origin/5.5' into 5.6

Conflicts:
	src/imports/multimedia/qdeclarativeaudio.cpp

Change-Id: I57c6252b084e4ed796f6f308b2e0c717d0f59b13
This commit is contained in:
Yoann Lopes
2015-08-24 14:31:24 +02:00
56 changed files with 2426 additions and 1986 deletions

View File

@@ -38,63 +38,87 @@ import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.util.Log;
import java.lang.Math;
import java.util.concurrent.locks.ReentrantLock;
public class QtCameraListener implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
{
private int m_cameraId = -1;
private byte[][] m_cameraPreviewBuffer = null;
private volatile int m_actualPreviewBuffer = 0;
private final ReentrantLock m_buffersLock = new ReentrantLock();
private boolean m_fetchEachFrame = false;
private static final String TAG = "Qt Camera";
private static final int BUFFER_POOL_SIZE = 2;
private int m_cameraId = -1;
private boolean m_notifyNewFrames = false;
private byte[][] m_previewBuffers = null;
private byte[] m_lastPreviewBuffer = null;
private Camera.Size m_previewSize = null;
private QtCameraListener(int id)
{
m_cameraId = id;
}
public void preparePreviewBuffer(Camera camera)
public void notifyNewFrames(boolean notify)
{
Camera.Size previewSize = camera.getParameters().getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int)Math.ceil(bytesPerPixel*previewSize.width*previewSize.height);
m_buffersLock.lock();
if (m_cameraPreviewBuffer == null || m_cameraPreviewBuffer[0].length < bufferSizeNeeded)
m_cameraPreviewBuffer = new byte[2][bufferSizeNeeded];
m_buffersLock.unlock();
m_notifyNewFrames = notify;
}
public void fetchEachFrame(boolean fetch)
public byte[] lastPreviewBuffer()
{
m_fetchEachFrame = fetch;
return m_lastPreviewBuffer;
}
public byte[] lockAndFetchPreviewBuffer()
public int previewWidth()
{
//This method should always be followed by unlockPreviewBuffer()
//This method is not just a getter. It also marks the last preview as already seen.
//We should reset the actual-buffer flag here to make sure we will not use an old preview with future captures
byte[] result = null;
m_buffersLock.lock();
result = m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 0 : 1];
m_actualPreviewBuffer = 0;
return result;
if (m_previewSize == null)
return -1;
return m_previewSize.width;
}
public void unlockPreviewBuffer()
public int previewHeight()
{
if (m_buffersLock.isHeldByCurrentThread())
m_buffersLock.unlock();
if (m_previewSize == null)
return -1;
return m_previewSize.height;
}
public byte[] callbackBuffer()
public void setupPreviewCallback(Camera camera)
{
return m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0];
// Clear previous callback (also clears added buffers)
m_lastPreviewBuffer = null;
camera.setPreviewCallbackWithBuffer(null);
final Camera.Parameters params = camera.getParameters();
m_previewSize = params.getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int) Math.ceil(bytesPerPixel * m_previewSize.width * m_previewSize.height);
// We could keep the same buffers when they are already bigger than the required size
// but the Android doc says the size must match, so in doubt just replace them.
if (m_previewBuffers == null || m_previewBuffers[0].length != bufferSizeNeeded)
m_previewBuffers = new byte[BUFFER_POOL_SIZE][bufferSizeNeeded];
// Add callback and queue all buffers
camera.setPreviewCallbackWithBuffer(this);
for (byte[] buffer : m_previewBuffers)
camera.addCallbackBuffer(buffer);
}
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
// Re-enqueue the last buffer
if (m_lastPreviewBuffer != null)
camera.addCallbackBuffer(m_lastPreviewBuffer);
m_lastPreviewBuffer = data;
if (data != null && m_notifyNewFrames)
notifyNewPreviewFrame(m_cameraId, data, m_previewSize.width, m_previewSize.height);
}
@Override
@@ -109,24 +133,6 @@ public class QtCameraListener implements Camera.ShutterCallback,
notifyPictureCaptured(m_cameraId, data);
}
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
m_buffersLock.lock();
if (data != null && m_fetchEachFrame)
notifyFrameFetched(m_cameraId, data);
if (data == m_cameraPreviewBuffer[0])
m_actualPreviewBuffer = 1;
else if (data == m_cameraPreviewBuffer[1])
m_actualPreviewBuffer = 2;
else
m_actualPreviewBuffer = 0;
camera.addCallbackBuffer(m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0]);
m_buffersLock.unlock();
}
@Override
public void onAutoFocus(boolean success, Camera camera)
{
@@ -136,5 +142,5 @@ public class QtCameraListener implements Camera.ShutterCallback,
private static native void notifyAutoFocusComplete(int id, boolean success);
private static native void notifyPictureExposed(int id);
private static native void notifyPictureCaptured(int id, byte[] data);
private static native void notifyFrameFetched(int id, byte[] data);
private static native void notifyNewPreviewFrame(int id, byte[] data, int width, int height);
}

View File

@@ -206,9 +206,10 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(previewFetched(QByteArray)), this, SLOT(onCameraPreviewFetched(QByteArray)));
connect(m_camera, SIGNAL(frameFetched(QByteArray)),
this, SLOT(onCameraFrameFetched(QByteArray)),
connect(m_camera, SIGNAL(lastPreviewFrameFetched(QByteArray,int,int)),
this, SLOT(onLastPreviewFrameFetched(QByteArray,int,int)));
connect(m_camera, SIGNAL(newPreviewFrame(QByteArray,int,int)),
this, SLOT(onNewPreviewFrame(QByteArray,int,int)),
Qt::DirectConnection);
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
@@ -221,7 +222,7 @@ bool QAndroidCameraSession::open()
if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
m_camera->setPreviewFormat(AndroidCamera::NV21);
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
emit opened();
} else {
@@ -410,7 +411,7 @@ void QAndroidCameraSession::addProbe(QAndroidMediaVideoProbeControl *probe)
if (probe)
m_videoProbes << probe;
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
@@ -419,7 +420,7 @@ void QAndroidCameraSession::removeProbe(QAndroidMediaVideoProbeControl *probe)
m_videoProbesMutex.lock();
m_videoProbes.remove(probe);
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
@@ -562,25 +563,54 @@ void QAndroidCameraSession::onCameraPictureExposed()
m_camera->fetchLastPreviewFrame();
}
void QAndroidCameraSession::onCameraPreviewFetched(const QByteArray &preview)
void QAndroidCameraSession::onLastPreviewFrameFetched(const QByteArray &preview, int width, int height)
{
if (preview.size()) {
QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
m_currentImageCaptureId,
preview,
width,
height,
m_camera->getRotation());
}
}
void QAndroidCameraSession::onCameraFrameFetched(const QByteArray &frame)
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation)
{
emit imageCaptured(id, prepareImageFromPreviewData(data, width, height, rotation));
}
QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation)
{
QImage result(width, height, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)result.bits(),
width,
height);
QTransform transform;
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
transform.rotate(rotation);
result = result.transformed(transform);
return result;
}
void QAndroidCameraSession::onNewPreviewFrame(const QByteArray &frame, int width, int height)
{
m_videoProbesMutex.lock();
if (frame.size() && m_videoProbes.count()) {
const QSize frameSize = m_camera->previewSize();
// Bytes per line should be only for the first plane. For NV21, the Y plane has 8 bits
// per sample, so bpl == width
QVideoFrame videoFrame(new DataVideoBuffer(frame, frameSize.width()),
frameSize,
QVideoFrame videoFrame(new DataVideoBuffer(frame, width),
QSize(width, height),
QVideoFrame::Format_NV21);
foreach (QAndroidMediaVideoProbeControl *probe, m_videoProbes)
probe->newFrameProbed(videoFrame);
@@ -666,35 +696,6 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
}
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int rotation)
{
emit imageCaptured(id, prepareImageFromPreviewData(data, rotation));
}
QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int rotation)
{
QSize frameSize = m_camera->previewSize();
QImage result(frameSize, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)result.bits(),
frameSize.width(),
frameSize.height());
QTransform transform;
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
transform.rotate(rotation);
result = result.transformed(transform);
return result;
}
void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
if (ready && m_state == QCamera::ActiveState)

View File

@@ -113,9 +113,9 @@ private Q_SLOTS:
void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraPictureExposed();
void onCameraPreviewFetched(const QByteArray &preview);
void onCameraFrameFetched(const QByteArray &frame);
void onCameraPictureCaptured(const QByteArray &data);
void onLastPreviewFrameFetched(const QByteArray &preview, int width, int height);
void onNewPreviewFrame(const QByteArray &frame, int width, int height);
void onCameraPreviewStarted();
void onCameraPreviewStopped();
@@ -129,8 +129,8 @@ private:
void stopPreview();
void applyImageSettings();
void processPreviewImage(int id, const QByteArray &data, int rotation);
QImage prepareImageFromPreviewData(const QByteArray &data, int rotation);
void processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation);
QImage prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation);
void processCapturedImage(int id,
const QByteArray &data,
const QSize &resolution,

View File

@@ -233,9 +233,14 @@ void QAndroidCaptureSession::start()
m_notifyTimer.start();
updateDuration();
if (m_cameraSession)
if (m_cameraSession) {
m_cameraSession->setReadyForCapture(false);
// Preview frame callback is cleared when setting up the camera with the media recorder.
// We need to reset it.
m_cameraSession->camera()->setupPreviewFrameCallback();
}
m_state = QMediaRecorder::RecordingState;
emit stateChanged(m_state);
}

View File

@@ -114,7 +114,7 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
}
}
static void notifyFrameFetched(JNIEnv *env, jobject, int id, jbyteArray data)
static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data, int width, int height)
{
QMutexLocker locker(&g_cameraMapMutex);
AndroidCamera *obj = g_cameraMap->value(id, 0);
@@ -123,7 +123,7 @@ static void notifyFrameFetched(JNIEnv *env, jobject, int id, jbyteArray data)
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
Q_EMIT obj->frameFetched(bytes);
Q_EMIT obj->newPreviewFrame(bytes, width, height);
}
}
@@ -204,7 +204,8 @@ public:
Q_INVOKABLE void takePicture();
Q_INVOKABLE void fetchEachFrame(bool fetch);
Q_INVOKABLE void setupPreviewFrameCallback();
Q_INVOKABLE void notifyNewFrames(bool notify);
Q_INVOKABLE void fetchLastPreviewFrame();
Q_INVOKABLE void applyParameters();
@@ -229,7 +230,7 @@ Q_SIGNALS:
void whiteBalanceChanged();
void previewFetched(const QByteArray &preview);
void lastPreviewFrameFetched(const QByteArray &preview, int width, int height);
};
AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
@@ -247,7 +248,7 @@ AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
connect(d, &AndroidCameraPrivate::previewFetched, this, &AndroidCamera::previewFetched);
connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
}
AndroidCamera::~AndroidCamera()
@@ -633,10 +634,16 @@ void AndroidCamera::takePicture()
QMetaObject::invokeMethod(d, "takePicture", Qt::BlockingQueuedConnection);
}
void AndroidCamera::fetchEachFrame(bool fetch)
void AndroidCamera::setupPreviewFrameCallback()
{
Q_D(AndroidCamera);
QMetaObject::invokeMethod(d, "fetchEachFrame", Q_ARG(bool, fetch));
QMetaObject::invokeMethod(d, "setupPreviewFrameCallback");
}
void AndroidCamera::notifyNewFrames(bool notify)
{
Q_D(AndroidCamera);
QMetaObject::invokeMethod(d, "notifyNewFrames", Q_ARG(bool, notify));
}
void AndroidCamera::fetchLastPreviewFrame()
@@ -1307,17 +1314,7 @@ void AndroidCameraPrivate::setJpegQuality(int quality)
void AndroidCameraPrivate::startPreview()
{
//We need to clear the preview buffers queue here, but there is no method to do it
//Though just resetting the preview callback does the trick
m_camera.callMethod<void>("setPreviewCallbackWithBuffer",
"(Landroid/hardware/Camera$PreviewCallback;)V",
jobject(0));
m_cameraListener.callMethod<void>("preparePreviewBuffer", "(Landroid/hardware/Camera;)V", m_camera.object());
QJNIObjectPrivate buffer = m_cameraListener.callObjectMethod<jbyteArray>("callbackBuffer");
m_camera.callMethod<void>("addCallbackBuffer", "([B)V", buffer.object());
m_camera.callMethod<void>("setPreviewCallbackWithBuffer",
"(Landroid/hardware/Camera$PreviewCallback;)V",
m_cameraListener.object());
setupPreviewFrameCallback();
m_camera.callMethod<void>("startPreview");
emit previewStarted();
}
@@ -1338,28 +1335,34 @@ void AndroidCameraPrivate::takePicture()
m_cameraListener.object());
}
void AndroidCameraPrivate::fetchEachFrame(bool fetch)
void AndroidCameraPrivate::setupPreviewFrameCallback()
{
m_cameraListener.callMethod<void>("fetchEachFrame", "(Z)V", fetch);
m_cameraListener.callMethod<void>("setupPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
}
void AndroidCameraPrivate::notifyNewFrames(bool notify)
{
m_cameraListener.callMethod<void>("notifyNewFrames", "(Z)V", notify);
}
void AndroidCameraPrivate::fetchLastPreviewFrame()
{
QJNIEnvironmentPrivate env;
QJNIObjectPrivate data = m_cameraListener.callObjectMethod("lockAndFetchPreviewBuffer", "()[B");
if (!data.isValid()) {
m_cameraListener.callMethod<void>("unlockPreviewBuffer");
QJNIObjectPrivate data = m_cameraListener.callObjectMethod("lastPreviewBuffer", "()[B");
if (!data.isValid())
return;
}
const int arrayLength = env->GetArrayLength(static_cast<jbyteArray>(data.object()));
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(static_cast<jbyteArray>(data.object()),
0,
arrayLength,
reinterpret_cast<jbyte *>(bytes.data()));
m_cameraListener.callMethod<void>("unlockPreviewBuffer");
emit previewFetched(bytes);
emit lastPreviewFrameFetched(bytes,
m_cameraListener.callMethod<jint>("previewWidth"),
m_cameraListener.callMethod<jint>("previewHeight"));
}
void AndroidCameraPrivate::applyParameters()
@@ -1404,7 +1407,7 @@ bool AndroidCamera::initJNI(JNIEnv *env)
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
{"notifyFrameFetched", "(I[B)V", (void *)notifyFrameFetched}
{"notifyNewPreviewFrame", "(I[BII)V", (void *)notifyNewPreviewFrame}
};
if (clazz && env->RegisterNatives(clazz,

View File

@@ -155,7 +155,8 @@ public:
void takePicture();
void fetchEachFrame(bool fetch);
void setupPreviewFrameCallback();
void notifyNewFrames(bool notify);
void fetchLastPreviewFrame();
QJNIObjectPrivate getCameraObject();
@@ -176,8 +177,8 @@ Q_SIGNALS:
void pictureExposed();
void pictureCaptured(const QByteArray &data);
void previewFetched(const QByteArray &preview);
void frameFetched(const QByteArray &frame);
void lastPreviewFrameFetched(const QByteArray &preview, int width, int height);
void newPreviewFrame(const QByteArray &frame, int width, int height);
private:
AndroidCamera(AndroidCameraPrivate *d, QThread *worker);

View File

@@ -646,7 +646,8 @@ bool DSCameraSession::configurePreviewFormat()
if ((m_viewfinderSettings.resolution().isEmpty() || m_viewfinderSettings.resolution() == s.resolution())
&& (qFuzzyIsNull(m_viewfinderSettings.minimumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.minimumFrameRate(), (float)s.minimumFrameRate()))
&& (qFuzzyIsNull(m_viewfinderSettings.maximumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.maximumFrameRate(), (float)s.maximumFrameRate()))
&& (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid || m_viewfinderSettings.pixelFormat() == s.pixelFormat())) {
&& (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid || m_viewfinderSettings.pixelFormat() == s.pixelFormat())
&& (m_viewfinderSettings.pixelAspectRatio().isEmpty() || m_viewfinderSettings.pixelAspectRatio() == s.pixelAspectRatio())) {
resolvedViewfinderSettings = s;
break;
}
@@ -899,6 +900,7 @@ void DSCameraSession::updateSourceCapabilities()
settings.setMinimumFrameRate(frameRateRange.minimumFrameRate);
settings.setMaximumFrameRate(frameRateRange.maximumFrameRate);
settings.setPixelFormat(pixelFormat);
settings.setPixelAspectRatio(1, 1);
m_supportedViewfinderSettings.append(settings);
AM_MEDIA_TYPE format;

View File

@@ -34,6 +34,7 @@
#include "camerabinaudioencoder.h"
#include "camerabincontainer.h"
#include <private/qgstcodecsinfo_p.h>
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
@@ -120,8 +121,7 @@ GstEncodingProfile *CameraBinAudioEncoder::createProfile()
void CameraBinAudioEncoder::applySettings(GstElement *encoder)
{
GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
const char * const name = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
const char * const name = qt_gst_element_get_factory_name(encoder);
const bool isVorbis = qstrcmp(name, "vorbisenc") == 0;

View File

@@ -124,8 +124,8 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#else
m_videoWindow = new QGstreamerVideoWindow(this);
#endif
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -133,9 +133,8 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway. QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidgetControl->videoSink()) {
delete m_videoWidgetControl;
m_videoWidgetControl = 0;

View File

@@ -388,7 +388,7 @@ void CameraBinSession::setupCaptureResolution()
gst_caps_unref(caps);
// Special case when using mfw_v4lsrc
if (m_videoSrc && qstrcmp(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(m_videoSrc))), "mfw_v4lsrc") == 0) {
if (m_videoSrc && qstrcmp(qt_gst_element_get_factory_name(m_videoSrc), "mfw_v4lsrc") == 0) {
int capMode = 0;
if (viewfinderResolution == QSize(320, 240))
capMode = 1;
@@ -472,9 +472,7 @@ GstElement *CameraBinSession::buildCameraSource()
#if CAMERABIN_DEBUG
qDebug() << "set camera device" << m_inputDevice;
#endif
const char *const cameraSrcName = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(m_cameraSrc)));
m_usingWrapperCameraBinSrc = qstrcmp(cameraSrcName, "wrappercamerabinsrc") == 0;
m_usingWrapperCameraBinSrc = qstrcmp(qt_gst_element_get_factory_name(m_cameraSrc), "wrappercamerabinsrc") == 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "video-source")) {
if (!m_videoSrc) {

View File

@@ -34,6 +34,7 @@
#include "camerabinvideoencoder.h"
#include "camerabinsession.h"
#include "camerabincontainer.h"
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
@@ -178,8 +179,7 @@ GstEncodingProfile *CameraBinVideoEncoder::createProfile()
void CameraBinVideoEncoder::applySettings(GstElement *encoder)
{
GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
const char * const name = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
const char * const name = qt_gst_element_get_factory_name(encoder);
const int bitRate = m_actualVideoSettings.bitRate();
if (bitRate == -1) {

View File

@@ -102,8 +102,8 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
m_videoRenderer = new QGstreamerVideoRenderer(this);
m_videoWindow = new QGstreamerVideoWindow(this);
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -112,9 +112,8 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
#if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway. QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidgetControl->videoSink()) {
delete m_videoWidgetControl;
m_videoWidgetControl = 0;

View File

@@ -776,11 +776,11 @@ void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState
if (!m_waitingForEos) {
m_waitingForEos = true;
//qDebug() << "Waiting for EOS";
// Unless gstreamer is in GST_STATE_PLAYING our EOS message will not be received.
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
//with live sources it's necessary to send EOS even to the pipeline
//before going to the STOPPED state
gst_element_send_event(m_pipeline, gst_event_new_eos());
// Unless gstreamer is in GST_STATE_PLAYING our EOS message will not be received.
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
return;
} else {

View File

@@ -99,8 +99,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#else
m_videoWindow = new QGstreamerVideoWindow(this);
#endif
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -109,8 +109,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#if defined(HAVE_WIDGETS)
m_videoWidget = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidget->videoSink()) {
delete m_videoWidget;