Merge remote-tracking branch 'origin/release' into stable

Change-Id: Ifcb0e0bac29c11980ea2a9d67b12e1b79c4bd9f6
Frederik Gladhorn
2014-01-22 11:36:50 +01:00
19 changed files with 689 additions and 213 deletions


@@ -55,6 +55,7 @@
#endif
#include "camerabinimageprocessing.h"
#include "camerabinviewfindersettings.h"
#include "camerabincapturedestination.h"
#include "camerabincapturebufferformat.h"
@@ -75,6 +76,7 @@
#include <QtGui/qdesktopservices.h>
#include <QtGui/qimage.h>
#include <QtCore/qdatetime.h>
//#define CAMERABIN_DEBUG 1
//#define CAMERABIN_DEBUG_DUMP_BIN 1
@@ -91,6 +93,8 @@
#define AUDIO_SOURCE_PROPERTY "audio-source"
#define SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-supported-caps"
#define SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-supported-caps"
#define SUPPORTED_VIEWFINDER_CAPS_PROPERTY "viewfinder-supported-caps"
#define AUDIO_CAPTURE_CAPS_PROPERTY "audio-capture-caps"
#define IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-caps"
#define VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-caps"
#define VIEWFINDER_CAPS_PROPERTY "viewfinder-caps"
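These defines name GObject properties of the camerabin element. A minimal sketch of reading one of the supported-caps properties, assuming "camerabin" is a valid camerabin GstElement* (only the property macro comes from the defines above):
// Sketch only: ask camerabin which viewfinder caps it supports.
GstCaps *supported = 0;
g_object_get(G_OBJECT(camerabin), SUPPORTED_VIEWFINDER_CAPS_PROPERTY, &supported, NULL);
if (supported) {
    gchar *str = gst_caps_to_string(supported);
    qDebug() << "viewfinder supports:" << str;
    g_free(str);               // gst_caps_to_string() allocates
    gst_caps_unref(supported); // g_object_get() returned a reference
}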
@@ -110,10 +114,6 @@
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"
#define VIEWFINDER_RESOLUTION_4x3 QSize(640, 480)
#define VIEWFINDER_RESOLUTION_3x2 QSize(720, 480)
#define VIEWFINDER_RESOLUTION_16x9 QSize(800, 450)
//using GST_STATE_READY for QCamera::LoadedState
//may not work reliably at least with some webcams.
@@ -170,6 +170,7 @@ CameraBinSession::CameraBinSession(QObject *parent)
m_imageProcessingControl = new CameraBinImageProcessing(this);
m_captureDestinationControl = new CameraBinCaptureDestination(this);
m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this);
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(this);
QByteArray envFlags = qgetenv("QT_GSTREAMER_CAMERABIN_FLAGS");
if (!envFlags.isEmpty())
@@ -246,8 +247,7 @@ bool CameraBinSession::setupCameraBin()
return true;
}
static GstCaps *resolutionToCaps(const QSize &resolution,
const QPair<int, int> &rate = qMakePair<int,int>(0,0))
static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int> &rate = qMakePair<int,int>(0,0))
{
if (resolution.isEmpty())
return gst_caps_new_any();
@@ -263,7 +263,23 @@ static GstCaps *resolutionToCaps(const QSize &resolution,
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL), NULL);
NULL),
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
NULL);
} else {
caps = gst_caps_new_full (gst_structure_new ("video/x-raw-yuv",
"width", G_TYPE_INT, resolution.width(),
@@ -271,88 +287,92 @@ static GstCaps *resolutionToCaps(const QSize &resolution,
NULL),
gst_structure_new ("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(), NULL), NULL);
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
NULL);
}
return caps;
}
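A short usage sketch of the helper above; the 1280x720 at 30 fps values are hypothetical, while the property name and the set/unref pattern mirror the calls made later in this file:
// Sketch only: build caps for a hypothetical 1280x720 @ 30/1 request and hand
// them to camerabin; g_object_set() keeps its own reference, so ours is dropped.
GstCaps *caps = resolutionToCaps(QSize(1280, 720), qMakePair(30, 1));
gchar *str = gst_caps_to_string(caps); // lists the yuv/rgb/data/android-buffer/jpeg structures
qDebug() << "requesting" << str;
g_free(str);
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);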
void CameraBinSession::setupCaptureResolution()
{
if (m_captureMode == QCamera::CaptureStillImage) {
QSize resolution = m_imageEncodeControl->imageSettings().resolution();
//by default select the maximum supported resolution
if (resolution.isEmpty()) {
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
QCamera::CaptureStillImage);
if (!resolutions.isEmpty())
resolution = resolutions.last();
}
QSize viewfinderResolution = VIEWFINDER_RESOLUTION_4x3;
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
QSize resolution = m_imageEncodeControl->imageSettings().resolution();
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << gst_caps_to_string(caps);
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << gst_caps_to_string(caps);
#endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
if (!resolution.isEmpty()) {
qreal aspectRatio = qreal(resolution.width()) / resolution.height();
if (aspectRatio < 1.4)
viewfinderResolution = VIEWFINDER_RESOLUTION_4x3;
else if (aspectRatio > 1.7)
viewfinderResolution = VIEWFINDER_RESOLUTION_16x9;
else
viewfinderResolution = VIEWFINDER_RESOLUTION_3x2;
}
} else {
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, GST_CAPS_ANY, NULL);
}
//on low-res cameras the viewfinder resolution should not be bigger
//than the capture resolution
if (viewfinderResolution.width() > resolution.width() && !resolution.isEmpty())
viewfinderResolution = resolution;
GstCaps *viewfinderCaps = resolutionToCaps(viewfinderResolution);
#if CAMERABIN_DEBUG
qDebug() << "Set viewfinder resolution" << viewfinderResolution <<gst_caps_to_string(viewfinderCaps);
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, viewfinderCaps, NULL);
gst_caps_unref(viewfinderCaps);
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
} else {
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, NULL, NULL);
}
if (m_captureMode == QCamera::CaptureVideo) {
QSize resolution = m_videoEncodeControl->actualVideoSettings().resolution();
//qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
if (resolution.isEmpty()) {
//select the highest supported resolution
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
QCamera::CaptureVideo);
if (!resolutions.isEmpty())
resolution = resolutions.last();
}
resolution = m_videoEncodeControl->actualVideoSettings().resolution();
//qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution /*, framerate*/); //convert to rational
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << gst_caps_to_string(caps);
#endif
//Use the same resolution for viewfinder and video capture
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
} else {
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, NULL, NULL);
}
resolution = m_viewfinderSettingsControl->resolution();
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << gst_caps_to_string(caps);
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
} else {
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, NULL, NULL);
}
}
void CameraBinSession::setAudioCaptureCaps()
{
QAudioEncoderSettings settings = m_audioEncodeControl->audioSettings();
const int sampleRate = settings.sampleRate();
const int channelCount = settings.channelCount();
if (sampleRate == -1 && channelCount == -1)
return;
GstStructure *structure = gst_structure_new(
"audio/x-raw-int",
"endianness", G_TYPE_INT, 1234,
"signed", G_TYPE_BOOLEAN, TRUE,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
NULL);
if (sampleRate != -1)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1)
gst_structure_set(structure, "channels", G_TYPE_INT, channelCount, NULL);
GstCaps *caps = gst_caps_new_full(structure, NULL);
g_object_set(G_OBJECT(m_camerabin), AUDIO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
}
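For illustration, the application-side settings that eventually reach this function could look like the sketch below; the 44100/2 values are hypothetical, and "recorder" is assumed to be a QMediaRecorder attached to the camera:
// Sketch only: with these values setAudioCaptureCaps() adds rate=44100 and
// channels=2 to the audio/x-raw-int caps; a field left at -1 stays unconstrained.
QAudioEncoderSettings audioSettings;
audioSettings.setSampleRate(44100);
audioSettings.setChannelCount(2);
recorder.setAudioSettings(audioSettings);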
GstElement *CameraBinSession::buildCameraSource()
{
#if CAMERABIN_DEBUG
@@ -658,14 +678,14 @@ void CameraBinSession::setState(QCamera::State newState)
GstState pending = GST_STATE_NULL;
gst_element_get_state(m_camerabin, &binState, &pending, 0);
if (captureMode() == QCamera::CaptureVideo) {
m_recorderControl->applySettings();
m_recorderControl->applySettings();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
m_recorderControl->videoProfile(),
NULL);
}
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
m_recorderControl->videoProfile(),
NULL);
setAudioCaptureCaps();
setupCaptureResolution();
@@ -745,7 +765,7 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
switch(tagValue.type()) {
case QVariant::String:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toString().toUtf8().constData(),
NULL);
@@ -753,18 +773,29 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
case QVariant::Int:
case QVariant::LongLong:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toInt(),
NULL);
break;
case QVariant::Double:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toDouble(),
NULL);
break;
case QVariant::DateTime: {
QDateTime date = tagValue.toDateTime().toLocalTime();
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
gst_date_time_new_local_time(
date.date().year(), date.date().month(), date.date().day(),
date.time().hour(), date.time().minute(), date.time().second()),
NULL);
break;
}
default:
break;
}
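A worked sketch of what the new QVariant::DateTime branch hands to GStreamer; the timestamp is hypothetical and the conversion follows the lines added above:
// Sketch only: a local-time QDateTime of 2014-01-22 11:36:50 becomes
// gst_date_time_new_local_time(2014, 1, 22, 11, 36, 50), stored by the tag
// setter with GST_TAG_MERGE_REPLACE under the given tag name.
QDateTime stamp(QDate(2014, 1, 22), QTime(11, 36, 50));
GstDateTime *gstStamp = gst_date_time_new_local_time(
        stamp.date().year(), stamp.date().month(), stamp.date().day(),
        stamp.time().hour(), stamp.time().minute(), stamp.time().second());
/* ...pass gstStamp as the tag value... */
gst_date_time_unref(gstStamp);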
@@ -940,6 +971,7 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
emit stateChanged(m_state = QCamera::UnloadedState);
break;
case GST_STATE_READY:
setMetaData(m_metaData);
if (m_state != QCamera::LoadedState)
emit stateChanged(m_state = QCamera::LoadedState);
break;