Merge dev into 5.5

Change-Id: I715a549b4cc10220a6e3b48799fdc38865a9345e
Oswald Buddenhagen
2015-02-24 11:02:24 +01:00
55 changed files with 129348 additions and 30327 deletions

dist/changes-5.4.1 (new file, 49 lines added)

@@ -0,0 +1,49 @@
Qt 5.4.1 is a bug-fix release. It maintains both forward and backward
compatibility (source and binary) with Qt 5.4.0.

For more details, refer to the online documentation included in this
distribution. The documentation is also available online:

  http://qt-project.org/doc/qt-5.4

The Qt version 5.4 series is binary compatible with the 5.3.x series.
Applications compiled for 5.3 will continue to run with 5.4.

Some of the changes listed in this file include issue tracking numbers
corresponding to tasks in the Qt Bug Tracker:

  http://bugreports.qt-project.org/

Each of these identifiers can be entered in the bug tracker to obtain more
information about a particular change.

****************************************************************************
*                                 Library                                  *
****************************************************************************

QtMultimedia
------------

 - VideoOutput's autoOrientation property now works correctly after
   switching cameras.

****************************************************************************
*                        Platform Specific Changes                         *
****************************************************************************

Android
-------

 - [QTBUG-42159] QAudioInput::setVolume() now works correctly (see the usage
   sketch after these notes).

Linux
-----

 - [QTBUG-43514] Fixed static linking.

WinRT
-----

 - [QTBUG-41066] Fixed VideoOutput autoOrientation when used with a Camera.
 - [QTBUG-41065] Camera viewfinder frames are now shown in a resolution
   adapted to the current capture mode.
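Editorial note on the Android entry above: QAudioInput::setVolume() scales the captured samples by a linear factor between 0.0 and 1.0. A minimal usage sketch, assuming the default input device; the format values and buffer target are illustrative, not taken from the patch:

#include <QAudioFormat>
#include <QAudioInput>
#include <QBuffer>

// Record from the default input device at half volume; the format values are
// examples only (query QAudioDeviceInfo for what the device supports).
void recordAtHalfVolume(QBuffer *target)
{
    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(1);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setByteOrder(QAudioFormat::LittleEndian);
    format.setSampleType(QAudioFormat::SignedInt);

    target->open(QIODevice::WriteOnly);

    QAudioInput *input = new QAudioInput(format, target);
    input->setVolume(0.5);   // the call fixed on Android by QTBUG-42159
    input->start(target);    // captured audio is written into the buffer
}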

@@ -17,28 +17,22 @@ win32 {
} else { } else {
qtCompileTest(alsa) qtCompileTest(alsa)
qtCompileTest(pulseaudio) qtCompileTest(pulseaudio)
!done_config_gstreamer {
gstver=0.10 isEmpty(GST_VERSION) {
!isEmpty(GST_VERSION): gstver=$$GST_VERSION contains(QT_CONFIG, gstreamer-0.10) {
cache(GST_VERSION, set, gstver); GST_VERSION = 0.10
qtCompileTest(gstreamer) { } else: contains(QT_CONFIG, gstreamer-1.0) {
qtCompileTest(gstreamer_photography) GST_VERSION = 1.0
qtCompileTest(gstreamer_encodingprofiles)
qtCompileTest(gstreamer_appsrc)
qtCompileTest(linux_v4l)
} else {
gstver=1.0
cache(GST_VERSION, set, gstver);
# Force a re-run of the test
CONFIG -= done_config_gstreamer
qtCompileTest(gstreamer) {
qtCompileTest(gstreamer_photography)
qtCompileTest(gstreamer_encodingprofiles)
qtCompileTest(gstreamer_appsrc)
qtCompileTest(linux_v4l)
}
} }
} }
cache(GST_VERSION, set)
!isEmpty(GST_VERSION):qtCompileTest(gstreamer) {
qtCompileTest(gstreamer_photography)
qtCompileTest(gstreamer_encodingprofiles)
qtCompileTest(gstreamer_appsrc)
qtCompileTest(linux_v4l)
}
qtCompileTest(resourcepolicy) qtCompileTest(resourcepolicy)
qtCompileTest(gpu_vivante) qtCompileTest(gpu_vivante)
} }
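With GST_VERSION now resolved once from QT_CONFIG and cached, the C++ sources select the matching GStreamer API at compile time with GST_CHECK_VERSION guards, as the helper functions later in this commit do. A small illustration of that pattern (this exact function is not part of the patch):

#include <gst/gst.h>

// Return a pad's negotiated caps using whichever API the build targets;
// mirrors the qt_gst_pad_get_current_caps() helper in qgstutils.
static GstCaps *negotiatedCaps(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_pad_get_current_caps(pad);      // GStreamer 1.0
#else
    return gst_pad_get_negotiated_caps(pad);   // GStreamer 0.10
#endif
}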

@@ -149,15 +149,10 @@ QMap<QByteArray, QVariant> QGstUtils::gstTagListToMap(const GstTagList *tags)
*/ */
QSize QGstUtils::capsResolution(const GstCaps *caps) QSize QGstUtils::capsResolution(const GstCaps *caps)
{ {
QSize size; if (gst_caps_get_size(caps) == 0)
return QSize();
if (caps) { return structureResolution(gst_caps_get_structure(caps, 0));
const GstStructure *structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
}
return size;
} }
/*! /*!
@@ -169,14 +164,12 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
QSize size; QSize size;
if (caps) { if (caps) {
const GstStructure *structure = gst_caps_get_structure(caps, 0); size = capsResolution(caps);
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
gint aspectNum = 0; gint aspectNum = 0;
gint aspectDenum = 0; gint aspectDenum = 0;
if (!size.isEmpty() && gst_structure_get_fraction( if (!size.isEmpty() && gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) { gst_caps_get_structure(caps, 0), "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0) if (aspectDenum > 0)
size.setWidth(size.width()*aspectNum/aspectDenum); size.setWidth(size.width()*aspectNum/aspectDenum);
} }
@@ -1048,20 +1041,23 @@ static int indexOfRgbColor(
QVideoSurfaceFormat QGstUtils::formatForCaps( QVideoSurfaceFormat QGstUtils::formatForCaps(
GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType) GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{ {
if (gst_video_info_from_caps(info, caps)) { GstVideoInfo vidInfo;
int index = indexOfVideoFormat(info->finfo->format); GstVideoInfo *infoPtr = info ? info : &vidInfo;
if (gst_video_info_from_caps(infoPtr, caps)) {
int index = indexOfVideoFormat(infoPtr->finfo->format);
if (index != -1) { if (index != -1) {
QVideoSurfaceFormat format( QVideoSurfaceFormat format(
QSize(info->width, info->height), QSize(infoPtr->width, infoPtr->height),
qt_videoFormatLookup[index].pixelFormat, qt_videoFormatLookup[index].pixelFormat,
handleType); handleType);
if (info->fps_d > 0) if (infoPtr->fps_d > 0)
format.setFrameRate(qreal(info->fps_d) / info->fps_n); format.setFrameRate(qreal(infoPtr->fps_n) / infoPtr->fps_d);
if (info->par_d > 0) if (infoPtr->par_d > 0)
format.setPixelAspectRatio(info->par_n, info->par_d); format.setPixelAspectRatio(infoPtr->par_n, infoPtr->par_d);
return format; return format;
} }
@@ -1076,60 +1072,18 @@ QVideoSurfaceFormat QGstUtils::formatForCaps(
{ {
const GstStructure *structure = gst_caps_get_structure(caps, 0); const GstStructure *structure = gst_caps_get_structure(caps, 0);
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
int bitsPerPixel = 0; int bitsPerPixel = 0;
QSize size = structureResolution(structure);
QSize size; QVideoFrame::PixelFormat pixelFormat = structurePixelFormat(structure, &bitsPerPixel);
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1)
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
}
if (pixelFormat != QVideoFrame::Format_Invalid) { if (pixelFormat != QVideoFrame::Format_Invalid) {
QVideoSurfaceFormat format(size, pixelFormat, handleType); QVideoSurfaceFormat format(size, pixelFormat, handleType);
QPair<int, int> rate; QPair<qreal, qreal> rate = structureFrameRateRange(structure);
gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
if (rate.second) if (rate.second)
format.setFrameRate(qreal(rate.first)/rate.second); format.setFrameRate(rate.second);
gint aspectNum = 0; format.setPixelAspectRatio(structurePixelAspectRatio(structure));
gint aspectDenum = 0;
if (gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0)
format.setPixelAspectRatio(aspectNum, aspectDenum);
}
if (bytesPerLine) if (bytesPerLine)
*bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3; *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
@@ -1304,6 +1258,118 @@ GstCaps *QGstUtils::videoFilterCaps()
return gst_caps_make_writable(gst_static_caps_get(&staticCaps)); return gst_caps_make_writable(gst_static_caps_get(&staticCaps));
} }
QSize QGstUtils::structureResolution(const GstStructure *s)
{
QSize size;
int w, h;
if (s && gst_structure_get_int(s, "width", &w) && gst_structure_get_int(s, "height", &h)) {
size.rwidth() = w;
size.rheight() = h;
}
return size;
}
QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *structure, int *bpp)
{
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
if (!structure)
return pixelFormat;
#if GST_CHECK_VERSION(1,0,0)
Q_UNUSED(bpp);
if (gst_structure_has_name(structure, "video/x-raw")) {
const gchar *s = gst_structure_get_string(structure, "format");
if (s) {
GstVideoFormat format = gst_video_format_from_string(s);
int index = indexOfVideoFormat(format);
if (index != -1)
pixelFormat = qt_videoFormatLookup[index].pixelFormat;
}
}
#else
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
if (bpp)
*bpp = qt_yuvColorLookup[index].bitsPerPixel;
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int bitsPerPixel = 0;
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1) {
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
if (bpp)
*bpp = qt_rgbColorLookup[index].bitsPerPixel;
}
}
#endif
return pixelFormat;
}
QSize QGstUtils::structurePixelAspectRatio(const GstStructure *s)
{
QSize ratio(1, 1);
gint aspectNum = 0;
gint aspectDenum = 0;
if (s && gst_structure_get_fraction(s, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0) {
ratio.rwidth() = aspectNum;
ratio.rheight() = aspectDenum;
}
}
return ratio;
}
QPair<qreal, qreal> QGstUtils::structureFrameRateRange(const GstStructure *s)
{
QPair<qreal, qreal> rate;
if (!s)
return rate;
int n, d;
if (gst_structure_get_fraction(s, "framerate", &n, &d)) {
rate.second = qreal(n) / d;
rate.first = rate.second;
} else if (gst_structure_get_fraction(s, "max-framerate", &n, &d)) {
rate.second = qreal(n) / d;
if (gst_structure_get_fraction(s, "min-framerate", &n, &d))
rate.first = qreal(n) / d;
else
rate.first = qreal(1);
}
return rate;
}
void qt_gst_object_ref_sink(gpointer object) void qt_gst_object_ref_sink(gpointer object)
{ {
#if GST_CHECK_VERSION(0,10,24) #if GST_CHECK_VERSION(0,10,24)
@@ -1331,6 +1397,15 @@ GstCaps *qt_gst_pad_get_current_caps(GstPad *pad)
#endif #endif
} }
GstCaps *qt_gst_pad_get_caps(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
return gst_pad_query_caps(pad, NULL);
#else
return gst_pad_get_caps_reffed(pad);
#endif
}
GstStructure *qt_gst_structure_new_empty(const char *name) GstStructure *qt_gst_structure_new_empty(const char *name)
{ {
#if GST_CHECK_VERSION(1,0,0) #if GST_CHECK_VERSION(1,0,0)
@@ -1358,6 +1433,19 @@ gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gi
#endif #endif
} }
GstCaps *qt_gst_caps_normalize(GstCaps *caps)
{
#if GST_CHECK_VERSION(1,0,0)
// gst_caps_normalize() takes ownership of the argument in 1.0
return gst_caps_normalize(caps);
#else
// in 0.10, it doesn't. Unref the argument to mimic the 1.0 behavior
GstCaps *res = gst_caps_normalize(caps);
gst_caps_unref(caps);
return res;
#endif
}
QDebug operator <<(QDebug debug, GstCaps *caps) QDebug operator <<(QDebug debug, GstCaps *caps)
{ {
if (caps) { if (caps) {
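The refactoring above routes all structure parsing through the new structureResolution(), structurePixelFormat(), structurePixelAspectRatio() and structureFrameRateRange() helpers, so a sink can derive a complete QVideoSurfaceFormat from negotiated caps in one call. A minimal sketch of the intended call pattern, assuming a GStreamer 1.0 build; the wrapper function itself is illustrative, not from the patch:

#include <private/qgstutils_p.h>
#include <qvideosurfaceformat.h>

// Derive the surface format for a pad's currently negotiated caps.
// Returns an invalid format if nothing has been negotiated yet.
static QVideoSurfaceFormat formatForPad(GstPad *pad)
{
    QVideoSurfaceFormat format;
    if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
        GstVideoInfo info;
        format = QGstUtils::formatForCaps(caps, &info); // info filled as a side effect
        gst_caps_unref(caps);
    }
    return format;
}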

@@ -114,10 +114,9 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surfac
, m_activeRenderer(0) , m_activeRenderer(0)
, m_surfaceCaps(0) , m_surfaceCaps(0)
, m_startCaps(0) , m_startCaps(0)
, m_lastBuffer(0) , m_renderBuffer(0)
, m_notified(false) , m_notified(false)
, m_stop(false) , m_stop(false)
, m_render(false)
, m_flush(false) , m_flush(false)
{ {
foreach (QObject *instance, rendererLoader()->instances(QGstVideoRendererPluginKey)) { foreach (QObject *instance, rendererLoader()->instances(QGstVideoRendererPluginKey)) {
@@ -137,6 +136,8 @@ QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
if (m_surfaceCaps) if (m_surfaceCaps)
gst_caps_unref(m_surfaceCaps); gst_caps_unref(m_surfaceCaps);
if (m_startCaps)
gst_caps_unref(m_startCaps);
} }
GstCaps *QVideoSurfaceGstDelegate::caps() GstCaps *QVideoSurfaceGstDelegate::caps()
@@ -157,13 +158,6 @@ bool QVideoSurfaceGstDelegate::start(GstCaps *caps)
m_stop = true; m_stop = true;
} }
m_render = false;
if (m_lastBuffer) {
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = 0;
}
if (m_startCaps) if (m_startCaps)
gst_caps_unref(m_startCaps); gst_caps_unref(m_startCaps);
m_startCaps = caps; m_startCaps = caps;
@@ -204,11 +198,6 @@ void QVideoSurfaceGstDelegate::stop()
m_startCaps = 0; m_startCaps = 0;
} }
if (m_lastBuffer) {
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = 0;
}
waitForAsyncEvent(&locker, &m_setupCondition, 500); waitForAsyncEvent(&locker, &m_setupCondition, 500);
} }
@@ -225,68 +214,19 @@ bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
} }
} }
void QVideoSurfaceGstDelegate::flush() GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
{ {
QMutexLocker locker(&m_mutex); QMutexLocker locker(&m_mutex);
m_flush = true; m_renderBuffer = buffer;
m_render = false;
if (m_lastBuffer) { GstFlowReturn flowReturn = waitForAsyncEvent(&locker, &m_renderCondition, 300)
gst_buffer_unref(m_lastBuffer); ? m_renderReturn
m_lastBuffer = 0; : GST_FLOW_ERROR;
}
notify(); m_renderBuffer = 0;
}
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer, bool show) return flowReturn;
{
QMutexLocker locker(&m_mutex);
if (m_lastBuffer)
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = buffer;
gst_buffer_ref(m_lastBuffer);
if (show) {
m_render = true;
return waitForAsyncEvent(&locker, &m_renderCondition, 300)
? m_renderReturn
: GST_FLOW_ERROR;
} else {
return GST_FLOW_OK;
}
}
void QVideoSurfaceGstDelegate::handleShowPrerollChange(GObject *object, GParamSpec *, gpointer d)
{
QVideoSurfaceGstDelegate * const delegate = static_cast<QVideoSurfaceGstDelegate *>(d);
gboolean showPreroll = true; // "show-preroll-frame" property is true by default
g_object_get(object, "show-preroll-frame", &showPreroll, NULL);
GstState state = GST_STATE_NULL;
GstState pendingState = GST_STATE_NULL;
gst_element_get_state(GST_ELEMENT(object), &state, &pendingState, 0);
const bool paused
= (pendingState == GST_STATE_VOID_PENDING && state == GST_STATE_PAUSED)
|| pendingState == GST_STATE_PAUSED;
if (paused) {
QMutexLocker locker(&delegate->m_mutex);
if (!showPreroll && delegate->m_lastBuffer) {
delegate->m_render = false;
delegate->m_flush = true;
delegate->notify();
} else if (delegate->m_lastBuffer) {
delegate->m_render = true;
delegate->notify();
}
}
} }
bool QVideoSurfaceGstDelegate::event(QEvent *event) bool QVideoSurfaceGstDelegate::event(QEvent *event)
@@ -350,11 +290,9 @@ bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
} }
gst_caps_unref(startCaps); gst_caps_unref(startCaps);
} else if (m_render) { } else if (m_renderBuffer) {
m_render = false; if (m_activeRenderer && m_surface) {
GstBuffer *buffer = m_renderBuffer;
if (m_activeRenderer && m_surface && m_lastBuffer) {
GstBuffer *buffer = m_lastBuffer;
gst_buffer_ref(buffer); gst_buffer_ref(buffer);
locker->unlock(); locker->unlock();
@@ -442,12 +380,6 @@ QGstVideoRendererSink *QGstVideoRendererSink::createSink(QAbstractVideoSurface *
sink->delegate = new QVideoSurfaceGstDelegate(surface); sink->delegate = new QVideoSurfaceGstDelegate(surface);
g_signal_connect(
G_OBJECT(sink),
"notify::show-preroll-frame",
G_CALLBACK(QVideoSurfaceGstDelegate::handleShowPrerollChange),
sink->delegate);
return sink; return sink;
} }
@@ -487,7 +419,7 @@ void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
base_sink_class->get_caps = QGstVideoRendererSink::get_caps; base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
base_sink_class->set_caps = QGstVideoRendererSink::set_caps; base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation; base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
base_sink_class->preroll = QGstVideoRendererSink::preroll; base_sink_class->stop = QGstVideoRendererSink::stop;
base_sink_class->render = QGstVideoRendererSink::render; base_sink_class->render = QGstVideoRendererSink::render;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class); GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
@@ -578,20 +510,17 @@ gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *
return sink->delegate->proposeAllocation(query); return sink->delegate->proposeAllocation(query);
} }
GstFlowReturn QGstVideoRendererSink::preroll(GstBaseSink *base, GstBuffer *buffer) gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{ {
VO_SINK(base); VO_SINK(base);
sink->delegate->stop();
gboolean showPreroll = true; // "show-preroll-frame" property is true by default return TRUE;
g_object_get(G_OBJECT(base), "show-preroll-frame", &showPreroll, NULL);
return sink->delegate->render(buffer, showPreroll); // display frame
} }
GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer) GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer)
{ {
VO_SINK(base); VO_SINK(base);
return sink->delegate->render(buffer, true); return sink->delegate->render(buffer);
} }
QT_END_NAMESPACE QT_END_NAMESPACE
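The reworked sink drops the preroll and last-buffer bookkeeping: render() now parks the incoming buffer, wakes the delegate on the GUI thread, and blocks on a wait condition (300 ms) for the result. A condensed, stand-alone sketch of that handoff pattern, with illustrative names rather than the actual class members:

#include <QMutex>
#include <QWaitCondition>
#include <gst/gst.h>

// Simplified buffer handoff between the GStreamer streaming thread and the GUI thread.
class RenderHandoff
{
public:
    // Streaming thread: blocks until the GUI thread has handled the buffer
    // or the timeout expires, as QVideoSurfaceGstDelegate::render() now does.
    GstFlowReturn render(GstBuffer *buffer)
    {
        QMutexLocker locker(&m_mutex);
        m_buffer = buffer;
        notifyGuiThread();                                    // e.g. post a custom QEvent
        const bool handled = m_condition.wait(&m_mutex, 300); // ms, same timeout as the patch
        m_buffer = 0;
        return handled ? m_result : GST_FLOW_ERROR;
    }

    // GUI thread: called once the posted event is delivered.
    void processPendingBuffer()
    {
        QMutexLocker locker(&m_mutex);
        if (m_buffer)
            m_result = presentToSurface(m_buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
        m_condition.wakeAll();
    }

private:
    // Placeholders: the real sink posts an event to the delegate and calls
    // QAbstractVideoSurface::present(), respectively.
    void notifyGuiThread() {}
    bool presentToSurface(GstBuffer *) { return true; }

    QMutex m_mutex;
    QWaitCondition m_condition;
    GstBuffer *m_buffer = 0;
    GstFlowReturn m_result = GST_FLOW_OK;
};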

@@ -925,7 +925,7 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
\qmlproperty variant QtMultimedia::Camera::metaData.gpsLongitude \qmlproperty variant QtMultimedia::Camera::metaData.gpsLongitude
\qmlproperty variant QtMultimedia::Camera::metaData.gpsAltitude \qmlproperty variant QtMultimedia::Camera::metaData.gpsAltitude
These properties hold the the geographic position in decimal degrees of the These properties hold the geographic position in decimal degrees of the
camera at time of capture. camera at time of capture.
\sa {QMediaMetaData} \sa {QMediaMetaData}

@@ -123,9 +123,8 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
This property holds a bool value indicating whether the camera This property holds a bool value indicating whether the camera
is ready to capture photos or not. is ready to capture photos or not.
If camera is not ready to capture image immediately, Calling capture() while \e ready is \c false is not permitted and
the capture request is queued with all the related camera settings, results in an error.
and the request will be executed as soon as possible.
*/ */
/*! /*!
@@ -134,11 +133,8 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
This property holds a bool value indicating whether the camera This property holds a bool value indicating whether the camera
is ready to capture photos or not. is ready to capture photos or not.
It's permissible to call capture() while the camera is active Calling capture() while \e ready is \c false is not permitted and
regardless of the \e ready property value. results in an error.
If camera is not ready to capture image immediately,
the capture request is queued with all the related camera settings,
and the request will be executed as soon as possible.
*/ */
bool QDeclarativeCameraCapture::isReadyForCapture() const bool QDeclarativeCameraCapture::isReadyForCapture() const
{ {
@@ -157,11 +153,13 @@ bool QDeclarativeCameraCapture::isReadyForCapture() const
for video. for video.
Camera saves all the capture parameters like exposure settings or Camera saves all the capture parameters like exposure settings or
image processing parameters, so changes to camera paramaters after image processing parameters, so changes to camera parameters after
capture() is called do not affect previous capture requests. capture() is called do not affect previous capture requests.
CameraCapture::capture returns the capture requestId parameter, used with capture() returns the capture requestId parameter, used with
imageExposed(), imageCaptured(), imageMetadataAvailable() and imageSaved() signals. imageExposed(), imageCaptured(), imageMetadataAvailable() and imageSaved() signals.
\sa ready
*/ */
int QDeclarativeCameraCapture::capture() int QDeclarativeCameraCapture::capture()
{ {

@@ -488,11 +488,8 @@ void QCameraImageCapture::setCaptureDestination(QCameraImageCapture::CaptureDest
\property QCameraImageCapture::readyForCapture \property QCameraImageCapture::readyForCapture
\brief whether the service is ready to capture a an image immediately. \brief whether the service is ready to capture a an image immediately.
It's permissible to call capture() while the camera status is QCamera::ActiveStatus Calling capture() while \e readyForCapture is \c false is not permitted and
regardless of isReadyForCapture property value. results in an error.
If camera is not ready to capture image immediately,
the capture request is queued with all the related camera settings
to be executed as soon as possible.
*/ */
bool QCameraImageCapture::isReadyForCapture() const bool QCameraImageCapture::isReadyForCapture() const
@@ -523,11 +520,13 @@ bool QCameraImageCapture::isReadyForCapture() const
the default directory, with a full path reported with imageCaptured() and imageSaved() signals. the default directory, with a full path reported with imageCaptured() and imageSaved() signals.
QCamera saves all the capture parameters like exposure settings or QCamera saves all the capture parameters like exposure settings or
image processing parameters, so changes to camera paramaters after image processing parameters, so changes to camera parameters after
capture() is called do not affect previous capture requests. capture() is called do not affect previous capture requests.
QCameraImageCapture::capture returns the capture Id parameter, used with QCameraImageCapture::capture returns the capture Id parameter, used with
imageExposed(), imageCaptured() and imageSaved() signals. imageExposed(), imageCaptured() and imageSaved() signals.
\sa isReadyForCapture()
*/ */
int QCameraImageCapture::capture(const QString &file) int QCameraImageCapture::capture(const QString &file)
{ {
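With the queuing behaviour removed from the documentation, the contract for clients is: only call capture() once readyForCapture is true. A minimal sketch of client code honouring that contract; the file path and lambda connection style are illustrative:

#include <QCamera>
#include <QCameraImageCapture>
#include <QObject>

// Capture a single still image as soon as the service reports readiness.
void captureWhenReady(QCamera *camera, QCameraImageCapture *imageCapture)
{
    QObject::connect(imageCapture, &QCameraImageCapture::readyForCaptureChanged,
                     [imageCapture](bool ready) {
        if (ready)
            imageCapture->capture(QStringLiteral("/tmp/example.jpg")); // example path
    });
    camera->setCaptureMode(QCamera::CaptureStillImage);
    camera->start();
}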

@@ -111,7 +111,7 @@ QCameraImageCaptureControl::~QCameraImageCaptureControl()
The Camera service should save all the capture parameters The Camera service should save all the capture parameters
like exposure settings or image processing parameters, like exposure settings or image processing parameters,
so changes to camera paramaters after capture() is called so changes to camera parameters after capture() is called
do not affect previous capture requests. do not affect previous capture requests.
Returns the capture request id number, which is used later Returns the capture request id number, which is used later

@@ -31,7 +31,7 @@
\brief Platform notes for the BlackBerry Platform \brief Platform notes for the BlackBerry Platform
Qt Multimedia supports BlackBerry devices that run the BB10 operating system. Qt Multimedia supports BlackBerry devices that run the BB10 operating system.
This page covers the availibility of different features on BB10. This page covers the availability of different features on BB10.
\section1 Implementation \section1 Implementation

@@ -115,7 +115,7 @@ namespace QGstUtils {
QImage bufferToImage(GstBuffer *buffer, const GstVideoInfo &info); QImage bufferToImage(GstBuffer *buffer, const GstVideoInfo &info);
QVideoSurfaceFormat formatForCaps( QVideoSurfaceFormat formatForCaps(
GstCaps *caps, GstCaps *caps,
GstVideoInfo *info, GstVideoInfo *info = 0,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle); QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
#else #else
QImage bufferToImage(GstBuffer *buffer); QImage bufferToImage(GstBuffer *buffer);
@@ -133,13 +133,20 @@ namespace QGstUtils {
GstCaps *videoFilterCaps(); GstCaps *videoFilterCaps();
QSize structureResolution(const GstStructure *s);
QVideoFrame::PixelFormat structurePixelFormat(const GstStructure *s, int *bpp = 0);
QSize structurePixelAspectRatio(const GstStructure *s);
QPair<qreal, qreal> structureFrameRateRange(const GstStructure *s);
} }
void qt_gst_object_ref_sink(gpointer object); void qt_gst_object_ref_sink(gpointer object);
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad); GstCaps *qt_gst_pad_get_current_caps(GstPad *pad);
GstCaps *qt_gst_pad_get_caps(GstPad *pad);
GstStructure *qt_gst_structure_new_empty(const char *name); GstStructure *qt_gst_structure_new_empty(const char *name);
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur); gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur);
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur); gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur);
GstCaps *qt_gst_caps_normalize(GstCaps *caps);
QDebug operator <<(QDebug debug, GstCaps *caps); QDebug operator <<(QDebug debug, GstCaps *caps);

@@ -98,14 +98,10 @@ public:
void stop(); void stop();
bool proposeAllocation(GstQuery *query); bool proposeAllocation(GstQuery *query);
void flush(); GstFlowReturn render(GstBuffer *buffer);
GstFlowReturn render(GstBuffer *buffer, bool show);
bool event(QEvent *event); bool event(QEvent *event);
static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
private slots: private slots:
bool handleEvent(QMutexLocker *locker); bool handleEvent(QMutexLocker *locker);
void updateSupportedFormats(); void updateSupportedFormats();
@@ -126,11 +122,10 @@ private:
GstCaps *m_surfaceCaps; GstCaps *m_surfaceCaps;
GstCaps *m_startCaps; GstCaps *m_startCaps;
GstBuffer *m_lastBuffer; GstBuffer *m_renderBuffer;
bool m_notified; bool m_notified;
bool m_stop; bool m_stop;
bool m_render;
bool m_flush; bool m_flush;
}; };
@@ -156,7 +151,8 @@ private:
static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query); static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query);
static GstFlowReturn preroll(GstBaseSink *sink, GstBuffer *buffer); static gboolean stop(GstBaseSink *sink);
static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer); static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);
private: private:

@@ -335,7 +335,7 @@ void QMediaPlayerPrivate::setPlaylistMedia()
return; return;
} else if (control != 0) { } else if (control != 0) {
// If we've just switched to a new playlist, // If we've just switched to a new playlist,
// then last emited currentMediaChanged was a playlist. // then last emitted currentMediaChanged was a playlist.
// Make sure we emit currentMediaChanged if new playlist has // Make sure we emit currentMediaChanged if new playlist has
// the same media as the previous one: // the same media as the previous one:
// sample.m3u // sample.m3u

@@ -51,7 +51,7 @@
player->setVideoOutput(myVideoSurface); player->setVideoOutput(myVideoSurface);
player->setMedia(QUrl::fromLocalFile("observation.mp4")); player->setMedia(QUrl::fromLocalFile("observation.mp4"));
player->play(); // Start receving frames as they get presented to myVideoSurface player->play(); // Start receiving frames as they get presented to myVideoSurface
\endcode \endcode
This same approach works with the QCamera object as well, to receive viewfinder or video This same approach works with the QCamera object as well, to receive viewfinder or video

@@ -34,7 +34,7 @@
#ifndef QVIDEOPROBE_H #ifndef QVIDEOPROBE_H
#define QVIDEOPROBE_H #define QVIDEOPROBE_H
#include <QObject> #include <QtCore/QObject>
#include <QtMultimedia/qvideoframe.h> #include <QtMultimedia/qvideoframe.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE

@@ -141,8 +141,8 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::start(const QVideoSurf
bool ok = m_imageFormat != QImage::Format_Invalid && !m_imageSize.isEmpty(); bool ok = m_imageFormat != QImage::Format_Invalid && !m_imageSize.isEmpty();
#ifndef QT_NO_OPENGL #ifndef QT_NO_OPENGL
if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES)
#endif
ok &= format.pixelFormat() != QVideoFrame::Format_RGB24; ok &= format.pixelFormat() != QVideoFrame::Format_RGB24;
#endif
if (ok) if (ok)
return QAbstractVideoSurface::NoError; return QAbstractVideoSurface::NoError;
} else if (t == QAbstractVideoBuffer::QPixmapHandle) { } else if (t == QAbstractVideoBuffer::QPixmapHandle) {

@@ -276,33 +276,38 @@ void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool
if (!m_camera) if (!m_camera)
return; return;
QSize viewfinderResolution = m_camera->previewSize(); QSize currentViewfinderResolution = m_camera->previewSize();
const qreal aspectRatio = qreal(captureSize.width()) / qreal(captureSize.height()); const qreal aspectRatio = qreal(captureSize.width()) / qreal(captureSize.height());
if (viewfinderResolution.isValid() && if (currentViewfinderResolution.isValid() &&
qFuzzyCompare(aspectRatio, qAbs(aspectRatio - (qreal(currentViewfinderResolution.width()) / currentViewfinderResolution.height())) < 0.01) {
qreal(viewfinderResolution.width()) / viewfinderResolution.height())) {
return; return;
} }
QSize adjustedViewfinderResolution;
QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes(); QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
for (int i = previewSizes.count() - 1; i >= 0; --i) { for (int i = previewSizes.count() - 1; i >= 0; --i) {
const QSize &size = previewSizes.at(i); const QSize &size = previewSizes.at(i);
// search for viewfinder resolution with the same aspect ratio // search for viewfinder resolution with the same aspect ratio
if (qFuzzyCompare(aspectRatio, (static_cast<qreal>(size.width())/static_cast<qreal>(size.height())))) { if (qAbs(aspectRatio - (qreal(size.width()) / size.height())) < 0.01) {
viewfinderResolution = size; adjustedViewfinderResolution = size;
break; break;
} }
} }
if (m_camera->previewSize() != viewfinderResolution) { if (!adjustedViewfinderResolution.isValid()) {
qWarning("Cannot find a viewfinder resolution matching the capture aspect ratio.");
return;
}
if (currentViewfinderResolution != adjustedViewfinderResolution) {
if (m_videoOutput) if (m_videoOutput)
m_videoOutput->setVideoSize(viewfinderResolution); m_videoOutput->setVideoSize(adjustedViewfinderResolution);
// if preview is started, we have to stop it first before changing its size // if preview is started, we have to stop it first before changing its size
if (m_previewStarted && restartPreview) if (m_previewStarted && restartPreview)
m_camera->stopPreview(); m_camera->stopPreview();
m_camera->setPreviewSize(viewfinderResolution); m_camera->setPreviewSize(adjustedViewfinderResolution);
// restart preview // restart preview
if (m_previewStarted && restartPreview) if (m_previewStarted && restartPreview)
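Note the comparison change in this hunk: qFuzzyCompare() requires the two ratios to agree to roughly one part in 10^12, so preview sizes whose aspect ratio only approximately matches the capture ratio would never be selected; the patch instead accepts anything within an absolute difference of 0.01. The selection logic in isolation, as a hedged sketch (supportedSizes stands in for getSupportedPreviewSizes()):

#include <QList>
#include <QSize>
#include <QtGlobal>

// Pick the largest supported preview size whose aspect ratio is within 0.01
// of the capture aspect ratio; returns an invalid QSize if none qualifies.
static QSize matchAspectRatio(const QList<QSize> &supportedSizes, const QSize &captureSize)
{
    const qreal targetRatio = qreal(captureSize.width()) / captureSize.height();
    for (int i = supportedSizes.count() - 1; i >= 0; --i) { // assume ascending order
        const QSize &size = supportedSizes.at(i);
        if (qAbs(targetRatio - qreal(size.width()) / size.height()) < 0.01)
            return size;
    }
    return QSize();
}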

@@ -337,7 +337,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
if (!mTempFile.isNull()) if (!mTempFile.isNull())
mediaPath = QStringLiteral("file://") + mTempFile->fileName(); mediaPath = QStringLiteral("file://") + mTempFile->fileName();
} else { } else {
mediaPath = url.toString(); mediaPath = url.toString(QUrl::FullyEncoded);
} }
if (mVideoSize.isValid() && mVideoOutput) if (mVideoSize.isValid() && mVideoOutput)

@@ -1,5 +1,5 @@
TARGET = dsengine TARGET = dsengine
win32:!qtHaveModule(opengl) { win32:!qtHaveModule(opengl)|contains(QT_CONFIG,dynamicgl) {
LIBS_PRIVATE += -lgdi32 -luser32 LIBS_PRIVATE += -lgdi32 -luser32
} }
PLUGIN_TYPE=mediaservice PLUGIN_TYPE=mediaservice

@@ -39,6 +39,7 @@
#include "dsvideodevicecontrol.h" #include "dsvideodevicecontrol.h"
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
#include <QtCore/QElapsedTimer>
#include <dshow.h> #include <dshow.h>
#include "dscameraservice.h" #include "dscameraservice.h"
#endif #endif
@@ -121,8 +122,7 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty()) updateDevices();
updateDevices();
return m_defaultCameraDevice; return m_defaultCameraDevice;
} }
@@ -135,8 +135,7 @@ QList<QByteArray> DSServicePlugin::devices(const QByteArray &service) const
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty()) updateDevices();
updateDevices();
return m_cameraDevices; return m_cameraDevices;
} }
@@ -149,8 +148,7 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty()) updateDevices();
updateDevices();
for (int i=0; i<m_cameraDevices.count(); i++) for (int i=0; i<m_cameraDevices.count(); i++)
if (m_cameraDevices[i] == device) if (m_cameraDevices[i] == device)
@@ -164,6 +162,10 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
void DSServicePlugin::updateDevices() const void DSServicePlugin::updateDevices() const
{ {
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
addRefCount(); addRefCount();
m_defaultCameraDevice.clear(); m_defaultCameraDevice.clear();
@@ -176,6 +178,7 @@ void DSServicePlugin::updateDevices() const
} }
releaseRefCount(); releaseRefCount();
timer.restart();
} }
#endif #endif
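The unconditional updateDevices() calls become cheap because the QElapsedTimer turns device enumeration into a rate-limited cache refresh: at most one DirectShow enumeration per 500 ms. The pattern in isolation (refreshCache() is a placeholder for the actual enumeration, not code from the patch):

#include <QElapsedTimer>

static void refreshCache()
{
    // placeholder for the expensive DirectShow device enumeration
}

// Run the refresh at most once per interval; otherwise keep the cached result.
static void refreshAtMostEvery(qint64 intervalMs)
{
    static QElapsedTimer timer;
    if (timer.isValid() && timer.elapsed() < intervalMs)
        return;                 // cache is still considered fresh
    refreshCache();
    timer.restart();            // also marks the timer as valid on first use
}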

@@ -24,6 +24,7 @@ HEADERS += \
$$PWD/camerabinrecorder.h \ $$PWD/camerabinrecorder.h \
$$PWD/camerabincontainer.h \ $$PWD/camerabincontainer.h \
$$PWD/camerabinimagecapture.h \ $$PWD/camerabinimagecapture.h \
$$PWD/camerabinzoom.h \
$$PWD/camerabinimageprocessing.h \ $$PWD/camerabinimageprocessing.h \
$$PWD/camerabinmetadata.h \ $$PWD/camerabinmetadata.h \
$$PWD/camerabinvideoencoder.h \ $$PWD/camerabinvideoencoder.h \
@@ -31,6 +32,7 @@ HEADERS += \
$$PWD/camerabincapturedestination.h \ $$PWD/camerabincapturedestination.h \
$$PWD/camerabincapturebufferformat.h \ $$PWD/camerabincapturebufferformat.h \
$$PWD/camerabinviewfindersettings.h \ $$PWD/camerabinviewfindersettings.h \
$$PWD/camerabinviewfindersettings2.h \
$$PWD/camerabininfocontrol.h $$PWD/camerabininfocontrol.h
SOURCES += \ SOURCES += \
@@ -42,6 +44,7 @@ SOURCES += \
$$PWD/camerabincontainer.cpp \ $$PWD/camerabincontainer.cpp \
$$PWD/camerabinimagecapture.cpp \ $$PWD/camerabinimagecapture.cpp \
$$PWD/camerabinimageencoder.cpp \ $$PWD/camerabinimageencoder.cpp \
$$PWD/camerabinzoom.cpp \
$$PWD/camerabinimageprocessing.cpp \ $$PWD/camerabinimageprocessing.cpp \
$$PWD/camerabinmetadata.cpp \ $$PWD/camerabinmetadata.cpp \
$$PWD/camerabinrecorder.cpp \ $$PWD/camerabinrecorder.cpp \
@@ -49,6 +52,7 @@ SOURCES += \
$$PWD/camerabinresourcepolicy.cpp \ $$PWD/camerabinresourcepolicy.cpp \
$$PWD/camerabincapturedestination.cpp \ $$PWD/camerabincapturedestination.cpp \
$$PWD/camerabinviewfindersettings.cpp \ $$PWD/camerabinviewfindersettings.cpp \
$$PWD/camerabinviewfindersettings2.cpp \
$$PWD/camerabincapturebufferformat.cpp \ $$PWD/camerabincapturebufferformat.cpp \
$$PWD/camerabininfocontrol.cpp $$PWD/camerabininfocontrol.cpp
@@ -69,15 +73,13 @@ config_gstreamer_photography {
$$PWD/camerabinfocus.h \ $$PWD/camerabinfocus.h \
$$PWD/camerabinexposure.h \ $$PWD/camerabinexposure.h \
$$PWD/camerabinflash.h \ $$PWD/camerabinflash.h \
$$PWD/camerabinlocks.h \ $$PWD/camerabinlocks.h
$$PWD/camerabinzoom.h
SOURCES += \ SOURCES += \
$$PWD/camerabinexposure.cpp \ $$PWD/camerabinexposure.cpp \
$$PWD/camerabinflash.cpp \ $$PWD/camerabinflash.cpp \
$$PWD/camerabinfocus.cpp \ $$PWD/camerabinfocus.cpp \
$$PWD/camerabinlocks.cpp \ $$PWD/camerabinlocks.cpp
$$PWD/camerabinzoom.cpp
LIBS += -lgstphotography-$$GST_VERSION LIBS += -lgstphotography-$$GST_VERSION
DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API

@@ -51,11 +51,10 @@ CameraBinControl::CameraBinControl(CameraBinSession *session)
:QCameraControl(session), :QCameraControl(session),
m_session(session), m_session(session),
m_state(QCamera::UnloadedState), m_state(QCamera::UnloadedState),
m_status(QCamera::UnloadedStatus),
m_reloadPending(false) m_reloadPending(false)
{ {
connect(m_session, SIGNAL(stateChanged(QCamera::State)), connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
this, SLOT(updateStatus())); this, SIGNAL(statusChanged(QCamera::Status)));
connect(m_session, SIGNAL(viewfinderChanged()), connect(m_session, SIGNAL(viewfinderChanged()),
SLOT(reloadLater())); SLOT(reloadLater()));
@@ -116,7 +115,7 @@ void CameraBinControl::setState(QCamera::State state)
//special case for stopping the camera while it's busy, //special case for stopping the camera while it's busy,
//it should be delayed until the camera is idle //it should be delayed until the camera is idle
if (state == QCamera::LoadedState && if (state == QCamera::LoadedState &&
m_session->state() == QCamera::ActiveState && m_session->status() == QCamera::ActiveStatus &&
m_session->isBusy()) { m_session->isBusy()) {
#ifdef CAMEABIN_DEBUG #ifdef CAMEABIN_DEBUG
qDebug() << Q_FUNC_INFO << "Camera is busy, QCamera::stop() is delayed"; qDebug() << Q_FUNC_INFO << "Camera is busy, QCamera::stop() is delayed";
@@ -165,52 +164,9 @@ QCamera::State CameraBinControl::state() const
return m_state; return m_state;
} }
void CameraBinControl::updateStatus() QCamera::Status CameraBinControl::status() const
{ {
QCamera::State sessionState = m_session->state(); return m_session->status();
QCamera::Status oldStatus = m_status;
switch (m_state) {
case QCamera::UnloadedState:
m_status = QCamera::UnloadedStatus;
break;
case QCamera::LoadedState:
switch (sessionState) {
case QCamera::UnloadedState:
m_status = m_resourcePolicy->isResourcesGranted()
? QCamera::LoadingStatus
: QCamera::UnavailableStatus;
break;
case QCamera::LoadedState:
m_status = QCamera::LoadedStatus;
break;
case QCamera::ActiveState:
m_status = QCamera::ActiveStatus;
break;
}
break;
case QCamera::ActiveState:
switch (sessionState) {
case QCamera::UnloadedState:
m_status = m_resourcePolicy->isResourcesGranted()
? QCamera::LoadingStatus
: QCamera::UnavailableStatus;
break;
case QCamera::LoadedState:
m_status = QCamera::StartingStatus;
break;
case QCamera::ActiveState:
m_status = QCamera::ActiveStatus;
break;
}
}
if (m_status != oldStatus) {
#ifdef CAMEABIN_DEBUG
qDebug() << "Camera status changed" << ENUM_NAME(QCamera, "Status", m_status);
#endif
emit statusChanged(m_status);
}
} }
void CameraBinControl::reloadLater() void CameraBinControl::reloadLater()
@@ -254,7 +210,7 @@ void CameraBinControl::handleResourcesGranted()
void CameraBinControl::handleBusyChanged(bool busy) void CameraBinControl::handleBusyChanged(bool busy)
{ {
if (!busy && m_session->state() == QCamera::ActiveState) { if (!busy && m_session->status() == QCamera::ActiveStatus) {
if (m_state == QCamera::LoadedState) { if (m_state == QCamera::LoadedState) {
//handle delayed stop() because of busy camera //handle delayed stop() because of busy camera
m_resourcePolicy->setResourceSet(CamerabinResourcePolicy::LoadedResources); m_resourcePolicy->setResourceSet(CamerabinResourcePolicy::LoadedResources);
@@ -293,15 +249,14 @@ bool CameraBinControl::canChangeProperty(PropertyChangeType changeType, QCamera:
Q_UNUSED(status); Q_UNUSED(status);
switch (changeType) { switch (changeType) {
case QCameraControl::CaptureMode:
return status != QCamera::ActiveStatus;
break;
case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings:
case QCameraControl::Viewfinder: case QCameraControl::Viewfinder:
return true; return true;
case QCameraControl::CaptureMode:
case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings:
case QCameraControl::ViewfinderSettings:
default: default:
return false; return status != QCamera::ActiveStatus;
} }
} }

@@ -56,7 +56,7 @@ public:
QCamera::State state() const; QCamera::State state() const;
void setState(QCamera::State state); void setState(QCamera::State state);
QCamera::Status status() const { return m_status; } QCamera::Status status() const;
QCamera::CaptureModes captureMode() const; QCamera::CaptureModes captureMode() const;
void setCaptureMode(QCamera::CaptureModes mode); void setCaptureMode(QCamera::CaptureModes mode);
@@ -72,7 +72,6 @@ public slots:
void setViewfinderColorSpaceConversion(bool enabled); void setViewfinderColorSpaceConversion(bool enabled);
private slots: private slots:
void updateStatus();
void delayedReload(); void delayedReload();
void handleResourcesGranted(); void handleResourcesGranted();
@@ -86,7 +85,6 @@ private:
CameraBinSession *m_session; CameraBinSession *m_session;
QCamera::State m_state; QCamera::State m_state;
QCamera::Status m_status;
CamerabinResourcePolicy *m_resourcePolicy; CamerabinResourcePolicy *m_resourcePolicy;
bool m_reloadPending; bool m_reloadPending;

@@ -56,7 +56,7 @@ CameraBinFocus::CameraBinFocus(CameraBinSession *session)
QGstreamerBufferProbe(ProbeBuffers), QGstreamerBufferProbe(ProbeBuffers),
#endif #endif
m_session(session), m_session(session),
m_cameraState(QCamera::UnloadedState), m_cameraStatus(QCamera::UnloadedStatus),
m_focusMode(QCameraFocus::AutoFocus), m_focusMode(QCameraFocus::AutoFocus),
m_focusPointMode(QCameraFocus::FocusPointAuto), m_focusPointMode(QCameraFocus::FocusPointAuto),
m_focusStatus(QCamera::Unlocked), m_focusStatus(QCamera::Unlocked),
@@ -68,8 +68,8 @@ CameraBinFocus::CameraBinFocus(CameraBinSession *session)
gst_photography_set_focus_mode(m_session->photography(), GST_PHOTOGRAPHY_FOCUS_MODE_AUTO); gst_photography_set_focus_mode(m_session->photography(), GST_PHOTOGRAPHY_FOCUS_MODE_AUTO);
connect(m_session, SIGNAL(stateChanged(QCamera::State)), connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
this, SLOT(_q_handleCameraStateChange(QCamera::State))); this, SLOT(_q_handleCameraStatusChange(QCamera::Status)));
} }
CameraBinFocus::~CameraBinFocus() CameraBinFocus::~CameraBinFocus()
@@ -319,10 +319,10 @@ void CameraBinFocus::_q_setFocusStatus(QCamera::LockStatus status, QCamera::Lock
} }
} }
void CameraBinFocus::_q_handleCameraStateChange(QCamera::State state) void CameraBinFocus::_q_handleCameraStatusChange(QCamera::Status status)
{ {
m_cameraState = state; m_cameraStatus = status;
if (state == QCamera::ActiveState) { if (status == QCamera::ActiveStatus) {
if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) { if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) {
if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) { if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
if (GstStructure *structure = gst_caps_get_structure(caps, 0)) { if (GstStructure *structure = gst_caps_get_structure(caps, 0)) {
@@ -415,7 +415,7 @@ void CameraBinFocus::updateRegionOfInterest(const QRectF &rectangle)
void CameraBinFocus::updateRegionOfInterest(const QVector<QRect> &rectangles) void CameraBinFocus::updateRegionOfInterest(const QVector<QRect> &rectangles)
{ {
if (m_cameraState != QCamera::ActiveState) if (m_cameraStatus != QCamera::ActiveStatus)
return; return;
GstElement * const cameraSource = m_session->cameraSource(); GstElement * const cameraSource = m_session->cameraSource();

@@ -93,7 +93,7 @@ protected:
private Q_SLOTS: private Q_SLOTS:
void _q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason); void _q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason);
void _q_handleCameraStateChange(QCamera::State state); void _q_handleCameraStatusChange(QCamera::Status status);
#if GST_CHECK_VERSION(1,0,0) #if GST_CHECK_VERSION(1,0,0)
void _q_updateFaces(); void _q_updateFaces();
@@ -109,7 +109,7 @@ private:
#endif #endif
CameraBinSession *m_session; CameraBinSession *m_session;
QCamera::State m_cameraState; QCamera::Status m_cameraStatus;
QCameraFocus::FocusModes m_focusMode; QCameraFocus::FocusModes m_focusMode;
QCameraFocus::FocusPointMode m_focusPointMode; QCameraFocus::FocusPointMode m_focusPointMode;
QCamera::LockStatus m_focusStatus; QCamera::LockStatus m_focusStatus;

@@ -61,7 +61,7 @@ CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session)
, m_requestId(0) , m_requestId(0)
, m_ready(false) , m_ready(false)
{ {
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState())); connect(m_session, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateState()));
connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int))); connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)), this, SIGNAL(imageCaptured(int,QImage))); connect(m_session, SIGNAL(imageCaptured(int,QImage)), this, SIGNAL(imageCaptured(int,QImage)));
connect(m_session->cameraControl()->resourcePolicy(), SIGNAL(canCaptureChanged()), this, SLOT(updateState())); connect(m_session->cameraControl()->resourcePolicy(), SIGNAL(canCaptureChanged()), this, SLOT(updateState()));
@@ -100,7 +100,7 @@ void CameraBinImageCapture::cancelCapture()
void CameraBinImageCapture::updateState() void CameraBinImageCapture::updateState()
{ {
bool ready = m_session->state() == QCamera::ActiveState bool ready = m_session->status() == QCamera::ActiveStatus
&& m_session->cameraControl()->resourcePolicy()->canCapture(); && m_session->cameraControl()->resourcePolicy()->canCapture();
if (m_ready != ready) { if (m_ready != ready) {
#ifdef DEBUG_CAPTURE #ifdef DEBUG_CAPTURE

@@ -49,7 +49,7 @@ CameraBinRecorder::CameraBinRecorder(CameraBinSession *session)
m_state(QMediaRecorder::StoppedState), m_state(QMediaRecorder::StoppedState),
m_status(QMediaRecorder::UnloadedStatus) m_status(QMediaRecorder::UnloadedStatus)
{ {
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus())); connect(m_session, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateStatus()));
connect(m_session, SIGNAL(pendingStateChanged(QCamera::State)), SLOT(updateStatus())); connect(m_session, SIGNAL(pendingStateChanged(QCamera::State)), SLOT(updateStatus()));
connect(m_session, SIGNAL(busyChanged(bool)), SLOT(updateStatus())); connect(m_session, SIGNAL(busyChanged(bool)), SLOT(updateStatus()));
@@ -86,12 +86,12 @@ QMediaRecorder::Status CameraBinRecorder::status() const
void CameraBinRecorder::updateStatus() void CameraBinRecorder::updateStatus()
{ {
QCamera::State sessionState = m_session->state(); QCamera::Status sessionStatus = m_session->status();
QMediaRecorder::State oldState = m_state; QMediaRecorder::State oldState = m_state;
QMediaRecorder::Status oldStatus = m_status; QMediaRecorder::Status oldStatus = m_status;
if (sessionState == QCamera::ActiveState && if (sessionStatus == QCamera::ActiveStatus &&
m_session->captureMode().testFlag(QCamera::CaptureVideo)) { m_session->captureMode().testFlag(QCamera::CaptureVideo)) {
if (!m_session->cameraControl()->resourcePolicy()->canCapture()) { if (!m_session->cameraControl()->resourcePolicy()->canCapture()) {
@@ -214,7 +214,7 @@ void CameraBinRecorder::setState(QMediaRecorder::State state)
break; break;
case QMediaRecorder::RecordingState: case QMediaRecorder::RecordingState:
if (m_session->state() != QCamera::ActiveState) { if (m_session->status() != QCamera::ActiveStatus) {
emit error(QMediaRecorder::ResourceError, tr("Service has not been started")); emit error(QMediaRecorder::ResourceError, tr("Service has not been started"));
} else if (!m_session->cameraControl()->resourcePolicy()->canCapture()) { } else if (!m_session->cameraControl()->resourcePolicy()->canCapture()) {
emit error(QMediaRecorder::ResourceError, tr("Recording permissions are not available")); emit error(QMediaRecorder::ResourceError, tr("Recording permissions are not available"));

@@ -55,6 +55,7 @@
#include "camerabincapturebufferformat.h" #include "camerabincapturebufferformat.h"
#include "camerabincapturedestination.h" #include "camerabincapturedestination.h"
#include "camerabinviewfindersettings.h" #include "camerabinviewfindersettings.h"
#include "camerabinviewfindersettings2.h"
#include <private/qgstreamerbushelper_p.h> #include <private/qgstreamerbushelper_p.h>
#include <private/qgstutils_p.h> #include <private/qgstutils_p.h>
@@ -84,7 +85,9 @@ QT_BEGIN_NAMESPACE
CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *parent): CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *parent):
QMediaService(parent), QMediaService(parent),
m_cameraInfoControl(0) m_cameraInfoControl(0),
m_viewfinderSettingsControl(0),
m_viewfinderSettingsControl2(0)
{ {
m_captureSession = 0; m_captureSession = 0;
m_metaDataControl = 0; m_metaDataControl = 0;
@@ -224,8 +227,17 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (qstrcmp(name, QCameraCaptureBufferFormatControl_iid) == 0) if (qstrcmp(name, QCameraCaptureBufferFormatControl_iid) == 0)
return m_captureSession->captureBufferFormatControl(); return m_captureSession->captureBufferFormatControl();
if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0) if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0) {
return m_captureSession->viewfinderSettingsControl(); if (!m_viewfinderSettingsControl)
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(m_captureSession);
return m_viewfinderSettingsControl;
}
if (qstrcmp(name, QCameraViewfinderSettingsControl2_iid) == 0) {
if (!m_viewfinderSettingsControl2)
m_viewfinderSettingsControl2 = new CameraBinViewfinderSettings2(m_captureSession);
return m_viewfinderSettingsControl2;
}
if (qstrcmp(name, QCameraInfoControl_iid) == 0) { if (qstrcmp(name, QCameraInfoControl_iid) == 0) {
if (!m_cameraInfoControl) if (!m_cameraInfoControl)

@@ -53,6 +53,8 @@ class QGstreamerElementFactory;
class CameraBinMetaData; class CameraBinMetaData;
class CameraBinImageCapture; class CameraBinImageCapture;
class CameraBinMetaData; class CameraBinMetaData;
class CameraBinViewfinderSettings;
class CameraBinViewfinderSettings2;
class CameraBinService : public QMediaService class CameraBinService : public QMediaService
{ {
@@ -85,6 +87,9 @@ private:
#endif #endif
CameraBinImageCapture *m_imageCaptureControl; CameraBinImageCapture *m_imageCaptureControl;
QMediaControl *m_cameraInfoControl; QMediaControl *m_cameraInfoControl;
CameraBinViewfinderSettings *m_viewfinderSettingsControl;
CameraBinViewfinderSettings2 *m_viewfinderSettingsControl2;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

@@ -43,9 +43,9 @@
#include "camerabinflash.h" #include "camerabinflash.h"
#include "camerabinfocus.h" #include "camerabinfocus.h"
#include "camerabinlocks.h" #include "camerabinlocks.h"
#include "camerabinzoom.h"
#endif #endif
#include "camerabinzoom.h"
#include "camerabinimageprocessing.h" #include "camerabinimageprocessing.h"
#include "camerabinviewfindersettings.h" #include "camerabinviewfindersettings.h"
@@ -55,6 +55,7 @@
#include <private/qgstreamervideorendererinterface_p.h> #include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstutils_p.h> #include <private/qgstutils_p.h>
#include <qmediarecorder.h> #include <qmediarecorder.h>
#include <qvideosurfaceformat.h>
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
#include <gst/interfaces/photography.h> #include <gst/interfaces/photography.h>
@@ -106,17 +107,12 @@
#define PREVIEW_CAPS_4_3 \ #define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480" "video/x-raw-rgb, width = (int) 640, height = (int) 480"
//using GST_STATE_READY for QCamera::LoadedState
//may not work reliably at least with some webcams.
//#define USE_READY_STATE_ON_LOADED
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *parent) CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *parent)
:QObject(parent), :QObject(parent),
m_recordingActive(false), m_recordingActive(false),
m_state(QCamera::UnloadedState), m_status(QCamera::UnloadedStatus),
m_pendingState(QCamera::UnloadedState), m_pendingState(QCamera::UnloadedState),
m_muted(false), m_muted(false),
m_busy(false), m_busy(false),
@@ -125,11 +121,20 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
m_videoInputFactory(0), m_videoInputFactory(0),
m_viewfinder(0), m_viewfinder(0),
m_viewfinderInterface(0), m_viewfinderInterface(0),
#ifdef HAVE_GST_PHOTOGRAPHY
m_cameraExposureControl(0),
m_cameraFlashControl(0),
m_cameraFocusControl(0),
m_cameraLocksControl(0),
#endif
m_cameraSrc(0),
m_videoSrc(0), m_videoSrc(0),
m_viewfinderElement(0), m_viewfinderElement(0),
m_sourceFactory(sourceFactory), m_sourceFactory(sourceFactory),
m_viewfinderHasChanged(true), m_viewfinderHasChanged(true),
m_videoInputHasChanged(true), m_inputDeviceHasChanged(true),
m_usingWrapperCameraBinSrc(false),
m_viewfinderProbe(this),
m_audioSrc(0), m_audioSrc(0),
m_audioConvert(0), m_audioConvert(0),
m_capsFilter(0), m_capsFilter(0),
@@ -158,18 +163,10 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
m_imageEncodeControl = new CameraBinImageEncoder(this); m_imageEncodeControl = new CameraBinImageEncoder(this);
m_recorderControl = new CameraBinRecorder(this); m_recorderControl = new CameraBinRecorder(this);
m_mediaContainerControl = new CameraBinContainer(this); m_mediaContainerControl = new CameraBinContainer(this);
#ifdef HAVE_GST_PHOTOGRAPHY
m_cameraExposureControl = new CameraBinExposure(this);
m_cameraFlashControl = new CameraBinFlash(this);
m_cameraFocusControl = new CameraBinFocus(this);
m_cameraLocksControl = new CameraBinLocks(this);
m_cameraZoomControl = new CameraBinZoom(this); m_cameraZoomControl = new CameraBinZoom(this);
#endif
m_imageProcessingControl = new CameraBinImageProcessing(this); m_imageProcessingControl = new CameraBinImageProcessing(this);
m_captureDestinationControl = new CameraBinCaptureDestination(this); m_captureDestinationControl = new CameraBinCaptureDestination(this);
m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this); m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this);
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(this);
QByteArray envFlags = qgetenv("QT_GSTREAMER_CAMERABIN_FLAGS"); QByteArray envFlags = qgetenv("QT_GSTREAMER_CAMERABIN_FLAGS");
if (!envFlags.isEmpty()) if (!envFlags.isEmpty())
@@ -223,24 +220,48 @@ GstPhotography *CameraBinSession::photography()
return 0; return 0;
} }
#endif
CameraBinSession::CameraRole CameraBinSession::cameraRole() const CameraBinExposure *CameraBinSession::cameraExposureControl()
{ {
return BackCamera; if (!m_cameraExposureControl && photography())
m_cameraExposureControl = new CameraBinExposure(this);
return m_cameraExposureControl;
} }
/* CameraBinFlash *CameraBinSession::cameraFlashControl()
Configure camera during Loaded->Active states stansition. {
*/ if (!m_cameraFlashControl && photography())
m_cameraFlashControl = new CameraBinFlash(this);
return m_cameraFlashControl;
}
CameraBinFocus *CameraBinSession::cameraFocusControl()
{
if (!m_cameraFocusControl && photography())
m_cameraFocusControl = new CameraBinFocus(this);
return m_cameraFocusControl;
}
CameraBinLocks *CameraBinSession::cameraLocksControl()
{
if (!m_cameraLocksControl && photography())
m_cameraLocksControl = new CameraBinLocks(this);
return m_cameraLocksControl;
}
#endif
bool CameraBinSession::setupCameraBin() bool CameraBinSession::setupCameraBin()
{ {
if (!buildCameraSource()) if (!buildCameraSource())
return false; return false;
if (m_viewfinderHasChanged) { if (m_viewfinderHasChanged) {
if (m_viewfinderElement) if (m_viewfinderElement) {
GstPad *pad = gst_element_get_static_pad(m_viewfinderElement, "sink");
m_viewfinderProbe.removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
gst_object_unref(GST_OBJECT(m_viewfinderElement)); gst_object_unref(GST_OBJECT(m_viewfinderElement));
}
m_viewfinderElement = m_viewfinderInterface ? m_viewfinderInterface->videoSink() : 0; m_viewfinderElement = m_viewfinderInterface ? m_viewfinderInterface->videoSink() : 0;
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
@@ -248,9 +269,15 @@ bool CameraBinSession::setupCameraBin()
#endif #endif
m_viewfinderHasChanged = false; m_viewfinderHasChanged = false;
if (!m_viewfinderElement) { if (!m_viewfinderElement) {
qWarning() << "Staring camera without viewfinder available"; if (m_pendingState == QCamera::ActiveState)
qWarning() << "Starting camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL); m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
} }
GstPad *pad = gst_element_get_static_pad(m_viewfinderElement, "sink");
m_viewfinderProbe.addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(m_viewfinderElement), "sync", FALSE, NULL); g_object_set(G_OBJECT(m_viewfinderElement), "sync", FALSE, NULL);
qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement)); qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL); gst_element_set_state(m_camerabin, GST_STATE_NULL);
@@ -260,9 +287,15 @@ bool CameraBinSession::setupCameraBin()
return true; return true;
} }
static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0) static GstCaps *resolutionToCaps(const QSize &resolution,
qreal frameRate = 0.0,
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid)
{ {
GstCaps *caps = QGstUtils::videoFilterCaps(); GstCaps *caps = 0;
if (pixelFormat == QVideoFrame::Format_Invalid)
caps = QGstUtils::videoFilterCaps();
else
caps = QGstUtils::capsForFormats(QList<QVideoFrame::PixelFormat>() << pixelFormat);
if (!resolution.isEmpty()) { if (!resolution.isEmpty()) {
gst_caps_set_simple( gst_caps_set_simple(
@@ -288,75 +321,92 @@ static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0)
void CameraBinSession::setupCaptureResolution() void CameraBinSession::setupCaptureResolution()
{ {
QSize resolution = m_imageEncodeControl->imageSettings().resolution(); QSize viewfinderResolution = m_viewfinderSettings.resolution();
{ qreal viewfinderFrameRate = m_viewfinderSettings.maximumFrameRate();
GstCaps *caps = resolutionToCaps(resolution); QVideoFrame::PixelFormat viewfinderPixelFormat = m_viewfinderSettings.pixelFormat();
#if CAMERABIN_DEBUG const QSize imageResolution = m_imageEncodeControl->imageSettings().resolution();
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << caps; const QSize videoResolution = m_videoEncodeControl->actualVideoSettings().resolution();
#endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
if (caps)
gst_caps_unref(caps);
}
const QSize viewfinderResolution = m_viewfinderSettingsControl->resolution(); // WrapperCameraBinSrc cannot have different caps on its imgsrc, vidsrc and vfsrc pads.
resolution = m_videoEncodeControl->actualVideoSettings().resolution(); // If capture resolution is specified, use it also for the viewfinder to avoid caps negotiation
qreal framerate = m_videoEncodeControl->videoSettings().frameRate(); // to fail.
{ if (m_usingWrapperCameraBinSrc) {
GstCaps *caps = resolutionToCaps( if (m_captureMode == QCamera::CaptureStillImage && !imageResolution.isEmpty())
!resolution.isEmpty() ? resolution : viewfinderResolution, framerate); viewfinderResolution = imageResolution;
#if CAMERABIN_DEBUG else if (m_captureMode == QCamera::CaptureVideo && !videoResolution.isEmpty())
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << caps; viewfinderResolution = videoResolution;
#endif
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
if (caps)
gst_caps_unref(caps);
}
if (!viewfinderResolution.isEmpty()) // Make sure we don't use incompatible frame rate and pixel format with the new resolution
resolution = viewfinderResolution; if (viewfinderResolution != m_viewfinderSettings.resolution() &&
(!qFuzzyIsNull(viewfinderFrameRate) || viewfinderPixelFormat != QVideoFrame::Format_Invalid)) {
{ enum {
GstCaps *caps = resolutionToCaps(resolution); Nothing = 0x0,
#if CAMERABIN_DEBUG OnlyFrameRate = 0x1,
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << caps; OnlyPixelFormat = 0x2,
#endif Both = 0x4
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL); };
if (caps) quint8 found = Nothing;
gst_caps_unref(caps);
GstElement *mfw_v4lsrc = 0; for (int i = 0; i < m_supportedViewfinderSettings.count() && !(found & Both); ++i) {
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "video-source")) { const QCameraViewfinderSettings &s = m_supportedViewfinderSettings.at(i);
GstElement *videoSrc = 0; if (s.resolution() == viewfinderResolution) {
g_object_get(G_OBJECT(m_videoSrc), "video-source", &videoSrc, NULL); if ((qFuzzyIsNull(viewfinderFrameRate) || s.maximumFrameRate() == viewfinderFrameRate)
if (videoSrc) { && (viewfinderPixelFormat == QVideoFrame::Format_Invalid || s.pixelFormat() == viewfinderPixelFormat))
const char *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(videoSrc))); found |= Both;
if (!qstrcmp(name, "mfw_v4lsrc")) else if (s.maximumFrameRate() == viewfinderFrameRate)
mfw_v4lsrc = videoSrc; found |= OnlyFrameRate;
else if (s.pixelFormat() == viewfinderPixelFormat)
found |= OnlyPixelFormat;
}
}
if (found & Both) {
// no-op
} else if (found & OnlyPixelFormat) {
viewfinderFrameRate = qreal(0);
} else if (found & OnlyFrameRate) {
viewfinderPixelFormat = QVideoFrame::Format_Invalid;
} else {
viewfinderPixelFormat = QVideoFrame::Format_Invalid;
viewfinderFrameRate = qreal(0);
} }
} }
}
if (mfw_v4lsrc) { GstCaps *caps = resolutionToCaps(imageResolution);
int capMode = 0; g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
if (resolution == QSize(320, 240)) gst_caps_unref(caps);
capMode = 1;
else if (resolution == QSize(720, 480))
capMode = 2;
else if (resolution == QSize(720, 576))
capMode = 3;
else if (resolution == QSize(1280, 720))
capMode = 4;
else if (resolution == QSize(1920, 1080))
capMode = 5;
g_object_set(G_OBJECT(mfw_v4lsrc), "capture-mode", capMode, NULL);
const qreal maxFps = m_viewfinderSettingsControl->maximumFrameRate(); qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
if (!qFuzzyIsNull(maxFps)) { caps = resolutionToCaps(videoResolution, framerate);
int n, d; g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_util_double_to_fraction(maxFps, &n, &d); gst_caps_unref(caps);
g_object_set(G_OBJECT(mfw_v4lsrc), "fps-n", n, NULL);
g_object_set(G_OBJECT(mfw_v4lsrc), "fps-d", d, NULL); caps = resolutionToCaps(viewfinderResolution, viewfinderFrameRate, viewfinderPixelFormat);
} g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
// Special case when using mfw_v4lsrc
if (m_videoSrc && qstrcmp(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(m_videoSrc))), "mfw_v4lsrc") == 0) {
int capMode = 0;
if (viewfinderResolution == QSize(320, 240))
capMode = 1;
else if (viewfinderResolution == QSize(720, 480))
capMode = 2;
else if (viewfinderResolution == QSize(720, 576))
capMode = 3;
else if (viewfinderResolution == QSize(1280, 720))
capMode = 4;
else if (viewfinderResolution == QSize(1920, 1080))
capMode = 5;
g_object_set(G_OBJECT(m_videoSrc), "capture-mode", capMode, NULL);
if (!qFuzzyIsNull(viewfinderFrameRate)) {
int n, d;
gst_util_double_to_fraction(viewfinderFrameRate, &n, &d);
g_object_set(G_OBJECT(m_videoSrc), "fps-n", n, NULL);
g_object_set(G_OBJECT(m_videoSrc), "fps-d", d, NULL);
} }
} }
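For orientation, here is a minimal standalone sketch, not taken from the patch, of the kind of fixed caps the reworked resolutionToCaps()/setupCaptureResolution() path ends up installing on the camerabin capture and viewfinder properties. It assumes the GStreamer 1.0 API and an arbitrary 1280x720 at 30 fps request; the helper name is made up for illustration.

    // Hypothetical helper: build caps comparable to what resolutionToCaps()
    // produces for a 1280x720, 30 fps viewfinder request (GStreamer 1.0).
    #include <gst/gst.h>

    static GstCaps *makeExampleViewfinderCaps()
    {
        gint n = 0, d = 0;
        gst_util_double_to_fraction(30.0, &n, &d);      // 30.0 -> 30/1

        GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                            "width", G_TYPE_INT, 1280,
                                            "height", G_TYPE_INT, 720,
                                            "framerate", GST_TYPE_FRACTION, n, d,
                                            NULL);
        return caps;    // caller releases it with gst_caps_unref()
    }

Caps built this way are then handed to g_object_set() for the image capture, video capture and viewfinder caps properties, as the hunk above does.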
@@ -370,7 +420,7 @@ void CameraBinSession::setAudioCaptureCaps()
const int sampleRate = settings.sampleRate(); const int sampleRate = settings.sampleRate();
const int channelCount = settings.channelCount(); const int channelCount = settings.channelCount();
if (sampleRate == -1 && channelCount == -1) if (sampleRate <= 0 && channelCount <=0)
return; return;
#if GST_CHECK_VERSION(1,0,0) #if GST_CHECK_VERSION(1,0,0)
@@ -384,9 +434,9 @@ void CameraBinSession::setAudioCaptureCaps()
"depth", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
NULL); NULL);
#endif #endif
if (sampleRate != -1) if (sampleRate > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL); gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1) if (channelCount > 0)
gst_structure_set(structure, "channels", G_TYPE_INT, channelCount, NULL); gst_structure_set(structure, "channels", G_TYPE_INT, channelCount, NULL);
GstCaps *caps = gst_caps_new_full(structure, NULL); GstCaps *caps = gst_caps_new_full(structure, NULL);
@@ -402,87 +452,92 @@ GstElement *CameraBinSession::buildCameraSource()
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO; qDebug() << Q_FUNC_INFO;
#endif #endif
if (!m_videoInputHasChanged) if (!m_inputDeviceHasChanged)
return m_videoSrc; return m_cameraSrc;
m_videoInputHasChanged = false;
GstElement *videoSrc = 0; m_inputDeviceHasChanged = false;
m_usingWrapperCameraBinSrc = false;
if (!videoSrc) GstElement *camSrc = 0;
g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &videoSrc, NULL); g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &camSrc, NULL);
if (m_sourceFactory) if (!m_cameraSrc && m_sourceFactory)
m_videoSrc = gst_element_factory_create(m_sourceFactory, "camera_source"); m_cameraSrc = gst_element_factory_create(m_sourceFactory, "camera_source");
// If gstreamer has set a default source use it. // If gstreamer has set a default source use it.
if (!m_videoSrc) if (!m_cameraSrc)
m_videoSrc = videoSrc; m_cameraSrc = camSrc;
if (m_videoSrc && !m_inputDevice.isEmpty()) { if (m_cameraSrc && !m_inputDevice.isEmpty()) {
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << "set camera device" << m_inputDevice; qDebug() << "set camera device" << m_inputDevice;
#endif #endif
const char *const cameraSrcName = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(m_cameraSrc)));
m_usingWrapperCameraBinSrc = qstrcmp(cameraSrcName, "wrappercamerabinsrc") == 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "video-source")) { if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "video-source")) {
GstElement *src = 0; if (!m_videoSrc) {
/* QT_GSTREAMER_CAMERABIN_VIDEOSRC can be used to set the video source element.
/* QT_GSTREAMER_CAMERABIN_VIDEOSRC can be used to set the video source element. --- Usage
--- Usage QT_GSTREAMER_CAMERABIN_VIDEOSRC=[drivername=elementname[,drivername2=elementname2 ...],][elementname]
QT_GSTREAMER_CAMERABIN_VIDEOSRC=[drivername=elementname[,drivername2=elementname2 ...],][elementname] --- Examples
--- Examples Always use 'somevideosrc':
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somevideosrc"
Always use 'somevideosrc': Use 'somevideosrc' when the device driver is 'somedriver', otherwise use default:
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somevideosrc" QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc"
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use default: Use 'somevideosrc' when the device driver is 'somedriver', otherwise use 'somevideosrc2'
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc" QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc,somevideosrc2"
*/
const QByteArray envVideoSource = qgetenv("QT_GSTREAMER_CAMERABIN_VIDEOSRC");
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use 'somevideosrc2' if (!envVideoSource.isEmpty()) {
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc,somevideosrc2" QList<QByteArray> sources = envVideoSource.split(',');
*/ foreach (const QByteArray &source, sources) {
const QByteArray envVideoSource = qgetenv("QT_GSTREAMER_CAMERABIN_VIDEOSRC"); QList<QByteArray> keyValue = source.split('=');
if (!envVideoSource.isEmpty()) { if (keyValue.count() == 1) {
QList<QByteArray> sources = envVideoSource.split(','); m_videoSrc = gst_element_factory_make(keyValue.at(0), "camera_source");
foreach (const QByteArray &source, sources) { break;
QList<QByteArray> keyValue = source.split('='); } else if (keyValue.at(0) == QGstUtils::cameraDriver(m_inputDevice, m_sourceFactory)) {
if (keyValue.count() == 1) { m_videoSrc = gst_element_factory_make(keyValue.at(1), "camera_source");
src = gst_element_factory_make(keyValue.at(0), "camera_source"); break;
break; }
} else if (keyValue.at(0) == QGstUtils::cameraDriver(m_inputDevice, m_sourceFactory)) {
src = gst_element_factory_make(keyValue.at(1), "camera_source");
break;
} }
} else if (m_videoInputFactory) {
m_videoSrc = m_videoInputFactory->buildElement();
} }
} else if (m_videoInputFactory) {
src = m_videoInputFactory->buildElement(); if (!m_videoSrc)
m_videoSrc = gst_element_factory_make("v4l2src", "camera_source");
g_object_set(G_OBJECT(m_cameraSrc), "video-source", m_videoSrc, NULL);
} }
if (!src) if (m_videoSrc)
src = gst_element_factory_make("v4l2src", "camera_source"); g_object_set(G_OBJECT(m_videoSrc), "device", m_inputDevice.toUtf8().constData(), NULL);
if (src) { } else if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "camera-device")) {
g_object_set(G_OBJECT(src), "device", m_inputDevice.toUtf8().constData(), NULL);
g_object_set(G_OBJECT(m_videoSrc), "video-source", src, NULL);
}
} else if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "camera-device")) {
if (m_inputDevice == QLatin1String("secondary")) { if (m_inputDevice == QLatin1String("secondary")) {
g_object_set(G_OBJECT(m_videoSrc), "camera-device", 1, NULL); g_object_set(G_OBJECT(m_cameraSrc), "camera-device", 1, NULL);
} else { } else {
g_object_set(G_OBJECT(m_videoSrc), "camera-device", 0, NULL); g_object_set(G_OBJECT(m_cameraSrc), "camera-device", 0, NULL);
} }
} }
} }
if (m_videoSrc != videoSrc) if (m_cameraSrc != camSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL); g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_cameraSrc, NULL);
if (videoSrc) if (camSrc)
gst_object_unref(GST_OBJECT(videoSrc)); gst_object_unref(GST_OBJECT(camSrc));
return m_videoSrc; return m_cameraSrc;
} }
void CameraBinSession::captureImage(int requestId, const QString &fileName) void CameraBinSession::captureImage(int requestId, const QString &fileName)
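The QT_GSTREAMER_CAMERABIN_VIDEOSRC override documented in the comment inside buildCameraSource() above can also be set programmatically, as long as it happens before the camera service builds its pipeline. A minimal sketch, with made-up driver and element names purely for illustration:

    // Hypothetical sketch: pick "imxv4l2src" when the driver is "mxc_v4l2",
    // otherwise fall back to "v4l2src". Must run before QCamera is created.
    #include <QtCore/QtGlobal>
    #include <QtCore/QByteArray>

    static void forceCameraSourceElement()
    {
        qputenv("QT_GSTREAMER_CAMERABIN_VIDEOSRC",
                QByteArrayLiteral("mxc_v4l2=imxv4l2src,v4l2src"));
    }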
@@ -595,7 +650,7 @@ void CameraBinSession::setDevice(const QString &device)
{ {
if (m_inputDevice != device) { if (m_inputDevice != device) {
m_inputDevice = device; m_inputDevice = device;
m_videoInputHasChanged = true; m_inputDeviceHasChanged = true;
} }
} }
@@ -607,7 +662,7 @@ void CameraBinSession::setAudioInput(QGstreamerElementFactory *audioInput)
void CameraBinSession::setVideoInput(QGstreamerElementFactory *videoInput) void CameraBinSession::setVideoInput(QGstreamerElementFactory *videoInput)
{ {
m_videoInputFactory = videoInput; m_videoInputFactory = videoInput;
m_videoInputHasChanged = true; m_inputDeviceHasChanged = true;
} }
bool CameraBinSession::isReady() const bool CameraBinSession::isReady() const
@@ -655,6 +710,28 @@ void CameraBinSession::setViewfinder(QObject *viewfinder)
} }
} }
QList<QCameraViewfinderSettings> CameraBinSession::supportedViewfinderSettings() const
{
return m_supportedViewfinderSettings;
}
QCameraViewfinderSettings CameraBinSession::viewfinderSettings() const
{
return m_status == QCamera::ActiveStatus ? m_actualViewfinderSettings : m_viewfinderSettings;
}
void CameraBinSession::ViewfinderProbe::probeCaps(GstCaps *caps)
{
// Update actual viewfinder settings on viewfinder caps change
const GstStructure *s = gst_caps_get_structure(caps, 0);
const QPair<qreal, qreal> frameRate = QGstUtils::structureFrameRateRange(s);
session->m_actualViewfinderSettings.setResolution(QGstUtils::structureResolution(s));
session->m_actualViewfinderSettings.setMinimumFrameRate(frameRate.first);
session->m_actualViewfinderSettings.setMaximumFrameRate(frameRate.second);
session->m_actualViewfinderSettings.setPixelFormat(QGstUtils::structurePixelFormat(s));
session->m_actualViewfinderSettings.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(s));
}
void CameraBinSession::handleViewfinderChange() void CameraBinSession::handleViewfinderChange()
{ {
//the viewfinder will be reloaded //the viewfinder will be reloaded
@@ -663,9 +740,20 @@ void CameraBinSession::handleViewfinderChange()
emit viewfinderChanged(); emit viewfinderChanged();
} }
QCamera::State CameraBinSession::state() const void CameraBinSession::setStatus(QCamera::Status status)
{ {
return m_state; if (m_status == status)
return;
m_status = status;
emit statusChanged(m_status);
setStateHelper(m_pendingState);
}
QCamera::Status CameraBinSession::status() const
{
return m_status;
} }
QCamera::State CameraBinSession::pendingState() const QCamera::State CameraBinSession::pendingState() const
@@ -685,66 +773,116 @@ void CameraBinSession::setState(QCamera::State newState)
qDebug() << Q_FUNC_INFO << newState; qDebug() << Q_FUNC_INFO << newState;
#endif #endif
switch (newState) { setStateHelper(newState);
}
void CameraBinSession::setStateHelper(QCamera::State state)
{
switch (state) {
case QCamera::UnloadedState: case QCamera::UnloadedState:
if (m_recordingActive) unload();
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
m_state = newState;
if (m_busy)
emit busyChanged(m_busy = false);
emit stateChanged(m_state);
break; break;
case QCamera::LoadedState: case QCamera::LoadedState:
if (m_recordingActive) if (m_status == QCamera::ActiveStatus)
stopVideoRecording(); stop();
else if (m_status == QCamera::UnloadedStatus)
if (m_videoInputHasChanged) { load();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
buildCameraSource();
}
#ifdef USE_READY_STATE_ON_LOADED
gst_element_set_state(m_camerabin, GST_STATE_READY);
#else
m_state = QCamera::LoadedState;
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
emit stateChanged(m_state);
#endif
break; break;
case QCamera::ActiveState: case QCamera::ActiveState:
if (setupCameraBin()) { // If the viewfinder changed while in the loaded state, we need to reload the pipeline
GstState binState = GST_STATE_NULL; if (m_status == QCamera::LoadedStatus && !m_viewfinderHasChanged)
GstState pending = GST_STATE_NULL; start();
gst_element_get_state(m_camerabin, &binState, &pending, 0); else if (m_status == QCamera::UnloadedStatus || m_viewfinderHasChanged)
load();
m_recorderControl->applySettings();
GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
profile,
NULL);
gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
setupCaptureResolution();
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
}
} }
} }
void CameraBinSession::setError(int err, const QString &errorString)
{
m_pendingState = QCamera::UnloadedState;
emit error(err, errorString);
setStatus(QCamera::UnloadedStatus);
}
void CameraBinSession::load()
{
if (m_status != QCamera::UnloadedStatus && !m_viewfinderHasChanged)
return;
setStatus(QCamera::LoadingStatus);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
if (!setupCameraBin()) {
setError(QCamera::CameraError, QStringLiteral("No camera source available"));
return;
}
gst_element_set_state(m_camerabin, GST_STATE_READY);
}
void CameraBinSession::unload()
{
if (m_status == QCamera::UnloadedStatus || m_status == QCamera::UnloadingStatus)
return;
setStatus(QCamera::UnloadingStatus);
if (m_recordingActive)
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
if (m_busy)
emit busyChanged(m_busy = false);
m_supportedViewfinderSettings.clear();
setStatus(QCamera::UnloadedStatus);
}
void CameraBinSession::start()
{
if (m_status != QCamera::LoadedStatus)
return;
setStatus(QCamera::StartingStatus);
m_recorderControl->applySettings();
GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
profile,
NULL);
gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
setupCaptureResolution();
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
}
void CameraBinSession::stop()
{
if (m_status != QCamera::ActiveStatus)
return;
setStatus(QCamera::StoppingStatus);
if (m_recordingActive)
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_READY);
}
bool CameraBinSession::isBusy() const bool CameraBinSession::isBusy() const
{ {
return m_busy; return m_busy;
@@ -889,7 +1027,7 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
if (message.isEmpty()) if (message.isEmpty())
message = tr("Camera error"); message = tr("Camera error");
emit error(int(QMediaRecorder::ResourceError), message); setError(int(QMediaRecorder::ResourceError), message);
} }
#ifdef CAMERABIN_DEBUG_DUMP_BIN #ifdef CAMERABIN_DEBUG_DUMP_BIN
@@ -955,17 +1093,20 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
switch (newState) { switch (newState) {
case GST_STATE_VOID_PENDING: case GST_STATE_VOID_PENDING:
case GST_STATE_NULL: case GST_STATE_NULL:
if (m_state != QCamera::UnloadedState) setStatus(QCamera::UnloadedStatus);
emit stateChanged(m_state = QCamera::UnloadedState);
break; break;
case GST_STATE_READY: case GST_STATE_READY:
if (oldState == GST_STATE_NULL)
updateSupportedViewfinderSettings();
setMetaData(m_metaData); setMetaData(m_metaData);
if (m_state != QCamera::LoadedState) setStatus(QCamera::LoadedStatus);
emit stateChanged(m_state = QCamera::LoadedState); break;
case GST_STATE_PLAYING:
setStatus(QCamera::ActiveStatus);
break; break;
case GST_STATE_PAUSED: case GST_STATE_PAUSED:
case GST_STATE_PLAYING: default:
emit stateChanged(m_state = QCamera::ActiveState);
break; break;
} }
} }
@@ -973,7 +1114,6 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
default: default:
break; break;
} }
//qDebug() << "New session state:" << ENUM_NAME(CameraBinSession,"State",m_state);
} }
} }
@@ -1034,14 +1174,47 @@ static bool rateLessThan(const QPair<int,int> &r1, const QPair<int,int> &r2)
return r1.first*r2.second < r2.first*r1.second; return r1.first*r2.second < r2.first*r1.second;
} }
GstCaps *CameraBinSession::supportedCaps(QCamera::CaptureModes mode) const
{
GstCaps *supportedCaps = 0;
// When using wrappercamerabinsrc, get the supported caps directly from the video source element.
// This makes sure we only get the caps actually supported by the video source element.
if (m_videoSrc) {
GstPad *pad = gst_element_get_static_pad(m_videoSrc, "src");
if (pad) {
supportedCaps = qt_gst_pad_get_caps(pad);
gst_object_unref(GST_OBJECT(pad));
}
}
// Otherwise, let the camerabin handle this.
if (!supportedCaps) {
const gchar *prop;
switch (mode) {
case QCamera::CaptureStillImage:
prop = SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY;
break;
case QCamera::CaptureVideo:
prop = SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY;
break;
case QCamera::CaptureViewfinder:
default:
prop = SUPPORTED_VIEWFINDER_CAPS_PROPERTY;
break;
}
g_object_get(G_OBJECT(m_camerabin), prop, &supportedCaps, NULL);
}
return supportedCaps;
}
QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frameSize, bool *continuous) const QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frameSize, bool *continuous) const
{ {
QList< QPair<int,int> > res; QList< QPair<int,int> > res;
GstCaps *supportedCaps = 0; GstCaps *supportedCaps = this->supportedCaps(QCamera::CaptureVideo);
g_object_get(G_OBJECT(m_camerabin),
SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
if (!supportedCaps) if (!supportedCaps)
return res; return res;
@@ -1144,11 +1317,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
if (continuous) if (continuous)
*continuous = false; *continuous = false;
GstCaps *supportedCaps = 0; GstCaps *supportedCaps = this->supportedCaps(mode);
g_object_get(G_OBJECT(m_camerabin),
(mode == QCamera::CaptureStillImage) ?
SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
#if CAMERABIN_DEBUG #if CAMERABIN_DEBUG
qDebug() << "Source caps:" << supportedCaps; qDebug() << "Source caps:" << supportedCaps;
@@ -1278,6 +1447,40 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
return res; return res;
} }
void CameraBinSession::updateSupportedViewfinderSettings()
{
m_supportedViewfinderSettings.clear();
GstCaps *supportedCaps = this->supportedCaps(QCamera::CaptureViewfinder);
// Convert caps to QCameraViewfinderSettings
if (supportedCaps) {
supportedCaps = qt_gst_caps_normalize(supportedCaps);
for (uint i = 0; i < gst_caps_get_size(supportedCaps); i++) {
const GstStructure *structure = gst_caps_get_structure(supportedCaps, i);
QCameraViewfinderSettings s;
s.setResolution(QGstUtils::structureResolution(structure));
s.setPixelFormat(QGstUtils::structurePixelFormat(structure));
s.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(structure));
QPair<qreal, qreal> frameRateRange = QGstUtils::structureFrameRateRange(structure);
s.setMinimumFrameRate(frameRateRange.first);
s.setMaximumFrameRate(frameRateRange.second);
if (!s.resolution().isEmpty()
&& s.pixelFormat() != QVideoFrame::Format_Invalid
&& !m_supportedViewfinderSettings.contains(s)) {
m_supportedViewfinderSettings.append(s);
}
}
gst_caps_unref(supportedCaps);
}
}
void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSession *session) void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSession *session)
{ {
GstElementFactory *factory = gst_element_get_factory(element); GstElementFactory *factory = gst_element_get_factory(element);
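As a side note on updateSupportedViewfinderSettings() above: normalizing caps expands value lists into one fixed structure per combination, which is what makes the per-structure conversion to QCameraViewfinderSettings possible. A rough standalone sketch of that walk, using the plain GStreamer 1.0 gst_caps_normalize() in place of the internal qt_gst_caps_normalize() wrapper, with a made-up function name:

    // Hypothetical sketch: enumerate the fixed structures of normalized caps
    // and print the resolution carried by each one (GStreamer 1.0 API).
    #include <gst/gst.h>

    static void dumpCapsStructures(GstCaps *caps)       // takes ownership of caps
    {
        caps = gst_caps_normalize(caps);                 // one structure per combination
        for (guint i = 0; i < gst_caps_get_size(caps); ++i) {
            const GstStructure *s = gst_caps_get_structure(caps, i);
            gint width = 0, height = 0;
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
            g_print("%s: %dx%d\n", gst_structure_get_name(s), width, height);
        }
        gst_caps_unref(caps);
    }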
@@ -45,6 +45,7 @@
#endif #endif
#include <private/qgstreamerbushelper_p.h> #include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
#include "qcamera.h" #include "qcamera.h"
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -74,7 +75,6 @@ public:
virtual GstElement *buildElement() = 0; virtual GstElement *buildElement() = 0;
}; };
class CameraBinSession : public QObject, class CameraBinSession : public QObject,
public QGstreamerBusMessageFilter, public QGstreamerBusMessageFilter,
public QGstreamerSyncMessageFilter public QGstreamerSyncMessageFilter
@@ -83,11 +83,6 @@ class CameraBinSession : public QObject,
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged) Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
Q_INTERFACES(QGstreamerBusMessageFilter QGstreamerSyncMessageFilter) Q_INTERFACES(QGstreamerBusMessageFilter QGstreamerSyncMessageFilter)
public: public:
enum CameraRole {
FrontCamera, // Secondary camera
BackCamera // Main photo camera
};
CameraBinSession(GstElementFactory *sourceFactory, QObject *parent); CameraBinSession(GstElementFactory *sourceFactory, QObject *parent);
~CameraBinSession(); ~CameraBinSession();
@@ -98,8 +93,6 @@ public:
GstElement *cameraSource() { return m_videoSrc; } GstElement *cameraSource() { return m_videoSrc; }
QGstreamerBusHelper *bus() { return m_busHelper; } QGstreamerBusHelper *bus() { return m_busHelper; }
CameraRole cameraRole() const;
QList< QPair<int,int> > supportedFrameRates(const QSize &frameSize, bool *continuous) const; QList< QPair<int,int> > supportedFrameRates(const QSize &frameSize, bool *continuous) const;
QList<QSize> supportedResolutions(QPair<int,int> rate, bool *continuous, QCamera::CaptureModes mode) const; QList<QSize> supportedResolutions(QPair<int,int> rate, bool *continuous, QCamera::CaptureModes mode) const;
@@ -121,17 +114,16 @@ public:
CameraBinImageEncoder *imageEncodeControl() const { return m_imageEncodeControl; } CameraBinImageEncoder *imageEncodeControl() const { return m_imageEncodeControl; }
#ifdef HAVE_GST_PHOTOGRAPHY #ifdef HAVE_GST_PHOTOGRAPHY
CameraBinExposure *cameraExposureControl() const { return m_cameraExposureControl; } CameraBinExposure *cameraExposureControl();
CameraBinFlash *cameraFlashControl() const { return m_cameraFlashControl; } CameraBinFlash *cameraFlashControl();
CameraBinFocus *cameraFocusControl() const { return m_cameraFocusControl; } CameraBinFocus *cameraFocusControl();
CameraBinLocks *cameraLocksControl() const { return m_cameraLocksControl; } CameraBinLocks *cameraLocksControl();
CameraBinZoom *cameraZoomControl() const { return m_cameraZoomControl; }
#endif #endif
CameraBinZoom *cameraZoomControl() const { return m_cameraZoomControl; }
CameraBinImageProcessing *imageProcessingControl() const { return m_imageProcessingControl; } CameraBinImageProcessing *imageProcessingControl() const { return m_imageProcessingControl; }
CameraBinCaptureDestination *captureDestinationControl() const { return m_captureDestinationControl; } CameraBinCaptureDestination *captureDestinationControl() const { return m_captureDestinationControl; }
CameraBinCaptureBufferFormat *captureBufferFormatControl() const { return m_captureBufferFormatControl; } CameraBinCaptureBufferFormat *captureBufferFormatControl() const { return m_captureBufferFormatControl; }
CameraBinViewfinderSettings *viewfinderSettingsControl() const { return m_viewfinderSettingsControl; }
CameraBinRecorder *recorderControl() const { return m_recorderControl; } CameraBinRecorder *recorderControl() const { return m_recorderControl; }
CameraBinContainer *mediaContainerControl() const { return m_mediaContainerControl; } CameraBinContainer *mediaContainerControl() const { return m_mediaContainerControl; }
@@ -146,9 +138,13 @@ public:
QObject *viewfinder() const { return m_viewfinder; } QObject *viewfinder() const { return m_viewfinder; }
void setViewfinder(QObject *viewfinder); void setViewfinder(QObject *viewfinder);
QList<QCameraViewfinderSettings> supportedViewfinderSettings() const;
QCameraViewfinderSettings viewfinderSettings() const;
void setViewfinderSettings(const QCameraViewfinderSettings &settings) { m_viewfinderSettings = settings; }
void captureImage(int requestId, const QString &fileName); void captureImage(int requestId, const QString &fileName);
QCamera::State state() const; QCamera::Status status() const;
QCamera::State pendingState() const; QCamera::State pendingState() const;
bool isBusy() const; bool isBusy() const;
@@ -163,7 +159,7 @@ public:
bool processBusMessage(const QGstreamerMessage &message); bool processBusMessage(const QGstreamerMessage &message);
signals: signals:
void stateChanged(QCamera::State state); void statusChanged(QCamera::Status status);
void pendingStateChanged(QCamera::State state); void pendingStateChanged(QCamera::State state);
void durationChanged(qint64 duration); void durationChanged(qint64 duration);
void error(int error, const QString &errorString); void error(int error, const QString &errorString);
@@ -183,11 +179,22 @@ public slots:
private slots: private slots:
void handleViewfinderChange(); void handleViewfinderChange();
void setupCaptureResolution();
private: private:
void load();
void unload();
void start();
void stop();
void setStatus(QCamera::Status status);
void setStateHelper(QCamera::State state);
void setError(int error, const QString &errorString);
bool setupCameraBin(); bool setupCameraBin();
void setupCaptureResolution();
void setAudioCaptureCaps(); void setAudioCaptureCaps();
GstCaps *supportedCaps(QCamera::CaptureModes mode) const;
void updateSupportedViewfinderSettings();
static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d); static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d);
static void elementAdded(GstBin *bin, GstElement *element, CameraBinSession *session); static void elementAdded(GstBin *bin, GstElement *element, CameraBinSession *session);
@@ -197,7 +204,7 @@ private:
QUrl m_actualSink; QUrl m_actualSink;
bool m_recordingActive; bool m_recordingActive;
QString m_captureDevice; QString m_captureDevice;
QCamera::State m_state; QCamera::Status m_status;
QCamera::State m_pendingState; QCamera::State m_pendingState;
QString m_inputDevice; QString m_inputDevice;
bool m_muted; bool m_muted;
@@ -210,6 +217,9 @@ private:
QGstreamerElementFactory *m_videoInputFactory; QGstreamerElementFactory *m_videoInputFactory;
QObject *m_viewfinder; QObject *m_viewfinder;
QGstreamerVideoRendererInterface *m_viewfinderInterface; QGstreamerVideoRendererInterface *m_viewfinderInterface;
QList<QCameraViewfinderSettings> m_supportedViewfinderSettings;
QCameraViewfinderSettings m_viewfinderSettings;
QCameraViewfinderSettings m_actualViewfinderSettings;
CameraBinControl *m_cameraControl; CameraBinControl *m_cameraControl;
CameraBinAudioEncoder *m_audioEncodeControl; CameraBinAudioEncoder *m_audioEncodeControl;
@@ -222,22 +232,35 @@ private:
CameraBinFlash *m_cameraFlashControl; CameraBinFlash *m_cameraFlashControl;
CameraBinFocus *m_cameraFocusControl; CameraBinFocus *m_cameraFocusControl;
CameraBinLocks *m_cameraLocksControl; CameraBinLocks *m_cameraLocksControl;
CameraBinZoom *m_cameraZoomControl;
#endif #endif
CameraBinZoom *m_cameraZoomControl;
CameraBinImageProcessing *m_imageProcessingControl; CameraBinImageProcessing *m_imageProcessingControl;
CameraBinCaptureDestination *m_captureDestinationControl; CameraBinCaptureDestination *m_captureDestinationControl;
CameraBinCaptureBufferFormat *m_captureBufferFormatControl; CameraBinCaptureBufferFormat *m_captureBufferFormatControl;
CameraBinViewfinderSettings *m_viewfinderSettingsControl;
QGstreamerBusHelper *m_busHelper; QGstreamerBusHelper *m_busHelper;
GstBus* m_bus; GstBus* m_bus;
GstElement *m_camerabin; GstElement *m_camerabin;
GstElement *m_cameraSrc;
GstElement *m_videoSrc; GstElement *m_videoSrc;
GstElement *m_viewfinderElement; GstElement *m_viewfinderElement;
GstElementFactory *m_sourceFactory; GstElementFactory *m_sourceFactory;
bool m_viewfinderHasChanged; bool m_viewfinderHasChanged;
bool m_videoInputHasChanged; bool m_inputDeviceHasChanged;
bool m_usingWrapperCameraBinSrc;
class ViewfinderProbe : public QGstreamerBufferProbe {
public:
ViewfinderProbe(CameraBinSession *s)
: QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
, session(s)
{}
void probeCaps(GstCaps *caps);
private:
CameraBinSession * const session;
} m_viewfinderProbe;
GstElement *m_audioSrc; GstElement *m_audioSrc;
GstElement *m_audioConvert; GstElement *m_audioConvert;
@@ -1,6 +1,7 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Jolla Ltd. ** Copyright (C) 2013 Jolla Ltd.
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/ ** Contact: http://www.qt.io/licensing/
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -33,14 +34,14 @@
#include "camerabinviewfindersettings.h" #include "camerabinviewfindersettings.h"
#include "camerabinsession.h"
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
CameraBinViewfinderSettings::CameraBinViewfinderSettings(QObject *parent)
: QCameraViewfinderSettingsControl(parent), CameraBinViewfinderSettings::CameraBinViewfinderSettings(CameraBinSession *session)
m_minimumFrameRate(0), : QCameraViewfinderSettingsControl(session)
m_maximumFrameRate(0) , m_session(session)
{ {
} }
@@ -52,11 +53,11 @@ bool CameraBinViewfinderSettings::isViewfinderParameterSupported(ViewfinderParam
{ {
switch (parameter) { switch (parameter) {
case Resolution: case Resolution:
case PixelAspectRatio:
case MinimumFrameRate: case MinimumFrameRate:
case MaximumFrameRate: case MaximumFrameRate:
return true;
case PixelAspectRatio:
case PixelFormat: case PixelFormat:
return true;
case UserParameter: case UserParameter:
return false; return false;
} }
@@ -67,13 +68,15 @@ QVariant CameraBinViewfinderSettings::viewfinderParameter(ViewfinderParameter pa
{ {
switch (parameter) { switch (parameter) {
case Resolution: case Resolution:
return m_resolution; return m_session->viewfinderSettings().resolution();
case MinimumFrameRate:
return m_minimumFrameRate;
case MaximumFrameRate:
return m_maximumFrameRate;
case PixelAspectRatio: case PixelAspectRatio:
return m_session->viewfinderSettings().pixelAspectRatio();
case MinimumFrameRate:
return m_session->viewfinderSettings().minimumFrameRate();
case MaximumFrameRate:
return m_session->viewfinderSettings().maximumFrameRate();
case PixelFormat: case PixelFormat:
return m_session->viewfinderSettings().pixelFormat();
case UserParameter: case UserParameter:
return QVariant(); return QVariant();
} }
@@ -82,36 +85,28 @@ QVariant CameraBinViewfinderSettings::viewfinderParameter(ViewfinderParameter pa
void CameraBinViewfinderSettings::setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value) void CameraBinViewfinderSettings::setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value)
{ {
QCameraViewfinderSettings settings = m_session->viewfinderSettings();
switch (parameter) { switch (parameter) {
case Resolution: case Resolution:
m_resolution = value.toSize(); settings.setResolution(value.toSize());
break;
case MinimumFrameRate:
m_minimumFrameRate = value.toFloat();
break;
case MaximumFrameRate:
m_maximumFrameRate = value.toFloat();
break; break;
case PixelAspectRatio: case PixelAspectRatio:
settings.setPixelAspectRatio(value.toSize());
break;
case MinimumFrameRate:
settings.setMinimumFrameRate(value.toReal());
break;
case MaximumFrameRate:
settings.setMaximumFrameRate(value.toReal());
break;
case PixelFormat: case PixelFormat:
settings.setPixelFormat(qvariant_cast<QVideoFrame::PixelFormat>(value));
case UserParameter: case UserParameter:
break; break;
} }
}
QSize CameraBinViewfinderSettings::resolution() const m_session->setViewfinderSettings(settings);
{
return m_resolution;
}
qreal CameraBinViewfinderSettings::minimumFrameRate() const
{
return m_minimumFrameRate;
}
qreal CameraBinViewfinderSettings::maximumFrameRate() const
{
return m_maximumFrameRate;
} }
QT_END_NAMESPACE QT_END_NAMESPACE
@@ -1,6 +1,7 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Jolla Ltd. ** Copyright (C) 2013 Jolla Ltd.
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/ ** Contact: http://www.qt.io/licensing/
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -36,29 +37,23 @@
#include <qcameraviewfindersettingscontrol.h> #include <qcameraviewfindersettingscontrol.h>
#include <QtCore/qsize.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class CameraBinSession;
class CameraBinViewfinderSettings : public QCameraViewfinderSettingsControl class CameraBinViewfinderSettings : public QCameraViewfinderSettingsControl
{ {
Q_OBJECT Q_OBJECT
public: public:
CameraBinViewfinderSettings(QObject *parent); CameraBinViewfinderSettings(CameraBinSession *session);
~CameraBinViewfinderSettings(); ~CameraBinViewfinderSettings();
bool isViewfinderParameterSupported(ViewfinderParameter parameter) const; bool isViewfinderParameterSupported(ViewfinderParameter parameter) const;
QVariant viewfinderParameter(ViewfinderParameter parameter) const; QVariant viewfinderParameter(ViewfinderParameter parameter) const;
void setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value); void setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value);
QSize resolution() const;
qreal minimumFrameRate() const;
qreal maximumFrameRate() const;
private: private:
QSize m_resolution; CameraBinSession *m_session;
qreal m_minimumFrameRate;
qreal m_maximumFrameRate;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE
@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "camerabinviewfindersettings2.h"
#include "camerabinsession.h"
QT_BEGIN_NAMESPACE
CameraBinViewfinderSettings2::CameraBinViewfinderSettings2(CameraBinSession *session)
: QCameraViewfinderSettingsControl2(session)
, m_session(session)
{
}
CameraBinViewfinderSettings2::~CameraBinViewfinderSettings2()
{
}
QList<QCameraViewfinderSettings> CameraBinViewfinderSettings2::supportedViewfinderSettings() const
{
return m_session->supportedViewfinderSettings();
}
QCameraViewfinderSettings CameraBinViewfinderSettings2::viewfinderSettings() const
{
return m_session->viewfinderSettings();
}
void CameraBinViewfinderSettings2::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
m_session->setViewfinderSettings(settings);
}
QT_END_NAMESPACE
@@ -0,0 +1,61 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef CAMERABINVIEWFINDERSETTINGS2_H
#define CAMERABINVIEWFINDERSETTINGS2_H
#include <qcameraviewfindersettingscontrol.h>
QT_BEGIN_NAMESPACE
class CameraBinSession;
class CameraBinViewfinderSettings2 : public QCameraViewfinderSettingsControl2
{
Q_OBJECT
public:
CameraBinViewfinderSettings2(CameraBinSession *session);
~CameraBinViewfinderSettings2();
QList<QCameraViewfinderSettings> supportedViewfinderSettings() const;
QCameraViewfinderSettings viewfinderSettings() const;
void setViewfinderSettings(const QCameraViewfinderSettings &settings);
private:
CameraBinSession *m_session;
};
QT_END_NAMESPACE
#endif // CAMERABINVIEWFINDERSETTINGS2_H
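From the application side, this new QCameraViewfinderSettingsControl2 backend is what QCamera's viewfinder settings API talks to in Qt 5.5. A minimal usage sketch, assuming only the public Qt 5.5 API; the chosen resolution, frame rate and pixel format are arbitrary examples:

    // Hypothetical application-side usage of the new viewfinder settings path.
    #include <QCamera>
    #include <QCameraViewfinderSettings>
    #include <QVideoFrame>

    void configureViewfinder(QCamera *camera)
    {
        QCameraViewfinderSettings settings;
        settings.setResolution(1280, 720);
        settings.setMaximumFrameRate(30.0);
        settings.setPixelFormat(QVideoFrame::Format_YUYV);

        camera->setViewfinderSettings(settings);    // routed to the GStreamer backend
    }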
@@ -34,8 +34,6 @@
#include "camerabinzoom.h" #include "camerabinzoom.h"
#include "camerabinsession.h" #include "camerabinsession.h"
#include <gst/interfaces/photography.h>
#define ZOOM_PROPERTY "zoom" #define ZOOM_PROPERTY "zoom"
#define MAX_ZOOM_PROPERTY "max-zoom" #define MAX_ZOOM_PROPERTY "max-zoom"
@@ -131,7 +131,6 @@ QVariant BbCameraExposureControl::requestedValue(ExposureParameter parameter) co
QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment
return QVariantList(); return QVariantList();
@@ -161,11 +160,10 @@ QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
case CAMERA_SCENE_NIGHT: case CAMERA_SCENE_NIGHT:
return QVariant::fromValue(QCameraExposure::ExposureNight); return QVariant::fromValue(QCameraExposure::ExposureNight);
default: default:
return QVariant(); break;
} }
#else
return QVariant(); return QVariant();
#endif
} }
bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value) bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)
@@ -37,14 +37,11 @@
#include <QDebug> #include <QDebug>
#include <QUrl> #include <QUrl>
#ifndef Q_OS_BLACKBERRY_TABLET
#include <audio/audio_manager_device.h> #include <audio/audio_manager_device.h>
#include <audio/audio_manager_volume.h> #include <audio/audio_manager_volume.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
#ifndef Q_OS_BLACKBERRY_TABLET
static audio_manager_device_t currentAudioInputDevice() static audio_manager_device_t currentAudioInputDevice()
{ {
audio_manager_device_t device = AUDIO_DEVICE_HEADSET; audio_manager_device_t device = AUDIO_DEVICE_HEADSET;
@@ -57,7 +54,6 @@ static audio_manager_device_t currentAudioInputDevice()
return device; return device;
} }
#endif
BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent) BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent)
: QMediaRecorderControl(parent) : QMediaRecorderControl(parent)
@@ -99,13 +95,12 @@ bool BbCameraMediaRecorderControl::isMuted() const
{ {
bool muted = false; bool muted = false;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted); const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted);
if (result != EOK) { if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status")); emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status"));
return false; return false;
} }
#endif
return muted; return muted;
} }
@@ -113,13 +108,11 @@ qreal BbCameraMediaRecorderControl::volume() const
{ {
double level = 0.0; double level = 0.0;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level); const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level);
if (result != EOK) { if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume")); emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume"));
return 0.0; return 0.0;
} }
#endif
return (level / 100); return (level / 100);
} }
@@ -136,26 +129,22 @@ void BbCameraMediaRecorderControl::setState(QMediaRecorder::State state)
void BbCameraMediaRecorderControl::setMuted(bool muted) void BbCameraMediaRecorderControl::setMuted(bool muted)
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted); const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted);
if (result != EOK) { if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status")); emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status"));
} else { } else {
emit mutedChanged(muted); emit mutedChanged(muted);
} }
#endif
} }
void BbCameraMediaRecorderControl::setVolume(qreal volume) void BbCameraMediaRecorderControl::setVolume(qreal volume)
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100)); const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100));
if (result != EOK) { if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume")); emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume"));
} else { } else {
emit volumeChanged(volume); emit volumeChanged(volume);
} }
#endif
} }
QT_END_NAMESPACE QT_END_NAMESPACE
@@ -63,11 +63,9 @@ BbCameraOrientationHandler::BbCameraOrientationHandler(QObject *parent)
BbCameraOrientationHandler::~BbCameraOrientationHandler() BbCameraOrientationHandler::~BbCameraOrientationHandler()
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = orientation_stop_events(0); const int result = orientation_stop_events(0);
if (result == BPS_FAILURE) if (result == BPS_FAILURE)
qWarning() << "Unable to unregister for orientation change events"; qWarning() << "Unable to unregister for orientation change events";
#endif
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this); QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
} }
@@ -80,10 +78,9 @@ bool BbCameraOrientationHandler::nativeEventFilter(const QByteArray&, void *mess
const int angle = orientation_event_get_angle(event); const int angle = orientation_event_get_angle(event);
if (angle != m_orientation) { if (angle != m_orientation) {
#ifndef Q_OS_BLACKBERRY_TABLET
if (angle == 180) // The screen does not rotate at 180 degrees if (angle == 180) // The screen does not rotate at 180 degrees
return false; return false;
#endif
m_orientation = angle; m_orientation = angle;
emit orientationChanged(m_orientation); emit orientationChanged(m_orientation);
} }
@@ -83,7 +83,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("Callback registration failed"); return QLatin1String("Callback registration failed");
case CAMERA_EMICINUSE: case CAMERA_EMICINUSE:
return QLatin1String("Microphone in use already"); return QLatin1String("Microphone in use already");
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_ENODATA: case CAMERA_ENODATA:
return QLatin1String("Data does not exist"); return QLatin1String("Data does not exist");
case CAMERA_EBUSY: case CAMERA_EBUSY:
@@ -98,7 +97,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("3A have been locked"); return QLatin1String("3A have been locked");
// case CAMERA_EVIEWFINDERFROZEN: // not yet available in 10.2 NDK // case CAMERA_EVIEWFINDERFROZEN: // not yet available in 10.2 NDK
// return QLatin1String("Freeze flag set"); // return QLatin1String("Freeze flag set");
#endif
default: default:
return QLatin1String("Unknown error"); return QLatin1String("Unknown error");
} }
@@ -561,7 +559,6 @@ void BbCameraSession::applyVideoSettings()
const QSize resolution = m_videoEncoderSettings.resolution(); const QSize resolution = m_videoEncoderSettings.resolution();
#ifndef Q_OS_BLACKBERRY_TABLET
QString videoCodec = m_videoEncoderSettings.codec(); QString videoCodec = m_videoEncoderSettings.codec();
if (videoCodec.isEmpty()) if (videoCodec.isEmpty())
videoCodec = QLatin1String("h264"); videoCodec = QLatin1String("h264");
@@ -599,11 +596,6 @@ void BbCameraSession::applyVideoSettings()
CAMERA_IMGPROP_ROTATION, rotationAngle, CAMERA_IMGPROP_ROTATION, rotationAngle,
CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec, CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec,
CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec); CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec);
#else
result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height());
#endif
if (result != CAMERA_EOK) { if (result != CAMERA_EOK) {
qWarning() << "Unable to apply video settings:" << result; qWarning() << "Unable to apply video settings:" << result;
@@ -864,13 +856,10 @@ static void viewFinderStatusCallback(camera_handle_t handle, camera_devstatus_t
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "focusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value)); QMetaObject::invokeMethod(session, "focusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value));
return; return;
} } else if (status == CAMERA_STATUS_POWERUP) {
#ifndef Q_OS_BLACKBERRY_TABLET
else if (status == CAMERA_STATUS_POWERUP) {
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection);
} }
#endif
} }
bool BbCameraSession::startViewFinder() bool BbCameraSession::startViewFinder()
@@ -1027,7 +1016,6 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
Q_UNUSED(handle) Q_UNUSED(handle)
Q_UNUSED(value) Q_UNUSED(value)
#ifndef Q_OS_BLACKBERRY_TABLET
if (status == CAMERA_STATUS_VIDEO_PAUSE) { if (status == CAMERA_STATUS_VIDEO_PAUSE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection);
@@ -1035,7 +1023,6 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection);
} }
#endif
} }
bool BbCameraSession::startVideoRecording() bool BbCameraSession::startVideoRecording()
@@ -148,12 +148,10 @@ QVariant BbCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParame
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_CBYCRY: case CAMERA_FRAMETYPE_CBYCRY:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_FRAMETYPE_COMPRESSEDVIDEO: case CAMERA_FRAMETYPE_COMPRESSEDVIDEO:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_COMPRESSEDAUDIO: case CAMERA_FRAMETYPE_COMPRESSEDAUDIO:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
#endif
default: default:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
} }
@@ -48,8 +48,5 @@ SOURCES += \
$$PWD/bbvideodeviceselectorcontrol.cpp \ $$PWD/bbvideodeviceselectorcontrol.cpp \
$$PWD/bbvideorenderercontrol.cpp $$PWD/bbvideorenderercontrol.cpp
LIBS += -lcamapi LIBS += -lcamapi -laudio_manager
!blackberry-playbook {
LIBS += -laudio_manager
}
@@ -127,30 +127,6 @@ void WindowGrabber::start()
int result = 0; int result = 0;
#ifdef Q_OS_BLACKBERRY_TABLET
// HACK: On the Playbook, screen_read_window() will fail for invisible windows.
// To workaround this, make the window visible again, but set a global
// alpha of less than 255. The global alpha makes the window completely invisible
// (due to a bug?), but screen_read_window() will work again.
errno = 0;
int val = 200; // anything less than 255
result = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_GLOBAL_ALPHA, &val);
if (result != 0) {
qWarning() << "WindowGrabber: unable to set global alpha:" << strerror(errno);
return;
}
errno = 0;
val = 1;
result = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_VISIBLE, &val);
if (result != 0) {
qWarning() << "WindowGrabber: unable to make window visible:" << strerror(errno);
return;
}
#endif
result = screen_create_context(&m_screenContext, SCREEN_APPLICATION_CONTEXT); result = screen_create_context(&m_screenContext, SCREEN_APPLICATION_CONTEXT);
if (result != 0) { if (result != 0) {
qWarning() << "WindowGrabber: cannot create screen context:" << strerror(errno); qWarning() << "WindowGrabber: cannot create screen context:" << strerror(errno);
@@ -454,7 +454,9 @@ QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)
pPropBag->Release(); pPropBag->Release();
pMoniker->Release(); pMoniker->Release();
} }
pEnum->Release();
} }
pDevEnum->Release();
} }
CoUninitialize(); CoUninitialize();
#else // Q_OS_WINCE #else // Q_OS_WINCE
@@ -430,7 +430,7 @@ void QWindowsAudioInput::initMixer()
return; return;
mixerID = (HMIXEROBJ)mixerIntID; mixerID = (HMIXEROBJ)mixerIntID;
// Get the Destination (Recording) Line Infomation // Get the Destination (Recording) Line Information
MIXERLINE mixerLine; MIXERLINE mixerLine;
mixerLine.cbStruct = sizeof(MIXERLINE); mixerLine.cbStruct = sizeof(MIXERLINE);
mixerLine.dwComponentType = MIXERLINE_COMPONENTTYPE_DST_WAVEIN; mixerLine.dwComponentType = MIXERLINE_COMPONENTTYPE_DST_WAVEIN;
@@ -1296,7 +1296,7 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
// (which might be earlier than the last decoded key frame) // (which might be earlier than the last decoded key frame)
resetPosition = true; resetPosition = true;
} else if (cmdNow == CmdPause) { } else if (cmdNow == CmdPause) {
// If paused, dont reset the position until we resume, otherwise // If paused, don't reset the position until we resume, otherwise
// a new frame will be rendered // a new frame will be rendered
m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime); m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
m_request.setCommand(CmdSeekResume); m_request.setCommand(CmdSeekResume);
File diffs for 10 further files suppressed because they are too large.