Merge dev into 5.5

Change-Id: I715a549b4cc10220a6e3b48799fdc38865a9345e
This commit is contained in:
Oswald Buddenhagen
2015-02-24 11:02:24 +01:00
55 changed files with 129348 additions and 30327 deletions

View File

@@ -149,15 +149,10 @@ QMap<QByteArray, QVariant> QGstUtils::gstTagListToMap(const GstTagList *tags)
*/
QSize QGstUtils::capsResolution(const GstCaps *caps)
{
QSize size;
if (gst_caps_get_size(caps) == 0)
return QSize();
if (caps) {
const GstStructure *structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
}
return size;
return structureResolution(gst_caps_get_structure(caps, 0));
}
/*!
@@ -169,14 +164,12 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
QSize size;
if (caps) {
const GstStructure *structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
size = capsResolution(caps);
gint aspectNum = 0;
gint aspectDenum = 0;
if (!size.isEmpty() && gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
gst_caps_get_structure(caps, 0), "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0)
size.setWidth(size.width()*aspectNum/aspectDenum);
}
@@ -1048,20 +1041,23 @@ static int indexOfRgbColor(
QVideoSurfaceFormat QGstUtils::formatForCaps(
GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{
if (gst_video_info_from_caps(info, caps)) {
int index = indexOfVideoFormat(info->finfo->format);
GstVideoInfo vidInfo;
GstVideoInfo *infoPtr = info ? info : &vidInfo;
if (gst_video_info_from_caps(infoPtr, caps)) {
int index = indexOfVideoFormat(infoPtr->finfo->format);
if (index != -1) {
QVideoSurfaceFormat format(
QSize(info->width, info->height),
QSize(infoPtr->width, infoPtr->height),
qt_videoFormatLookup[index].pixelFormat,
handleType);
if (info->fps_d > 0)
format.setFrameRate(qreal(info->fps_d) / info->fps_n);
if (infoPtr->fps_d > 0)
format.setFrameRate(qreal(infoPtr->fps_n) / infoPtr->fps_d);
if (info->par_d > 0)
format.setPixelAspectRatio(info->par_n, info->par_d);
if (infoPtr->par_d > 0)
format.setPixelAspectRatio(infoPtr->par_n, infoPtr->par_d);
return format;
}
@@ -1076,60 +1072,18 @@ QVideoSurfaceFormat QGstUtils::formatForCaps(
{
const GstStructure *structure = gst_caps_get_structure(caps, 0);
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
int bitsPerPixel = 0;
QSize size;
gst_structure_get_int(structure, "width", &size.rwidth());
gst_structure_get_int(structure, "height", &size.rheight());
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
}
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1)
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
}
QSize size = structureResolution(structure);
QVideoFrame::PixelFormat pixelFormat = structurePixelFormat(structure, &bitsPerPixel);
if (pixelFormat != QVideoFrame::Format_Invalid) {
QVideoSurfaceFormat format(size, pixelFormat, handleType);
QPair<int, int> rate;
gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
QPair<qreal, qreal> rate = structureFrameRateRange(structure);
if (rate.second)
format.setFrameRate(qreal(rate.first)/rate.second);
format.setFrameRate(rate.second);
gint aspectNum = 0;
gint aspectDenum = 0;
if (gst_structure_get_fraction(
structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
if (aspectDenum > 0)
format.setPixelAspectRatio(aspectNum, aspectDenum);
}
format.setPixelAspectRatio(structurePixelAspectRatio(structure));
if (bytesPerLine)
*bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
@@ -1304,6 +1258,118 @@ GstCaps *QGstUtils::videoFilterCaps()
return gst_caps_make_writable(gst_static_caps_get(&staticCaps));
}
// Reads the integer "width" and "height" fields of a GstStructure.
// Returns an invalid (default-constructed) QSize when the structure is
// null or either field is missing; both fields must be present for a
// valid result.
QSize QGstUtils::structureResolution(const GstStructure *s)
{
    int width = 0;
    int height = 0;
    const bool haveBoth = s
            && gst_structure_get_int(s, "width", &width)
            && gst_structure_get_int(s, "height", &height);
    if (!haveBoth)
        return QSize();
    return QSize(width, height);
}
// Maps a caps structure to the corresponding QVideoFrame pixel format.
//
// \a structure  caps structure to inspect (may be null, in which case
//               Format_Invalid is returned).
// \a bpp        optional out-parameter receiving bits-per-pixel; only
//               written on the GStreamer 0.10 code path, where the
//               lookup tables carry that information.
//
// Returns QVideoFrame::Format_Invalid when the structure's media type or
// format string/fourcc/RGB masks match no entry in the lookup tables.
QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *structure, int *bpp)
{
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
if (!structure)
return pixelFormat;
#if GST_CHECK_VERSION(1,0,0)
// GStreamer 1.0: a single "video/x-raw" media type with a string
// "format" field; bpp is not provided by this path.
Q_UNUSED(bpp);
if (gst_structure_has_name(structure, "video/x-raw")) {
const gchar *s = gst_structure_get_string(structure, "format");
if (s) {
GstVideoFormat format = gst_video_format_from_string(s);
int index = indexOfVideoFormat(format);
if (index != -1)
pixelFormat = qt_videoFormatLookup[index].pixelFormat;
}
}
#else
// GStreamer 0.10: YUV formats are identified by a fourcc code...
if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
guint32 fourcc = 0;
gst_structure_get_fourcc(structure, "format", &fourcc);
int index = indexOfYuvColor(fourcc);
if (index != -1) {
pixelFormat = qt_yuvColorLookup[index].pixelFormat;
if (bpp)
*bpp = qt_yuvColorLookup[index].bitsPerPixel;
}
// ...while RGB formats are identified by the full combination of
// depth, endianness and channel masks.
} else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
int bitsPerPixel = 0;
int depth = 0;
int endianness = 0;
int red = 0;
int green = 0;
int blue = 0;
int alpha = 0;
gst_structure_get_int(structure, "bpp", &bitsPerPixel);
gst_structure_get_int(structure, "depth", &depth);
gst_structure_get_int(structure, "endianness", &endianness);
gst_structure_get_int(structure, "red_mask", &red);
gst_structure_get_int(structure, "green_mask", &green);
gst_structure_get_int(structure, "blue_mask", &blue);
gst_structure_get_int(structure, "alpha_mask", &alpha);
int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
if (index != -1) {
pixelFormat = qt_rgbColorLookup[index].pixelFormat;
if (bpp)
*bpp = qt_rgbColorLookup[index].bitsPerPixel;
}
}
#endif
return pixelFormat;
}
// Reads the "pixel-aspect-ratio" fraction of a GstStructure and returns
// it as a QSize (numerator in width, denominator in height). Falls back
// to a 1:1 ratio when the structure is null, the field is absent, or the
// denominator is not positive.
QSize QGstUtils::structurePixelAspectRatio(const GstStructure *s)
{
    gint numerator = 0;
    gint denominator = 0;
    const bool present = s && gst_structure_get_fraction(
            s, "pixel-aspect-ratio", &numerator, &denominator);
    if (present && denominator > 0)
        return QSize(numerator, denominator);
    return QSize(1, 1);
}
// Extracts the frame-rate range of a caps structure as a (min, max) pair
// in frames per second. A fixed "framerate" field yields min == max;
// otherwise "max-framerate" is consulted, with "min-framerate" supplying
// the lower bound (defaulting to 1 fps when absent). Returns (0, 0) when
// the structure is null or carries no rate information.
QPair<qreal, qreal> QGstUtils::structureFrameRateRange(const GstStructure *s)
{
    QPair<qreal, qreal> range;
    if (!s)
        return range;

    int num = 0;
    int den = 0;
    if (gst_structure_get_fraction(s, "framerate", &num, &den)) {
        const qreal fixed = qreal(num) / den;
        range.first = fixed;
        range.second = fixed;
    } else if (gst_structure_get_fraction(s, "max-framerate", &num, &den)) {
        range.second = qreal(num) / den;
        range.first = gst_structure_get_fraction(s, "min-framerate", &num, &den)
                ? qreal(num) / den
                : qreal(1);
    }
    return range;
}
void qt_gst_object_ref_sink(gpointer object)
{
#if GST_CHECK_VERSION(0,10,24)
@@ -1331,6 +1397,15 @@ GstCaps *qt_gst_pad_get_current_caps(GstPad *pad)
#endif
}
// Version-portability wrapper: returns the caps a pad can handle,
// bridging the 0.10 -> 1.0 API rename. Both branches return a reference
// the caller must unref.
GstCaps *qt_gst_pad_get_caps(GstPad *pad)
{
#if GST_CHECK_VERSION(1,0,0)
return gst_pad_query_caps(pad, NULL);
#else
return gst_pad_get_caps_reffed(pad);
#endif
}
GstStructure *qt_gst_structure_new_empty(const char *name)
{
#if GST_CHECK_VERSION(1,0,0)
@@ -1358,6 +1433,19 @@ gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gi
#endif
}
// Version-portability wrapper around gst_caps_normalize() that gives the
// 0.10 API the same ownership semantics as 1.0: the argument is consumed
// and a normalized caps is returned.
GstCaps *qt_gst_caps_normalize(GstCaps *caps)
{
#if GST_CHECK_VERSION(1,0,0)
// gst_caps_normalize() takes ownership of the argument in 1.0
return gst_caps_normalize(caps);
#else
// in 0.10, it doesn't. Unref the argument to mimic the 1.0 behavior
GstCaps *res = gst_caps_normalize(caps);
gst_caps_unref(caps);
return res;
#endif
}
QDebug operator <<(QDebug debug, GstCaps *caps)
{
if (caps) {

View File

@@ -114,10 +114,9 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surfac
, m_activeRenderer(0)
, m_surfaceCaps(0)
, m_startCaps(0)
, m_lastBuffer(0)
, m_renderBuffer(0)
, m_notified(false)
, m_stop(false)
, m_render(false)
, m_flush(false)
{
foreach (QObject *instance, rendererLoader()->instances(QGstVideoRendererPluginKey)) {
@@ -137,6 +136,8 @@ QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
if (m_surfaceCaps)
gst_caps_unref(m_surfaceCaps);
if (m_startCaps)
gst_caps_unref(m_startCaps);
}
GstCaps *QVideoSurfaceGstDelegate::caps()
@@ -157,13 +158,6 @@ bool QVideoSurfaceGstDelegate::start(GstCaps *caps)
m_stop = true;
}
m_render = false;
if (m_lastBuffer) {
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = 0;
}
if (m_startCaps)
gst_caps_unref(m_startCaps);
m_startCaps = caps;
@@ -204,11 +198,6 @@ void QVideoSurfaceGstDelegate::stop()
m_startCaps = 0;
}
if (m_lastBuffer) {
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = 0;
}
waitForAsyncEvent(&locker, &m_setupCondition, 500);
}
@@ -225,68 +214,19 @@ bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
}
}
void QVideoSurfaceGstDelegate::flush()
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
{
QMutexLocker locker(&m_mutex);
m_flush = true;
m_render = false;
m_renderBuffer = buffer;
if (m_lastBuffer) {
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = 0;
}
GstFlowReturn flowReturn = waitForAsyncEvent(&locker, &m_renderCondition, 300)
? m_renderReturn
: GST_FLOW_ERROR;
notify();
}
m_renderBuffer = 0;
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer, bool show)
{
QMutexLocker locker(&m_mutex);
if (m_lastBuffer)
gst_buffer_unref(m_lastBuffer);
m_lastBuffer = buffer;
gst_buffer_ref(m_lastBuffer);
if (show) {
m_render = true;
return waitForAsyncEvent(&locker, &m_renderCondition, 300)
? m_renderReturn
: GST_FLOW_ERROR;
} else {
return GST_FLOW_OK;
}
}
void QVideoSurfaceGstDelegate::handleShowPrerollChange(GObject *object, GParamSpec *, gpointer d)
{
QVideoSurfaceGstDelegate * const delegate = static_cast<QVideoSurfaceGstDelegate *>(d);
gboolean showPreroll = true; // "show-preroll-frame" property is true by default
g_object_get(object, "show-preroll-frame", &showPreroll, NULL);
GstState state = GST_STATE_NULL;
GstState pendingState = GST_STATE_NULL;
gst_element_get_state(GST_ELEMENT(object), &state, &pendingState, 0);
const bool paused
= (pendingState == GST_STATE_VOID_PENDING && state == GST_STATE_PAUSED)
|| pendingState == GST_STATE_PAUSED;
if (paused) {
QMutexLocker locker(&delegate->m_mutex);
if (!showPreroll && delegate->m_lastBuffer) {
delegate->m_render = false;
delegate->m_flush = true;
delegate->notify();
} else if (delegate->m_lastBuffer) {
delegate->m_render = true;
delegate->notify();
}
}
return flowReturn;
}
bool QVideoSurfaceGstDelegate::event(QEvent *event)
@@ -350,11 +290,9 @@ bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
}
gst_caps_unref(startCaps);
} else if (m_render) {
m_render = false;
if (m_activeRenderer && m_surface && m_lastBuffer) {
GstBuffer *buffer = m_lastBuffer;
} else if (m_renderBuffer) {
if (m_activeRenderer && m_surface) {
GstBuffer *buffer = m_renderBuffer;
gst_buffer_ref(buffer);
locker->unlock();
@@ -442,12 +380,6 @@ QGstVideoRendererSink *QGstVideoRendererSink::createSink(QAbstractVideoSurface *
sink->delegate = new QVideoSurfaceGstDelegate(surface);
g_signal_connect(
G_OBJECT(sink),
"notify::show-preroll-frame",
G_CALLBACK(QVideoSurfaceGstDelegate::handleShowPrerollChange),
sink->delegate);
return sink;
}
@@ -487,7 +419,7 @@ void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
base_sink_class->preroll = QGstVideoRendererSink::preroll;
base_sink_class->stop = QGstVideoRendererSink::stop;
base_sink_class->render = QGstVideoRendererSink::render;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
@@ -578,20 +510,17 @@ gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *
return sink->delegate->proposeAllocation(query);
}
GstFlowReturn QGstVideoRendererSink::preroll(GstBaseSink *base, GstBuffer *buffer)
gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
VO_SINK(base);
gboolean showPreroll = true; // "show-preroll-frame" property is true by default
g_object_get(G_OBJECT(base), "show-preroll-frame", &showPreroll, NULL);
return sink->delegate->render(buffer, showPreroll); // display frame
sink->delegate->stop();
return TRUE;
}
GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer)
{
VO_SINK(base);
return sink->delegate->render(buffer, true);
return sink->delegate->render(buffer);
}
QT_END_NAMESPACE

View File

@@ -925,7 +925,7 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
\qmlproperty variant QtMultimedia::Camera::metaData.gpsLongitude
\qmlproperty variant QtMultimedia::Camera::metaData.gpsAltitude
These properties hold the the geographic position in decimal degrees of the
These properties hold the geographic position in decimal degrees of the
camera at time of capture.
\sa {QMediaMetaData}

View File

@@ -123,9 +123,8 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
This property holds a bool value indicating whether the camera
is ready to capture photos or not.
If camera is not ready to capture image immediately,
the capture request is queued with all the related camera settings,
and the request will be executed as soon as possible.
Calling capture() while \e ready is \c false is not permitted and
results in an error.
*/
/*!
@@ -134,11 +133,8 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
This property holds a bool value indicating whether the camera
is ready to capture photos or not.
It's permissible to call capture() while the camera is active
regardless of the \e ready property value.
If camera is not ready to capture image immediately,
the capture request is queued with all the related camera settings,
and the request will be executed as soon as possible.
Calling capture() while \e ready is \c false is not permitted and
results in an error.
*/
bool QDeclarativeCameraCapture::isReadyForCapture() const
{
@@ -157,11 +153,13 @@ bool QDeclarativeCameraCapture::isReadyForCapture() const
for video.
Camera saves all the capture parameters like exposure settings or
image processing parameters, so changes to camera paramaters after
image processing parameters, so changes to camera parameters after
capture() is called do not affect previous capture requests.
CameraCapture::capture returns the capture requestId parameter, used with
capture() returns the capture requestId parameter, used with
imageExposed(), imageCaptured(), imageMetadataAvailable() and imageSaved() signals.
\sa ready
*/
int QDeclarativeCameraCapture::capture()
{

View File

@@ -488,11 +488,8 @@ void QCameraImageCapture::setCaptureDestination(QCameraImageCapture::CaptureDest
\property QCameraImageCapture::readyForCapture
\brief whether the service is ready to capture a an image immediately.
It's permissible to call capture() while the camera status is QCamera::ActiveStatus
regardless of isReadyForCapture property value.
If camera is not ready to capture image immediately,
the capture request is queued with all the related camera settings
to be executed as soon as possible.
Calling capture() while \e readyForCapture is \c false is not permitted and
results in an error.
*/
bool QCameraImageCapture::isReadyForCapture() const
@@ -523,11 +520,13 @@ bool QCameraImageCapture::isReadyForCapture() const
the default directory, with a full path reported with imageCaptured() and imageSaved() signals.
QCamera saves all the capture parameters like exposure settings or
image processing parameters, so changes to camera paramaters after
image processing parameters, so changes to camera parameters after
capture() is called do not affect previous capture requests.
QCameraImageCapture::capture returns the capture Id parameter, used with
imageExposed(), imageCaptured() and imageSaved() signals.
\sa isReadyForCapture()
*/
int QCameraImageCapture::capture(const QString &file)
{

View File

@@ -111,7 +111,7 @@ QCameraImageCaptureControl::~QCameraImageCaptureControl()
The Camera service should save all the capture parameters
like exposure settings or image processing parameters,
so changes to camera paramaters after capture() is called
so changes to camera parameters after capture() is called
do not affect previous capture requests.
Returns the capture request id number, which is used later

View File

@@ -31,7 +31,7 @@
\brief Platform notes for the BlackBerry Platform
Qt Multimedia supports BlackBerry devices that run the BB10 operating system.
This page covers the availibility of different features on BB10.
This page covers the availability of different features on BB10.
\section1 Implementation

View File

@@ -115,7 +115,7 @@ namespace QGstUtils {
QImage bufferToImage(GstBuffer *buffer, const GstVideoInfo &info);
QVideoSurfaceFormat formatForCaps(
GstCaps *caps,
GstVideoInfo *info,
GstVideoInfo *info = 0,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
#else
QImage bufferToImage(GstBuffer *buffer);
@@ -133,13 +133,20 @@ namespace QGstUtils {
GstCaps *videoFilterCaps();
QSize structureResolution(const GstStructure *s);
QVideoFrame::PixelFormat structurePixelFormat(const GstStructure *s, int *bpp = 0);
QSize structurePixelAspectRatio(const GstStructure *s);
QPair<qreal, qreal> structureFrameRateRange(const GstStructure *s);
}
void qt_gst_object_ref_sink(gpointer object);
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad);
GstCaps *qt_gst_pad_get_caps(GstPad *pad);
GstStructure *qt_gst_structure_new_empty(const char *name);
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur);
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur);
GstCaps *qt_gst_caps_normalize(GstCaps *caps);
QDebug operator <<(QDebug debug, GstCaps *caps);

View File

@@ -98,14 +98,10 @@ public:
void stop();
bool proposeAllocation(GstQuery *query);
void flush();
GstFlowReturn render(GstBuffer *buffer, bool show);
GstFlowReturn render(GstBuffer *buffer);
bool event(QEvent *event);
static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
private slots:
bool handleEvent(QMutexLocker *locker);
void updateSupportedFormats();
@@ -126,11 +122,10 @@ private:
GstCaps *m_surfaceCaps;
GstCaps *m_startCaps;
GstBuffer *m_lastBuffer;
GstBuffer *m_renderBuffer;
bool m_notified;
bool m_stop;
bool m_render;
bool m_flush;
};
@@ -156,7 +151,8 @@ private:
static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query);
static GstFlowReturn preroll(GstBaseSink *sink, GstBuffer *buffer);
static gboolean stop(GstBaseSink *sink);
static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);
private:

View File

@@ -335,7 +335,7 @@ void QMediaPlayerPrivate::setPlaylistMedia()
return;
} else if (control != 0) {
// If we've just switched to a new playlist,
// then last emited currentMediaChanged was a playlist.
// then last emitted currentMediaChanged was a playlist.
// Make sure we emit currentMediaChanged if new playlist has
// the same media as the previous one:
// sample.m3u

View File

@@ -51,7 +51,7 @@
player->setVideoOutput(myVideoSurface);
player->setMedia(QUrl::fromLocalFile("observation.mp4"));
player->play(); // Start receving frames as they get presented to myVideoSurface
player->play(); // Start receiving frames as they get presented to myVideoSurface
\endcode
This same approach works with the QCamera object as well, to receive viewfinder or video

View File

@@ -34,7 +34,7 @@
#ifndef QVIDEOPROBE_H
#define QVIDEOPROBE_H
#include <QObject>
#include <QtCore/QObject>
#include <QtMultimedia/qvideoframe.h>
QT_BEGIN_NAMESPACE

View File

@@ -141,8 +141,8 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::start(const QVideoSurf
bool ok = m_imageFormat != QImage::Format_Invalid && !m_imageSize.isEmpty();
#ifndef QT_NO_OPENGL
if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES)
#endif
ok &= format.pixelFormat() != QVideoFrame::Format_RGB24;
#endif
if (ok)
return QAbstractVideoSurface::NoError;
} else if (t == QAbstractVideoBuffer::QPixmapHandle) {

View File

@@ -276,33 +276,38 @@ void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool
if (!m_camera)
return;
QSize viewfinderResolution = m_camera->previewSize();
QSize currentViewfinderResolution = m_camera->previewSize();
const qreal aspectRatio = qreal(captureSize.width()) / qreal(captureSize.height());
if (viewfinderResolution.isValid() &&
qFuzzyCompare(aspectRatio,
qreal(viewfinderResolution.width()) / viewfinderResolution.height())) {
if (currentViewfinderResolution.isValid() &&
qAbs(aspectRatio - (qreal(currentViewfinderResolution.width()) / currentViewfinderResolution.height())) < 0.01) {
return;
}
QSize adjustedViewfinderResolution;
QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
for (int i = previewSizes.count() - 1; i >= 0; --i) {
const QSize &size = previewSizes.at(i);
// search for viewfinder resolution with the same aspect ratio
if (qFuzzyCompare(aspectRatio, (static_cast<qreal>(size.width())/static_cast<qreal>(size.height())))) {
viewfinderResolution = size;
if (qAbs(aspectRatio - (qreal(size.width()) / size.height())) < 0.01) {
adjustedViewfinderResolution = size;
break;
}
}
if (m_camera->previewSize() != viewfinderResolution) {
if (!adjustedViewfinderResolution.isValid()) {
qWarning("Cannot find a viewfinder resolution matching the capture aspect ratio.");
return;
}
if (currentViewfinderResolution != adjustedViewfinderResolution) {
if (m_videoOutput)
m_videoOutput->setVideoSize(viewfinderResolution);
m_videoOutput->setVideoSize(adjustedViewfinderResolution);
// if preview is started, we have to stop it first before changing its size
if (m_previewStarted && restartPreview)
m_camera->stopPreview();
m_camera->setPreviewSize(viewfinderResolution);
m_camera->setPreviewSize(adjustedViewfinderResolution);
// restart preview
if (m_previewStarted && restartPreview)

View File

@@ -337,7 +337,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
if (!mTempFile.isNull())
mediaPath = QStringLiteral("file://") + mTempFile->fileName();
} else {
mediaPath = url.toString();
mediaPath = url.toString(QUrl::FullyEncoded);
}
if (mVideoSize.isValid() && mVideoOutput)

View File

@@ -1,5 +1,5 @@
TARGET = dsengine
win32:!qtHaveModule(opengl) {
win32:!qtHaveModule(opengl)|contains(QT_CONFIG,dynamicgl) {
LIBS_PRIVATE += -lgdi32 -luser32
}
PLUGIN_TYPE=mediaservice

View File

@@ -39,6 +39,7 @@
#include "dsvideodevicecontrol.h"
#ifdef QMEDIA_DIRECTSHOW_CAMERA
#include <QtCore/QElapsedTimer>
#include <dshow.h>
#include "dscameraservice.h"
#endif
@@ -121,8 +122,7 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty())
updateDevices();
updateDevices();
return m_defaultCameraDevice;
}
@@ -135,8 +135,7 @@ QList<QByteArray> DSServicePlugin::devices(const QByteArray &service) const
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty())
updateDevices();
updateDevices();
return m_cameraDevices;
}
@@ -149,8 +148,7 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
if (m_cameraDevices.isEmpty())
updateDevices();
updateDevices();
for (int i=0; i<m_cameraDevices.count(); i++)
if (m_cameraDevices[i] == device)
@@ -164,6 +162,10 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
void DSServicePlugin::updateDevices() const
{
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
addRefCount();
m_defaultCameraDevice.clear();
@@ -176,6 +178,7 @@ void DSServicePlugin::updateDevices() const
}
releaseRefCount();
timer.restart();
}
#endif

View File

@@ -24,6 +24,7 @@ HEADERS += \
$$PWD/camerabinrecorder.h \
$$PWD/camerabincontainer.h \
$$PWD/camerabinimagecapture.h \
$$PWD/camerabinzoom.h \
$$PWD/camerabinimageprocessing.h \
$$PWD/camerabinmetadata.h \
$$PWD/camerabinvideoencoder.h \
@@ -31,6 +32,7 @@ HEADERS += \
$$PWD/camerabincapturedestination.h \
$$PWD/camerabincapturebufferformat.h \
$$PWD/camerabinviewfindersettings.h \
$$PWD/camerabinviewfindersettings2.h \
$$PWD/camerabininfocontrol.h
SOURCES += \
@@ -42,6 +44,7 @@ SOURCES += \
$$PWD/camerabincontainer.cpp \
$$PWD/camerabinimagecapture.cpp \
$$PWD/camerabinimageencoder.cpp \
$$PWD/camerabinzoom.cpp \
$$PWD/camerabinimageprocessing.cpp \
$$PWD/camerabinmetadata.cpp \
$$PWD/camerabinrecorder.cpp \
@@ -49,6 +52,7 @@ SOURCES += \
$$PWD/camerabinresourcepolicy.cpp \
$$PWD/camerabincapturedestination.cpp \
$$PWD/camerabinviewfindersettings.cpp \
$$PWD/camerabinviewfindersettings2.cpp \
$$PWD/camerabincapturebufferformat.cpp \
$$PWD/camerabininfocontrol.cpp
@@ -69,15 +73,13 @@ config_gstreamer_photography {
$$PWD/camerabinfocus.h \
$$PWD/camerabinexposure.h \
$$PWD/camerabinflash.h \
$$PWD/camerabinlocks.h \
$$PWD/camerabinzoom.h
$$PWD/camerabinlocks.h
SOURCES += \
$$PWD/camerabinexposure.cpp \
$$PWD/camerabinflash.cpp \
$$PWD/camerabinfocus.cpp \
$$PWD/camerabinlocks.cpp \
$$PWD/camerabinzoom.cpp
$$PWD/camerabinlocks.cpp
LIBS += -lgstphotography-$$GST_VERSION
DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API

View File

@@ -51,11 +51,10 @@ CameraBinControl::CameraBinControl(CameraBinSession *session)
:QCameraControl(session),
m_session(session),
m_state(QCamera::UnloadedState),
m_status(QCamera::UnloadedStatus),
m_reloadPending(false)
{
connect(m_session, SIGNAL(stateChanged(QCamera::State)),
this, SLOT(updateStatus()));
connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
this, SIGNAL(statusChanged(QCamera::Status)));
connect(m_session, SIGNAL(viewfinderChanged()),
SLOT(reloadLater()));
@@ -116,7 +115,7 @@ void CameraBinControl::setState(QCamera::State state)
//special case for stopping the camera while it's busy,
//it should be delayed until the camera is idle
if (state == QCamera::LoadedState &&
m_session->state() == QCamera::ActiveState &&
m_session->status() == QCamera::ActiveStatus &&
m_session->isBusy()) {
#ifdef CAMEABIN_DEBUG
qDebug() << Q_FUNC_INFO << "Camera is busy, QCamera::stop() is delayed";
@@ -165,52 +164,9 @@ QCamera::State CameraBinControl::state() const
return m_state;
}
void CameraBinControl::updateStatus()
QCamera::Status CameraBinControl::status() const
{
QCamera::State sessionState = m_session->state();
QCamera::Status oldStatus = m_status;
switch (m_state) {
case QCamera::UnloadedState:
m_status = QCamera::UnloadedStatus;
break;
case QCamera::LoadedState:
switch (sessionState) {
case QCamera::UnloadedState:
m_status = m_resourcePolicy->isResourcesGranted()
? QCamera::LoadingStatus
: QCamera::UnavailableStatus;
break;
case QCamera::LoadedState:
m_status = QCamera::LoadedStatus;
break;
case QCamera::ActiveState:
m_status = QCamera::ActiveStatus;
break;
}
break;
case QCamera::ActiveState:
switch (sessionState) {
case QCamera::UnloadedState:
m_status = m_resourcePolicy->isResourcesGranted()
? QCamera::LoadingStatus
: QCamera::UnavailableStatus;
break;
case QCamera::LoadedState:
m_status = QCamera::StartingStatus;
break;
case QCamera::ActiveState:
m_status = QCamera::ActiveStatus;
break;
}
}
if (m_status != oldStatus) {
#ifdef CAMEABIN_DEBUG
qDebug() << "Camera status changed" << ENUM_NAME(QCamera, "Status", m_status);
#endif
emit statusChanged(m_status);
}
return m_session->status();
}
void CameraBinControl::reloadLater()
@@ -254,7 +210,7 @@ void CameraBinControl::handleResourcesGranted()
void CameraBinControl::handleBusyChanged(bool busy)
{
if (!busy && m_session->state() == QCamera::ActiveState) {
if (!busy && m_session->status() == QCamera::ActiveStatus) {
if (m_state == QCamera::LoadedState) {
//handle delayed stop() because of busy camera
m_resourcePolicy->setResourceSet(CamerabinResourcePolicy::LoadedResources);
@@ -293,15 +249,14 @@ bool CameraBinControl::canChangeProperty(PropertyChangeType changeType, QCamera:
Q_UNUSED(status);
switch (changeType) {
case QCameraControl::CaptureMode:
return status != QCamera::ActiveStatus;
break;
case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings:
case QCameraControl::Viewfinder:
return true;
case QCameraControl::CaptureMode:
case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings:
case QCameraControl::ViewfinderSettings:
default:
return false;
return status != QCamera::ActiveStatus;
}
}

View File

@@ -56,7 +56,7 @@ public:
QCamera::State state() const;
void setState(QCamera::State state);
QCamera::Status status() const { return m_status; }
QCamera::Status status() const;
QCamera::CaptureModes captureMode() const;
void setCaptureMode(QCamera::CaptureModes mode);
@@ -72,7 +72,6 @@ public slots:
void setViewfinderColorSpaceConversion(bool enabled);
private slots:
void updateStatus();
void delayedReload();
void handleResourcesGranted();
@@ -86,7 +85,6 @@ private:
CameraBinSession *m_session;
QCamera::State m_state;
QCamera::Status m_status;
CamerabinResourcePolicy *m_resourcePolicy;
bool m_reloadPending;

View File

@@ -56,7 +56,7 @@ CameraBinFocus::CameraBinFocus(CameraBinSession *session)
QGstreamerBufferProbe(ProbeBuffers),
#endif
m_session(session),
m_cameraState(QCamera::UnloadedState),
m_cameraStatus(QCamera::UnloadedStatus),
m_focusMode(QCameraFocus::AutoFocus),
m_focusPointMode(QCameraFocus::FocusPointAuto),
m_focusStatus(QCamera::Unlocked),
@@ -68,8 +68,8 @@ CameraBinFocus::CameraBinFocus(CameraBinSession *session)
gst_photography_set_focus_mode(m_session->photography(), GST_PHOTOGRAPHY_FOCUS_MODE_AUTO);
connect(m_session, SIGNAL(stateChanged(QCamera::State)),
this, SLOT(_q_handleCameraStateChange(QCamera::State)));
connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
this, SLOT(_q_handleCameraStatusChange(QCamera::Status)));
}
CameraBinFocus::~CameraBinFocus()
@@ -319,10 +319,10 @@ void CameraBinFocus::_q_setFocusStatus(QCamera::LockStatus status, QCamera::Lock
}
}
void CameraBinFocus::_q_handleCameraStateChange(QCamera::State state)
void CameraBinFocus::_q_handleCameraStatusChange(QCamera::Status status)
{
m_cameraState = state;
if (state == QCamera::ActiveState) {
m_cameraStatus = status;
if (status == QCamera::ActiveStatus) {
if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) {
if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
if (GstStructure *structure = gst_caps_get_structure(caps, 0)) {
@@ -415,7 +415,7 @@ void CameraBinFocus::updateRegionOfInterest(const QRectF &rectangle)
void CameraBinFocus::updateRegionOfInterest(const QVector<QRect> &rectangles)
{
if (m_cameraState != QCamera::ActiveState)
if (m_cameraStatus != QCamera::ActiveStatus)
return;
GstElement * const cameraSource = m_session->cameraSource();

View File

@@ -93,7 +93,7 @@ protected:
private Q_SLOTS:
void _q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason);
void _q_handleCameraStateChange(QCamera::State state);
void _q_handleCameraStatusChange(QCamera::Status status);
#if GST_CHECK_VERSION(1,0,0)
void _q_updateFaces();
@@ -109,7 +109,7 @@ private:
#endif
CameraBinSession *m_session;
QCamera::State m_cameraState;
QCamera::Status m_cameraStatus;
QCameraFocus::FocusModes m_focusMode;
QCameraFocus::FocusPointMode m_focusPointMode;
QCamera::LockStatus m_focusStatus;

View File

@@ -61,7 +61,7 @@ CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session)
, m_requestId(0)
, m_ready(false)
{
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
connect(m_session, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateState()));
connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)), this, SIGNAL(imageCaptured(int,QImage)));
connect(m_session->cameraControl()->resourcePolicy(), SIGNAL(canCaptureChanged()), this, SLOT(updateState()));
@@ -100,7 +100,7 @@ void CameraBinImageCapture::cancelCapture()
void CameraBinImageCapture::updateState()
{
bool ready = m_session->state() == QCamera::ActiveState
bool ready = m_session->status() == QCamera::ActiveStatus
&& m_session->cameraControl()->resourcePolicy()->canCapture();
if (m_ready != ready) {
#ifdef DEBUG_CAPTURE

View File

@@ -49,7 +49,7 @@ CameraBinRecorder::CameraBinRecorder(CameraBinSession *session)
m_state(QMediaRecorder::StoppedState),
m_status(QMediaRecorder::UnloadedStatus)
{
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus()));
connect(m_session, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateStatus()));
connect(m_session, SIGNAL(pendingStateChanged(QCamera::State)), SLOT(updateStatus()));
connect(m_session, SIGNAL(busyChanged(bool)), SLOT(updateStatus()));
@@ -86,12 +86,12 @@ QMediaRecorder::Status CameraBinRecorder::status() const
void CameraBinRecorder::updateStatus()
{
QCamera::State sessionState = m_session->state();
QCamera::Status sessionStatus = m_session->status();
QMediaRecorder::State oldState = m_state;
QMediaRecorder::Status oldStatus = m_status;
if (sessionState == QCamera::ActiveState &&
if (sessionStatus == QCamera::ActiveStatus &&
m_session->captureMode().testFlag(QCamera::CaptureVideo)) {
if (!m_session->cameraControl()->resourcePolicy()->canCapture()) {
@@ -214,7 +214,7 @@ void CameraBinRecorder::setState(QMediaRecorder::State state)
break;
case QMediaRecorder::RecordingState:
if (m_session->state() != QCamera::ActiveState) {
if (m_session->status() != QCamera::ActiveStatus) {
emit error(QMediaRecorder::ResourceError, tr("Service has not been started"));
} else if (!m_session->cameraControl()->resourcePolicy()->canCapture()) {
emit error(QMediaRecorder::ResourceError, tr("Recording permissions are not available"));

View File

@@ -55,6 +55,7 @@
#include "camerabincapturebufferformat.h"
#include "camerabincapturedestination.h"
#include "camerabinviewfindersettings.h"
#include "camerabinviewfindersettings2.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstutils_p.h>
@@ -84,7 +85,9 @@ QT_BEGIN_NAMESPACE
CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *parent):
QMediaService(parent),
m_cameraInfoControl(0)
m_cameraInfoControl(0),
m_viewfinderSettingsControl(0),
m_viewfinderSettingsControl2(0)
{
m_captureSession = 0;
m_metaDataControl = 0;
@@ -224,8 +227,17 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (qstrcmp(name, QCameraCaptureBufferFormatControl_iid) == 0)
return m_captureSession->captureBufferFormatControl();
if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0)
return m_captureSession->viewfinderSettingsControl();
if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0) {
if (!m_viewfinderSettingsControl)
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(m_captureSession);
return m_viewfinderSettingsControl;
}
if (qstrcmp(name, QCameraViewfinderSettingsControl2_iid) == 0) {
if (!m_viewfinderSettingsControl2)
m_viewfinderSettingsControl2 = new CameraBinViewfinderSettings2(m_captureSession);
return m_viewfinderSettingsControl2;
}
if (qstrcmp(name, QCameraInfoControl_iid) == 0) {
if (!m_cameraInfoControl)

View File

@@ -53,6 +53,8 @@ class QGstreamerElementFactory;
class CameraBinMetaData;
class CameraBinImageCapture;
class CameraBinMetaData;
class CameraBinViewfinderSettings;
class CameraBinViewfinderSettings2;
class CameraBinService : public QMediaService
{
@@ -85,6 +87,9 @@ private:
#endif
CameraBinImageCapture *m_imageCaptureControl;
QMediaControl *m_cameraInfoControl;
CameraBinViewfinderSettings *m_viewfinderSettingsControl;
CameraBinViewfinderSettings2 *m_viewfinderSettingsControl2;
};
QT_END_NAMESPACE

View File

@@ -43,9 +43,9 @@
#include "camerabinflash.h"
#include "camerabinfocus.h"
#include "camerabinlocks.h"
#include "camerabinzoom.h"
#endif
#include "camerabinzoom.h"
#include "camerabinimageprocessing.h"
#include "camerabinviewfindersettings.h"
@@ -55,6 +55,7 @@
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstutils_p.h>
#include <qmediarecorder.h>
#include <qvideosurfaceformat.h>
#ifdef HAVE_GST_PHOTOGRAPHY
#include <gst/interfaces/photography.h>
@@ -106,17 +107,12 @@
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"
//using GST_STATE_READY for QCamera::LoadedState
//may not work reliably at least with some webcams.
//#define USE_READY_STATE_ON_LOADED
QT_BEGIN_NAMESPACE
CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *parent)
:QObject(parent),
m_recordingActive(false),
m_state(QCamera::UnloadedState),
m_status(QCamera::UnloadedStatus),
m_pendingState(QCamera::UnloadedState),
m_muted(false),
m_busy(false),
@@ -125,11 +121,20 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
m_videoInputFactory(0),
m_viewfinder(0),
m_viewfinderInterface(0),
#ifdef HAVE_GST_PHOTOGRAPHY
m_cameraExposureControl(0),
m_cameraFlashControl(0),
m_cameraFocusControl(0),
m_cameraLocksControl(0),
#endif
m_cameraSrc(0),
m_videoSrc(0),
m_viewfinderElement(0),
m_sourceFactory(sourceFactory),
m_viewfinderHasChanged(true),
m_videoInputHasChanged(true),
m_inputDeviceHasChanged(true),
m_usingWrapperCameraBinSrc(false),
m_viewfinderProbe(this),
m_audioSrc(0),
m_audioConvert(0),
m_capsFilter(0),
@@ -158,18 +163,10 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
m_imageEncodeControl = new CameraBinImageEncoder(this);
m_recorderControl = new CameraBinRecorder(this);
m_mediaContainerControl = new CameraBinContainer(this);
#ifdef HAVE_GST_PHOTOGRAPHY
m_cameraExposureControl = new CameraBinExposure(this);
m_cameraFlashControl = new CameraBinFlash(this);
m_cameraFocusControl = new CameraBinFocus(this);
m_cameraLocksControl = new CameraBinLocks(this);
m_cameraZoomControl = new CameraBinZoom(this);
#endif
m_imageProcessingControl = new CameraBinImageProcessing(this);
m_captureDestinationControl = new CameraBinCaptureDestination(this);
m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this);
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(this);
QByteArray envFlags = qgetenv("QT_GSTREAMER_CAMERABIN_FLAGS");
if (!envFlags.isEmpty())
@@ -223,24 +220,48 @@ GstPhotography *CameraBinSession::photography()
return 0;
}
#endif
CameraBinSession::CameraRole CameraBinSession::cameraRole() const
CameraBinExposure *CameraBinSession::cameraExposureControl()
{
return BackCamera;
if (!m_cameraExposureControl && photography())
m_cameraExposureControl = new CameraBinExposure(this);
return m_cameraExposureControl;
}
/*
Configure camera during Loaded->Active states stansition.
*/
CameraBinFlash *CameraBinSession::cameraFlashControl()
{
if (!m_cameraFlashControl && photography())
m_cameraFlashControl = new CameraBinFlash(this);
return m_cameraFlashControl;
}
CameraBinFocus *CameraBinSession::cameraFocusControl()
{
if (!m_cameraFocusControl && photography())
m_cameraFocusControl = new CameraBinFocus(this);
return m_cameraFocusControl;
}
CameraBinLocks *CameraBinSession::cameraLocksControl()
{
if (!m_cameraLocksControl && photography())
m_cameraLocksControl = new CameraBinLocks(this);
return m_cameraLocksControl;
}
#endif
bool CameraBinSession::setupCameraBin()
{
if (!buildCameraSource())
return false;
if (m_viewfinderHasChanged) {
if (m_viewfinderElement)
if (m_viewfinderElement) {
GstPad *pad = gst_element_get_static_pad(m_viewfinderElement, "sink");
m_viewfinderProbe.removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
gst_object_unref(GST_OBJECT(m_viewfinderElement));
}
m_viewfinderElement = m_viewfinderInterface ? m_viewfinderInterface->videoSink() : 0;
#if CAMERABIN_DEBUG
@@ -248,9 +269,15 @@ bool CameraBinSession::setupCameraBin()
#endif
m_viewfinderHasChanged = false;
if (!m_viewfinderElement) {
qWarning() << "Staring camera without viewfinder available";
if (m_pendingState == QCamera::ActiveState)
qWarning() << "Starting camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
GstPad *pad = gst_element_get_static_pad(m_viewfinderElement, "sink");
m_viewfinderProbe.addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(m_viewfinderElement), "sync", FALSE, NULL);
qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL);
@@ -260,9 +287,15 @@ bool CameraBinSession::setupCameraBin()
return true;
}
static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0)
static GstCaps *resolutionToCaps(const QSize &resolution,
qreal frameRate = 0.0,
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid)
{
GstCaps *caps = QGstUtils::videoFilterCaps();
GstCaps *caps = 0;
if (pixelFormat == QVideoFrame::Format_Invalid)
caps = QGstUtils::videoFilterCaps();
else
caps = QGstUtils::capsForFormats(QList<QVideoFrame::PixelFormat>() << pixelFormat);
if (!resolution.isEmpty()) {
gst_caps_set_simple(
@@ -288,75 +321,92 @@ static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0)
void CameraBinSession::setupCaptureResolution()
{
QSize resolution = m_imageEncodeControl->imageSettings().resolution();
{
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
if (caps)
gst_caps_unref(caps);
}
QSize viewfinderResolution = m_viewfinderSettings.resolution();
qreal viewfinderFrameRate = m_viewfinderSettings.maximumFrameRate();
QVideoFrame::PixelFormat viewfinderPixelFormat = m_viewfinderSettings.pixelFormat();
const QSize imageResolution = m_imageEncodeControl->imageSettings().resolution();
const QSize videoResolution = m_videoEncodeControl->actualVideoSettings().resolution();
const QSize viewfinderResolution = m_viewfinderSettingsControl->resolution();
resolution = m_videoEncodeControl->actualVideoSettings().resolution();
qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
{
GstCaps *caps = resolutionToCaps(
!resolution.isEmpty() ? resolution : viewfinderResolution, framerate);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
if (caps)
gst_caps_unref(caps);
}
// WrapperCameraBinSrc cannot have different caps on its imgsrc, vidsrc and vfsrc pads.
// If capture resolution is specified, use it also for the viewfinder to avoid caps negotiation
// to fail.
if (m_usingWrapperCameraBinSrc) {
if (m_captureMode == QCamera::CaptureStillImage && !imageResolution.isEmpty())
viewfinderResolution = imageResolution;
else if (m_captureMode == QCamera::CaptureVideo && !videoResolution.isEmpty())
viewfinderResolution = videoResolution;
if (!viewfinderResolution.isEmpty())
resolution = viewfinderResolution;
// Make sure we don't use incompatible frame rate and pixel format with the new resolution
if (viewfinderResolution != m_viewfinderSettings.resolution() &&
(!qFuzzyIsNull(viewfinderFrameRate) || viewfinderPixelFormat != QVideoFrame::Format_Invalid)) {
{
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
if (caps)
gst_caps_unref(caps);
enum {
Nothing = 0x0,
OnlyFrameRate = 0x1,
OnlyPixelFormat = 0x2,
Both = 0x4
};
quint8 found = Nothing;
GstElement *mfw_v4lsrc = 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "video-source")) {
GstElement *videoSrc = 0;
g_object_get(G_OBJECT(m_videoSrc), "video-source", &videoSrc, NULL);
if (videoSrc) {
const char *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(videoSrc)));
if (!qstrcmp(name, "mfw_v4lsrc"))
mfw_v4lsrc = videoSrc;
for (int i = 0; i < m_supportedViewfinderSettings.count() && !(found & Both); ++i) {
const QCameraViewfinderSettings &s = m_supportedViewfinderSettings.at(i);
if (s.resolution() == viewfinderResolution) {
if ((qFuzzyIsNull(viewfinderFrameRate) || s.maximumFrameRate() == viewfinderFrameRate)
&& (viewfinderPixelFormat == QVideoFrame::Format_Invalid || s.pixelFormat() == viewfinderPixelFormat))
found |= Both;
else if (s.maximumFrameRate() == viewfinderFrameRate)
found |= OnlyFrameRate;
else if (s.pixelFormat() == viewfinderPixelFormat)
found |= OnlyPixelFormat;
}
}
if (found & Both) {
// no-op
} else if (found & OnlyPixelFormat) {
viewfinderFrameRate = qreal(0);
} else if (found & OnlyFrameRate) {
viewfinderPixelFormat = QVideoFrame::Format_Invalid;
} else {
viewfinderPixelFormat = QVideoFrame::Format_Invalid;
viewfinderFrameRate = qreal(0);
}
}
}
if (mfw_v4lsrc) {
int capMode = 0;
if (resolution == QSize(320, 240))
capMode = 1;
else if (resolution == QSize(720, 480))
capMode = 2;
else if (resolution == QSize(720, 576))
capMode = 3;
else if (resolution == QSize(1280, 720))
capMode = 4;
else if (resolution == QSize(1920, 1080))
capMode = 5;
g_object_set(G_OBJECT(mfw_v4lsrc), "capture-mode", capMode, NULL);
GstCaps *caps = resolutionToCaps(imageResolution);
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
const qreal maxFps = m_viewfinderSettingsControl->maximumFrameRate();
if (!qFuzzyIsNull(maxFps)) {
int n, d;
gst_util_double_to_fraction(maxFps, &n, &d);
g_object_set(G_OBJECT(mfw_v4lsrc), "fps-n", n, NULL);
g_object_set(G_OBJECT(mfw_v4lsrc), "fps-d", d, NULL);
}
qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
caps = resolutionToCaps(videoResolution, framerate);
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
caps = resolutionToCaps(viewfinderResolution, viewfinderFrameRate, viewfinderPixelFormat);
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
// Special case when using mfw_v4lsrc
if (m_videoSrc && qstrcmp(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(m_videoSrc))), "mfw_v4lsrc") == 0) {
int capMode = 0;
if (viewfinderResolution == QSize(320, 240))
capMode = 1;
else if (viewfinderResolution == QSize(720, 480))
capMode = 2;
else if (viewfinderResolution == QSize(720, 576))
capMode = 3;
else if (viewfinderResolution == QSize(1280, 720))
capMode = 4;
else if (viewfinderResolution == QSize(1920, 1080))
capMode = 5;
g_object_set(G_OBJECT(m_videoSrc), "capture-mode", capMode, NULL);
if (!qFuzzyIsNull(viewfinderFrameRate)) {
int n, d;
gst_util_double_to_fraction(viewfinderFrameRate, &n, &d);
g_object_set(G_OBJECT(m_videoSrc), "fps-n", n, NULL);
g_object_set(G_OBJECT(m_videoSrc), "fps-d", d, NULL);
}
}
@@ -370,7 +420,7 @@ void CameraBinSession::setAudioCaptureCaps()
const int sampleRate = settings.sampleRate();
const int channelCount = settings.channelCount();
if (sampleRate == -1 && channelCount == -1)
if (sampleRate <= 0 && channelCount <=0)
return;
#if GST_CHECK_VERSION(1,0,0)
@@ -384,9 +434,9 @@ void CameraBinSession::setAudioCaptureCaps()
"depth", G_TYPE_INT, 16,
NULL);
#endif
if (sampleRate != -1)
if (sampleRate > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1)
if (channelCount > 0)
gst_structure_set(structure, "channels", G_TYPE_INT, channelCount, NULL);
GstCaps *caps = gst_caps_new_full(structure, NULL);
@@ -402,87 +452,92 @@ GstElement *CameraBinSession::buildCameraSource()
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO;
#endif
if (!m_videoInputHasChanged)
return m_videoSrc;
m_videoInputHasChanged = false;
if (!m_inputDeviceHasChanged)
return m_cameraSrc;
GstElement *videoSrc = 0;
m_inputDeviceHasChanged = false;
m_usingWrapperCameraBinSrc = false;
if (!videoSrc)
g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &videoSrc, NULL);
GstElement *camSrc = 0;
g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &camSrc, NULL);
if (m_sourceFactory)
m_videoSrc = gst_element_factory_create(m_sourceFactory, "camera_source");
if (!m_cameraSrc && m_sourceFactory)
m_cameraSrc = gst_element_factory_create(m_sourceFactory, "camera_source");
// If gstreamer has set a default source use it.
if (!m_videoSrc)
m_videoSrc = videoSrc;
if (!m_cameraSrc)
m_cameraSrc = camSrc;
if (m_videoSrc && !m_inputDevice.isEmpty()) {
if (m_cameraSrc && !m_inputDevice.isEmpty()) {
#if CAMERABIN_DEBUG
qDebug() << "set camera device" << m_inputDevice;
#endif
const char *const cameraSrcName = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(m_cameraSrc)));
m_usingWrapperCameraBinSrc = qstrcmp(cameraSrcName, "wrappercamerabinsrc") == 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "video-source")) {
GstElement *src = 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "video-source")) {
if (!m_videoSrc) {
/* QT_GSTREAMER_CAMERABIN_VIDEOSRC can be used to set the video source element.
/* QT_GSTREAMER_CAMERABIN_VIDEOSRC can be used to set the video source element.
--- Usage
--- Usage
QT_GSTREAMER_CAMERABIN_VIDEOSRC=[drivername=elementname[,drivername2=elementname2 ...],][elementname]
QT_GSTREAMER_CAMERABIN_VIDEOSRC=[drivername=elementname[,drivername2=elementname2 ...],][elementname]
--- Examples
--- Examples
Always use 'somevideosrc':
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somevideosrc"
Always use 'somevideosrc':
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somevideosrc"
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use default:
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc"
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use default:
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc"
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use 'somevideosrc2'
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc,somevideosrc2"
*/
const QByteArray envVideoSource = qgetenv("QT_GSTREAMER_CAMERABIN_VIDEOSRC");
Use 'somevideosrc' when the device driver is 'somedriver', otherwise use 'somevideosrc2'
QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc,somevideosrc2"
*/
const QByteArray envVideoSource = qgetenv("QT_GSTREAMER_CAMERABIN_VIDEOSRC");
if (!envVideoSource.isEmpty()) {
QList<QByteArray> sources = envVideoSource.split(',');
foreach (const QByteArray &source, sources) {
QList<QByteArray> keyValue = source.split('=');
if (keyValue.count() == 1) {
src = gst_element_factory_make(keyValue.at(0), "camera_source");
break;
} else if (keyValue.at(0) == QGstUtils::cameraDriver(m_inputDevice, m_sourceFactory)) {
src = gst_element_factory_make(keyValue.at(1), "camera_source");
break;
if (!envVideoSource.isEmpty()) {
QList<QByteArray> sources = envVideoSource.split(',');
foreach (const QByteArray &source, sources) {
QList<QByteArray> keyValue = source.split('=');
if (keyValue.count() == 1) {
m_videoSrc = gst_element_factory_make(keyValue.at(0), "camera_source");
break;
} else if (keyValue.at(0) == QGstUtils::cameraDriver(m_inputDevice, m_sourceFactory)) {
m_videoSrc = gst_element_factory_make(keyValue.at(1), "camera_source");
break;
}
}
} else if (m_videoInputFactory) {
m_videoSrc = m_videoInputFactory->buildElement();
}
} else if (m_videoInputFactory) {
src = m_videoInputFactory->buildElement();
if (!m_videoSrc)
m_videoSrc = gst_element_factory_make("v4l2src", "camera_source");
g_object_set(G_OBJECT(m_cameraSrc), "video-source", m_videoSrc, NULL);
}
if (!src)
src = gst_element_factory_make("v4l2src", "camera_source");
if (m_videoSrc)
g_object_set(G_OBJECT(m_videoSrc), "device", m_inputDevice.toUtf8().constData(), NULL);
if (src) {
g_object_set(G_OBJECT(src), "device", m_inputDevice.toUtf8().constData(), NULL);
g_object_set(G_OBJECT(m_videoSrc), "video-source", src, NULL);
}
} else if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), "camera-device")) {
} else if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "camera-device")) {
if (m_inputDevice == QLatin1String("secondary")) {
g_object_set(G_OBJECT(m_videoSrc), "camera-device", 1, NULL);
g_object_set(G_OBJECT(m_cameraSrc), "camera-device", 1, NULL);
} else {
g_object_set(G_OBJECT(m_videoSrc), "camera-device", 0, NULL);
g_object_set(G_OBJECT(m_cameraSrc), "camera-device", 0, NULL);
}
}
}
if (m_videoSrc != videoSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
if (m_cameraSrc != camSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_cameraSrc, NULL);
if (videoSrc)
gst_object_unref(GST_OBJECT(videoSrc));
if (camSrc)
gst_object_unref(GST_OBJECT(camSrc));
return m_videoSrc;
return m_cameraSrc;
}
void CameraBinSession::captureImage(int requestId, const QString &fileName)
@@ -595,7 +650,7 @@ void CameraBinSession::setDevice(const QString &device)
{
if (m_inputDevice != device) {
m_inputDevice = device;
m_videoInputHasChanged = true;
m_inputDeviceHasChanged = true;
}
}
@@ -607,7 +662,7 @@ void CameraBinSession::setAudioInput(QGstreamerElementFactory *audioInput)
void CameraBinSession::setVideoInput(QGstreamerElementFactory *videoInput)
{
m_videoInputFactory = videoInput;
m_videoInputHasChanged = true;
m_inputDeviceHasChanged = true;
}
bool CameraBinSession::isReady() const
@@ -655,6 +710,28 @@ void CameraBinSession::setViewfinder(QObject *viewfinder)
}
}
QList<QCameraViewfinderSettings> CameraBinSession::supportedViewfinderSettings() const
{
return m_supportedViewfinderSettings;
}
QCameraViewfinderSettings CameraBinSession::viewfinderSettings() const
{
return m_status == QCamera::ActiveStatus ? m_actualViewfinderSettings : m_viewfinderSettings;
}
void CameraBinSession::ViewfinderProbe::probeCaps(GstCaps *caps)
{
// Update actual viewfinder settings on viewfinder caps change
const GstStructure *s = gst_caps_get_structure(caps, 0);
const QPair<qreal, qreal> frameRate = QGstUtils::structureFrameRateRange(s);
session->m_actualViewfinderSettings.setResolution(QGstUtils::structureResolution(s));
session->m_actualViewfinderSettings.setMinimumFrameRate(frameRate.first);
session->m_actualViewfinderSettings.setMaximumFrameRate(frameRate.second);
session->m_actualViewfinderSettings.setPixelFormat(QGstUtils::structurePixelFormat(s));
session->m_actualViewfinderSettings.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(s));
}
void CameraBinSession::handleViewfinderChange()
{
//the viewfinder will be reloaded
@@ -663,9 +740,20 @@ void CameraBinSession::handleViewfinderChange()
emit viewfinderChanged();
}
QCamera::State CameraBinSession::state() const
void CameraBinSession::setStatus(QCamera::Status status)
{
return m_state;
if (m_status == status)
return;
m_status = status;
emit statusChanged(m_status);
setStateHelper(m_pendingState);
}
QCamera::Status CameraBinSession::status() const
{
return m_status;
}
QCamera::State CameraBinSession::pendingState() const
@@ -685,66 +773,116 @@ void CameraBinSession::setState(QCamera::State newState)
qDebug() << Q_FUNC_INFO << newState;
#endif
switch (newState) {
setStateHelper(newState);
}
void CameraBinSession::setStateHelper(QCamera::State state)
{
switch (state) {
case QCamera::UnloadedState:
if (m_recordingActive)
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
m_state = newState;
if (m_busy)
emit busyChanged(m_busy = false);
emit stateChanged(m_state);
unload();
break;
case QCamera::LoadedState:
if (m_recordingActive)
stopVideoRecording();
if (m_videoInputHasChanged) {
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
buildCameraSource();
}
#ifdef USE_READY_STATE_ON_LOADED
gst_element_set_state(m_camerabin, GST_STATE_READY);
#else
m_state = QCamera::LoadedState;
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
emit stateChanged(m_state);
#endif
if (m_status == QCamera::ActiveStatus)
stop();
else if (m_status == QCamera::UnloadedStatus)
load();
break;
case QCamera::ActiveState:
if (setupCameraBin()) {
GstState binState = GST_STATE_NULL;
GstState pending = GST_STATE_NULL;
gst_element_get_state(m_camerabin, &binState, &pending, 0);
m_recorderControl->applySettings();
GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
profile,
NULL);
gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
setupCaptureResolution();
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
}
// If the viewfinder changed while in the loaded state, we need to reload the pipeline
if (m_status == QCamera::LoadedStatus && !m_viewfinderHasChanged)
start();
else if (m_status == QCamera::UnloadedStatus || m_viewfinderHasChanged)
load();
}
}
void CameraBinSession::setError(int err, const QString &errorString)
{
m_pendingState = QCamera::UnloadedState;
emit error(err, errorString);
setStatus(QCamera::UnloadedStatus);
}
void CameraBinSession::load()
{
if (m_status != QCamera::UnloadedStatus && !m_viewfinderHasChanged)
return;
setStatus(QCamera::LoadingStatus);
gst_element_set_state(m_camerabin, GST_STATE_NULL);
if (!setupCameraBin()) {
setError(QCamera::CameraError, QStringLiteral("No camera source available"));
return;
}
gst_element_set_state(m_camerabin, GST_STATE_READY);
}
void CameraBinSession::unload()
{
if (m_status == QCamera::UnloadedStatus || m_status == QCamera::UnloadingStatus)
return;
setStatus(QCamera::UnloadingStatus);
if (m_recordingActive)
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_NULL);
if (m_busy)
emit busyChanged(m_busy = false);
m_supportedViewfinderSettings.clear();
setStatus(QCamera::UnloadedStatus);
}
void CameraBinSession::start()
{
if (m_status != QCamera::LoadedStatus)
return;
setStatus(QCamera::StartingStatus);
m_recorderControl->applySettings();
GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
profile,
NULL);
gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
setupCaptureResolution();
gst_element_set_state(m_camerabin, GST_STATE_PLAYING);
}
void CameraBinSession::stop()
{
if (m_status != QCamera::ActiveStatus)
return;
setStatus(QCamera::StoppingStatus);
if (m_recordingActive)
stopVideoRecording();
if (m_viewfinderInterface)
m_viewfinderInterface->stopRenderer();
gst_element_set_state(m_camerabin, GST_STATE_READY);
}
bool CameraBinSession::isBusy() const
{
return m_busy;
@@ -889,7 +1027,7 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
if (message.isEmpty())
message = tr("Camera error");
emit error(int(QMediaRecorder::ResourceError), message);
setError(int(QMediaRecorder::ResourceError), message);
}
#ifdef CAMERABIN_DEBUG_DUMP_BIN
@@ -955,17 +1093,20 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
switch (newState) {
case GST_STATE_VOID_PENDING:
case GST_STATE_NULL:
if (m_state != QCamera::UnloadedState)
emit stateChanged(m_state = QCamera::UnloadedState);
setStatus(QCamera::UnloadedStatus);
break;
case GST_STATE_READY:
if (oldState == GST_STATE_NULL)
updateSupportedViewfinderSettings();
setMetaData(m_metaData);
if (m_state != QCamera::LoadedState)
emit stateChanged(m_state = QCamera::LoadedState);
setStatus(QCamera::LoadedStatus);
break;
case GST_STATE_PLAYING:
setStatus(QCamera::ActiveStatus);
break;
case GST_STATE_PAUSED:
case GST_STATE_PLAYING:
emit stateChanged(m_state = QCamera::ActiveState);
default:
break;
}
}
@@ -973,7 +1114,6 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
default:
break;
}
//qDebug() << "New session state:" << ENUM_NAME(CameraBinSession,"State",m_state);
}
}
@@ -1034,14 +1174,47 @@ static bool rateLessThan(const QPair<int,int> &r1, const QPair<int,int> &r2)
return r1.first*r2.second < r2.first*r1.second;
}
GstCaps *CameraBinSession::supportedCaps(QCamera::CaptureModes mode) const
{
GstCaps *supportedCaps = 0;
// When using wrappercamerabinsrc, get the supported caps directly from the video source element.
// This makes sure we only get the caps actually supported by the video source element.
if (m_videoSrc) {
GstPad *pad = gst_element_get_static_pad(m_videoSrc, "src");
if (pad) {
supportedCaps = qt_gst_pad_get_caps(pad);
gst_object_unref(GST_OBJECT(pad));
}
}
// Otherwise, let the camerabin handle this.
if (!supportedCaps) {
const gchar *prop;
switch (mode) {
case QCamera::CaptureStillImage:
prop = SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY;
break;
case QCamera::CaptureVideo:
prop = SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY;
break;
case QCamera::CaptureViewfinder:
default:
prop = SUPPORTED_VIEWFINDER_CAPS_PROPERTY;
break;
}
g_object_get(G_OBJECT(m_camerabin), prop, &supportedCaps, NULL);
}
return supportedCaps;
}
QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frameSize, bool *continuous) const
{
QList< QPair<int,int> > res;
GstCaps *supportedCaps = 0;
g_object_get(G_OBJECT(m_camerabin),
SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
GstCaps *supportedCaps = this->supportedCaps(QCamera::CaptureVideo);
if (!supportedCaps)
return res;
@@ -1144,11 +1317,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
if (continuous)
*continuous = false;
GstCaps *supportedCaps = 0;
g_object_get(G_OBJECT(m_camerabin),
(mode == QCamera::CaptureStillImage) ?
SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
GstCaps *supportedCaps = this->supportedCaps(mode);
#if CAMERABIN_DEBUG
qDebug() << "Source caps:" << supportedCaps;
@@ -1278,6 +1447,40 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
return res;
}
void CameraBinSession::updateSupportedViewfinderSettings()
{
m_supportedViewfinderSettings.clear();
GstCaps *supportedCaps = this->supportedCaps(QCamera::CaptureViewfinder);
// Convert caps to QCameraViewfinderSettings
if (supportedCaps) {
supportedCaps = qt_gst_caps_normalize(supportedCaps);
for (uint i = 0; i < gst_caps_get_size(supportedCaps); i++) {
const GstStructure *structure = gst_caps_get_structure(supportedCaps, i);
QCameraViewfinderSettings s;
s.setResolution(QGstUtils::structureResolution(structure));
s.setPixelFormat(QGstUtils::structurePixelFormat(structure));
s.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(structure));
QPair<qreal, qreal> frameRateRange = QGstUtils::structureFrameRateRange(structure);
s.setMinimumFrameRate(frameRateRange.first);
s.setMaximumFrameRate(frameRateRange.second);
if (!s.resolution().isEmpty()
&& s.pixelFormat() != QVideoFrame::Format_Invalid
&& !m_supportedViewfinderSettings.contains(s)) {
m_supportedViewfinderSettings.append(s);
}
}
gst_caps_unref(supportedCaps);
}
}
void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSession *session)
{
GstElementFactory *factory = gst_element_get_factory(element);

View File

@@ -45,6 +45,7 @@
#endif
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
#include "qcamera.h"
QT_BEGIN_NAMESPACE
@@ -74,7 +75,6 @@ public:
virtual GstElement *buildElement() = 0;
};
class CameraBinSession : public QObject,
public QGstreamerBusMessageFilter,
public QGstreamerSyncMessageFilter
@@ -83,11 +83,6 @@ class CameraBinSession : public QObject,
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
Q_INTERFACES(QGstreamerBusMessageFilter QGstreamerSyncMessageFilter)
public:
enum CameraRole {
FrontCamera, // Secondary camera
BackCamera // Main photo camera
};
CameraBinSession(GstElementFactory *sourceFactory, QObject *parent);
~CameraBinSession();
@@ -98,8 +93,6 @@ public:
GstElement *cameraSource() { return m_videoSrc; }
QGstreamerBusHelper *bus() { return m_busHelper; }
CameraRole cameraRole() const;
QList< QPair<int,int> > supportedFrameRates(const QSize &frameSize, bool *continuous) const;
QList<QSize> supportedResolutions(QPair<int,int> rate, bool *continuous, QCamera::CaptureModes mode) const;
@@ -121,17 +114,16 @@ public:
CameraBinImageEncoder *imageEncodeControl() const { return m_imageEncodeControl; }
#ifdef HAVE_GST_PHOTOGRAPHY
CameraBinExposure *cameraExposureControl() const { return m_cameraExposureControl; }
CameraBinFlash *cameraFlashControl() const { return m_cameraFlashControl; }
CameraBinFocus *cameraFocusControl() const { return m_cameraFocusControl; }
CameraBinLocks *cameraLocksControl() const { return m_cameraLocksControl; }
CameraBinZoom *cameraZoomControl() const { return m_cameraZoomControl; }
CameraBinExposure *cameraExposureControl();
CameraBinFlash *cameraFlashControl();
CameraBinFocus *cameraFocusControl();
CameraBinLocks *cameraLocksControl();
#endif
CameraBinZoom *cameraZoomControl() const { return m_cameraZoomControl; }
CameraBinImageProcessing *imageProcessingControl() const { return m_imageProcessingControl; }
CameraBinCaptureDestination *captureDestinationControl() const { return m_captureDestinationControl; }
CameraBinCaptureBufferFormat *captureBufferFormatControl() const { return m_captureBufferFormatControl; }
CameraBinViewfinderSettings *viewfinderSettingsControl() const { return m_viewfinderSettingsControl; }
CameraBinRecorder *recorderControl() const { return m_recorderControl; }
CameraBinContainer *mediaContainerControl() const { return m_mediaContainerControl; }
@@ -146,9 +138,13 @@ public:
QObject *viewfinder() const { return m_viewfinder; }
void setViewfinder(QObject *viewfinder);
QList<QCameraViewfinderSettings> supportedViewfinderSettings() const;
QCameraViewfinderSettings viewfinderSettings() const;
void setViewfinderSettings(const QCameraViewfinderSettings &settings) { m_viewfinderSettings = settings; }
void captureImage(int requestId, const QString &fileName);
QCamera::State state() const;
QCamera::Status status() const;
QCamera::State pendingState() const;
bool isBusy() const;
@@ -163,7 +159,7 @@ public:
bool processBusMessage(const QGstreamerMessage &message);
signals:
void stateChanged(QCamera::State state);
void statusChanged(QCamera::Status status);
void pendingStateChanged(QCamera::State state);
void durationChanged(qint64 duration);
void error(int error, const QString &errorString);
@@ -183,11 +179,22 @@ public slots:
private slots:
void handleViewfinderChange();
void setupCaptureResolution();
private:
void load();
void unload();
void start();
void stop();
void setStatus(QCamera::Status status);
void setStateHelper(QCamera::State state);
void setError(int error, const QString &errorString);
bool setupCameraBin();
void setupCaptureResolution();
void setAudioCaptureCaps();
GstCaps *supportedCaps(QCamera::CaptureModes mode) const;
void updateSupportedViewfinderSettings();
static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d);
static void elementAdded(GstBin *bin, GstElement *element, CameraBinSession *session);
@@ -197,7 +204,7 @@ private:
QUrl m_actualSink;
bool m_recordingActive;
QString m_captureDevice;
QCamera::State m_state;
QCamera::Status m_status;
QCamera::State m_pendingState;
QString m_inputDevice;
bool m_muted;
@@ -210,6 +217,9 @@ private:
QGstreamerElementFactory *m_videoInputFactory;
QObject *m_viewfinder;
QGstreamerVideoRendererInterface *m_viewfinderInterface;
QList<QCameraViewfinderSettings> m_supportedViewfinderSettings;
QCameraViewfinderSettings m_viewfinderSettings;
QCameraViewfinderSettings m_actualViewfinderSettings;
CameraBinControl *m_cameraControl;
CameraBinAudioEncoder *m_audioEncodeControl;
@@ -222,22 +232,35 @@ private:
CameraBinFlash *m_cameraFlashControl;
CameraBinFocus *m_cameraFocusControl;
CameraBinLocks *m_cameraLocksControl;
CameraBinZoom *m_cameraZoomControl;
#endif
CameraBinZoom *m_cameraZoomControl;
CameraBinImageProcessing *m_imageProcessingControl;
CameraBinCaptureDestination *m_captureDestinationControl;
CameraBinCaptureBufferFormat *m_captureBufferFormatControl;
CameraBinViewfinderSettings *m_viewfinderSettingsControl;
QGstreamerBusHelper *m_busHelper;
GstBus* m_bus;
GstElement *m_camerabin;
GstElement *m_cameraSrc;
GstElement *m_videoSrc;
GstElement *m_viewfinderElement;
GstElementFactory *m_sourceFactory;
bool m_viewfinderHasChanged;
bool m_videoInputHasChanged;
bool m_inputDeviceHasChanged;
bool m_usingWrapperCameraBinSrc;
class ViewfinderProbe : public QGstreamerBufferProbe {
public:
ViewfinderProbe(CameraBinSession *s)
: QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
, session(s)
{}
void probeCaps(GstCaps *caps);
private:
CameraBinSession * const session;
} m_viewfinderProbe;
GstElement *m_audioSrc;
GstElement *m_audioConvert;

View File

@@ -1,6 +1,7 @@
/****************************************************************************
**
** Copyright (C) 2013 Jolla Ltd.
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
@@ -33,14 +34,14 @@
#include "camerabinviewfindersettings.h"
#include "camerabinsession.h"
QT_BEGIN_NAMESPACE
CameraBinViewfinderSettings::CameraBinViewfinderSettings(QObject *parent)
: QCameraViewfinderSettingsControl(parent),
m_minimumFrameRate(0),
m_maximumFrameRate(0)
CameraBinViewfinderSettings::CameraBinViewfinderSettings(CameraBinSession *session)
: QCameraViewfinderSettingsControl(session)
, m_session(session)
{
}
@@ -52,11 +53,11 @@ bool CameraBinViewfinderSettings::isViewfinderParameterSupported(ViewfinderParam
{
switch (parameter) {
case Resolution:
case PixelAspectRatio:
case MinimumFrameRate:
case MaximumFrameRate:
return true;
case PixelAspectRatio:
case PixelFormat:
return true;
case UserParameter:
return false;
}
@@ -67,13 +68,15 @@ QVariant CameraBinViewfinderSettings::viewfinderParameter(ViewfinderParameter pa
{
switch (parameter) {
case Resolution:
return m_resolution;
case MinimumFrameRate:
return m_minimumFrameRate;
case MaximumFrameRate:
return m_maximumFrameRate;
return m_session->viewfinderSettings().resolution();
case PixelAspectRatio:
return m_session->viewfinderSettings().pixelAspectRatio();
case MinimumFrameRate:
return m_session->viewfinderSettings().minimumFrameRate();
case MaximumFrameRate:
return m_session->viewfinderSettings().maximumFrameRate();
case PixelFormat:
return m_session->viewfinderSettings().pixelFormat();
case UserParameter:
return QVariant();
}
@@ -82,36 +85,28 @@ QVariant CameraBinViewfinderSettings::viewfinderParameter(ViewfinderParameter pa
void CameraBinViewfinderSettings::setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value)
{
QCameraViewfinderSettings settings = m_session->viewfinderSettings();
switch (parameter) {
case Resolution:
m_resolution = value.toSize();
break;
case MinimumFrameRate:
m_minimumFrameRate = value.toFloat();
break;
case MaximumFrameRate:
m_maximumFrameRate = value.toFloat();
settings.setResolution(value.toSize());
break;
case PixelAspectRatio:
settings.setPixelAspectRatio(value.toSize());
break;
case MinimumFrameRate:
settings.setMinimumFrameRate(value.toReal());
break;
case MaximumFrameRate:
settings.setMaximumFrameRate(value.toReal());
break;
case PixelFormat:
settings.setPixelFormat(qvariant_cast<QVideoFrame::PixelFormat>(value));
case UserParameter:
break;
}
}
QSize CameraBinViewfinderSettings::resolution() const
{
return m_resolution;
}
qreal CameraBinViewfinderSettings::minimumFrameRate() const
{
return m_minimumFrameRate;
}
qreal CameraBinViewfinderSettings::maximumFrameRate() const
{
return m_maximumFrameRate;
m_session->setViewfinderSettings(settings);
}
QT_END_NAMESPACE

View File

@@ -1,6 +1,7 @@
/****************************************************************************
**
** Copyright (C) 2013 Jolla Ltd.
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
@@ -36,29 +37,23 @@
#include <qcameraviewfindersettingscontrol.h>
#include <QtCore/qsize.h>
QT_BEGIN_NAMESPACE
class CameraBinSession;
class CameraBinViewfinderSettings : public QCameraViewfinderSettingsControl
{
Q_OBJECT
public:
CameraBinViewfinderSettings(QObject *parent);
CameraBinViewfinderSettings(CameraBinSession *session);
~CameraBinViewfinderSettings();
bool isViewfinderParameterSupported(ViewfinderParameter parameter) const;
QVariant viewfinderParameter(ViewfinderParameter parameter) const;
void setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value);
QSize resolution() const;
qreal minimumFrameRate() const;
qreal maximumFrameRate() const;
private:
QSize m_resolution;
qreal m_minimumFrameRate;
qreal m_maximumFrameRate;
CameraBinSession *m_session;
};
QT_END_NAMESPACE

View File

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "camerabinviewfindersettings2.h"
#include "camerabinsession.h"
QT_BEGIN_NAMESPACE
CameraBinViewfinderSettings2::CameraBinViewfinderSettings2(CameraBinSession *session)
: QCameraViewfinderSettingsControl2(session)
, m_session(session)
{
}
CameraBinViewfinderSettings2::~CameraBinViewfinderSettings2()
{
}
QList<QCameraViewfinderSettings> CameraBinViewfinderSettings2::supportedViewfinderSettings() const
{
return m_session->supportedViewfinderSettings();
}
QCameraViewfinderSettings CameraBinViewfinderSettings2::viewfinderSettings() const
{
return m_session->viewfinderSettings();
}
void CameraBinViewfinderSettings2::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
m_session->setViewfinderSettings(settings);
}
QT_END_NAMESPACE

View File

@@ -0,0 +1,61 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef CAMERABINVIEWFINDERSETTINGS2_H
#define CAMERABINVIEWFINDERSETTINGS2_H
#include <qcameraviewfindersettingscontrol.h>
QT_BEGIN_NAMESPACE
class CameraBinSession;
class CameraBinViewfinderSettings2 : public QCameraViewfinderSettingsControl2
{
Q_OBJECT
public:
CameraBinViewfinderSettings2(CameraBinSession *session);
~CameraBinViewfinderSettings2();
QList<QCameraViewfinderSettings> supportedViewfinderSettings() const;
QCameraViewfinderSettings viewfinderSettings() const;
void setViewfinderSettings(const QCameraViewfinderSettings &settings);
private:
CameraBinSession *m_session;
};
QT_END_NAMESPACE
#endif // CAMERABINVIEWFINDERSETTINGS2_H

View File

@@ -34,8 +34,6 @@
#include "camerabinzoom.h"
#include "camerabinsession.h"
#include <gst/interfaces/photography.h>
#define ZOOM_PROPERTY "zoom"
#define MAX_ZOOM_PROPERTY "max-zoom"

View File

@@ -131,7 +131,6 @@ QVariant BbCameraExposureControl::requestedValue(ExposureParameter parameter) co
QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
{
#ifndef Q_OS_BLACKBERRY_TABLET
if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment
return QVariantList();
@@ -161,11 +160,10 @@ QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
case CAMERA_SCENE_NIGHT:
return QVariant::fromValue(QCameraExposure::ExposureNight);
default:
return QVariant();
break;
}
#else
return QVariant();
#endif
}
bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)

View File

@@ -37,14 +37,11 @@
#include <QDebug>
#include <QUrl>
#ifndef Q_OS_BLACKBERRY_TABLET
#include <audio/audio_manager_device.h>
#include <audio/audio_manager_volume.h>
#endif
QT_BEGIN_NAMESPACE
#ifndef Q_OS_BLACKBERRY_TABLET
static audio_manager_device_t currentAudioInputDevice()
{
audio_manager_device_t device = AUDIO_DEVICE_HEADSET;
@@ -57,7 +54,6 @@ static audio_manager_device_t currentAudioInputDevice()
return device;
}
#endif
BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent)
: QMediaRecorderControl(parent)
@@ -99,13 +95,12 @@ bool BbCameraMediaRecorderControl::isMuted() const
{
bool muted = false;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted);
if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status"));
return false;
}
#endif
return muted;
}
@@ -113,13 +108,11 @@ qreal BbCameraMediaRecorderControl::volume() const
{
double level = 0.0;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level);
if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume"));
return 0.0;
}
#endif
return (level / 100);
}
@@ -136,26 +129,22 @@ void BbCameraMediaRecorderControl::setState(QMediaRecorder::State state)
void BbCameraMediaRecorderControl::setMuted(bool muted)
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted);
if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status"));
} else {
emit mutedChanged(muted);
}
#endif
}
void BbCameraMediaRecorderControl::setVolume(qreal volume)
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100));
if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume"));
} else {
emit volumeChanged(volume);
}
#endif
}
QT_END_NAMESPACE

View File

@@ -63,11 +63,9 @@ BbCameraOrientationHandler::BbCameraOrientationHandler(QObject *parent)
BbCameraOrientationHandler::~BbCameraOrientationHandler()
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = orientation_stop_events(0);
if (result == BPS_FAILURE)
qWarning() << "Unable to unregister for orientation change events";
#endif
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
}
@@ -80,10 +78,9 @@ bool BbCameraOrientationHandler::nativeEventFilter(const QByteArray&, void *mess
const int angle = orientation_event_get_angle(event);
if (angle != m_orientation) {
#ifndef Q_OS_BLACKBERRY_TABLET
if (angle == 180) // The screen does not rotate at 180 degrees
return false;
#endif
m_orientation = angle;
emit orientationChanged(m_orientation);
}

View File

@@ -83,7 +83,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("Callback registration failed");
case CAMERA_EMICINUSE:
return QLatin1String("Microphone in use already");
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_ENODATA:
return QLatin1String("Data does not exist");
case CAMERA_EBUSY:
@@ -98,7 +97,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("3A have been locked");
// case CAMERA_EVIEWFINDERFROZEN: // not yet available in 10.2 NDK
// return QLatin1String("Freeze flag set");
#endif
default:
return QLatin1String("Unknown error");
}
@@ -561,7 +559,6 @@ void BbCameraSession::applyVideoSettings()
const QSize resolution = m_videoEncoderSettings.resolution();
#ifndef Q_OS_BLACKBERRY_TABLET
QString videoCodec = m_videoEncoderSettings.codec();
if (videoCodec.isEmpty())
videoCodec = QLatin1String("h264");
@@ -599,11 +596,6 @@ void BbCameraSession::applyVideoSettings()
CAMERA_IMGPROP_ROTATION, rotationAngle,
CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec,
CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec);
#else
result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height());
#endif
if (result != CAMERA_EOK) {
qWarning() << "Unable to apply video settings:" << result;
@@ -864,13 +856,10 @@ static void viewFinderStatusCallback(camera_handle_t handle, camera_devstatus_t
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "focusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value));
return;
}
#ifndef Q_OS_BLACKBERRY_TABLET
else if (status == CAMERA_STATUS_POWERUP) {
} else if (status == CAMERA_STATUS_POWERUP) {
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection);
}
#endif
}
bool BbCameraSession::startViewFinder()
@@ -1027,7 +1016,6 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
Q_UNUSED(handle)
Q_UNUSED(value)
#ifndef Q_OS_BLACKBERRY_TABLET
if (status == CAMERA_STATUS_VIDEO_PAUSE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection);
@@ -1035,7 +1023,6 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection);
}
#endif
}
bool BbCameraSession::startVideoRecording()

View File

@@ -148,12 +148,10 @@ QVariant BbCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParame
return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_CBYCRY:
return QVideoFrame::Format_Invalid;
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_FRAMETYPE_COMPRESSEDVIDEO:
return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_COMPRESSEDAUDIO:
return QVideoFrame::Format_Invalid;
#endif
default:
return QVideoFrame::Format_Invalid;
}

View File

@@ -48,8 +48,5 @@ SOURCES += \
$$PWD/bbvideodeviceselectorcontrol.cpp \
$$PWD/bbvideorenderercontrol.cpp
LIBS += -lcamapi
LIBS += -lcamapi -laudio_manager
!blackberry-playbook {
LIBS += -laudio_manager
}

View File

@@ -127,30 +127,6 @@ void WindowGrabber::start()
int result = 0;
#ifdef Q_OS_BLACKBERRY_TABLET
// HACK: On the Playbook, screen_read_window() will fail for invisible windows.
// To workaround this, make the window visible again, but set a global
// alpha of less than 255. The global alpha makes the window completely invisible
// (due to a bug?), but screen_read_window() will work again.
errno = 0;
int val = 200; // anything less than 255
result = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_GLOBAL_ALPHA, &val);
if (result != 0) {
qWarning() << "WindowGrabber: unable to set global alpha:" << strerror(errno);
return;
}
errno = 0;
val = 1;
result = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_VISIBLE, &val);
if (result != 0) {
qWarning() << "WindowGrabber: unable to make window visible:" << strerror(errno);
return;
}
#endif
result = screen_create_context(&m_screenContext, SCREEN_APPLICATION_CONTEXT);
if (result != 0) {
qWarning() << "WindowGrabber: cannot create screen context:" << strerror(errno);

View File

@@ -454,7 +454,9 @@ QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)
pPropBag->Release();
pMoniker->Release();
}
pEnum->Release();
}
pDevEnum->Release();
}
CoUninitialize();
#else // Q_OS_WINCE

View File

@@ -430,7 +430,7 @@ void QWindowsAudioInput::initMixer()
return;
mixerID = (HMIXEROBJ)mixerIntID;
// Get the Destination (Recording) Line Infomation
// Get the Destination (Recording) Line Information
MIXERLINE mixerLine;
mixerLine.cbStruct = sizeof(MIXERLINE);
mixerLine.dwComponentType = MIXERLINE_COMPONENTTYPE_DST_WAVEIN;

View File

@@ -1296,7 +1296,7 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
// (which might be earlier than the last decoded key frame)
resetPosition = true;
} else if (cmdNow == CmdPause) {
// If paused, dont reset the position until we resume, otherwise
// If paused, don't reset the position until we resume, otherwise
// a new frame will be rendered
m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
m_request.setCommand(CmdSeekResume);