Merge remote-tracking branch 'origin/stable' into dev

Change-Id: I7dc36a224702721b67ffa2cf7f9ce2a6b047f88e
Frederik Gladhorn, 2014-01-10 18:30:14 +01:00
20 changed files with 267 additions and 24 deletions

View File

@@ -574,7 +574,7 @@ QRectF QDeclarativeVideoOutput::mapNormalizedRectToItem(const QRectF &rectangle)
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSource(const QPointF &point) const
Given a point \a point in item coordinates, return the
corresponding point in source coordinates. This mapping is
@@ -612,7 +612,7 @@ QRectF QDeclarativeVideoOutput::mapRectToSource(const QRectF &rectangle) const
}
/*!
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItemNormalized(const QPointF &point) const
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSourceNormalized(const QPointF &point) const
Given a point \a point in item coordinates, return the
corresponding point in normalized source coordinates. This mapping is
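To make the mapping concrete: a simplified, hypothetical helper (not part of the Qt API, and ignoring source orientation and mirroring) that takes an item-space point to normalized source coordinates might look like this:

#include <QPointF>
#include <QRectF>

// Hypothetical illustration only: map a point given in item coordinates to
// normalized (0..1) coordinates relative to the rectangle that the video
// content occupies inside the item. The real QDeclarativeVideoOutput code
// additionally accounts for orientation and mirroring of the source.
static QPointF mapPointToSourceNormalizedSketch(const QPointF &itemPoint,
                                                const QRectF &contentRect)
{
    if (contentRect.isEmpty())
        return QPointF();
    return QPointF((itemPoint.x() - contentRect.left()) / contentRect.width(),
                   (itemPoint.y() - contentRect.top()) / contentRect.height());
}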

View File

@@ -127,7 +127,7 @@ public:
order, channel, codec, frequency, sample rate, and sample type. A
format is represented by the QAudioFormat class.
The values supported by the the device for each of these
The values supported by the device for each of these
parameters can be fetched with
supportedByteOrders(), supportedChannelCounts(), supportedCodecs(),
supportedSampleRates(), supportedSampleSizes(), and
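A short example of querying these capabilities and falling back to the nearest supported format, using the standard QAudioDeviceInfo API:

#include <QAudioDeviceInfo>
#include <QAudioFormat>
#include <QDebug>

void pickPlaybackFormat()
{
    QAudioDeviceInfo device = QAudioDeviceInfo::defaultOutputDevice();

    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(2);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setByteOrder(QAudioFormat::LittleEndian);
    format.setSampleType(QAudioFormat::SignedInt);

    // Fall back to the closest supported format if the requested one
    // is not available on this device.
    if (!device.isFormatSupported(format))
        format = device.nearestFormat(format);

    qDebug() << "Supported sample rates:" << device.supportedSampleRates();
    qDebug() << "Using sample rate:" << format.sampleRate();
}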

View File

@@ -183,16 +183,17 @@ private Q_SLOTS:
lock();
m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtPulseAudio:%1")).arg(::getpid()).toLatin1().constData());
pa_context_set_state_callback(m_context, context_state_callback, this);
if (m_context == 0) {
qWarning("PulseAudioService: Unable to create new pulseaudio context");
pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
onContextFailed();
return;
}
pa_context_set_state_callback(m_context, context_state_callback, this);
if (pa_context_connect(m_context, 0, (pa_context_flags_t)0, 0) < 0) {
qWarning("PulseAudioService: pa_context_connect() failed");
pa_context_unref(m_context);
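The reordering above makes sure the state callback is only registered once pa_context_new() is known to have succeeded. A minimal sketch of that corrected sequence, assuming a threaded mainloop that has already been created and started (error handling trimmed, not the actual Qt code):

#include <pulse/pulseaudio.h>
#include <stdio.h>

static void contextStateCallback(pa_context *, void *userdata)
{
    // Wake any thread waiting in pa_threaded_mainloop_wait().
    pa_threaded_mainloop_signal(static_cast<pa_threaded_mainloop *>(userdata), 0);
}

static pa_context *createContext(pa_threaded_mainloop *mainloop)
{
    pa_threaded_mainloop_lock(mainloop);

    pa_mainloop_api *api = pa_threaded_mainloop_get_api(mainloop);
    pa_context *context = pa_context_new(api, "QtPulseAudio");

    if (!context) {  // check the result before touching the context
        fprintf(stderr, "Unable to create new pulseaudio context\n");
        pa_threaded_mainloop_unlock(mainloop);
        return 0;
    }

    // Only register the callback once the context is known to be valid.
    pa_context_set_state_callback(context, contextStateCallback, mainloop);

    if (pa_context_connect(context, 0, (pa_context_flags_t)0, 0) < 0) {
        fprintf(stderr, "pa_context_connect() failed\n");
        pa_context_unref(context);
        pa_threaded_mainloop_unlock(mainloop);
        return 0;
    }

    pa_threaded_mainloop_unlock(mainloop);
    return context;
}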

View File

@@ -120,8 +120,8 @@ QCameraImageProcessingControl::~QCameraImageProcessingControl()
Returns true if the camera supports adjusting image processing \a parameter.
Usually the the supported settings is static,
but some parameter may not be available depending on other
Usually the supported setting is static,
but some parameters may not be available depending on other
camera settings, like presets.
In such case the currently supported parameters should be returned.
*/
@@ -129,7 +129,7 @@ QCameraImageProcessingControl::~QCameraImageProcessingControl()
/*!
\fn bool QCameraImageProcessingControl::isParameterValueSupported(ProcessingParameter parameter, const QVariant &value) const
Returns true if the camera supports settings the the image processing \a parameter \a value.
Returns true if the camera supports setting the image processing \a parameter \a value.
It's used only for parameters with a limited set of values, like WhiteBalancePreset.
*/
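A hedged sketch of how client code might use this control, obtained from the camera's media service; ColorTemperature and the numeric payload are chosen purely for illustration, and availability depends on the backend:

#include <QCamera>
#include <QCameraImageProcessingControl>
#include <QMediaService>
#include <QVariant>

void applyColorTemperature(QCamera *camera)
{
    QMediaService *service = camera->service();
    if (!service)
        return;

    QCameraImageProcessingControl *control =
            service->requestControl<QCameraImageProcessingControl *>();
    if (!control)
        return; // backend does not expose image processing

    // ColorTemperature is used here only as an example parameter; the exact
    // payload a backend expects is backend-defined. For discrete parameters
    // such as WhiteBalancePreset, isParameterValueSupported() should be
    // checked against each candidate value.
    const QVariant kelvin(5200);
    if (control->isParameterSupported(QCameraImageProcessingControl::ColorTemperature)
            && control->isParameterValueSupported(
                       QCameraImageProcessingControl::ColorTemperature, kelvin)) {
        control->setParameter(QCameraImageProcessingControl::ColorTemperature, kelvin);
    }

    service->releaseControl(control);
}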

View File

@@ -59,7 +59,7 @@ supported in QMediaPlayer as well. Playlists as sources are also supported.
mmrenderer does not allow access to the pixel data of video frames, hence Qt Multimedia
classes like QVideoFrame and QAbstractVideoSurface will not work since they require access
to the image data. QVideoWidget and the VideoOutput QML element are implemented with an overlay window;
mmrenderer creates a seperate window displaying a video and puts that on top of the Qt application.
mmrenderer creates a separate window displaying a video and puts that on top of the Qt application.
As a consequence, no other widget or QML element can be put on top of the video, and QML shaders have
no effect.

View File

@@ -125,7 +125,7 @@ changed in ways that may affect previously written code. This table highlights s
you will probably need to remove them.
\row
\li QSoundEffect availability
\li The SoundEffect QML type was publically accessible in Qt Multimeda Kit,
\li The SoundEffect QML type was publicly accessible in Qt Multimeda Kit,
and now the C++ version is officially public too. If your code contains the
previously undocumented QSoundEffect, you may need to update it.
\row

View File

@@ -88,7 +88,7 @@ namespace
like QMediaPlayer.
QMediaPlaylist allows to access the service intrinsic playlist functionality
if available, otherwise it provides the the local memory playlist implementation.
if available, otherwise it provides the local memory playlist implementation.
\snippet multimedia-snippets/media.cpp Movie playlist
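The referenced snippet is not included in this hunk; a comparable minimal example of driving QMediaPlayer through a QMediaPlaylist looks like this:

#include <QMediaPlayer>
#include <QMediaPlaylist>
#include <QUrl>

void playMovieList(QObject *parent)
{
    QMediaPlayer *player = new QMediaPlayer(parent);
    QMediaPlaylist *playlist = new QMediaPlaylist(player);

    // If the backend exposes a native playlist control it is used,
    // otherwise QMediaPlaylist falls back to its in-memory implementation.
    playlist->addMedia(QUrl::fromLocalFile("/path/to/clip1.mp4"));
    playlist->addMedia(QUrl::fromLocalFile("/path/to/clip2.mp4"));
    playlist->setPlaybackMode(QMediaPlaylist::Sequential);
    playlist->setCurrentIndex(0);

    player->setPlaylist(playlist);
    player->play();
}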

View File

@@ -78,7 +78,7 @@ namespace
a new hardware accelerated video system, for example.
The contents of a buffer can be accessed by mapping the buffer to memory using the map()
function, which returns a pointer to memory containing the contents of the the video buffer.
function, which returns a pointer to memory containing the contents of the video buffer.
The memory returned by map() is released by calling the unmap() function.
The handle() of a buffer may also be used to manipulate its contents using type specific APIs.
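To make the map()/unmap() contract concrete, a short example that reads pixel data through QVideoFrame, which wraps a QAbstractVideoBuffer:

#include <QVideoFrame>
#include <QAbstractVideoBuffer>
#include <QDebug>

void inspectFrame(QVideoFrame frame)
{
    // Map the underlying video buffer into addressable memory.
    if (!frame.map(QAbstractVideoBuffer::ReadOnly))
        return;

    const uchar *pixels = frame.bits();
    qDebug() << "mapped" << frame.mappedBytes() << "bytes,"
             << frame.bytesPerLine() << "bytes per line";

    // ... read pixel data from 'pixels' here ...
    Q_UNUSED(pixels);

    // Always release the mapping when finished.
    frame.unmap();
}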

View File

@@ -60,6 +60,7 @@ public class QtCamera implements Camera.ShutterCallback,
private int m_actualPreviewBuffer = 0;
private final ReentrantLock m_buffersLock = new ReentrantLock();
private boolean m_isReleased = false;
private boolean m_fetchEachFrame = false;
private static final String TAG = "Qt Camera";
@@ -141,6 +142,11 @@ public class QtCamera implements Camera.ShutterCallback,
}
}
public void fetchEachFrame(boolean fetch)
{
m_fetchEachFrame = fetch;
}
public void startPreview()
{
Camera.Size previewSize = m_camera.getParameters().getPreviewSize();
@@ -233,6 +239,10 @@ public class QtCamera implements Camera.ShutterCallback,
public void onPreviewFrame(byte[] data, Camera camera)
{
m_buffersLock.lock();
if (data != null && m_fetchEachFrame)
notifyFrameFetched(m_cameraId, data);
if (data == m_cameraPreviewFirstBuffer)
m_actualPreviewBuffer = 1;
else if (data == m_cameraPreviewSecondBuffer)
@@ -252,4 +262,5 @@ public class QtCamera implements Camera.ShutterCallback,
private static native void notifyAutoFocusComplete(int id, boolean success);
private static native void notifyPictureExposed(int id);
private static native void notifyPictureCaptured(int id, byte[] data);
private static native void notifyFrameFetched(int id, byte[] data);
}
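These Java-side hooks feed the Android video probe control added later in this commit. A minimal sketch of how application code could consume the probed frames through the public QVideoProbe API, assuming the backend reports probe support:

#include <QCamera>
#include <QVideoProbe>
#include <QVideoFrame>
#include <QDebug>

// Returns the probe (parented to the camera) or 0 if the backend exposes no
// video probe control for this media object.
QVideoProbe *attachFrameLogger(QCamera *camera)
{
    QVideoProbe *probe = new QVideoProbe(camera);
    if (!probe->setSource(camera)) {   // false when probing is unsupported
        delete probe;
        return 0;
    }
    QObject::connect(probe, &QVideoProbe::videoFrameProbed,
                     [](const QVideoFrame &frame) {
        qDebug() << "probed a frame of size" << frame.size();
    });
    return probe;
}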

View File

@@ -21,7 +21,8 @@ SOURCES += \
$$PWD/qandroidaudioencodersettingscontrol.cpp \
$$PWD/qandroidmediacontainercontrol.cpp \
$$PWD/qandroidvideoencodersettingscontrol.cpp \
$$PWD/qandroidaudioinputselectorcontrol.cpp
$$PWD/qandroidaudioinputselectorcontrol.cpp \
$$PWD/qandroidmediavideoprobecontrol.cpp
HEADERS += \
$$PWD/qandroidcaptureservice.h \
@@ -44,4 +45,5 @@ HEADERS += \
$$PWD/qandroidaudioencodersettingscontrol.h \
$$PWD/qandroidmediacontainercontrol.h \
$$PWD/qandroidvideoencodersettingscontrol.h \
$$PWD/qandroidaudioinputselectorcontrol.h
$$PWD/qandroidaudioinputselectorcontrol.h \
$$PWD/qandroidmediavideoprobecontrol.h

View File

@@ -44,11 +44,13 @@
#include "jcamera.h"
#include "jmultimediautils.h"
#include "qandroidvideooutput.h"
#include "qandroidmediavideoprobecontrol.h"
#include "qandroidmultimediautils.h"
#include <QtConcurrent/qtconcurrentrun.h>
#include <qfile.h>
#include <qguiapplication.h>
#include <qdebug.h>
#include <qvideoframe.h>
QT_BEGIN_NAMESPACE
@@ -183,6 +185,9 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(previewFetched(QByteArray)), this, SLOT(onCameraPreviewFetched(QByteArray)));
connect(m_camera, SIGNAL(frameFetched(QByteArray)),
this, SLOT(onCameraFrameFetched(QByteArray)),
Qt::DirectConnection);
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
connect(m_camera, SIGNAL(previewStopped()), this, SLOT(onCameraPreviewStopped()));
@@ -200,6 +205,8 @@ bool QAndroidCameraSession::open()
if (m_camera->getPreviewFormat() != JCamera::NV21)
m_camera->setPreviewFormat(JCamera::NV21);
m_camera->fetchEachFrame(m_videoProbes.count());
emit opened();
} else {
m_status = QCamera::UnavailableStatus;
@@ -364,6 +371,25 @@ int QAndroidCameraSession::currentCameraRotation() const
return rotation;
}
void QAndroidCameraSession::addProbe(QAndroidMediaVideoProbeControl *probe)
{
m_videoProbesMutex.lock();
if (probe)
m_videoProbes << probe;
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
void QAndroidCameraSession::removeProbe(QAndroidMediaVideoProbeControl *probe)
{
m_videoProbesMutex.lock();
m_videoProbes.remove(probe);
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
void QAndroidCameraSession::applyImageSettings()
{
if (!m_camera || !m_imageSettingsDirty)
@@ -513,6 +539,19 @@ void QAndroidCameraSession::onCameraPreviewFetched(const QByteArray &preview)
}
}
void QAndroidCameraSession::onCameraFrameFetched(const QByteArray &frame)
{
m_videoProbesMutex.lock();
if (frame.size() && m_videoProbes.count()) {
QVideoFrame videoFrame(new DataVideoBuffer(frame),
m_camera->previewSize(),
QVideoFrame::Format_NV21);
foreach (QAndroidMediaVideoProbeControl *probe, m_videoProbes)
probe->newFrameProbed(videoFrame);
}
m_videoProbesMutex.unlock();
}
void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
{
if (!m_captureCanceled) {
@@ -592,11 +631,16 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int rotation)
{
emit imageCaptured(id, prepareImageFromPreviewData(data, rotation));
}
QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int rotation)
{
QSize frameSize = m_camera->previewSize();
QImage preview(frameSize, QImage::Format_ARGB32);
QImage result(frameSize, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)preview.bits(),
(quint32 *)result.bits(),
frameSize.width(),
frameSize.height());
@@ -610,9 +654,9 @@ void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data,
transform.rotate(rotation);
preview = preview.transformed(transform);
result = result.transformed(transform);
emit imageCaptured(id, preview);
return result;
}
void QAndroidCameraSession::onVideoOutputReady(bool ready)
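The preview path above converts the camera's NV21 data with the internal qt_convert_NV21_to_ARGB32() helper. NV21 stores a full-resolution Y plane followed by a half-resolution interleaved V/U plane; an illustrative, unoptimized conversion using the common BT.601 video-range coefficients (not Qt's actual implementation) could look like this:

#include <QtGlobal>

// Illustration only, assuming even width/height and no row padding. Qt's real
// qt_convert_NV21_to_ARGB32() is optimized; this sketch just shows the pixel
// layout and the BT.601 math.
static inline quint32 yuvToArgb(int y, int u, int v)
{
    const int c = y - 16, d = u - 128, e = v - 128;
    const int r = qBound(0, (298 * c + 409 * e + 128) >> 8, 255);
    const int g = qBound(0, (298 * c - 100 * d - 208 * e + 128) >> 8, 255);
    const int b = qBound(0, (298 * c + 516 * d + 128) >> 8, 255);
    return 0xff000000u | (r << 16) | (g << 8) | b;
}

static void convertNV21ToARGB32(const uchar *nv21, quint32 *argb,
                                int width, int height)
{
    const uchar *yPlane = nv21;                   // width * height luma bytes
    const uchar *vuPlane = nv21 + width * height; // interleaved V,U pairs

    for (int row = 0; row < height; ++row) {
        const uchar *vu = vuPlane + (row / 2) * width;
        for (int col = 0; col < width; ++col) {
            const int y = yPlane[row * width + col];
            const int v = vu[(col & ~1)];         // V comes first in NV21
            const int u = vu[(col & ~1) + 1];
            argb[row * width + col] = yuvToArgb(y, u, v);
        }
    }
}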

View File

@@ -45,12 +45,15 @@
#include <qcamera.h>
#include <qmediaencodersettings.h>
#include <QCameraImageCapture>
#include <QSet>
#include <QMutex>
#include "qandroidmediastoragelocation.h"
QT_BEGIN_NAMESPACE
class JCamera;
class QAndroidVideoOutput;
class QAndroidMediaVideoProbeControl;
class QAndroidCameraSession : public QObject
{
@@ -90,6 +93,9 @@ public:
int currentCameraRotation() const;
void addProbe(QAndroidMediaVideoProbeControl *probe);
void removeProbe(QAndroidMediaVideoProbeControl *probe);
Q_SIGNALS:
void statusChanged(QCamera::Status status);
void stateChanged(QCamera::State);
@@ -114,6 +120,7 @@ private Q_SLOTS:
void onCameraPictureExposed();
void onCameraPreviewFetched(const QByteArray &preview);
void onCameraFrameFetched(const QByteArray &frame);
void onCameraPictureCaptured(const QByteArray &data);
void onCameraPreviewStarted();
void onCameraPreviewStopped();
@@ -127,6 +134,7 @@ private:
void applyImageSettings();
void processPreviewImage(int id, const QByteArray &data, int rotation);
QImage prepareImageFromPreviewData(const QByteArray &data, int rotation);
void processCapturedImage(int id,
const QByteArray &data,
const QSize &resolution,
@@ -156,6 +164,9 @@ private:
QString m_currentImageCaptureFileName;
QAndroidMediaStorageLocation m_mediaStorageLocation;
QSet<QAndroidMediaVideoProbeControl *> m_videoProbes;
QMutex m_videoProbesMutex;
};
QT_END_NAMESPACE

View File

@@ -61,6 +61,7 @@
#include "qandroidaudioencodersettingscontrol.h"
#include "qandroidvideoencodersettingscontrol.h"
#include "qandroidmediacontainercontrol.h"
#include "qandroidmediavideoprobecontrol.h"
#include <qmediaserviceproviderplugin.h>
@@ -201,16 +202,37 @@ QMediaControl *QAndroidCaptureService::requestControl(const char *name)
return m_videoRendererControl;
}
if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) {
QAndroidMediaVideoProbeControl *videoProbe = 0;
if (m_cameraSession) {
videoProbe = new QAndroidMediaVideoProbeControl(this);
m_cameraSession->addProbe(videoProbe);
}
return videoProbe;
}
return 0;
}
void QAndroidCaptureService::releaseControl(QMediaControl *control)
{
if (control && control == m_videoRendererControl) {
m_cameraSession->setVideoPreview(0);
delete m_videoRendererControl;
m_videoRendererControl = 0;
if (control) {
if (control == m_videoRendererControl) {
m_cameraSession->setVideoPreview(0);
delete m_videoRendererControl;
m_videoRendererControl = 0;
return;
}
QAndroidMediaVideoProbeControl *videoProbe = qobject_cast<QAndroidMediaVideoProbeControl *>(control);
if (videoProbe) {
if (m_cameraSession)
m_cameraSession->removeProbe(videoProbe);
delete videoProbe;
return;
}
}
}
QT_END_NAMESPACE
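This is the control-level counterpart of what the public QVideoProbe class does internally. A sketch of requesting and releasing the probe control directly from a media service; handleFrame() is an assumed slot on the receiver, not an API from this diff:

#include <QCamera>
#include <QMediaService>
#include <QMediaVideoProbeControl>

// Sketch of the control-level request/release pattern.
void attachProbeControl(QCamera *camera, QObject *receiver)
{
    QMediaService *service = camera->service();
    if (!service)
        return;

    // requestControl() returns 0 when the backend does not implement
    // QMediaVideoProbeControl (as Android did not before this commit).
    QMediaVideoProbeControl *probe =
            service->requestControl<QMediaVideoProbeControl *>();
    if (!probe)
        return;

    QObject::connect(probe, SIGNAL(videoFrameProbed(QVideoFrame)),
                     receiver, SLOT(handleFrame(QVideoFrame)));

    // When the receiver is done with the frames:
    // service->releaseControl(probe);
}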

View File

@@ -0,0 +1,59 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2013 Integrated Computer Solutions, Inc
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidmediavideoprobecontrol.h"
#include <qvideoframe.h>
QAndroidMediaVideoProbeControl::QAndroidMediaVideoProbeControl(QObject *parent) :
QMediaVideoProbeControl(parent)
{
}
QAndroidMediaVideoProbeControl::~QAndroidMediaVideoProbeControl()
{
}
void QAndroidMediaVideoProbeControl::newFrameProbed(const QVideoFrame &frame)
{
emit videoFrameProbed(frame);
}

View File

@@ -0,0 +1,59 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2013 Integrated Computer Solutions, Inc
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDMEDIAVIDEOPROBECONTROL_H
#define QANDROIDMEDIAVIDEOPROBECONTROL_H
#include <qmediavideoprobecontrol.h>
class QAndroidMediaVideoProbeControl : public QMediaVideoProbeControl
{
Q_OBJECT
public:
explicit QAndroidMediaVideoProbeControl(QObject *parent = 0);
virtual ~QAndroidMediaVideoProbeControl();
void newFrameProbed(const QVideoFrame& frame);
};
#endif // QANDROIDMEDIAVIDEOPROBECONTROL_H

View File

@@ -111,6 +111,21 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
}
}
static void notifyFrameFetched(JNIEnv *env, jobject, int id, jbyteArray data)
{
g_objectMapMutex.lock();
JCamera *obj = g_objectMap.value(id, 0);
g_objectMapMutex.unlock();
if (obj) {
QByteArray bytes;
int arrayLength = env->GetArrayLength(data);
bytes.resize(arrayLength);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
Q_EMIT obj->frameFetched(bytes);
}
}
class JCameraInstantiator : public QObject
{
Q_OBJECT
@@ -201,6 +216,7 @@ class JCameraWorker : public QObject, public QJNIObjectPrivate
Q_INVOKABLE void startPreview();
Q_INVOKABLE void stopPreview();
Q_INVOKABLE void fetchEachFrame(bool fetch);
Q_INVOKABLE void fetchLastPreviewFrame();
Q_INVOKABLE void applyParameters();
@@ -571,6 +587,11 @@ void JCamera::takePicture()
QMetaObject::invokeMethod(d, "callVoidMethod", Q_ARG(QByteArray, "takePicture"));
}
void JCamera::fetchEachFrame(bool fetch)
{
QMetaObject::invokeMethod(d, "fetchEachFrame", Q_ARG(bool, fetch));
}
void JCamera::fetchLastPreviewFrame()
{
QMetaObject::invokeMethod(d, "fetchLastPreviewFrame");
@@ -1165,6 +1186,11 @@ void JCameraWorker::stopPreview()
emit previewStopped();
}
void JCameraWorker::fetchEachFrame(bool fetch)
{
callMethod<void>("fetchEachFrame", "(Z)V", fetch);
}
void JCameraWorker::fetchLastPreviewFrame()
{
QJNIEnvironmentPrivate env;
@@ -1224,7 +1250,8 @@ void JCameraWorker::callVoidMethod(const QByteArray &methodName)
static JNINativeMethod methods[] = {
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
{"notifyFrameFetched", "(I[B)V", (void *)notifyFrameFetched}
};
bool JCamera::initJNI(JNIEnv *env)
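The body of initJNI() is not part of this hunk; a table like methods[] above is normally handed to the JVM with RegisterNatives. A sketch of that pattern, where the Java class path is an assumption rather than something taken from this diff:

#include <jni.h>

// Sketch only: how a JNINativeMethod table like methods[] above is usually
// registered with the VM. The Java class path below is an assumption.
static bool registerQtCameraNatives(JNIEnv *env)
{
    jclass clazz = env->FindClass(
            "org/qtproject/qt5/android/multimedia/QtCamera");
    if (!clazz)
        return false;

    const jint count = sizeof(methods) / sizeof(methods[0]);
    return env->RegisterNatives(clazz, methods, count) == JNI_OK;
}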

View File

@@ -155,6 +155,7 @@ public:
void takePicture();
void fetchEachFrame(bool fetch);
void fetchLastPreviewFrame();
QJNIObjectPrivate getCameraObject();
@@ -173,6 +174,7 @@ Q_SIGNALS:
void pictureExposed();
void pictureCaptured(const QByteArray &data);
void previewFetched(const QByteArray &preview);
void frameFetched(const QByteArray &frame);
private:
JCamera(int cameraId, jobject cam, QThread *workerThread);

View File

@@ -98,11 +98,14 @@ QMediaRecorder::Status QGstreamerRecorderControl::status() const
switch ( m_session->state() ) {
case QGstreamerCaptureSession::RecordingState:
sessionState = QMediaRecorder::RecordingState;
break;
case QGstreamerCaptureSession::PausedState:
sessionState = QMediaRecorder::PausedState;
break;
case QGstreamerCaptureSession::PreviewState:
case QGstreamerCaptureSession::StoppedState:
sessionState = QMediaRecorder::StoppedState;
break;
}
return statusTable[m_state][sessionState];
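The returned status comes from a two-dimensional lookup keyed by the requested recorder state and the mapped session state. A purely hypothetical illustration of that table shape (the values are invented and do not reproduce the GStreamer backend's actual contents):

#include <QMediaRecorder>

// Hypothetical illustration of a lookup table of the kind used above: rows are
// the requested QMediaRecorder::State, columns the state reported by the
// capture session, values are QMediaRecorder::Status. Entries are invented.
static const QMediaRecorder::Status exampleStatusTable[3][3] = {
    // session:         Stopped                          Recording                         Paused
    /* StoppedState  */ { QMediaRecorder::LoadedStatus,   QMediaRecorder::FinalizingStatus, QMediaRecorder::FinalizingStatus },
    /* RecordingState*/ { QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus,  QMediaRecorder::PausedStatus },
    /* PausedState   */ { QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus,  QMediaRecorder::PausedStatus }
};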

View File

@@ -530,7 +530,7 @@ void MmRendererMediaPlayerControl::play()
}
setPositionInternal(m_position);
setVolumeInternal(m_volume);
setVolumeInternal(m_muted ? 0 : m_volume);
setPlaybackRateInternal(m_rate);
if (mmr_play(m_context) != 0) {