Merge remote-tracking branch 'origin/stable' into dev

Conflicts:
	src/plugins/blackberry/camera/bbcamerasession.cpp

Change-Id: I7c86e10140ab86fd2a07e2f034dec38ae9112559
Frederik Gladhorn
2013-10-11 16:18:46 +02:00
96 changed files with 2113 additions and 1286 deletions


@@ -0,0 +1,50 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <ShlObj.h>
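// Compile test: builds only if ShlObj.h declares IShellItem2 and IPropertyStore.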
int main(int, char**)
{
    IShellItem2 *item;
    IPropertyStore *store;
    return 0;
}


@@ -0,0 +1,4 @@
CONFIG -= qt
CONFIG += console
SOURCES += main.cpp


@@ -45,6 +45,7 @@
 #include <QMediaRecorder>
 #include "audiorecorder.h"
+#include "qaudiolevel.h"
 #if defined(Q_WS_MAEMO_6)
 #include "ui_audiorecorder_small.h"
@@ -53,10 +54,10 @@
 #endif
 static qreal getPeakValue(const QAudioFormat &format);
-static qreal getBufferLevel(const QAudioBuffer &buffer);
+static QVector<qreal> getBufferLevels(const QAudioBuffer &buffer);
 template <class T>
-static qreal getBufferLevel(const T *buffer, int samples);
+static QVector<qreal> getBufferLevels(const T *buffer, int frames, int channels);
 AudioRecorder::AudioRecorder(QWidget *parent) :
     QMainWindow(parent),
@@ -67,7 +68,8 @@ AudioRecorder::AudioRecorder(QWidget *parent) :
     audioRecorder = new QAudioRecorder(this);
     probe = new QAudioProbe;
-    connect(probe, SIGNAL(audioBufferProbed(QAudioBuffer)), this, SLOT(processBuffer(QAudioBuffer)));
+    connect(probe, SIGNAL(audioBufferProbed(QAudioBuffer)),
+            this, SLOT(processBuffer(QAudioBuffer)));
     probe->setSource(audioRecorder);
     //audio devices
@@ -88,27 +90,34 @@ AudioRecorder::AudioRecorder(QWidget *parent) :
         ui->containerBox->addItem(containerName, QVariant(containerName));
     }
-    //sample rate:
+    //sample rate
     ui->sampleRateBox->addItem(tr("Default"), QVariant(0));
     foreach (int sampleRate, audioRecorder->supportedAudioSampleRates()) {
         ui->sampleRateBox->addItem(QString::number(sampleRate), QVariant(
                 sampleRate));
     }
+    //channels
+    ui->channelsBox->addItem(tr("Default"), QVariant(-1));
+    ui->channelsBox->addItem(QStringLiteral("1"), QVariant(1));
+    ui->channelsBox->addItem(QStringLiteral("2"), QVariant(2));
+    ui->channelsBox->addItem(QStringLiteral("4"), QVariant(4));
+    //quality
     ui->qualitySlider->setRange(0, int(QMultimedia::VeryHighQuality));
     ui->qualitySlider->setValue(int(QMultimedia::NormalQuality));
     //bitrates:
-    ui->bitrateBox->addItem(QString("Default"), QVariant(0));
-    ui->bitrateBox->addItem(QString("32000"), QVariant(32000));
-    ui->bitrateBox->addItem(QString("64000"), QVariant(64000));
-    ui->bitrateBox->addItem(QString("96000"), QVariant(96000));
-    ui->bitrateBox->addItem(QString("128000"), QVariant(128000));
+    ui->bitrateBox->addItem(tr("Default"), QVariant(0));
+    ui->bitrateBox->addItem(QStringLiteral("32000"), QVariant(32000));
+    ui->bitrateBox->addItem(QStringLiteral("64000"), QVariant(64000));
+    ui->bitrateBox->addItem(QStringLiteral("96000"), QVariant(96000));
+    ui->bitrateBox->addItem(QStringLiteral("128000"), QVariant(128000));
     connect(audioRecorder, SIGNAL(durationChanged(qint64)), this,
             SLOT(updateProgress(qint64)));
-    connect(audioRecorder, SIGNAL(stateChanged(QMediaRecorder::State)), this,
-            SLOT(updateState(QMediaRecorder::State)));
+    connect(audioRecorder, SIGNAL(statusChanged(QMediaRecorder::Status)), this,
+            SLOT(updateStatus(QMediaRecorder::Status)));
     connect(audioRecorder, SIGNAL(error(QMediaRecorder::Error)), this,
             SLOT(displayErrorMessage()));
 }
@@ -127,12 +136,22 @@ void AudioRecorder::updateProgress(qint64 duration)
     ui->statusbar->showMessage(tr("Recorded %1 sec").arg(duration / 1000));
 }
-void AudioRecorder::updateState(QMediaRecorder::State state)
+void AudioRecorder::updateStatus(QMediaRecorder::Status status)
 {
     QString statusMessage;
-    switch (state) {
-    case QMediaRecorder::RecordingState:
+    switch (status) {
+    case QMediaRecorder::RecordingStatus:
+        if (audioLevels.count() != audioRecorder->audioSettings().channelCount()) {
+            qDeleteAll(audioLevels);
+            audioLevels.clear();
+            for (int i = 0; i < audioRecorder->audioSettings().channelCount(); ++i) {
+                QAudioLevel *level = new QAudioLevel(ui->centralwidget);
+                audioLevels.append(level);
+                ui->levelsLayout->addWidget(level);
+            }
+        }
         ui->recordButton->setText(tr("Stop"));
         ui->pauseButton->setText(tr("Pause"));
         if (audioRecorder->outputLocation().isEmpty())
@@ -141,18 +160,23 @@ void AudioRecorder::updateState(QMediaRecorder::State state)
             statusMessage = tr("Recording to %1").arg(
                     audioRecorder->outputLocation().toString());
         break;
-    case QMediaRecorder::PausedState:
+    case QMediaRecorder::PausedStatus:
+        clearAudioLevels();
         ui->recordButton->setText(tr("Stop"));
         ui->pauseButton->setText(tr("Resume"));
         statusMessage = tr("Paused");
         break;
-    case QMediaRecorder::StoppedState:
+    case QMediaRecorder::UnloadedStatus:
+        clearAudioLevels();
         ui->recordButton->setText(tr("Record"));
         ui->pauseButton->setText(tr("Pause"));
         statusMessage = tr("Stopped");
+    default:
+        break;
     }
-    ui->pauseButton->setEnabled(state != QMediaRecorder::StoppedState);
+    ui->pauseButton->setEnabled(audioRecorder->state()
+                                != QMediaRecorder::StoppedState);
     if (audioRecorder->error() == QMediaRecorder::NoError)
         ui->statusbar->showMessage(statusMessage);
@@ -176,6 +200,7 @@ void AudioRecorder::toggleRecord()
         settings.setCodec(boxValue(ui->audioCodecBox).toString());
         settings.setSampleRate(boxValue(ui->sampleRateBox).toInt());
         settings.setBitRate(boxValue(ui->bitrateBox).toInt());
+        settings.setChannelCount(boxValue(ui->channelsBox).toInt());
         settings.setQuality(QMultimedia::EncodingQuality(ui->qualitySlider->value()));
         settings.setEncodingMode(ui->constantQualityRadioButton->isChecked() ?
                                  QMultimedia::ConstantQualityEncoding :
@@ -202,7 +227,7 @@ void AudioRecorder::togglePause()
 void AudioRecorder::setOutputLocation()
 {
     QString fileName = QFileDialog::getSaveFileName();
-    audioRecorder->setOutputLocation(QUrl(fileName));
+    audioRecorder->setOutputLocation(QUrl::fromLocalFile(fileName));
     outputLocationSet = true;
 }
@@ -211,88 +236,121 @@ void AudioRecorder::displayErrorMessage()
     ui->statusbar->showMessage(audioRecorder->errorString());
 }
+void AudioRecorder::clearAudioLevels()
+{
+    for (int i = 0; i < audioLevels.size(); ++i)
+        audioLevels.at(i)->setLevel(0);
+}
 // This function returns the maximum possible sample value for a given audio format
 qreal getPeakValue(const QAudioFormat& format)
 {
     // Note: Only the most common sample formats are supported
     if (!format.isValid())
-        return 0.0;
+        return qreal(0);
     if (format.codec() != "audio/pcm")
-        return 0.0;
+        return qreal(0);
     switch (format.sampleType()) {
     case QAudioFormat::Unknown:
         break;
     case QAudioFormat::Float:
         if (format.sampleSize() != 32) // other sample formats are not supported
-            return 0.0;
-        return 1.00003;
+            return qreal(0);
+        return qreal(1.00003);
     case QAudioFormat::SignedInt:
         if (format.sampleSize() == 32)
-            return 2147483648.0;
+            return qreal(INT_MAX);
         if (format.sampleSize() == 16)
-            return 32768.0;
+            return qreal(SHRT_MAX);
         if (format.sampleSize() == 8)
-            return 128.0;
+            return qreal(CHAR_MAX);
         break;
     case QAudioFormat::UnSignedInt:
-        // Unsigned formats are not supported in this example
+        if (format.sampleSize() == 32)
+            return qreal(UINT_MAX);
+        if (format.sampleSize() == 16)
+            return qreal(USHRT_MAX);
+        if (format.sampleSize() == 8)
+            return qreal(UCHAR_MAX);
         break;
     }
-    return 0.0;
+    return qreal(0);
 }
-qreal getBufferLevel(const QAudioBuffer& buffer)
+// returns the audio level for each channel
+QVector<qreal> getBufferLevels(const QAudioBuffer& buffer)
 {
+    QVector<qreal> values;
     if (!buffer.format().isValid() || buffer.format().byteOrder() != QAudioFormat::LittleEndian)
-        return 0.0;
+        return values;
     if (buffer.format().codec() != "audio/pcm")
-        return 0.0;
+        return values;
+    int channelCount = buffer.format().channelCount();
+    values.fill(0, channelCount);
     qreal peak_value = getPeakValue(buffer.format());
-    if (qFuzzyCompare(peak_value, 0.0))
-        return 0.0;
+    if (qFuzzyCompare(peak_value, qreal(0)))
+        return values;
     switch (buffer.format().sampleType()) {
     case QAudioFormat::Unknown:
     case QAudioFormat::UnSignedInt:
+        if (buffer.format().sampleSize() == 32)
+            values = getBufferLevels(buffer.constData<quint32>(), buffer.frameCount(), channelCount);
+        if (buffer.format().sampleSize() == 16)
+            values = getBufferLevels(buffer.constData<quint16>(), buffer.frameCount(), channelCount);
+        if (buffer.format().sampleSize() == 8)
+            values = getBufferLevels(buffer.constData<quint8>(), buffer.frameCount(), channelCount);
+        for (int i = 0; i < values.size(); ++i)
+            values[i] = qAbs(values.at(i) - peak_value / 2) / (peak_value / 2);
         break;
     case QAudioFormat::Float:
-        if (buffer.format().sampleSize() == 32)
-            return getBufferLevel(buffer.constData<float>(), buffer.sampleCount()) / peak_value;
+        if (buffer.format().sampleSize() == 32) {
+            values = getBufferLevels(buffer.constData<float>(), buffer.frameCount(), channelCount);
+            for (int i = 0; i < values.size(); ++i)
+                values[i] /= peak_value;
+        }
         break;
     case QAudioFormat::SignedInt:
         if (buffer.format().sampleSize() == 32)
-            return getBufferLevel(buffer.constData<long int>(), buffer.sampleCount()) / peak_value;
+            values = getBufferLevels(buffer.constData<qint32>(), buffer.frameCount(), channelCount);
         if (buffer.format().sampleSize() == 16)
-            return getBufferLevel(buffer.constData<short int>(), buffer.sampleCount()) / peak_value;
+            values = getBufferLevels(buffer.constData<qint16>(), buffer.frameCount(), channelCount);
        if (buffer.format().sampleSize() == 8)
-            return getBufferLevel(buffer.constData<signed char>(), buffer.sampleCount()) / peak_value;
+            values = getBufferLevels(buffer.constData<qint8>(), buffer.frameCount(), channelCount);
+        for (int i = 0; i < values.size(); ++i)
+            values[i] /= peak_value;
         break;
     }
-    return 0.0;
+    return values;
 }
 template <class T>
-qreal getBufferLevel(const T *buffer, int samples)
+QVector<qreal> getBufferLevels(const T *buffer, int frames, int channels)
 {
-    qreal max_value = 0.0;
+    QVector<qreal> max_values;
+    max_values.fill(0, channels);
-    for (int i = 0; i < samples; ++i) {
-        qreal value = qAbs(qreal(buffer[i]));
-        if (value > max_value)
-            max_value = value;
+    for (int i = 0; i < frames; ++i) {
+        for (int j = 0; j < channels; ++j) {
+            qreal value = qAbs(qreal(buffer[i * channels + j]));
+            if (value > max_values.at(j))
+                max_values.replace(j, value);
+        }
     }
-    return max_value;
+    return max_values;
 }
 void AudioRecorder::processBuffer(const QAudioBuffer& buffer)
 {
-    qreal level = getBufferLevel(buffer);
-    ui->audioLevel->setLevel(level);
+    QVector<qreal> levels = getBufferLevels(buffer);
+    for (int i = 0; i < levels.count(); ++i)
+        audioLevels.at(i)->setLevel(levels.at(i));
 }
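For reference, a minimal standalone sketch of the per-channel peak scan that the new getBufferLevels() template performs on an interleaved buffer; the function name and test data below are illustrative only and are not part of this commit:

#include <QVector>
#include <QDebug>
#include <QtGlobal>

// Illustrative per-channel peak scan over interleaved frames, mirroring
// the approach of the new getBufferLevels() template in this commit.
template <class T>
QVector<qreal> peakPerChannel(const T *buffer, int frames, int channels)
{
    QVector<qreal> maxValues;
    maxValues.fill(0, channels);
    for (int i = 0; i < frames; ++i) {
        for (int j = 0; j < channels; ++j) {
            // Frame i, channel j lives at index i * channels + j.
            qreal value = qAbs(qreal(buffer[i * channels + j]));
            if (value > maxValues.at(j))
                maxValues.replace(j, value);
        }
    }
    return maxValues;
}

int main()
{
    // Two interleaved stereo frames: L, R, L, R
    const qint16 samples[] = { 1000, -20000, -3000, 150 };
    qDebug() << peakPerChannel(samples, 2, 2); // prints QVector(3000, 20000)
    return 0;
}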


@@ -52,6 +52,8 @@ class QAudioProbe;
 class QAudioBuffer;
 QT_END_NAMESPACE
+class QAudioLevel;
 class AudioRecorder : public QMainWindow
 {
     Q_OBJECT
@@ -68,15 +70,18 @@ private slots:
     void togglePause();
     void toggleRecord();
-    void updateState(QMediaRecorder::State);
+    void updateStatus(QMediaRecorder::Status);
     void updateProgress(qint64 pos);
     void displayErrorMessage();
 private:
+    void clearAudioLevels();
     Ui::AudioRecorder *ui;
     QAudioRecorder *audioRecorder;
     QAudioProbe *probe;
+    QList<QAudioLevel*> audioLevels;
     bool outputLocationSet;
 };


@@ -17,10 +17,10 @@
    <layout class="QGridLayout" name="gridLayout_3">
     <item row="0" column="0" colspan="3">
      <layout class="QGridLayout" name="gridLayout_2">
-      <item row="0" column="0">
-       <widget class="QLabel" name="label">
+      <item row="3" column="0">
+       <widget class="QLabel" name="label_4">
         <property name="text">
-         <string>Input Device:</string>
+         <string>Sample rate:</string>
         </property>
        </widget>
       </item>
@@ -34,8 +34,12 @@
         </property>
        </widget>
       </item>
-      <item row="1" column="1">
-       <widget class="QComboBox" name="audioCodecBox"/>
+      <item row="0" column="0">
+       <widget class="QLabel" name="label">
+        <property name="text">
+         <string>Input Device:</string>
+        </property>
+       </widget>
       </item>
       <item row="2" column="0">
        <widget class="QLabel" name="label_3">
@@ -47,15 +51,21 @@
       <item row="2" column="1">
        <widget class="QComboBox" name="containerBox"/>
       </item>
-      <item row="3" column="0">
-       <widget class="QLabel" name="label_4">
+      <item row="3" column="1">
+       <widget class="QComboBox" name="sampleRateBox"/>
+      </item>
+      <item row="1" column="1">
+       <widget class="QComboBox" name="audioCodecBox"/>
+      </item>
+      <item row="4" column="0">
+       <widget class="QLabel" name="label_5">
        <property name="text">
-         <string>Sample rate:</string>
+         <string>Channels:</string>
        </property>
       </widget>
      </item>
-      <item row="3" column="1">
-       <widget class="QComboBox" name="sampleRateBox"/>
+      <item row="4" column="1">
+       <widget class="QComboBox" name="channelsBox"/>
      </item>
     </layout>
    </item>
@@ -162,9 +172,8 @@
        </property>
       </widget>
      </item>
-     <item row="3" column="1" colspan="-1">
-      <widget class="QAudioLevel" name="audioLevel">
-      </widget>
+     <item row="3" column="1" colspan="2">
+      <layout class="QVBoxLayout" name="levelsLayout"/>
     </item>
    </layout>
   </widget>


@@ -28,57 +28,33 @@
    <rect>
     <x>0</x>
     <y>0</y>
-    <width>398</width>
-    <height>275</height>
+    <width>400</width>
+    <height>277</height>
    </rect>
   </property>
   <layout class="QGridLayout" name="gridLayout_4">
    <item row="0" column="0">
    <widget class="QWidget" name="widget" native="true">
     <layout class="QGridLayout" name="gridLayout_3">
-     <item row="0" column="0">
-      <layout class="QGridLayout" name="gridLayout_2">
-       <item row="0" column="0">
-        <widget class="QLabel" name="label">
-         <property name="text">
-          <string>Input Device:</string>
-         </property>
-        </widget>
-       </item>
-       <item row="0" column="1">
-        <widget class="QComboBox" name="audioDeviceBox"/>
-       </item>
-       <item row="1" column="0">
-        <widget class="QLabel" name="label_2">
-         <property name="text">
-          <string>Audio Codec:</string>
-         </property>
-        </widget>
-       </item>
-       <item row="1" column="1">
-        <widget class="QComboBox" name="audioCodecBox"/>
-       </item>
-       <item row="2" column="0">
-        <widget class="QLabel" name="label_3">
-         <property name="text">
-          <string>File Container:</string>
-         </property>
-        </widget>
-       </item>
-       <item row="2" column="1">
-        <widget class="QComboBox" name="containerBox"/>
-       </item>
-       <item row="3" column="0">
-        <widget class="QLabel" name="label_4">
-         <property name="text">
-          <string>Sample rate:</string>
-         </property>
-        </widget>
-       </item>
-       <item row="3" column="1">
-        <widget class="QComboBox" name="sampleRateBox"/>
-       </item>
-      </layout>
+     <item row="3" column="0">
+      <widget class="QLabel" name="label_6">
+       <property name="text">
+        <string>Audio Level:</string>
+       </property>
+      </widget>
+     </item>
+     <item row="2" column="0">
+      <spacer name="verticalSpacer">
+       <property name="orientation">
+        <enum>Qt::Vertical</enum>
+       </property>
+       <property name="sizeHint" stdset="0">
+        <size>
+         <width>20</width>
+         <height>29</height>
+        </size>
+       </property>
+      </spacer>
     </item>
     <item row="1" column="0">
      <layout class="QGridLayout" name="gridLayout">
@@ -127,18 +103,62 @@
       </item>
      </layout>
     </item>
+    <item row="0" column="0">
+     <layout class="QGridLayout" name="gridLayout_2">
+      <item row="3" column="0">
+       <widget class="QLabel" name="label_4">
+        <property name="text">
+         <string>Sample rate:</string>
+        </property>
+       </widget>
+      </item>
+      <item row="1" column="0">
+       <widget class="QLabel" name="label_2">
+        <property name="text">
+         <string>Audio Codec:</string>
+        </property>
+       </widget>
+      </item>
+      <item row="2" column="1">
+       <widget class="QComboBox" name="containerBox"/>
+      </item>
+      <item row="0" column="1">
+       <widget class="QComboBox" name="audioDeviceBox"/>
+      </item>
+      <item row="3" column="1">
+       <widget class="QComboBox" name="sampleRateBox"/>
+      </item>
+      <item row="1" column="1">
+       <widget class="QComboBox" name="audioCodecBox"/>
+      </item>
-    <item row="2" column="0">
-     <spacer name="verticalSpacer">
-      <property name="orientation">
-       <enum>Qt::Vertical</enum>
-      </property>
-      <property name="sizeHint" stdset="0">
-       <size>
-        <width>20</width>
-        <height>29</height>
-       </size>
-      </property>
-     </spacer>
-    </item>
+      <item row="2" column="0">
+       <widget class="QLabel" name="label_3">
+        <property name="text">
+         <string>File Container:</string>
+        </property>
+       </widget>
+      </item>
+      <item row="0" column="0">
+       <widget class="QLabel" name="label">
+        <property name="text">
+         <string>Input Device:</string>
+        </property>
+       </widget>
+      </item>
+      <item row="4" column="0">
+       <widget class="QLabel" name="label_5">
+        <property name="text">
+         <string>Channels:</string>
+        </property>
+       </widget>
+      </item>
+      <item row="4" column="1">
+       <widget class="QComboBox" name="channelsBox"/>
+      </item>
+     </layout>
+    </item>
+    <item row="4" column="0">
+     <layout class="QVBoxLayout" name="levelsLayout"/>
+    </item>
    </layout>
   </widget>


@@ -45,6 +45,8 @@ QAudioLevel::QAudioLevel(QWidget *parent)
   : QWidget(parent)
   , m_level(0.0)
 {
+    setMinimumHeight(15);
+    setMaximumHeight(50);
 }
 void QAudioLevel::setLevel(qreal level)
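QAudioLevel here is the example's own small level-meter widget; this hunk only constrains its height. As a rough idea of what such a level-meter widget involves, the following is an assumed sketch with the same setLevel(qreal) interface, not the implementation in this commit:

#include <QApplication>
#include <QWidget>
#include <QPainter>
#include <QPaintEvent>

// Assumed sketch of a minimal level meter: fills a bar proportional to the
// last value passed to setLevel(), which is expected in the 0.0..1.0 range.
class LevelMeter : public QWidget
{
public:
    explicit LevelMeter(QWidget *parent = 0) : QWidget(parent), m_level(0.0)
    {
        setMinimumHeight(15);
        setMaximumHeight(50);
    }

    void setLevel(qreal level)
    {
        if (m_level != level) {
            m_level = level;
            update();   // schedule a repaint with the new level
        }
    }

protected:
    void paintEvent(QPaintEvent *)
    {
        QPainter painter(this);
        const int w = int(m_level * width());
        painter.fillRect(0, 0, w, height(), QColor(Qt::red));
        painter.fillRect(w, 0, width() - w, height(), QColor(Qt::black));
    }

private:
    qreal m_level;
};

int main(int argc, char **argv)
{
    QApplication app(argc, argv);
    LevelMeter meter;
    meter.setLevel(0.6);
    meter.resize(200, 20);
    meter.show();
    return app.exec();
}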


@@ -83,10 +83,6 @@ Rectangle {
         highlight: Rectangle { color: "gray"; radius: 5 }
         currentIndex: indexForValue(propertyPopup.currentValue)
-        onCurrentIndexChanged: {
-            propertyPopup.currentValue = model.get(view.currentIndex).value
-        }
         delegate: Item {
             width: propertyPopup.itemWidth
             height: 70


@@ -4,7 +4,9 @@ load(configure)
 qtCompileTest(openal)
 qtCompileTest(opensles)
 win32 {
-    qtCompileTest(directshow)
+    qtCompileTest(directshow) {
+        qtCompileTest(wshellitem)
+    }
     qtCompileTest(wmsdk)
     qtCompileTest(wmp)
     contains(QT_CONFIG, wmf-backend): qtCompileTest(wmf)


@@ -92,9 +92,9 @@ void QDeclarativeAttenuationModel::setName(const QString& name)
 /*!
     \qmltype AttenuationModelLinear
     \instantiates QDeclarativeAttenuationModelLinear
-    \since 1.0
+    \since 5.0
     \brief Defines a linear attenuation curve for a \l Sound.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary
@@ -225,9 +225,9 @@ qreal QDeclarativeAttenuationModelLinear::calculateGain(const QVector3D &listene
     \qmltype AttenuationModelInverse
     \instantiates QDeclarativeAttenuationModelInverse
-    \since 1.0
+    \since 5.0
     \brief Defines a non-linear attenuation curve for a \l Sound.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -49,9 +49,9 @@ QT_USE_NAMESPACE
 /*!
     \qmltype AudioCategory
     \instantiates QDeclarativeAudioCategory
-    \since 1.0
+    \since 5.0
     \brief Control all active sound instances by group.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -58,9 +58,9 @@ QT_BEGIN_NAMESPACE
 /*!
     \qmltype AudioEngine
     \instantiates QDeclarativeAudioEngine
-    \since 1.0
+    \since 5.0
     \brief Organize all your 3d audio content in one place.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -50,9 +50,9 @@ QT_USE_NAMESPACE
 /*!
     \qmltype AudioListener
     \instantiates QDeclarativeAudioListener
-    \since 1.0
+    \since 5.0
     \brief Control global listener parameters.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -52,9 +52,9 @@ QT_USE_NAMESPACE
 /*!
     \qmltype AudioSample
     \instantiates QDeclarativeAudioSample
-    \since 1.0
+    \since 5.0
     \brief Load audio samples, mostly .wav.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -51,12 +51,12 @@ QT_USE_NAMESPACE
 /*!
     \qmltype PlayVariation
     \instantiates QDeclarativePlayVariation
-    \since 1.0
+    \since 5.0
     \brief Define a playback variation for \l {Sound} {sounds}.
     So each time the playback of the same sound can be a slightly different even with the same
     AudioSample.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -140,10 +140,10 @@ void QDeclarativeSoundCone::componentComplete()
 /*!
     \qmltype Sound
     \instantiates QDeclarativeSound
-    \since 1.0
+    \since 5.0
     \brief Define a variety of samples and parameters to be used for
     SoundInstance.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -53,9 +53,9 @@ QT_USE_NAMESPACE
 /*!
     \qmltype SoundInstance
     \instantiates QDeclarativeSoundInstance
-    \since 1.0
+    \since 5.0
     \brief Play 3d audio content.
-    \inqmlmodule QtAudioEngine 1.0
+    \inqmlmodule QtAudioEngine
     \ingroup multimedia_audioengine
     \inherits Item
     \preliminary


@@ -47,7 +47,7 @@ import QtMultimedia 5.0
     \inherits Item
     \ingroup multimedia_qml
     \ingroup multimedia_video_qml
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     \brief A convenience type for showing a specified video.
     \c Video is a convenience type combining the functionality

File diff suppressed because it is too large.


@@ -79,7 +79,7 @@ void QDeclarativeCamera::_q_availabilityChanged(QMultimedia::AvailabilityStatus
     \brief Access viewfinder frames, and take photos and movies.
     \ingroup multimedia_qml
     \ingroup camera_qml
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     \inherits Item
@@ -223,11 +223,11 @@ QDeclarativeCamera::Error QDeclarativeCamera::errorCode() const
 }
 /*!
-    \qmlproperty string QtMultimedia5::Camera::errorString
+    \qmlproperty string QtMultimedia::Camera::errorString
     This property holds the last error string, if any.
-    \sa QtMultimedia5::Camera::onError
+    \sa QtMultimedia::Camera::onError
 */
 QString QDeclarativeCamera::errorString() const
 {
@@ -235,7 +235,7 @@ QString QDeclarativeCamera::errorString() const
 }
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::availability
+    \qmlproperty enumeration QtMultimedia::Camera::availability
     This property holds the availability state of the camera.
@@ -262,7 +262,7 @@ QDeclarativeCamera::Availability QDeclarativeCamera::availability() const
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::captureMode
+    \qmlproperty enumeration QtMultimedia::Camera::captureMode
     This property holds the camera capture mode, which can be one of the
     following:
@@ -293,7 +293,7 @@ void QDeclarativeCamera::setCaptureMode(QDeclarativeCamera::CaptureMode mode)
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::cameraState
+    \qmlproperty enumeration QtMultimedia::Camera::cameraState
     This property holds the camera object's current state, which can be one of the following:
@@ -330,7 +330,7 @@ QDeclarativeCamera::State QDeclarativeCamera::cameraState() const
 }
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::cameraStatus
+    \qmlproperty enumeration QtMultimedia::Camera::cameraStatus
     This property holds the camera object's current status, which can be one of the following:
@@ -407,7 +407,7 @@ void QDeclarativeCamera::setCameraState(QDeclarativeCamera::State state)
 }
 /*!
-    \qmlmethod QtMultimedia5::Camera::start()
+    \qmlmethod QtMultimedia::Camera::start()
     Starts the camera. Viewfinder frames will
     be available and image or movie capture will
@@ -419,7 +419,7 @@ void QDeclarativeCamera::start()
 }
 /*!
-    \qmlmethod QtMultimedia5::Camera::stop()
+    \qmlmethod QtMultimedia::Camera::stop()
     Stops the camera, but leaves the camera
     stack loaded.
@@ -431,7 +431,7 @@ void QDeclarativeCamera::stop()
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::lockStatus
+    \qmlproperty enumeration QtMultimedia::Camera::lockStatus
     This property holds the status of all the requested camera locks.
@@ -492,7 +492,7 @@ QDeclarativeCamera::LockStatus QDeclarativeCamera::lockStatus() const
 }
 /*!
-    \qmlmethod QtMultimedia5::Camera::searchAndLock()
+    \qmlmethod QtMultimedia::Camera::searchAndLock()
     Start focusing, exposure and white balance calculation.
@@ -507,7 +507,7 @@ void QDeclarativeCamera::searchAndLock()
 }
 /*!
-    \qmlmethod QtMultimedia5::Camera::unlock()
+    \qmlmethod QtMultimedia::Camera::unlock()
     Unlock focus, exposure and white balance locks.
 */
@@ -521,7 +521,7 @@ void QDeclarativeCamera::unlock()
     This property holds the maximum optical zoom factor supported, or 1.0 if optical zoom is not supported.
 */
 /*!
-    \qmlproperty real QtMultimedia5::Camera::maximumOpticalZoom
+    \qmlproperty real QtMultimedia::Camera::maximumOpticalZoom
     This property holds the maximum optical zoom factor supported, or 1.0 if optical zoom is not supported.
 */
@@ -535,7 +535,7 @@ qreal QDeclarativeCamera::maximumOpticalZoom() const
     This property holds the maximum digital zoom factor supported, or 1.0 if digital zoom is not supported.
 */
 /*!
-    \qmlproperty real QtMultimedia5::Camera::maximumDigitalZoom
+    \qmlproperty real QtMultimedia::Camera::maximumDigitalZoom
     This property holds the maximum digital zoom factor supported, or 1.0 if digital zoom is not supported.
 */
@@ -550,7 +550,7 @@ qreal QDeclarativeCamera::maximumDigitalZoom() const
 */
 /*!
-    \qmlproperty real QtMultimedia5::Camera::opticalZoom
+    \qmlproperty real QtMultimedia::Camera::opticalZoom
     This property holds the current optical zoom factor.
 */
@@ -569,7 +569,7 @@ void QDeclarativeCamera::setOpticalZoom(qreal value)
     This property holds the current digital zoom factor.
 */
 /*!
-    \qmlproperty real QtMultimedia5::Camera::digitalZoom
+    \qmlproperty real QtMultimedia::Camera::digitalZoom
     This property holds the current digital zoom factor.
 */
@@ -584,21 +584,21 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
 }
 /*!
-    \qmlproperty variant QtMultimedia5::Camera::mediaObject
+    \qmlproperty variant QtMultimedia::Camera::mediaObject
     This property holds the media object for the camera.
 */
 /*!
-    \qmlproperty enumeration QtMultimedia5::Camera::errorCode
+    \qmlproperty enumeration QtMultimedia::Camera::errorCode
     This property holds the last error code.
-    \sa QtMultimedia5::Camera::onError
+    \sa QtMultimedia::Camera::onError
 */
 /*!
-    \qmlsignal QtMultimedia5::Camera::onError(errorCode, errorString)
+    \qmlsignal QtMultimedia::Camera::onError(errorCode, errorString)
     This handler is called when an error occurs. The enumeration value
     \a errorCode is one of the values defined below, and a descriptive string


@@ -54,7 +54,7 @@ QT_BEGIN_NAMESPACE
     \instantiates QDeclarativeCameraCapture
     \brief An interface for capturing camera images
     \ingroup multimedia_qml
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     \ingroup camera_qml
     This type allows you to capture still images and be notified when they
@@ -137,7 +137,7 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
 */
 /*!
-    \qmlproperty bool QtMultimedia5::CameraCapture::ready
+    \qmlproperty bool QtMultimedia::CameraCapture::ready
     This property holds a bool value indicating whether the camera
     is ready to capture photos or not.
@@ -154,7 +154,7 @@ bool QDeclarativeCameraCapture::isReadyForCapture() const
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraCapture::capture()
+    \qmlmethod QtMultimedia::CameraCapture::capture()
     Start image capture. The \l onImageCaptured() and \l onImageSaved() signals will
     be emitted when the capture is complete.
@@ -177,7 +177,7 @@ int QDeclarativeCameraCapture::capture()
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraCapture::captureToLocation(location)
+    \qmlmethod QtMultimedia::CameraCapture::captureToLocation(location)
     Start image capture to specified \a location. The \l onImageCaptured() and \l onImageSaved() signals will
     be emitted when the capture is complete.
@@ -196,7 +196,7 @@ int QDeclarativeCameraCapture::captureToLocation(const QString &location)
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraCapture::cancelCapture()
+    \qmlmethod QtMultimedia::CameraCapture::cancelCapture()
     Cancel pending image capture requests.
 */
@@ -211,7 +211,7 @@ void QDeclarativeCameraCapture::cancelCapture()
     This property holds the location of the last captured image.
 */
 /*!
-    \qmlproperty string QtMultimedia5::CameraCapture::capturedImagePath
+    \qmlproperty string QtMultimedia::CameraCapture::capturedImagePath
     This property holds the location of the last captured image.
 */
@@ -254,7 +254,7 @@ void QDeclarativeCameraCapture::_q_captureFailed(int id, QCameraImageCapture::Er
 */
 /*!
-    \qmlproperty size QtMultimedia5::CameraCapture::resolution
+    \qmlproperty size QtMultimedia::CameraCapture::resolution
     This property holds the resolution/size of the image to be captured.
     If empty, the system chooses the appropriate resolution.
@@ -285,7 +285,7 @@ QCameraImageCapture::Error QDeclarativeCameraCapture::error() const
 */
 /*!
-    \qmlproperty string QtMultimedia5::CameraCapture::errorString
+    \qmlproperty string QtMultimedia::CameraCapture::errorString
     This property holds the error message related to the last capture.
 */
@@ -295,7 +295,7 @@ QString QDeclarativeCameraCapture::errorString() const
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraCapture::setMetadata(key, value)
+    \qmlmethod QtMultimedia::CameraCapture::setMetadata(key, value)
     Sets a particular metadata \a key to \a value for the subsequent image captures.
@@ -309,14 +309,14 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
 }
 /*!
-    \qmlsignal QtMultimedia5::CameraCapture::onCaptureFailed(requestId, message)
+    \qmlsignal QtMultimedia::CameraCapture::onCaptureFailed(requestId, message)
     This handler is called when an error occurs during capture with \a requestId.
     A descriptive message is available in \a message.
 */
 /*!
-    \qmlsignal QtMultimedia5::CameraCapture::onImageCaptured(requestId, preview)
+    \qmlsignal QtMultimedia::CameraCapture::onImageCaptured(requestId, preview)
     This handler is called when an image with \a requestId has been captured
     but not yet saved to the filesystem. The \a preview
@@ -326,7 +326,7 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
 */
 /*!
-    \qmlsignal QtMultimedia5::CameraCapture::onImageSaved(requestId, path)
+    \qmlsignal QtMultimedia::CameraCapture::onImageSaved(requestId, path)
     This handler is called after the image with \a requestId has been written to the filesystem.
     The \a path is a local file path, not a URL.
@@ -336,7 +336,7 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
 /*!
-    \qmlsignal QtMultimedia5::CameraCapture::onImageMetadataAvailable(requestId, key, value)
+    \qmlsignal QtMultimedia::CameraCapture::onImageMetadataAvailable(requestId, key, value)
     This handler is called when the image with \a requestId has new metadata
     available with the key \a key and value \a value.


@@ -50,7 +50,7 @@ QT_BEGIN_NAMESPACE
     \brief An interface for exposure related camera settings.
     \ingroup multimedia_qml
     \ingroup camera_qml
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     This type is part of the \b{QtMultimedia 5.0} module.
@@ -124,7 +124,7 @@ QDeclarativeCameraExposure::~QDeclarativeCameraExposure()
     This property holds the adjustment value for the automatically calculated exposure. The value is in EV units.
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::exposureCompensation
+    \qmlproperty real QtMultimedia::CameraExposure::exposureCompensation
     This property holds the adjustment value for the automatically calculated exposure. The value is
     in EV units.
@@ -144,7 +144,7 @@ void QDeclarativeCameraExposure::setExposureCompensation(qreal ev)
     This property holds the sensor's ISO sensitivity value.
 */
 /*!
-    \qmlproperty int QtMultimedia5::CameraExposure::iso
+    \qmlproperty int QtMultimedia::CameraExposure::iso
     This property holds the sensor's ISO sensitivity value.
 */
@@ -161,7 +161,7 @@ int QDeclarativeCameraExposure::isoSensitivity() const
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::shutterSpeed
+    \qmlproperty real QtMultimedia::CameraExposure::shutterSpeed
     This property holds the camera's current shutter speed value in seconds.
     To affect the shutter speed you can use the \l manualShutterSpeed
@@ -180,7 +180,7 @@ qreal QDeclarativeCameraExposure::shutterSpeed() const
     \sa manualAperture, setAutoAperture()
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::aperture
+    \qmlproperty real QtMultimedia::CameraExposure::aperture
     This property holds the current lens aperture as an F number (the ratio of
     the focal length to effective aperture diameter).
@@ -202,7 +202,7 @@ qreal QDeclarativeCameraExposure::aperture() const
     \sa iso, setAutoIsoSensitivity()
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::manualIso
+    \qmlproperty real QtMultimedia::CameraExposure::manualIso
     This property holds the ISO settings for capturing photos.
@@ -237,7 +237,7 @@ void QDeclarativeCameraExposure::setManualIsoSensitivity(int iso)
     \l shutterSpeed, setAutoShutterSpeed()
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::manualShutterSpeed
+    \qmlproperty real QtMultimedia::CameraExposure::manualShutterSpeed
     This property holds the shutter speed value (in seconds).
     If the value is less than zero, the camera automatically
@@ -272,7 +272,7 @@ void QDeclarativeCameraExposure::setManualShutterSpeed(qreal speed)
     \l aperture, setAutoAperture()
 */
 /*!
-    \qmlproperty real QtMultimedia5::CameraExposure::manualAperture
+    \qmlproperty real QtMultimedia::CameraExposure::manualAperture
     This property holds the aperture (F number) value
     for capturing photos.
@@ -299,7 +299,7 @@ void QDeclarativeCameraExposure::setManualAperture(qreal aperture)
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraExposure::setAutoAperture()
+    \qmlmethod QtMultimedia::CameraExposure::setAutoAperture()
     Turn on auto aperture selection. The manual aperture value is reset to -1.0
 */
 void QDeclarativeCameraExposure::setAutoAperture()
@@ -308,7 +308,7 @@ void QDeclarativeCameraExposure::setAutoAperture()
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraExposure::setAutoShutterSpeed()
+    \qmlmethod QtMultimedia::CameraExposure::setAutoShutterSpeed()
     Turn on auto shutter speed selection. The manual shutter speed value is reset to -1.0
 */
 void QDeclarativeCameraExposure::setAutoShutterSpeed()
@@ -317,7 +317,7 @@ void QDeclarativeCameraExposure::setAutoShutterSpeed()
 }
 /*!
-    \qmlmethod QtMultimedia5::CameraExposure::setAutoIsoSensitivity()
+    \qmlmethod QtMultimedia::CameraExposure::setAutoIsoSensitivity()
     Turn on auto ISO sensitivity selection. The manual ISO value is reset to -1.
 */
 void QDeclarativeCameraExposure::setAutoIsoSensitivity()
@@ -330,7 +330,7 @@ void QDeclarativeCameraExposure::setAutoIsoSensitivity()
     This property holds the camera exposure mode. The mode can one of the values in \l QCameraExposure::ExposureMode.
 */
 /*!
-    \qmlproperty enumeration QtMultimedia5::CameraExposure::exposureMode
+    \qmlproperty enumeration QtMultimedia::CameraExposure::exposureMode
     This property holds the camera exposure mode.
@@ -373,7 +373,7 @@ void QDeclarativeCameraExposure::setExposureMode(QDeclarativeCamera::ExposureMod
     typically defaults to the center \c (0.5, 0.5).
 */
 /*!
-    \qmlproperty QPointF QtMultimedia5::CameraExposure::spotMeteringPoint
+    \qmlproperty QPointF QtMultimedia::CameraExposure::spotMeteringPoint
     The property holds the frame coordinates of the point to use for exposure metering.
     This point is only used in spot metering mode, and it typically defaults
@@ -400,7 +400,7 @@ void QDeclarativeCameraExposure::setSpotMeteringPoint(const QPointF &point)
     The mode can be one of the constants in \l QCameraExposure::MeteringMode.
 */
 /*!
-    \qmlproperty enumeration QtMultimedia5::CameraExposure::meteringMode
+    \qmlproperty enumeration QtMultimedia::CameraExposure::meteringMode
     This property holds the camera metering mode (how exposure is balanced).


@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
 /*!
     \qmltype CameraFlash
     \instantiates QDeclarativeCameraFlash
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     \brief An interface for flash related camera settings.
     \ingroup multimedia_qml
     \ingroup camera_qml
@@ -95,7 +95,7 @@ QDeclarativeCameraFlash::~QDeclarativeCameraFlash()
     This property indicates whether the flash is charged.
 */
 /*!
-    \qmlproperty bool QtMultimedia5::CameraFlash::ready
+    \qmlproperty bool QtMultimedia::CameraFlash::ready
     This property indicates whether the flash is charged.
 */
@@ -109,7 +109,7 @@ bool QDeclarativeCameraFlash::isFlashReady() const
     This property holds the camera flash mode. The mode can be one of the constants in \l QCameraExposure::FlashMode.
 */
 /*!
-    \qmlproperty enumeration QtMultimedia5::CameraFlash::mode
+    \qmlproperty enumeration QtMultimedia::CameraFlash::mode
     This property holds the camera flash mode.
@@ -145,13 +145,13 @@ void QDeclarativeCameraFlash::setFlashMode(int mode)
 }
 /*!
-    \qmlsignal QtMultimedia5::CameraFlash::flashModeChanged(int)
+    \qmlsignal QtMultimedia::CameraFlash::flashModeChanged(int)
     This signal is emitted when the \c flashMode property is changed.
     The corresponding handler is \c onFlashModeChanged.
 */
 /*!
-    \qmlsignal QtMultimedia5::CameraFlash::flashReady(bool)
+    \qmlsignal QtMultimedia::CameraFlash::flashReady(bool)
     This signal is emitted when QCameraExposure indicates that
     the flash is ready to use.
     The corresponsing handler is \c onFlashReadyChanged.


@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
 /*!
     \qmltype CameraFocus
     \instantiates QDeclarativeCameraFocus
-    \inqmlmodule QtMultimedia 5.0
+    \inqmlmodule QtMultimedia
     \brief An interface for focus related camera settings.
     \ingroup multimedia_qml
     \ingroup camera_qml
@@ -167,7 +167,7 @@ QDeclarativeCamera::FocusMode QDeclarativeCameraFocus::focusMode() const
 }
 /*!
-    \qmlmethod bool QtMultimedia5::CameraFocus::isFocusModeSupported(mode) const
+    \qmlmethod bool QtMultimedia::CameraFocus::isFocusModeSupported(mode) const
     Returns true if the supplied \a mode is a supported focus mode, and
     false otherwise.
@@ -235,7 +235,7 @@ void QDeclarativeCameraFocus::setFocusPointMode(QDeclarativeCamera::FocusPointMo
 }
 /*!
-    \qmlmethod bool QtMultimedia5::CameraFocus::isFocusPointModeSupported(mode) const
+    \qmlmethod bool QtMultimedia::CameraFocus::isFocusPointModeSupported(mode) const
     Returns true if the supplied \a mode is a supported focus point mode, and
     false otherwise.
@@ -255,7 +255,7 @@ bool QDeclarativeCameraFocus::isFocusPointModeSupported(QDeclarativeCamera::Focu
 */
 /*!
-    \qmlproperty point QtMultimedia5::CameraFocus::customFocusPoint
+    \qmlproperty point QtMultimedia::CameraFocus::customFocusPoint
     This property holds the position of custom focus point, in relative frame coordinates:
     QPointF(0,0) points to the left top frame point, QPointF(0.5,0.5)
@@ -289,7 +289,7 @@ void QDeclarativeCameraFocus::setCustomFocusPoint(const QPointF &point)
     \endtable
 */
 /*!
-    \qmlproperty list<focusZone> QtMultimedia5::CameraFocus::focusZones
+    \qmlproperty list<focusZone> QtMultimedia::CameraFocus::focusZones
     This property holds the list of current camera focus zones,
     each including \c area specified in the same coordinates as \l customFocusPoint,


@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
/*! /*!
\qmltype CameraImageProcessing \qmltype CameraImageProcessing
\instantiates QDeclarativeCameraImageProcessing \instantiates QDeclarativeCameraImageProcessing
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\brief An interface for camera capture related settings. \brief An interface for camera capture related settings.
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup camera_qml \ingroup camera_qml
@@ -95,7 +95,7 @@ QDeclarativeCameraImageProcessing::~QDeclarativeCameraImageProcessing()
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraImageProcessing::whiteBalanceMode \qmlproperty enumeration QtMultimedia::CameraImageProcessing::whiteBalanceMode
\table \table
\header \li Value \li Description \header \li Value \li Description
@@ -127,7 +127,7 @@ void QDeclarativeCameraImageProcessing::setWhiteBalanceMode(QDeclarativeCameraIm
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::manualWhiteBalance \qmlproperty qreal QtMultimedia::CameraImageProcessing::manualWhiteBalance
The color temperature used when in manual white balance mode (WhiteBalanceManual). The color temperature used when in manual white balance mode (WhiteBalanceManual).
The units are Kelvin. The units are Kelvin.
@@ -148,7 +148,7 @@ void QDeclarativeCameraImageProcessing::setManualWhiteBalance(qreal colorTemp) c
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::contrast \qmlproperty qreal QtMultimedia::CameraImageProcessing::contrast
Image contrast adjustment. Image contrast adjustment.
Valid contrast adjustment values range between -1.0 and 1.0, with a default of 0. Valid contrast adjustment values range between -1.0 and 1.0, with a default of 0.
@@ -167,7 +167,7 @@ void QDeclarativeCameraImageProcessing::setContrast(qreal value)
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::saturation \qmlproperty qreal QtMultimedia::CameraImageProcessing::saturation
Image saturation adjustment. Image saturation adjustment.
Valid saturation adjustment values range between -1.0 and 1.0, the default is 0. Valid saturation adjustment values range between -1.0 and 1.0, the default is 0.
@@ -186,7 +186,7 @@ void QDeclarativeCameraImageProcessing::setSaturation(qreal value)
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::sharpeningLevel \qmlproperty qreal QtMultimedia::CameraImageProcessing::sharpeningLevel
Adjustment of sharpening level applied to image. Adjustment of sharpening level applied to image.
@@ -207,7 +207,7 @@ void QDeclarativeCameraImageProcessing::setSharpeningLevel(qreal value)
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::denoisingLevel \qmlproperty qreal QtMultimedia::CameraImageProcessing::denoisingLevel
Adjustment of denoising applied to image. Adjustment of denoising applied to image.
@@ -228,12 +228,12 @@ void QDeclarativeCameraImageProcessing::setDenoisingLevel(qreal value)
} }
/*! /*!
\qmlsignal QtMultimedia5::Camera::whiteBalanceModeChanged(Camera::WhiteBalanceMode) \qmlsignal QtMultimedia::Camera::whiteBalanceModeChanged(Camera::WhiteBalanceMode)
This signal is emitted when the \c whiteBalanceMode property is changed. This signal is emitted when the \c whiteBalanceMode property is changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::Camera::manualWhiteBalanceChanged(qreal) \qmlsignal QtMultimedia::Camera::manualWhiteBalanceChanged(qreal)
This signal is emitted when the \c manualWhiteBalance property is changed. This signal is emitted when the \c manualWhiteBalance property is changed.
*/ */
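For illustration, a minimal QML sketch of how the properties documented here are typically set through the Camera type's imageProcessing group; the enum value and numeric settings below are examples only, not values taken from this change:

    import QtMultimedia 5.0

    Camera {
        id: camera

        imageProcessing {
            whiteBalanceMode: CameraImageProcessing.WhiteBalanceTungsten
            contrast: 0.25       // range -1.0 to 1.0, 0 is the default
            saturation: -0.5     // range -1.0 to 1.0, 0 is the default
            sharpeningLevel: 0.3
        }
    }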
View File
@@ -49,7 +49,7 @@ QT_BEGIN_NAMESPACE
/*! /*!
\qmltype CameraRecorder \qmltype CameraRecorder
\instantiates QDeclarativeCameraRecorder \instantiates QDeclarativeCameraRecorder
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\brief Controls video recording with the Camera. \brief Controls video recording with the Camera.
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup camera_qml \ingroup camera_qml
@@ -99,7 +99,7 @@ QDeclarativeCameraRecorder::~QDeclarativeCameraRecorder()
} }
/*! /*!
\qmlproperty size QtMultimedia5::CameraRecorder::resolution \qmlproperty size QtMultimedia::CameraRecorder::resolution
This property holds the video frame dimensions to be used for video capture. This property holds the video frame dimensions to be used for video capture.
*/ */
@@ -109,12 +109,12 @@ QSize QDeclarativeCameraRecorder::captureResolution()
} }
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::audioCodec \qmlproperty string QtMultimedia::CameraRecorder::audioCodec
This property holds the audio codec to be used for recording video. This property holds the audio codec to be used for recording video.
Typically this is \c aac or \c amr-wb. Typically this is \c aac or \c amr-wb.
\sa {QtMultimedia5::CameraImageProcessing::whiteBalanceMode}{whiteBalanceMode} \sa {QtMultimedia::CameraImageProcessing::whiteBalanceMode}{whiteBalanceMode}
*/ */
QString QDeclarativeCameraRecorder::audioCodec() const QString QDeclarativeCameraRecorder::audioCodec() const
{ {
@@ -122,7 +122,7 @@ QString QDeclarativeCameraRecorder::audioCodec() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::videoCodec \qmlproperty string QtMultimedia::CameraRecorder::videoCodec
This property holds the video codec to be used for recording video. This property holds the video codec to be used for recording video.
Typically this is \c h264. Typically this is \c h264.
@@ -133,7 +133,7 @@ QString QDeclarativeCameraRecorder::videoCodec() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::mediaContainer \qmlproperty string QtMultimedia::CameraRecorder::mediaContainer
This property holds the media container to be used for recording video. This property holds the media container to be used for recording video.
Typically this is \c mp4. Typically this is \c mp4.
@@ -180,7 +180,7 @@ void QDeclarativeCameraRecorder::setMediaContainer(const QString &container)
} }
/*! /*!
\qmlproperty qreal QtMultimedia5::CameraRecorder::frameRate \qmlproperty qreal QtMultimedia::CameraRecorder::frameRate
This property holds the framerate (in frames per second) to be used for recording video. This property holds the framerate (in frames per second) to be used for recording video.
*/ */
@@ -190,7 +190,7 @@ qreal QDeclarativeCameraRecorder::frameRate() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::CameraRecorder::videoBitRate \qmlproperty int QtMultimedia::CameraRecorder::videoBitRate
This property holds the bit rate (in bits per second) to be used for recording video. This property holds the bit rate (in bits per second) to be used for recording video.
*/ */
@@ -200,7 +200,7 @@ int QDeclarativeCameraRecorder::videoBitRate() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::CameraRecorder::audioBitRate \qmlproperty int QtMultimedia::CameraRecorder::audioBitRate
This property holds the audio bit rate (in bits per second) to be used for recording video. This property holds the audio bit rate (in bits per second) to be used for recording video.
*/ */
@@ -210,7 +210,7 @@ int QDeclarativeCameraRecorder::audioBitRate() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::CameraRecorder::audioChannels \qmlproperty int QtMultimedia::CameraRecorder::audioChannels
This property indicates the number of audio channels to be encoded while This property indicates the number of audio channels to be encoded while
recording video (1 is mono, 2 is stereo). recording video (1 is mono, 2 is stereo).
@@ -221,7 +221,7 @@ int QDeclarativeCameraRecorder::audioChannels() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::CameraRecorder::audioSampleRate \qmlproperty int QtMultimedia::CameraRecorder::audioSampleRate
This property holds the sample rate to be used to encode audio while recording video. This property holds the sample rate to be used to encode audio while recording video.
*/ */
@@ -231,7 +231,7 @@ int QDeclarativeCameraRecorder::audioSampleRate() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraRecorder::videoEncodingMode \qmlproperty enumeration QtMultimedia::CameraRecorder::videoEncodingMode
This property holds the type of encoding method to be used for recording video. This property holds the type of encoding method to be used for recording video.
@@ -257,7 +257,7 @@ QDeclarativeCameraRecorder::EncodingMode QDeclarativeCameraRecorder::videoEncodi
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraRecorder::audioEncodingMode \qmlproperty enumeration QtMultimedia::CameraRecorder::audioEncodingMode
The type of encoding method to use when recording audio. The type of encoding method to use when recording audio.
@@ -343,7 +343,7 @@ void QDeclarativeCameraRecorder::setVideoEncodingMode(QDeclarativeCameraRecorder
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraRecorder::errorCode \qmlproperty enumeration QtMultimedia::CameraRecorder::errorCode
This property holds the last error code. This property holds the last error code.
@@ -369,7 +369,7 @@ QDeclarativeCameraRecorder::Error QDeclarativeCameraRecorder::errorCode() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::errorString \qmlproperty string QtMultimedia::CameraRecorder::errorString
This property holds the description of the last error. This property holds the description of the last error.
*/ */
@@ -379,7 +379,7 @@ QString QDeclarativeCameraRecorder::errorString() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraRecorder::recorderState \qmlproperty enumeration QtMultimedia::CameraRecorder::recorderState
This property holds the current state of the camera recorder object. This property holds the current state of the camera recorder object.
@@ -407,7 +407,7 @@ QDeclarativeCameraRecorder::RecorderState QDeclarativeCameraRecorder::recorderSt
/*! /*!
\qmlproperty enumeration QtMultimedia5::CameraRecorder::recorderStatus \qmlproperty enumeration QtMultimedia::CameraRecorder::recorderStatus
This property holds the current status of media recording. This property holds the current status of media recording.
@@ -438,7 +438,7 @@ QDeclarativeCameraRecorder::RecorderStatus QDeclarativeCameraRecorder::recorderS
} }
/*! /*!
\qmlmethod QtMultimedia5::CameraRecorder::record() \qmlmethod QtMultimedia::CameraRecorder::record()
Starts recording. Starts recording.
*/ */
@@ -448,7 +448,7 @@ void QDeclarativeCameraRecorder::record()
} }
/*! /*!
\qmlmethod QtMultimedia5::CameraRecorder::stop() \qmlmethod QtMultimedia::CameraRecorder::stop()
Stops recording. Stops recording.
*/ */
@@ -478,7 +478,7 @@ void QDeclarativeCameraRecorder::setRecorderState(QDeclarativeCameraRecorder::Re
the recorder uses the system-specific place and file naming scheme. the recorder uses the system-specific place and file naming scheme.
*/ */
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::outputLocation \qmlproperty string QtMultimedia::CameraRecorder::outputLocation
This property holds the destination location of the media content. If the location is empty, This property holds the destination location of the media content. If the location is empty,
the recorder uses the system-specific place and file naming scheme. the recorder uses the system-specific place and file naming scheme.
@@ -496,7 +496,7 @@ QString QDeclarativeCameraRecorder::outputLocation() const
new location is set or new recording starts. new location is set or new recording starts.
*/ */
/*! /*!
\qmlproperty string QtMultimedia5::CameraRecorder::actualLocation \qmlproperty string QtMultimedia::CameraRecorder::actualLocation
This property holds the actual location of the last saved media content. The actual location is This property holds the actual location of the last saved media content. The actual location is
usually available after the recording starts, and reset when new location is set or the new recording starts. usually available after the recording starts, and reset when new location is set or the new recording starts.
@@ -520,7 +520,7 @@ void QDeclarativeCameraRecorder::setOutputLocation(const QString &location)
This property holds the duration (in milliseconds) of the last recording. This property holds the duration (in milliseconds) of the last recording.
*/ */
/*! /*!
\qmlproperty int QtMultimedia5::CameraRecorder::duration \qmlproperty int QtMultimedia::CameraRecorder::duration
This property holds the duration (in milliseconds) of the last recording. This property holds the duration (in milliseconds) of the last recording.
*/ */
@@ -535,7 +535,7 @@ qint64 QDeclarativeCameraRecorder::duration() const
recording. recording.
*/ */
/*! /*!
\qmlproperty bool QtMultimedia5::CameraRecorder::muted \qmlproperty bool QtMultimedia::CameraRecorder::muted
This property indicates whether the audio input is muted during recording. This property indicates whether the audio input is muted during recording.
*/ */
@@ -550,7 +550,7 @@ void QDeclarativeCameraRecorder::setMuted(bool muted)
} }
/*! /*!
\qmlmethod QtMultimedia5::CameraRecorder::setMetadata(key, value) \qmlmethod QtMultimedia::CameraRecorder::setMetadata(key, value)
Sets metadata for the next video to be recorded, with Sets metadata for the next video to be recorded, with
the given \a key being associated with \a value. the given \a key being associated with \a value.
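For orientation, an illustrative sketch of reaching the CameraRecorder properties above through the Camera type's videoRecorder group; the codec, container, and location strings are placeholders and depend on the backend:

    import QtMultimedia 5.0

    Camera {
        id: camera

        videoRecorder {
            resolution: "1280x720"
            frameRate: 30
            audioCodec: "aac"            // backend dependent
            videoCodec: "h264"           // backend dependent
            mediaContainer: "mp4"
            outputLocation: "clip.mp4"   // placeholder
        }
    }

    // elsewhere, e.g. from a button handler:
    //   camera.videoRecorder.record()
    //   camera.videoRecorder.stop()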
View File
@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
/*! /*!
\qmltype Radio \qmltype Radio
\instantiates QDeclarativeRadio \instantiates QDeclarativeRadio
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\brief Access radio functionality from a QML application. \brief Access radio functionality from a QML application.
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup multimedia_radio_qml \ingroup multimedia_radio_qml
@@ -126,7 +126,7 @@ QDeclarativeRadio::~QDeclarativeRadio()
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::Radio::state \qmlproperty enumeration QtMultimedia::Radio::state
This property holds the current state of the Radio. This property holds the current state of the Radio.
@@ -148,7 +148,7 @@ QDeclarativeRadio::State QDeclarativeRadio::state() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::Radio::band \qmlproperty enumeration QtMultimedia::Radio::band
This property holds the frequency band used for the radio, which can be specified as This property holds the frequency band used for the radio, which can be specified as
any one of the values in the table below. any one of the values in the table below.
@@ -178,7 +178,7 @@ QDeclarativeRadio::Band QDeclarativeRadio::band() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::frequency \qmlproperty int QtMultimedia::Radio::frequency
Sets the frequency in Hertz that the radio is tuned to. The frequency must be within the frequency Sets the frequency in Hertz that the radio is tuned to. The frequency must be within the frequency
range for the current band, otherwise it will be changed to be within the frequency range. range for the current band, otherwise it will be changed to be within the frequency range.
@@ -191,7 +191,7 @@ int QDeclarativeRadio::frequency() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::Radio::stereoMode \qmlproperty enumeration QtMultimedia::Radio::stereoMode
This property holds the stereo mode of the radio, which can be set to any one of the This property holds the stereo mode of the radio, which can be set to any one of the
values in the table below. values in the table below.
@@ -215,7 +215,7 @@ QDeclarativeRadio::StereoMode QDeclarativeRadio::stereoMode() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::volume \qmlproperty int QtMultimedia::Radio::volume
Set this property to control the volume of the radio. The valid range of the volume is from 0 to 100. Set this property to control the volume of the radio. The valid range of the volume is from 0 to 100.
*/ */
@@ -225,7 +225,7 @@ int QDeclarativeRadio::volume() const
} }
/*! /*!
\qmlproperty bool QtMultimedia5::Radio::muted \qmlproperty bool QtMultimedia::Radio::muted
This property reflects whether the radio is muted or not. This property reflects whether the radio is muted or not.
*/ */
@@ -235,7 +235,7 @@ bool QDeclarativeRadio::muted() const
} }
/*! /*!
\qmlproperty bool QtMultimedia5::Radio::stereo \qmlproperty bool QtMultimedia::Radio::stereo
This property holds whether the radio is receiving a stereo signal or not. If \l stereoMode is This property holds whether the radio is receiving a stereo signal or not. If \l stereoMode is
set to ForceMono the value will always be false. Likewise, it will always be true if stereoMode set to ForceMono the value will always be false. Likewise, it will always be true if stereoMode
@@ -249,7 +249,7 @@ bool QDeclarativeRadio::stereo() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::signalStrength \qmlproperty int QtMultimedia::Radio::signalStrength
The strength of the current radio signal as a percentage where 0% equals no signal, and 100% is a The strength of the current radio signal as a percentage where 0% equals no signal, and 100% is a
very good signal. very good signal.
@@ -260,7 +260,7 @@ int QDeclarativeRadio::signalStrength() const
} }
/*! /*!
\qmlproperty bool QtMultimedia5::Radio::searching \qmlproperty bool QtMultimedia::Radio::searching
This property is true if the radio is currently searching for radio stations, for instance using the \l scanUp, This property is true if the radio is currently searching for radio stations, for instance using the \l scanUp,
\l scanDown, and \l searchAllStations methods. Once the search completes, or if it is cancelled using \l scanDown, and \l searchAllStations methods. Once the search completes, or if it is cancelled using
@@ -272,7 +272,7 @@ bool QDeclarativeRadio::searching() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::frequencyStep \qmlproperty int QtMultimedia::Radio::frequencyStep
The number of Hertz for each step when tuning the radio manually. The value is for the current \l band. The number of Hertz for each step when tuning the radio manually. The value is for the current \l band.
*/ */
@@ -282,7 +282,7 @@ int QDeclarativeRadio::frequencyStep() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::minimumFrequency \qmlproperty int QtMultimedia::Radio::minimumFrequency
The minimum frequency for the current \l band. The minimum frequency for the current \l band.
*/ */
@@ -292,7 +292,7 @@ int QDeclarativeRadio::minimumFrequency() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::maximumFrequency \qmlproperty int QtMultimedia::Radio::maximumFrequency
The maximum frequency for the current \l band. The maximum frequency for the current \l band.
*/ */
@@ -302,7 +302,7 @@ int QDeclarativeRadio::maximumFrequency() const
} }
/*! /*!
\qmlproperty int QtMultimedia5::Radio::antennaConnected \qmlproperty int QtMultimedia::Radio::antennaConnected
This property is true if there is an antenna connected. Otherwise it will be false. This property is true if there is an antenna connected. Otherwise it will be false.
*/ */
@@ -312,7 +312,7 @@ bool QDeclarativeRadio::isAntennaConnected() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::Radio::availability \qmlproperty enumeration QtMultimedia::Radio::availability
Returns the availability state of the radio. Returns the availability state of the radio.
@@ -367,7 +367,7 @@ void QDeclarativeRadio::setMuted(bool muted)
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::cancelScan() \qmlmethod QtMultimedia::Radio::cancelScan()
Cancel the current scan. Will also cancel a search started with \l searchAllStations. Cancel the current scan. Will also cancel a search started with \l searchAllStations.
*/ */
@@ -377,7 +377,7 @@ void QDeclarativeRadio::cancelScan()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::scanDown() \qmlmethod QtMultimedia::Radio::scanDown()
Searches backward in the frequency range for the current band. Searches backward in the frequency range for the current band.
*/ */
@@ -387,7 +387,7 @@ void QDeclarativeRadio::scanDown()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::scanUp() \qmlmethod QtMultimedia::Radio::scanUp()
Searches forward in the frequency range for the current band. Searches forward in the frequency range for the current band.
*/ */
@@ -397,7 +397,7 @@ void QDeclarativeRadio::scanUp()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::searchAllStations(enumeration searchMode) \qmlmethod QtMultimedia::Radio::searchAllStations(enumeration searchMode)
Start searching the complete frequency range for the current band, and save all the Start searching the complete frequency range for the current band, and save all the
radio stations found. The search mode can be either of the values described in the radio stations found. The search mode can be either of the values described in the
@@ -461,7 +461,7 @@ void QDeclarativeRadio::searchAllStations(QDeclarativeRadio::SearchMode searchMo
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::tuneDown() \qmlmethod QtMultimedia::Radio::tuneDown()
Decrements the frequency by the frequency step for the current band. If the frequency is already set Decrements the frequency by the frequency step for the current band. If the frequency is already set
to the minimum frequency, calling this function has no effect. to the minimum frequency, calling this function has no effect.
@@ -476,7 +476,7 @@ void QDeclarativeRadio::tuneDown()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::tuneUp() \qmlmethod QtMultimedia::Radio::tuneUp()
Increments the frequency by the frequency step for the current band. If the frequency is already set Increments the frequency by the frequency step for the current band. If the frequency is already set
to the maximum frequency, calling this function has no effect. to the maximum frequency, calling this function has no effect.
@@ -491,7 +491,7 @@ void QDeclarativeRadio::tuneUp()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::start() \qmlmethod QtMultimedia::Radio::start()
Starts the radio. If the radio is available, as determined by the \l availability property, Starts the radio. If the radio is available, as determined by the \l availability property,
this will result in the \l state becoming \c ActiveState. this will result in the \l state becoming \c ActiveState.
@@ -502,7 +502,7 @@ void QDeclarativeRadio::start()
} }
/*! /*!
\qmlmethod QtMultimedia5::Radio::stop() \qmlmethod QtMultimedia::Radio::stop()
Stops the radio. After calling this method the \l state will be \c StoppedState. Stops the radio. After calling this method the \l state will be \c StoppedState.
*/ */
@@ -533,7 +533,7 @@ void QDeclarativeRadio::_q_availabilityChanged(QMultimedia::AvailabilityStatus a
} }
/*! /*!
\qmlsignal QtMultimedia5::Radio::stationFound(int frequency, string stationId) \qmlsignal QtMultimedia::Radio::stationFound(int frequency, string stationId)
This signal is emitted when a new radio station is found. This signal is only emitted This signal is emitted when a new radio station is found. This signal is only emitted
if \l searchAllStations is called with \c SearchGetStationId. if \l searchAllStations is called with \c SearchGetStationId.
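A short usage sketch of the Radio type, assuming only the properties and methods documented above; the volume value and UI wiring are illustrative:

    import QtQuick 2.0
    import QtMultimedia 5.0

    Rectangle {
        width: 320; height: 240

        Radio {
            id: radio
            band: Radio.FM
            volume: 75
            Component.onCompleted: radio.start()
        }

        // tap anywhere to scan to the next station in the current band
        MouseArea {
            anchors.fill: parent
            onClicked: radio.scanUp()
        }

        Text { text: (radio.frequency / 1000000) + " MHz" }
    }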
View File
@@ -46,7 +46,7 @@ QT_BEGIN_NAMESPACE
/*! /*!
\qmltype RadioData \qmltype RadioData
\instantiates QDeclarativeRadioData \instantiates QDeclarativeRadioData
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\brief Access RDS data from a QML application. \brief Access RDS data from a QML application.
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup multimedia_radio_qml \ingroup multimedia_radio_qml
@@ -120,7 +120,7 @@ QDeclarativeRadioData::~QDeclarativeRadioData()
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::RadioData::availability \qmlproperty enumeration QtMultimedia::RadioData::availability
Returns the availability state of the radio data interface. Returns the availability state of the radio data interface.
@@ -147,7 +147,7 @@ QDeclarativeRadioData::Availability QDeclarativeRadioData::availability() const
/*! /*!
\qmlproperty string QtMultimedia5::RadioData::stationId \qmlproperty string QtMultimedia::RadioData::stationId
This property allows you to read the station Id of the currently tuned radio This property allows you to read the station Id of the currently tuned radio
station. station.
@@ -158,7 +158,7 @@ QString QDeclarativeRadioData::stationId() const
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::RadioData::programType \qmlproperty enumeration QtMultimedia::RadioData::programType
This property holds the type of the currently playing program as transmitted This property holds the type of the currently playing program as transmitted
by the radio station. The value can be any one of the values defined in the by the radio station. The value can be any one of the values defined in the
@@ -223,7 +223,7 @@ QDeclarativeRadioData::ProgramType QDeclarativeRadioData::programType() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::RadioData::programTypeName \qmlproperty string QtMultimedia::RadioData::programTypeName
This property holds a string representation of the \l programType. This property holds a string representation of the \l programType.
*/ */
@@ -233,7 +233,7 @@ QString QDeclarativeRadioData::programTypeName() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::RadioData::stationName \qmlproperty string QtMultimedia::RadioData::stationName
This property has the name of the currently tuned radio station. This property has the name of the currently tuned radio station.
*/ */
@@ -243,7 +243,7 @@ QString QDeclarativeRadioData::stationName() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::RadioData::radioText \qmlproperty string QtMultimedia::RadioData::radioText
This property holds free-text transmitted by the radio station. This is typically used to This property holds free-text transmitted by the radio station. This is typically used to
show supporting information for the currently playing content, for instance song title or show supporting information for the currently playing content, for instance song title or
@@ -255,7 +255,7 @@ QString QDeclarativeRadioData::radioText() const
} }
/*! /*!
\qmlproperty bool QtMultimedia5::RadioData::alternativeFrequenciesEnabled \qmlproperty bool QtMultimedia::RadioData::alternativeFrequenciesEnabled
This property allows you to specify whether the radio should try and tune to alternative This property allows you to specify whether the radio should try and tune to alternative
frequencies if the signal strength of the current station becomes too weak. The alternative frequencies if the signal strength of the current station becomes too weak. The alternative
View File
@@ -49,7 +49,7 @@ QT_BEGIN_NAMESPACE
/*! /*!
\qmltype Torch \qmltype Torch
\instantiates QDeclarativeTorch \instantiates QDeclarativeTorch
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\brief Simple control over torch functionality \brief Simple control over torch functionality
\ingroup multimedia_qml \ingroup multimedia_qml
@@ -92,7 +92,7 @@ QDeclarativeTorch::~QDeclarativeTorch()
} }
/*! /*!
\qmlproperty bool QtMultimedia5::Torch::enabled \qmlproperty bool QtMultimedia::Torch::enabled
This property indicates whether the torch is enabled. If the torch functionality is shared This property indicates whether the torch is enabled. If the torch functionality is shared
with the camera flash hardware, the camera will take priority with the camera flash hardware, the camera will take priority
@@ -140,7 +140,7 @@ void QDeclarativeTorch::setEnabled(bool on)
} }
/*! /*!
\qmlproperty int QtMultimedia5::Torch::power \qmlproperty int QtMultimedia::Torch::power
This property holds the current torch power setting, as a percentage of full power. This property holds the current torch power setting, as a percentage of full power.
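A minimal sketch of the Torch type based on the enabled and power properties documented above; remember that shared camera flash hardware may take priority:

    import QtQuick 2.0
    import QtMultimedia 5.0

    Item {
        width: 200; height: 200

        Torch {
            id: torch
            power: 75        // percentage of full power
            enabled: false
        }

        // tap to toggle the torch
        MouseArea {
            anchors.fill: parent
            onClicked: torch.enabled = !torch.enabled
        }
    }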
View File
@@ -58,7 +58,7 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup multimedia_video_qml \ingroup multimedia_video_qml
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
\c VideoOutput is part of the \b{QtMultimedia 5.0} module. \c VideoOutput is part of the \b{QtMultimedia 5.0} module.
@@ -143,7 +143,7 @@ QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
} }
/*! /*!
\qmlproperty variant QtMultimedia5::VideoOutput::source \qmlproperty variant QtMultimedia::VideoOutput::source
This property holds the source item providing the video frames like MediaPlayer or Camera. This property holds the source item providing the video frames like MediaPlayer or Camera.
@@ -265,7 +265,7 @@ void QDeclarativeVideoOutput::_q_updateMediaObject()
} }
/*! /*!
\qmlproperty enumeration QtMultimedia5::VideoOutput::fillMode \qmlproperty enumeration QtMultimedia::VideoOutput::fillMode
Set this property to define how the video is scaled to fit the target area. Set this property to define how the video is scaled to fit the target area.
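As a point of reference, a minimal sketch wiring a MediaPlayer into VideoOutput and picking a fill mode; the media URL is a placeholder:

    import QtQuick 2.0
    import QtMultimedia 5.0

    Item {
        width: 640; height: 360

        MediaPlayer {
            id: player
            source: "file:///path/to/clip.mp4"   // placeholder URL
            autoPlay: true
        }

        VideoOutput {
            anchors.fill: parent
            source: player
            fillMode: VideoOutput.PreserveAspectFit
        }
    }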
@@ -359,7 +359,7 @@ void QDeclarativeVideoOutput::_q_screenOrientationChanged(int orientation)
} }
/*! /*!
\qmlproperty int QtMultimedia5::VideoOutput::orientation \qmlproperty int QtMultimedia::VideoOutput::orientation
In some cases the source video stream requires a certain In some cases the source video stream requires a certain
orientation to be correct. This includes orientation to be correct. This includes
@@ -420,7 +420,7 @@ void QDeclarativeVideoOutput::setOrientation(int orientation)
} }
/*! /*!
\qmlproperty int QtMultimedia5::VideoOutput::autoOrientation \qmlproperty int QtMultimedia::VideoOutput::autoOrientation
This property allows you to enable and disable auto orientation This property allows you to enable and disable auto orientation
of the video stream, so that its orientation always matches of the video stream, so that its orientation always matches
@@ -459,7 +459,7 @@ void QDeclarativeVideoOutput::setAutoOrientation(bool autoOrientation)
} }
/*! /*!
\qmlproperty rectangle QtMultimedia5::VideoOutput::contentRect \qmlproperty rectangle QtMultimedia::VideoOutput::contentRect
This property holds the item coordinates of the area that This property holds the item coordinates of the area that
would contain video to render. With certain fill modes, would contain video to render. With certain fill modes,
@@ -480,7 +480,7 @@ QRectF QDeclarativeVideoOutput::contentRect() const
} }
/*! /*!
\qmlproperty rectangle QtMultimedia5::VideoOutput::sourceRect \qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect
This property holds the area of the source video This property holds the area of the source video
content that is considered for rendering. The content that is considered for rendering. The
@@ -520,7 +520,7 @@ QRectF QDeclarativeVideoOutput::sourceRect() const
} }
/*! /*!
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapNormalizedPointToItem (const QPointF &point) const \qmlmethod QPointF QtMultimedia::VideoOutput::mapNormalizedPointToItem (const QPointF &point) const
Given normalized coordinates \a point (that is, each Given normalized coordinates \a point (that is, each
component in the range of 0 to 1.0), return the mapped point component in the range of 0 to 1.0), return the mapped point
@@ -557,7 +557,7 @@ QPointF QDeclarativeVideoOutput::mapNormalizedPointToItem(const QPointF &point)
} }
/*! /*!
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const \qmlmethod QRectF QtMultimedia::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
Given a rectangle \a rectangle in normalized Given a rectangle \a rectangle in normalized
coordinates (that is, each component in the range of 0 to 1.0), coordinates (that is, each component in the range of 0 to 1.0),
@@ -574,7 +574,7 @@ QRectF QDeclarativeVideoOutput::mapNormalizedRectToItem(const QRectF &rectangle)
} }
/*! /*!
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItem(const QPointF &point) const \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
Given a point \a point in item coordinates, return the Given a point \a point in item coordinates, return the
corresponding point in source coordinates. This mapping is corresponding point in source coordinates. This mapping is
@@ -594,7 +594,7 @@ QPointF QDeclarativeVideoOutput::mapPointToSource(const QPointF &point) const
} }
/*! /*!
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToSource(const QRectF &rectangle) const \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSource(const QRectF &rectangle) const
Given a rectangle \a rectangle in item coordinates, return the Given a rectangle \a rectangle in item coordinates, return the
corresponding rectangle in source coordinates. This mapping is corresponding rectangle in source coordinates. This mapping is
@@ -612,7 +612,7 @@ QRectF QDeclarativeVideoOutput::mapRectToSource(const QRectF &rectangle) const
} }
/*! /*!
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItemNormalized(const QPointF &point) const \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItemNormalized(const QPointF &point) const
Given a point \a point in item coordinates, return the Given a point \a point in item coordinates, return the
corresponding point in normalized source coordinates. This mapping is corresponding point in normalized source coordinates. This mapping is
@@ -647,7 +647,7 @@ QPointF QDeclarativeVideoOutput::mapPointToSourceNormalized(const QPointF &point
} }
/*! /*!
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
Given a rectangle \a rectangle in item coordinates, return the Given a rectangle \a rectangle in item coordinates, return the
corresponding rectangle in normalized source coordinates. This mapping is corresponding rectangle in normalized source coordinates. This mapping is
@@ -670,7 +670,7 @@ QDeclarativeVideoOutput::SourceType QDeclarativeVideoOutput::sourceType() const
} }
/*! /*!
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItem(const QPointF &point) const \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
Given a point \a point in source coordinates, return the Given a point \a point in source coordinates, return the
corresponding point in item coordinates. This mapping is corresponding point in item coordinates. This mapping is
@@ -693,7 +693,7 @@ QPointF QDeclarativeVideoOutput::mapPointToItem(const QPointF &point) const
} }
/*! /*!
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToItem(const QRectF &rectangle) const \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToItem(const QRectF &rectangle) const
Given a rectangle \a rectangle in source coordinates, return the Given a rectangle \a rectangle in source coordinates, return the
corresponding rectangle in item coordinates. This mapping is corresponding rectangle in item coordinates. This mapping is
View File
@@ -193,6 +193,13 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (!m_glContext) { if (!m_glContext) {
m_glContext = QOpenGLContext::currentContext(); m_glContext = QOpenGLContext::currentContext();
m_surface->scheduleOpenGLContextUpdate(); m_surface->scheduleOpenGLContextUpdate();
// Internal mechanism to call back the surface renderer from the QtQuick render thread
QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
if (obj) {
QEvent ev(QEvent::User);
obj->event(&ev);
}
} }
if (m_frameChanged) { if (m_frameChanged) {
View File
@@ -89,7 +89,7 @@ QT_BEGIN_NAMESPACE
\inmodule QtMultimedia \inmodule QtMultimedia
\ingroup multimedia_qml \ingroup multimedia_qml
\ingroup multimedia_audio_qml \ingroup multimedia_audio_qml
\inqmlmodule QtMultimedia 5.0 \inqmlmodule QtMultimedia
SoundEffect is part of the \b{QtMultimedia 5.0} module. SoundEffect is part of the \b{QtMultimedia 5.0} module.
@@ -149,7 +149,7 @@ QStringList QSoundEffect::supportedMimeTypes()
} }
/*! /*!
\qmlproperty url QtMultimedia5::SoundEffect::source \qmlproperty url QtMultimedia::SoundEffect::source
This property holds the url for the sound to play. For the SoundEffect This property holds the url for the sound to play. For the SoundEffect
to attempt to load the source, the URL must exist and the application must have read permission to attempt to load the source, the URL must exist and the application must have read permission
@@ -182,7 +182,7 @@ void QSoundEffect::setSource(const QUrl &url)
} }
/*! /*!
\qmlproperty int QtMultimedia5::SoundEffect::loops \qmlproperty int QtMultimedia::SoundEffect::loops
This property provides a way to control the number of times to repeat the sound on each play(). This property provides a way to control the number of times to repeat the sound on each play().
@@ -232,7 +232,7 @@ void QSoundEffect::setLoopCount(int loopCount)
} }
/*! /*!
\qmlproperty int QtMultimedia5::SoundEffect::loopsRemaining \qmlproperty int QtMultimedia::SoundEffect::loopsRemaining
This property contains the number of loops remaining before the sound effect This property contains the number of loops remaining before the sound effect
stops by itself, or SoundEffect.Infinite if that's what has been set in \l loops. stops by itself, or SoundEffect.Infinite if that's what has been set in \l loops.
@@ -250,7 +250,7 @@ int QSoundEffect::loopsRemaining() const
/*! /*!
\qmlproperty qreal QtMultimedia5::SoundEffect::volume \qmlproperty qreal QtMultimedia::SoundEffect::volume
This property holds the volume of the sound effect playback, from 0.0 (silent) to 1.0 (maximum volume). This property holds the volume of the sound effect playback, from 0.0 (silent) to 1.0 (maximum volume).
*/ */
@@ -285,7 +285,7 @@ void QSoundEffect::setVolume(qreal volume)
} }
/*! /*!
\qmlproperty bool QtMultimedia5::SoundEffect::muted \qmlproperty bool QtMultimedia::SoundEffect::muted
This property provides a way to control muting. A value of \c true will mute this effect. This property provides a way to control muting. A value of \c true will mute this effect.
Otherwise, playback will occur with the currently specified \l volume. Otherwise, playback will occur with the currently specified \l volume.
@@ -323,7 +323,7 @@ void QSoundEffect::setMuted(bool muted)
Returns whether the sound effect has finished loading the \l source(). Returns whether the sound effect has finished loading the \l source().
*/ */
/*! /*!
\qmlmethod bool QtMultimedia5::SoundEffect::isLoaded() \qmlmethod bool QtMultimedia::SoundEffect::isLoaded()
Returns whether the sound effect has finished loading the \l source. Returns whether the sound effect has finished loading the \l source.
*/ */
@@ -333,7 +333,7 @@ bool QSoundEffect::isLoaded() const
} }
/*! /*!
\qmlmethod QtMultimedia5::SoundEffect::play() \qmlmethod QtMultimedia::SoundEffect::play()
Start playback of the sound effect, looping the effect for the number of Start playback of the sound effect, looping the effect for the number of
times as specified in the loops property. times as specified in the loops property.
@@ -354,7 +354,7 @@ void QSoundEffect::play()
} }
/*! /*!
\qmlproperty bool QtMultimedia5::SoundEffect::playing \qmlproperty bool QtMultimedia::SoundEffect::playing
This property indicates whether the sound effect is playing or not. This property indicates whether the sound effect is playing or not.
*/ */
@@ -381,7 +381,7 @@ bool QSoundEffect::isPlaying() const
*/ */
/*! /*!
\qmlproperty enumeration QtMultimedia5::SoundEffect::status \qmlproperty enumeration QtMultimedia::SoundEffect::status
This property indicates the current status of the SoundEffect This property indicates the current status of the SoundEffect
as enumerated within SoundEffect. as enumerated within SoundEffect.
@@ -411,7 +411,7 @@ QSoundEffect::Status QSoundEffect::status() const
} }
/*! /*!
\qmlproperty string QtMultimedia5::SoundEffect::category \qmlproperty string QtMultimedia::SoundEffect::category
This property contains the \e category of this sound effect. This property contains the \e category of this sound effect.
@@ -474,7 +474,7 @@ void QSoundEffect::setCategory(const QString &category)
/*! /*!
\qmlmethod QtMultimedia5::SoundEffect::stop() \qmlmethod QtMultimedia::SoundEffect::stop()
Stop current playback. Stop current playback.
@@ -498,7 +498,7 @@ void QSoundEffect::stop()
The \c sourceChanged signal is emitted when the source has been changed. The \c sourceChanged signal is emitted when the source has been changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::sourceChanged() \qmlsignal QtMultimedia::SoundEffect::sourceChanged()
The \c sourceChanged signal is emitted when the source has been changed. The \c sourceChanged signal is emitted when the source has been changed.
@@ -510,7 +510,7 @@ void QSoundEffect::stop()
The \c loadedChanged signal is emitted when the loading state has changed. The \c loadedChanged signal is emitted when the loading state has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::loadedChanged() \qmlsignal QtMultimedia::SoundEffect::loadedChanged()
The \c loadedChanged signal is emitted when the loading state has changed. The \c loadedChanged signal is emitted when the loading state has changed.
@@ -523,7 +523,7 @@ void QSoundEffect::stop()
The \c loopCountChanged signal is emitted when the initial number of loops has changed. The \c loopCountChanged signal is emitted when the initial number of loops has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::loopCountChanged() \qmlsignal QtMultimedia::SoundEffect::loopCountChanged()
The \c loopCountChanged signal is emitted when the initial number of loops has changed. The \c loopCountChanged signal is emitted when the initial number of loops has changed.
@@ -536,7 +536,7 @@ void QSoundEffect::stop()
The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed. The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::loopsRemainingChanged() \qmlsignal QtMultimedia::SoundEffect::loopsRemainingChanged()
The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed. The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed.
@@ -549,7 +549,7 @@ void QSoundEffect::stop()
The \c volumeChanged signal is emitted when the volume has changed. The \c volumeChanged signal is emitted when the volume has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::volumeChanged() \qmlsignal QtMultimedia::SoundEffect::volumeChanged()
The \c volumeChanged signal is emitted when the volume has changed. The \c volumeChanged signal is emitted when the volume has changed.
@@ -562,7 +562,7 @@ void QSoundEffect::stop()
The \c mutedChanged signal is emitted when the mute state has changed. The \c mutedChanged signal is emitted when the mute state has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::mutedChanged() \qmlsignal QtMultimedia::SoundEffect::mutedChanged()
The \c mutedChanged signal is emitted when the mute state has changed. The \c mutedChanged signal is emitted when the mute state has changed.
@@ -575,7 +575,7 @@ void QSoundEffect::stop()
The \c playingChanged signal is emitted when the playing property has changed. The \c playingChanged signal is emitted when the playing property has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::playingChanged() \qmlsignal QtMultimedia::SoundEffect::playingChanged()
The \c playingChanged signal is emitted when the playing property has changed. The \c playingChanged signal is emitted when the playing property has changed.
@@ -588,7 +588,7 @@ void QSoundEffect::stop()
The \c statusChanged signal is emitted when the status property has changed. The \c statusChanged signal is emitted when the status property has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::statusChanged() \qmlsignal QtMultimedia::SoundEffect::statusChanged()
The \c statusChanged signal is emitted when the status property has changed. The \c statusChanged signal is emitted when the status property has changed.
@@ -601,7 +601,7 @@ void QSoundEffect::stop()
The \c categoryChanged signal is emitted when the category property has changed. The \c categoryChanged signal is emitted when the category property has changed.
*/ */
/*! /*!
\qmlsignal QtMultimedia5::SoundEffect::categoryChanged() \qmlsignal QtMultimedia::SoundEffect::categoryChanged()
The \c categoryChanged signal is emitted when the category property has changed. The \c categoryChanged signal is emitted when the category property has changed.
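A small usage sketch of the SoundEffect type covered by these docs; the .wav file name is a placeholder:

    import QtQuick 2.0
    import QtMultimedia 5.0

    Rectangle {
        width: 200; height: 200

        SoundEffect {
            id: playSound
            source: "soundeffect.wav"   // placeholder file
            loops: 2
            volume: 0.5
        }

        MouseArea {
            anchors.fill: parent
            onClicked: playSound.play()
        }
    }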
View File
@@ -34,7 +34,7 @@
Qt Multimedia offers a range of audio classes, covering both low and Qt Multimedia offers a range of audio classes, covering both low and
high level approaches to audio input, output and processing. In high level approaches to audio input, output and processing. In
addition to traditional audio usage, the \l {Positional Audio}{Qt AudioEngine} addition to traditional audio usage, the \l{Qt Audio Engine QML Types}{Qt Audio Engine}
QML types offer high level 3D positional audio for QML applications. QML types offer high level 3D positional audio for QML applications.
See that documentation for more information. See that documentation for more information.
View File
@@ -43,8 +43,8 @@ on BB10.
The Qt Multimedia BlackBerry backend uses mmrenderer for media playback. The Qt Multimedia BlackBerry backend uses mmrenderer for media playback.
For the positional audio classes in the \l {Positional Audio} {QtAudioEngine} QML For the positional audio classes in the \l{Qt Audio Engine QML Types}{Qt Audio Engine}
module, OpenAL is used as on all other platforms. QML module, OpenAL is used as on all other platforms.
For recording videos and taking photos, the camapi library is used. For recording videos and taking photos, the camapi library is used.
@@ -63,7 +63,7 @@ mmrenderer creates a seperate window displaying a video and puts that on top of
As a consequence, no other widget or QML element can be put on top of the video, and QML shaders have As a consequence, no other widget or QML element can be put on top of the video, and QML shaders have
no effect. no effect.
The \l {Positional Audio} {QtAudioEngine} QML module is fully supported, as it is based on OpenAL which is available The \l{Qt Audio Engine QML Types}{Qt Audio Engine} QML module is fully supported, as it is based on OpenAL which is available
in BB10. in BB10.
The \l {camera} {QCamera} support includes recording of videos and taking photos. The viewfinder The \l {camera} {QCamera} support includes recording of videos and taking photos. The viewfinder
View File
@@ -46,7 +46,7 @@ Here are some examples of what can be done with Qt Multimedia APIs:
\li Record audio and compress it \li Record audio and compress it
\li Tune and listen to radio stations \li Tune and listen to radio stations
\li Use a camera, including viewfinder, image capture, and movie recording \li Use a camera, including viewfinder, image capture, and movie recording
\li Play 3D positional audio with \l {Positional Audio} {QtAudioEngine} \li Play 3D positional audio with \l{Qt Audio Engine QML Types}{Qt Audio Engine}
\li Decode audio media files into memory for processing \li Decode audio media files into memory for processing
\li Access video frames or audio buffers as they are played or recorded \li Access video frames or audio buffers as they are played or recorded
\endlist \endlist
@@ -180,7 +180,7 @@ The QML types are accessed by using:
import QtMultimedia 5.0 import QtMultimedia 5.0
\endcode \endcode
\annotatedlist multimedia_qml \annotatedlist multimedia_qml
The following types are accessed by using \l {Positional Audio} {QtAudioEngine}: The following types are accessed by using \l{Qt Audio Engine QML Types}{Qt Audio Engine}:
\code \code
import QtAudioEngine 1.0 import QtAudioEngine 1.0
\endcode \endcode
View File
@@ -26,54 +26,34 @@
****************************************************************************/ ****************************************************************************/
/*! /*!
\page audioengineoverview.html \qmlmodule QtAudioEngine 1.0
\title Positional Audio \title Qt Audio Engine QML Types
\brief 3D positional audio playback and content management \ingroup qmlmodules
\brief Provides QML types for 3D positional audio playback and content management.
\section1 QtAudioEngine Features Qt Audio Engine is part of the \l{Qt Multimedia} module. Qt Audio
Engine provides types for 3D positional audio playback and content management.
Qt Multimedia includes the \c QtAudioEngine QML module for The QML types can be imported into your application using the following import
providing 3D positional audio playback and content management. statement in your .qml file:
\code
QtAudioEngine enables developers to organize wave files into discrete \l Sound with different
\l {PlayVariation}{play variations}, group sound controls by \l {AudioCategory} categories and
define \l {AttenuationModelLinear}{attenuation models} and various 3d audio settings all in one
place. Playback of \l {SoundInstance}{sound instances} can be conveniently activated by in-app
events and managed by QtAudioEngine or controlled by explicitly defining \l SoundInstance
for easier QML bindings.
To access these QML types import the
\b{QtAudioEngine 1.0} module.
\qml
import QtQuick 2.0
import QtAudioEngine 1.0 import QtAudioEngine 1.0
\endcode
AudioEngine { \section1 Qt Audio Engine Features
//...
} Qt Audio Engine enables developers to organize wave files into discrete \l Sound
\endqml with different \l {PlayVariation}{play variations}, group sound controls by \l
{AudioCategory} categories and define \l {AttenuationModelLinear}{attenuation
models} and various 3D audio settings all in one place. Playback of \l
{SoundInstance}{sound instances} can be conveniently activated by in-app events
and managed by QtAudioEngine or controlled by explicitly defining \l
SoundInstance for easier QML bindings.
\section1 Examples \section1 Examples
\list \list
\li \l {AudioEngine Example}{Audio Engine} \li \l {AudioEngine Example}{Audio Engine}
\endlist \endlist
\section1 Reference Documentation \section1 QML types
\section2 QML Types
\list
\li \l AudioEngine
\li \l AudioSample
\li \l AudioCategory
\li \l AttenuationModelLinear
\li \l AttenuationModelInverse
\li \l Sound
\li \l PlayVariation
\li \l AudioListener
\li \l SoundInstance
\endlist
*/ */
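For context, a hedged sketch of the AudioEngine/AudioSample/Sound/PlayVariation structure this page describes; the sample file and sound names are illustrative:

    import QtQuick 2.0
    import QtAudioEngine 1.0

    Rectangle {
        width: 200; height: 200

        AudioEngine {
            id: audioEngine

            AudioSample {
                name: "explosion"
                source: "explosion.wav"   // placeholder file
            }

            Sound {
                name: "explosion"
                PlayVariation {
                    sample: "explosion"
                }
            }
        }

        MouseArea {
            anchors.fill: parent
            onClicked: audioEngine.sounds["explosion"].play()
        }
    }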
View File
@@ -33,7 +33,9 @@
Qt Multimedia is an essential module that provides a rich set of QML types Qt Multimedia is an essential module that provides a rich set of QML types
and C++ classes to handle multimedia content. It also provides necessary and C++ classes to handle multimedia content. It also provides necessary
APIs to access the camera and radio functionality. APIs to access the camera and radio functionality. The included
\l{Qt Audio Engine QML Types}{Qt Audio Engine} provides types for
3D positional audio playback and management.
The \l{Qt Multimedia Widgets} module provides widget based multimedia The \l{Qt Multimedia Widgets} module provides widget based multimedia
classes. classes.
@@ -74,20 +76,20 @@
\li Type \li Type
\li Description \li Description
\row \row
\li \l {QtMultimedia5::Audio}{Audio} \li \l {QtMultimedia::Audio}{Audio}
\li Add audio playback functionality to a scene \li Add audio playback functionality to a scene
\row \row
\li \l {QtMultimedia5::Camera}{Camera} \li \l {QtMultimedia::Camera}{Camera}
\li Access camera viewfinder frames \li Access camera viewfinder frames
\row \row
\li MediaPlayer \li MediaPlayer
\li Add media playback functionality to a scene. It is the same as the Audio type, \li Add media playback functionality to a scene. It is the same as the Audio type,
but can be used for video playback with the VideoOutput type. but can be used for video playback with the VideoOutput type.
\row \row
\li \l {QtMultimedia5::Radio}{Radio} \li \l {QtMultimedia::Radio}{Radio}
\li Access radio functionality \li Access radio functionality
\row \row
\li \l {QtMultimedia5::Video}{Video} \li \l {QtMultimedia::Video}{Video}
\li Add Video playback functionality to a scene. It uses MediaPlayer and \li Add Video playback functionality to a scene. It uses MediaPlayer and
VideoOutput types to provide video playback functionality. VideoOutput types to provide video playback functionality.
\endtable \endtable
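To make the table concrete, a minimal sketch using the Video convenience type listed above; the source file name is a placeholder:

    import QtQuick 2.0
    import QtMultimedia 5.0

    Video {
        id: video
        width: 800; height: 600
        source: "video.mp4"   // placeholder file

        MouseArea {
            anchors.fill: parent
            onClicked: video.play()
        }
    }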
@@ -122,7 +124,6 @@
\list \list
\li \l Multimedia - overview of multimedia support in Qt \li \l Multimedia - overview of multimedia support in Qt
\li \l{Audio Overview} \li \l{Audio Overview}
\li \l{Positional Audio}
\li \l{Video Overview} \li \l{Video Overview}
\li \l{Camera Overview} \li \l{Camera Overview}
\li \l{Radio Overview} \li \l{Radio Overview}
@@ -135,10 +136,20 @@
\endlist \endlist
\section2 Reference \section2 Reference
\list
\li Qt Multimedia
\list \list
\li \l{Qt Multimedia QML Types}{QML Types} \li \l{Qt Multimedia QML Types}{QML Types}
\li \l{Qt Multimedia C++ Classes}{C++ Classes} \li \l{Qt Multimedia C++ Classes}{C++ Classes}
\endlist \endlist
\endlist
\list
\li Qt Audio Engine
\list
\li \l{Qt Audio Engine QML Types}{QML Types}
\endlist
\endlist
\section2 Examples \section2 Examples
\list \list
View File
@@ -26,7 +26,7 @@
****************************************************************************/ ****************************************************************************/
/*! /*!
\qmlmodule QtMultimedia 5 \qmlmodule QtMultimedia 5.0
\title Qt Multimedia QML Types \title Qt Multimedia QML Types
\ingroup qmlmodules \ingroup qmlmodules
\brief Provides QML types for multimedia support. \brief Provides QML types for multimedia support.
View File
@@ -62,7 +62,8 @@ ANDROID_BUNDLED_FILES += \
lib/libQt5MultimediaQuick_p.so lib/libQt5MultimediaQuick_p.so
MODULE_PLUGIN_TYPES = \ MODULE_PLUGIN_TYPES = \
mediaservice \ mediaservice \
audio audio \
video/videonode
win32:LIBS += -luuid win32:LIBS += -luuid
View File
@@ -433,10 +433,8 @@ void QMediaPlaylistNavigator::jump(int position)
{ {
Q_D(QMediaPlaylistNavigator); Q_D(QMediaPlaylistNavigator);
if (position<-1 || position>=d->playlist->mediaCount()) { if (position < -1 || position >= d->playlist->mediaCount())
qWarning() << "QMediaPlaylistNavigator: Jump outside playlist range";
position = -1; position = -1;
}
if (position != -1) if (position != -1)
d->lastValidPos = position; d->lastValidPos = position;
View File
@@ -69,7 +69,7 @@ private:
int m_orientation; int m_orientation;
}; };
class QSGVideoNodeFactoryInterface class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
{ {
public: public:
virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const = 0; virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const = 0;
@@ -79,7 +79,7 @@ public:
#define QSGVideoNodeFactoryInterface_iid "org.qt-project.qt.sgvideonodefactory/5.0" #define QSGVideoNodeFactoryInterface_iid "org.qt-project.qt.sgvideonodefactory/5.0"
Q_DECLARE_INTERFACE(QSGVideoNodeFactoryInterface, QSGVideoNodeFactoryInterface_iid) Q_DECLARE_INTERFACE(QSGVideoNodeFactoryInterface, QSGVideoNodeFactoryInterface_iid)
class QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface
{ {
Q_OBJECT Q_OBJECT
Q_INTERFACES(QSGVideoNodeFactoryInterface) Q_INTERFACES(QSGVideoNodeFactoryInterface)
View File
@@ -3,3 +3,6 @@ TEMPLATE = subdirs
SUBDIRS += src \ SUBDIRS += src \
jar jar
qtHaveModule(quick) {
SUBDIRS += videonode
}
View File
@@ -45,7 +45,10 @@ import android.hardware.Camera;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.util.Log; import android.util.Log;
public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback, Camera.AutoFocusCallback public class QtCamera implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
{ {
private int m_cameraId = -1; private int m_cameraId = -1;
private Camera m_camera = null; private Camera m_camera = null;
@@ -149,6 +152,11 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
m_camera.cancelAutoFocus(); m_camera.cancelAutoFocus();
} }
public void requestPreviewFrame()
{
m_camera.setOneShotPreviewCallback(this);
}
public void takePicture() public void takePicture()
{ {
try { try {
@@ -170,6 +178,12 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
notifyPictureCaptured(m_cameraId, data); notifyPictureCaptured(m_cameraId, data);
} }
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
notifyPreviewFrame(m_cameraId, data);
}
@Override @Override
public void onAutoFocus(boolean success, Camera camera) public void onAutoFocus(boolean success, Camera camera)
{ {
@@ -179,4 +193,5 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
private static native void notifyAutoFocusComplete(int id, boolean success); private static native void notifyAutoFocusComplete(int id, boolean success);
private static native void notifyPictureExposed(int id); private static native void notifyPictureExposed(int id);
private static native void notifyPictureCaptured(int id, byte[] data); private static native void notifyPictureCaptured(int id, byte[] data);
private static native void notifyPreviewFrame(int id, byte[] data);
} }
View File
@@ -76,4 +76,29 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
return s1.width() * s1.height() < s2.width() * s2.height(); return s1.width() * s1.height() < s2.width() * s2.height();
} }
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height)
{
const int frameSize = width * height;
int a = 0;
for (int i = 0, ci = 0; i < height; ++i, ci += 1) {
for (int j = 0, cj = 0; j < width; ++j, cj += 1) {
int y = (0xff & ((int) yuv[ci * width + cj]));
int v = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 0]));
int u = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 1]));
y = y < 16 ? 16 : y;
int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
r = qBound(0, r, 255);
g = qBound(0, g, 255);
b = qBound(0, b, 255);
rgb[a++] = 0xff000000 | (r << 16) | (g << 8) | b;
}
}
}
QT_END_NAMESPACE QT_END_NAMESPACE
View File
@@ -53,6 +53,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
bool qt_sizeLessThan(const QSize &s1, const QSize &s2); bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height);
QT_END_NAMESPACE QT_END_NAMESPACE
#endif // QANDROIDMULTIMEDIAUTILS_H #endif // QANDROIDMULTIMEDIAUTILS_H
View File
@@ -48,26 +48,26 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
typedef void (*TextureReadyCallback)(void*);
class QAndroidVideoOutput class QAndroidVideoOutput
{ {
public: public:
QAndroidVideoOutput() { }
virtual ~QAndroidVideoOutput() { } virtual ~QAndroidVideoOutput() { }
virtual jobject surfaceHolder() = 0; virtual jobject surfaceHolder() = 0;
virtual jobject surfaceTexture() { return 0; }
virtual bool isTextureReady() = 0; virtual bool isReady() { return true; }
virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
virtual jobject surfaceTexture() = 0;
virtual void setVideoSize(const QSize &size) = 0; virtual void setVideoSize(const QSize &) { }
virtual void stop() = 0; virtual void stop() { }
virtual QImage toImage() = 0; // signals:
// void readyChanged(bool);
}; };
#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0"
Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid)
QT_END_NAMESPACE QT_END_NAMESPACE
#endif // QANDROIDVIDEOOUTPUT_H #endif // QANDROIDVIDEOOUTPUT_H
View File
@@ -44,39 +44,31 @@
#include <QtCore/private/qjni_p.h> #include <QtCore/private/qjni_p.h>
#include "jsurfacetextureholder.h" #include "jsurfacetextureholder.h"
#include <QAbstractVideoSurface> #include <QAbstractVideoSurface>
#include <QOpenGLContext>
#include <QOffscreenSurface>
#include <QOpenGLFramebufferObject>
#include <QVideoSurfaceFormat> #include <QVideoSurfaceFormat>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <qevent.h> #include <qevent.h>
#include <qcoreapplication.h>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
-static const GLfloat g_vertex_data[] = {
-    -1.f, 1.f,
-    1.f, 1.f,
-    1.f, -1.f,
-    -1.f, -1.f
-};
-static const GLfloat g_texture_data[] = {
-    0.f, 0.f,
-    1.f, 0.f,
-    1.f, 1.f,
-    0.f, 1.f
-};
+#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
+TextureDeleter::~TextureDeleter()
+{
+    glDeleteTextures(1, &m_id);
+}
class TextureVideoBuffer : public QAbstractVideoBuffer class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
{ {
public: public:
TextureVideoBuffer(GLuint textureId) AndroidTextureVideoBuffer(JSurfaceTexture *surface)
: QAbstractVideoBuffer(GLTextureHandle) : QAbstractVideoBuffer(ExternalGLTextureHandle)
, m_textureId(textureId) , m_surfaceTexture(surface)
{} {
}
virtual ~TextureVideoBuffer() {} virtual ~AndroidTextureVideoBuffer() {}
MapMode mapMode() const { return NotMapped; } MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; } uchar *map(MapMode, int*, int*) { return 0; }
@@ -84,67 +76,33 @@ public:
QVariant handle() const QVariant handle() const
{ {
return QVariant::fromValue<unsigned int>(m_textureId); if (m_data.isEmpty()) {
// update the video texture (called from the render thread)
m_surfaceTexture->updateTexImage();
m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
}
return m_data;
} }
private: private:
GLuint m_textureId; mutable JSurfaceTexture *m_surfaceTexture;
}; mutable QVariantList m_data;
class ImageVideoBuffer : public QAbstractVideoBuffer
{
public:
ImageVideoBuffer(const QImage &image)
: QAbstractVideoBuffer(NoHandle)
, m_image(image)
, m_mode(NotMapped)
{
}
MapMode mapMode() const { return m_mode; }
uchar *map(MapMode mode, int *, int *)
{
if (mode != NotMapped && m_mode == NotMapped) {
m_mode = mode;
return m_image.bits();
}
return 0;
}
void unmap()
{
m_mode = NotMapped;
}
private:
QImage m_image;
MapMode m_mode;
}; };
QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent) QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent) : QVideoRendererControl(parent)
, m_surface(0) , m_surface(0)
, m_offscreenSurface(0)
, m_glContext(0)
, m_fbo(0)
, m_program(0)
, m_useImage(false)
, m_androidSurface(0) , m_androidSurface(0)
, m_surfaceTexture(0) , m_surfaceTexture(0)
, m_surfaceHolder(0) , m_surfaceHolder(0)
, m_externalTex(0) , m_externalTex(0)
, m_textureReadyCallback(0) , m_textureDeleter(0)
, m_textureReadyContext(0)
{ {
} }
QAndroidVideoRendererControl::~QAndroidVideoRendererControl() QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
{ {
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
if (m_surfaceTexture) { if (m_surfaceTexture) {
m_surfaceTexture->callMethod<void>("release"); m_surfaceTexture->callMethod<void>("release");
delete m_surfaceTexture; delete m_surfaceTexture;
@@ -159,13 +117,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
delete m_surfaceHolder; delete m_surfaceHolder;
m_surfaceHolder = 0; m_surfaceHolder = 0;
} }
if (m_externalTex) if (m_textureDeleter)
glDeleteTextures(1, &m_externalTex); m_textureDeleter->deleteLater();
delete m_fbo;
delete m_program;
delete m_glContext;
delete m_offscreenSurface;
} }
QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
@@ -178,28 +131,23 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
if (surface == m_surface) if (surface == m_surface)
return; return;
if (m_surface && m_surface->isActive()) { if (m_surface) {
if (m_surface->isActive())
m_surface->stop(); m_surface->stop();
m_surface->removeEventFilter(this); m_surface->setProperty("_q_GLThreadCallback", QVariant());
} }
m_surface = surface; m_surface = surface;
if (m_surface) { if (m_surface) {
m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32); m_surface->setProperty("_q_GLThreadCallback",
m_surface->installEventFilter(this); QVariant::fromValue<QObject*>(this));
} }
} }
bool QAndroidVideoRendererControl::isTextureReady() bool QAndroidVideoRendererControl::isReady()
{ {
return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid()); return QOpenGLContext::currentContext() || m_externalTex;
}
void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
{
m_textureReadyCallback = cb;
m_textureReadyContext = context;
} }
bool QAndroidVideoRendererControl::initSurfaceTexture() bool QAndroidVideoRendererControl::initSurfaceTexture()
@@ -210,45 +158,15 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
if (!m_surface) if (!m_surface)
return false; return false;
QOpenGLContext *currContext = QOpenGLContext::currentContext(); // if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
// for the GL render thread to call us back to do it.
// If we don't have a GL context in the current thread, create one and share it if (QOpenGLContext::currentContext()) {
// with the render thread GL context
if (!currContext && !m_glContext) {
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
if (!shareContext)
return false;
m_offscreenSurface = new QOffscreenSurface;
QSurfaceFormat format;
format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
m_offscreenSurface->setFormat(format);
m_offscreenSurface->create();
m_glContext = new QOpenGLContext;
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
if (shareContext)
m_glContext->setShareContext(shareContext);
if (!m_glContext->create()) {
delete m_glContext;
m_glContext = 0;
delete m_offscreenSurface;
m_offscreenSurface = 0;
return false;
}
// if sharing contexts is not supported, fallback to image rendering and send the bits
// to the video surface
if (!m_glContext->shareContext())
m_useImage = true;
}
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
glGenTextures(1, &m_externalTex); glGenTextures(1, &m_externalTex);
m_textureDeleter = new TextureDeleter(m_externalTex);
} else if (!m_externalTex) {
return false;
}
m_surfaceTexture = new JSurfaceTexture(m_externalTex); m_surfaceTexture = new JSurfaceTexture(m_externalTex);
if (m_surfaceTexture->isValid()) { if (m_surfaceTexture->isValid()) {
@@ -256,7 +174,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
} else { } else {
delete m_surfaceTexture; delete m_surfaceTexture;
m_surfaceTexture = 0; m_surfaceTexture = 0;
glDeleteTextures(1, &m_externalTex); m_textureDeleter->deleteLater();
m_externalTex = 0;
m_textureDeleter = 0;
} }
return m_surfaceTexture != 0; return m_surfaceTexture != 0;
@@ -294,9 +214,6 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
stop(); stop();
m_nativeSize = size; m_nativeSize = size;
delete m_fbo;
m_fbo = 0;
} }
void QAndroidVideoRendererControl::stop() void QAndroidVideoRendererControl::stop()
@@ -306,133 +223,40 @@ void QAndroidVideoRendererControl::stop()
m_nativeSize = QSize(); m_nativeSize = QSize();
} }
QImage QAndroidVideoRendererControl::toImage()
{
if (!m_fbo)
return QImage();
return m_fbo->toImage().mirrored();
}
void QAndroidVideoRendererControl::onFrameAvailable() void QAndroidVideoRendererControl::onFrameAvailable()
{ {
if (m_glContext) if (!m_nativeSize.isValid() || !m_surface)
m_glContext->makeCurrent(m_offscreenSurface);
m_surfaceTexture->updateTexImage();
if (!m_nativeSize.isValid())
return; return;
renderFrameToFbo(); QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
QAbstractVideoBuffer *buffer = 0;
QVideoFrame frame;
if (m_useImage) {
buffer = new ImageVideoBuffer(m_fbo->toImage().mirrored());
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_RGB32);
} else {
buffer = new TextureVideoBuffer(m_fbo->texture());
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
}
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat() if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->nativeResolution() != frame.size())) { || m_surface->nativeResolution() != frame.size())) {
m_surface->stop(); m_surface->stop();
} }
if (!m_surface->isActive()) { if (!m_surface->isActive()) {
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
m_useImage ? QAbstractVideoBuffer::NoHandle format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
: QAbstractVideoBuffer::GLTextureHandle);
m_surface->start(format); m_surface->start(format);
} }
if (m_surface->isActive()) if (m_surface->isActive())
m_surface->present(frame); m_surface->present(frame);
}
} }
void QAndroidVideoRendererControl::renderFrameToFbo() void QAndroidVideoRendererControl::customEvent(QEvent *e)
{ {
createGLResources(); if (e->type() == QEvent::User) {
// This is running in the render thread (OpenGL enabled)
m_fbo->bind(); if (!m_externalTex) {
glGenTextures(1, &m_externalTex);
glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height()); m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
emit readyChanged(true);
m_program->bind();
m_program->enableAttributeArray(0);
m_program->enableAttributeArray(1);
m_program->setUniformValue("frameTexture", GLuint(0));
m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
m_program->disableAttributeArray(0);
m_program->disableAttributeArray(1);
m_program->release();
glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
m_fbo->release();
glFinish();
}
void QAndroidVideoRendererControl::createGLResources()
{
if (!m_fbo)
m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
if (!m_program) {
m_program = new QOpenGLShaderProgram;
QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
"attribute highp vec2 textureCoordArray; \n" \
"uniform highp mat4 texMatrix; \n" \
"varying highp vec2 textureCoords; \n" \
"void main(void) \n" \
"{ \n" \
" gl_Position = vertexCoordsArray; \n" \
" textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
"}\n");
m_program->addShader(vertexShader);
QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
"varying highp vec2 textureCoords; \n" \
"uniform samplerExternalOES frameTexture; \n" \
"void main() \n" \
"{ \n" \
" gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
"}\n");
m_program->addShader(fragmentShader);
m_program->bindAttributeLocation("vertexCoordsArray", 0);
m_program->bindAttributeLocation("textureCoordArray", 1);
m_program->link();
}
}
bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
{
if (e->type() == QEvent::DynamicPropertyChange) {
QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
m_textureReadyCallback(m_textureReadyContext);
m_textureReadyCallback = 0;
m_textureReadyContext = 0;
} }
} }
return false;
} }
QT_END_NAMESPACE QT_END_NAMESPACE
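The new AndroidTextureVideoBuffer returns its handle as a QVariantList holding the external texture id and the SurfaceTexture transform matrix; the scene-graph material added later in this change unpacks it in exactly that order. A small sketch of the packing convention, independent of any real texture (QMatrix4x4 needs QT += gui):

#include <QDebug>
#include <QMatrix4x4>
#include <QVariant>

int main()
{
    // Producer side: what handle() packs (the texture id is made up here).
    uint textureId = 7;
    QMatrix4x4 texMatrix;               // identity stands in for getTransformMatrix()
    QVariantList handle;
    handle << textureId << texMatrix;

    // Consumer side: what the material's updateTexture() unpacks.
    uint unpackedId = handle.at(0).toUInt();
    QMatrix4x4 unpackedMatrix = qvariant_cast<QMatrix4x4>(handle.at(1));

    qDebug() << "texture id:" << unpackedId
             << "matrix still identity:" << unpackedMatrix.isIdentity();
    return 0;
}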

@@ -48,15 +48,23 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QOpenGLContext;
class QOffscreenSurface;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class JSurfaceTextureHolder; class JSurfaceTextureHolder;
class TextureDeleter : public QObject
{
Q_OBJECT
public:
TextureDeleter(uint id) : m_id(id) { }
~TextureDeleter();
private:
uint m_id;
};
class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
{ {
Q_OBJECT Q_OBJECT
Q_INTERFACES(QAndroidVideoOutput)
public: public:
explicit QAndroidVideoRendererControl(QObject *parent = 0); explicit QAndroidVideoRendererControl(QObject *parent = 0);
~QAndroidVideoRendererControl() Q_DECL_OVERRIDE; ~QAndroidVideoRendererControl() Q_DECL_OVERRIDE;
@@ -65,38 +73,30 @@ public:
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE; void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;
jobject surfaceHolder() Q_DECL_OVERRIDE; jobject surfaceHolder() Q_DECL_OVERRIDE;
bool isTextureReady() Q_DECL_OVERRIDE;
void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
jobject surfaceTexture() Q_DECL_OVERRIDE; jobject surfaceTexture() Q_DECL_OVERRIDE;
bool isReady() Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE; void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE; void stop() Q_DECL_OVERRIDE;
QImage toImage() Q_DECL_OVERRIDE;
bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE; void customEvent(QEvent *) Q_DECL_OVERRIDE;
Q_SIGNALS:
void readyChanged(bool);
private Q_SLOTS: private Q_SLOTS:
void onFrameAvailable(); void onFrameAvailable();
private: private:
bool initSurfaceTexture(); bool initSurfaceTexture();
void renderFrameToFbo();
void createGLResources();
QAbstractVideoSurface *m_surface; QAbstractVideoSurface *m_surface;
QOffscreenSurface *m_offscreenSurface;
QOpenGLContext *m_glContext;
QOpenGLFramebufferObject *m_fbo;
QOpenGLShaderProgram *m_program;
bool m_useImage;
QSize m_nativeSize; QSize m_nativeSize;
QJNIObjectPrivate *m_androidSurface; QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture; JSurfaceTexture *m_surfaceTexture;
JSurfaceTextureHolder *m_surfaceHolder; JSurfaceTextureHolder *m_surfaceHolder;
uint m_externalTex; uint m_externalTex;
TextureDeleter *m_textureDeleter;
TextureReadyCallback m_textureReadyCallback;
void *m_textureReadyContext;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE
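The control now creates its texture lazily: the consumer of the "_q_GLThreadCallback" property set above sends it a QEvent::User from the render thread, customEvent() then runs with that thread's GL context current, and readyChanged(true) is emitted. A reduced sketch of that event round-trip without any OpenGL, using made-up names (requires C++11 for the lambda; built with qmake/automoc as main.cpp):

#include <QCoreApplication>
#include <QDebug>
#include <QEvent>
#include <QObject>

class LazyInitTarget : public QObject
{
    Q_OBJECT
public:
    LazyInitTarget() : initialized(false) { }
    bool initialized;

signals:
    void readyChanged(bool ready);

protected:
    void customEvent(QEvent *e)
    {
        if (e->type() == QEvent::User) {
            // In the real control this is where glGenTextures() runs, because
            // the sender guarantees a current GL context on its thread.
            initialized = true;
            emit readyChanged(true);
        }
    }
};

int main(int argc, char **argv)
{
    QCoreApplication app(argc, argv);

    LazyInitTarget target;
    QObject::connect(&target, &LazyInitTarget::readyChanged,
                     [](bool ready) { qDebug() << "ready:" << ready; });

    // Stands in for the render thread poking the control; sendEvent is synchronous.
    QEvent ev(QEvent::User);
    QCoreApplication::sendEvent(&target, &ev);

    qDebug() << "initialized:" << target.initialized;
    return 0;
}

#include "main.moc"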

@@ -52,12 +52,6 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
static void textureReadyCallback(void *context)
{
if (context)
reinterpret_cast<QAndroidCameraSession *>(context)->onSurfaceTextureReady();
}
QAndroidCameraSession::QAndroidCameraSession(QObject *parent) QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
: QObject(parent) : QObject(parent)
, m_selectedCamera(0) , m_selectedCamera(0)
@@ -153,8 +147,11 @@ bool QAndroidCameraSession::open()
if (m_camera) { if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed())); connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray))); connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewFrameAvailable(QByteArray)), this, SLOT(onCameraPreviewFrameAvailable(QByteArray)));
m_nativeOrientation = m_camera->getNativeOrientation(); m_nativeOrientation = m_camera->getNativeOrientation();
m_status = QCamera::LoadedStatus; m_status = QCamera::LoadedStatus;
if (m_camera->getPreviewFormat() != JCamera::NV21)
m_camera->setPreviewFormat(JCamera::NV21);
emit opened(); emit opened();
} else { } else {
m_status = QCamera::UnavailableStatus; m_status = QCamera::UnavailableStatus;
@@ -188,12 +185,17 @@ void QAndroidCameraSession::close()
emit statusChanged(m_status); emit statusChanged(m_status);
} }
void QAndroidCameraSession::setVideoPreview(QAndroidVideoOutput *videoOutput) void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
{ {
if (m_videoOutput) if (m_videoOutput)
m_videoOutput->stop(); m_videoOutput->stop();
m_videoOutput = videoOutput; if (videoOutput) {
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
} else {
m_videoOutput = 0;
}
} }
void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview) void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview)
@@ -243,12 +245,8 @@ void QAndroidCameraSession::startPreview()
applyImageSettings(); applyImageSettings();
adjustViewfinderSize(m_imageSettings.resolution()); adjustViewfinderSize(m_imageSettings.resolution());
if (m_videoOutput) { if (m_videoOutput && m_videoOutput->isReady())
if (m_videoOutput->isTextureReady()) onVideoOutputReady(true);
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
else
m_videoOutput->setTextureReadyCallback(textureReadyCallback, this);
}
JMultimediaUtils::enableOrientationListener(true); JMultimediaUtils::enableOrientationListener(true);
@@ -427,6 +425,7 @@ int QAndroidCameraSession::capture(const QString &fileName)
// adjust picture rotation depending on the device orientation // adjust picture rotation depending on the device orientation
m_camera->setRotation(currentCameraRotation()); m_camera->setRotation(currentCameraRotation());
m_camera->requestPreviewFrame();
m_camera->takePicture(); m_camera->takePicture();
} else { } else {
emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError, emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError,
@@ -455,10 +454,6 @@ void QAndroidCameraSession::onCameraPictureExposed()
void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data) void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
{ {
if (!m_captureCanceled) { if (!m_captureCanceled) {
// generate a preview from the viewport
if (m_videoOutput)
emit imageCaptured(m_currentImageCaptureId, m_videoOutput->toImage());
// Loading and saving the captured image can be slow, do it in a separate thread // Loading and saving the captured image can be slow, do it in a separate thread
QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage, QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage,
m_currentImageCaptureId, m_currentImageCaptureId,
@@ -522,9 +517,37 @@ void QAndroidCameraSession::processCapturedImage(int id,
} }
} }
void QAndroidCameraSession::onSurfaceTextureReady() void QAndroidCameraSession::onCameraPreviewFrameAvailable(const QByteArray &data)
{ {
if (m_camera && m_videoOutput) if (m_captureCanceled || m_readyForCapture)
return;
QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
m_currentImageCaptureId,
data);
}
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data)
{
QSize frameSize = m_camera->previewSize();
QImage preview(frameSize, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)preview.bits(),
frameSize.width(),
frameSize.height());
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == JCamera::CameraFacingFront)
preview = preview.transformed(QTransform().scale(-1, 1));
emit imageCaptured(id, preview);
}
void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
if (m_camera && m_videoOutput && ready)
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()); m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
} }
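processPreviewImage() mirrors frames from front-facing cameras with a QTransform that negates x, so the imageCaptured() preview matches what the user sees in the viewfinder. A stand-alone demonstration of that flip on a tiny image (the 2x1 pixel content is arbitrary; needs QT += gui):

#include <QDebug>
#include <QImage>
#include <QTransform>

int main()
{
    QImage preview(2, 1, QImage::Format_ARGB32);
    preview.setPixel(0, 0, 0xffff0000);   // red on the left
    preview.setPixel(1, 0, 0xff0000ff);   // blue on the right

    // Same call as in processPreviewImage(): horizontal mirror only.
    QImage mirrored = preview.transformed(QTransform().scale(-1, 1));

    qDebug() << QString::number(mirrored.pixel(0, 0), 16)    // now ff0000ff (blue)
             << QString::number(mirrored.pixel(1, 0), 16);   // now ffff0000 (red)
    return 0;
}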

@@ -71,7 +71,7 @@ public:
void setCaptureMode(QCamera::CaptureModes mode); void setCaptureMode(QCamera::CaptureModes mode);
bool isCaptureModeSupported(QCamera::CaptureModes mode) const; bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
void setVideoPreview(QAndroidVideoOutput *videoOutput); void setVideoPreview(QObject *videoOutput);
void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true); void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true);
QImageEncoderSettings imageSettings() const { return m_imageSettings; } QImageEncoderSettings imageSettings() const { return m_imageSettings; }
@@ -88,8 +88,6 @@ public:
int capture(const QString &fileName); int capture(const QString &fileName);
void cancelCapture(); void cancelCapture();
void onSurfaceTextureReady();
int currentCameraRotation() const; int currentCameraRotation() const;
Q_SIGNALS: Q_SIGNALS:
@@ -110,10 +108,13 @@ Q_SIGNALS:
void imageCaptureError(int id, int error, const QString &errorString); void imageCaptureError(int id, int error, const QString &errorString);
private Q_SLOTS: private Q_SLOTS:
void onVideoOutputReady(bool ready);
void onApplicationStateChanged(Qt::ApplicationState state); void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraPictureExposed(); void onCameraPictureExposed();
void onCameraPictureCaptured(const QByteArray &data); void onCameraPictureCaptured(const QByteArray &data);
void onCameraPreviewFrameAvailable(const QByteArray &data);
private: private:
bool open(); bool open();
@@ -123,7 +124,7 @@ private:
void stopPreview(); void stopPreview();
void applyImageSettings(); void applyImageSettings();
void processPreviewImage(int id); void processPreviewImage(int id, const QByteArray &data);
void processCapturedImage(int id, void processCapturedImage(int id,
const QByteArray &data, const QByteArray &data,
QCameraImageCapture::CaptureDestinations dest, QCameraImageCapture::CaptureDestinations dest,

@@ -88,7 +88,7 @@ private:
QAndroidVideoDeviceSelectorControl *m_videoInputControl; QAndroidVideoDeviceSelectorControl *m_videoInputControl;
QAndroidAudioInputSelectorControl *m_audioInputControl; QAndroidAudioInputSelectorControl *m_audioInputControl;
QAndroidCameraSession *m_cameraSession; QAndroidCameraSession *m_cameraSession;
QAndroidVideoRendererControl *m_videoRendererControl; QMediaControl *m_videoRendererControl;
QAndroidCameraZoomControl *m_cameraZoomControl; QAndroidCameraZoomControl *m_cameraZoomControl;
QAndroidCameraExposureControl *m_cameraExposureControl; QAndroidCameraExposureControl *m_cameraExposureControl;
QAndroidCameraFlashControl *m_cameraFlashControl; QAndroidCameraFlashControl *m_cameraFlashControl;

@@ -110,25 +110,27 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
QUrl QAndroidCaptureSession::outputLocation() const QUrl QAndroidCaptureSession::outputLocation() const
{ {
return m_outputLocation; return m_actualOutputLocation;
} }
bool QAndroidCaptureSession::setOutputLocation(const QUrl &location) bool QAndroidCaptureSession::setOutputLocation(const QUrl &location)
{ {
if (m_outputLocation == location) if (m_requestedOutputLocation == location)
return false; return false;
m_outputLocation = location; m_actualOutputLocation = QUrl();
m_requestedOutputLocation = location;
if (m_outputLocation.isEmpty()) if (m_requestedOutputLocation.isEmpty())
return true; return true;
if (m_outputLocation.isValid() && (m_outputLocation.isLocalFile() || m_outputLocation.isRelative())) { if (m_requestedOutputLocation.isValid()
emit actualLocationChanged(m_outputLocation); && (m_requestedOutputLocation.isLocalFile() || m_requestedOutputLocation.isRelative())) {
emit actualLocationChanged(m_requestedOutputLocation);
return true; return true;
} }
m_outputLocation = QUrl(); m_requestedOutputLocation = QUrl();
return false; return false;
} }
@@ -213,15 +215,18 @@ bool QAndroidCaptureSession::start()
// Set output file // Set output file
QString filePath = m_mediaStorageLocation.generateFileName(m_outputLocation.isLocalFile() ? m_outputLocation.toLocalFile() QString filePath = m_mediaStorageLocation.generateFileName(
: m_outputLocation.toString(), m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
: m_requestedOutputLocation.toString(),
m_cameraSession ? QAndroidMediaStorageLocation::Camera m_cameraSession ? QAndroidMediaStorageLocation::Camera
: QAndroidMediaStorageLocation::Audio, : QAndroidMediaStorageLocation::Audio,
m_cameraSession ? QLatin1String("VID_") m_cameraSession ? QLatin1String("VID_")
: QLatin1String("REC_"), : QLatin1String("REC_"),
m_containerFormat); m_containerFormat);
m_outputLocation = QUrl::fromLocalFile(filePath);
emit actualLocationChanged(m_outputLocation); m_actualOutputLocation = QUrl::fromLocalFile(filePath);
if (m_actualOutputLocation != m_requestedOutputLocation)
emit actualLocationChanged(m_actualOutputLocation);
m_mediaRecorder->setOutputFile(filePath); m_mediaRecorder->setOutputFile(filePath);
@@ -280,7 +285,7 @@ void QAndroidCaptureSession::stop(bool error)
// if the media is saved into the standard media location, register it // if the media is saved into the standard media location, register it
// with the Android media scanner so it appears immediately in apps // with the Android media scanner so it appears immediately in apps
// such as the gallery. // such as the gallery.
QString mediaPath = m_outputLocation.toLocalFile(); QString mediaPath = m_actualOutputLocation.toLocalFile();
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM) QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds); : JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc)) if (mediaPath.startsWith(standardLoc))

@@ -160,7 +160,8 @@ private:
QMediaRecorder::State m_state; QMediaRecorder::State m_state;
QMediaRecorder::Status m_status; QMediaRecorder::Status m_status;
QUrl m_outputLocation; QUrl m_requestedOutputLocation;
QUrl m_actualOutputLocation;
CaptureProfile m_defaultSettings; CaptureProfile m_defaultSettings;

@@ -45,12 +45,6 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
static void textureReadyCallback(void *context)
{
if (context)
reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
}
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent) QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
: QMediaPlayerControl(parent), : QMediaPlayerControl(parent),
mMediaPlayer(new JMediaPlayer), mMediaPlayer(new JMediaPlayer),
@@ -241,18 +235,18 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
setSeekable(true); setSeekable(true);
} }
void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput) void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
{ {
if (mVideoOutput) if (mVideoOutput)
mVideoOutput->stop(); mVideoOutput->stop();
mVideoOutput = videoOutput; mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
if (mVideoOutput && !mMediaPlayer->display()) { if (mVideoOutput && !mMediaPlayer->display()) {
if (mVideoOutput->isTextureReady()) if (mVideoOutput->isReady())
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder()); mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
else else
mVideoOutput->setTextureReadyCallback(textureReadyCallback, this); connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
} }
} }
@@ -426,9 +420,9 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
mVideoOutput->setVideoSize(mVideoSize); mVideoOutput->setVideoSize(mVideoSize);
} }
void QAndroidMediaPlayerControl::onSurfaceTextureReady() void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
{ {
if (!mMediaPlayer->display() && mVideoOutput) { if (!mMediaPlayer->display() && mVideoOutput && ready) {
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder()); mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
flushPendingStates(); flushPendingStates();
} }

@@ -75,8 +75,7 @@ public:
const QIODevice *mediaStream() const Q_DECL_OVERRIDE; const QIODevice *mediaStream() const Q_DECL_OVERRIDE;
void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE; void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;
void setVideoOutput(QAndroidVideoOutput *videoOutput); void setVideoOutput(QObject *videoOutput);
void onSurfaceTextureReady();
Q_SIGNALS: Q_SIGNALS:
void metaDataUpdated(); void metaDataUpdated();
@@ -90,6 +89,7 @@ public Q_SLOTS:
void setMuted(bool muted) Q_DECL_OVERRIDE; void setMuted(bool muted) Q_DECL_OVERRIDE;
private Q_SLOTS: private Q_SLOTS:
void onVideoOutputReady(bool ready);
void onError(qint32 what, qint32 extra); void onError(qint32 what, qint32 extra);
void onInfo(qint32 what, qint32 extra); void onInfo(qint32 what, qint32 extra);
void onMediaPlayerInfo(qint32 what, qint32 extra); void onMediaPlayerInfo(qint32 what, qint32 extra);

@@ -48,7 +48,6 @@ QT_BEGIN_NAMESPACE
class QAndroidMediaPlayerControl; class QAndroidMediaPlayerControl;
class QAndroidMetaDataReaderControl; class QAndroidMetaDataReaderControl;
class QAndroidVideoRendererControl;
class QAndroidMediaService : public QMediaService class QAndroidMediaService : public QMediaService
{ {
@@ -63,7 +62,7 @@ public:
private: private:
QAndroidMediaPlayerControl *mMediaControl; QAndroidMediaPlayerControl *mMediaControl;
QAndroidMetaDataReaderControl *mMetadataControl; QAndroidMetaDataReaderControl *mMetadataControl;
QAndroidVideoRendererControl *mVideoRendererControl; QMediaControl *mVideoRendererControl;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

@@ -102,6 +102,18 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
} }
} }
static void notifyPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data)
{
JCamera *obj = g_objectMap.value(id, 0);
if (obj) {
QByteArray bytes;
int arrayLength = env->GetArrayLength(data);
bytes.resize(arrayLength);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
Q_EMIT obj->previewFrameAvailable(bytes);
}
}
JCamera::JCamera(int cameraId, jobject cam) JCamera::JCamera(int cameraId, jobject cam)
: QObject() : QObject()
, QJNIObjectPrivate(cam) , QJNIObjectPrivate(cam)
@@ -225,6 +237,23 @@ QList<QSize> JCamera::getSupportedPreviewSizes()
return list; return list;
} }
JCamera::ImageFormat JCamera::getPreviewFormat()
{
if (!m_parameters.isValid())
return Unknown;
return JCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
}
void JCamera::setPreviewFormat(ImageFormat fmt)
{
if (!m_parameters.isValid())
return;
m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
applyParameters();
}
void JCamera::setPreviewSize(const QSize &size) void JCamera::setPreviewSize(const QSize &size)
{ {
if (!m_parameters.isValid()) if (!m_parameters.isValid())
@@ -624,6 +653,11 @@ void JCamera::setJpegQuality(int quality)
applyParameters(); applyParameters();
} }
void JCamera::requestPreviewFrame()
{
callMethod<void>("requestPreviewFrame");
}
void JCamera::takePicture() void JCamera::takePicture()
{ {
callMethod<void>("takePicture"); callMethod<void>("takePicture");
@@ -672,7 +706,8 @@ QStringList JCamera::callStringListMethod(const char *methodName)
static JNINativeMethod methods[] = { static JNINativeMethod methods[] = {
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete}, {"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed}, {"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured} {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
{"notifyPreviewFrame", "(I[B)V", (void *)notifyPreviewFrame}
}; };
bool JCamera::initJNI(JNIEnv *env) bool JCamera::initJNI(JNIEnv *env)
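notifyPreviewFrame() copies the Java byte[] into a QByteArray with GetArrayLength/GetByteArrayRegion before emitting it across threads, the same way notifyPictureCaptured() already did. The same copy written as a stand-alone helper; it assumes a valid JNIEnv* and jbyteArray supplied by the VM, so it is a compile-only sketch rather than a runnable program:

#include <jni.h>
#include <QByteArray>

// Copies the contents of a Java byte[] into a QByteArray owned by the C++ side.
static QByteArray copyByteArray(JNIEnv *env, jbyteArray data)
{
    QByteArray bytes;
    const jsize length = env->GetArrayLength(data);
    bytes.resize(length);
    env->GetByteArrayRegion(data, 0, length, reinterpret_cast<jbyte *>(bytes.data()));
    return bytes;
}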

@@ -58,6 +58,16 @@ public:
CameraFacingFront = 1 CameraFacingFront = 1
}; };
enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
Unknown = 0,
RGB565 = 4,
NV16 = 16,
NV21 = 17,
YUY2 = 20,
JPEG = 256,
YV12 = 842094169
};
~JCamera(); ~JCamera();
static JCamera *open(int cameraId); static JCamera *open(int cameraId);
@@ -75,6 +85,9 @@ public:
QSize getPreferredPreviewSizeForVideo(); QSize getPreferredPreviewSizeForVideo();
QList<QSize> getSupportedPreviewSizes(); QList<QSize> getSupportedPreviewSizes();
ImageFormat getPreviewFormat();
void setPreviewFormat(ImageFormat fmt);
QSize previewSize() const { return m_previewSize; } QSize previewSize() const { return m_previewSize; }
void setPreviewSize(const QSize &size); void setPreviewSize(const QSize &size);
void setPreviewTexture(jobject surfaceTexture); void setPreviewTexture(jobject surfaceTexture);
@@ -131,6 +144,8 @@ public:
void startPreview(); void startPreview();
void stopPreview(); void stopPreview();
void requestPreviewFrame();
void takePicture(); void takePicture();
static bool initJNI(JNIEnv *env); static bool initJNI(JNIEnv *env);
@@ -143,6 +158,8 @@ Q_SIGNALS:
void whiteBalanceChanged(); void whiteBalanceChanged();
void previewFrameAvailable(const QByteArray &data);
void pictureExposed(); void pictureExposed();
void pictureCaptured(const QByteArray &data); void pictureCaptured(const QByteArray &data);

@@ -56,6 +56,7 @@ public:
explicit JSurfaceTexture(unsigned int texName); explicit JSurfaceTexture(unsigned int texName);
~JSurfaceTexture(); ~JSurfaceTexture();
int textureID() const { return m_texID; }
QMatrix4x4 getTransformMatrix(); QMatrix4x4 getTransformMatrix();
void updateTexImage(); void updateTexImage();

@@ -0,0 +1,3 @@
{
"Keys": ["sgvideonodes"]
}

@@ -0,0 +1,204 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonode.h"
#include <qsgmaterial.h>
#include <qmutex.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeMaterialShader : public QSGMaterialShader
{
public:
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
const char *vertexShader() const {
return
"uniform highp mat4 qt_Matrix; \n"
"uniform highp mat4 texMatrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = (texMatrix * vec4(qt_VertexTexCoord, 0.0, 1.0)).xy; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
}
const char *fragmentShader() const {
return
"#extension GL_OES_EGL_image_external : require \n"
"uniform samplerExternalOES videoTexture; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D(videoTexture, qt_TexCoord) * opacity; \n"
"}";
}
void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_texMatrix = program()->uniformLocation("texMatrix");
m_id_texture = program()->uniformLocation("videoTexture");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_texMatrix;
int m_id_texture;
int m_id_opacity;
};
class QAndroidSGVideoNodeMaterial : public QSGMaterial
{
public:
QAndroidSGVideoNodeMaterial()
: m_textureId(0)
{
setFlag(Blending, false);
}
~QAndroidSGVideoNodeMaterial()
{
m_frame = QVideoFrame();
}
QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
QSGMaterialShader *createShader() const {
return new QAndroidSGVideoNodeMaterialShader;
}
int compare(const QSGMaterial *other) const {
const QAndroidSGVideoNodeMaterial *m = static_cast<const QAndroidSGVideoNodeMaterial *>(other);
return m_textureId - m->m_textureId;
}
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
bool updateTexture()
{
QMutexLocker lock(&m_frameMutex);
bool texMatrixDirty = false;
if (m_frame.isValid()) {
QVariantList list = m_frame.handle().toList();
GLuint texId = list.at(0).toUInt();
QMatrix4x4 mat = qvariant_cast<QMatrix4x4>(list.at(1));
texMatrixDirty = texId != m_textureId || mat != m_texMatrix;
m_textureId = texId;
m_texMatrix = mat;
// the texture is already bound and initialized at this point,
// no need to call glTexParams
} else {
m_textureId = 0;
}
return texMatrixDirty;
}
QVideoFrame m_frame;
QMutex m_frameMutex;
GLuint m_textureId;
QMatrix4x4 m_texMatrix;
};
void QAndroidSGVideoNodeMaterialShader::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QAndroidSGVideoNodeMaterial *mat = static_cast<QAndroidSGVideoNodeMaterial *>(newMaterial);
program()->setUniformValue(m_id_texture, 0);
if (mat->updateTexture())
program()->setUniformValue(m_id_texMatrix, mat->m_texMatrix);
if (state.isOpacityDirty())
program()->setUniformValue(m_id_opacity, state.opacity());
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}
QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
: m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QAndroidSGVideoNodeMaterial;
setMaterial(m_material);
}
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
QVideoFrame::PixelFormat QAndroidSGVideoNode::pixelFormat() const
{
return m_format.pixelFormat();
}
QT_END_NAMESPACE
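QAndroidSGVideoNodeMaterial::setVideoFrame() runs on the thread that delivers frames while updateTexture() runs on the scene-graph render thread, so both sides take m_frameMutex before touching the shared frame. A reduced sketch of that handoff with a plain struct instead of a QVideoFrame (names are made up):

#include <QDebug>
#include <QMutex>
#include <QMutexLocker>

struct FrameHandle { uint textureId; };

class SharedFrame
{
public:
    SharedFrame() { m_current.textureId = 0; }

    // Called from the thread that produces frames.
    void setFrame(const FrameHandle &frame)
    {
        QMutexLocker lock(&m_mutex);
        m_current = frame;
    }

    // Called from the render thread; returns a copy taken under the lock.
    FrameHandle takeSnapshot()
    {
        QMutexLocker lock(&m_mutex);
        return m_current;
    }

private:
    QMutex m_mutex;
    FrameHandle m_current;
};

int main()
{
    SharedFrame shared;
    FrameHandle frame;
    frame.textureId = 42;
    shared.setFrame(frame);
    qDebug() << "texture id seen by the consumer:" << shared.takeSnapshot().textureId;
    return 0;
}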

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODE_H
#define QANDROIDSGVIDEONODE_H
#include <private/qsgvideonode_p.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeMaterial;
class QAndroidSGVideoNode : public QSGVideoNode
{
public:
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
void setCurrentFrame(const QVideoFrame &frame);
QVideoFrame::PixelFormat pixelFormat() const;
private:
QVideoSurfaceFormat m_format;
QAndroidSGVideoNodeMaterial *m_material;
QVideoFrame m_frame;
};
QT_END_NAMESPACE
#endif // QANDROIDSGVIDEONODE_H

@@ -0,0 +1,69 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonodeplugin.h"
#include "qandroidsgvideonode.h"
QT_BEGIN_NAMESPACE
#define ExternalGLTextureHandle (QAbstractVideoBuffer::UserHandle + 1)
QList<QVideoFrame::PixelFormat> QAndroidSGVideoNodeFactoryPlugin::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == ExternalGLTextureHandle)
pixelFormats.append(QVideoFrame::Format_BGR32);
return pixelFormats;
}
QSGVideoNode *QAndroidSGVideoNodeFactoryPlugin::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QAndroidSGVideoNode(format);
return 0;
}
QT_END_NAMESPACE
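The factory only creates a node for BGR32 frames carried by the custom handle value both sides of this change define as UserHandle + 1. A stand-alone sketch of that gate using the public QVideoSurfaceFormat API (needs QT += multimedia):

#include <QAbstractVideoBuffer>
#include <QDebug>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

int main()
{
    // Same custom handle value the renderer control and the node plugin agree on.
    const QAbstractVideoBuffer::HandleType externalGLTexture =
            QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1);

    QVideoSurfaceFormat format(QSize(1280, 720), QVideoFrame::Format_BGR32, externalGLTexture);

    // The factory's check, spelled out.
    const bool accepted = format.handleType() == externalGLTexture
            && format.pixelFormat() == QVideoFrame::Format_BGR32;
    qDebug() << "would create QAndroidSGVideoNode:" << accepted;
    return 0;
}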

@@ -0,0 +1,62 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODEPLUGIN_H
#define QANDROIDSGVIDEONODEPLUGIN_H
#include <private/qsgvideonode_p.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeFactoryPlugin : public QSGVideoNodeFactoryPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA(IID QSGVideoNodeFactoryInterface_iid
FILE "android_videonode.json")
public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
QT_END_NAMESPACE
#endif // QANDROIDSGVIDEONODEPLUGIN_H

View File

@@ -0,0 +1,16 @@
TARGET = qtsgvideonode_android
QT += quick multimedia-private qtmultimediaquicktools-private
PLUGIN_TYPE = video/videonode
PLUGIN_CLASS_NAME = QAndroidSGVideoNodeFactoryPlugin
load(qt_plugin)
HEADERS += \
qandroidsgvideonodeplugin.h \
qandroidsgvideonode.h
SOURCES += \
qandroidsgvideonodeplugin.cpp \
qandroidsgvideonode.cpp
OTHER_FILES += android_videonode.json

@@ -41,6 +41,8 @@
#include "audiocaptureprobecontrol.h" #include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
AudioCaptureProbeControl::AudioCaptureProbeControl(QObject *parent): AudioCaptureProbeControl::AudioCaptureProbeControl(QObject *parent):
QMediaAudioProbeControl(parent) QMediaAudioProbeControl(parent)
{ {
@@ -58,3 +60,5 @@ void AudioCaptureProbeControl::bufferProbed(const char *data, quint32 size, cons
QAudioBuffer audioBuffer = QAudioBuffer(QByteArray::fromRawData(data, size), format); QAudioBuffer audioBuffer = QAudioBuffer(QByteArray::fromRawData(data, size), format);
QMetaObject::invokeMethod(this, "audioBufferProbed", Qt::QueuedConnection, Q_ARG(QAudioBuffer, audioBuffer)); QMetaObject::invokeMethod(this, "audioBufferProbed", Qt::QueuedConnection, Q_ARG(QAudioBuffer, audioBuffer));
} }
QT_END_NAMESPACE

@@ -46,7 +46,7 @@
#include <QtCore/qmutex.h> #include <QtCore/qmutex.h>
#include <qaudiobuffer.h> #include <qaudiobuffer.h>
QT_USE_NAMESPACE QT_BEGIN_NAMESPACE
class AudioCaptureProbeControl : public QMediaAudioProbeControl class AudioCaptureProbeControl : public QMediaAudioProbeControl
{ {
@@ -58,4 +58,6 @@ public:
void bufferProbed(const char *data, quint32 size, const QAudioFormat& format); void bufferProbed(const char *data, quint32 size, const QAudioFormat& format);
}; };
QT_END_NAMESPACE
#endif #endif
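Several of the audiocapture files in this change only gain QT_BEGIN_NAMESPACE/QT_END_NAMESPACE pairs in place of QT_USE_NAMESPACE, so the classes live inside the Qt namespace in namespaced builds (QT_USE_NAMESPACE is meant for code outside Qt, such as applications). A minimal header following that pattern, with a made-up class name:

// foo.h -- sketch of the namespace macro pattern these headers switch to.
#ifndef FOO_H
#define FOO_H

#include <QtCore/qglobal.h>

QT_BEGIN_NAMESPACE   // "namespace QT_NAMESPACE {" in namespaced builds, nothing otherwise

class Foo
{
public:
    int value() const { return 42; }
};

QT_END_NAMESPACE     // closes the namespace again

#endif // FOO_H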

@@ -47,6 +47,8 @@
#include "audiomediarecordercontrol.h" #include "audiomediarecordercontrol.h"
#include "audiocaptureprobecontrol.h" #include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
AudioCaptureService::AudioCaptureService(QObject *parent): AudioCaptureService::AudioCaptureService(QObject *parent):
QMediaService(parent) QMediaService(parent)
{ {
@@ -94,4 +96,4 @@ void AudioCaptureService::releaseControl(QMediaControl *control)
Q_UNUSED(control) Q_UNUSED(control)
} }
QT_END_NAMESPACE

@@ -46,14 +46,14 @@
#include "qmediaservice.h" #include "qmediaservice.h"
QT_BEGIN_NAMESPACE
class AudioCaptureSession; class AudioCaptureSession;
class AudioEncoderControl; class AudioEncoderControl;
class AudioContainerControl; class AudioContainerControl;
class AudioMediaRecorderControl; class AudioMediaRecorderControl;
class AudioInputSelector; class AudioInputSelector;
QT_USE_NAMESPACE
class AudioCaptureService : public QMediaService class AudioCaptureService : public QMediaService
{ {
Q_OBJECT Q_OBJECT
@@ -71,4 +71,6 @@ private:
AudioMediaRecorderControl *m_mediaControl; AudioMediaRecorderControl *m_mediaControl;
}; };
QT_END_NAMESPACE
#endif #endif

@@ -46,6 +46,7 @@
#include "qmediaserviceproviderplugin.h" #include "qmediaserviceproviderplugin.h"
QT_BEGIN_NAMESPACE
QMediaService* AudioCaptureServicePlugin::create(QString const& key) QMediaService* AudioCaptureServicePlugin::create(QString const& key)
{ {
@@ -60,3 +61,4 @@ void AudioCaptureServicePlugin::release(QMediaService *service)
delete service; delete service;
} }
QT_END_NAMESPACE

@@ -45,7 +45,7 @@
#include "qmediaserviceproviderplugin.h" #include "qmediaserviceproviderplugin.h"
QT_USE_NAMESPACE QT_BEGIN_NAMESPACE
class AudioCaptureServicePlugin : public QMediaServiceProviderPlugin class AudioCaptureServicePlugin : public QMediaServiceProviderPlugin
{ {
@@ -58,4 +58,6 @@ public:
void release(QMediaService *service); void release(QMediaService *service);
}; };
QT_END_NAMESPACE
#endif // AUDIOCAPTURESERVICEPLUGIN_H #endif // AUDIOCAPTURESERVICEPLUGIN_H

@@ -49,6 +49,8 @@
#include "audiocapturesession.h" #include "audiocapturesession.h"
#include "audiocaptureprobecontrol.h" #include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
void FileProbeProxy::startProbes(const QAudioFormat &format) void FileProbeProxy::startProbes(const QAudioFormat &format)
{ {
m_format = format; m_format = format;
@@ -87,33 +89,20 @@ qint64 FileProbeProxy::writeData(const char *data, qint64 len)
return QFile::writeData(data, len); return QFile::writeData(data, len);
} }
AudioCaptureSession::AudioCaptureSession(QObject *parent): AudioCaptureSession::AudioCaptureSession(QObject *parent)
QObject(parent) : QObject(parent)
, m_state(QMediaRecorder::StoppedState)
, m_status(QMediaRecorder::UnloadedStatus)
, m_audioInput(0)
, m_deviceInfo(QAudioDeviceInfo::defaultInputDevice())
, m_wavFile(true)
{ {
m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice()); m_format = m_deviceInfo.preferredFormat();
m_audioInput = 0;
m_position = 0;
m_state = QMediaRecorder::StoppedState;
m_format.setSampleRate(8000);
m_format.setChannelCount(1);
m_format.setSampleSize(8);
m_format.setSampleType(QAudioFormat::UnSignedInt);
m_format.setCodec("audio/pcm");
wavFile = true;
} }
AudioCaptureSession::~AudioCaptureSession() AudioCaptureSession::~AudioCaptureSession()
{ {
stop(); setState(QMediaRecorder::StoppedState);
if(m_audioInput)
delete m_audioInput;
}
QAudioDeviceInfo* AudioCaptureSession::deviceInfo() const
{
return m_deviceInfo;
} }
QAudioFormat AudioCaptureSession::format() const QAudioFormat AudioCaptureSession::format() const
@@ -121,118 +110,96 @@ QAudioFormat AudioCaptureSession::format() const
return m_format; return m_format;
} }
bool AudioCaptureSession::isFormatSupported(const QAudioFormat &format) const void AudioCaptureSession::setFormat(const QAudioFormat &format)
{ {
if(m_deviceInfo) { m_format = format;
if(format.codec().contains(QLatin1String("audio/x-wav"))) {
QAudioFormat fmt = format;
fmt.setCodec("audio/pcm");
return m_deviceInfo->isFormatSupported(fmt);
} else
return m_deviceInfo->isFormatSupported(format);
}
return false;
}
bool AudioCaptureSession::setFormat(const QAudioFormat &format)
{
if(m_deviceInfo) {
QAudioFormat fmt = format;
if(m_deviceInfo->isFormatSupported(fmt)) {
m_format = fmt;
if(m_audioInput) delete m_audioInput;
m_audioInput = 0;
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for(int i=0;i<devices.size();i++) {
if(qstrcmp(m_deviceInfo->deviceName().toLocal8Bit().constData(),
devices.at(i).deviceName().toLocal8Bit().constData()) == 0) {
m_audioInput = new QAudioInput(devices.at(i),m_format);
connect(m_audioInput,SIGNAL(stateChanged(QAudio::State)),this,SLOT(stateChanged(QAudio::State)));
connect(m_audioInput,SIGNAL(notify()),this,SLOT(notify()));
break;
}
}
} else {
m_format = m_deviceInfo->preferredFormat();
qWarning()<<"failed to setFormat using preferred...";
}
}
return false;
}
QStringList AudioCaptureSession::supportedContainers() const
{
QStringList list;
if(m_deviceInfo) {
if (m_deviceInfo->supportedCodecs().size() > 0) {
list << "audio/x-wav";
list << "audio/pcm";
}
}
return list;
}
QString AudioCaptureSession::containerDescription(const QString &formatMimeType) const
{
if(m_deviceInfo) {
if (formatMimeType.contains(QLatin1String("audio/pcm")))
return tr("RAW file format");
if (formatMimeType.contains(QLatin1String("audio/x-wav")))
return tr("WAV file format");
}
return QString();
} }
void AudioCaptureSession::setContainerFormat(const QString &formatMimeType)
{
-    if (!formatMimeType.contains(QLatin1String("audio/x-wav")) &&
-        !formatMimeType.contains(QLatin1String("audio/pcm")) &&
-        !formatMimeType.isEmpty())
-        return;
-    if(m_deviceInfo) {
-        if (!m_deviceInfo->supportedCodecs().contains(QLatin1String("audio/pcm")))
-            return;
-        if (formatMimeType.isEmpty() || formatMimeType.contains(QLatin1String("audio/x-wav"))) {
-            wavFile = true;
-            m_format.setCodec("audio/pcm");
-        } else {
-            wavFile = false;
-            m_format.setCodec(formatMimeType);
-        }
-    }
+    m_wavFile = (formatMimeType.isEmpty()
+                 || QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0);
}
QString AudioCaptureSession::containerFormat() const
{
-    if(wavFile)
-        return QString("audio/x-wav");
-    return QString("audio/pcm");
+    if (m_wavFile)
+        return QStringLiteral("audio/x-wav");
+    return QStringLiteral("audio/x-raw");
}
QUrl AudioCaptureSession::outputLocation() const
{
-    return m_actualSink;
+    return m_actualOutputLocation;
}
-bool AudioCaptureSession::setOutputLocation(const QUrl& sink)
+bool AudioCaptureSession::setOutputLocation(const QUrl& location)
{
-    m_sink = m_actualSink = sink;
-    return true;
+    if (m_requestedOutputLocation == location)
+        return false;
+    m_actualOutputLocation = QUrl();
+    m_requestedOutputLocation = location;
+    if (m_requestedOutputLocation.isEmpty())
+        return true;
+    if (m_requestedOutputLocation.isValid() && (m_requestedOutputLocation.isLocalFile()
+                                                || m_requestedOutputLocation.isRelative())) {
+        emit actualLocationChanged(m_requestedOutputLocation);
+        return true;
+    }
+    m_requestedOutputLocation = QUrl();
+    return false;
}
qint64 AudioCaptureSession::position() const
{
-    return m_position;
+    if (m_audioInput)
+        return m_audioInput->processedUSecs() / 1000;
+    return 0;
}
-int AudioCaptureSession::state() const
+void AudioCaptureSession::setState(QMediaRecorder::State state)
{
-    return int(m_state);
+    if (m_state == state)
+        return;
+    m_state = state;
+    emit stateChanged(m_state);
+    switch (m_state) {
+    case QMediaRecorder::StoppedState:
+        stop();
+        break;
+    case QMediaRecorder::PausedState:
+        pause();
+        break;
+    case QMediaRecorder::RecordingState:
+        record();
+        break;
+    }
+}
+QMediaRecorder::State AudioCaptureSession::state() const
+{
+    return m_state;
+}
+void AudioCaptureSession::setStatus(QMediaRecorder::Status status)
+{
+    if (m_status == status)
+        return;
+    m_status = status;
+    emit statusChanged(m_status);
+}
+QMediaRecorder::Status AudioCaptureSession::status() const
+{
+    return m_status;
}
QDir AudioCaptureSession::defaultDir() const
@@ -258,9 +225,29 @@ QDir AudioCaptureSession::defaultDir() const
    return QDir();
}
-QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ext) const
+QString AudioCaptureSession::generateFileName(const QString &requestedName,
+                                              const QString &extension) const
{
+    if (requestedName.isEmpty())
+        return generateFileName(defaultDir(), extension);
+    QString path = requestedName;
+    if (QFileInfo(path).isRelative())
+        path = defaultDir().absoluteFilePath(path);
+    if (QFileInfo(path).isDir())
+        return generateFileName(QDir(path), extension);
+    if (!path.endsWith(extension))
+        path.append(QString(".%1").arg(extension));
+    return path;
+}
+QString AudioCaptureSession::generateFileName(const QDir &dir,
+                                              const QString &ext) const
+{
    int lastClip = 0;
    foreach(QString fileName, dir.entryList(QStringList() << QString("clip_*.%1").arg(ext))) {
        int imgNumber = fileName.mid(5, fileName.size()-6-ext.length()).toInt();
@@ -277,25 +264,45 @@ QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ex
void AudioCaptureSession::record()
{
-    if(!m_audioInput) {
-        setFormat(m_format);
-    }
-    m_actualSink = m_sink;
-    if (m_actualSink.isEmpty()) {
-        QString ext = wavFile ? QLatin1String("wav") : QLatin1String("raw");
-        m_actualSink = generateFileName(defaultDir(), ext);
-    }
-    if(m_actualSink.toLocalFile().length() > 0)
-        file.setFileName(m_actualSink.toLocalFile());
-    else
-        file.setFileName(m_actualSink.toString());
-    if(m_audioInput) {
-        if(m_state == QMediaRecorder::StoppedState) {
-            if(file.open(QIODevice::WriteOnly)) {
+    if (m_status == QMediaRecorder::PausedStatus) {
+        m_audioInput->resume();
+    } else {
+        if (m_deviceInfo.isNull()) {
+            emit error(QMediaRecorder::ResourceError,
+                       QStringLiteral("No input device available."));
+            m_state = QMediaRecorder::StoppedState;
+            emit stateChanged(m_state);
+            setStatus(QMediaRecorder::UnavailableStatus);
+            return;
+        }
+        setStatus(QMediaRecorder::LoadingStatus);
+        m_format = m_deviceInfo.nearestFormat(m_format);
+        m_audioInput = new QAudioInput(m_deviceInfo, m_format);
+        connect(m_audioInput, SIGNAL(stateChanged(QAudio::State)),
+                this, SLOT(audioInputStateChanged(QAudio::State)));
+        connect(m_audioInput, SIGNAL(notify()),
+                this, SLOT(notify()));
+        QString filePath = generateFileName(
+                m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
+                                                        : m_requestedOutputLocation.toString(),
+                m_wavFile ? QLatin1String("wav")
+                          : QLatin1String("raw"));
+        m_actualOutputLocation = QUrl::fromLocalFile(filePath);
+        if (m_actualOutputLocation != m_requestedOutputLocation)
+            emit actualLocationChanged(m_actualOutputLocation);
+        file.setFileName(filePath);
+        setStatus(QMediaRecorder::LoadedStatus);
+        setStatus(QMediaRecorder::StartingStatus);
+        if (file.open(QIODevice::WriteOnly)) {
+            if (m_wavFile) {
                memset(&header,0,sizeof(CombinedHeader));
                memcpy(header.riff.descriptor.id,"RIFF",4);
                header.riff.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(), filesize-8
@@ -310,28 +317,26 @@ void AudioCaptureSession::record()
                header.wave.bitsPerSample = m_format.sampleSize();
                memcpy(header.data.descriptor.id,"data",4);
                header.data.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(),samples*channels*sampleSize/8
-                if (wavFile)
                file.write((char*)&header,sizeof(CombinedHeader));
+            }
            file.startProbes(m_format);
            m_audioInput->start(qobject_cast<QIODevice*>(&file));
        } else {
-            emit error(1,QString("can't open source, failed"));
+            delete m_audioInput;
+            m_audioInput = 0;
+            emit error(QMediaRecorder::ResourceError,
+                       QStringLiteral("Can't open output location"));
            m_state = QMediaRecorder::StoppedState;
            emit stateChanged(m_state);
+            setStatus(QMediaRecorder::UnloadedStatus);
        }
    }
-    }
-    m_state = QMediaRecorder::RecordingState;
}
void AudioCaptureSession::pause()
{
-    if(m_audioInput)
-        m_audioInput->stop();
-    m_state = QMediaRecorder::PausedState;
+    m_audioInput->suspend();
}
void AudioCaptureSession::stop()
@@ -340,7 +345,7 @@ void AudioCaptureSession::stop()
        m_audioInput->stop();
        file.stopProbes();
        file.close();
-        if (wavFile) {
+        if (m_wavFile) {
            qint32 fileSize = file.size()-8;
            file.open(QIODevice::ReadWrite | QIODevice::Unbuffered);
            file.read((char*)&header,sizeof(CombinedHeader));
@@ -350,9 +355,10 @@ void AudioCaptureSession::stop()
            file.write((char*)&header,sizeof(CombinedHeader));
            file.close();
        }
-        m_position = 0;
+        delete m_audioInput;
+        m_audioInput = 0;
+        setStatus(QMediaRecorder::UnloadedStatus);
    }
-    m_state = QMediaRecorder::StoppedState;
}
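[Editorial aside, not part of the commit] The size patching that stop() performs above amounts to rewriting two little-endian fields of the fixed-size RIFF/WAVE header once the final file size is known. A minimal standalone sketch, assuming the canonical 44-byte header layout rather than the backend's CombinedHeader struct:

    #include <QFile>
    #include <QtEndian>

    // Hedged sketch: patch the RIFF chunk size (file size - 8) and the data chunk
    // size (payload bytes) after recording has finished, as stop() does above.
    static bool finalizeWavSizes(const QString &path, qint64 headerSize = 44)
    {
        QFile f(path);
        if (!f.open(QIODevice::ReadWrite))
            return false;

        const quint32 riffSize = quint32(f.size() - 8);
        const quint32 dataSize = quint32(f.size() - headerSize);

        uchar le[4];
        qToLittleEndian(riffSize, le);
        f.seek(4);                  // offset of the RIFF descriptor size field
        f.write(reinterpret_cast<const char *>(le), 4);

        qToLittleEndian(dataSize, le);
        f.seek(headerSize - 4);     // offset of the data descriptor size field
        f.write(reinterpret_cast<const char *>(le), 4);

        return true;
    }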
void AudioCaptureSession::addProbe(AudioCaptureProbeControl *probe) void AudioCaptureSession::addProbe(AudioCaptureProbeControl *probe)
@@ -365,45 +371,41 @@ void AudioCaptureSession::removeProbe(AudioCaptureProbeControl *probe)
file.removeProbe(probe); file.removeProbe(probe);
} }
-void AudioCaptureSession::stateChanged(QAudio::State state)
+void AudioCaptureSession::audioInputStateChanged(QAudio::State state)
{
    switch(state) {
    case QAudio::ActiveState:
-        emit stateChanged(QMediaRecorder::RecordingState);
+        setStatus(QMediaRecorder::RecordingStatus);
+        break;
+    case QAudio::SuspendedState:
+        setStatus(QMediaRecorder::PausedStatus);
+        break;
+    case QAudio::StoppedState:
+        setStatus(QMediaRecorder::FinalizingStatus);
        break;
    default:
-        if(!((m_state == QMediaRecorder::PausedState)||(m_state == QMediaRecorder::StoppedState)))
-            m_state = QMediaRecorder::StoppedState;
-        emit stateChanged(m_state);
        break;
    }
}
void AudioCaptureSession::notify()
{
-    m_position += m_audioInput->notifyInterval();
-    emit positionChanged(m_position);
+    emit positionChanged(position());
}
void AudioCaptureSession::setCaptureDevice(const QString &deviceName)
{
    m_captureDevice = deviceName;
-    if(m_deviceInfo)
-        delete m_deviceInfo;
-    m_deviceInfo = 0;
    QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
-    for(int i = 0; i < devices.size(); i++) {
-        if(qstrcmp(m_captureDevice.toLocal8Bit().constData(),
-                   devices.at(i).deviceName().toLocal8Bit().constData())==0){
-            m_deviceInfo = new QAudioDeviceInfo(devices.at(i));
+    for (int i = 0; i < devices.size(); ++i) {
+        QAudioDeviceInfo info = devices.at(i);
+        if (m_captureDevice == info.deviceName()){
+            m_deviceInfo = info;
            return;
        }
    }
-    m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice());
+    m_deviceInfo = QAudioDeviceInfo::defaultInputDevice();
}
QT_END_NAMESPACE
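[Editorial aside, not part of the commit] At the application level the device handling above is driven through the public Qt Multimedia API; a hedged usage sketch (arbitrary function name, assumes a running Qt application):

    #include <QAudioRecorder>
    #include <QStringList>
    #include <QDebug>

    // Enumerate the capture devices this backend exposes and select one by name.
    void pickFirstInput()
    {
        QAudioRecorder recorder;
        const QStringList inputs = recorder.audioInputs();
        for (int i = 0; i < inputs.size(); ++i)
            qDebug() << "available input:" << inputs.at(i);

        if (!inputs.isEmpty())
            recorder.setAudioInput(inputs.first());   // otherwise the default device is used
    }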

View File

@@ -55,7 +55,7 @@
#include <qaudioinput.h> #include <qaudioinput.h>
#include <qaudiodeviceinfo.h> #include <qaudiodeviceinfo.h>
QT_USE_NAMESPACE QT_BEGIN_NAMESPACE
class AudioCaptureProbeControl; class AudioCaptureProbeControl;
@@ -85,50 +85,58 @@ public:
~AudioCaptureSession(); ~AudioCaptureSession();
QAudioFormat format() const; QAudioFormat format() const;
QAudioDeviceInfo* deviceInfo() const; void setFormat(const QAudioFormat &format);
bool isFormatSupported(const QAudioFormat &format) const;
bool setFormat(const QAudioFormat &format);
QStringList supportedContainers() const;
QString containerFormat() const; QString containerFormat() const;
void setContainerFormat(const QString &formatMimeType); void setContainerFormat(const QString &formatMimeType);
QString containerDescription(const QString &formatMimeType) const;
QUrl outputLocation() const; QUrl outputLocation() const;
bool setOutputLocation(const QUrl& sink); bool setOutputLocation(const QUrl& location);
qint64 position() const; qint64 position() const;
int state() const;
void record(); void setState(QMediaRecorder::State state);
void pause(); QMediaRecorder::State state() const;
void stop(); QMediaRecorder::Status status() const;
void addProbe(AudioCaptureProbeControl *probe); void addProbe(AudioCaptureProbeControl *probe);
void removeProbe(AudioCaptureProbeControl *probe); void removeProbe(AudioCaptureProbeControl *probe);
public slots:
void setCaptureDevice(const QString &deviceName); void setCaptureDevice(const QString &deviceName);
signals: signals:
void stateChanged(QMediaRecorder::State state); void stateChanged(QMediaRecorder::State state);
void statusChanged(QMediaRecorder::Status status);
void positionChanged(qint64 position); void positionChanged(qint64 position);
void actualLocationChanged(const QUrl &location);
void error(int error, const QString &errorString); void error(int error, const QString &errorString);
private slots: private slots:
void stateChanged(QAudio::State state); void audioInputStateChanged(QAudio::State state);
void notify(); void notify();
private: private:
void record();
void pause();
void stop();
void setStatus(QMediaRecorder::Status status);
QDir defaultDir() const; QDir defaultDir() const;
QString generateFileName(const QDir &dir, const QString &ext) const; QString generateFileName(const QString &requestedName,
const QString &extension) const;
QString generateFileName(const QDir &dir, const QString &extension) const;
FileProbeProxy file; FileProbeProxy file;
QString m_captureDevice; QString m_captureDevice;
QUrl m_sink; QUrl m_requestedOutputLocation;
QUrl m_actualSink; QUrl m_actualOutputLocation;
QMediaRecorder::State m_state; QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
QAudioInput *m_audioInput; QAudioInput *m_audioInput;
QAudioDeviceInfo *m_deviceInfo; QAudioDeviceInfo m_deviceInfo;
QAudioFormat m_format; QAudioFormat m_format;
qint64 m_position; bool m_wavFile;
bool wavFile;
// WAV header stuff // WAV header stuff
@@ -171,4 +179,6 @@ private:
CombinedHeader header; CombinedHeader header;
}; };
QT_END_NAMESPACE
#endif #endif

View File

@@ -42,6 +42,8 @@
#include "audiocontainercontrol.h" #include "audiocontainercontrol.h"
#include "audiocapturesession.h" #include "audiocapturesession.h"
QT_BEGIN_NAMESPACE
AudioContainerControl::AudioContainerControl(QObject *parent) AudioContainerControl::AudioContainerControl(QObject *parent)
:QMediaContainerControl(parent) :QMediaContainerControl(parent)
{ {
@@ -54,7 +56,8 @@ AudioContainerControl::~AudioContainerControl()
QStringList AudioContainerControl::supportedContainers() const QStringList AudioContainerControl::supportedContainers() const
{ {
-    return m_session->supportedContainers();
+    return QStringList() << QStringLiteral("audio/x-wav")
+                         << QStringLiteral("audio/x-raw");
}
QString AudioContainerControl::containerFormat() const QString AudioContainerControl::containerFormat() const
@@ -64,11 +67,18 @@ QString AudioContainerControl::containerFormat() const
void AudioContainerControl::setContainerFormat(const QString &formatMimeType)
{
+    if (formatMimeType.isEmpty() || supportedContainers().contains(formatMimeType))
        m_session->setContainerFormat(formatMimeType);
}
QString AudioContainerControl::containerDescription(const QString &formatMimeType) const
{
-    return m_session->containerDescription(formatMimeType);
+    if (QString::compare(formatMimeType, QLatin1String("audio/x-raw")) == 0)
+        return tr("RAW (headerless) file format");
+    if (QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0)
+        return tr("WAV file format");
+    return QString();
}
QT_END_NAMESPACE
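[Editorial aside, not part of the commit] Client code chooses between the two containers reported by this control; a brief hedged sketch against the public API:

    #include <QAudioRecorder>

    // "audio/x-wav" produces a RIFF/WAVE file, "audio/x-raw" bare PCM samples.
    void chooseContainer(QAudioRecorder &recorder, bool headerless)
    {
        recorder.setContainerFormat(headerless ? QStringLiteral("audio/x-raw")
                                               : QStringLiteral("audio/x-wav"));
    }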

View File

@@ -47,9 +47,9 @@
#include <QtCore/qstringlist.h> #include <QtCore/qstringlist.h>
#include <QtCore/qmap.h> #include <QtCore/qmap.h>
class AudioCaptureSession; QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE class AudioCaptureSession;
class AudioContainerControl : public QMediaContainerControl class AudioContainerControl : public QMediaContainerControl
{ {
@@ -67,4 +67,6 @@ private:
AudioCaptureSession* m_session; AudioCaptureSession* m_session;
}; };
QT_END_NAMESPACE
#endif #endif

View File

@@ -46,87 +46,12 @@
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
AudioEncoderControl::AudioEncoderControl(QObject *parent) QT_BEGIN_NAMESPACE
:QAudioEncoderSettingsControl(parent)
static QAudioFormat audioSettingsToAudioFormat(const QAudioEncoderSettings &settings)
{ {
m_session = qobject_cast<AudioCaptureSession*>(parent); QAudioFormat fmt;
fmt.setCodec(settings.codec());
QT_PREPEND_NAMESPACE(QAudioFormat) fmt;
fmt.setSampleSize(8);
fmt.setChannelCount(1);
fmt.setSampleRate(8000);
fmt.setSampleType(QT_PREPEND_NAMESPACE(QAudioFormat)::SignedInt);
fmt.setCodec("audio/pcm");
fmt.setByteOrder(QAudioFormat::LittleEndian);
m_session->setFormat(fmt);
m_settings.setEncodingMode(QMultimedia::ConstantQualityEncoding);
m_settings.setCodec("audio/pcm");
m_settings.setBitRate(8000);
m_settings.setChannelCount(1);
m_settings.setSampleRate(8000);
m_settings.setQuality(QMultimedia::LowQuality);
}
AudioEncoderControl::~AudioEncoderControl()
{
}
QStringList AudioEncoderControl::supportedAudioCodecs() const
{
QStringList list;
if (m_session->supportedContainers().size() > 0)
list.append("audio/pcm");
return list;
}
QString AudioEncoderControl::codecDescription(const QString &codecName) const
{
if (codecName.contains(QLatin1String("audio/pcm")))
return tr("PCM audio data");
return QString();
}
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &, bool *continuous) const
{
if (continuous)
*continuous = false;
return m_session->deviceInfo()->supportedSampleRates();
}
QAudioEncoderSettings AudioEncoderControl::audioSettings() const
{
return m_settings;
}
void AudioEncoderControl::setAudioSettings(const QAudioEncoderSettings &settings)
{
QAudioFormat fmt = m_session->format();
if (settings.encodingMode() == QMultimedia::ConstantQualityEncoding) {
if (settings.quality() == QMultimedia::LowQuality) {
fmt.setSampleSize(8);
fmt.setChannelCount(1);
fmt.setSampleRate(8000);
fmt.setSampleType(QAudioFormat::UnSignedInt);
} else if (settings.quality() == QMultimedia::NormalQuality) {
fmt.setSampleSize(16);
fmt.setChannelCount(1);
fmt.setSampleRate(22050);
fmt.setSampleType(QAudioFormat::SignedInt);
} else {
fmt.setSampleSize(16);
fmt.setChannelCount(1);
fmt.setSampleRate(44100);
fmt.setSampleType(QAudioFormat::SignedInt);
}
} else {
fmt.setChannelCount(settings.channelCount()); fmt.setChannelCount(settings.channelCount());
fmt.setSampleRate(settings.sampleRate()); fmt.setSampleRate(settings.sampleRate());
if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) { if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) {
@@ -136,9 +61,115 @@ void AudioEncoderControl::setAudioSettings(const QAudioEncoderSettings &settings
fmt.setSampleSize(16); fmt.setSampleSize(16);
fmt.setSampleType(QAudioFormat::SignedInt); fmt.setSampleType(QAudioFormat::SignedInt);
} }
} fmt.setByteOrder(QAudioDeviceInfo::defaultInputDevice().preferredFormat().byteOrder());
return fmt;
}
static QAudioEncoderSettings audioFormatToAudioSettings(const QAudioFormat &format)
{
QAudioEncoderSettings settings;
settings.setCodec(format.codec());
settings.setChannelCount(format.channelCount());
settings.setSampleRate(format.sampleRate());
settings.setEncodingMode(QMultimedia::ConstantBitRateEncoding);
settings.setBitRate(format.channelCount()
* format.sampleSize()
* format.sampleRate());
return settings;
}
AudioEncoderControl::AudioEncoderControl(QObject *parent)
:QAudioEncoderSettingsControl(parent)
{
m_session = qobject_cast<AudioCaptureSession*>(parent);
update();
}
AudioEncoderControl::~AudioEncoderControl()
{
}
QStringList AudioEncoderControl::supportedAudioCodecs() const
{
return QStringList() << QStringLiteral("audio/pcm");
}
QString AudioEncoderControl::codecDescription(const QString &codecName) const
{
if (QString::compare(codecName, QLatin1String("audio/pcm")) == 0)
return tr("Linear PCM audio data");
return QString();
}
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &settings, bool *continuous) const
{
if (continuous)
*continuous = false;
if (settings.codec().isEmpty() || settings.codec() == QLatin1String("audio/pcm"))
return m_sampleRates;
return QList<int>();
}
QAudioEncoderSettings AudioEncoderControl::audioSettings() const
{
return audioFormatToAudioSettings(m_session->format());
}
void AudioEncoderControl::setAudioSettings(const QAudioEncoderSettings &settings)
{
QAudioFormat fmt = audioSettingsToAudioFormat(settings);
if (settings.encodingMode() == QMultimedia::ConstantQualityEncoding) {
fmt.setCodec("audio/pcm"); fmt.setCodec("audio/pcm");
switch (settings.quality()) {
case QMultimedia::VeryLowQuality:
fmt.setSampleSize(8);
fmt.setSampleRate(8000);
fmt.setSampleType(QAudioFormat::UnSignedInt);
break;
case QMultimedia::LowQuality:
fmt.setSampleSize(8);
fmt.setSampleRate(22050);
fmt.setSampleType(QAudioFormat::UnSignedInt);
break;
case QMultimedia::HighQuality:
fmt.setSampleSize(16);
fmt.setSampleRate(48000);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
case QMultimedia::VeryHighQuality:
fmt.setSampleSize(16);
fmt.setSampleRate(96000);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
case QMultimedia::NormalQuality:
default:
fmt.setSampleSize(16);
fmt.setSampleRate(44100);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
}
}
m_session->setFormat(fmt); m_session->setFormat(fmt);
m_settings = settings;
} }
void AudioEncoderControl::update()
{
m_sampleRates.clear();
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for (int i = 0; i < devices.size(); ++i) {
QList<int> rates = devices.at(i).supportedSampleRates();
for (int j = 0; j < rates.size(); ++j) {
int rate = rates.at(j);
if (!m_sampleRates.contains(rate))
m_sampleRates.append(rate);
}
}
qSort(m_sampleRates);
}
QT_END_NAMESPACE
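[Editorial aside, not part of the commit] Tying the encoder control together, an application would configure PCM capture through QAudioEncoderSettings and start recording via the public API. A hedged sketch; the sample rate, channel count and output path are arbitrary examples, and the rate should be one of the values this control reports:

    #include <QAudioRecorder>
    #include <QAudioEncoderSettings>
    #include <QUrl>

    void startPcmRecording(QAudioRecorder &recorder)
    {
        QAudioEncoderSettings settings;
        settings.setCodec(QStringLiteral("audio/pcm"));   // the only codec this backend reports
        settings.setSampleRate(44100);
        settings.setChannelCount(1);
        settings.setEncodingMode(QMultimedia::ConstantQualityEncoding);
        settings.setQuality(QMultimedia::NormalQuality);  // maps to 16-bit / 44.1 kHz above

        recorder.setAudioSettings(settings);
        recorder.setOutputLocation(QUrl::fromLocalFile(QStringLiteral("clip.wav")));
        recorder.record();
    }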

View File

@@ -49,9 +49,9 @@
#include <qaudioformat.h> #include <qaudioformat.h>
class AudioCaptureSession; QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE class AudioCaptureSession;
class AudioEncoderControl : public QAudioEncoderSettingsControl class AudioEncoderControl : public QAudioEncoderSettingsControl
{ {
@@ -68,8 +68,12 @@ public:
void setAudioSettings(const QAudioEncoderSettings&); void setAudioSettings(const QAudioEncoderSettings&);
private: private:
void update();
AudioCaptureSession* m_session; AudioCaptureSession* m_session;
QAudioEncoderSettings m_settings; QList<int> m_sampleRates;
}; };
QT_END_NAMESPACE
#endif #endif

View File

@@ -44,6 +44,7 @@
#include <qaudiodeviceinfo.h> #include <qaudiodeviceinfo.h>
QT_BEGIN_NAMESPACE
AudioInputSelector::AudioInputSelector(QObject *parent) AudioInputSelector::AudioInputSelector(QObject *parent)
:QAudioInputSelectorControl(parent) :QAudioInputSelectorControl(parent)
@@ -79,7 +80,7 @@ QString AudioInputSelector::inputDescription(const QString& name) const
QString AudioInputSelector::defaultInput() const QString AudioInputSelector::defaultInput() const
{ {
-    return QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice()).deviceName();
+    return QAudioDeviceInfo::defaultInputDevice().deviceName();
} }
QString AudioInputSelector::activeInput() const QString AudioInputSelector::activeInput() const
@@ -108,3 +109,5 @@ void AudioInputSelector::update()
m_descriptions.append(devices.at(i).deviceName()); m_descriptions.append(devices.at(i).deviceName());
} }
} }
QT_END_NAMESPACE

View File

@@ -46,9 +46,9 @@
#include "qaudioinputselectorcontrol.h" #include "qaudioinputselectorcontrol.h"
class AudioCaptureSession; QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE class AudioCaptureSession;
class AudioInputSelector : public QAudioInputSelectorControl class AudioInputSelector : public QAudioInputSelectorControl
{ {
@@ -74,4 +74,6 @@ private:
AudioCaptureSession* m_session; AudioCaptureSession* m_session;
}; };
QT_END_NAMESPACE
#endif // AUDIOINPUTSELECTOR_H #endif // AUDIOINPUTSELECTOR_H

View File

@@ -44,15 +44,22 @@
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
QT_BEGIN_NAMESPACE
AudioMediaRecorderControl::AudioMediaRecorderControl(QObject *parent) AudioMediaRecorderControl::AudioMediaRecorderControl(QObject *parent)
:QMediaRecorderControl(parent) : QMediaRecorderControl(parent)
, m_state(QMediaRecorder::StoppedState)
, m_prevStatus(QMediaRecorder::UnloadedStatus)
{ {
m_session = qobject_cast<AudioCaptureSession*>(parent); m_session = qobject_cast<AudioCaptureSession*>(parent);
connect(m_session,SIGNAL(positionChanged(qint64)),this,SIGNAL(durationChanged(qint64))); connect(m_session, SIGNAL(positionChanged(qint64)),
connect(m_session,SIGNAL(stateChanged(QMediaRecorder::State)), this,SLOT(updateStatus())); this, SIGNAL(durationChanged(qint64)));
connect(m_session,SIGNAL(error(int,QString)),this,SLOT(handleSessionError(int,QString))); connect(m_session, SIGNAL(stateChanged(QMediaRecorder::State)),
this, SIGNAL(stateChanged(QMediaRecorder::State)));
connect(m_session, SIGNAL(statusChanged(QMediaRecorder::Status)),
this, SIGNAL(statusChanged(QMediaRecorder::Status)));
connect(m_session, SIGNAL(actualLocationChanged(QUrl)),
this, SIGNAL(actualLocationChanged(QUrl)));
connect(m_session, SIGNAL(error(int,QString)),
this, SIGNAL(error(int,QString)));
} }
AudioMediaRecorderControl::~AudioMediaRecorderControl() AudioMediaRecorderControl::~AudioMediaRecorderControl()
@@ -71,21 +78,12 @@ bool AudioMediaRecorderControl::setOutputLocation(const QUrl& sink)
QMediaRecorder::State AudioMediaRecorderControl::state() const QMediaRecorder::State AudioMediaRecorderControl::state() const
{ {
return (QMediaRecorder::State)m_session->state(); return m_session->state();
} }
QMediaRecorder::Status AudioMediaRecorderControl::status() const QMediaRecorder::Status AudioMediaRecorderControl::status() const
{ {
static QMediaRecorder::Status statusTable[3][3] = { return m_session->status();
//Stopped recorder state:
{ QMediaRecorder::LoadedStatus, QMediaRecorder::FinalizingStatus, QMediaRecorder::FinalizingStatus },
//Recording recorder state:
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus },
//Paused recorder state:
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus }
};
return statusTable[m_state][m_session->state()];
} }
qint64 AudioMediaRecorderControl::duration() const qint64 AudioMediaRecorderControl::duration() const
@@ -106,47 +104,19 @@ qreal AudioMediaRecorderControl::volume() const
void AudioMediaRecorderControl::setState(QMediaRecorder::State state) void AudioMediaRecorderControl::setState(QMediaRecorder::State state)
{ {
if (m_state == state) m_session->setState(state);
return;
m_state = state;
switch (state) {
case QMediaRecorder::StoppedState:
m_session->stop();
break;
case QMediaRecorder::PausedState:
m_session->pause();
break;
case QMediaRecorder::RecordingState:
m_session->record();
break;
}
updateStatus();
} }
void AudioMediaRecorderControl::setMuted(bool) void AudioMediaRecorderControl::setMuted(bool muted)
{ {
if (muted)
qWarning("Muting the audio recording is not supported.");
} }
void AudioMediaRecorderControl::setVolume(qreal volume) void AudioMediaRecorderControl::setVolume(qreal volume)
{ {
if (!qFuzzyCompare(volume, qreal(1.0))) if (!qFuzzyCompare(volume, qreal(1.0)))
qWarning() << "Media service doesn't support recorder audio gain."; qWarning("Changing the audio recording volume is not supported.");
} }
void AudioMediaRecorderControl::updateStatus() QT_END_NAMESPACE
{
QMediaRecorder::Status newStatus = status();
if (m_prevStatus != newStatus) {
m_prevStatus = newStatus;
emit statusChanged(m_prevStatus);
}
}
void AudioMediaRecorderControl::handleSessionError(int code, const QString &description)
{
emit error(code, description);
setState(QMediaRecorder::StoppedState);
}

View File

@@ -47,9 +47,9 @@
#include "qmediarecorder.h" #include "qmediarecorder.h"
#include "qmediarecordercontrol.h" #include "qmediarecordercontrol.h"
class AudioCaptureSession; QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE class AudioCaptureSession;
class AudioMediaRecorderControl : public QMediaRecorderControl class AudioMediaRecorderControl : public QMediaRecorderControl
{ {
@@ -59,7 +59,7 @@ public:
~AudioMediaRecorderControl(); ~AudioMediaRecorderControl();
QUrl outputLocation() const; QUrl outputLocation() const;
bool setOutputLocation(const QUrl &sink); bool setOutputLocation(const QUrl &location);
QMediaRecorder::State state() const; QMediaRecorder::State state() const;
QMediaRecorder::Status status() const; QMediaRecorder::Status status() const;
@@ -71,19 +71,14 @@ public:
void applySettings() {} void applySettings() {}
public slots:
void setState(QMediaRecorder::State state); void setState(QMediaRecorder::State state);
void setMuted(bool); void setMuted(bool);
void setVolume(qreal volume); void setVolume(qreal volume);
private slots:
void updateStatus();
void handleSessionError(int code, const QString &description);
private: private:
AudioCaptureSession* m_session; AudioCaptureSession* m_session;
QMediaRecorder::State m_state;
QMediaRecorder::Status m_prevStatus;
}; };
QT_END_NAMESPACE
#endif #endif

View File

@@ -40,10 +40,8 @@
****************************************************************************/ ****************************************************************************/
#include "bbserviceplugin.h" #include "bbserviceplugin.h"
#ifndef Q_OS_BLACKBERRY_TABLET
#include "bbcameraservice.h" #include "bbcameraservice.h"
#include "bbvideodeviceselectorcontrol.h" #include "bbvideodeviceselectorcontrol.h"
#endif
#include "bbmediaplayerservice.h" #include "bbmediaplayerservice.h"
#include <QDebug> #include <QDebug>
@@ -56,10 +54,8 @@ BbServicePlugin::BbServicePlugin()
QMediaService *BbServicePlugin::create(const QString &key) QMediaService *BbServicePlugin::create(const QString &key)
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA)) if (key == QLatin1String(Q_MEDIASERVICE_CAMERA))
return new BbCameraService(); return new BbCameraService();
#endif
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER)) if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER))
return new BbMediaPlayerService(); return new BbMediaPlayerService();
@@ -106,9 +102,7 @@ QString BbServicePlugin::deviceDescription(const QByteArray &service, const QByt
void BbServicePlugin::updateDevices() const void BbServicePlugin::updateDevices() const
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
BbVideoDeviceSelectorControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions); BbVideoDeviceSelectorControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
#endif
if (m_cameraDevices.isEmpty()) { if (m_cameraDevices.isEmpty()) {
qWarning() << "No camera devices found"; qWarning() << "No camera devices found";

View File

@@ -12,9 +12,7 @@ SOURCES += bbserviceplugin.cpp
include(common/common.pri) include(common/common.pri)
!blackberry-playbook { include(camera/camera.pri)
include(camera/camera.pri)
}
include(mediaplayer/mediaplayer.pri) include(mediaplayer/mediaplayer.pri)

View File

@@ -139,6 +139,7 @@ QVariant BbCameraExposureControl::requestedValue(ExposureParameter parameter) co
QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment
return QVariantList(); return QVariantList();
@@ -170,6 +171,9 @@ QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
default: default:
return QVariant(); return QVariant();
} }
#else
return QVariant();
#endif
} }
bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value) bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)

View File

@@ -45,11 +45,14 @@
#include <QDebug> #include <QDebug>
#include <QUrl> #include <QUrl>
#ifndef Q_OS_BLACKBERRY_TABLET
#include <audio/audio_manager_device.h> #include <audio/audio_manager_device.h>
#include <audio/audio_manager_volume.h> #include <audio/audio_manager_volume.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
#ifndef Q_OS_BLACKBERRY_TABLET
static audio_manager_device_t currentAudioInputDevice() static audio_manager_device_t currentAudioInputDevice()
{ {
audio_manager_device_t device = AUDIO_DEVICE_HEADSET; audio_manager_device_t device = AUDIO_DEVICE_HEADSET;
@@ -62,6 +65,7 @@ static audio_manager_device_t currentAudioInputDevice()
return device; return device;
} }
#endif
BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent) BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent)
: QMediaRecorderControl(parent) : QMediaRecorderControl(parent)
@@ -103,12 +107,13 @@ bool BbCameraMediaRecorderControl::isMuted() const
{ {
bool muted = false; bool muted = false;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted); const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted);
if (result != EOK) { if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status")); emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status"));
return false; return false;
} }
#endif
return muted; return muted;
} }
@@ -116,11 +121,13 @@ qreal BbCameraMediaRecorderControl::volume() const
{ {
double level = 0.0; double level = 0.0;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level); const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level);
if (result != EOK) { if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume")); emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume"));
return 0.0; return 0.0;
} }
#endif
return (level / 100); return (level / 100);
} }
@@ -137,22 +144,26 @@ void BbCameraMediaRecorderControl::setState(QMediaRecorder::State state)
void BbCameraMediaRecorderControl::setMuted(bool muted) void BbCameraMediaRecorderControl::setMuted(bool muted)
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted); const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted);
if (result != EOK) { if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status")); emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status"));
} else { } else {
emit mutedChanged(muted); emit mutedChanged(muted);
} }
#endif
} }
void BbCameraMediaRecorderControl::setVolume(qreal volume) void BbCameraMediaRecorderControl::setVolume(qreal volume)
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100)); const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100));
if (result != EOK) { if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume")); emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume"));
} else { } else {
emit volumeChanged(volume); emit volumeChanged(volume);
} }
#endif
} }
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -70,9 +70,11 @@ BbCameraOrientationHandler::BbCameraOrientationHandler(QObject *parent)
BbCameraOrientationHandler::~BbCameraOrientationHandler() BbCameraOrientationHandler::~BbCameraOrientationHandler()
{ {
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = orientation_stop_events(0); const int result = orientation_stop_events(0);
if (result == BPS_FAILURE) if (result == BPS_FAILURE)
qWarning() << "Unable to unregister for orientation change events"; qWarning() << "Unable to unregister for orientation change events";
#endif
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this); QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
} }

View File

@@ -75,8 +75,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("No permission"); return QLatin1String("No permission");
case CAMERA_EBADR: case CAMERA_EBADR:
return QLatin1String("Invalid file descriptor"); return QLatin1String("Invalid file descriptor");
case CAMERA_ENODATA:
return QLatin1String("Data does not exist");
case CAMERA_ENOENT: case CAMERA_ENOENT:
return QLatin1String("File or directory does not exists"); return QLatin1String("File or directory does not exists");
case CAMERA_ENOMEM: case CAMERA_ENOMEM:
@@ -87,24 +85,28 @@ static QString errorToString(camera_error_t error)
return QLatin1String("Communication timeout"); return QLatin1String("Communication timeout");
case CAMERA_EALREADY: case CAMERA_EALREADY:
return QLatin1String("Operation already in progress"); return QLatin1String("Operation already in progress");
case CAMERA_EBUSY:
return QLatin1String("Camera busy");
case CAMERA_ENOSPC:
return QLatin1String("Disk is full");
case CAMERA_EUNINIT: case CAMERA_EUNINIT:
return QLatin1String("Camera library not initialized"); return QLatin1String("Camera library not initialized");
case CAMERA_EREGFAULT: case CAMERA_EREGFAULT:
return QLatin1String("Callback registration failed"); return QLatin1String("Callback registration failed");
case CAMERA_EMICINUSE: case CAMERA_EMICINUSE:
return QLatin1String("Microphone in use already"); return QLatin1String("Microphone in use already");
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_ENODATA:
return QLatin1String("Data does not exist");
case CAMERA_EBUSY:
return QLatin1String("Camera busy");
case CAMERA_EDESKTOPCAMERAINUSE: case CAMERA_EDESKTOPCAMERAINUSE:
return QLatin1String("Desktop camera in use already"); return QLatin1String("Desktop camera in use already");
case CAMERA_ENOSPC:
return QLatin1String("Disk is full");
case CAMERA_EPOWERDOWN: case CAMERA_EPOWERDOWN:
return QLatin1String("Camera in power down state"); return QLatin1String("Camera in power down state");
case CAMERA_3ALOCKED: case CAMERA_3ALOCKED:
return QLatin1String("3A have been locked"); return QLatin1String("3A have been locked");
case CAMERA_EVIEWFINDERFROZEN: case CAMERA_EVIEWFINDERFROZEN:
return QLatin1String("Freeze flag set"); return QLatin1String("Freeze flag set");
#endif
default: default:
return QLatin1String("Unknown error"); return QLatin1String("Unknown error");
} }
@@ -658,6 +660,9 @@ void BbCameraSession::applyVideoSettings()
return; return;
} }
const QSize resolution = m_videoEncoderSettings.resolution();
#ifndef Q_OS_BLACKBERRY_TABLET
QString videoCodec = m_videoEncoderSettings.codec(); QString videoCodec = m_videoEncoderSettings.codec();
if (videoCodec.isEmpty()) if (videoCodec.isEmpty())
videoCodec = QLatin1String("h264"); videoCodec = QLatin1String("h264");
@@ -670,8 +675,6 @@ void BbCameraSession::applyVideoSettings()
else if (videoCodec == QLatin1String("h264")) else if (videoCodec == QLatin1String("h264"))
cameraVideoCodec = CAMERA_VIDEOCODEC_H264; cameraVideoCodec = CAMERA_VIDEOCODEC_H264;
const QSize resolution = m_videoEncoderSettings.resolution();
qreal frameRate = m_videoEncoderSettings.frameRate(); qreal frameRate = m_videoEncoderSettings.frameRate();
if (frameRate == 0) { if (frameRate == 0) {
const QList<qreal> frameRates = supportedFrameRates(QVideoEncoderSettings(), 0); const QList<qreal> frameRates = supportedFrameRates(QVideoEncoderSettings(), 0);
@@ -690,12 +693,16 @@ void BbCameraSession::applyVideoSettings()
cameraAudioCodec = CAMERA_AUDIOCODEC_AAC; cameraAudioCodec = CAMERA_AUDIOCODEC_AAC;
else if (audioCodec == QLatin1String("raw")) else if (audioCodec == QLatin1String("raw"))
cameraAudioCodec = CAMERA_AUDIOCODEC_RAW; cameraAudioCodec = CAMERA_AUDIOCODEC_RAW;
result = camera_set_video_property(m_handle, result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(), CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height(), CAMERA_IMGPROP_HEIGHT, resolution.height(),
CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec, CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec,
CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec); CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec);
#else
result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height());
#endif
if (result != CAMERA_EOK) { if (result != CAMERA_EOK) {
qWarning() << "Unable to apply video settings:" << result; qWarning() << "Unable to apply video settings:" << result;
@@ -979,10 +986,14 @@ static void viewFinderStatusCallback(camera_handle_t handle, camera_devstatus_t
if (status == CAMERA_STATUS_FOCUS_CHANGE) { if (status == CAMERA_STATUS_FOCUS_CHANGE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleFocusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value)); QMetaObject::invokeMethod(session, "handleFocusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value));
} else if (status == CAMERA_STATUS_POWERUP) { return;
}
#ifndef Q_OS_BLACKBERRY_TABLET
else if (status == CAMERA_STATUS_POWERUP) {
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection);
} }
#endif
} }
bool BbCameraSession::startViewFinder() bool BbCameraSession::startViewFinder()
@@ -1159,6 +1170,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
Q_UNUSED(handle) Q_UNUSED(handle)
Q_UNUSED(value) Q_UNUSED(value)
#ifndef Q_OS_BLACKBERRY_TABLET
if (status == CAMERA_STATUS_VIDEO_PAUSE) { if (status == CAMERA_STATUS_VIDEO_PAUSE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection);
@@ -1166,6 +1178,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
BbCameraSession *session = static_cast<BbCameraSession*>(context); BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection); QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection);
} }
#endif
} }
bool BbCameraSession::startVideoRecording() bool BbCameraSession::startVideoRecording()

View File

@@ -156,10 +156,12 @@ QVariant BbCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParame
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_CBYCRY: case CAMERA_FRAMETYPE_CBYCRY:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_FRAMETYPE_COMPRESSEDVIDEO: case CAMERA_FRAMETYPE_COMPRESSEDVIDEO:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_COMPRESSEDAUDIO: case CAMERA_FRAMETYPE_COMPRESSEDAUDIO:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
#endif
default: default:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
} }

View File

@@ -46,4 +46,8 @@ SOURCES += \
$$PWD/bbvideodeviceselectorcontrol.cpp \ $$PWD/bbvideodeviceselectorcontrol.cpp \
$$PWD/bbvideorenderercontrol.cpp $$PWD/bbvideorenderercontrol.cpp
LIBS += -lcamapi -laudio_manager LIBS += -lcamapi
!blackberry-playbook {
LIBS += -laudio_manager
}

View File

@@ -39,6 +39,12 @@
** **
****************************************************************************/ ****************************************************************************/
#include <QtMultimedia/qmediametadata.h>
#include <QtCore/qcoreapplication.h>
#include <QSize>
#include <qdatetime.h>
#include <qimage.h>
#include <dshow.h> #include <dshow.h>
#include <initguid.h> #include <initguid.h>
#include <qnetwork.h> #include <qnetwork.h>
@@ -46,8 +52,56 @@
#include "directshowmetadatacontrol.h" #include "directshowmetadatacontrol.h"
#include "directshowplayerservice.h" #include "directshowplayerservice.h"
#include <QtMultimedia/qmediametadata.h> #ifndef QT_NO_WMSDK
#include <QtCore/qcoreapplication.h> #include <wmsdk.h>
#endif
#ifndef QT_NO_SHELLITEM
#include <ShlObj.h>
#include <propkeydef.h>
#include <private/qsystemlibrary_p.h>
DEFINE_PROPERTYKEY(PKEY_Author, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 4);
DEFINE_PROPERTYKEY(PKEY_Title, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 2);
DEFINE_PROPERTYKEY(PKEY_Media_SubTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 38);
DEFINE_PROPERTYKEY(PKEY_ParentalRating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 21);
DEFINE_PROPERTYKEY(PKEY_Comment, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 6);
DEFINE_PROPERTYKEY(PKEY_Copyright, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 11);
DEFINE_PROPERTYKEY(PKEY_Media_ProviderStyle, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 40);
DEFINE_PROPERTYKEY(PKEY_Media_Year, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 5);
DEFINE_PROPERTYKEY(PKEY_Media_DateEncoded, 0x2E4B640D, 0x5019, 0x46D8, 0x88, 0x81, 0x55, 0x41, 0x4C, 0xC5, 0xCA, 0xA0, 100);
DEFINE_PROPERTYKEY(PKEY_Rating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 9);
DEFINE_PROPERTYKEY(PKEY_Keywords, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 5);
DEFINE_PROPERTYKEY(PKEY_Language, 0xD5CDD502, 0x2E9C, 0x101B, 0x93, 0x97, 0x08, 0x00, 0x2B, 0x2C, 0xF9, 0xAE, 28);
DEFINE_PROPERTYKEY(PKEY_Media_Publisher, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 30);
DEFINE_PROPERTYKEY(PKEY_Media_Duration, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
DEFINE_PROPERTYKEY(PKEY_Audio_EncodingBitrate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
DEFINE_PROPERTYKEY(PKEY_Media_AverageLevel, 0x09EDD5B6, 0xB301, 0x43C5, 0x99, 0x90, 0xD0, 0x03, 0x02, 0xEF, 0xFD, 0x46, 100);
DEFINE_PROPERTYKEY(PKEY_Audio_ChannelCount, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 7);
DEFINE_PROPERTYKEY(PKEY_Audio_PeakValue, 0x2579E5D0, 0x1116, 0x4084, 0xBD, 0x9A, 0x9B, 0x4F, 0x7C, 0xB4, 0xDF, 0x5E, 100);
DEFINE_PROPERTYKEY(PKEY_Audio_SampleRate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 5);
DEFINE_PROPERTYKEY(PKEY_Music_AlbumTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 4);
DEFINE_PROPERTYKEY(PKEY_Music_AlbumArtist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 13);
DEFINE_PROPERTYKEY(PKEY_Music_Artist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 2);
DEFINE_PROPERTYKEY(PKEY_Music_Composer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 19);
DEFINE_PROPERTYKEY(PKEY_Music_Conductor, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 36);
DEFINE_PROPERTYKEY(PKEY_Music_Lyrics, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 12);
DEFINE_PROPERTYKEY(PKEY_Music_Mood, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 39);
DEFINE_PROPERTYKEY(PKEY_Music_TrackNumber, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 7);
DEFINE_PROPERTYKEY(PKEY_Music_Genre, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 11);
DEFINE_PROPERTYKEY(PKEY_ThumbnailStream, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 27);
DEFINE_PROPERTYKEY(PKEY_Video_FrameHeight, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
DEFINE_PROPERTYKEY(PKEY_Video_FrameWidth, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
DEFINE_PROPERTYKEY(PKEY_Video_HorizontalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 42);
DEFINE_PROPERTYKEY(PKEY_Video_VerticalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 45);
DEFINE_PROPERTYKEY(PKEY_Video_FrameRate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 6);
DEFINE_PROPERTYKEY(PKEY_Video_EncodingBitrate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 8);
DEFINE_PROPERTYKEY(PKEY_Video_Director, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 20);
DEFINE_PROPERTYKEY(PKEY_Media_Writer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 23);
typedef HRESULT (WINAPI *q_SHCreateItemFromParsingName)(PCWSTR, IBindCtx *, const GUID&, void **);
static q_SHCreateItemFromParsingName sHCreateItemFromParsingName = 0;
#endif
#ifndef QT_NO_WMSDK #ifndef QT_NO_WMSDK
namespace namespace
@@ -70,12 +124,12 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
{ QMediaMetaData::Genre, L"WM/Genre" }, { QMediaMetaData::Genre, L"WM/Genre" },
//{ QMediaMetaData::Date, 0 }, //{ QMediaMetaData::Date, 0 },
{ QMediaMetaData::Year, L"WM/Year" }, { QMediaMetaData::Year, L"WM/Year" },
{ QMediaMetaData::UserRating, L"UserRating" }, { QMediaMetaData::UserRating, L"Rating" },
//{ QMediaMetaData::MetaDatawords, 0 }, //{ QMediaMetaData::MetaDatawords, 0 },
{ QMediaMetaData::Language, L"Language" }, { QMediaMetaData::Language, L"WM/Language" },
{ QMediaMetaData::Publisher, L"WM/Publisher" }, { QMediaMetaData::Publisher, L"WM/Publisher" },
{ QMediaMetaData::Copyright, L"Copyright" }, { QMediaMetaData::Copyright, L"Copyright" },
{ QMediaMetaData::ParentalRating, L"ParentalRating" }, { QMediaMetaData::ParentalRating, L"WM/ParentalRating" },
//{ QMediaMetaData::RatingOrganisation, L"RatingOrganisation" }, //{ QMediaMetaData::RatingOrganisation, L"RatingOrganisation" },
// Media // Media
@@ -103,11 +157,11 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
//{ QMediaMetaData::CoverArtUriLarge, 0 }, //{ QMediaMetaData::CoverArtUriLarge, 0 },
// Image/Video // Image/Video
//{ QMediaMetaData::Resolution, 0 }, { QMediaMetaData::Resolution, L"WM/VideoHeight" },
//{ QMediaMetaData::PixelAspectRatio, 0 }, { QMediaMetaData::PixelAspectRatio, L"AspectRatioX" },
// Video // Video
//{ QMediaMetaData::FrameRate, 0 }, { QMediaMetaData::VideoFrameRate, L"WM/VideoFrameRate" },
{ QMediaMetaData::VideoBitRate, L"VideoBitRate" }, { QMediaMetaData::VideoBitRate, L"VideoBitRate" },
{ QMediaMetaData::VideoCodec, L"VideoCodec" }, { QMediaMetaData::VideoCodec, L"VideoCodec" },
@@ -118,12 +172,6 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
{ QMediaMetaData::Director, L"WM/Director" }, { QMediaMetaData::Director, L"WM/Director" },
{ QMediaMetaData::LeadPerformer, L"LeadPerformer" }, { QMediaMetaData::LeadPerformer, L"LeadPerformer" },
{ QMediaMetaData::Writer, L"WM/Writer" }, { QMediaMetaData::Writer, L"WM/Writer" },
// Photos
{ QMediaMetaData::CameraManufacturer, L"CameraManufacturer" },
{ QMediaMetaData::CameraModel, L"CameraModel" },
{ QMediaMetaData::Event, L"Event" },
{ QMediaMetaData::Subject, L"Subject" }
}; };
static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key) static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
@@ -150,7 +198,7 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
case WMT_TYPE_STRING: case WMT_TYPE_STRING:
{ {
QString string; QString string;
string.resize(size / 2 - 1); string.resize(size / 2); // size is in bytes, string is in UTF16
if (header->GetAttributeByName( if (header->GetAttributeByName(
&streamNumber, &streamNumber,
@@ -227,12 +275,58 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
} }
#endif #endif
#ifndef QT_NO_SHELLITEM
static QVariant convertValue(const PROPVARIANT& var)
{
QVariant value;
switch (var.vt) {
case VT_LPWSTR:
value = QString::fromUtf16(reinterpret_cast<const ushort*>(var.pwszVal));
break;
case VT_UI4:
value = uint(var.ulVal);
break;
case VT_UI8:
value = qulonglong(var.uhVal.QuadPart);
break;
case VT_BOOL:
value = bool(var.boolVal);
break;
case VT_FILETIME:
SYSTEMTIME sysDate;
if (!FileTimeToSystemTime(&var.filetime, &sysDate))
break;
value = QDate(sysDate.wYear, sysDate.wMonth, sysDate.wDay);
break;
case VT_STREAM:
{
STATSTG stat;
if (FAILED(var.pStream->Stat(&stat, STATFLAG_NONAME)))
break;
void *data = malloc(stat.cbSize.QuadPart);
ULONG read = 0;
if (FAILED(var.pStream->Read(data, stat.cbSize.QuadPart, &read))) {
free(data);
break;
}
value = QImage::fromData(reinterpret_cast<const uchar*>(data), read);
free(data);
}
break;
case VT_VECTOR | VT_LPWSTR:
QStringList vList;
for (ULONG i = 0; i < var.calpwstr.cElems; ++i)
vList.append(QString::fromUtf16(reinterpret_cast<const ushort*>(var.calpwstr.pElems[i])));
value = vList;
break;
}
return value;
}
#endif
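[Editorial aside, not part of the commit] The conversion helper above consumes PROPVARIANTs obtained from a shell property store. A compact, hedged sketch of that standard Windows Property System pattern, independent of this file's plumbing (assumes COM is already initialised by the caller and links shell32 directly, whereas the plugin resolves SHCreateItemFromParsingName at run time):

    #include <ShlObj.h>
    #include <propkey.h>

    // Read System.Title from a media file via the shell property store.
    static bool readTitle(const wchar_t *path, PROPVARIANT *out)
    {
        IShellItem2 *item = 0;
        if (FAILED(SHCreateItemFromParsingName(path, 0, IID_PPV_ARGS(&item))))
            return false;

        IPropertyStore *store = 0;
        bool ok = false;
        if (SUCCEEDED(item->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&store)))) {
            PropVariantInit(out);
            ok = SUCCEEDED(store->GetValue(PKEY_Title, out));  // caller frees with PropVariantClear()
            store->Release();
        }
        item->Release();
        return ok;
    }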
DirectShowMetaDataControl::DirectShowMetaDataControl(QObject *parent) DirectShowMetaDataControl::DirectShowMetaDataControl(QObject *parent)
: QMetaDataReaderControl(parent) : QMetaDataReaderControl(parent)
, m_content(0) , m_available(false)
#ifndef QT_NO_WMSDK
, m_headerInfo(0)
#endif
{ {
} }
@@ -242,75 +336,229 @@ DirectShowMetaDataControl::~DirectShowMetaDataControl()
bool DirectShowMetaDataControl::isMetaDataAvailable() const bool DirectShowMetaDataControl::isMetaDataAvailable() const
{ {
#ifndef QT_NO_WMSDK return m_available;
return m_content || m_headerInfo;
#else
return m_content;
#endif
} }
QVariant DirectShowMetaDataControl::metaData(const QString &key) const QVariant DirectShowMetaDataControl::metaData(const QString &key) const
{ {
QVariant value; return m_metadata.value(key);
#ifndef QT_NO_WMSDK
if (m_headerInfo) {
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
if (qt_wmMetaDataKeys[i].key == key) {
value = getValue(m_headerInfo, qt_wmMetaDataKeys[i].token);
break;
}
}
} else if (m_content) {
#else
if (m_content) {
#endif
BSTR string = 0;
if (key == QMediaMetaData::Author)
m_content->get_AuthorName(&string);
else if (key == QMediaMetaData::Title)
m_content->get_Title(&string);
else if (key == QMediaMetaData::ParentalRating)
m_content->get_Rating(&string);
else if (key == QMediaMetaData::Description)
m_content->get_Description(&string);
else if (key == QMediaMetaData::Copyright)
m_content->get_Copyright(&string);
if (string) {
value = QString::fromUtf16(reinterpret_cast<ushort *>(string), ::SysStringLen(string));
::SysFreeString(string);
}
}
return value;
} }
QStringList DirectShowMetaDataControl::availableMetaData() const QStringList DirectShowMetaDataControl::availableMetaData() const
{ {
return QStringList(); return m_metadata.keys();
} }
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source) static QString convertBSTR(BSTR *string)
{ {
if (m_content) QString value = QString::fromUtf16(reinterpret_cast<ushort *>(*string),
m_content->Release(); ::SysStringLen(*string));
if (!graph || graph->QueryInterface( ::SysFreeString(*string);
IID_IAMMediaContent, reinterpret_cast<void **>(&m_content)) != S_OK) { string = 0;
m_content = 0;
return value;
}
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source, const QString &fileSrc)
{
m_metadata.clear();
#ifndef QT_NO_SHELLITEM
if (!sHCreateItemFromParsingName) {
QSystemLibrary lib(QStringLiteral("shell32"));
sHCreateItemFromParsingName = (q_SHCreateItemFromParsingName)(lib.resolve("SHCreateItemFromParsingName"));
} }
#ifdef QT_NO_WMSDK if (!fileSrc.isEmpty() && sHCreateItemFromParsingName) {
Q_UNUSED(source); IShellItem2* shellItem = 0;
#else if (sHCreateItemFromParsingName(reinterpret_cast<const WCHAR*>(fileSrc.utf16()),
if (m_headerInfo) 0, IID_PPV_ARGS(&shellItem)) == S_OK) {
m_headerInfo->Release();
m_headerInfo = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo); IPropertyStore *pStore = 0;
if (shellItem->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&pStore)) == S_OK) {
DWORD cProps;
if (SUCCEEDED(pStore->GetCount(&cProps))) {
for (DWORD i = 0; i < cProps; ++i)
{
PROPERTYKEY key;
PROPVARIANT var;
PropVariantInit(&var);
if (FAILED(pStore->GetAt(i, &key)))
continue;
if (FAILED(pStore->GetValue(key, &var)))
continue;
if (key == PKEY_Author) {
m_metadata.insert(QMediaMetaData::Author, convertValue(var));
} else if (key == PKEY_Title) {
m_metadata.insert(QMediaMetaData::Title, convertValue(var));
} else if (key == PKEY_Media_SubTitle) {
m_metadata.insert(QMediaMetaData::SubTitle, convertValue(var));
} else if (key == PKEY_ParentalRating) {
m_metadata.insert(QMediaMetaData::ParentalRating, convertValue(var));
} else if (key == PKEY_Comment) {
m_metadata.insert(QMediaMetaData::Description, convertValue(var));
} else if (key == PKEY_Copyright) {
m_metadata.insert(QMediaMetaData::Copyright, convertValue(var));
} else if (key == PKEY_Media_ProviderStyle) {
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
} else if (key == PKEY_Media_Year) {
m_metadata.insert(QMediaMetaData::Year, convertValue(var));
} else if (key == PKEY_Media_DateEncoded) {
m_metadata.insert(QMediaMetaData::Date, convertValue(var));
} else if (key == PKEY_Rating) {
m_metadata.insert(QMediaMetaData::UserRating,
int((convertValue(var).toUInt() - 1) / qreal(98) * 100));
} else if (key == PKEY_Keywords) {
m_metadata.insert(QMediaMetaData::Keywords, convertValue(var));
} else if (key == PKEY_Language) {
m_metadata.insert(QMediaMetaData::Language, convertValue(var));
} else if (key == PKEY_Media_Publisher) {
m_metadata.insert(QMediaMetaData::Publisher, convertValue(var));
} else if (key == PKEY_Media_Duration) {
m_metadata.insert(QMediaMetaData::Duration,
(convertValue(var).toLongLong() + 10000) / 10000);
} else if (key == PKEY_Audio_EncodingBitrate) {
m_metadata.insert(QMediaMetaData::AudioBitRate, convertValue(var));
} else if (key == PKEY_Media_AverageLevel) {
m_metadata.insert(QMediaMetaData::AverageLevel, convertValue(var));
} else if (key == PKEY_Audio_ChannelCount) {
m_metadata.insert(QMediaMetaData::ChannelCount, convertValue(var));
} else if (key == PKEY_Audio_PeakValue) {
m_metadata.insert(QMediaMetaData::PeakValue, convertValue(var));
} else if (key == PKEY_Audio_SampleRate) {
m_metadata.insert(QMediaMetaData::SampleRate, convertValue(var));
} else if (key == PKEY_Music_AlbumTitle) {
m_metadata.insert(QMediaMetaData::AlbumTitle, convertValue(var));
} else if (key == PKEY_Music_AlbumArtist) {
m_metadata.insert(QMediaMetaData::AlbumArtist, convertValue(var));
} else if (key == PKEY_Music_Artist) {
m_metadata.insert(QMediaMetaData::ContributingArtist, convertValue(var));
} else if (key == PKEY_Music_Composer) {
m_metadata.insert(QMediaMetaData::Composer, convertValue(var));
} else if (key == PKEY_Music_Conductor) {
m_metadata.insert(QMediaMetaData::Conductor, convertValue(var));
} else if (key == PKEY_Music_Lyrics) {
m_metadata.insert(QMediaMetaData::Lyrics, convertValue(var));
} else if (key == PKEY_Music_Mood) {
m_metadata.insert(QMediaMetaData::Mood, convertValue(var));
} else if (key == PKEY_Music_TrackNumber) {
m_metadata.insert(QMediaMetaData::TrackNumber, convertValue(var));
} else if (key == PKEY_Music_Genre) {
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
} else if (key == PKEY_ThumbnailStream) {
m_metadata.insert(QMediaMetaData::ThumbnailImage, convertValue(var));
} else if (key == PKEY_Video_FrameHeight) {
QSize res;
res.setHeight(convertValue(var).toUInt());
if (SUCCEEDED(pStore->GetValue(PKEY_Video_FrameWidth, &var)))
res.setWidth(convertValue(var).toUInt());
m_metadata.insert(QMediaMetaData::Resolution, res);
} else if (key == PKEY_Video_HorizontalAspectRatio) {
QSize aspectRatio;
aspectRatio.setWidth(convertValue(var).toUInt());
if (SUCCEEDED(pStore->GetValue(PKEY_Video_VerticalAspectRatio, &var)))
aspectRatio.setHeight(convertValue(var).toUInt());
m_metadata.insert(QMediaMetaData::PixelAspectRatio, aspectRatio);
} else if (key == PKEY_Video_FrameRate) {
m_metadata.insert(QMediaMetaData::VideoFrameRate,
convertValue(var).toReal() / 1000);
} else if (key == PKEY_Video_EncodingBitrate) {
m_metadata.insert(QMediaMetaData::VideoBitRate, convertValue(var));
} else if (key == PKEY_Video_Director) {
m_metadata.insert(QMediaMetaData::Director, convertValue(var));
} else if (key == PKEY_Media_Writer) {
m_metadata.insert(QMediaMetaData::Writer, convertValue(var));
}
PropVariantClear(&var);
}
}
pStore->Release();
}
shellItem->Release();
}
}
if (!m_metadata.isEmpty())
goto send_event;
#endif
#ifndef QT_NO_WMSDK
IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
if (info) {
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
QVariant var = getValue(info, qt_wmMetaDataKeys[i].token);
if (var.isValid()) {
QString key = qt_wmMetaDataKeys[i].key;
if (key == QMediaMetaData::Duration) {
// duration is provided in 100-nanosecond units, convert to milliseconds
var = (var.toLongLong() + 10000) / 10000;
} else if (key == QMediaMetaData::Resolution) {
QSize res;
res.setHeight(var.toUInt());
res.setWidth(getValue(info, L"WM/VideoWidth").toUInt());
var = res;
} else if (key == QMediaMetaData::VideoFrameRate) {
var = var.toReal() / 1000.f;
} else if (key == QMediaMetaData::PixelAspectRatio) {
QSize aspectRatio;
aspectRatio.setWidth(var.toUInt());
aspectRatio.setHeight(getValue(info, L"AspectRatioY").toUInt());
var = aspectRatio;
} else if (key == QMediaMetaData::UserRating) {
var = (var.toUInt() - 1) / qreal(98) * 100;
}
m_metadata.insert(key, var);
}
}
info->Release();
}
if (!m_metadata.isEmpty())
goto send_event;
#endif
{
IAMMediaContent *content = 0;
if ((!graph || graph->QueryInterface(
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)
&& (!source || source->QueryInterface(
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)) {
content = 0;
}
if (content) {
BSTR string = 0;
if (content->get_AuthorName(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Author, convertBSTR(&string));
if (content->get_Title(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Title, convertBSTR(&string));
if (content->get_Description(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Description, convertBSTR(&string));
if (content->get_Rating(&string) == S_OK)
m_metadata.insert(QMediaMetaData::UserRating, convertBSTR(&string));
if (content->get_Copyright(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Copyright, convertBSTR(&string));
content->Release();
}
}
send_event:
// DirectShowMediaPlayerService holds a lock at this point so defer emitting signals to a later
// time.
QCoreApplication::postEvent(this, new QEvent(QEvent::Type(MetaDataChanged)));
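The comment above is the reason for the event round-trip: the service still holds its lock when the metadata is updated, so the signals are emitted later from customEvent(). For reference, a minimal sketch of that defer-via-postEvent pattern; the class and event names below are invented for illustration and are not part of the patch.

#include <QCoreApplication>
#include <QEvent>
#include <QObject>

class DeferredNotifier : public QObject
{
protected:
    enum { NotifyEvent = QEvent::User };

    void scheduleNotify()
    {
        // Safe to call while a lock is held: the event is delivered later,
        // from the receiver's event loop, after the lock has been released.
        QCoreApplication::postEvent(this, new QEvent(QEvent::Type(NotifyEvent)));
    }

    void customEvent(QEvent *event)
    {
        if (event->type() == QEvent::Type(NotifyEvent)) {
            event->accept();
            // emit change signals / notify listeners here, outside the locked section
        } else {
            QObject::customEvent(event);
        }
    }
};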
@@ -321,12 +569,12 @@ void DirectShowMetaDataControl::customEvent(QEvent *event)
if (event->type() == QEvent::Type(MetaDataChanged)) {
event->accept();
bool oldAvailable = m_available;
m_available = !m_metadata.isEmpty();
if (m_available != oldAvailable)
emit metaDataAvailableChanged(m_available);
emit metaDataChanged();
#ifndef QT_NO_WMSDK
emit metaDataAvailableChanged(m_content || m_headerInfo);
#else
emit metaDataAvailableChanged(m_content);
#endif
} else {
QMetaDataReaderControl::customEvent(event);
}
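For reference, a self-contained sketch of the property-store path the new metadata code above relies on: open the file as a shell item, obtain its IPropertyStore, and read one well-known key. This is illustrative only; the file path is a placeholder, error handling is trimmed, and it assumes an MSVC build on Windows Vista or later linked against ole32 and shell32.

#include <initguid.h>   // instantiate the PKEY_* constants used below
#include <ShlObj.h>
#include <propkey.h>
#include <cstdio>

int main()
{
    CoInitializeEx(0, COINIT_MULTITHREADED);

    IShellItem2 *item = 0;
    if (SUCCEEDED(SHCreateItemFromParsingName(L"C:\\media\\clip.wmv", 0, IID_PPV_ARGS(&item)))) {
        IPropertyStore *store = 0;
        if (SUCCEEDED(item->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&store)))) {
            PROPVARIANT var;
            PropVariantInit(&var);
            if (SUCCEEDED(store->GetValue(PKEY_Media_Duration, &var)) && var.vt == VT_UI8) {
                // The shell reports durations in 100-nanosecond units,
                // hence the same (x + 10000) / 10000 rounding as in the patch.
                printf("duration: %llu ms\n", (var.uhVal.QuadPart + 10000) / 10000);
            }
            PropVariantClear(&var);
            store->Release();
        }
        item->Release();
    }

    CoUninitialize();
    return 0;
}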

View File

@@ -46,12 +46,6 @@
#include "directshowglobal.h"
#include <qnetwork.h>
#ifndef QT_NO_WMSDK
#include <wmsdk.h>
#endif
#include <QtCore/qcoreevent.h>
class DirectShowPlayerService;
@@ -70,7 +64,8 @@ public:
QVariant metaData(const QString &key) const;
QStringList availableMetaData() const;
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source);
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source,
const QString &fileSrc = QString());
protected:
void customEvent(QEvent *event);
@@ -81,10 +76,8 @@ private:
MetaDataChanged = QEvent::User
};
IAMMediaContent *m_content;
#ifndef QT_NO_WMSDK
IWMHeaderInfo *m_headerInfo;
#endif
QVariantMap m_metadata;
bool m_available;
};
#endif

View File

@@ -50,6 +50,10 @@
#include "vmr9videowindowcontrol.h"
#endif
#ifndef QT_NO_WMSDK
#include <wmsdk.h>
#endif
#include "qmediacontent.h"
#include <QtCore/qcoreapplication.h>
@@ -268,11 +272,10 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
IBaseFilter *source = 0;
QMediaResource resource = m_resources.takeFirst();
QUrl url = resource.url();
m_url = resource.url();

HRESULT hr = E_FAIL;

if (url.scheme() == QLatin1String("http") || url.scheme() == QLatin1String("https")) {
if (m_url.scheme() == QLatin1String("http") || m_url.scheme() == QLatin1String("https")) {
static const GUID clsid_WMAsfReader = {
0x187463a0, 0x5bb7, 0x11d3, {0xac, 0xbe, 0x00, 0x80, 0xc7, 0x5e, 0x24, 0x6e} };
@@ -283,7 +286,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
if (IFileSourceFilter *fileSource = com_new<IFileSourceFilter>(
clsid_WMAsfReader, iid_IFileSourceFilter)) {
locker->unlock();
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(url.toString().utf16()), 0);
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), 0);
if (SUCCEEDED(hr)) {
source = com_cast<IBaseFilter>(fileSource, IID_IBaseFilter);
@@ -296,11 +299,11 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
fileSource->Release();
locker->relock();
}
} else if (url.scheme() == QLatin1String("qrc")) {
} else if (m_url.scheme() == QLatin1String("qrc")) {
DirectShowRcSource *rcSource = new DirectShowRcSource(m_loop);
locker->unlock();
if (rcSource->open(url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
if (rcSource->open(m_url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
source = rcSource;
else
rcSource->Release();
@@ -310,7 +313,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
if (!SUCCEEDED(hr)) {
locker->unlock();
hr = m_graph->AddSourceFilter(
reinterpret_cast<const OLECHAR *>(url.toString().utf16()), L"Source", &source);
reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), L"Source", &source);
locker->relock();
}
@@ -1128,7 +1131,7 @@ void DirectShowPlayerService::customEvent(QEvent *event)
QMutexLocker locker(&m_mutex);
m_playerControl->updateMediaInfo(m_duration, m_streamTypes, m_seekable);
m_metaDataControl->updateGraph(m_graph, m_source);
m_metaDataControl->updateGraph(m_graph, m_source, m_url.toString());
updateStatus();
} else if (event->type() == QEvent::Type(Error)) {

View File

@@ -43,5 +43,11 @@ qtHaveModule(widgets):!simulator {
$$PWD/vmr9videowindowcontrol.cpp
}
config_wshellitem {
QT += core-private
} else {
DEFINES += QT_NO_SHELLITEM
}
LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32 -lgdi32
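The config_wshellitem test decides at build time whether the shell-item path is compiled in; at run time the plugin still resolves SHCreateItemFromParsingName dynamically so it keeps working on systems where the symbol is missing. A hedged sketch of that resolution step, using the public QLibrary class rather than the plugin's own library wrapper; the function name below is illustrative.

#ifndef QT_NO_SHELLITEM
#include <QLibrary>
#include <ShlObj.h>

// Matches the documented signature of SHCreateItemFromParsingName (shell32, Vista and later).
typedef HRESULT (WINAPI *q_SHCreateItemFromParsingName)(PCWSTR, IBindCtx *, REFIID, void **);

static q_SHCreateItemFromParsingName resolveSHCreateItemFromParsingName()
{
    QLibrary shell32(QStringLiteral("shell32"));
    // Returns 0 if the symbol (or the library) is unavailable; callers must check.
    // QLibrary does not unload the library on destruction, so the pointer stays valid.
    return reinterpret_cast<q_SHCreateItemFromParsingName>(
            shell32.resolve("SHCreateItemFromParsingName"));
}
#endif // QT_NO_SHELLITEM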

View File

@@ -17,7 +17,7 @@ blackberry {
}
qnx {
SUBDIRS += qnx
SUBDIRS += audiocapture qnx
}
win32 {

View File

@@ -276,7 +276,6 @@ void tst_QMediaPlaylist::currentItem()
QCOMPARE(playlist.previousIndex(), 0);
QCOMPARE(playlist.previousIndex(2), -1);
QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
playlist.setCurrentIndex(2);
QCOMPARE(playlist.currentIndex(), -1);
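The removed ignoreMessage() call matches a warning that is no longer emitted; what the test continues to verify is that an out-of-range index simply resets the playlist position. A small, hedged illustration using the public API; the file URLs are placeholders and a Qt application object plus the multimedia module are assumed.

#include <QCoreApplication>
#include <QDebug>
#include <QMediaContent>
#include <QMediaPlaylist>
#include <QUrl>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    QMediaPlaylist playlist;
    playlist.addMedia(QMediaContent(QUrl(QStringLiteral("file:///tmp/one.mp3"))));
    playlist.addMedia(QMediaContent(QUrl(QStringLiteral("file:///tmp/two.mp3"))));

    playlist.setCurrentIndex(2);            // one past the end of a two-item playlist
    qDebug() << playlist.currentIndex();    // -1: jumping outside the range resets the position
    return 0;
}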

View File

@@ -115,8 +115,7 @@ void tst_QMediaPlaylistNavigator::linearPlayback()
QMediaPlaylistNavigator navigator(&playlist);
navigator.setPlaybackMode(QMediaPlaylist::Sequential);
QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
navigator.jump(0);//it's ok to have warning here
navigator.jump(0);
QVERIFY(navigator.currentItem().isNull());
QCOMPARE(navigator.currentIndex(), -1);
@@ -169,7 +168,6 @@ void tst_QMediaPlaylistNavigator::loopPlayback()
QMediaPlaylistNavigator navigator(&playlist);
navigator.setPlaybackMode(QMediaPlaylist::Loop);
QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
navigator.jump(0);
QVERIFY(navigator.currentItem().isNull());
QCOMPARE(navigator.currentIndex(), -1);