Merge remote-tracking branch 'origin/stable' into dev
Conflicts:
	src/plugins/blackberry/camera/bbcamerasession.cpp

Change-Id: I7c86e10140ab86fd2a07e2f034dec38ae9112559
config.tests/wshellitem/main.cpp (new file, 50 lines)
@@ -0,0 +1,50 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include <ShlObj.h>

int main(int, char**)
{
    IShellItem2 *item;
    IPropertyStore *store;

    return 0;
}
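(Note: the config test above only needs to compile; it probes whether the platform SDK's ShlObj.h declares IShellItem2 and IPropertyStore. The snippet below is a purely illustrative sketch, not part of this commit, of the kind of shell property-store access such a check typically gates; the helper name is hypothetical.)

// Illustration only -- not part of this commit.
// Compiles only when the SDK declares IShellItem2/IPropertyStore,
// which is exactly what the config test above verifies.
#include <ShlObj.h>

static bool hasPropertyStore(IShellItem2 *item)
{
    IPropertyStore *store = 0;
    // IShellItem2::GetPropertyStore() fills 'store' on success.
    HRESULT hr = item->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&store));
    if (SUCCEEDED(hr) && store)
        store->Release();
    return SUCCEEDED(hr);
}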
config.tests/wshellitem/wshellitem.pro (new file, 4 lines)
@@ -0,0 +1,4 @@
CONFIG -= qt
CONFIG += console

SOURCES += main.cpp
@@ -45,6 +45,7 @@
#include <QMediaRecorder>

#include "audiorecorder.h"
#include "qaudiolevel.h"

#if defined(Q_WS_MAEMO_6)
#include "ui_audiorecorder_small.h"
@@ -53,10 +54,10 @@
#endif

static qreal getPeakValue(const QAudioFormat &format);
static qreal getBufferLevel(const QAudioBuffer &buffer);
static QVector<qreal> getBufferLevels(const QAudioBuffer &buffer);

template <class T>
static qreal getBufferLevel(const T *buffer, int samples);
static QVector<qreal> getBufferLevels(const T *buffer, int frames, int channels);

AudioRecorder::AudioRecorder(QWidget *parent) :
    QMainWindow(parent),
@@ -67,7 +68,8 @@ AudioRecorder::AudioRecorder(QWidget *parent) :

    audioRecorder = new QAudioRecorder(this);
    probe = new QAudioProbe;
    connect(probe, SIGNAL(audioBufferProbed(QAudioBuffer)), this, SLOT(processBuffer(QAudioBuffer)));
    connect(probe, SIGNAL(audioBufferProbed(QAudioBuffer)),
            this, SLOT(processBuffer(QAudioBuffer)));
    probe->setSource(audioRecorder);

    //audio devices
@@ -88,27 +90,34 @@ AudioRecorder::AudioRecorder(QWidget *parent) :
        ui->containerBox->addItem(containerName, QVariant(containerName));
    }

    //sample rate:
    //sample rate
    ui->sampleRateBox->addItem(tr("Default"), QVariant(0));
    foreach (int sampleRate, audioRecorder->supportedAudioSampleRates()) {
        ui->sampleRateBox->addItem(QString::number(sampleRate), QVariant(
                sampleRate));
    }

    //channels
    ui->channelsBox->addItem(tr("Default"), QVariant(-1));
    ui->channelsBox->addItem(QStringLiteral("1"), QVariant(1));
    ui->channelsBox->addItem(QStringLiteral("2"), QVariant(2));
    ui->channelsBox->addItem(QStringLiteral("4"), QVariant(4));

    //quality
    ui->qualitySlider->setRange(0, int(QMultimedia::VeryHighQuality));
    ui->qualitySlider->setValue(int(QMultimedia::NormalQuality));

    //bitrates:
    ui->bitrateBox->addItem(QString("Default"), QVariant(0));
    ui->bitrateBox->addItem(QString("32000"), QVariant(32000));
    ui->bitrateBox->addItem(QString("64000"), QVariant(64000));
    ui->bitrateBox->addItem(QString("96000"), QVariant(96000));
    ui->bitrateBox->addItem(QString("128000"), QVariant(128000));
    ui->bitrateBox->addItem(tr("Default"), QVariant(0));
    ui->bitrateBox->addItem(QStringLiteral("32000"), QVariant(32000));
    ui->bitrateBox->addItem(QStringLiteral("64000"), QVariant(64000));
    ui->bitrateBox->addItem(QStringLiteral("96000"), QVariant(96000));
    ui->bitrateBox->addItem(QStringLiteral("128000"), QVariant(128000));

    connect(audioRecorder, SIGNAL(durationChanged(qint64)), this,
            SLOT(updateProgress(qint64)));
    connect(audioRecorder, SIGNAL(stateChanged(QMediaRecorder::State)), this,
            SLOT(updateState(QMediaRecorder::State)));
    connect(audioRecorder, SIGNAL(statusChanged(QMediaRecorder::Status)), this,
            SLOT(updateStatus(QMediaRecorder::Status)));
    connect(audioRecorder, SIGNAL(error(QMediaRecorder::Error)), this,
            SLOT(displayErrorMessage()));
}
@@ -127,32 +136,47 @@ void AudioRecorder::updateProgress(qint64 duration)
    ui->statusbar->showMessage(tr("Recorded %1 sec").arg(duration / 1000));
}

void AudioRecorder::updateState(QMediaRecorder::State state)
void AudioRecorder::updateStatus(QMediaRecorder::Status status)
{
    QString statusMessage;

    switch (state) {
    case QMediaRecorder::RecordingState:
        ui->recordButton->setText(tr("Stop"));
        ui->pauseButton->setText(tr("Pause"));
        if (audioRecorder->outputLocation().isEmpty())
            statusMessage = tr("Recording");
        else
            statusMessage = tr("Recording to %1").arg(
    switch (status) {
    case QMediaRecorder::RecordingStatus:
        if (audioLevels.count() != audioRecorder->audioSettings().channelCount()) {
            qDeleteAll(audioLevels);
            audioLevels.clear();
            for (int i = 0; i < audioRecorder->audioSettings().channelCount(); ++i) {
                QAudioLevel *level = new QAudioLevel(ui->centralwidget);
                audioLevels.append(level);
                ui->levelsLayout->addWidget(level);
            }
        }

        ui->recordButton->setText(tr("Stop"));
        ui->pauseButton->setText(tr("Pause"));
        if (audioRecorder->outputLocation().isEmpty())
            statusMessage = tr("Recording");
        else
            statusMessage = tr("Recording to %1").arg(
                    audioRecorder->outputLocation().toString());
        break;
    case QMediaRecorder::PausedState:
        ui->recordButton->setText(tr("Stop"));
        ui->pauseButton->setText(tr("Resume"));
        statusMessage = tr("Paused");
        break;
    case QMediaRecorder::StoppedState:
        ui->recordButton->setText(tr("Record"));
        ui->pauseButton->setText(tr("Pause"));
        statusMessage = tr("Stopped");
        break;
    case QMediaRecorder::PausedStatus:
        clearAudioLevels();
        ui->recordButton->setText(tr("Stop"));
        ui->pauseButton->setText(tr("Resume"));
        statusMessage = tr("Paused");
        break;
    case QMediaRecorder::UnloadedStatus:
        clearAudioLevels();
        ui->recordButton->setText(tr("Record"));
        ui->pauseButton->setText(tr("Pause"));
        statusMessage = tr("Stopped");
    default:
        break;
    }

    ui->pauseButton->setEnabled(state != QMediaRecorder::StoppedState);
    ui->pauseButton->setEnabled(audioRecorder->state()
            != QMediaRecorder::StoppedState);

    if (audioRecorder->error() == QMediaRecorder::NoError)
        ui->statusbar->showMessage(statusMessage);
@@ -176,6 +200,7 @@ void AudioRecorder::toggleRecord()
        settings.setCodec(boxValue(ui->audioCodecBox).toString());
        settings.setSampleRate(boxValue(ui->sampleRateBox).toInt());
        settings.setBitRate(boxValue(ui->bitrateBox).toInt());
        settings.setChannelCount(boxValue(ui->channelsBox).toInt());
        settings.setQuality(QMultimedia::EncodingQuality(ui->qualitySlider->value()));
        settings.setEncodingMode(ui->constantQualityRadioButton->isChecked() ?
                QMultimedia::ConstantQualityEncoding :
@@ -202,7 +227,7 @@ void AudioRecorder::togglePause()
void AudioRecorder::setOutputLocation()
{
    QString fileName = QFileDialog::getSaveFileName();
    audioRecorder->setOutputLocation(QUrl(fileName));
    audioRecorder->setOutputLocation(QUrl::fromLocalFile(fileName));
    outputLocationSet = true;
}
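(Note on the hunk above: constructing QUrl directly from a native path such as "C:/..." parses the drive letter as a URL scheme, whereas QUrl::fromLocalFile() builds a proper file URL. A small illustrative sketch, not part of this commit:)

#include <QString>
#include <QUrl>

// Illustration only -- not part of this commit.
static void urlFromPathExample()
{
    const QString fileName = QStringLiteral("C:/recordings/take1.wav");
    QUrl broken(fileName);                      // parsed with scheme "c", not a file URL
    QUrl local = QUrl::fromLocalFile(fileName); // yields "file:///C:/recordings/take1.wav"
}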
@@ -211,88 +236,121 @@ void AudioRecorder::displayErrorMessage()
    ui->statusbar->showMessage(audioRecorder->errorString());
}

void AudioRecorder::clearAudioLevels()
{
    for (int i = 0; i < audioLevels.size(); ++i)
        audioLevels.at(i)->setLevel(0);
}

// This function returns the maximum possible sample value for a given audio format
qreal getPeakValue(const QAudioFormat& format)
{
    // Note: Only the most common sample formats are supported
    if (!format.isValid())
        return 0.0;
        return qreal(0);

    if (format.codec() != "audio/pcm")
        return 0.0;
        return qreal(0);

    switch (format.sampleType()) {
    case QAudioFormat::Unknown:
        break;
    case QAudioFormat::Float:
        if (format.sampleSize() != 32) // other sample formats are not supported
            return 0.0;
        return 1.00003;
            return qreal(0);
        return qreal(1.00003);
    case QAudioFormat::SignedInt:
        if (format.sampleSize() == 32)
            return 2147483648.0;
            return qreal(INT_MAX);
        if (format.sampleSize() == 16)
            return 32768.0;
            return qreal(SHRT_MAX);
        if (format.sampleSize() == 8)
            return 128.0;
            return qreal(CHAR_MAX);
        break;
    case QAudioFormat::UnSignedInt:
        // Unsigned formats are not supported in this example
        if (format.sampleSize() == 32)
            return qreal(UINT_MAX);
        if (format.sampleSize() == 16)
            return qreal(USHRT_MAX);
        if (format.sampleSize() == 8)
            return qreal(UCHAR_MAX);
        break;
    }

    return 0.0;
    return qreal(0);
}

qreal getBufferLevel(const QAudioBuffer& buffer)
// returns the audio level for each channel
QVector<qreal> getBufferLevels(const QAudioBuffer& buffer)
{
    QVector<qreal> values;

    if (!buffer.format().isValid() || buffer.format().byteOrder() != QAudioFormat::LittleEndian)
        return 0.0;
        return values;

    if (buffer.format().codec() != "audio/pcm")
        return 0.0;
        return values;

    int channelCount = buffer.format().channelCount();
    values.fill(0, channelCount);
    qreal peak_value = getPeakValue(buffer.format());
    if (qFuzzyCompare(peak_value, 0.0))
        return 0.0;
    if (qFuzzyCompare(peak_value, qreal(0)))
        return values;

    switch (buffer.format().sampleType()) {
    case QAudioFormat::Unknown:
    case QAudioFormat::UnSignedInt:
        if (buffer.format().sampleSize() == 32)
            values = getBufferLevels(buffer.constData<quint32>(), buffer.frameCount(), channelCount);
        if (buffer.format().sampleSize() == 16)
            values = getBufferLevels(buffer.constData<quint16>(), buffer.frameCount(), channelCount);
        if (buffer.format().sampleSize() == 8)
            values = getBufferLevels(buffer.constData<quint8>(), buffer.frameCount(), channelCount);
        for (int i = 0; i < values.size(); ++i)
            values[i] = qAbs(values.at(i) - peak_value / 2) / (peak_value / 2);
        break;
    case QAudioFormat::Float:
        if (buffer.format().sampleSize() == 32)
            return getBufferLevel(buffer.constData<float>(), buffer.sampleCount()) / peak_value;
        if (buffer.format().sampleSize() == 32) {
            values = getBufferLevels(buffer.constData<float>(), buffer.frameCount(), channelCount);
            for (int i = 0; i < values.size(); ++i)
                values[i] /= peak_value;
        }
        break;
    case QAudioFormat::SignedInt:
        if (buffer.format().sampleSize() == 32)
            return getBufferLevel(buffer.constData<long int>(), buffer.sampleCount()) / peak_value;
            values = getBufferLevels(buffer.constData<qint32>(), buffer.frameCount(), channelCount);
        if (buffer.format().sampleSize() == 16)
            return getBufferLevel(buffer.constData<short int>(), buffer.sampleCount()) / peak_value;
            values = getBufferLevels(buffer.constData<qint16>(), buffer.frameCount(), channelCount);
        if (buffer.format().sampleSize() == 8)
            return getBufferLevel(buffer.constData<signed char>(), buffer.sampleCount()) / peak_value;
            values = getBufferLevels(buffer.constData<qint8>(), buffer.frameCount(), channelCount);
        for (int i = 0; i < values.size(); ++i)
            values[i] /= peak_value;
        break;
    }

    return 0.0;
    return values;
}

template <class T>
qreal getBufferLevel(const T *buffer, int samples)
QVector<qreal> getBufferLevels(const T *buffer, int frames, int channels)
{
    qreal max_value = 0.0;
    QVector<qreal> max_values;
    max_values.fill(0, channels);

    for (int i = 0; i < samples; ++i) {
        qreal value = qAbs(qreal(buffer[i]));
        if (value > max_value)
            max_value = value;
    for (int i = 0; i < frames; ++i) {
        for (int j = 0; j < channels; ++j) {
            qreal value = qAbs(qreal(buffer[i * channels + j]));
            if (value > max_values.at(j))
                max_values.replace(j, value);
        }
    }

    return max_value;
    return max_values;
}

void AudioRecorder::processBuffer(const QAudioBuffer& buffer)
{
    qreal level = getBufferLevel(buffer);
    ui->audioLevel->setLevel(level);
    QVector<qreal> levels = getBufferLevels(buffer);
    for (int i = 0; i < levels.count(); ++i)
        audioLevels.at(i)->setLevel(levels.at(i));
}

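(Note on the new getBufferLevels() above: samples are interleaved frame by frame, so channel j of frame i lives at buffer[i * channels + j], and unsigned formats are re-centred around half the peak before scaling to 0..1. A self-contained worked sketch of the same normalisation, not part of this commit, assuming 8-bit unsigned stereo PCM:)

// Illustration only -- not part of this commit.
// Mirrors the UnSignedInt branch above for 8-bit unsigned, 2-channel PCM.
#include <QtGlobal>
#include <QVector>

static QVector<qreal> levelsForU8Stereo(const quint8 *buffer, int frames)
{
    const int channels = 2;
    const qreal peak = 255.0;                 // UCHAR_MAX, as returned by getPeakValue()
    QVector<qreal> levels(channels, 0.0);
    // Per-channel maximum of the raw interleaved samples.
    for (int i = 0; i < frames; ++i)
        for (int j = 0; j < channels; ++j)
            levels[j] = qMax(levels[j], qAbs(qreal(buffer[i * channels + j])));
    // Silence sits at peak/2 (127.5), so re-centre before scaling to 0..1,
    // exactly as the UnSignedInt branch above does.
    for (int j = 0; j < channels; ++j)
        levels[j] = qAbs(levels[j] - peak / 2) / (peak / 2);
    return levels;
}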
@@ -52,6 +52,8 @@ class QAudioProbe;
class QAudioBuffer;
QT_END_NAMESPACE

class QAudioLevel;

class AudioRecorder : public QMainWindow
{
    Q_OBJECT
@@ -68,15 +70,18 @@ private slots:
    void togglePause();
    void toggleRecord();

    void updateState(QMediaRecorder::State);
    void updateStatus(QMediaRecorder::Status);
    void updateProgress(qint64 pos);
    void displayErrorMessage();

private:
    void clearAudioLevels();

    Ui::AudioRecorder *ui;

    QAudioRecorder *audioRecorder;
    QAudioProbe *probe;
    QList<QAudioLevel*> audioLevels;
    bool outputLocationSet;

};

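(Note: the probe, audioRecorder and audioLevels members added above are wired together in the audiorecorder.cpp hunks earlier in this commit. A minimal, self-contained sketch of that wiring, not part of the commit and assuming Qt 5 Multimedia:)

// Illustration only -- not part of this commit.
// 'receiver' must declare a processBuffer(QAudioBuffer) slot.
#include <QAudioProbe>
#include <QAudioRecorder>
#include <QObject>

static void attachProbe(QAudioRecorder *recorder, QAudioProbe *probe, QObject *receiver)
{
    QObject::connect(probe, SIGNAL(audioBufferProbed(QAudioBuffer)),
                     receiver, SLOT(processBuffer(QAudioBuffer)));
    probe->setSource(recorder); // returns false if this recorder cannot be monitored
}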
@@ -17,10 +17,10 @@
<layout class="QGridLayout" name="gridLayout_3">
<item row="0" column="0" colspan="3">
<layout class="QGridLayout" name="gridLayout_2">
<item row="0" column="0">
<widget class="QLabel" name="label">
<item row="3" column="0">
<widget class="QLabel" name="label_4">
<property name="text">
<string>Input Device:</string>
<string>Sample rate:</string>
</property>
</widget>
</item>
@@ -34,8 +34,12 @@
</property>
</widget>
</item>
<item row="1" column="1">
<widget class="QComboBox" name="audioCodecBox"/>
<item row="0" column="0">
<widget class="QLabel" name="label">
<property name="text">
<string>Input Device:</string>
</property>
</widget>
</item>
<item row="2" column="0">
<widget class="QLabel" name="label_3">
@@ -47,15 +51,21 @@
<item row="2" column="1">
<widget class="QComboBox" name="containerBox"/>
</item>
<item row="3" column="0">
<widget class="QLabel" name="label_4">
<item row="3" column="1">
<widget class="QComboBox" name="sampleRateBox"/>
</item>
<item row="1" column="1">
<widget class="QComboBox" name="audioCodecBox"/>
</item>
<item row="4" column="0">
<widget class="QLabel" name="label_5">
<property name="text">
<string>Sample rate:</string>
<string>Channels:</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QComboBox" name="sampleRateBox"/>
<item row="4" column="1">
<widget class="QComboBox" name="channelsBox"/>
</item>
</layout>
</item>
@@ -162,9 +172,8 @@
</property>
</widget>
</item>
<item row="3" column="1" colspan="-1">
<widget class="QAudioLevel" name="audioLevel">
</widget>
<item row="3" column="1" colspan="2">
<layout class="QVBoxLayout" name="levelsLayout"/>
</item>
</layout>
</widget>

@@ -28,57 +28,33 @@
<rect>
<x>0</x>
<y>0</y>
<width>398</width>
<height>275</height>
<width>400</width>
<height>277</height>
</rect>
</property>
<layout class="QGridLayout" name="gridLayout_4">
<item row="0" column="0">
<widget class="QWidget" name="widget" native="true">
<layout class="QGridLayout" name="gridLayout_3">
<item row="0" column="0">
<layout class="QGridLayout" name="gridLayout_2">
<item row="0" column="0">
<widget class="QLabel" name="label">
<property name="text">
<string>Input Device:</string>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QComboBox" name="audioDeviceBox"/>
</item>
<item row="1" column="0">
<widget class="QLabel" name="label_2">
<property name="text">
<string>Audio Codec:</string>
</property>
</widget>
</item>
<item row="1" column="1">
<widget class="QComboBox" name="audioCodecBox"/>
</item>
<item row="2" column="0">
<widget class="QLabel" name="label_3">
<property name="text">
<string>File Container:</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QComboBox" name="containerBox"/>
</item>
<item row="3" column="0">
<widget class="QLabel" name="label_4">
<property name="text">
<string>Sample rate:</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QComboBox" name="sampleRateBox"/>
</item>
</layout>
<item row="3" column="0">
<widget class="QLabel" name="label_6">
<property name="text">
<string>Audio Level:</string>
</property>
</widget>
</item>
<item row="2" column="0">
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>29</height>
</size>
</property>
</spacer>
</item>
<item row="1" column="0">
<layout class="QGridLayout" name="gridLayout">
@@ -127,18 +103,62 @@
</item>
</layout>
</item>
<item row="2" column="0">
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>29</height>
</size>
</property>
</spacer>
<item row="0" column="0">
<layout class="QGridLayout" name="gridLayout_2">
<item row="3" column="0">
<widget class="QLabel" name="label_4">
<property name="text">
<string>Sample rate:</string>
</property>
</widget>
</item>
<item row="1" column="0">
<widget class="QLabel" name="label_2">
<property name="text">
<string>Audio Codec:</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QComboBox" name="containerBox"/>
</item>
<item row="0" column="1">
<widget class="QComboBox" name="audioDeviceBox"/>
</item>
<item row="3" column="1">
<widget class="QComboBox" name="sampleRateBox"/>
</item>
<item row="1" column="1">
<widget class="QComboBox" name="audioCodecBox"/>
</item>
<item row="2" column="0">
<widget class="QLabel" name="label_3">
<property name="text">
<string>File Container:</string>
</property>
</widget>
</item>
<item row="0" column="0">
<widget class="QLabel" name="label">
<property name="text">
<string>Input Device:</string>
</property>
</widget>
</item>
<item row="4" column="0">
<widget class="QLabel" name="label_5">
<property name="text">
<string>Channels:</string>
</property>
</widget>
</item>
<item row="4" column="1">
<widget class="QComboBox" name="channelsBox"/>
</item>
</layout>
</item>
<item row="4" column="0">
<layout class="QVBoxLayout" name="levelsLayout"/>
</item>
</layout>
</widget>

@@ -45,6 +45,8 @@ QAudioLevel::QAudioLevel(QWidget *parent)
    : QWidget(parent)
    , m_level(0.0)
{
    setMinimumHeight(15);
    setMaximumHeight(50);
}

void QAudioLevel::setLevel(qreal level)

@@ -83,10 +83,6 @@ Rectangle {
        highlight: Rectangle { color: "gray"; radius: 5 }
        currentIndex: indexForValue(propertyPopup.currentValue)

        onCurrentIndexChanged: {
            propertyPopup.currentValue = model.get(view.currentIndex).value
        }

        delegate: Item {
            width: propertyPopup.itemWidth
            height: 70

@@ -4,7 +4,9 @@ load(configure)
qtCompileTest(openal)
qtCompileTest(opensles)
win32 {
    qtCompileTest(directshow)
    qtCompileTest(directshow) {
        qtCompileTest(wshellitem)
    }
    qtCompileTest(wmsdk)
    qtCompileTest(wmp)
    contains(QT_CONFIG, wmf-backend): qtCompileTest(wmf)

@@ -92,9 +92,9 @@ void QDeclarativeAttenuationModel::setName(const QString& name)
/*!
    \qmltype AttenuationModelLinear
    \instantiates QDeclarativeAttenuationModelLinear
    \since 1.0
    \since 5.0
    \brief Defines a linear attenuation curve for a \l Sound.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary
@@ -225,9 +225,9 @@ qreal QDeclarativeAttenuationModelLinear::calculateGain(const QVector3D &listene
    \qmltype AttenuationModelInverse
    \instantiates QDeclarativeAttenuationModelInverse

    \since 1.0
    \since 5.0
    \brief Defines a non-linear attenuation curve for a \l Sound.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -49,9 +49,9 @@ QT_USE_NAMESPACE
/*!
    \qmltype AudioCategory
    \instantiates QDeclarativeAudioCategory
    \since 1.0
    \since 5.0
    \brief Control all active sound instances by group.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -58,9 +58,9 @@ QT_BEGIN_NAMESPACE
/*!
    \qmltype AudioEngine
    \instantiates QDeclarativeAudioEngine
    \since 1.0
    \since 5.0
    \brief Organize all your 3d audio content in one place.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -50,9 +50,9 @@ QT_USE_NAMESPACE
/*!
    \qmltype AudioListener
    \instantiates QDeclarativeAudioListener
    \since 1.0
    \since 5.0
    \brief Control global listener parameters.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -52,9 +52,9 @@ QT_USE_NAMESPACE
/*!
    \qmltype AudioSample
    \instantiates QDeclarativeAudioSample
    \since 1.0
    \since 5.0
    \brief Load audio samples, mostly .wav.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -51,12 +51,12 @@ QT_USE_NAMESPACE
/*!
    \qmltype PlayVariation
    \instantiates QDeclarativePlayVariation
    \since 1.0
    \since 5.0
    \brief Define a playback variation for \l {Sound} {sounds}.
    So each time the playback of the same sound can be a slightly different even with the same
    AudioSample.

    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -140,10 +140,10 @@ void QDeclarativeSoundCone::componentComplete()
/*!
    \qmltype Sound
    \instantiates QDeclarativeSound
    \since 1.0
    \since 5.0
    \brief Define a variety of samples and parameters to be used for
    SoundInstance.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -53,9 +53,9 @@ QT_USE_NAMESPACE
/*!
    \qmltype SoundInstance
    \instantiates QDeclarativeSoundInstance
    \since 1.0
    \since 5.0
    \brief Play 3d audio content.
    \inqmlmodule QtAudioEngine 1.0
    \inqmlmodule QtAudioEngine
    \ingroup multimedia_audioengine
    \inherits Item
    \preliminary

@@ -47,7 +47,7 @@ import QtMultimedia 5.0
    \inherits Item
    \ingroup multimedia_qml
    \ingroup multimedia_video_qml
    \inqmlmodule QtMultimedia 5.0
    \inqmlmodule QtMultimedia
    \brief A convenience type for showing a specified video.

    \c Video is a convenience type combining the functionality

(File diff suppressed because it is too large.)
@@ -79,7 +79,7 @@ void QDeclarativeCamera::_q_availabilityChanged(QMultimedia::AvailabilityStatus
|
||||
\brief Access viewfinder frames, and take photos and movies.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
|
||||
\inherits Item
|
||||
|
||||
@@ -223,11 +223,11 @@ QDeclarativeCamera::Error QDeclarativeCamera::errorCode() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::Camera::errorString
|
||||
\qmlproperty string QtMultimedia::Camera::errorString
|
||||
|
||||
This property holds the last error string, if any.
|
||||
|
||||
\sa QtMultimedia5::Camera::onError
|
||||
\sa QtMultimedia::Camera::onError
|
||||
*/
|
||||
QString QDeclarativeCamera::errorString() const
|
||||
{
|
||||
@@ -235,7 +235,7 @@ QString QDeclarativeCamera::errorString() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::availability
|
||||
\qmlproperty enumeration QtMultimedia::Camera::availability
|
||||
|
||||
This property holds the availability state of the camera.
|
||||
|
||||
@@ -262,7 +262,7 @@ QDeclarativeCamera::Availability QDeclarativeCamera::availability() const
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::captureMode
|
||||
\qmlproperty enumeration QtMultimedia::Camera::captureMode
|
||||
|
||||
This property holds the camera capture mode, which can be one of the
|
||||
following:
|
||||
@@ -293,7 +293,7 @@ void QDeclarativeCamera::setCaptureMode(QDeclarativeCamera::CaptureMode mode)
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::cameraState
|
||||
\qmlproperty enumeration QtMultimedia::Camera::cameraState
|
||||
|
||||
This property holds the camera object's current state, which can be one of the following:
|
||||
|
||||
@@ -330,7 +330,7 @@ QDeclarativeCamera::State QDeclarativeCamera::cameraState() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::cameraStatus
|
||||
\qmlproperty enumeration QtMultimedia::Camera::cameraStatus
|
||||
|
||||
This property holds the camera object's current status, which can be one of the following:
|
||||
|
||||
@@ -407,7 +407,7 @@ void QDeclarativeCamera::setCameraState(QDeclarativeCamera::State state)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Camera::start()
|
||||
\qmlmethod QtMultimedia::Camera::start()
|
||||
|
||||
Starts the camera. Viewfinder frames will
|
||||
be available and image or movie capture will
|
||||
@@ -419,7 +419,7 @@ void QDeclarativeCamera::start()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Camera::stop()
|
||||
\qmlmethod QtMultimedia::Camera::stop()
|
||||
|
||||
Stops the camera, but leaves the camera
|
||||
stack loaded.
|
||||
@@ -431,7 +431,7 @@ void QDeclarativeCamera::stop()
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::lockStatus
|
||||
\qmlproperty enumeration QtMultimedia::Camera::lockStatus
|
||||
|
||||
This property holds the status of all the requested camera locks.
|
||||
|
||||
@@ -492,7 +492,7 @@ QDeclarativeCamera::LockStatus QDeclarativeCamera::lockStatus() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Camera::searchAndLock()
|
||||
\qmlmethod QtMultimedia::Camera::searchAndLock()
|
||||
|
||||
Start focusing, exposure and white balance calculation.
|
||||
|
||||
@@ -507,7 +507,7 @@ void QDeclarativeCamera::searchAndLock()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Camera::unlock()
|
||||
\qmlmethod QtMultimedia::Camera::unlock()
|
||||
|
||||
Unlock focus, exposure and white balance locks.
|
||||
*/
|
||||
@@ -521,7 +521,7 @@ void QDeclarativeCamera::unlock()
|
||||
This property holds the maximum optical zoom factor supported, or 1.0 if optical zoom is not supported.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::Camera::maximumOpticalZoom
|
||||
\qmlproperty real QtMultimedia::Camera::maximumOpticalZoom
|
||||
|
||||
This property holds the maximum optical zoom factor supported, or 1.0 if optical zoom is not supported.
|
||||
*/
|
||||
@@ -535,7 +535,7 @@ qreal QDeclarativeCamera::maximumOpticalZoom() const
|
||||
This property holds the maximum digital zoom factor supported, or 1.0 if digital zoom is not supported.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::Camera::maximumDigitalZoom
|
||||
\qmlproperty real QtMultimedia::Camera::maximumDigitalZoom
|
||||
|
||||
This property holds the maximum digital zoom factor supported, or 1.0 if digital zoom is not supported.
|
||||
*/
|
||||
@@ -550,7 +550,7 @@ qreal QDeclarativeCamera::maximumDigitalZoom() const
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::Camera::opticalZoom
|
||||
\qmlproperty real QtMultimedia::Camera::opticalZoom
|
||||
|
||||
This property holds the current optical zoom factor.
|
||||
*/
|
||||
@@ -569,7 +569,7 @@ void QDeclarativeCamera::setOpticalZoom(qreal value)
|
||||
This property holds the current digital zoom factor.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::Camera::digitalZoom
|
||||
\qmlproperty real QtMultimedia::Camera::digitalZoom
|
||||
|
||||
This property holds the current digital zoom factor.
|
||||
*/
|
||||
@@ -584,21 +584,21 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty variant QtMultimedia5::Camera::mediaObject
|
||||
\qmlproperty variant QtMultimedia::Camera::mediaObject
|
||||
|
||||
This property holds the media object for the camera.
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Camera::errorCode
|
||||
\qmlproperty enumeration QtMultimedia::Camera::errorCode
|
||||
|
||||
This property holds the last error code.
|
||||
|
||||
\sa QtMultimedia5::Camera::onError
|
||||
\sa QtMultimedia::Camera::onError
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::Camera::onError(errorCode, errorString)
|
||||
\qmlsignal QtMultimedia::Camera::onError(errorCode, errorString)
|
||||
|
||||
This handler is called when an error occurs. The enumeration value
|
||||
\a errorCode is one of the values defined below, and a descriptive string
|
||||
|
||||
@@ -54,7 +54,7 @@ QT_BEGIN_NAMESPACE
|
||||
\instantiates QDeclarativeCameraCapture
|
||||
\brief An interface for capturing camera images
|
||||
\ingroup multimedia_qml
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\ingroup camera_qml
|
||||
|
||||
This type allows you to capture still images and be notified when they
|
||||
@@ -137,7 +137,7 @@ QDeclarativeCameraCapture::~QDeclarativeCameraCapture()
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::CameraCapture::ready
|
||||
\qmlproperty bool QtMultimedia::CameraCapture::ready
|
||||
|
||||
This property holds a bool value indicating whether the camera
|
||||
is ready to capture photos or not.
|
||||
@@ -154,7 +154,7 @@ bool QDeclarativeCameraCapture::isReadyForCapture() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraCapture::capture()
|
||||
\qmlmethod QtMultimedia::CameraCapture::capture()
|
||||
|
||||
Start image capture. The \l onImageCaptured() and \l onImageSaved() signals will
|
||||
be emitted when the capture is complete.
|
||||
@@ -177,7 +177,7 @@ int QDeclarativeCameraCapture::capture()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraCapture::captureToLocation(location)
|
||||
\qmlmethod QtMultimedia::CameraCapture::captureToLocation(location)
|
||||
|
||||
Start image capture to specified \a location. The \l onImageCaptured() and \l onImageSaved() signals will
|
||||
be emitted when the capture is complete.
|
||||
@@ -196,7 +196,7 @@ int QDeclarativeCameraCapture::captureToLocation(const QString &location)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraCapture::cancelCapture()
|
||||
\qmlmethod QtMultimedia::CameraCapture::cancelCapture()
|
||||
|
||||
Cancel pending image capture requests.
|
||||
*/
|
||||
@@ -211,7 +211,7 @@ void QDeclarativeCameraCapture::cancelCapture()
|
||||
This property holds the location of the last captured image.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraCapture::capturedImagePath
|
||||
\qmlproperty string QtMultimedia::CameraCapture::capturedImagePath
|
||||
|
||||
This property holds the location of the last captured image.
|
||||
*/
|
||||
@@ -254,7 +254,7 @@ void QDeclarativeCameraCapture::_q_captureFailed(int id, QCameraImageCapture::Er
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty size QtMultimedia5::CameraCapture::resolution
|
||||
\qmlproperty size QtMultimedia::CameraCapture::resolution
|
||||
|
||||
This property holds the resolution/size of the image to be captured.
|
||||
If empty, the system chooses the appropriate resolution.
|
||||
@@ -285,7 +285,7 @@ QCameraImageCapture::Error QDeclarativeCameraCapture::error() const
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraCapture::errorString
|
||||
\qmlproperty string QtMultimedia::CameraCapture::errorString
|
||||
|
||||
This property holds the error message related to the last capture.
|
||||
*/
|
||||
@@ -295,7 +295,7 @@ QString QDeclarativeCameraCapture::errorString() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraCapture::setMetadata(key, value)
|
||||
\qmlmethod QtMultimedia::CameraCapture::setMetadata(key, value)
|
||||
|
||||
|
||||
Sets a particular metadata \a key to \a value for the subsequent image captures.
|
||||
@@ -309,14 +309,14 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraCapture::onCaptureFailed(requestId, message)
|
||||
\qmlsignal QtMultimedia::CameraCapture::onCaptureFailed(requestId, message)
|
||||
|
||||
This handler is called when an error occurs during capture with \a requestId.
|
||||
A descriptive message is available in \a message.
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraCapture::onImageCaptured(requestId, preview)
|
||||
\qmlsignal QtMultimedia::CameraCapture::onImageCaptured(requestId, preview)
|
||||
|
||||
This handler is called when an image with \a requestId has been captured
|
||||
but not yet saved to the filesystem. The \a preview
|
||||
@@ -326,7 +326,7 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraCapture::onImageSaved(requestId, path)
|
||||
\qmlsignal QtMultimedia::CameraCapture::onImageSaved(requestId, path)
|
||||
|
||||
This handler is called after the image with \a requestId has been written to the filesystem.
|
||||
The \a path is a local file path, not a URL.
|
||||
@@ -336,7 +336,7 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
|
||||
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraCapture::onImageMetadataAvailable(requestId, key, value)
|
||||
\qmlsignal QtMultimedia::CameraCapture::onImageMetadataAvailable(requestId, key, value)
|
||||
|
||||
This handler is called when the image with \a requestId has new metadata
|
||||
available with the key \a key and value \a value.
|
||||
|
||||
@@ -50,7 +50,7 @@ QT_BEGIN_NAMESPACE
|
||||
\brief An interface for exposure related camera settings.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
|
||||
This type is part of the \b{QtMultimedia 5.0} module.
|
||||
|
||||
@@ -124,7 +124,7 @@ QDeclarativeCameraExposure::~QDeclarativeCameraExposure()
|
||||
This property holds the adjustment value for the automatically calculated exposure. The value is in EV units.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::exposureCompensation
|
||||
\qmlproperty real QtMultimedia::CameraExposure::exposureCompensation
|
||||
|
||||
This property holds the adjustment value for the automatically calculated exposure. The value is
|
||||
in EV units.
|
||||
@@ -144,7 +144,7 @@ void QDeclarativeCameraExposure::setExposureCompensation(qreal ev)
|
||||
This property holds the sensor's ISO sensitivity value.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::CameraExposure::iso
|
||||
\qmlproperty int QtMultimedia::CameraExposure::iso
|
||||
|
||||
This property holds the sensor's ISO sensitivity value.
|
||||
*/
|
||||
@@ -161,7 +161,7 @@ int QDeclarativeCameraExposure::isoSensitivity() const
|
||||
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::shutterSpeed
|
||||
\qmlproperty real QtMultimedia::CameraExposure::shutterSpeed
|
||||
|
||||
This property holds the camera's current shutter speed value in seconds.
|
||||
To affect the shutter speed you can use the \l manualShutterSpeed
|
||||
@@ -180,7 +180,7 @@ qreal QDeclarativeCameraExposure::shutterSpeed() const
|
||||
\sa manualAperture, setAutoAperture()
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::aperture
|
||||
\qmlproperty real QtMultimedia::CameraExposure::aperture
|
||||
|
||||
This property holds the current lens aperture as an F number (the ratio of
|
||||
the focal length to effective aperture diameter).
|
||||
@@ -202,7 +202,7 @@ qreal QDeclarativeCameraExposure::aperture() const
|
||||
\sa iso, setAutoIsoSensitivity()
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::manualIso
|
||||
\qmlproperty real QtMultimedia::CameraExposure::manualIso
|
||||
|
||||
This property holds the ISO settings for capturing photos.
|
||||
|
||||
@@ -237,7 +237,7 @@ void QDeclarativeCameraExposure::setManualIsoSensitivity(int iso)
|
||||
\l shutterSpeed, setAutoShutterSpeed()
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::manualShutterSpeed
|
||||
\qmlproperty real QtMultimedia::CameraExposure::manualShutterSpeed
|
||||
|
||||
This property holds the shutter speed value (in seconds).
|
||||
If the value is less than zero, the camera automatically
|
||||
@@ -272,7 +272,7 @@ void QDeclarativeCameraExposure::setManualShutterSpeed(qreal speed)
|
||||
\l aperture, setAutoAperture()
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty real QtMultimedia5::CameraExposure::manualAperture
|
||||
\qmlproperty real QtMultimedia::CameraExposure::manualAperture
|
||||
|
||||
This property holds the aperture (F number) value
|
||||
for capturing photos.
|
||||
@@ -299,7 +299,7 @@ void QDeclarativeCameraExposure::setManualAperture(qreal aperture)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraExposure::setAutoAperture()
|
||||
\qmlmethod QtMultimedia::CameraExposure::setAutoAperture()
|
||||
Turn on auto aperture selection. The manual aperture value is reset to -1.0
|
||||
*/
|
||||
void QDeclarativeCameraExposure::setAutoAperture()
|
||||
@@ -308,7 +308,7 @@ void QDeclarativeCameraExposure::setAutoAperture()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraExposure::setAutoShutterSpeed()
|
||||
\qmlmethod QtMultimedia::CameraExposure::setAutoShutterSpeed()
|
||||
Turn on auto shutter speed selection. The manual shutter speed value is reset to -1.0
|
||||
*/
|
||||
void QDeclarativeCameraExposure::setAutoShutterSpeed()
|
||||
@@ -317,7 +317,7 @@ void QDeclarativeCameraExposure::setAutoShutterSpeed()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraExposure::setAutoIsoSensitivity()
|
||||
\qmlmethod QtMultimedia::CameraExposure::setAutoIsoSensitivity()
|
||||
Turn on auto ISO sensitivity selection. The manual ISO value is reset to -1.
|
||||
*/
|
||||
void QDeclarativeCameraExposure::setAutoIsoSensitivity()
|
||||
@@ -330,7 +330,7 @@ void QDeclarativeCameraExposure::setAutoIsoSensitivity()
|
||||
This property holds the camera exposure mode. The mode can one of the values in \l QCameraExposure::ExposureMode.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraExposure::exposureMode
|
||||
\qmlproperty enumeration QtMultimedia::CameraExposure::exposureMode
|
||||
|
||||
This property holds the camera exposure mode.
|
||||
|
||||
@@ -373,7 +373,7 @@ void QDeclarativeCameraExposure::setExposureMode(QDeclarativeCamera::ExposureMod
|
||||
typically defaults to the center \c (0.5, 0.5).
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty QPointF QtMultimedia5::CameraExposure::spotMeteringPoint
|
||||
\qmlproperty QPointF QtMultimedia::CameraExposure::spotMeteringPoint
|
||||
|
||||
The property holds the frame coordinates of the point to use for exposure metering.
|
||||
This point is only used in spot metering mode, and it typically defaults
|
||||
@@ -400,7 +400,7 @@ void QDeclarativeCameraExposure::setSpotMeteringPoint(const QPointF &point)
|
||||
The mode can be one of the constants in \l QCameraExposure::MeteringMode.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraExposure::meteringMode
|
||||
\qmlproperty enumeration QtMultimedia::CameraExposure::meteringMode
|
||||
|
||||
This property holds the camera metering mode (how exposure is balanced).
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype CameraFlash
|
||||
\instantiates QDeclarativeCameraFlash
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief An interface for flash related camera settings.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
@@ -95,7 +95,7 @@ QDeclarativeCameraFlash::~QDeclarativeCameraFlash()
|
||||
This property indicates whether the flash is charged.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::CameraFlash::ready
|
||||
\qmlproperty bool QtMultimedia::CameraFlash::ready
|
||||
|
||||
This property indicates whether the flash is charged.
|
||||
*/
|
||||
@@ -109,7 +109,7 @@ bool QDeclarativeCameraFlash::isFlashReady() const
|
||||
This property holds the camera flash mode. The mode can be one of the constants in \l QCameraExposure::FlashMode.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraFlash::mode
|
||||
\qmlproperty enumeration QtMultimedia::CameraFlash::mode
|
||||
|
||||
This property holds the camera flash mode.
|
||||
|
||||
@@ -145,13 +145,13 @@ void QDeclarativeCameraFlash::setFlashMode(int mode)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraFlash::flashModeChanged(int)
|
||||
\qmlsignal QtMultimedia::CameraFlash::flashModeChanged(int)
|
||||
This signal is emitted when the \c flashMode property is changed.
|
||||
The corresponding handler is \c onFlashModeChanged.
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::CameraFlash::flashReady(bool)
|
||||
\qmlsignal QtMultimedia::CameraFlash::flashReady(bool)
|
||||
This signal is emitted when QCameraExposure indicates that
|
||||
the flash is ready to use.
|
||||
The corresponsing handler is \c onFlashReadyChanged.
|
||||
|
||||
@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype CameraFocus
|
||||
\instantiates QDeclarativeCameraFocus
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief An interface for focus related camera settings.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
@@ -167,7 +167,7 @@ QDeclarativeCamera::FocusMode QDeclarativeCameraFocus::focusMode() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod bool QtMultimedia5::CameraFocus::isFocusModeSupported(mode) const
|
||||
\qmlmethod bool QtMultimedia::CameraFocus::isFocusModeSupported(mode) const
|
||||
|
||||
Returns true if the supplied \a mode is a supported focus mode, and
|
||||
false otherwise.
|
||||
@@ -235,7 +235,7 @@ void QDeclarativeCameraFocus::setFocusPointMode(QDeclarativeCamera::FocusPointMo
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod bool QtMultimedia5::CameraFocus::isFocusPointModeSupported(mode) const
|
||||
\qmlmethod bool QtMultimedia::CameraFocus::isFocusPointModeSupported(mode) const
|
||||
|
||||
Returns true if the supplied \a mode is a supported focus point mode, and
|
||||
false otherwise.
|
||||
@@ -255,7 +255,7 @@ bool QDeclarativeCameraFocus::isFocusPointModeSupported(QDeclarativeCamera::Focu
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty point QtMultimedia5::CameraFocus::customFocusPoint
|
||||
\qmlproperty point QtMultimedia::CameraFocus::customFocusPoint
|
||||
|
||||
This property holds the position of custom focus point, in relative frame coordinates:
|
||||
QPointF(0,0) points to the left top frame point, QPointF(0.5,0.5)
|
||||
@@ -289,7 +289,7 @@ void QDeclarativeCameraFocus::setCustomFocusPoint(const QPointF &point)
|
||||
\endtable
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty list<focusZone> QtMultimedia5::CameraFocus::focusZones
|
||||
\qmlproperty list<focusZone> QtMultimedia::CameraFocus::focusZones
|
||||
|
||||
This property holds the list of current camera focus zones,
|
||||
each including \c area specified in the same coordinates as \l customFocusPoint,
|
||||
|
||||
@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype CameraImageProcessing
|
||||
\instantiates QDeclarativeCameraImageProcessing
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief An interface for camera capture related settings.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
@@ -95,7 +95,7 @@ QDeclarativeCameraImageProcessing::~QDeclarativeCameraImageProcessing()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraImageProcessing::whiteBalanceMode
|
||||
\qmlproperty enumeration QtMultimedia::CameraImageProcessing::whiteBalanceMode
|
||||
|
||||
\table
|
||||
\header \li Value \li Description
|
||||
@@ -127,7 +127,7 @@ void QDeclarativeCameraImageProcessing::setWhiteBalanceMode(QDeclarativeCameraIm
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::manualWhiteBalance
|
||||
\qmlproperty qreal QtMultimedia::CameraImageProcessing::manualWhiteBalance
|
||||
|
||||
The color temperature used when in manual white balance mode (WhiteBalanceManual).
|
||||
The units are Kelvin.
|
||||
@@ -148,7 +148,7 @@ void QDeclarativeCameraImageProcessing::setManualWhiteBalance(qreal colorTemp) c
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::contrast
|
||||
\qmlproperty qreal QtMultimedia::CameraImageProcessing::contrast
|
||||
|
||||
Image contrast adjustment.
|
||||
Valid contrast adjustment values range between -1.0 and 1.0, with a default of 0.
|
||||
@@ -167,7 +167,7 @@ void QDeclarativeCameraImageProcessing::setContrast(qreal value)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::saturation
|
||||
\qmlproperty qreal QtMultimedia::CameraImageProcessing::saturation
|
||||
|
||||
Image saturation adjustment.
|
||||
Valid saturation adjustment values range between -1.0 and 1.0, the default is 0.
|
||||
@@ -186,7 +186,7 @@ void QDeclarativeCameraImageProcessing::setSaturation(qreal value)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::sharpeningLevel
|
||||
\qmlproperty qreal QtMultimedia::CameraImageProcessing::sharpeningLevel
|
||||
|
||||
Adjustment of sharpening level applied to image.
|
||||
|
||||
@@ -207,7 +207,7 @@ void QDeclarativeCameraImageProcessing::setSharpeningLevel(qreal value)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraImageProcessing::denoisingLevel
|
||||
\qmlproperty qreal QtMultimedia::CameraImageProcessing::denoisingLevel
|
||||
|
||||
Adjustment of denoising applied to image.
|
||||
|
||||
@@ -228,12 +228,12 @@ void QDeclarativeCameraImageProcessing::setDenoisingLevel(qreal value)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::Camera::whiteBalanceModeChanged(Camera::WhiteBalanceMode)
|
||||
\qmlsignal QtMultimedia::Camera::whiteBalanceModeChanged(Camera::WhiteBalanceMode)
|
||||
This signal is emitted when the \c whiteBalanceMode property is changed.
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::Camera::manualWhiteBalanceChanged(qreal)
|
||||
\qmlsignal QtMultimedia::Camera::manualWhiteBalanceChanged(qreal)
|
||||
This signal is emitted when the \c manualWhiteBalance property is changed.
|
||||
*/
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype CameraRecorder
|
||||
\instantiates QDeclarativeCameraRecorder
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief Controls video recording with the Camera.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup camera_qml
|
||||
@@ -99,7 +99,7 @@ QDeclarativeCameraRecorder::~QDeclarativeCameraRecorder()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty size QtMultimedia5::CameraRecorder::resolution
|
||||
\qmlproperty size QtMultimedia::CameraRecorder::resolution
|
||||
|
||||
This property holds the video frame dimensions to be used for video capture.
|
||||
*/
|
||||
@@ -109,12 +109,12 @@ QSize QDeclarativeCameraRecorder::captureResolution()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::audioCodec
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::audioCodec
|
||||
|
||||
This property holds the audio codec to be used for recording video.
|
||||
Typically this is \c aac or \c amr-wb.
|
||||
|
||||
\sa {QtMultimedia5::CameraImageProcessing::whiteBalanceMode}{whileBalanceMode}
|
||||
\sa {QtMultimedia::CameraImageProcessing::whiteBalanceMode}{whileBalanceMode}
|
||||
*/
|
||||
QString QDeclarativeCameraRecorder::audioCodec() const
|
||||
{
|
||||
@@ -122,7 +122,7 @@ QString QDeclarativeCameraRecorder::audioCodec() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::videoCodec
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::videoCodec
|
||||
|
||||
This property holds the video codec to be used for recording video.
|
||||
Typically this is \c h264.
|
||||
@@ -133,7 +133,7 @@ QString QDeclarativeCameraRecorder::videoCodec() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::mediaContainer
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::mediaContainer
|
||||
|
||||
This property holds the media container to be used for recording video.
|
||||
Typically this is \c mp4.
|
||||
@@ -180,7 +180,7 @@ void QDeclarativeCameraRecorder::setMediaContainer(const QString &container)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::CameraRecorder::frameRate
|
||||
\qmlproperty qreal QtMultimedia::CameraRecorder::frameRate
|
||||
|
||||
This property holds the framerate (in frames per second) to be used for recording video.
|
||||
*/
|
||||
@@ -190,7 +190,7 @@ qreal QDeclarativeCameraRecorder::frameRate() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::CameraRecorder::videoBitRate
|
||||
\qmlproperty int QtMultimedia::CameraRecorder::videoBitRate
|
||||
|
||||
This property holds the bit rate (in bits per second) to be used for recording video.
|
||||
*/
|
||||
@@ -200,7 +200,7 @@ int QDeclarativeCameraRecorder::videoBitRate() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::CameraRecorder::audioBitRate
|
||||
\qmlproperty int QtMultimedia::CameraRecorder::audioBitRate
|
||||
|
||||
This property holds the audio bit rate (in bits per second) to be used for recording video.
|
||||
*/
|
||||
@@ -210,7 +210,7 @@ int QDeclarativeCameraRecorder::audioBitRate() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::CameraRecorder::audioChannels
|
||||
\qmlproperty int QtMultimedia::CameraRecorder::audioChannels
|
||||
|
||||
This property indicates the number of audio channels to be encoded while
|
||||
recording video (1 is mono, 2 is stereo).
|
||||
@@ -221,7 +221,7 @@ int QDeclarativeCameraRecorder::audioChannels() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::CameraRecorder::audioSampleRate
|
||||
\qmlproperty int QtMultimedia::CameraRecorder::audioSampleRate
|
||||
|
||||
This property holds the sample rate to be used to encode audio while recording video.
|
||||
*/
|
||||
@@ -231,7 +231,7 @@ int QDeclarativeCameraRecorder::audioSampleRate() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraRecorder::videoEncodingMode
|
||||
\qmlproperty enumeration QtMultimedia::CameraRecorder::videoEncodingMode
|
||||
|
||||
This property holds the type of encoding method to be used for recording video.
|
||||
|
||||
@@ -257,7 +257,7 @@ QDeclarativeCameraRecorder::EncodingMode QDeclarativeCameraRecorder::videoEncodi
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraRecorder::audioEncodingMode
|
||||
\qmlproperty enumeration QtMultimedia::CameraRecorder::audioEncodingMode
|
||||
|
||||
The type of encoding method to use when recording audio.
|
||||
|
||||
@@ -343,7 +343,7 @@ void QDeclarativeCameraRecorder::setVideoEncodingMode(QDeclarativeCameraRecorder
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraRecorder::errorCode
|
||||
\qmlproperty enumeration QtMultimedia::CameraRecorder::errorCode
|
||||
|
||||
This property holds the last error code.
|
||||
|
||||
@@ -369,7 +369,7 @@ QDeclarativeCameraRecorder::Error QDeclarativeCameraRecorder::errorCode() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::errorString
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::errorString
|
||||
|
||||
This property holds the description of the last error.
|
||||
*/
|
||||
@@ -379,7 +379,7 @@ QString QDeclarativeCameraRecorder::errorString() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraRecorder::recorderState
|
||||
\qmlproperty enumeration QtMultimedia::CameraRecorder::recorderState
|
||||
|
||||
This property holds the current state of the camera recorder object.
|
||||
|
||||
@@ -407,7 +407,7 @@ QDeclarativeCameraRecorder::RecorderState QDeclarativeCameraRecorder::recorderSt
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::CameraRecorder::recorderStatus
|
||||
\qmlproperty enumeration QtMultimedia::CameraRecorder::recorderStatus
|
||||
|
||||
This property holds the current status of media recording.
|
||||
|
||||
@@ -438,7 +438,7 @@ QDeclarativeCameraRecorder::RecorderStatus QDeclarativeCameraRecorder::recorderS
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraRecorder::record()
|
||||
\qmlmethod QtMultimedia::CameraRecorder::record()
|
||||
|
||||
Starts recording.
|
||||
*/
|
||||
@@ -448,7 +448,7 @@ void QDeclarativeCameraRecorder::record()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraRecorder::stop()
|
||||
\qmlmethod QtMultimedia::CameraRecorder::stop()
|
||||
|
||||
Stops recording.
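For illustration, a sketch of driving record() and stop() from a simple
press-and-hold gesture (the ids are hypothetical, and the recorder is assumed
to be reachable as \c camera.videoRecorder):

\qml
import QtQuick 2.0
import QtMultimedia 5.0

Item {
    width: 320; height: 240

    Camera {
        id: camera
    }

    // Record while the area is pressed, stop on release.
    MouseArea {
        anchors.fill: parent
        onPressed: camera.videoRecorder.record()
        onReleased: camera.videoRecorder.stop()
    }
}
\endqml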
*/
|
||||
@@ -478,7 +478,7 @@ void QDeclarativeCameraRecorder::setRecorderState(QDeclarativeCameraRecorder::Re
|
||||
the recorder uses the system-specific place and file naming scheme.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::outputLocation
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::outputLocation
|
||||
|
||||
This property holds the destination location of the media content. If the location is empty,
|
||||
the recorder uses the system-specific place and file naming scheme.
|
||||
@@ -496,7 +496,7 @@ QString QDeclarativeCameraRecorder::outputLocation() const
|
||||
new location is set or new recording starts.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::CameraRecorder::actualLocation
|
||||
\qmlproperty string QtMultimedia::CameraRecorder::actualLocation
|
||||
|
||||
This property holds the actual location of the last saved media content. The actual location is
usually available after the recording starts, and is reset when a new location is set or a new recording starts.
|
||||
@@ -520,7 +520,7 @@ void QDeclarativeCameraRecorder::setOutputLocation(const QString &location)
|
||||
This property holds the duration (in milliseconds) of the last recording.
*/
/*!
\qmlproperty int QtMultimedia5::CameraRecorder::duration
\qmlproperty int QtMultimedia::CameraRecorder::duration

This property holds the duration (in milliseconds) of the last recording.
*/
|
||||
@@ -535,7 +535,7 @@ qint64 QDeclarativeCameraRecorder::duration() const
|
||||
recording.
|
||||
*/
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::CameraRecorder::muted
|
||||
\qmlproperty bool QtMultimedia::CameraRecorder::muted
|
||||
|
||||
This property indicates whether the audio input is muted during recording.
|
||||
*/
|
||||
@@ -550,7 +550,7 @@ void QDeclarativeCameraRecorder::setMuted(bool muted)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::CameraRecorder::setMetadata(key, value)
|
||||
\qmlmethod QtMultimedia::CameraRecorder::setMetadata(key, value)
|
||||
|
||||
Sets metadata for the next video to be recorded, with
the given \a key being associated with \a value.
|
||||
|
||||
@@ -47,7 +47,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype Radio
|
||||
\instantiates QDeclarativeRadio
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief Access radio functionality from a QML application.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup multimedia_radio_qml
|
||||
@@ -126,7 +126,7 @@ QDeclarativeRadio::~QDeclarativeRadio()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Radio::state
|
||||
\qmlproperty enumeration QtMultimedia::Radio::state
|
||||
|
||||
This property holds the current state of the Radio.
|
||||
|
||||
@@ -148,7 +148,7 @@ QDeclarativeRadio::State QDeclarativeRadio::state() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Radio::band
|
||||
\qmlproperty enumeration QtMultimedia::Radio::band
|
||||
|
||||
This property holds the frequency band used for the radio, which can be specified as
|
||||
any one of the values in the table below.
|
||||
@@ -178,7 +178,7 @@ QDeclarativeRadio::Band QDeclarativeRadio::band() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::frequency
|
||||
\qmlproperty int QtMultimedia::Radio::frequency
|
||||
|
||||
Sets the frequency in Hertz that the radio is tuned to. The frequency must be within the frequency
range for the current band; otherwise, it is adjusted to fall within that range.
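A minimal, illustrative sketch of tuning from QML (the frequency value is an
arbitrary example):

\qml
import QtMultimedia 5.0

Radio {
    id: radio
    band: Radio.FM
    frequency: 89500000   // 89.5 MHz, expressed in Hertz
}
\endqml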
|
||||
@@ -191,7 +191,7 @@ int QDeclarativeRadio::frequency() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Radio::stereoMode
|
||||
\qmlproperty enumeration QtMultimedia::Radio::stereoMode
|
||||
|
||||
This property holds the stereo mode of the radio, which can be set to any one of the
|
||||
values in the table below.
|
||||
@@ -215,7 +215,7 @@ QDeclarativeRadio::StereoMode QDeclarativeRadio::stereoMode() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::volume
|
||||
\qmlproperty int QtMultimedia::Radio::volume
|
||||
|
||||
Set this property to control the volume of the radio. The valid range of the volume is from 0 to 100.
|
||||
*/
|
||||
@@ -225,7 +225,7 @@ int QDeclarativeRadio::volume() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::Radio::muted
|
||||
\qmlproperty bool QtMultimedia::Radio::muted
|
||||
|
||||
This property reflects whether the radio is muted or not.
|
||||
*/
|
||||
@@ -235,7 +235,7 @@ bool QDeclarativeRadio::muted() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::Radio::stereo
|
||||
\qmlproperty bool QtMultimedia::Radio::stereo
|
||||
|
||||
This property holds whether the radio is receiving a stereo signal or not. If \l stereoMode is
|
||||
set to ForceMono the value will always be false. Likewise, it will always be true if stereoMode
|
||||
@@ -249,7 +249,7 @@ bool QDeclarativeRadio::stereo() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::signalStrength
|
||||
\qmlproperty int QtMultimedia::Radio::signalStrength
|
||||
|
||||
The strength of the current radio signal as a percentage where 0% equals no signal, and 100% is a
|
||||
very good signal.
|
||||
@@ -260,7 +260,7 @@ int QDeclarativeRadio::signalStrength() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::Radio::searching
|
||||
\qmlproperty bool QtMultimedia::Radio::searching
|
||||
|
||||
This property is true if the radio is currently searching for radio stations, for instance using the \l scanUp,
|
||||
\l scanDown, and \l searchAllStations methods. Once the search completes, or if it is cancelled using
|
||||
@@ -272,7 +272,7 @@ bool QDeclarativeRadio::searching() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::frequencyStep
|
||||
\qmlproperty int QtMultimedia::Radio::frequencyStep
|
||||
|
||||
The number of Hertz for each step when tuning the radio manually. The value is for the current \l band.
|
||||
*/
|
||||
@@ -282,7 +282,7 @@ int QDeclarativeRadio::frequencyStep() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::minimumFrequency
|
||||
\qmlproperty int QtMultimedia::Radio::minimumFrequency
|
||||
|
||||
The minimum frequency for the current \l band.
|
||||
*/
|
||||
@@ -292,7 +292,7 @@ int QDeclarativeRadio::minimumFrequency() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::maximumFrequency
|
||||
\qmlproperty int QtMultimedia::Radio::maximumFrequency
|
||||
|
||||
The maximum frequency for the current \l band.
|
||||
*/
|
||||
@@ -302,7 +302,7 @@ int QDeclarativeRadio::maximumFrequency() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Radio::antennaConnected
|
||||
\qmlproperty bool QtMultimedia::Radio::antennaConnected
|
||||
|
||||
This property is true if there is an antenna connected. Otherwise it will be false.
|
||||
*/
|
||||
@@ -312,7 +312,7 @@ bool QDeclarativeRadio::isAntennaConnected() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::Radio::availability
|
||||
\qmlproperty enumeration QtMultimedia::Radio::availability
|
||||
|
||||
Returns the availability state of the radio.
|
||||
|
||||
@@ -367,7 +367,7 @@ void QDeclarativeRadio::setMuted(bool muted)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::cancelScan()
|
||||
\qmlmethod QtMultimedia::Radio::cancelScan()
|
||||
|
||||
Cancel the current scan. Will also cancel a search started with \l searchAllStations.
|
||||
*/
|
||||
@@ -377,7 +377,7 @@ void QDeclarativeRadio::cancelScan()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::scanDown()
|
||||
\qmlmethod QtMultimedia::Radio::scanDown()
|
||||
|
||||
Searches backward in the frequency range for the current band.
|
||||
*/
|
||||
@@ -387,7 +387,7 @@ void QDeclarativeRadio::scanDown()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::scanUp()
|
||||
\qmlmethod QtMultimedia::Radio::scanUp()
|
||||
|
||||
Searches forward in the frequency range for the current band.
|
||||
*/
|
||||
@@ -397,7 +397,7 @@ void QDeclarativeRadio::scanUp()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::searchAllStations(enumeration searchMode)
|
||||
\qmlmethod QtMultimedia::Radio::searchAllStations(enumeration searchMode)
|
||||
|
||||
Start searching the complete frequency range for the current band, and save all the
|
||||
radio stations found. The search mode can be either of the values described in the
|
||||
@@ -461,7 +461,7 @@ void QDeclarativeRadio::searchAllStations(QDeclarativeRadio::SearchMode searchMo
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::tuneDown()
|
||||
\qmlmethod QtMultimedia::Radio::tuneDown()
|
||||
|
||||
Decrements the frequency by the frequency step for the current band. If the frequency is already set
|
||||
to the minimum frequency, calling this function has no effect.
|
||||
@@ -476,7 +476,7 @@ void QDeclarativeRadio::tuneDown()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::tuneUp()
|
||||
\qmlmethod QtMultimedia::Radio::tuneUp()
|
||||
|
||||
Increments the frequency by the frequency step for the current band. If the frequency is already set
|
||||
to the maximum frequency, calling this function has no effect.
|
||||
@@ -491,7 +491,7 @@ void QDeclarativeRadio::tuneUp()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::start()
|
||||
\qmlmethod QtMultimedia::Radio::start()
|
||||
|
||||
Starts the radio. If the radio is available, as determined by the \l availability property,
|
||||
this will result in the \l state becoming \c ActiveState.
|
||||
@@ -502,7 +502,7 @@ void QDeclarativeRadio::start()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::Radio::stop()
|
||||
\qmlmethod QtMultimedia::Radio::stop()
|
||||
|
||||
Stops the radio. After calling this method the \l state will be \c StoppedState.
|
||||
*/
|
||||
@@ -533,7 +533,7 @@ void QDeclarativeRadio::_q_availabilityChanged(QMultimedia::AvailabilityStatus a
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::Radio::stationFound(int frequency, string stationId)
|
||||
\qmlsignal QtMultimedia::Radio::stationFound(int frequency, string stationId)
|
||||
|
||||
This signal is emitted when a new radio station is found. This signal is only emitted
if \l searchAllStations is called with \c SearchGetStationId.
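An illustrative sketch of reacting to this signal during a station search (the
logging is a placeholder for application logic):

\qml
import QtMultimedia 5.0

Radio {
    id: radio
    onStationFound: console.log("Station found at", frequency, "Hz, id:", stationId)
    Component.onCompleted: radio.searchAllStations(Radio.SearchGetStationId)
}
\endqml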
|
||||
|
||||
@@ -46,7 +46,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype RadioData
|
||||
\instantiates QDeclarativeRadioData
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief Access RDS data from a QML application.
|
||||
\ingroup multimedia_qml
|
||||
\ingroup multimedia_radio_qml
|
||||
@@ -120,7 +120,7 @@ QDeclarativeRadioData::~QDeclarativeRadioData()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::RadioData::availability
|
||||
\qmlproperty enumeration QtMultimedia::RadioData::availability
|
||||
|
||||
Returns the availability state of the radio data interface.
|
||||
|
||||
@@ -147,7 +147,7 @@ QDeclarativeRadioData::Availability QDeclarativeRadioData::availability() const
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::RadioData::stationId
|
||||
\qmlproperty string QtMultimedia::RadioData::stationId
|
||||
|
||||
This property allows you to read the station Id of the currently tuned radio
|
||||
station.
|
||||
@@ -158,7 +158,7 @@ QString QDeclarativeRadioData::stationId() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::RadioData::programType
|
||||
\qmlproperty enumeration QtMultimedia::RadioData::programType
|
||||
|
||||
This property holds the type of the currently playing program as transmitted
|
||||
by the radio station. The value can be any one of the values defined in the
|
||||
@@ -223,7 +223,7 @@ QDeclarativeRadioData::ProgramType QDeclarativeRadioData::programType() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::RadioData::programTypeName
|
||||
\qmlproperty string QtMultimedia::RadioData::programTypeName
|
||||
|
||||
This property holds a string representation of the \l programType.
|
||||
*/
|
||||
@@ -233,7 +233,7 @@ QString QDeclarativeRadioData::programTypeName() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::RadioData::stationName
|
||||
\qmlproperty string QtMultimedia::RadioData::stationName
|
||||
|
||||
This property has the name of the currently tuned radio station.
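For illustration, a small sketch displaying the RDS station name and radio text
(ids are hypothetical):

\qml
import QtQuick 2.0
import QtMultimedia 5.0

Item {
    width: 320; height: 60

    RadioData {
        id: radioData
    }

    Text {
        anchors.centerIn: parent
        text: radioData.stationName + " - " + radioData.radioText
    }
}
\endqml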
|
||||
*/
|
||||
@@ -243,7 +243,7 @@ QString QDeclarativeRadioData::stationName() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::RadioData::radioText
|
||||
\qmlproperty string QtMultimedia::RadioData::radioText
|
||||
|
||||
This property holds free-text transmitted by the radio station. This is typically used to
|
||||
show supporting information for the currently playing content, for instance song title or
|
||||
@@ -255,7 +255,7 @@ QString QDeclarativeRadioData::radioText() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::RadioData::alternativeFrequenciesEnabled
|
||||
\qmlproperty bool QtMultimedia::RadioData::alternativeFrequenciesEnabled
|
||||
|
||||
This property allows you to specify whether the radio should try to tune to alternative
|
||||
frequencies if the signal strength of the current station becomes too weak. The alternative
|
||||
|
||||
@@ -49,7 +49,7 @@ QT_BEGIN_NAMESPACE
|
||||
/*!
|
||||
\qmltype Torch
|
||||
\instantiates QDeclarativeTorch
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
\brief Simple control over torch functionality
|
||||
|
||||
\ingroup multimedia_qml
|
||||
@@ -92,7 +92,7 @@ QDeclarativeTorch::~QDeclarativeTorch()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::Torch::enabled
|
||||
\qmlproperty bool QtMultimedia::Torch::enabled
|
||||
|
||||
This property indicates whether the torch is enabled. If the torch functionality is shared
|
||||
with the camera flash hardware, the camera will take priority
|
||||
@@ -140,7 +140,7 @@ void QDeclarativeTorch::setEnabled(bool on)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::Torch::power
|
||||
\qmlproperty int QtMultimedia::Torch::power
|
||||
|
||||
This property holds the current torch power setting, as a percentage of full power.
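A minimal illustrative sketch; whether a given power level is honoured depends
on the hardware:

\qml
import QtMultimedia 5.0

Torch {
    enabled: true
    power: 75   // roughly three quarters of full power
}
\endqml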
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ QT_BEGIN_NAMESPACE
|
||||
|
||||
\ingroup multimedia_qml
|
||||
\ingroup multimedia_video_qml
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
|
||||
\c VideoOutput is part of the \b{QtMultimedia 5.0} module.
|
||||
|
||||
@@ -143,7 +143,7 @@ QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty variant QtMultimedia5::VideoOutput::source
|
||||
\qmlproperty variant QtMultimedia::VideoOutput::source
|
||||
|
||||
This property holds the source item providing the video frames like MediaPlayer or Camera.
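For illustration, a minimal sketch wiring a MediaPlayer into a VideoOutput (the
media URL is a placeholder):

\qml
import QtQuick 2.0
import QtMultimedia 5.0

Item {
    width: 640; height: 360

    MediaPlayer {
        id: player
        source: "movie.mp4"   // placeholder URL
        autoPlay: true
    }

    VideoOutput {
        anchors.fill: parent
        source: player
        fillMode: VideoOutput.PreserveAspectFit
    }
}
\endqml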
|
||||
|
||||
@@ -265,7 +265,7 @@ void QDeclarativeVideoOutput::_q_updateMediaObject()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::VideoOutput::fillMode
|
||||
\qmlproperty enumeration QtMultimedia::VideoOutput::fillMode
|
||||
|
||||
Set this property to define how the video is scaled to fit the target area.
|
||||
|
||||
@@ -359,7 +359,7 @@ void QDeclarativeVideoOutput::_q_screenOrientationChanged(int orientation)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::VideoOutput::orientation
|
||||
\qmlproperty int QtMultimedia::VideoOutput::orientation
|
||||
|
||||
In some cases the source video stream requires a certain
|
||||
orientation to be correct. This includes
|
||||
@@ -420,7 +420,7 @@ void QDeclarativeVideoOutput::setOrientation(int orientation)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::VideoOutput::autoOrientation
|
||||
\qmlproperty bool QtMultimedia::VideoOutput::autoOrientation
|
||||
|
||||
This property allows you to enable and disable auto orientation
|
||||
of the video stream, so that its orientation always matches
|
||||
@@ -459,7 +459,7 @@ void QDeclarativeVideoOutput::setAutoOrientation(bool autoOrientation)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty rectangle QtMultimedia5::VideoOutput::contentRect
|
||||
\qmlproperty rectangle QtMultimedia::VideoOutput::contentRect
|
||||
|
||||
This property holds the item coordinates of the area that
|
||||
would contain video to render. With certain fill modes,
|
||||
@@ -480,7 +480,7 @@ QRectF QDeclarativeVideoOutput::contentRect() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty rectangle QtMultimedia5::VideoOutput::sourceRect
|
||||
\qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect
|
||||
|
||||
This property holds the area of the source video
|
||||
content that is considered for rendering. The
|
||||
@@ -520,7 +520,7 @@ QRectF QDeclarativeVideoOutput::sourceRect() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapNormalizedPointToItem (const QPointF &point) const
|
||||
\qmlmethod QPointF QtMultimedia::VideoOutput::mapNormalizedPointToItem (const QPointF &point) const
|
||||
|
||||
Given normalized coordinates \a point (that is, each
|
||||
component in the range of 0 to 1.0), return the mapped point
|
||||
@@ -557,7 +557,7 @@ QPointF QDeclarativeVideoOutput::mapNormalizedPointToItem(const QPointF &point)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
|
||||
\qmlmethod QRectF QtMultimedia::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
|
||||
|
||||
Given a rectangle \a rectangle in normalized
|
||||
coordinates (that is, each component in the range of 0 to 1.0),
|
||||
@@ -574,7 +574,7 @@ QRectF QDeclarativeVideoOutput::mapNormalizedRectToItem(const QRectF &rectangle)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItem(const QPointF &point) const
|
||||
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
|
||||
|
||||
Given a point \a point in item coordinates, return the
|
||||
corresponding point in source coordinates. This mapping is
|
||||
@@ -594,7 +594,7 @@ QPointF QDeclarativeVideoOutput::mapPointToSource(const QPointF &point) const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToSource(const QRectF &rectangle) const
|
||||
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSource(const QRectF &rectangle) const
|
||||
|
||||
Given a rectangle \a rectangle in item coordinates, return the
|
||||
corresponding rectangle in source coordinates. This mapping is
|
||||
@@ -612,7 +612,7 @@ QRectF QDeclarativeVideoOutput::mapRectToSource(const QRectF &rectangle) const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItemNormalized(const QPointF &point) const
|
||||
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItemNormalized(const QPointF &point) const
|
||||
|
||||
Given a point \a point in item coordinates, return the
|
||||
corresponding point in normalized source coordinates. This mapping is
|
||||
@@ -647,7 +647,7 @@ QPointF QDeclarativeVideoOutput::mapPointToSourceNormalized(const QPointF &point
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
|
||||
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
|
||||
|
||||
Given a rectangle \a rectangle in item coordinates, return the
|
||||
corresponding rectangle in normalized source coordinates. This mapping is
|
||||
@@ -670,7 +670,7 @@ QDeclarativeVideoOutput::SourceType QDeclarativeVideoOutput::sourceType() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QPointF QtMultimedia5::VideoOutput::mapPointToItem(const QPointF &point) const
|
||||
\qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const
|
||||
|
||||
Given a point \a point in source coordinates, return the
|
||||
corresponding point in item coordinates. This mapping is
|
||||
@@ -693,7 +693,7 @@ QPointF QDeclarativeVideoOutput::mapPointToItem(const QPointF &point) const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QRectF QtMultimedia5::VideoOutput::mapRectToItem(const QRectF &rectangle) const
|
||||
\qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToItem(const QRectF &rectangle) const
|
||||
|
||||
Given a rectangle \a rectangle in source coordinates, return the
|
||||
corresponding rectangle in item coordinates. This mapping is
|
||||
|
||||
@@ -193,6 +193,13 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
|
||||
if (!m_glContext) {
|
||||
m_glContext = QOpenGLContext::currentContext();
|
||||
m_surface->scheduleOpenGLContextUpdate();
|
||||
|
||||
// Internal mechanism to call back the surface renderer from the QtQuick render thread
|
||||
QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
|
||||
if (obj) {
|
||||
QEvent ev(QEvent::User);
|
||||
obj->event(&ev);
|
||||
}
|
||||
}
|
||||
|
||||
if (m_frameChanged) {
|
||||
|
||||
@@ -89,7 +89,7 @@ QT_BEGIN_NAMESPACE
|
||||
\inmodule QtMultimedia
|
||||
\ingroup multimedia_qml
|
||||
\ingroup multimedia_audio_qml
|
||||
\inqmlmodule QtMultimedia 5.0
|
||||
\inqmlmodule QtMultimedia
|
||||
|
||||
SoundEffect is part of the \b{QtMultimedia 5.0} module.
|
||||
|
||||
@@ -149,7 +149,7 @@ QStringList QSoundEffect::supportedMimeTypes()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty url QtMultimedia5::SoundEffect::source
|
||||
\qmlproperty url QtMultimedia::SoundEffect::source
|
||||
|
||||
This property holds the url for the sound to play. For the SoundEffect
|
||||
to attempt to load the source, the URL must exist and the application must have read permission
|
||||
@@ -182,7 +182,7 @@ void QSoundEffect::setSource(const QUrl &url)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::SoundEffect::loops
|
||||
\qmlproperty int QtMultimedia::SoundEffect::loops
|
||||
|
||||
This property provides a way to control the number of times to repeat the sound on each play().
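An illustrative sketch (the file name is a placeholder) that repeats a short
effect a fixed number of times on each click:

\qml
import QtQuick 2.0
import QtMultimedia 5.0

Item {
    width: 200; height: 200

    SoundEffect {
        id: click
        source: "click.wav"   // placeholder file
        loops: 3
        volume: 0.5
    }

    MouseArea {
        anchors.fill: parent
        onClicked: click.play()
    }
}
\endqml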
|
||||
|
||||
@@ -232,7 +232,7 @@ void QSoundEffect::setLoopCount(int loopCount)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty int QtMultimedia5::SoundEffect::loopsRemaining
|
||||
\qmlproperty int QtMultimedia::SoundEffect::loopsRemaining
|
||||
|
||||
This property contains the number of loops remaining before the sound effect
|
||||
stops by itself, or SoundEffect.Infinite if that's what has been set in \l loops.
|
||||
@@ -250,7 +250,7 @@ int QSoundEffect::loopsRemaining() const
|
||||
|
||||
|
||||
/*!
|
||||
\qmlproperty qreal QtMultimedia5::SoundEffect::volume
|
||||
\qmlproperty qreal QtMultimedia::SoundEffect::volume
|
||||
|
||||
This property holds the volume of the sound effect playback, from 0.0 (silent) to 1.0 (maximum volume).
|
||||
*/
|
||||
@@ -285,7 +285,7 @@ void QSoundEffect::setVolume(qreal volume)
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::SoundEffect::muted
|
||||
\qmlproperty bool QtMultimedia::SoundEffect::muted
|
||||
|
||||
This property provides a way to control muting. A value of \c true will mute this effect.
|
||||
Otherwise, playback will occur with the currently specified \l volume.
|
||||
@@ -323,7 +323,7 @@ void QSoundEffect::setMuted(bool muted)
|
||||
Returns whether the sound effect has finished loading the \l source().
|
||||
*/
|
||||
/*!
|
||||
\qmlmethod bool QtMultimedia5::SoundEffect::isLoaded()
|
||||
\qmlmethod bool QtMultimedia::SoundEffect::isLoaded()
|
||||
|
||||
Returns whether the sound effect has finished loading the \l source.
|
||||
*/
|
||||
@@ -333,7 +333,7 @@ bool QSoundEffect::isLoaded() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::SoundEffect::play()
|
||||
\qmlmethod QtMultimedia::SoundEffect::play()
|
||||
|
||||
Start playback of the sound effect, looping the effect for the number of
times specified in the loops property.
|
||||
@@ -354,7 +354,7 @@ void QSoundEffect::play()
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty bool QtMultimedia5::SoundEffect::playing
|
||||
\qmlproperty bool QtMultimedia::SoundEffect::playing
|
||||
|
||||
This property indicates whether the sound effect is playing or not.
|
||||
*/
|
||||
@@ -381,7 +381,7 @@ bool QSoundEffect::isPlaying() const
|
||||
*/
|
||||
|
||||
/*!
|
||||
\qmlproperty enumeration QtMultimedia5::SoundEffect::status
|
||||
\qmlproperty enumeration QtMultimedia::SoundEffect::status
|
||||
|
||||
This property indicates the current status of the SoundEffect
|
||||
as enumerated within SoundEffect.
|
||||
@@ -411,7 +411,7 @@ QSoundEffect::Status QSoundEffect::status() const
|
||||
}
|
||||
|
||||
/*!
|
||||
\qmlproperty string QtMultimedia5::SoundEffect::category
|
||||
\qmlproperty string QtMultimedia::SoundEffect::category
|
||||
|
||||
This property contains the \e category of this sound effect.
|
||||
|
||||
@@ -474,7 +474,7 @@ void QSoundEffect::setCategory(const QString &category)
|
||||
|
||||
|
||||
/*!
|
||||
\qmlmethod QtMultimedia5::SoundEffect::stop()
|
||||
\qmlmethod QtMultimedia::SoundEffect::stop()
|
||||
|
||||
Stop current playback.
|
||||
|
||||
@@ -498,7 +498,7 @@ void QSoundEffect::stop()
|
||||
The \c sourceChanged signal is emitted when the source has been changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::sourceChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::sourceChanged()
|
||||
|
||||
The \c sourceChanged signal is emitted when the source has been changed.
|
||||
|
||||
@@ -510,7 +510,7 @@ void QSoundEffect::stop()
|
||||
The \c loadedChanged signal is emitted when the loading state has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::loadedChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::loadedChanged()
|
||||
|
||||
The \c loadedChanged signal is emitted when the loading state has changed.
|
||||
|
||||
@@ -523,7 +523,7 @@ void QSoundEffect::stop()
|
||||
The \c loopCountChanged signal is emitted when the initial number of loops has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::loopCountChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::loopCountChanged()
|
||||
|
||||
The \c loopCountChanged signal is emitted when the initial number of loops has changed.
|
||||
|
||||
@@ -536,7 +536,7 @@ void QSoundEffect::stop()
|
||||
The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::loopsRemainingChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::loopsRemainingChanged()
|
||||
|
||||
The \c loopsRemainingChanged signal is emitted when the remaining number of loops has changed.
|
||||
|
||||
@@ -549,7 +549,7 @@ void QSoundEffect::stop()
|
||||
The \c volumeChanged signal is emitted when the volume has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::volumeChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::volumeChanged()
|
||||
|
||||
The \c volumeChanged signal is emitted when the volume has changed.
|
||||
|
||||
@@ -562,7 +562,7 @@ void QSoundEffect::stop()
|
||||
The \c mutedChanged signal is emitted when the mute state has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::mutedChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::mutedChanged()
|
||||
|
||||
The \c mutedChanged signal is emitted when the mute state has changed.
|
||||
|
||||
@@ -575,7 +575,7 @@ void QSoundEffect::stop()
|
||||
The \c playingChanged signal is emitted when the playing property has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::playingChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::playingChanged()
|
||||
|
||||
The \c playingChanged signal is emitted when the playing property has changed.
|
||||
|
||||
@@ -588,7 +588,7 @@ void QSoundEffect::stop()
|
||||
The \c statusChanged signal is emitted when the status property has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::statusChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::statusChanged()
|
||||
|
||||
The \c statusChanged signal is emitted when the status property has changed.
|
||||
|
||||
@@ -601,7 +601,7 @@ void QSoundEffect::stop()
|
||||
The \c categoryChanged signal is emitted when the category property has changed.
|
||||
*/
|
||||
/*!
|
||||
\qmlsignal QtMultimedia5::SoundEffect::categoryChanged()
|
||||
\qmlsignal QtMultimedia::SoundEffect::categoryChanged()
|
||||
|
||||
The \c categoryChanged signal is emitted when the category property has changed.
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
|
||||
Qt Multimedia offers a range of audio classes, covering both low and
|
||||
high level approaches to audio input, output and processing. In
|
||||
addition to traditional audio usage, the \l {Positional Audio}{Qt AudioEngine}
|
||||
addition to traditional audio usage, the \l{Qt Audio Engine QML Types}{Qt Audio Engine}
|
||||
QML types offer high level 3D positional audio for QML applications.
|
||||
See that documentation for more information.
|
||||
|
||||
|
||||
@@ -43,8 +43,8 @@ on BB10.
|
||||
|
||||
The Qt Multimedia BlackBerry backend uses mmrenderer for media playback.
|
||||
|
||||
For the positional audio classes in the \l {Positional Audio} {QtAudioEngine} QML
|
||||
module, OpenAL is used as on all other platforms.
|
||||
For the positional audio classes in the \l{Qt Audio Engine QML Types}{Qt Audio Engine}
|
||||
QML module, OpenAL is used as on all other platforms.
|
||||
|
||||
For recording videos and taking photos, the camapi library is used.
|
||||
|
||||
@@ -63,7 +63,7 @@ mmrenderer creates a separate window displaying a video and puts that on top of
|
||||
As a consequence, no other widget or QML element can be put on top of the video, and QML shaders have
|
||||
no effect.
|
||||
|
||||
The \l {Positional Audio} {QtAudioEngine} QML module is fully supported, as it is based on OpenAL which is available
|
||||
The \l{Qt Audio Engine QML Types}{Qt Audio Engine} QML module is fully supported, as it is based on OpenAL which is available
|
||||
in BB10.
|
||||
|
||||
The \l {camera} {QCamera} support includes recording of videos and taking photos. The viewfinder
|
||||
|
||||
@@ -46,7 +46,7 @@ Here are some examples of what can be done with Qt Multimedia APIs:
|
||||
\li Record audio and compress it
|
||||
\li Tune and listen to radio stations
|
||||
\li Use a camera, including viewfinder, image capture, and movie recording
|
||||
\li Play 3D positional audio with \l {Positional Audio} {QtAudioEngine}
|
||||
\li Play 3D positional audio with \l{Qt Audio Engine QML Types}{Qt Audio Engine}
|
||||
\li Decode audio media files into memory for processing
|
||||
\li Access video frames or audio buffers as they are played or recorded
\endlist
|
||||
@@ -180,7 +180,7 @@ The QML types are accessed by using:
|
||||
import QtMultimedia 5.0
|
||||
\endcode
|
||||
\annotatedlist multimedia_qml
|
||||
The following types are accessed by using \l {Positional Audio} {QtAudioEngine}:
|
||||
The following types are accessed by using \l{Qt Audio Engine QML Types}{Qt Audio Engine}:
|
||||
\code
|
||||
import QtAudioEngine 1.0
|
||||
\endcode
|
||||
|
||||
@@ -26,54 +26,34 @@
|
||||
****************************************************************************/
|
||||
|
||||
/*!
|
||||
\page audioengineoverview.html
|
||||
\title Positional Audio
|
||||
\brief 3D positional audio playback and content management
|
||||
\qmlmodule QtAudioEngine 1.0
|
||||
\title Qt Audio Engine QML Types
|
||||
\ingroup qmlmodules
|
||||
\brief Provides QML types for 3D positional audio playback and content management.
|
||||
|
||||
\section1 QtAudioEngine Features
|
||||
Qt Audio Engine is part of the \l{Qt Multimedia} module. Qt Audio
|
||||
Engine provides types for 3D positional audio playback and content management.
|
||||
|
||||
Qt Multimedia includes the \c QtAudioEngine QML module for
|
||||
providing 3D positional audio playback and content management.
|
||||
|
||||
QtAudioEngine enables developers to organize wave files into discrete \l Sound with different
|
||||
\l {PlayVariation}{play variations}, group sound controls by \l {AudioCategory} categories and
|
||||
define \l {AttenuationModelLinear}{attenuation models} and various 3d audio settings all in one
|
||||
place. Playback of \l {SoundInstance}{sound instances} can be conveniently activated by in-app
|
||||
events and managed by QtAudioEngine or controlled by explicitly defining \l SoundInstance
|
||||
for easier QML bindings.
|
||||
|
||||
To access these QML types import the
|
||||
\b{QtAudioEngine 1.0} module.
|
||||
|
||||
\qml
|
||||
import QtQuick 2.0
|
||||
The QML types can be imported into your application using the following import
|
||||
statement in your .qml file:
|
||||
\code
|
||||
import QtAudioEngine 1.0
|
||||
\endcode
|
||||
|
||||
AudioEngine {
|
||||
//...
|
||||
}
|
||||
\endqml
|
||||
\section1 Qt Audio Engine Features
|
||||
|
||||
Qt Audio Engine enables developers to organize wave files into discrete \l Sound
|
||||
with different \l {PlayVariation}{play variations}, group sound controls by \l
|
||||
{AudioCategory} categories and define \l {AttenuationModelLinear}{attenuation
|
||||
models} and various 3D audio settings all in one place. Playback of \l
|
||||
{SoundInstance}{sound instances} can be conveniently activated by in-app events
|
||||
and managed by QtAudioEngine or controlled by explicitly defining \l
|
||||
SoundInstance for easier QML bindings.
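As a rough illustration of that structure (the names and the wave file below
are placeholders, not taken from any example shipped with the module):

\qml
import QtQuick 2.0
import QtAudioEngine 1.0

AudioEngine {
    AudioCategory {
        name: "sfx"
        volume: 0.8
    }
    AudioSample {
        name: "explosionSample"
        source: "explosion.wav"   // placeholder file
    }
    Sound {
        name: "explosion"
        category: "sfx"
        PlayVariation {
            sample: "explosionSample"
        }
    }
}
\endqml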
|
||||
|
||||
\section1 Examples
|
||||
\list
|
||||
\li \l {AudioEngine Example}{Audio Engine}
|
||||
\endlist
|
||||
|
||||
\section1 Reference Documentation
|
||||
|
||||
\section2 QML Types
|
||||
|
||||
\list
|
||||
\li \l AudioEngine
|
||||
\li \l AudioSample
|
||||
\li \l AudioCategory
|
||||
\li \l AttenuationModelLinear
|
||||
\li \l AttenuationModelInverse
|
||||
\li \l Sound
|
||||
\li \l PlayVariation
|
||||
\li \l AudioListener
|
||||
\li \l SoundInstance
|
||||
|
||||
\endlist
|
||||
|
||||
\section1 QML types
|
||||
*/
|
||||
@@ -33,7 +33,9 @@
|
||||
|
||||
Qt Multimedia is an essential module that provides a rich set of QML types
|
||||
and C++ classes to handle multimedia content. It also provides necessary
|
||||
APIs to access the camera and radio functionality.
|
||||
APIs to access the camera and radio functionality. The included
|
||||
\l{Qt Audio Engine QML Types}{Qt Audio Engine} provides types for
|
||||
3D positional audio playback and management.
|
||||
|
||||
The \l{Qt Multimedia Widgets} module provides widget based multimedia
|
||||
classes.
|
||||
@@ -74,20 +76,20 @@
|
||||
\li Type
|
||||
\li Description
|
||||
\row
|
||||
\li \l {QtMultimedia5::Audio}{Audio}
|
||||
\li \l {QtMultimedia::Audio}{Audio}
|
||||
\li Add audio playback functionality to a scene
|
||||
\row
|
||||
\li \l {QtMultimedia5::Camera}{Camera}
|
||||
\li \l {QtMultimedia::Camera}{Camera}
|
||||
\li Access camera viewfinder frames
|
||||
\row
|
||||
\li MediaPlayer
|
||||
\li Add media playback functionality to a scene. It is the same as the Audio type,
|
||||
but can be used for video playback with the VideoOutput type.
|
||||
\row
|
||||
\li \l {QtMultimedia5::Radio}{Radio}
|
||||
\li \l {QtMultimedia::Radio}{Radio}
|
||||
\li Access radio functionality
|
||||
\row
|
||||
\li \l {QtMultimedia5::Video}{Video}
|
||||
\li \l {QtMultimedia::Video}{Video}
|
||||
\li Add Video playback functionality to a scene. It uses MediaPlayer and
|
||||
VideoOutput types to provide video playback functionality.
|
||||
\endtable
|
||||
@@ -122,7 +124,6 @@
|
||||
\list
|
||||
\li \l Multimedia - overview of multimedia support in Qt
|
||||
\li \l{Audio Overview}
|
||||
\li \l{Positional Audio}
|
||||
\li \l{Video Overview}
|
||||
\li \l{Camera Overview}
|
||||
\li \l{Radio Overview}
|
||||
@@ -135,10 +136,20 @@
|
||||
\endlist
|
||||
|
||||
\section2 Reference
|
||||
\list
|
||||
\li \l{Qt Multimedia QML Types}{QML Types}
|
||||
\li \l{Qt Multimedia C++ Classes}{C++ Classes}
|
||||
\endlist
|
||||
\list
|
||||
\li Qt Multimedia
|
||||
\list
|
||||
\li \l{Qt Multimedia QML Types}{QML Types}
|
||||
\li \l{Qt Multimedia C++ Classes}{C++ Classes}
|
||||
\endlist
|
||||
\endlist
|
||||
|
||||
\list
|
||||
\li Qt Audio Engine
|
||||
\list
|
||||
\li \l{Qt Audio Engine QML Types}{QML Types}
|
||||
\endlist
|
||||
\endlist
|
||||
|
||||
\section2 Examples
|
||||
\list
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
****************************************************************************/
|
||||
|
||||
/*!
|
||||
\qmlmodule QtMultimedia 5
|
||||
\qmlmodule QtMultimedia 5.0
|
||||
\title Qt Multimedia QML Types
|
||||
\ingroup qmlmodules
|
||||
\brief Provides QML types for multimedia support.
|
||||
|
||||
@@ -62,7 +62,8 @@ ANDROID_BUNDLED_FILES += \
|
||||
lib/libQt5MultimediaQuick_p.so
|
||||
MODULE_PLUGIN_TYPES = \
|
||||
mediaservice \
|
||||
audio
|
||||
audio \
|
||||
video/videonode
|
||||
|
||||
win32:LIBS += -luuid
|
||||
|
||||
|
||||
@@ -433,10 +433,8 @@ void QMediaPlaylistNavigator::jump(int position)
|
||||
{
|
||||
Q_D(QMediaPlaylistNavigator);
|
||||
|
||||
if (position<-1 || position>=d->playlist->mediaCount()) {
|
||||
qWarning() << "QMediaPlaylistNavigator: Jump outside playlist range";
|
||||
if (position < -1 || position >= d->playlist->mediaCount())
|
||||
position = -1;
|
||||
}
|
||||
|
||||
if (position != -1)
|
||||
d->lastValidPos = position;
|
||||
|
||||
@@ -69,7 +69,7 @@ private:
|
||||
int m_orientation;
|
||||
};
|
||||
|
||||
class QSGVideoNodeFactoryInterface
|
||||
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
|
||||
{
|
||||
public:
|
||||
virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const = 0;
|
||||
@@ -79,7 +79,7 @@ public:
|
||||
#define QSGVideoNodeFactoryInterface_iid "org.qt-project.qt.sgvideonodefactory/5.0"
|
||||
Q_DECLARE_INTERFACE(QSGVideoNodeFactoryInterface, QSGVideoNodeFactoryInterface_iid)
|
||||
|
||||
class QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface
|
||||
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface
|
||||
{
|
||||
Q_OBJECT
|
||||
Q_INTERFACES(QSGVideoNodeFactoryInterface)
|
||||
|
||||
@@ -3,3 +3,6 @@ TEMPLATE = subdirs
|
||||
SUBDIRS += src \
|
||||
jar
|
||||
|
||||
qtHaveModule(quick) {
|
||||
SUBDIRS += videonode
|
||||
}
|
||||
|
||||
@@ -45,7 +45,10 @@ import android.hardware.Camera;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.util.Log;
|
||||
|
||||
public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback, Camera.AutoFocusCallback
|
||||
public class QtCamera implements Camera.ShutterCallback,
|
||||
Camera.PictureCallback,
|
||||
Camera.AutoFocusCallback,
|
||||
Camera.PreviewCallback
|
||||
{
|
||||
private int m_cameraId = -1;
|
||||
private Camera m_camera = null;
|
||||
@@ -149,6 +152,11 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
|
||||
m_camera.cancelAutoFocus();
|
||||
}
|
||||
|
||||
public void requestPreviewFrame()
|
||||
{
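// Ask the camera for a single preview frame; it is delivered once to
// onPreviewFrame() below, typically in the NV21 preview format.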
m_camera.setOneShotPreviewCallback(this);
|
||||
}
|
||||
|
||||
public void takePicture()
|
||||
{
|
||||
try {
|
||||
@@ -170,6 +178,12 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
|
||||
notifyPictureCaptured(m_cameraId, data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPreviewFrame(byte[] data, Camera camera)
|
||||
{
|
||||
notifyPreviewFrame(m_cameraId, data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAutoFocus(boolean success, Camera camera)
|
||||
{
|
||||
@@ -179,4 +193,5 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
|
||||
private static native void notifyAutoFocusComplete(int id, boolean success);
|
||||
private static native void notifyPictureExposed(int id);
|
||||
private static native void notifyPictureCaptured(int id, byte[] data);
|
||||
private static native void notifyPreviewFrame(int id, byte[] data);
|
||||
}
|
||||
|
||||
@@ -76,4 +76,29 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
|
||||
return s1.width() * s1.height() < s2.width() * s2.height();
|
||||
}
|
||||
|
||||
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height)
|
||||
{
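// NV21 layout: a full-resolution Y plane followed by a half-height plane of
// interleaved V/U samples (2x2 chroma subsampling). The constants below are
// the usual BT.601 limited-range YUV-to-RGB conversion factors.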
const int frameSize = width * height;
|
||||
|
||||
int a = 0;
|
||||
for (int i = 0, ci = 0; i < height; ++i, ci += 1) {
|
||||
for (int j = 0, cj = 0; j < width; ++j, cj += 1) {
|
||||
int y = (0xff & ((int) yuv[ci * width + cj]));
|
||||
int v = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 0]));
|
||||
int u = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 1]));
|
||||
y = y < 16 ? 16 : y;
|
||||
|
||||
int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
|
||||
int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
|
||||
int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
|
||||
|
||||
r = qBound(0, r, 255);
|
||||
g = qBound(0, g, 255);
|
||||
b = qBound(0, b, 255);
|
||||
|
||||
rgb[a++] = 0xff000000 | (r << 16) | (g << 8) | b;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -53,6 +53,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
|
||||
|
||||
bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
|
||||
|
||||
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height);
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // QANDROIDMULTIMEDIAUTILS_H
|
||||
|
||||
@@ -48,26 +48,26 @@
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
typedef void (*TextureReadyCallback)(void*);
|
||||
|
||||
class QAndroidVideoOutput
|
||||
{
|
||||
public:
|
||||
QAndroidVideoOutput() { }
|
||||
virtual ~QAndroidVideoOutput() { }
|
||||
|
||||
virtual jobject surfaceHolder() = 0;
|
||||
virtual jobject surfaceTexture() { return 0; }
|
||||
|
||||
virtual bool isTextureReady() = 0;
|
||||
virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
|
||||
virtual jobject surfaceTexture() = 0;
|
||||
virtual bool isReady() { return true; }
|
||||
|
||||
virtual void setVideoSize(const QSize &size) = 0;
|
||||
virtual void stop() = 0;
|
||||
virtual void setVideoSize(const QSize &) { }
|
||||
virtual void stop() { }
|
||||
|
||||
virtual QImage toImage() = 0;
|
||||
// signals:
|
||||
// void readyChanged(bool);
|
||||
};
|
||||
|
||||
#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0"
|
||||
Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid)
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // QANDROIDVIDEOOUTPUT_H
|
||||
|
||||
@@ -44,39 +44,31 @@
|
||||
#include <QtCore/private/qjni_p.h>
|
||||
#include "jsurfacetextureholder.h"
|
||||
#include <QAbstractVideoSurface>
|
||||
#include <QOpenGLContext>
|
||||
#include <QOffscreenSurface>
|
||||
#include <QOpenGLFramebufferObject>
|
||||
#include <QVideoSurfaceFormat>
|
||||
#include <QOpenGLFunctions>
|
||||
#include <QOpenGLShaderProgram>
|
||||
#include <qevent.h>
|
||||
#include <qcoreapplication.h>
|
||||
#include <qopenglcontext.h>
|
||||
#include <qopenglfunctions.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
static const GLfloat g_vertex_data[] = {
|
||||
-1.f, 1.f,
|
||||
1.f, 1.f,
|
||||
1.f, -1.f,
|
||||
-1.f, -1.f
|
||||
};
|
||||
#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
|
||||
|
||||
static const GLfloat g_texture_data[] = {
|
||||
0.f, 0.f,
|
||||
1.f, 0.f,
|
||||
1.f, 1.f,
|
||||
0.f, 1.f
|
||||
};
|
||||
TextureDeleter::~TextureDeleter()
|
||||
{
|
||||
glDeleteTextures(1, &m_id);
|
||||
}
|
||||
|
||||
class TextureVideoBuffer : public QAbstractVideoBuffer
|
||||
class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
|
||||
{
|
||||
public:
|
||||
TextureVideoBuffer(GLuint textureId)
|
||||
: QAbstractVideoBuffer(GLTextureHandle)
|
||||
, m_textureId(textureId)
|
||||
{}
|
||||
AndroidTextureVideoBuffer(JSurfaceTexture *surface)
|
||||
: QAbstractVideoBuffer(ExternalGLTextureHandle)
|
||||
, m_surfaceTexture(surface)
|
||||
{
|
||||
}
|
||||
|
||||
virtual ~TextureVideoBuffer() {}
|
||||
virtual ~AndroidTextureVideoBuffer() {}
|
||||
|
||||
MapMode mapMode() const { return NotMapped; }
|
||||
uchar *map(MapMode, int*, int*) { return 0; }
|
||||
@@ -84,67 +76,33 @@ public:
|
||||
|
||||
QVariant handle() const
|
||||
{
|
||||
return QVariant::fromValue<unsigned int>(m_textureId);
|
||||
}
|
||||
|
||||
private:
|
||||
GLuint m_textureId;
|
||||
};
|
||||
|
||||
class ImageVideoBuffer : public QAbstractVideoBuffer
|
||||
{
|
||||
public:
|
||||
ImageVideoBuffer(const QImage &image)
|
||||
: QAbstractVideoBuffer(NoHandle)
|
||||
, m_image(image)
|
||||
, m_mode(NotMapped)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
MapMode mapMode() const { return m_mode; }
|
||||
uchar *map(MapMode mode, int *, int *)
|
||||
{
|
||||
if (mode != NotMapped && m_mode == NotMapped) {
|
||||
m_mode = mode;
|
||||
return m_image.bits();
|
||||
if (m_data.isEmpty()) {
|
||||
// update the video texture (called from the render thread)
|
||||
m_surfaceTexture->updateTexImage();
|
||||
m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void unmap()
|
||||
{
|
||||
m_mode = NotMapped;
|
||||
return m_data;
|
||||
}
|
||||
|
||||
private:
|
||||
QImage m_image;
|
||||
MapMode m_mode;
|
||||
mutable JSurfaceTexture *m_surfaceTexture;
|
||||
mutable QVariantList m_data;
|
||||
};
|
||||
|
||||
QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
|
||||
: QVideoRendererControl(parent)
|
||||
, m_surface(0)
|
||||
, m_offscreenSurface(0)
|
||||
, m_glContext(0)
|
||||
, m_fbo(0)
|
||||
, m_program(0)
|
||||
, m_useImage(false)
|
||||
, m_androidSurface(0)
|
||||
, m_surfaceTexture(0)
|
||||
, m_surfaceHolder(0)
|
||||
, m_externalTex(0)
|
||||
, m_textureReadyCallback(0)
|
||||
, m_textureReadyContext(0)
|
||||
, m_textureDeleter(0)
|
||||
{
|
||||
}
|
||||
|
||||
QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
|
||||
{
|
||||
if (m_glContext)
|
||||
m_glContext->makeCurrent(m_offscreenSurface);
|
||||
|
||||
if (m_surfaceTexture) {
|
||||
m_surfaceTexture->callMethod<void>("release");
|
||||
delete m_surfaceTexture;
|
||||
@@ -159,13 +117,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
|
||||
delete m_surfaceHolder;
|
||||
m_surfaceHolder = 0;
|
||||
}
|
||||
if (m_externalTex)
|
||||
glDeleteTextures(1, &m_externalTex);
|
||||
|
||||
delete m_fbo;
|
||||
delete m_program;
|
||||
delete m_glContext;
|
||||
delete m_offscreenSurface;
|
||||
if (m_textureDeleter)
|
||||
m_textureDeleter->deleteLater();
|
||||
}
|
||||
|
||||
QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
|
||||
@@ -178,28 +131,23 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
|
||||
if (surface == m_surface)
|
||||
return;
|
||||
|
||||
if (m_surface && m_surface->isActive()) {
|
||||
m_surface->stop();
|
||||
m_surface->removeEventFilter(this);
|
||||
if (m_surface) {
|
||||
if (m_surface->isActive())
|
||||
m_surface->stop();
|
||||
m_surface->setProperty("_q_GLThreadCallback", QVariant());
|
||||
}
|
||||
|
||||
m_surface = surface;
|
||||
|
||||
if (m_surface) {
|
||||
m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
|
||||
m_surface->installEventFilter(this);
|
||||
m_surface->setProperty("_q_GLThreadCallback",
|
||||
QVariant::fromValue<QObject*>(this));
|
||||
}
|
||||
}
|
||||
|
||||
bool QAndroidVideoRendererControl::isTextureReady()
|
||||
bool QAndroidVideoRendererControl::isReady()
|
||||
{
|
||||
return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid());
|
||||
}
|
||||
|
||||
void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
|
||||
{
|
||||
m_textureReadyCallback = cb;
|
||||
m_textureReadyContext = context;
|
||||
return QOpenGLContext::currentContext() || m_externalTex;
|
||||
}
|
||||
|
||||
bool QAndroidVideoRendererControl::initSurfaceTexture()
|
||||
@@ -210,45 +158,15 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
|
||||
if (!m_surface)
|
||||
return false;
|
||||
|
||||
QOpenGLContext *currContext = QOpenGLContext::currentContext();
|
||||
|
||||
// If we don't have a GL context in the current thread, create one and share it
|
||||
// with the render thread GL context
|
||||
if (!currContext && !m_glContext) {
|
||||
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
|
||||
if (!shareContext)
|
||||
return false;
|
||||
|
||||
m_offscreenSurface = new QOffscreenSurface;
|
||||
QSurfaceFormat format;
|
||||
format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
|
||||
m_offscreenSurface->setFormat(format);
|
||||
m_offscreenSurface->create();
|
||||
|
||||
m_glContext = new QOpenGLContext;
|
||||
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
|
||||
|
||||
if (shareContext)
|
||||
m_glContext->setShareContext(shareContext);
|
||||
|
||||
if (!m_glContext->create()) {
|
||||
delete m_glContext;
|
||||
m_glContext = 0;
|
||||
delete m_offscreenSurface;
|
||||
m_offscreenSurface = 0;
|
||||
return false;
|
||||
}
|
||||
|
||||
// if sharing contexts is not supported, fallback to image rendering and send the bits
|
||||
// to the video surface
|
||||
if (!m_glContext->shareContext())
|
||||
m_useImage = true;
|
||||
// if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
|
||||
// for the GL render thread to call us back to do it.
|
||||
if (QOpenGLContext::currentContext()) {
|
||||
glGenTextures(1, &m_externalTex);
|
||||
m_textureDeleter = new TextureDeleter(m_externalTex);
|
||||
} else if (!m_externalTex) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (m_glContext)
|
||||
m_glContext->makeCurrent(m_offscreenSurface);
|
||||
|
||||
glGenTextures(1, &m_externalTex);
|
||||
m_surfaceTexture = new JSurfaceTexture(m_externalTex);
|
||||
|
||||
if (m_surfaceTexture->isValid()) {
|
||||
@@ -256,7 +174,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
|
||||
} else {
|
||||
delete m_surfaceTexture;
|
||||
m_surfaceTexture = 0;
|
||||
glDeleteTextures(1, &m_externalTex);
|
||||
m_textureDeleter->deleteLater();
|
||||
m_externalTex = 0;
|
||||
m_textureDeleter = 0;
|
||||
}
|
||||
|
||||
return m_surfaceTexture != 0;
|
||||
@@ -294,9 +214,6 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
|
||||
stop();
|
||||
|
||||
m_nativeSize = size;
|
||||
|
||||
delete m_fbo;
|
||||
m_fbo = 0;
|
||||
}
|
||||
|
||||
void QAndroidVideoRendererControl::stop()
|
||||
@@ -306,133 +223,40 @@ void QAndroidVideoRendererControl::stop()
|
||||
m_nativeSize = QSize();
|
||||
}
|
||||
|
||||
QImage QAndroidVideoRendererControl::toImage()
|
||||
{
|
||||
if (!m_fbo)
|
||||
return QImage();
|
||||
|
||||
return m_fbo->toImage().mirrored();
|
||||
}
|
||||
|
||||
void QAndroidVideoRendererControl::onFrameAvailable()
|
||||
{
|
||||
if (m_glContext)
|
||||
m_glContext->makeCurrent(m_offscreenSurface);
|
||||
|
||||
m_surfaceTexture->updateTexImage();
|
||||
|
||||
if (!m_nativeSize.isValid())
|
||||
if (!m_nativeSize.isValid() || !m_surface)
|
||||
return;
|
||||
|
||||
renderFrameToFbo();
|
||||
QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
|
||||
QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
|
||||
|
||||
QAbstractVideoBuffer *buffer = 0;
|
||||
QVideoFrame frame;
|
||||
|
||||
if (m_useImage) {
|
||||
buffer = new ImageVideoBuffer(m_fbo->toImage().mirrored());
|
||||
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_RGB32);
|
||||
} else {
|
||||
buffer = new TextureVideoBuffer(m_fbo->texture());
|
||||
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
|
||||
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|
||||
|| m_surface->nativeResolution() != frame.size())) {
|
||||
m_surface->stop();
|
||||
}
|
||||
|
||||
if (m_surface && frame.isValid()) {
|
||||
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|
||||
|| m_surface->nativeResolution() != frame.size())) {
|
||||
m_surface->stop();
|
||||
}
|
||||
if (!m_surface->isActive()) {
|
||||
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
|
||||
format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
|
||||
|
||||
if (!m_surface->isActive()) {
|
||||
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(),
|
||||
m_useImage ? QAbstractVideoBuffer::NoHandle
|
||||
: QAbstractVideoBuffer::GLTextureHandle);
|
||||
|
||||
m_surface->start(format);
|
||||
}
|
||||
|
||||
if (m_surface->isActive())
|
||||
m_surface->present(frame);
|
||||
m_surface->start(format);
|
||||
}
|
||||
|
||||
if (m_surface->isActive())
|
||||
m_surface->present(frame);
|
||||
}
|
||||
|
||||
void QAndroidVideoRendererControl::renderFrameToFbo()
|
||||
void QAndroidVideoRendererControl::customEvent(QEvent *e)
|
||||
{
|
||||
createGLResources();
|
||||
|
||||
m_fbo->bind();
|
||||
|
||||
glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height());
|
||||
|
||||
m_program->bind();
|
||||
m_program->enableAttributeArray(0);
|
||||
m_program->enableAttributeArray(1);
|
||||
m_program->setUniformValue("frameTexture", GLuint(0));
|
||||
m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());
|
||||
|
||||
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
|
||||
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);
|
||||
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
|
||||
m_program->disableAttributeArray(0);
|
||||
m_program->disableAttributeArray(1);
|
||||
m_program->release();
|
||||
|
||||
glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
m_fbo->release();
|
||||
|
||||
glFinish();
|
||||
}
|
||||
|
||||
void QAndroidVideoRendererControl::createGLResources()
{
    if (!m_fbo)
        m_fbo = new QOpenGLFramebufferObject(m_nativeSize);

    if (!m_program) {
        m_program = new QOpenGLShaderProgram;

        QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
        vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
                                        "attribute highp vec2 textureCoordArray; \n" \
                                        "uniform highp mat4 texMatrix; \n" \
                                        "varying highp vec2 textureCoords; \n" \
                                        "void main(void) \n" \
                                        "{ \n" \
                                        " gl_Position = vertexCoordsArray; \n" \
                                        " textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
                                        "}\n");
        m_program->addShader(vertexShader);

        QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
        fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
                                          "varying highp vec2 textureCoords; \n" \
                                          "uniform samplerExternalOES frameTexture; \n" \
                                          "void main() \n" \
                                          "{ \n" \
                                          " gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
                                          "}\n");
        m_program->addShader(fragmentShader);

        m_program->bindAttributeLocation("vertexCoordsArray", 0);
        m_program->bindAttributeLocation("textureCoordArray", 1);
        m_program->link();
    }
}

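The fragment shader built above requires GL_OES_EGL_image_external. An optional guard, not part of the patch, could check for the extension before building the program and otherwise fall back to the readback path, assuming QOpenGLContext::hasExtension() is available in the Qt version used:

#include <QByteArray>
#include <QOpenGLContext>

// Sketch: true when the current context exposes the external-image extension needed
// by samplerExternalOES; callers could fall back to the m_useImage path otherwise.
static bool hasExternalImageExtension()
{
    QOpenGLContext *ctx = QOpenGLContext::currentContext();
    return ctx && ctx->hasExtension(QByteArrayLiteral("GL_OES_EGL_image_external"));
}
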
bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
{
    if (e->type() == QEvent::DynamicPropertyChange) {
        QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
        if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
            m_textureReadyCallback(m_textureReadyContext);
            m_textureReadyCallback = 0;
            m_textureReadyContext = 0;
    if (e->type() == QEvent::User) {
        // This is running in the render thread (OpenGL enabled)
        if (!m_externalTex) {
            glGenTextures(1, &m_externalTex);
            m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
            emit readyChanged(true);
        }
    }

    return false;
}

QT_END_NAMESPACE

@@ -48,15 +48,23 @@
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class QOpenGLContext;
|
||||
class QOffscreenSurface;
|
||||
class QOpenGLFramebufferObject;
|
||||
class QOpenGLShaderProgram;
|
||||
class JSurfaceTextureHolder;
|
||||
|
||||
class TextureDeleter : public QObject
|
||||
{
|
||||
Q_OBJECT
|
||||
public:
|
||||
TextureDeleter(uint id) : m_id(id) { }
|
||||
~TextureDeleter();
|
||||
|
||||
private:
|
||||
uint m_id;
|
||||
};
|
||||
|
||||
class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
|
||||
{
|
||||
Q_OBJECT
|
||||
Q_INTERFACES(QAndroidVideoOutput)
|
||||
public:
|
||||
explicit QAndroidVideoRendererControl(QObject *parent = 0);
|
||||
~QAndroidVideoRendererControl() Q_DECL_OVERRIDE;
|
||||
@@ -65,38 +73,30 @@ public:
|
||||
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;
|
||||
|
||||
jobject surfaceHolder() Q_DECL_OVERRIDE;
|
||||
bool isTextureReady() Q_DECL_OVERRIDE;
|
||||
void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
|
||||
jobject surfaceTexture() Q_DECL_OVERRIDE;
|
||||
bool isReady() Q_DECL_OVERRIDE;
|
||||
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
|
||||
void stop() Q_DECL_OVERRIDE;
|
||||
QImage toImage() Q_DECL_OVERRIDE;
|
||||
|
||||
bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE;
|
||||
void customEvent(QEvent *) Q_DECL_OVERRIDE;
|
||||
|
||||
Q_SIGNALS:
|
||||
void readyChanged(bool);
|
||||
|
||||
private Q_SLOTS:
|
||||
void onFrameAvailable();
|
||||
|
||||
private:
|
||||
bool initSurfaceTexture();
|
||||
void renderFrameToFbo();
|
||||
void createGLResources();
|
||||
|
||||
QAbstractVideoSurface *m_surface;
|
||||
QOffscreenSurface *m_offscreenSurface;
|
||||
QOpenGLContext *m_glContext;
|
||||
QOpenGLFramebufferObject *m_fbo;
|
||||
QOpenGLShaderProgram *m_program;
|
||||
bool m_useImage;
|
||||
QSize m_nativeSize;
|
||||
|
||||
QJNIObjectPrivate *m_androidSurface;
|
||||
JSurfaceTexture *m_surfaceTexture;
|
||||
JSurfaceTextureHolder *m_surfaceHolder;
|
||||
uint m_externalTex;
|
||||
|
||||
TextureReadyCallback m_textureReadyCallback;
|
||||
void *m_textureReadyContext;
|
||||
TextureDeleter *m_textureDeleter;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -52,12 +52,6 @@
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
static void textureReadyCallback(void *context)
|
||||
{
|
||||
if (context)
|
||||
reinterpret_cast<QAndroidCameraSession *>(context)->onSurfaceTextureReady();
|
||||
}
|
||||
|
||||
QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
|
||||
: QObject(parent)
|
||||
, m_selectedCamera(0)
|
||||
@@ -153,8 +147,11 @@ bool QAndroidCameraSession::open()
|
||||
if (m_camera) {
|
||||
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
|
||||
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
|
||||
connect(m_camera, SIGNAL(previewFrameAvailable(QByteArray)), this, SLOT(onCameraPreviewFrameAvailable(QByteArray)));
|
||||
m_nativeOrientation = m_camera->getNativeOrientation();
|
||||
m_status = QCamera::LoadedStatus;
|
||||
if (m_camera->getPreviewFormat() != JCamera::NV21)
|
||||
m_camera->setPreviewFormat(JCamera::NV21);
|
||||
emit opened();
|
||||
} else {
|
||||
m_status = QCamera::UnavailableStatus;
|
||||
@@ -188,12 +185,17 @@ void QAndroidCameraSession::close()
|
||||
emit statusChanged(m_status);
|
||||
}
|
||||
|
||||
void QAndroidCameraSession::setVideoPreview(QAndroidVideoOutput *videoOutput)
|
||||
void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
|
||||
{
|
||||
if (m_videoOutput)
|
||||
m_videoOutput->stop();
|
||||
|
||||
m_videoOutput = videoOutput;
|
||||
if (videoOutput) {
|
||||
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
|
||||
m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
|
||||
} else {
|
||||
m_videoOutput = 0;
|
||||
}
|
||||
}
|
||||
|
||||
void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview)
|
||||
@@ -243,12 +245,8 @@ void QAndroidCameraSession::startPreview()
|
||||
applyImageSettings();
|
||||
adjustViewfinderSize(m_imageSettings.resolution());
|
||||
|
||||
if (m_videoOutput) {
|
||||
if (m_videoOutput->isTextureReady())
|
||||
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
|
||||
else
|
||||
m_videoOutput->setTextureReadyCallback(textureReadyCallback, this);
|
||||
}
|
||||
if (m_videoOutput && m_videoOutput->isReady())
|
||||
onVideoOutputReady(true);
|
||||
|
||||
JMultimediaUtils::enableOrientationListener(true);
|
||||
|
||||
@@ -427,6 +425,7 @@ int QAndroidCameraSession::capture(const QString &fileName)
|
||||
// adjust picture rotation depending on the device orientation
|
||||
m_camera->setRotation(currentCameraRotation());
|
||||
|
||||
m_camera->requestPreviewFrame();
|
||||
m_camera->takePicture();
|
||||
} else {
|
||||
emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError,
|
||||
@@ -455,10 +454,6 @@ void QAndroidCameraSession::onCameraPictureExposed()
|
||||
void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
|
||||
{
|
||||
if (!m_captureCanceled) {
|
||||
// generate a preview from the viewport
|
||||
if (m_videoOutput)
|
||||
emit imageCaptured(m_currentImageCaptureId, m_videoOutput->toImage());
|
||||
|
||||
// Loading and saving the captured image can be slow, do it in a separate thread
|
||||
QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage,
|
||||
m_currentImageCaptureId,
|
||||
@@ -522,9 +517,37 @@ void QAndroidCameraSession::processCapturedImage(int id,
|
||||
}
|
||||
}
|
||||
|
||||
void QAndroidCameraSession::onSurfaceTextureReady()
void QAndroidCameraSession::onCameraPreviewFrameAvailable(const QByteArray &data)
{
    if (m_camera && m_videoOutput)
    if (m_captureCanceled || m_readyForCapture)
        return;

    QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
                      m_currentImageCaptureId,
                      data);
}

void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data)
{
    QSize frameSize = m_camera->previewSize();
    QImage preview(frameSize, QImage::Format_ARGB32);
    qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
                              (quint32 *)preview.bits(),
                              frameSize.width(),
                              frameSize.height());

    // Preview display of front-facing cameras is flipped horizontally, but the frame data
    // we get here is not. Flip it ourselves if the camera is front-facing to match what the user
    // sees on the viewfinder.
    if (m_camera->getFacing() == JCamera::CameraFacingFront)
        preview = preview.transformed(QTransform().scale(-1, 1));

    emit imageCaptured(id, preview);
}

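qt_convert_NV21_to_ARGB32() is used above but defined elsewhere in Qt Multimedia. For reference, a self-contained sketch of the same conversion — NV21 stores a full-resolution Y plane followed by a half-resolution interleaved V/U plane — using the common BT.601 integer approximation (function name and coefficients are illustrative, not the library's implementation):

#include <QImage>
#include <QtGlobal>

// Sketch of an NV21 -> ARGB32 conversion; each 2x2 block of pixels shares one V/U pair.
static void convertNV21toARGB32(const uchar *nv21, quint32 *argb, int width, int height)
{
    const uchar *yPlane = nv21;
    const uchar *vuPlane = nv21 + width * height;   // interleaved V,U rows, stride == width

    for (int y = 0; y < height; ++y) {
        const uchar *vuRow = vuPlane + (y / 2) * width;
        for (int x = 0; x < width; ++x) {
            int Y = yPlane[y * width + x];
            int V = vuRow[(x & ~1)] - 128;          // V comes first in NV21
            int U = vuRow[(x & ~1) + 1] - 128;

            // BT.601 integer approximation
            int C = Y - 16;
            int r = (298 * C + 409 * V + 128) >> 8;
            int g = (298 * C - 100 * U - 208 * V + 128) >> 8;
            int b = (298 * C + 516 * U + 128) >> 8;

            argb[y * width + x] = 0xff000000
                    | (quint32(qBound(0, r, 255)) << 16)
                    | (quint32(qBound(0, g, 255)) << 8)
                    |  quint32(qBound(0, b, 255));
        }
    }
}
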
void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
    if (m_camera && m_videoOutput && ready)
        m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
}

@@ -71,7 +71,7 @@ public:
|
||||
void setCaptureMode(QCamera::CaptureModes mode);
|
||||
bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
|
||||
|
||||
void setVideoPreview(QAndroidVideoOutput *videoOutput);
|
||||
void setVideoPreview(QObject *videoOutput);
|
||||
void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true);
|
||||
|
||||
QImageEncoderSettings imageSettings() const { return m_imageSettings; }
|
||||
@@ -88,8 +88,6 @@ public:
|
||||
int capture(const QString &fileName);
|
||||
void cancelCapture();
|
||||
|
||||
void onSurfaceTextureReady();
|
||||
|
||||
int currentCameraRotation() const;
|
||||
|
||||
Q_SIGNALS:
|
||||
@@ -110,10 +108,13 @@ Q_SIGNALS:
|
||||
void imageCaptureError(int id, int error, const QString &errorString);
|
||||
|
||||
private Q_SLOTS:
|
||||
void onVideoOutputReady(bool ready);
|
||||
|
||||
void onApplicationStateChanged(Qt::ApplicationState state);
|
||||
|
||||
void onCameraPictureExposed();
|
||||
void onCameraPictureCaptured(const QByteArray &data);
|
||||
void onCameraPreviewFrameAvailable(const QByteArray &data);
|
||||
|
||||
private:
|
||||
bool open();
|
||||
@@ -123,7 +124,7 @@ private:
|
||||
void stopPreview();
|
||||
|
||||
void applyImageSettings();
|
||||
void processPreviewImage(int id);
|
||||
void processPreviewImage(int id, const QByteArray &data);
|
||||
void processCapturedImage(int id,
|
||||
const QByteArray &data,
|
||||
QCameraImageCapture::CaptureDestinations dest,
|
||||
|
||||
@@ -88,7 +88,7 @@ private:
|
||||
QAndroidVideoDeviceSelectorControl *m_videoInputControl;
|
||||
QAndroidAudioInputSelectorControl *m_audioInputControl;
|
||||
QAndroidCameraSession *m_cameraSession;
|
||||
QAndroidVideoRendererControl *m_videoRendererControl;
|
||||
QMediaControl *m_videoRendererControl;
|
||||
QAndroidCameraZoomControl *m_cameraZoomControl;
|
||||
QAndroidCameraExposureControl *m_cameraExposureControl;
|
||||
QAndroidCameraFlashControl *m_cameraFlashControl;
|
||||
|
||||
@@ -110,25 +110,27 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
|
||||
|
||||
QUrl QAndroidCaptureSession::outputLocation() const
|
||||
{
|
||||
return m_outputLocation;
|
||||
return m_actualOutputLocation;
|
||||
}
|
||||
|
||||
bool QAndroidCaptureSession::setOutputLocation(const QUrl &location)
|
||||
{
|
||||
if (m_outputLocation == location)
|
||||
if (m_requestedOutputLocation == location)
|
||||
return false;
|
||||
|
||||
m_outputLocation = location;
|
||||
m_actualOutputLocation = QUrl();
|
||||
m_requestedOutputLocation = location;
|
||||
|
||||
if (m_outputLocation.isEmpty())
|
||||
if (m_requestedOutputLocation.isEmpty())
|
||||
return true;
|
||||
|
||||
if (m_outputLocation.isValid() && (m_outputLocation.isLocalFile() || m_outputLocation.isRelative())) {
|
||||
emit actualLocationChanged(m_outputLocation);
|
||||
if (m_requestedOutputLocation.isValid()
|
||||
&& (m_requestedOutputLocation.isLocalFile() || m_requestedOutputLocation.isRelative())) {
|
||||
emit actualLocationChanged(m_requestedOutputLocation);
|
||||
return true;
|
||||
}
|
||||
|
||||
m_outputLocation = QUrl();
|
||||
m_requestedOutputLocation = QUrl();
|
||||
return false;
|
||||
}
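From the QMediaRecorder side, the requested/actual split above means an empty or relative location is accepted up front and the concrete file only becomes known through actualLocationChanged(). A brief usage sketch (the camera object is a stand-in; not code from the patch):

#include <QCamera>
#include <QMediaRecorder>
#include <QUrl>
#include <QDebug>

// Hypothetical usage: 'camera' stands in for whatever media object owns the session.
void startClip(QCamera *camera)
{
    QMediaRecorder *recorder = new QMediaRecorder(camera);
    recorder->setOutputLocation(QUrl::fromLocalFile("clip"));   // relative names are accepted
    QObject::connect(recorder, &QMediaRecorder::actualLocationChanged,
                     [](const QUrl &url) { qDebug() << "recording to" << url; });
    recorder->record();   // the generated VID_*/REC_* path arrives via the signal
}
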
|
||||
|
||||
@@ -213,15 +215,18 @@ bool QAndroidCaptureSession::start()
|
||||
|
||||
|
||||
// Set output file
|
||||
QString filePath = m_mediaStorageLocation.generateFileName(m_outputLocation.isLocalFile() ? m_outputLocation.toLocalFile()
|
||||
: m_outputLocation.toString(),
|
||||
m_cameraSession ? QAndroidMediaStorageLocation::Camera
|
||||
: QAndroidMediaStorageLocation::Audio,
|
||||
m_cameraSession ? QLatin1String("VID_")
|
||||
: QLatin1String("REC_"),
|
||||
m_containerFormat);
|
||||
m_outputLocation = QUrl::fromLocalFile(filePath);
|
||||
emit actualLocationChanged(m_outputLocation);
|
||||
QString filePath = m_mediaStorageLocation.generateFileName(
|
||||
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
|
||||
: m_requestedOutputLocation.toString(),
|
||||
m_cameraSession ? QAndroidMediaStorageLocation::Camera
|
||||
: QAndroidMediaStorageLocation::Audio,
|
||||
m_cameraSession ? QLatin1String("VID_")
|
||||
: QLatin1String("REC_"),
|
||||
m_containerFormat);
|
||||
|
||||
m_actualOutputLocation = QUrl::fromLocalFile(filePath);
|
||||
if (m_actualOutputLocation != m_requestedOutputLocation)
|
||||
emit actualLocationChanged(m_actualOutputLocation);
|
||||
|
||||
m_mediaRecorder->setOutputFile(filePath);
|
||||
|
||||
@@ -280,7 +285,7 @@ void QAndroidCaptureSession::stop(bool error)
|
||||
// if the media is saved into the standard media location, register it
|
||||
// with the Android media scanner so it appears immediately in apps
|
||||
// such as the gallery.
|
||||
QString mediaPath = m_outputLocation.toLocalFile();
|
||||
QString mediaPath = m_actualOutputLocation.toLocalFile();
|
||||
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
|
||||
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
|
||||
if (mediaPath.startsWith(standardLoc))
|
||||
|
||||
@@ -160,7 +160,8 @@ private:
|
||||
|
||||
QMediaRecorder::State m_state;
|
||||
QMediaRecorder::Status m_status;
|
||||
QUrl m_outputLocation;
|
||||
QUrl m_requestedOutputLocation;
|
||||
QUrl m_actualOutputLocation;
|
||||
|
||||
CaptureProfile m_defaultSettings;
|
||||
|
||||
|
||||
@@ -45,12 +45,6 @@
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
static void textureReadyCallback(void *context)
|
||||
{
|
||||
if (context)
|
||||
reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
|
||||
}
|
||||
|
||||
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
|
||||
: QMediaPlayerControl(parent),
|
||||
mMediaPlayer(new JMediaPlayer),
|
||||
@@ -241,18 +235,18 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
|
||||
setSeekable(true);
|
||||
}
|
||||
|
||||
void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput)
|
||||
void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
|
||||
{
|
||||
if (mVideoOutput)
|
||||
mVideoOutput->stop();
|
||||
|
||||
mVideoOutput = videoOutput;
|
||||
mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
|
||||
|
||||
if (mVideoOutput && !mMediaPlayer->display()) {
|
||||
if (mVideoOutput->isTextureReady())
|
||||
if (mVideoOutput->isReady())
|
||||
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
|
||||
else
|
||||
mVideoOutput->setTextureReadyCallback(textureReadyCallback, this);
|
||||
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -426,9 +420,9 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
|
||||
mVideoOutput->setVideoSize(mVideoSize);
|
||||
}
|
||||
|
||||
void QAndroidMediaPlayerControl::onSurfaceTextureReady()
|
||||
void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
|
||||
{
|
||||
if (!mMediaPlayer->display() && mVideoOutput) {
|
||||
if (!mMediaPlayer->display() && mVideoOutput && ready) {
|
||||
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
|
||||
flushPendingStates();
|
||||
}
|
||||
|
||||
@@ -75,8 +75,7 @@ public:
|
||||
const QIODevice *mediaStream() const Q_DECL_OVERRIDE;
|
||||
void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;
|
||||
|
||||
void setVideoOutput(QAndroidVideoOutput *videoOutput);
|
||||
void onSurfaceTextureReady();
|
||||
void setVideoOutput(QObject *videoOutput);
|
||||
|
||||
Q_SIGNALS:
|
||||
void metaDataUpdated();
|
||||
@@ -90,6 +89,7 @@ public Q_SLOTS:
|
||||
void setMuted(bool muted) Q_DECL_OVERRIDE;
|
||||
|
||||
private Q_SLOTS:
|
||||
void onVideoOutputReady(bool ready);
|
||||
void onError(qint32 what, qint32 extra);
|
||||
void onInfo(qint32 what, qint32 extra);
|
||||
void onMediaPlayerInfo(qint32 what, qint32 extra);
|
||||
|
||||
@@ -48,7 +48,6 @@ QT_BEGIN_NAMESPACE
|
||||
|
||||
class QAndroidMediaPlayerControl;
|
||||
class QAndroidMetaDataReaderControl;
|
||||
class QAndroidVideoRendererControl;
|
||||
|
||||
class QAndroidMediaService : public QMediaService
|
||||
{
|
||||
@@ -63,7 +62,7 @@ public:
|
||||
private:
|
||||
QAndroidMediaPlayerControl *mMediaControl;
|
||||
QAndroidMetaDataReaderControl *mMetadataControl;
|
||||
QAndroidVideoRendererControl *mVideoRendererControl;
|
||||
QMediaControl *mVideoRendererControl;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -102,6 +102,18 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
    }
}

static void notifyPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data)
{
    JCamera *obj = g_objectMap.value(id, 0);
    if (obj) {
        QByteArray bytes;
        int arrayLength = env->GetArrayLength(data);
        bytes.resize(arrayLength);
        env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
        Q_EMIT obj->previewFrameAvailable(bytes);
    }
}

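The new notifyPreviewFrame callback is added to the JNINativeMethod table further down; the actual binding happens in JCamera::initJNI(), whose body is outside this hunk. A generic sketch of that registration step, referencing the methods[] array shown below (the listener class is passed in rather than guessed):

#include <jni.h>

// Sketch only: binds the static callbacks from the methods[] table to the Java
// listener class, which is what an initJNI()-style function typically does.
static bool registerCameraNatives(JNIEnv *env, jclass listenerClass)
{
    return env->RegisterNatives(listenerClass, methods,
                                sizeof(methods) / sizeof(methods[0])) == JNI_OK;
}
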
JCamera::JCamera(int cameraId, jobject cam)
|
||||
: QObject()
|
||||
, QJNIObjectPrivate(cam)
|
||||
@@ -225,6 +237,23 @@ QList<QSize> JCamera::getSupportedPreviewSizes()
|
||||
return list;
|
||||
}
|
||||
|
||||
JCamera::ImageFormat JCamera::getPreviewFormat()
|
||||
{
|
||||
if (!m_parameters.isValid())
|
||||
return Unknown;
|
||||
|
||||
return JCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
|
||||
}
|
||||
|
||||
void JCamera::setPreviewFormat(ImageFormat fmt)
|
||||
{
|
||||
if (!m_parameters.isValid())
|
||||
return;
|
||||
|
||||
m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
|
||||
applyParameters();
|
||||
}
|
||||
|
||||
void JCamera::setPreviewSize(const QSize &size)
|
||||
{
|
||||
if (!m_parameters.isValid())
|
||||
@@ -624,6 +653,11 @@ void JCamera::setJpegQuality(int quality)
|
||||
applyParameters();
|
||||
}
|
||||
|
||||
void JCamera::requestPreviewFrame()
|
||||
{
|
||||
callMethod<void>("requestPreviewFrame");
|
||||
}
|
||||
|
||||
void JCamera::takePicture()
|
||||
{
|
||||
callMethod<void>("takePicture");
|
||||
@@ -672,7 +706,8 @@ QStringList JCamera::callStringListMethod(const char *methodName)
|
||||
static JNINativeMethod methods[] = {
|
||||
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
|
||||
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
|
||||
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}
|
||||
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
|
||||
{"notifyPreviewFrame", "(I[B)V", (void *)notifyPreviewFrame}
|
||||
};
|
||||
|
||||
bool JCamera::initJNI(JNIEnv *env)
|
||||
|
||||
@@ -58,6 +58,16 @@ public:
|
||||
CameraFacingFront = 1
|
||||
};
|
||||
|
||||
enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
|
||||
Unknown = 0,
|
||||
RGB565 = 4,
|
||||
NV16 = 16,
|
||||
NV21 = 17,
|
||||
YUY2 = 20,
|
||||
JPEG = 256,
|
||||
YV12 = 842094169
|
||||
};
|
||||
|
||||
~JCamera();
|
||||
|
||||
static JCamera *open(int cameraId);
|
||||
@@ -75,6 +85,9 @@ public:
|
||||
QSize getPreferredPreviewSizeForVideo();
|
||||
QList<QSize> getSupportedPreviewSizes();
|
||||
|
||||
ImageFormat getPreviewFormat();
|
||||
void setPreviewFormat(ImageFormat fmt);
|
||||
|
||||
QSize previewSize() const { return m_previewSize; }
|
||||
void setPreviewSize(const QSize &size);
|
||||
void setPreviewTexture(jobject surfaceTexture);
|
||||
@@ -131,6 +144,8 @@ public:
|
||||
void startPreview();
|
||||
void stopPreview();
|
||||
|
||||
void requestPreviewFrame();
|
||||
|
||||
void takePicture();
|
||||
|
||||
static bool initJNI(JNIEnv *env);
|
||||
@@ -143,6 +158,8 @@ Q_SIGNALS:
|
||||
|
||||
void whiteBalanceChanged();
|
||||
|
||||
void previewFrameAvailable(const QByteArray &data);
|
||||
|
||||
void pictureExposed();
|
||||
void pictureCaptured(const QByteArray &data);
|
||||
|
||||
|
||||
@@ -56,6 +56,7 @@ public:
|
||||
explicit JSurfaceTexture(unsigned int texName);
|
||||
~JSurfaceTexture();
|
||||
|
||||
int textureID() const { return m_texID; }
|
||||
QMatrix4x4 getTransformMatrix();
|
||||
void updateTexImage();
|
||||
|
||||
|
||||
3
src/plugins/android/videonode/android_videonode.json
Normal file
@@ -0,0 +1,3 @@
{
    "Keys": ["sgvideonodes"]
}
204
src/plugins/android/videonode/qandroidsgvideonode.cpp
Normal file
@@ -0,0 +1,204 @@
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
**
|
||||
** $QT_BEGIN_LICENSE:LGPL$
|
||||
** Commercial License Usage
|
||||
** Licensees holding valid commercial Qt licenses may use this file in
|
||||
** accordance with the commercial license agreement provided with the
|
||||
** Software or, alternatively, in accordance with the terms contained in
|
||||
** a written agreement between you and Digia. For licensing terms and
|
||||
** conditions see http://qt.digia.com/licensing. For further information
|
||||
** use the contact form at http://qt.digia.com/contact-us.
|
||||
**
|
||||
** GNU Lesser General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU Lesser
|
||||
** General Public License version 2.1 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.LGPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU Lesser General Public License version 2.1 requirements
|
||||
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
|
||||
**
|
||||
** In addition, as a special exception, Digia gives you certain additional
|
||||
** rights. These rights are described in the Digia Qt LGPL Exception
|
||||
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
|
||||
**
|
||||
** GNU General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU
|
||||
** General Public License version 3.0 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.GPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU General Public License version 3.0 requirements will be
|
||||
** met: http://www.gnu.org/copyleft/gpl.html.
|
||||
**
|
||||
**
|
||||
** $QT_END_LICENSE$
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#include "qandroidsgvideonode.h"
|
||||
|
||||
#include <qsgmaterial.h>
|
||||
#include <qmutex.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class QAndroidSGVideoNodeMaterialShader : public QSGMaterialShader
|
||||
{
|
||||
public:
|
||||
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
|
||||
|
||||
char const *const *attributeNames() const {
|
||||
static const char *names[] = {
|
||||
"qt_VertexPosition",
|
||||
"qt_VertexTexCoord",
|
||||
0
|
||||
};
|
||||
return names;
|
||||
}
|
||||
|
||||
protected:
|
||||
const char *vertexShader() const {
|
||||
return
|
||||
"uniform highp mat4 qt_Matrix; \n"
|
||||
"uniform highp mat4 texMatrix; \n"
|
||||
"attribute highp vec4 qt_VertexPosition; \n"
|
||||
"attribute highp vec2 qt_VertexTexCoord; \n"
|
||||
"varying highp vec2 qt_TexCoord; \n"
|
||||
"void main() { \n"
|
||||
" qt_TexCoord = (texMatrix * vec4(qt_VertexTexCoord, 0.0, 1.0)).xy; \n"
|
||||
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
|
||||
"}";
|
||||
}
|
||||
|
||||
const char *fragmentShader() const {
|
||||
return
|
||||
"#extension GL_OES_EGL_image_external : require \n"
|
||||
"uniform samplerExternalOES videoTexture; \n"
|
||||
"uniform lowp float opacity; \n"
|
||||
"varying highp vec2 qt_TexCoord; \n"
|
||||
"void main() \n"
|
||||
"{ \n"
|
||||
" gl_FragColor = texture2D(videoTexture, qt_TexCoord) * opacity; \n"
|
||||
"}";
|
||||
}
|
||||
|
||||
void initialize() {
|
||||
m_id_matrix = program()->uniformLocation("qt_Matrix");
|
||||
m_id_texMatrix = program()->uniformLocation("texMatrix");
|
||||
m_id_texture = program()->uniformLocation("videoTexture");
|
||||
m_id_opacity = program()->uniformLocation("opacity");
|
||||
}
|
||||
|
||||
int m_id_matrix;
|
||||
int m_id_texMatrix;
|
||||
int m_id_texture;
|
||||
int m_id_opacity;
|
||||
};
|
||||
|
||||
class QAndroidSGVideoNodeMaterial : public QSGMaterial
{
public:
    QAndroidSGVideoNodeMaterial()
        : m_textureId(0)
    {
        setFlag(Blending, false);
    }

    ~QAndroidSGVideoNodeMaterial()
    {
        m_frame = QVideoFrame();
    }

    QSGMaterialType *type() const {
        static QSGMaterialType theType;
        return &theType;
    }

    QSGMaterialShader *createShader() const {
        return new QAndroidSGVideoNodeMaterialShader;
    }

    int compare(const QSGMaterial *other) const {
        const QAndroidSGVideoNodeMaterial *m = static_cast<const QAndroidSGVideoNodeMaterial *>(other);
        return m_textureId - m->m_textureId;
    }

    void setVideoFrame(const QVideoFrame &frame) {
        QMutexLocker lock(&m_frameMutex);
        m_frame = frame;
    }

    bool updateTexture()
    {
        QMutexLocker lock(&m_frameMutex);
        bool texMatrixDirty = false;

        if (m_frame.isValid()) {
            QVariantList list = m_frame.handle().toList();

            GLuint texId = list.at(0).toUInt();
            QMatrix4x4 mat = qvariant_cast<QMatrix4x4>(list.at(1));

            texMatrixDirty = texId != m_textureId || mat != m_texMatrix;

            m_textureId = texId;
            m_texMatrix = mat;

            // the texture is already bound and initialized at this point,
            // no need to call glTexParams

        } else {
            m_textureId = 0;
        }

        return texMatrixDirty;
    }

    QVideoFrame m_frame;
    QMutex m_frameMutex;
    GLuint m_textureId;
    QMatrix4x4 m_texMatrix;
};

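updateTexture() above expects frame.handle() to decode to a QVariantList of {texture id, transform matrix}. The producer side of that contract is not in this file; a sketch of a buffer that would satisfy it (class name is illustrative, and the handle-type value assumes the plugin's ExternalGLTextureHandle convention):

#include <QAbstractVideoBuffer>
#include <QMatrix4x4>
#include <QVariantList>

// Sketch: packs the external texture id and its SurfaceTexture transform into a
// QVariantList, matching the unpacking done in updateTexture() above.
class ExternalTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
    ExternalTextureVideoBuffer(uint textureId, const QMatrix4x4 &texMatrix)
        : QAbstractVideoBuffer(HandleType(QAbstractVideoBuffer::UserHandle + 1)) // ExternalGLTextureHandle
        , m_textureId(textureId)
        , m_texMatrix(texMatrix)
    { }

    MapMode mapMode() const { return NotMapped; }
    uchar *map(MapMode, int *numBytes, int *bytesPerLine)
    { if (numBytes) *numBytes = 0; if (bytesPerLine) *bytesPerLine = 0; return 0; }
    void unmap() { }

    QVariant handle() const
    {
        // index 0: GL texture name, index 1: texture transform matrix
        return QVariantList() << m_textureId << QVariant::fromValue(m_texMatrix);
    }

private:
    uint m_textureId;
    QMatrix4x4 m_texMatrix;
};
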
void QAndroidSGVideoNodeMaterialShader::updateState(const RenderState &state,
|
||||
QSGMaterial *newMaterial,
|
||||
QSGMaterial *oldMaterial)
|
||||
{
|
||||
Q_UNUSED(oldMaterial);
|
||||
QAndroidSGVideoNodeMaterial *mat = static_cast<QAndroidSGVideoNodeMaterial *>(newMaterial);
|
||||
program()->setUniformValue(m_id_texture, 0);
|
||||
|
||||
if (mat->updateTexture())
|
||||
program()->setUniformValue(m_id_texMatrix, mat->m_texMatrix);
|
||||
|
||||
if (state.isOpacityDirty())
|
||||
program()->setUniformValue(m_id_opacity, state.opacity());
|
||||
|
||||
if (state.isMatrixDirty())
|
||||
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
|
||||
}
|
||||
|
||||
QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
|
||||
: m_format(format)
|
||||
{
|
||||
setFlag(QSGNode::OwnsMaterial);
|
||||
m_material = new QAndroidSGVideoNodeMaterial;
|
||||
setMaterial(m_material);
|
||||
}
|
||||
|
||||
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
|
||||
{
|
||||
m_material->setVideoFrame(frame);
|
||||
markDirty(DirtyMaterial);
|
||||
}
|
||||
|
||||
QVideoFrame::PixelFormat QAndroidSGVideoNode::pixelFormat() const
|
||||
{
|
||||
return m_format.pixelFormat();
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
67
src/plugins/android/videonode/qandroidsgvideonode.h
Normal file
@@ -0,0 +1,67 @@
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
**
|
||||
** $QT_BEGIN_LICENSE:LGPL$
|
||||
** Commercial License Usage
|
||||
** Licensees holding valid commercial Qt licenses may use this file in
|
||||
** accordance with the commercial license agreement provided with the
|
||||
** Software or, alternatively, in accordance with the terms contained in
|
||||
** a written agreement between you and Digia. For licensing terms and
|
||||
** conditions see http://qt.digia.com/licensing. For further information
|
||||
** use the contact form at http://qt.digia.com/contact-us.
|
||||
**
|
||||
** GNU Lesser General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU Lesser
|
||||
** General Public License version 2.1 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.LGPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU Lesser General Public License version 2.1 requirements
|
||||
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
|
||||
**
|
||||
** In addition, as a special exception, Digia gives you certain additional
|
||||
** rights. These rights are described in the Digia Qt LGPL Exception
|
||||
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
|
||||
**
|
||||
** GNU General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU
|
||||
** General Public License version 3.0 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.GPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU General Public License version 3.0 requirements will be
|
||||
** met: http://www.gnu.org/copyleft/gpl.html.
|
||||
**
|
||||
**
|
||||
** $QT_END_LICENSE$
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#ifndef QANDROIDSGVIDEONODE_H
|
||||
#define QANDROIDSGVIDEONODE_H
|
||||
|
||||
#include <private/qsgvideonode_p.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class QAndroidSGVideoNodeMaterial;
|
||||
|
||||
class QAndroidSGVideoNode : public QSGVideoNode
|
||||
{
|
||||
public:
|
||||
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
|
||||
|
||||
void setCurrentFrame(const QVideoFrame &frame);
|
||||
QVideoFrame::PixelFormat pixelFormat() const;
|
||||
|
||||
private:
|
||||
QVideoSurfaceFormat m_format;
|
||||
QAndroidSGVideoNodeMaterial *m_material;
|
||||
QVideoFrame m_frame;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // QANDROIDSGVIDEONODE_H
|
||||
69
src/plugins/android/videonode/qandroidsgvideonodeplugin.cpp
Normal file
@@ -0,0 +1,69 @@
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
**
|
||||
** $QT_BEGIN_LICENSE:LGPL$
|
||||
** Commercial License Usage
|
||||
** Licensees holding valid commercial Qt licenses may use this file in
|
||||
** accordance with the commercial license agreement provided with the
|
||||
** Software or, alternatively, in accordance with the terms contained in
|
||||
** a written agreement between you and Digia. For licensing terms and
|
||||
** conditions see http://qt.digia.com/licensing. For further information
|
||||
** use the contact form at http://qt.digia.com/contact-us.
|
||||
**
|
||||
** GNU Lesser General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU Lesser
|
||||
** General Public License version 2.1 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.LGPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU Lesser General Public License version 2.1 requirements
|
||||
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
|
||||
**
|
||||
** In addition, as a special exception, Digia gives you certain additional
|
||||
** rights. These rights are described in the Digia Qt LGPL Exception
|
||||
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
|
||||
**
|
||||
** GNU General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU
|
||||
** General Public License version 3.0 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.GPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU General Public License version 3.0 requirements will be
|
||||
** met: http://www.gnu.org/copyleft/gpl.html.
|
||||
**
|
||||
**
|
||||
** $QT_END_LICENSE$
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#include "qandroidsgvideonodeplugin.h"
|
||||
#include "qandroidsgvideonode.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
#define ExternalGLTextureHandle (QAbstractVideoBuffer::UserHandle + 1)
|
||||
|
||||
QList<QVideoFrame::PixelFormat> QAndroidSGVideoNodeFactoryPlugin::supportedPixelFormats(
|
||||
QAbstractVideoBuffer::HandleType handleType) const
|
||||
{
|
||||
QList<QVideoFrame::PixelFormat> pixelFormats;
|
||||
|
||||
if (handleType == ExternalGLTextureHandle)
|
||||
pixelFormats.append(QVideoFrame::Format_BGR32);
|
||||
|
||||
return pixelFormats;
|
||||
}
|
||||
|
||||
QSGVideoNode *QAndroidSGVideoNodeFactoryPlugin::createNode(const QVideoSurfaceFormat &format)
|
||||
{
|
||||
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
|
||||
return new QAndroidSGVideoNode(format);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
QT_END_NAMESPACE
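ExternalGLTextureHandle is defined here as QAbstractVideoBuffer::UserHandle + 1; whichever surface produces frames with this private handle type must use the same value for createNode() to be selected. A shared definition (a sketch, not something the patch does) would avoid duplicating the magic number:

#include <QAbstractVideoBuffer>

// Sketch: one shared constant both the frame producer and this plugin could include
// instead of repeating UserHandle + 1 in two places.
namespace AndroidVideoPrivate {
enum { ExternalGLTextureHandle = QAbstractVideoBuffer::UserHandle + 1 };
}
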
|
||||
62
src/plugins/android/videonode/qandroidsgvideonodeplugin.h
Normal file
@@ -0,0 +1,62 @@
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
**
|
||||
** $QT_BEGIN_LICENSE:LGPL$
|
||||
** Commercial License Usage
|
||||
** Licensees holding valid commercial Qt licenses may use this file in
|
||||
** accordance with the commercial license agreement provided with the
|
||||
** Software or, alternatively, in accordance with the terms contained in
|
||||
** a written agreement between you and Digia. For licensing terms and
|
||||
** conditions see http://qt.digia.com/licensing. For further information
|
||||
** use the contact form at http://qt.digia.com/contact-us.
|
||||
**
|
||||
** GNU Lesser General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU Lesser
|
||||
** General Public License version 2.1 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.LGPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU Lesser General Public License version 2.1 requirements
|
||||
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
|
||||
**
|
||||
** In addition, as a special exception, Digia gives you certain additional
|
||||
** rights. These rights are described in the Digia Qt LGPL Exception
|
||||
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
|
||||
**
|
||||
** GNU General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU
|
||||
** General Public License version 3.0 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.GPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU General Public License version 3.0 requirements will be
|
||||
** met: http://www.gnu.org/copyleft/gpl.html.
|
||||
**
|
||||
**
|
||||
** $QT_END_LICENSE$
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#ifndef QANDROIDSGVIDEONODEPLUGIN_H
|
||||
#define QANDROIDSGVIDEONODEPLUGIN_H
|
||||
|
||||
#include <private/qsgvideonode_p.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class QAndroidSGVideoNodeFactoryPlugin : public QSGVideoNodeFactoryPlugin
|
||||
{
|
||||
Q_OBJECT
|
||||
Q_PLUGIN_METADATA(IID QSGVideoNodeFactoryInterface_iid
|
||||
FILE "android_videonode.json")
|
||||
|
||||
public:
|
||||
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
|
||||
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // QANDROIDSGVIDEONODEPLUGIN_H
|
||||
16
src/plugins/android/videonode/videonode.pro
Normal file
@@ -0,0 +1,16 @@
TARGET = qtsgvideonode_android
|
||||
QT += quick multimedia-private qtmultimediaquicktools-private
|
||||
|
||||
PLUGIN_TYPE = video/videonode
|
||||
PLUGIN_CLASS_NAME = QAndroidSGVideoNodeFactoryPlugin
|
||||
load(qt_plugin)
|
||||
|
||||
HEADERS += \
|
||||
qandroidsgvideonodeplugin.h \
|
||||
qandroidsgvideonode.h
|
||||
|
||||
SOURCES += \
|
||||
qandroidsgvideonodeplugin.cpp \
|
||||
qandroidsgvideonode.cpp
|
||||
|
||||
OTHER_FILES += android_videonode.json
|
||||
@@ -41,6 +41,8 @@
|
||||
|
||||
#include "audiocaptureprobecontrol.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
AudioCaptureProbeControl::AudioCaptureProbeControl(QObject *parent):
|
||||
QMediaAudioProbeControl(parent)
|
||||
{
|
||||
@@ -58,3 +60,5 @@ void AudioCaptureProbeControl::bufferProbed(const char *data, quint32 size, cons
|
||||
QAudioBuffer audioBuffer = QAudioBuffer(QByteArray::fromRawData(data, size), format);
|
||||
QMetaObject::invokeMethod(this, "audioBufferProbed", Qt::QueuedConnection, Q_ARG(QAudioBuffer, audioBuffer));
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
#include <QtCore/qmutex.h>
|
||||
#include <qaudiobuffer.h>
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class AudioCaptureProbeControl : public QMediaAudioProbeControl
|
||||
{
|
||||
@@ -58,4 +58,6 @@ public:
|
||||
void bufferProbed(const char *data, quint32 size, const QAudioFormat& format);
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -47,6 +47,8 @@
|
||||
#include "audiomediarecordercontrol.h"
|
||||
#include "audiocaptureprobecontrol.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
AudioCaptureService::AudioCaptureService(QObject *parent):
|
||||
QMediaService(parent)
|
||||
{
|
||||
@@ -94,4 +96,4 @@ void AudioCaptureService::releaseControl(QMediaControl *control)
|
||||
Q_UNUSED(control)
|
||||
}
|
||||
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -46,14 +46,14 @@
|
||||
|
||||
#include "qmediaservice.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class AudioCaptureSession;
|
||||
class AudioEncoderControl;
|
||||
class AudioContainerControl;
|
||||
class AudioMediaRecorderControl;
|
||||
class AudioInputSelector;
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
|
||||
class AudioCaptureService : public QMediaService
|
||||
{
|
||||
Q_OBJECT
|
||||
@@ -71,4 +71,6 @@ private:
|
||||
AudioMediaRecorderControl *m_mediaControl;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
|
||||
#include "qmediaserviceproviderplugin.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QMediaService* AudioCaptureServicePlugin::create(QString const& key)
|
||||
{
|
||||
@@ -60,3 +61,4 @@ void AudioCaptureServicePlugin::release(QMediaService *service)
|
||||
delete service;
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -45,7 +45,7 @@
|
||||
|
||||
#include "qmediaserviceproviderplugin.h"
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class AudioCaptureServicePlugin : public QMediaServiceProviderPlugin
|
||||
{
|
||||
@@ -58,4 +58,6 @@ public:
|
||||
void release(QMediaService *service);
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // AUDIOCAPTURESERVICEPLUGIN_H
|
||||
|
||||
@@ -49,6 +49,8 @@
|
||||
#include "audiocapturesession.h"
|
||||
#include "audiocaptureprobecontrol.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
void FileProbeProxy::startProbes(const QAudioFormat &format)
|
||||
{
|
||||
m_format = format;
|
||||
@@ -87,33 +89,20 @@ qint64 FileProbeProxy::writeData(const char *data, qint64 len)
|
||||
return QFile::writeData(data, len);
|
||||
}
|
||||
|
||||
AudioCaptureSession::AudioCaptureSession(QObject *parent):
|
||||
QObject(parent)
|
||||
AudioCaptureSession::AudioCaptureSession(QObject *parent)
|
||||
: QObject(parent)
|
||||
, m_state(QMediaRecorder::StoppedState)
|
||||
, m_status(QMediaRecorder::UnloadedStatus)
|
||||
, m_audioInput(0)
|
||||
, m_deviceInfo(QAudioDeviceInfo::defaultInputDevice())
|
||||
, m_wavFile(true)
|
||||
{
|
||||
m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice());
|
||||
m_audioInput = 0;
|
||||
m_position = 0;
|
||||
m_state = QMediaRecorder::StoppedState;
|
||||
|
||||
m_format.setSampleRate(8000);
|
||||
m_format.setChannelCount(1);
|
||||
m_format.setSampleSize(8);
|
||||
m_format.setSampleType(QAudioFormat::UnSignedInt);
|
||||
m_format.setCodec("audio/pcm");
|
||||
wavFile = true;
|
||||
m_format = m_deviceInfo.preferredFormat();
|
||||
}
|
||||
|
||||
AudioCaptureSession::~AudioCaptureSession()
|
||||
{
|
||||
stop();
|
||||
|
||||
if(m_audioInput)
|
||||
delete m_audioInput;
|
||||
}
|
||||
|
||||
QAudioDeviceInfo* AudioCaptureSession::deviceInfo() const
|
||||
{
|
||||
return m_deviceInfo;
|
||||
setState(QMediaRecorder::StoppedState);
|
||||
}
|
||||
|
||||
QAudioFormat AudioCaptureSession::format() const
|
||||
@@ -121,118 +110,96 @@ QAudioFormat AudioCaptureSession::format() const
|
||||
return m_format;
|
||||
}
|
||||
|
||||
bool AudioCaptureSession::isFormatSupported(const QAudioFormat &format) const
|
||||
void AudioCaptureSession::setFormat(const QAudioFormat &format)
|
||||
{
|
||||
if(m_deviceInfo) {
|
||||
if(format.codec().contains(QLatin1String("audio/x-wav"))) {
|
||||
QAudioFormat fmt = format;
|
||||
fmt.setCodec("audio/pcm");
|
||||
return m_deviceInfo->isFormatSupported(fmt);
|
||||
} else
|
||||
return m_deviceInfo->isFormatSupported(format);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool AudioCaptureSession::setFormat(const QAudioFormat &format)
|
||||
{
|
||||
if(m_deviceInfo) {
|
||||
|
||||
QAudioFormat fmt = format;
|
||||
|
||||
if(m_deviceInfo->isFormatSupported(fmt)) {
|
||||
m_format = fmt;
|
||||
if(m_audioInput) delete m_audioInput;
|
||||
m_audioInput = 0;
|
||||
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
|
||||
for(int i=0;i<devices.size();i++) {
|
||||
if(qstrcmp(m_deviceInfo->deviceName().toLocal8Bit().constData(),
|
||||
devices.at(i).deviceName().toLocal8Bit().constData()) == 0) {
|
||||
m_audioInput = new QAudioInput(devices.at(i),m_format);
|
||||
connect(m_audioInput,SIGNAL(stateChanged(QAudio::State)),this,SLOT(stateChanged(QAudio::State)));
|
||||
connect(m_audioInput,SIGNAL(notify()),this,SLOT(notify()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
m_format = m_deviceInfo->preferredFormat();
|
||||
qWarning()<<"failed to setFormat using preferred...";
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
QStringList AudioCaptureSession::supportedContainers() const
|
||||
{
|
||||
QStringList list;
|
||||
if(m_deviceInfo) {
|
||||
if (m_deviceInfo->supportedCodecs().size() > 0) {
|
||||
list << "audio/x-wav";
|
||||
list << "audio/pcm";
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
QString AudioCaptureSession::containerDescription(const QString &formatMimeType) const
|
||||
{
|
||||
if(m_deviceInfo) {
|
||||
if (formatMimeType.contains(QLatin1String("audio/pcm")))
|
||||
return tr("RAW file format");
|
||||
if (formatMimeType.contains(QLatin1String("audio/x-wav")))
|
||||
return tr("WAV file format");
|
||||
}
|
||||
return QString();
|
||||
m_format = format;
|
||||
}
|
||||
|
||||
void AudioCaptureSession::setContainerFormat(const QString &formatMimeType)
|
||||
{
|
||||
if (!formatMimeType.contains(QLatin1String("audio/x-wav")) &&
|
||||
!formatMimeType.contains(QLatin1String("audio/pcm")) &&
|
||||
!formatMimeType.isEmpty())
|
||||
return;
|
||||
|
||||
if(m_deviceInfo) {
|
||||
if (!m_deviceInfo->supportedCodecs().contains(QLatin1String("audio/pcm")))
|
||||
return;
|
||||
|
||||
if (formatMimeType.isEmpty() || formatMimeType.contains(QLatin1String("audio/x-wav"))) {
|
||||
wavFile = true;
|
||||
m_format.setCodec("audio/pcm");
|
||||
} else {
|
||||
wavFile = false;
|
||||
m_format.setCodec(formatMimeType);
|
||||
}
|
||||
}
|
||||
m_wavFile = (formatMimeType.isEmpty()
|
||||
|| QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0);
|
||||
}
|
||||
|
||||
QString AudioCaptureSession::containerFormat() const
|
||||
{
|
||||
if(wavFile)
|
||||
return QString("audio/x-wav");
|
||||
if (m_wavFile)
|
||||
return QStringLiteral("audio/x-wav");
|
||||
|
||||
return QString("audio/pcm");
|
||||
return QStringLiteral("audio/x-raw");
|
||||
}
|
||||
|
||||
QUrl AudioCaptureSession::outputLocation() const
|
||||
{
|
||||
return m_actualSink;
|
||||
return m_actualOutputLocation;
|
||||
}
|
||||
|
||||
bool AudioCaptureSession::setOutputLocation(const QUrl& sink)
|
||||
bool AudioCaptureSession::setOutputLocation(const QUrl& location)
|
||||
{
|
||||
m_sink = m_actualSink = sink;
|
||||
return true;
|
||||
if (m_requestedOutputLocation == location)
|
||||
return false;
|
||||
|
||||
m_actualOutputLocation = QUrl();
|
||||
m_requestedOutputLocation = location;
|
||||
|
||||
if (m_requestedOutputLocation.isEmpty())
|
||||
return true;
|
||||
|
||||
if (m_requestedOutputLocation.isValid() && (m_requestedOutputLocation.isLocalFile()
|
||||
|| m_requestedOutputLocation.isRelative())) {
|
||||
emit actualLocationChanged(m_requestedOutputLocation);
|
||||
return true;
|
||||
}
|
||||
|
||||
m_requestedOutputLocation = QUrl();
|
||||
return false;
|
||||
}
|
||||
|
||||
qint64 AudioCaptureSession::position() const
|
||||
{
|
||||
return m_position;
|
||||
if (m_audioInput)
|
||||
return m_audioInput->processedUSecs() / 1000;
|
||||
return 0;
|
||||
}
|
||||
|
||||
int AudioCaptureSession::state() const
|
||||
void AudioCaptureSession::setState(QMediaRecorder::State state)
|
||||
{
|
||||
return int(m_state);
|
||||
if (m_state == state)
|
||||
return;
|
||||
|
||||
m_state = state;
|
||||
emit stateChanged(m_state);
|
||||
|
||||
switch (m_state) {
|
||||
case QMediaRecorder::StoppedState:
|
||||
stop();
|
||||
break;
|
||||
case QMediaRecorder::PausedState:
|
||||
pause();
|
||||
break;
|
||||
case QMediaRecorder::RecordingState:
|
||||
record();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
QMediaRecorder::State AudioCaptureSession::state() const
|
||||
{
|
||||
return m_state;
|
||||
}
|
||||
|
||||
void AudioCaptureSession::setStatus(QMediaRecorder::Status status)
|
||||
{
|
||||
if (m_status == status)
|
||||
return;
|
||||
|
||||
m_status = status;
|
||||
emit statusChanged(m_status);
|
||||
}
|
||||
|
||||
QMediaRecorder::Status AudioCaptureSession::status() const
|
||||
{
|
||||
return m_status;
|
||||
}
|
||||
|
||||
QDir AudioCaptureSession::defaultDir() const
|
||||
@@ -258,9 +225,29 @@ QDir AudioCaptureSession::defaultDir() const
|
||||
return QDir();
|
||||
}
|
||||
|
||||
QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ext) const
|
||||
QString AudioCaptureSession::generateFileName(const QString &requestedName,
|
||||
const QString &extension) const
|
||||
{
|
||||
if (requestedName.isEmpty())
|
||||
return generateFileName(defaultDir(), extension);
|
||||
|
||||
QString path = requestedName;
|
||||
|
||||
if (QFileInfo(path).isRelative())
|
||||
path = defaultDir().absoluteFilePath(path);
|
||||
|
||||
if (QFileInfo(path).isDir())
|
||||
return generateFileName(QDir(path), extension);
|
||||
|
||||
if (!path.endsWith(extension))
|
||||
path.append(QString(".%1").arg(extension));
|
||||
|
||||
return path;
|
||||
}
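Putting the two overloads together, the requested-name overload above resolves roughly as follows (paths and the clip_* numbering are illustrative; the exact pattern comes from the directory-based overload below):

// generateFileName("",              "wav") -> <defaultDir>/clip_0001.wav   (auto-numbered)
// generateFileName("take1",         "wav") -> <defaultDir>/take1.wav       (relative file name)
// generateFileName("/sdcard/rec",   "wav") -> /sdcard/rec/clip_0001.wav    (existing directory)
// generateFileName("/sdcard/a.wav", "wav") -> /sdcard/a.wav                (absolute file name)
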
|
||||
|
||||
QString AudioCaptureSession::generateFileName(const QDir &dir,
|
||||
const QString &ext) const
|
||||
{
|
||||
int lastClip = 0;
|
||||
foreach(QString fileName, dir.entryList(QStringList() << QString("clip_*.%1").arg(ext))) {
|
||||
int imgNumber = fileName.mid(5, fileName.size()-6-ext.length()).toInt();
|
||||
@@ -277,25 +264,45 @@ QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ex
|
||||
|
||||
void AudioCaptureSession::record()
|
||||
{
|
||||
if(!m_audioInput) {
|
||||
setFormat(m_format);
|
||||
}
|
||||
if (m_status == QMediaRecorder::PausedStatus) {
|
||||
m_audioInput->resume();
|
||||
} else {
|
||||
if (m_deviceInfo.isNull()) {
|
||||
emit error(QMediaRecorder::ResourceError,
|
||||
QStringLiteral("No input device available."));
|
||||
m_state = QMediaRecorder::StoppedState;
|
||||
emit stateChanged(m_state);
|
||||
setStatus(QMediaRecorder::UnavailableStatus);
|
||||
return;
|
||||
}
|
||||
|
||||
m_actualSink = m_sink;
|
||||
setStatus(QMediaRecorder::LoadingStatus);
|
||||
|
||||
if (m_actualSink.isEmpty()) {
|
||||
QString ext = wavFile ? QLatin1String("wav") : QLatin1String("raw");
|
||||
m_actualSink = generateFileName(defaultDir(), ext);
|
||||
}
|
||||
m_format = m_deviceInfo.nearestFormat(m_format);
|
||||
m_audioInput = new QAudioInput(m_deviceInfo, m_format);
|
||||
connect(m_audioInput, SIGNAL(stateChanged(QAudio::State)),
|
||||
this, SLOT(audioInputStateChanged(QAudio::State)));
|
||||
connect(m_audioInput, SIGNAL(notify()),
|
||||
this, SLOT(notify()));
|
||||
|
||||
if(m_actualSink.toLocalFile().length() > 0)
|
||||
file.setFileName(m_actualSink.toLocalFile());
|
||||
else
|
||||
file.setFileName(m_actualSink.toString());
|
||||
|
||||
if(m_audioInput) {
|
||||
if(m_state == QMediaRecorder::StoppedState) {
|
||||
if(file.open(QIODevice::WriteOnly)) {
|
||||
QString filePath = generateFileName(
|
||||
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
|
||||
: m_requestedOutputLocation.toString(),
|
||||
m_wavFile ? QLatin1String("wav")
|
||||
: QLatin1String("raw"));
|
||||
|
||||
m_actualOutputLocation = QUrl::fromLocalFile(filePath);
|
||||
if (m_actualOutputLocation != m_requestedOutputLocation)
|
||||
emit actualLocationChanged(m_actualOutputLocation);
|
||||
|
||||
file.setFileName(filePath);
|
||||
|
||||
setStatus(QMediaRecorder::LoadedStatus);
|
||||
setStatus(QMediaRecorder::StartingStatus);
|
||||
|
||||
if (file.open(QIODevice::WriteOnly)) {
|
||||
if (m_wavFile) {
|
||||
memset(&header,0,sizeof(CombinedHeader));
|
||||
memcpy(header.riff.descriptor.id,"RIFF",4);
|
||||
header.riff.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(), filesize-8
|
||||
@@ -310,28 +317,26 @@ void AudioCaptureSession::record()
|
||||
header.wave.bitsPerSample = m_format.sampleSize();
|
||||
memcpy(header.data.descriptor.id,"data",4);
|
||||
header.data.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(),samples*channels*sampleSize/8
|
||||
if (wavFile)
|
||||
file.write((char*)&header,sizeof(CombinedHeader));
|
||||
|
||||
file.startProbes(m_format);
|
||||
m_audioInput->start(qobject_cast<QIODevice*>(&file));
|
||||
} else {
|
||||
emit error(1,QString("can't open source, failed"));
|
||||
m_state = QMediaRecorder::StoppedState;
|
||||
emit stateChanged(m_state);
|
||||
file.write((char*)&header,sizeof(CombinedHeader));
|
||||
}
|
||||
|
||||
file.startProbes(m_format);
|
||||
m_audioInput->start(qobject_cast<QIODevice*>(&file));
|
||||
} else {
|
||||
delete m_audioInput;
|
||||
m_audioInput = 0;
|
||||
emit error(QMediaRecorder::ResourceError,
|
||||
QStringLiteral("Can't open output location"));
|
||||
m_state = QMediaRecorder::StoppedState;
|
||||
emit stateChanged(m_state);
|
||||
setStatus(QMediaRecorder::UnloadedStatus);
|
||||
}
|
||||
}
|
||||
|
||||
m_state = QMediaRecorder::RecordingState;
|
||||
}
|
||||
|
||||
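The CombinedHeader being filled in above is declared in this class's header (only partially visible further down). For orientation, a canonical 44-byte PCM WAV header has the layout below — a sketch, not the project's actual struct:

#include <QtGlobal>
#include <cstring>

#pragma pack(push, 1)
struct WavHeader
{
    char    riffId[4];      // "RIFF"
    quint32 riffSize;       // file size - 8, patched in stop()
    char    waveId[4];      // "WAVE"
    char    fmtId[4];       // "fmt "
    quint32 fmtSize;        // 16 for PCM
    quint16 audioFormat;    // 1 = PCM
    quint16 channels;
    quint32 sampleRate;
    quint32 byteRate;       // sampleRate * channels * bitsPerSample / 8
    quint16 blockAlign;     // channels * bitsPerSample / 8
    quint16 bitsPerSample;
    char    dataId[4];      // "data"
    quint32 dataSize;       // raw PCM byte count, patched in stop()
};
#pragma pack(pop)

// Sketch: fills the fixed fields; the two sizes stay 0xFFFFFFFF until recording stops.
static WavHeader makeWavHeader(quint32 sampleRate, quint16 channels, quint16 bitsPerSample)
{
    WavHeader h;
    std::memcpy(h.riffId, "RIFF", 4);
    h.riffSize = 0xFFFFFFFF;
    std::memcpy(h.waveId, "WAVE", 4);
    std::memcpy(h.fmtId, "fmt ", 4);
    h.fmtSize = 16;
    h.audioFormat = 1;
    h.channels = channels;
    h.sampleRate = sampleRate;
    h.byteRate = sampleRate * channels * bitsPerSample / 8;
    h.blockAlign = channels * bitsPerSample / 8;
    h.bitsPerSample = bitsPerSample;
    std::memcpy(h.dataId, "data", 4);
    h.dataSize = 0xFFFFFFFF;
    return h;
}
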
void AudioCaptureSession::pause()
|
||||
{
|
||||
if(m_audioInput)
|
||||
m_audioInput->stop();
|
||||
|
||||
m_state = QMediaRecorder::PausedState;
|
||||
m_audioInput->suspend();
|
||||
}
|
||||
|
||||
void AudioCaptureSession::stop()
|
||||
@@ -340,7 +345,7 @@ void AudioCaptureSession::stop()
|
||||
m_audioInput->stop();
|
||||
file.stopProbes();
|
||||
file.close();
|
||||
if (wavFile) {
|
||||
if (m_wavFile) {
|
||||
qint32 fileSize = file.size()-8;
|
||||
file.open(QIODevice::ReadWrite | QIODevice::Unbuffered);
|
||||
file.read((char*)&header,sizeof(CombinedHeader));
|
||||
@@ -350,9 +355,10 @@ void AudioCaptureSession::stop()
|
||||
file.write((char*)&header,sizeof(CombinedHeader));
|
||||
file.close();
|
||||
}
|
||||
m_position = 0;
|
||||
delete m_audioInput;
|
||||
m_audioInput = 0;
|
||||
setStatus(QMediaRecorder::UnloadedStatus);
|
||||
}
|
||||
m_state = QMediaRecorder::StoppedState;
|
||||
}
|
||||
|
||||
void AudioCaptureSession::addProbe(AudioCaptureProbeControl *probe)
|
||||
@@ -365,45 +371,41 @@ void AudioCaptureSession::removeProbe(AudioCaptureProbeControl *probe)
|
||||
file.removeProbe(probe);
|
||||
}
|
||||
|
||||
void AudioCaptureSession::stateChanged(QAudio::State state)
void AudioCaptureSession::audioInputStateChanged(QAudio::State state)
{
    switch(state) {
    case QAudio::ActiveState:
        emit stateChanged(QMediaRecorder::RecordingState);
        break;
    default:
        if(!((m_state == QMediaRecorder::PausedState)||(m_state == QMediaRecorder::StoppedState)))
            m_state = QMediaRecorder::StoppedState;

        emit stateChanged(m_state);
        break;
    case QAudio::ActiveState:
        setStatus(QMediaRecorder::RecordingStatus);
        break;
    case QAudio::SuspendedState:
        setStatus(QMediaRecorder::PausedStatus);
        break;
    case QAudio::StoppedState:
        setStatus(QMediaRecorder::FinalizingStatus);
        break;
    default:
        break;
    }
}

void AudioCaptureSession::notify()
{
    m_position += m_audioInput->notifyInterval();
    emit positionChanged(m_position);
    emit positionChanged(position());
}

void AudioCaptureSession::setCaptureDevice(const QString &deviceName)
|
||||
{
|
||||
m_captureDevice = deviceName;
|
||||
if(m_deviceInfo)
|
||||
delete m_deviceInfo;
|
||||
|
||||
m_deviceInfo = 0;
|
||||
|
||||
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
|
||||
for(int i = 0; i < devices.size(); i++) {
|
||||
if(qstrcmp(m_captureDevice.toLocal8Bit().constData(),
|
||||
devices.at(i).deviceName().toLocal8Bit().constData())==0){
|
||||
m_deviceInfo = new QAudioDeviceInfo(devices.at(i));
|
||||
for (int i = 0; i < devices.size(); ++i) {
|
||||
QAudioDeviceInfo info = devices.at(i);
|
||||
if (m_captureDevice == info.deviceName()){
|
||||
m_deviceInfo = info;
|
||||
return;
|
||||
}
|
||||
}
|
||||
m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice());
|
||||
m_deviceInfo = QAudioDeviceInfo::defaultInputDevice();
|
||||
}
|
||||
|
||||
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -55,7 +55,7 @@
|
||||
#include <qaudioinput.h>
|
||||
#include <qaudiodeviceinfo.h>
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class AudioCaptureProbeControl;
|
||||
|
||||
@@ -85,50 +85,58 @@ public:
|
||||
~AudioCaptureSession();
|
||||
|
||||
QAudioFormat format() const;
|
||||
QAudioDeviceInfo* deviceInfo() const;
|
||||
bool isFormatSupported(const QAudioFormat &format) const;
|
||||
bool setFormat(const QAudioFormat &format);
|
||||
QStringList supportedContainers() const;
|
||||
void setFormat(const QAudioFormat &format);
|
||||
|
||||
QString containerFormat() const;
|
||||
void setContainerFormat(const QString &formatMimeType);
|
||||
QString containerDescription(const QString &formatMimeType) const;
|
||||
|
||||
QUrl outputLocation() const;
|
||||
bool setOutputLocation(const QUrl& sink);
|
||||
bool setOutputLocation(const QUrl& location);
|
||||
|
||||
qint64 position() const;
|
||||
int state() const;
|
||||
void record();
|
||||
void pause();
|
||||
void stop();
|
||||
|
||||
void setState(QMediaRecorder::State state);
|
||||
QMediaRecorder::State state() const;
|
||||
QMediaRecorder::Status status() const;
|
||||
|
||||
void addProbe(AudioCaptureProbeControl *probe);
|
||||
void removeProbe(AudioCaptureProbeControl *probe);
|
||||
|
||||
public slots:
|
||||
void setCaptureDevice(const QString &deviceName);
|
||||
|
||||
signals:
|
||||
void stateChanged(QMediaRecorder::State state);
|
||||
void statusChanged(QMediaRecorder::Status status);
|
||||
void positionChanged(qint64 position);
|
||||
void actualLocationChanged(const QUrl &location);
|
||||
void error(int error, const QString &errorString);
|
||||
|
||||
private slots:
|
||||
void stateChanged(QAudio::State state);
|
||||
void audioInputStateChanged(QAudio::State state);
|
||||
void notify();
|
||||
|
||||
private:
|
||||
void record();
|
||||
void pause();
|
||||
void stop();
|
||||
|
||||
void setStatus(QMediaRecorder::Status status);
|
||||
|
||||
QDir defaultDir() const;
|
||||
QString generateFileName(const QDir &dir, const QString &ext) const;
|
||||
QString generateFileName(const QString &requestedName,
|
||||
const QString &extension) const;
|
||||
QString generateFileName(const QDir &dir, const QString &extension) const;
|
||||
|
||||
FileProbeProxy file;
|
||||
QString m_captureDevice;
|
||||
QUrl m_sink;
|
||||
QUrl m_actualSink;
|
||||
QUrl m_requestedOutputLocation;
|
||||
QUrl m_actualOutputLocation;
|
||||
QMediaRecorder::State m_state;
|
||||
QMediaRecorder::Status m_status;
|
||||
QAudioInput *m_audioInput;
|
||||
QAudioDeviceInfo *m_deviceInfo;
|
||||
QAudioDeviceInfo m_deviceInfo;
|
||||
QAudioFormat m_format;
|
||||
qint64 m_position;
|
||||
bool wavFile;
|
||||
bool m_wavFile;
|
||||
|
||||
// WAV header stuff
|
||||
|
||||
@@ -171,4 +179,6 @@ private:
|
||||
CombinedHeader header;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -42,6 +42,8 @@
#include "audiocontainercontrol.h"
#include "audiocapturesession.h"

QT_BEGIN_NAMESPACE

AudioContainerControl::AudioContainerControl(QObject *parent)
:QMediaContainerControl(parent)
{
@@ -54,7 +56,8 @@ AudioContainerControl::~AudioContainerControl()

QStringList AudioContainerControl::supportedContainers() const
{
return m_session->supportedContainers();
return QStringList() << QStringLiteral("audio/x-wav")
<< QStringLiteral("audio/x-raw");
}

QString AudioContainerControl::containerFormat() const
@@ -64,11 +67,18 @@ QString AudioContainerControl::containerFormat() const

void AudioContainerControl::setContainerFormat(const QString &formatMimeType)
{
m_session->setContainerFormat(formatMimeType);
if (formatMimeType.isEmpty() || supportedContainers().contains(formatMimeType))
m_session->setContainerFormat(formatMimeType);
}

QString AudioContainerControl::containerDescription(const QString &formatMimeType) const
{
return m_session->containerDescription(formatMimeType);
if (QString::compare(formatMimeType, QLatin1String("audio/x-raw")) == 0)
return tr("RAW (headerless) file format");
if (QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0)
return tr("WAV file format");

return QString();
}

QT_END_NAMESPACE

@@ -47,9 +47,9 @@
|
||||
#include <QtCore/qstringlist.h>
|
||||
#include <QtCore/qmap.h>
|
||||
|
||||
class AudioCaptureSession;
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
class AudioCaptureSession;
|
||||
|
||||
class AudioContainerControl : public QMediaContainerControl
|
||||
{
|
||||
@@ -67,4 +67,6 @@ private:
|
||||
AudioCaptureSession* m_session;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -46,26 +46,43 @@
|
||||
|
||||
#include <QtCore/qdebug.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
static QAudioFormat audioSettingsToAudioFormat(const QAudioEncoderSettings &settings)
|
||||
{
|
||||
QAudioFormat fmt;
|
||||
fmt.setCodec(settings.codec());
|
||||
fmt.setChannelCount(settings.channelCount());
|
||||
fmt.setSampleRate(settings.sampleRate());
|
||||
if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) {
|
||||
fmt.setSampleType(QAudioFormat::UnSignedInt);
|
||||
fmt.setSampleSize(8);
|
||||
} else {
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
}
|
||||
fmt.setByteOrder(QAudioDeviceInfo::defaultInputDevice().preferredFormat().byteOrder());
|
||||
return fmt;
|
||||
}
|
||||
|
||||
static QAudioEncoderSettings audioFormatToAudioSettings(const QAudioFormat &format)
|
||||
{
|
||||
QAudioEncoderSettings settings;
|
||||
settings.setCodec(format.codec());
|
||||
settings.setChannelCount(format.channelCount());
|
||||
settings.setSampleRate(format.sampleRate());
|
||||
settings.setEncodingMode(QMultimedia::ConstantBitRateEncoding);
|
||||
settings.setBitRate(format.channelCount()
|
||||
* format.sampleSize()
|
||||
* format.sampleRate());
|
||||
return settings;
|
||||
}
|
||||
|
||||
AudioEncoderControl::AudioEncoderControl(QObject *parent)
|
||||
:QAudioEncoderSettingsControl(parent)
|
||||
{
|
||||
m_session = qobject_cast<AudioCaptureSession*>(parent);
|
||||
|
||||
QT_PREPEND_NAMESPACE(QAudioFormat) fmt;
|
||||
fmt.setSampleSize(8);
|
||||
fmt.setChannelCount(1);
|
||||
fmt.setSampleRate(8000);
|
||||
fmt.setSampleType(QT_PREPEND_NAMESPACE(QAudioFormat)::SignedInt);
|
||||
fmt.setCodec("audio/pcm");
|
||||
fmt.setByteOrder(QAudioFormat::LittleEndian);
|
||||
m_session->setFormat(fmt);
|
||||
|
||||
m_settings.setEncodingMode(QMultimedia::ConstantQualityEncoding);
|
||||
m_settings.setCodec("audio/pcm");
|
||||
m_settings.setBitRate(8000);
|
||||
m_settings.setChannelCount(1);
|
||||
m_settings.setSampleRate(8000);
|
||||
m_settings.setQuality(QMultimedia::LowQuality);
|
||||
update();
|
||||
}
|
||||
|
||||
AudioEncoderControl::~AudioEncoderControl()
|
||||
@@ -74,71 +91,85 @@ AudioEncoderControl::~AudioEncoderControl()
|
||||
|
||||
QStringList AudioEncoderControl::supportedAudioCodecs() const
|
||||
{
|
||||
QStringList list;
|
||||
if (m_session->supportedContainers().size() > 0)
|
||||
list.append("audio/pcm");
|
||||
|
||||
return list;
|
||||
return QStringList() << QStringLiteral("audio/pcm");
|
||||
}
|
||||
|
||||
QString AudioEncoderControl::codecDescription(const QString &codecName) const
|
||||
{
|
||||
if (codecName.contains(QLatin1String("audio/pcm")))
|
||||
return tr("PCM audio data");
|
||||
if (QString::compare(codecName, QLatin1String("audio/pcm")) == 0)
|
||||
return tr("Linear PCM audio data");
|
||||
|
||||
return QString();
|
||||
}
|
||||
|
||||
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &, bool *continuous) const
|
||||
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &settings, bool *continuous) const
|
||||
{
|
||||
if (continuous)
|
||||
*continuous = false;
|
||||
|
||||
return m_session->deviceInfo()->supportedSampleRates();
|
||||
if (settings.codec().isEmpty() || settings.codec() == QLatin1String("audio/pcm"))
|
||||
return m_sampleRates;
|
||||
|
||||
return QList<int>();
|
||||
}
|
||||
|
||||
QAudioEncoderSettings AudioEncoderControl::audioSettings() const
|
||||
{
|
||||
return m_settings;
|
||||
return audioFormatToAudioSettings(m_session->format());
|
||||
}
|
||||
|
||||
void AudioEncoderControl::setAudioSettings(const QAudioEncoderSettings &settings)
|
||||
{
|
||||
QAudioFormat fmt = m_session->format();
|
||||
QAudioFormat fmt = audioSettingsToAudioFormat(settings);
|
||||
|
||||
if (settings.encodingMode() == QMultimedia::ConstantQualityEncoding) {
|
||||
if (settings.quality() == QMultimedia::LowQuality) {
|
||||
fmt.setCodec("audio/pcm");
|
||||
switch (settings.quality()) {
|
||||
case QMultimedia::VeryLowQuality:
|
||||
fmt.setSampleSize(8);
|
||||
fmt.setChannelCount(1);
|
||||
fmt.setSampleRate(8000);
|
||||
fmt.setSampleType(QAudioFormat::UnSignedInt);
|
||||
|
||||
} else if (settings.quality() == QMultimedia::NormalQuality) {
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setChannelCount(1);
|
||||
break;
|
||||
case QMultimedia::LowQuality:
|
||||
fmt.setSampleSize(8);
|
||||
fmt.setSampleRate(22050);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
|
||||
} else {
|
||||
fmt.setSampleType(QAudioFormat::UnSignedInt);
|
||||
break;
|
||||
case QMultimedia::HighQuality:
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setSampleRate(48000);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
break;
|
||||
case QMultimedia::VeryHighQuality:
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setSampleRate(96000);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
break;
|
||||
case QMultimedia::NormalQuality:
|
||||
default:
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setChannelCount(1);
|
||||
fmt.setSampleRate(44100);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
}
|
||||
|
||||
} else {
|
||||
fmt.setChannelCount(settings.channelCount());
|
||||
fmt.setSampleRate(settings.sampleRate());
|
||||
if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) {
|
||||
fmt.setSampleType(QAudioFormat::UnSignedInt);
|
||||
fmt.setSampleSize(8);
|
||||
} else {
|
||||
fmt.setSampleSize(16);
|
||||
fmt.setSampleType(QAudioFormat::SignedInt);
|
||||
break;
|
||||
}
|
||||
}
|
||||
fmt.setCodec("audio/pcm");
|
||||
|
||||
m_session->setFormat(fmt);
|
||||
m_settings = settings;
|
||||
}
|
||||
|
||||
void AudioEncoderControl::update()
|
||||
{
|
||||
m_sampleRates.clear();
|
||||
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
|
||||
for (int i = 0; i < devices.size(); ++i) {
|
||||
QList<int> rates = devices.at(i).supportedSampleRates();
|
||||
for (int j = 0; j < rates.size(); ++j) {
|
||||
int rate = rates.at(j);
|
||||
if (!m_sampleRates.contains(rate))
|
||||
m_sampleRates.append(rate);
|
||||
}
|
||||
}
|
||||
qSort(m_sampleRates);
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -49,9 +49,9 @@
|
||||
|
||||
#include <qaudioformat.h>
|
||||
|
||||
class AudioCaptureSession;
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
class AudioCaptureSession;
|
||||
|
||||
class AudioEncoderControl : public QAudioEncoderSettingsControl
|
||||
{
|
||||
@@ -68,8 +68,12 @@ public:
|
||||
void setAudioSettings(const QAudioEncoderSettings&);
|
||||
|
||||
private:
|
||||
void update();
|
||||
|
||||
AudioCaptureSession* m_session;
|
||||
QAudioEncoderSettings m_settings;
|
||||
QList<int> m_sampleRates;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -44,6 +44,7 @@
|
||||
|
||||
#include <qaudiodeviceinfo.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
AudioInputSelector::AudioInputSelector(QObject *parent)
|
||||
:QAudioInputSelectorControl(parent)
|
||||
@@ -79,7 +80,7 @@ QString AudioInputSelector::inputDescription(const QString& name) const
|
||||
|
||||
QString AudioInputSelector::defaultInput() const
|
||||
{
|
||||
return QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice()).deviceName();
|
||||
return QAudioDeviceInfo::defaultInputDevice().deviceName();
|
||||
}
|
||||
|
||||
QString AudioInputSelector::activeInput() const
|
||||
@@ -108,3 +109,5 @@ void AudioInputSelector::update()
|
||||
m_descriptions.append(devices.at(i).deviceName());
|
||||
}
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -46,9 +46,9 @@
|
||||
|
||||
#include "qaudioinputselectorcontrol.h"
|
||||
|
||||
class AudioCaptureSession;
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
class AudioCaptureSession;
|
||||
|
||||
class AudioInputSelector : public QAudioInputSelectorControl
|
||||
{
|
||||
@@ -74,4 +74,6 @@ private:
|
||||
AudioCaptureSession* m_session;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif // AUDIOINPUTSELECTOR_H
|
||||
|
||||
@@ -44,15 +44,22 @@
|
||||
|
||||
#include <QtCore/qdebug.h>
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
AudioMediaRecorderControl::AudioMediaRecorderControl(QObject *parent)
|
||||
:QMediaRecorderControl(parent)
|
||||
, m_state(QMediaRecorder::StoppedState)
|
||||
, m_prevStatus(QMediaRecorder::UnloadedStatus)
|
||||
: QMediaRecorderControl(parent)
|
||||
{
|
||||
m_session = qobject_cast<AudioCaptureSession*>(parent);
|
||||
connect(m_session,SIGNAL(positionChanged(qint64)),this,SIGNAL(durationChanged(qint64)));
|
||||
connect(m_session,SIGNAL(stateChanged(QMediaRecorder::State)), this,SLOT(updateStatus()));
|
||||
connect(m_session,SIGNAL(error(int,QString)),this,SLOT(handleSessionError(int,QString)));
|
||||
connect(m_session, SIGNAL(positionChanged(qint64)),
|
||||
this, SIGNAL(durationChanged(qint64)));
|
||||
connect(m_session, SIGNAL(stateChanged(QMediaRecorder::State)),
|
||||
this, SIGNAL(stateChanged(QMediaRecorder::State)));
|
||||
connect(m_session, SIGNAL(statusChanged(QMediaRecorder::Status)),
|
||||
this, SIGNAL(statusChanged(QMediaRecorder::Status)));
|
||||
connect(m_session, SIGNAL(actualLocationChanged(QUrl)),
|
||||
this, SIGNAL(actualLocationChanged(QUrl)));
|
||||
connect(m_session, SIGNAL(error(int,QString)),
|
||||
this, SIGNAL(error(int,QString)));
|
||||
}
|
||||
|
||||
AudioMediaRecorderControl::~AudioMediaRecorderControl()
|
||||
@@ -71,21 +78,12 @@ bool AudioMediaRecorderControl::setOutputLocation(const QUrl& sink)
|
||||
|
||||
QMediaRecorder::State AudioMediaRecorderControl::state() const
|
||||
{
|
||||
return (QMediaRecorder::State)m_session->state();
|
||||
return m_session->state();
|
||||
}
|
||||
|
||||
QMediaRecorder::Status AudioMediaRecorderControl::status() const
|
||||
{
|
||||
static QMediaRecorder::Status statusTable[3][3] = {
|
||||
//Stopped recorder state:
|
||||
{ QMediaRecorder::LoadedStatus, QMediaRecorder::FinalizingStatus, QMediaRecorder::FinalizingStatus },
|
||||
//Recording recorder state:
|
||||
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus },
|
||||
//Paused recorder state:
|
||||
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus }
|
||||
};
|
||||
|
||||
return statusTable[m_state][m_session->state()];
|
||||
return m_session->status();
|
||||
}
|
||||
|
||||
qint64 AudioMediaRecorderControl::duration() const
|
||||
@@ -106,47 +104,19 @@ qreal AudioMediaRecorderControl::volume() const
|
||||
|
||||
void AudioMediaRecorderControl::setState(QMediaRecorder::State state)
|
||||
{
|
||||
if (m_state == state)
|
||||
return;
|
||||
|
||||
m_state = state;
|
||||
|
||||
switch (state) {
|
||||
case QMediaRecorder::StoppedState:
|
||||
m_session->stop();
|
||||
break;
|
||||
case QMediaRecorder::PausedState:
|
||||
m_session->pause();
|
||||
break;
|
||||
case QMediaRecorder::RecordingState:
|
||||
m_session->record();
|
||||
break;
|
||||
}
|
||||
|
||||
updateStatus();
|
||||
m_session->setState(state);
|
||||
}
|
||||
|
||||
void AudioMediaRecorderControl::setMuted(bool)
|
||||
void AudioMediaRecorderControl::setMuted(bool muted)
|
||||
{
|
||||
if (muted)
|
||||
qWarning("Muting the audio recording is not supported.");
|
||||
}
|
||||
|
||||
void AudioMediaRecorderControl::setVolume(qreal volume)
|
||||
{
|
||||
if (!qFuzzyCompare(volume, qreal(1.0)))
|
||||
qWarning() << "Media service doesn't support recorder audio gain.";
|
||||
qWarning("Changing the audio recording volume is not supported.");
|
||||
}
|
||||
|
||||
void AudioMediaRecorderControl::updateStatus()
|
||||
{
|
||||
QMediaRecorder::Status newStatus = status();
|
||||
if (m_prevStatus != newStatus) {
|
||||
m_prevStatus = newStatus;
|
||||
emit statusChanged(m_prevStatus);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMediaRecorderControl::handleSessionError(int code, const QString &description)
|
||||
{
|
||||
emit error(code, description);
|
||||
setState(QMediaRecorder::StoppedState);
|
||||
}
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -47,9 +47,9 @@
|
||||
#include "qmediarecorder.h"
|
||||
#include "qmediarecordercontrol.h"
|
||||
|
||||
class AudioCaptureSession;
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QT_USE_NAMESPACE
|
||||
class AudioCaptureSession;
|
||||
|
||||
class AudioMediaRecorderControl : public QMediaRecorderControl
|
||||
{
|
||||
@@ -59,7 +59,7 @@ public:
|
||||
~AudioMediaRecorderControl();
|
||||
|
||||
QUrl outputLocation() const;
|
||||
bool setOutputLocation(const QUrl &sink);
|
||||
bool setOutputLocation(const QUrl &location);
|
||||
|
||||
QMediaRecorder::State state() const;
|
||||
QMediaRecorder::Status status() const;
|
||||
@@ -71,19 +71,14 @@ public:
|
||||
|
||||
void applySettings() {}
|
||||
|
||||
public slots:
|
||||
void setState(QMediaRecorder::State state);
|
||||
void setMuted(bool);
|
||||
void setVolume(qreal volume);
|
||||
|
||||
private slots:
|
||||
void updateStatus();
|
||||
void handleSessionError(int code, const QString &description);
|
||||
|
||||
private:
|
||||
AudioCaptureSession* m_session;
|
||||
QMediaRecorder::State m_state;
|
||||
QMediaRecorder::Status m_prevStatus;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
#endif
|
||||
|
||||
@@ -40,10 +40,8 @@
|
||||
****************************************************************************/
|
||||
#include "bbserviceplugin.h"
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
#include "bbcameraservice.h"
|
||||
#include "bbvideodeviceselectorcontrol.h"
|
||||
#endif
|
||||
#include "bbmediaplayerservice.h"
|
||||
|
||||
#include <QDebug>
|
||||
@@ -56,10 +54,8 @@ BbServicePlugin::BbServicePlugin()
|
||||
|
||||
QMediaService *BbServicePlugin::create(const QString &key)
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA))
|
||||
return new BbCameraService();
|
||||
#endif
|
||||
|
||||
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER))
|
||||
return new BbMediaPlayerService();
|
||||
@@ -106,9 +102,7 @@ QString BbServicePlugin::deviceDescription(const QByteArray &service, const QByt
|
||||
|
||||
void BbServicePlugin::updateDevices() const
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
BbVideoDeviceSelectorControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
|
||||
#endif
|
||||
|
||||
if (m_cameraDevices.isEmpty()) {
|
||||
qWarning() << "No camera devices found";
|
||||
|
||||
@@ -12,9 +12,7 @@ SOURCES += bbserviceplugin.cpp
|
||||
|
||||
include(common/common.pri)
|
||||
|
||||
!blackberry-playbook {
|
||||
include(camera/camera.pri)
|
||||
}
|
||||
include(camera/camera.pri)
|
||||
|
||||
include(mediaplayer/mediaplayer.pri)
|
||||
|
||||
|
||||
@@ -139,6 +139,7 @@ QVariant BbCameraExposureControl::requestedValue(ExposureParameter parameter) co
|
||||
|
||||
QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment
|
||||
return QVariantList();
|
||||
|
||||
@@ -170,6 +171,9 @@ QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
|
||||
default:
|
||||
return QVariant();
|
||||
}
|
||||
#else
|
||||
return QVariant();
|
||||
#endif
|
||||
}
|
||||
|
||||
bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)
|
||||
|
||||
@@ -45,11 +45,14 @@
|
||||
#include <QDebug>
|
||||
#include <QUrl>
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
#include <audio/audio_manager_device.h>
|
||||
#include <audio/audio_manager_volume.h>
|
||||
#endif
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
static audio_manager_device_t currentAudioInputDevice()
|
||||
{
|
||||
audio_manager_device_t device = AUDIO_DEVICE_HEADSET;
|
||||
@@ -62,6 +65,7 @@ static audio_manager_device_t currentAudioInputDevice()
|
||||
|
||||
return device;
|
||||
}
|
||||
#endif
|
||||
|
||||
BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent)
|
||||
: QMediaRecorderControl(parent)
|
||||
@@ -103,12 +107,13 @@ bool BbCameraMediaRecorderControl::isMuted() const
|
||||
{
|
||||
bool muted = false;
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted);
|
||||
if (result != EOK) {
|
||||
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status"));
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif
|
||||
return muted;
|
||||
}
|
||||
|
||||
@@ -116,11 +121,13 @@ qreal BbCameraMediaRecorderControl::volume() const
|
||||
{
|
||||
double level = 0.0;
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level);
|
||||
if (result != EOK) {
|
||||
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume"));
|
||||
return 0.0;
|
||||
}
|
||||
#endif
|
||||
|
||||
return (level / 100);
|
||||
}
|
||||
@@ -137,22 +144,26 @@ void BbCameraMediaRecorderControl::setState(QMediaRecorder::State state)
|
||||
|
||||
void BbCameraMediaRecorderControl::setMuted(bool muted)
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted);
|
||||
if (result != EOK) {
|
||||
emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status"));
|
||||
} else {
|
||||
emit mutedChanged(muted);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void BbCameraMediaRecorderControl::setVolume(qreal volume)
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100));
|
||||
if (result != EOK) {
|
||||
emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume"));
|
||||
} else {
|
||||
emit volumeChanged(volume);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -70,9 +70,11 @@ BbCameraOrientationHandler::BbCameraOrientationHandler(QObject *parent)
|
||||
|
||||
BbCameraOrientationHandler::~BbCameraOrientationHandler()
|
||||
{
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
const int result = orientation_stop_events(0);
|
||||
if (result == BPS_FAILURE)
|
||||
qWarning() << "Unable to unregister for orientation change events";
|
||||
#endif
|
||||
|
||||
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
|
||||
}
|
||||
|
||||
@@ -75,8 +75,6 @@ static QString errorToString(camera_error_t error)
|
||||
return QLatin1String("No permission");
|
||||
case CAMERA_EBADR:
|
||||
return QLatin1String("Invalid file descriptor");
|
||||
case CAMERA_ENODATA:
|
||||
return QLatin1String("Data does not exist");
|
||||
case CAMERA_ENOENT:
|
||||
return QLatin1String("File or directory does not exists");
|
||||
case CAMERA_ENOMEM:
|
||||
@@ -87,24 +85,28 @@ static QString errorToString(camera_error_t error)
|
||||
return QLatin1String("Communication timeout");
|
||||
case CAMERA_EALREADY:
|
||||
return QLatin1String("Operation already in progress");
|
||||
case CAMERA_EBUSY:
|
||||
return QLatin1String("Camera busy");
|
||||
case CAMERA_ENOSPC:
|
||||
return QLatin1String("Disk is full");
|
||||
case CAMERA_EUNINIT:
|
||||
return QLatin1String("Camera library not initialized");
|
||||
case CAMERA_EREGFAULT:
|
||||
return QLatin1String("Callback registration failed");
|
||||
case CAMERA_EMICINUSE:
|
||||
return QLatin1String("Microphone in use already");
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
case CAMERA_ENODATA:
|
||||
return QLatin1String("Data does not exist");
|
||||
case CAMERA_EBUSY:
|
||||
return QLatin1String("Camera busy");
|
||||
case CAMERA_EDESKTOPCAMERAINUSE:
|
||||
return QLatin1String("Desktop camera in use already");
|
||||
case CAMERA_ENOSPC:
|
||||
return QLatin1String("Disk is full");
|
||||
case CAMERA_EPOWERDOWN:
|
||||
return QLatin1String("Camera in power down state");
|
||||
case CAMERA_3ALOCKED:
|
||||
return QLatin1String("3A have been locked");
|
||||
case CAMERA_EVIEWFINDERFROZEN:
|
||||
return QLatin1String("Freeze flag set");
|
||||
#endif
|
||||
default:
|
||||
return QLatin1String("Unknown error");
|
||||
}
|
||||
@@ -658,6 +660,9 @@ void BbCameraSession::applyVideoSettings()
|
||||
return;
|
||||
}
|
||||
|
||||
const QSize resolution = m_videoEncoderSettings.resolution();
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
QString videoCodec = m_videoEncoderSettings.codec();
|
||||
if (videoCodec.isEmpty())
|
||||
videoCodec = QLatin1String("h264");
|
||||
@@ -670,8 +675,6 @@ void BbCameraSession::applyVideoSettings()
|
||||
else if (videoCodec == QLatin1String("h264"))
|
||||
cameraVideoCodec = CAMERA_VIDEOCODEC_H264;
|
||||
|
||||
const QSize resolution = m_videoEncoderSettings.resolution();
|
||||
|
||||
qreal frameRate = m_videoEncoderSettings.frameRate();
|
||||
if (frameRate == 0) {
|
||||
const QList<qreal> frameRates = supportedFrameRates(QVideoEncoderSettings(), 0);
|
||||
@@ -690,12 +693,16 @@ void BbCameraSession::applyVideoSettings()
|
||||
cameraAudioCodec = CAMERA_AUDIOCODEC_AAC;
|
||||
else if (audioCodec == QLatin1String("raw"))
|
||||
cameraAudioCodec = CAMERA_AUDIOCODEC_RAW;
|
||||
|
||||
result = camera_set_video_property(m_handle,
|
||||
CAMERA_IMGPROP_WIDTH, resolution.width(),
|
||||
CAMERA_IMGPROP_HEIGHT, resolution.height(),
|
||||
CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec,
|
||||
CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec);
|
||||
#else
|
||||
result = camera_set_video_property(m_handle,
|
||||
CAMERA_IMGPROP_WIDTH, resolution.width(),
|
||||
CAMERA_IMGPROP_HEIGHT, resolution.height());
|
||||
#endif
|
||||
|
||||
if (result != CAMERA_EOK) {
|
||||
qWarning() << "Unable to apply video settings:" << result;
|
||||
@@ -979,10 +986,14 @@ static void viewFinderStatusCallback(camera_handle_t handle, camera_devstatus_t
|
||||
if (status == CAMERA_STATUS_FOCUS_CHANGE) {
|
||||
BbCameraSession *session = static_cast<BbCameraSession*>(context);
|
||||
QMetaObject::invokeMethod(session, "handleFocusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value));
|
||||
} else if (status == CAMERA_STATUS_POWERUP) {
|
||||
return;
|
||||
}
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
else if (status == CAMERA_STATUS_POWERUP) {
|
||||
BbCameraSession *session = static_cast<BbCameraSession*>(context);
|
||||
QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
bool BbCameraSession::startViewFinder()
|
||||
@@ -1159,6 +1170,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
|
||||
Q_UNUSED(handle)
|
||||
Q_UNUSED(value)
|
||||
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
if (status == CAMERA_STATUS_VIDEO_PAUSE) {
|
||||
BbCameraSession *session = static_cast<BbCameraSession*>(context);
|
||||
QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection);
|
||||
@@ -1166,6 +1178,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
|
||||
BbCameraSession *session = static_cast<BbCameraSession*>(context);
|
||||
QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
bool BbCameraSession::startVideoRecording()
|
||||
|
||||
@@ -156,10 +156,12 @@ QVariant BbCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParame
|
||||
return QVideoFrame::Format_Invalid;
|
||||
case CAMERA_FRAMETYPE_CBYCRY:
|
||||
return QVideoFrame::Format_Invalid;
|
||||
#ifndef Q_OS_BLACKBERRY_TABLET
|
||||
case CAMERA_FRAMETYPE_COMPRESSEDVIDEO:
|
||||
return QVideoFrame::Format_Invalid;
|
||||
case CAMERA_FRAMETYPE_COMPRESSEDAUDIO:
|
||||
return QVideoFrame::Format_Invalid;
|
||||
#endif
|
||||
default:
|
||||
return QVideoFrame::Format_Invalid;
|
||||
}
|
||||
|
||||
@@ -46,4 +46,8 @@ SOURCES += \
$$PWD/bbvideodeviceselectorcontrol.cpp \
$$PWD/bbvideorenderercontrol.cpp

LIBS += -lcamapi -laudio_manager
LIBS += -lcamapi

!blackberry-playbook {
LIBS += -laudio_manager
}

@@ -39,6 +39,12 @@
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#include <QtMultimedia/qmediametadata.h>
|
||||
#include <QtCore/qcoreapplication.h>
|
||||
#include <QSize>
|
||||
#include <qdatetime.h>
|
||||
#include <qimage.h>
|
||||
|
||||
#include <dshow.h>
|
||||
#include <initguid.h>
|
||||
#include <qnetwork.h>
|
||||
@@ -46,8 +52,56 @@
|
||||
#include "directshowmetadatacontrol.h"
|
||||
#include "directshowplayerservice.h"
|
||||
|
||||
#include <QtMultimedia/qmediametadata.h>
|
||||
#include <QtCore/qcoreapplication.h>
|
||||
#ifndef QT_NO_WMSDK
|
||||
#include <wmsdk.h>
|
||||
#endif
|
||||
|
||||
#ifndef QT_NO_SHELLITEM
|
||||
#include <ShlObj.h>
|
||||
#include <propkeydef.h>
|
||||
#include <private/qsystemlibrary_p.h>
|
||||
|
||||
DEFINE_PROPERTYKEY(PKEY_Author, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 4);
|
||||
DEFINE_PROPERTYKEY(PKEY_Title, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 2);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_SubTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 38);
|
||||
DEFINE_PROPERTYKEY(PKEY_ParentalRating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 21);
|
||||
DEFINE_PROPERTYKEY(PKEY_Comment, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 6);
|
||||
DEFINE_PROPERTYKEY(PKEY_Copyright, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 11);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_ProviderStyle, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 40);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_Year, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 5);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_DateEncoded, 0x2E4B640D, 0x5019, 0x46D8, 0x88, 0x81, 0x55, 0x41, 0x4C, 0xC5, 0xCA, 0xA0, 100);
|
||||
DEFINE_PROPERTYKEY(PKEY_Rating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 9);
|
||||
DEFINE_PROPERTYKEY(PKEY_Keywords, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 5);
|
||||
DEFINE_PROPERTYKEY(PKEY_Language, 0xD5CDD502, 0x2E9C, 0x101B, 0x93, 0x97, 0x08, 0x00, 0x2B, 0x2C, 0xF9, 0xAE, 28);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_Publisher, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 30);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_Duration, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
|
||||
DEFINE_PROPERTYKEY(PKEY_Audio_EncodingBitrate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_AverageLevel, 0x09EDD5B6, 0xB301, 0x43C5, 0x99, 0x90, 0xD0, 0x03, 0x02, 0xEF, 0xFD, 0x46, 100);
|
||||
DEFINE_PROPERTYKEY(PKEY_Audio_ChannelCount, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 7);
|
||||
DEFINE_PROPERTYKEY(PKEY_Audio_PeakValue, 0x2579E5D0, 0x1116, 0x4084, 0xBD, 0x9A, 0x9B, 0x4F, 0x7C, 0xB4, 0xDF, 0x5E, 100);
|
||||
DEFINE_PROPERTYKEY(PKEY_Audio_SampleRate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 5);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_AlbumTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 4);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_AlbumArtist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 13);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Artist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 2);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Composer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 19);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Conductor, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 36);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Lyrics, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 12);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Mood, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 39);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_TrackNumber, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 7);
|
||||
DEFINE_PROPERTYKEY(PKEY_Music_Genre, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 11);
|
||||
DEFINE_PROPERTYKEY(PKEY_ThumbnailStream, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 27);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_FrameHeight, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_FrameWidth, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_HorizontalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 42);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_VerticalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 45);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_FrameRate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 6);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_EncodingBitrate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 8);
|
||||
DEFINE_PROPERTYKEY(PKEY_Video_Director, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 20);
|
||||
DEFINE_PROPERTYKEY(PKEY_Media_Writer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 23);
|
||||
|
||||
typedef HRESULT (WINAPI *q_SHCreateItemFromParsingName)(PCWSTR, IBindCtx *, const GUID&, void **);
|
||||
static q_SHCreateItemFromParsingName sHCreateItemFromParsingName = 0;
|
||||
#endif
|
||||
|
||||
#ifndef QT_NO_WMSDK
|
||||
namespace
|
||||
@@ -70,12 +124,12 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
|
||||
{ QMediaMetaData::Genre, L"WM/Genre" },
|
||||
//{ QMediaMetaData::Date, 0 },
|
||||
{ QMediaMetaData::Year, L"WM/Year" },
|
||||
{ QMediaMetaData::UserRating, L"UserRating" },
|
||||
{ QMediaMetaData::UserRating, L"Rating" },
|
||||
//{ QMediaMetaData::MetaDatawords, 0 },
|
||||
{ QMediaMetaData::Language, L"Language" },
|
||||
{ QMediaMetaData::Language, L"WM/Language" },
|
||||
{ QMediaMetaData::Publisher, L"WM/Publisher" },
|
||||
{ QMediaMetaData::Copyright, L"Copyright" },
|
||||
{ QMediaMetaData::ParentalRating, L"ParentalRating" },
|
||||
{ QMediaMetaData::ParentalRating, L"WM/ParentalRating" },
|
||||
//{ QMediaMetaData::RatingOrganisation, L"RatingOrganisation" },
|
||||
|
||||
// Media
|
||||
@@ -103,11 +157,11 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
|
||||
//{ QMediaMetaData::CoverArtUriLarge, 0 },
|
||||
|
||||
// Image/Video
|
||||
//{ QMediaMetaData::Resolution, 0 },
|
||||
//{ QMediaMetaData::PixelAspectRatio, 0 },
|
||||
{ QMediaMetaData::Resolution, L"WM/VideoHeight" },
|
||||
{ QMediaMetaData::PixelAspectRatio, L"AspectRatioX" },
|
||||
|
||||
// Video
|
||||
//{ QMediaMetaData::FrameRate, 0 },
|
||||
{ QMediaMetaData::VideoFrameRate, L"WM/VideoFrameRate" },
|
||||
{ QMediaMetaData::VideoBitRate, L"VideoBitRate" },
|
||||
{ QMediaMetaData::VideoCodec, L"VideoCodec" },
|
||||
|
||||
@@ -118,12 +172,6 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
|
||||
{ QMediaMetaData::Director, L"WM/Director" },
|
||||
{ QMediaMetaData::LeadPerformer, L"LeadPerformer" },
|
||||
{ QMediaMetaData::Writer, L"WM/Writer" },
|
||||
|
||||
// Photos
|
||||
{ QMediaMetaData::CameraManufacturer, L"CameraManufacturer" },
|
||||
{ QMediaMetaData::CameraModel, L"CameraModel" },
|
||||
{ QMediaMetaData::Event, L"Event" },
|
||||
{ QMediaMetaData::Subject, L"Subject" }
|
||||
};
|
||||
|
||||
static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
|
||||
@@ -150,7 +198,7 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
|
||||
case WMT_TYPE_STRING:
|
||||
{
|
||||
QString string;
|
||||
string.resize(size / 2 - 1);
|
||||
string.resize(size / 2); // size is in bytes, string is in UTF16
|
||||
|
||||
if (header->GetAttributeByName(
|
||||
&streamNumber,
|
||||
@@ -227,12 +275,58 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef QT_NO_SHELLITEM
|
||||
static QVariant convertValue(const PROPVARIANT& var)
|
||||
{
|
||||
QVariant value;
|
||||
switch (var.vt) {
|
||||
case VT_LPWSTR:
|
||||
value = QString::fromUtf16(reinterpret_cast<const ushort*>(var.pwszVal));
|
||||
break;
|
||||
case VT_UI4:
|
||||
value = uint(var.ulVal);
|
||||
break;
|
||||
case VT_UI8:
|
||||
value = qulonglong(var.uhVal.QuadPart);
|
||||
break;
|
||||
case VT_BOOL:
|
||||
value = bool(var.boolVal);
|
||||
break;
|
||||
case VT_FILETIME:
|
||||
SYSTEMTIME sysDate;
|
||||
if (!FileTimeToSystemTime(&var.filetime, &sysDate))
|
||||
break;
|
||||
value = QDate(sysDate.wYear, sysDate.wMonth, sysDate.wDay);
|
||||
break;
|
||||
case VT_STREAM:
|
||||
{
|
||||
STATSTG stat;
|
||||
if (FAILED(var.pStream->Stat(&stat, STATFLAG_NONAME)))
|
||||
break;
|
||||
void *data = malloc(stat.cbSize.QuadPart);
|
||||
ULONG read = 0;
|
||||
if (FAILED(var.pStream->Read(data, stat.cbSize.QuadPart, &read))) {
|
||||
free(data);
|
||||
break;
|
||||
}
|
||||
value = QImage::fromData(reinterpret_cast<const uchar*>(data), read);
|
||||
free(data);
|
||||
}
|
||||
break;
|
||||
case VT_VECTOR | VT_LPWSTR:
|
||||
QStringList vList;
|
||||
for (ULONG i = 0; i < var.calpwstr.cElems; ++i)
|
||||
vList.append(QString::fromUtf16(reinterpret_cast<const ushort*>(var.calpwstr.pElems[i])));
|
||||
value = vList;
|
||||
break;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
#endif
|
||||
|
||||
DirectShowMetaDataControl::DirectShowMetaDataControl(QObject *parent)
|
||||
: QMetaDataReaderControl(parent)
|
||||
, m_content(0)
|
||||
#ifndef QT_NO_WMSDK
|
||||
, m_headerInfo(0)
|
||||
#endif
|
||||
, m_available(false)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -242,75 +336,229 @@ DirectShowMetaDataControl::~DirectShowMetaDataControl()
|
||||
|
||||
bool DirectShowMetaDataControl::isMetaDataAvailable() const
|
||||
{
|
||||
#ifndef QT_NO_WMSDK
|
||||
return m_content || m_headerInfo;
|
||||
#else
|
||||
return m_content;
|
||||
#endif
|
||||
return m_available;
|
||||
}
|
||||
|
||||
QVariant DirectShowMetaDataControl::metaData(const QString &key) const
|
||||
{
|
||||
QVariant value;
|
||||
|
||||
#ifndef QT_NO_WMSDK
|
||||
if (m_headerInfo) {
|
||||
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
|
||||
for (int i = 0; i < count; ++i) {
|
||||
if (qt_wmMetaDataKeys[i].key == key) {
|
||||
value = getValue(m_headerInfo, qt_wmMetaDataKeys[i].token);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (m_content) {
|
||||
#else
|
||||
if (m_content) {
|
||||
#endif
|
||||
BSTR string = 0;
|
||||
|
||||
if (key == QMediaMetaData::Author)
|
||||
m_content->get_AuthorName(&string);
|
||||
else if (key == QMediaMetaData::Title)
|
||||
m_content->get_Title(&string);
|
||||
else if (key == QMediaMetaData::ParentalRating)
|
||||
m_content->get_Rating(&string);
|
||||
else if (key == QMediaMetaData::Description)
|
||||
m_content->get_Description(&string);
|
||||
else if (key == QMediaMetaData::Copyright)
|
||||
m_content->get_Copyright(&string);
|
||||
|
||||
if (string) {
|
||||
value = QString::fromUtf16(reinterpret_cast<ushort *>(string), ::SysStringLen(string));
|
||||
|
||||
::SysFreeString(string);
|
||||
}
|
||||
}
|
||||
return value;
|
||||
return m_metadata.value(key);
|
||||
}
|
||||
|
||||
QStringList DirectShowMetaDataControl::availableMetaData() const
|
||||
{
|
||||
return QStringList();
|
||||
return m_metadata.keys();
|
||||
}
|
||||
|
||||
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source)
|
||||
static QString convertBSTR(BSTR *string)
|
||||
{
|
||||
if (m_content)
|
||||
m_content->Release();
|
||||
QString value = QString::fromUtf16(reinterpret_cast<ushort *>(*string),
|
||||
::SysStringLen(*string));
|
||||
|
||||
if (!graph || graph->QueryInterface(
|
||||
IID_IAMMediaContent, reinterpret_cast<void **>(&m_content)) != S_OK) {
|
||||
m_content = 0;
|
||||
::SysFreeString(*string);
|
||||
string = 0;
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source, const QString &fileSrc)
|
||||
{
|
||||
m_metadata.clear();
|
||||
|
||||
#ifndef QT_NO_SHELLITEM
|
||||
if (!sHCreateItemFromParsingName) {
|
||||
QSystemLibrary lib(QStringLiteral("shell32"));
|
||||
sHCreateItemFromParsingName = (q_SHCreateItemFromParsingName)(lib.resolve("SHCreateItemFromParsingName"));
|
||||
}
|
||||
|
||||
#ifdef QT_NO_WMSDK
|
||||
Q_UNUSED(source);
|
||||
#else
|
||||
if (m_headerInfo)
|
||||
m_headerInfo->Release();
|
||||
if (!fileSrc.isEmpty() && sHCreateItemFromParsingName) {
|
||||
IShellItem2* shellItem = 0;
|
||||
if (sHCreateItemFromParsingName(reinterpret_cast<const WCHAR*>(fileSrc.utf16()),
|
||||
0, IID_PPV_ARGS(&shellItem)) == S_OK) {
|
||||
|
||||
m_headerInfo = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
|
||||
IPropertyStore *pStore = 0;
|
||||
if (shellItem->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&pStore)) == S_OK) {
|
||||
DWORD cProps;
|
||||
if (SUCCEEDED(pStore->GetCount(&cProps))) {
|
||||
for (DWORD i = 0; i < cProps; ++i)
|
||||
{
|
||||
PROPERTYKEY key;
|
||||
PROPVARIANT var;
|
||||
PropVariantInit(&var);
|
||||
if (FAILED(pStore->GetAt(i, &key)))
|
||||
continue;
|
||||
if (FAILED(pStore->GetValue(key, &var)))
|
||||
continue;
|
||||
|
||||
if (key == PKEY_Author) {
|
||||
m_metadata.insert(QMediaMetaData::Author, convertValue(var));
|
||||
} else if (key == PKEY_Title) {
|
||||
m_metadata.insert(QMediaMetaData::Title, convertValue(var));
|
||||
} else if (key == PKEY_Media_SubTitle) {
|
||||
m_metadata.insert(QMediaMetaData::SubTitle, convertValue(var));
|
||||
} else if (key == PKEY_ParentalRating) {
|
||||
m_metadata.insert(QMediaMetaData::ParentalRating, convertValue(var));
|
||||
} else if (key == PKEY_Comment) {
|
||||
m_metadata.insert(QMediaMetaData::Description, convertValue(var));
|
||||
} else if (key == PKEY_Copyright) {
|
||||
m_metadata.insert(QMediaMetaData::Copyright, convertValue(var));
|
||||
} else if (key == PKEY_Media_ProviderStyle) {
|
||||
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
|
||||
} else if (key == PKEY_Media_Year) {
|
||||
m_metadata.insert(QMediaMetaData::Year, convertValue(var));
|
||||
} else if (key == PKEY_Media_DateEncoded) {
|
||||
m_metadata.insert(QMediaMetaData::Date, convertValue(var));
|
||||
} else if (key == PKEY_Rating) {
|
||||
m_metadata.insert(QMediaMetaData::UserRating,
|
||||
int((convertValue(var).toUInt() - 1) / qreal(98) * 100));
|
||||
} else if (key == PKEY_Keywords) {
|
||||
m_metadata.insert(QMediaMetaData::Keywords, convertValue(var));
|
||||
} else if (key == PKEY_Language) {
|
||||
m_metadata.insert(QMediaMetaData::Language, convertValue(var));
|
||||
} else if (key == PKEY_Media_Publisher) {
|
||||
m_metadata.insert(QMediaMetaData::Publisher, convertValue(var));
|
||||
} else if (key == PKEY_Media_Duration) {
|
||||
m_metadata.insert(QMediaMetaData::Duration,
|
||||
(convertValue(var).toLongLong() + 10000) / 10000);
|
||||
} else if (key == PKEY_Audio_EncodingBitrate) {
|
||||
m_metadata.insert(QMediaMetaData::AudioBitRate, convertValue(var));
|
||||
} else if (key == PKEY_Media_AverageLevel) {
|
||||
m_metadata.insert(QMediaMetaData::AverageLevel, convertValue(var));
|
||||
} else if (key == PKEY_Audio_ChannelCount) {
|
||||
m_metadata.insert(QMediaMetaData::ChannelCount, convertValue(var));
|
||||
} else if (key == PKEY_Audio_PeakValue) {
|
||||
m_metadata.insert(QMediaMetaData::PeakValue, convertValue(var));
|
||||
} else if (key == PKEY_Audio_SampleRate) {
|
||||
m_metadata.insert(QMediaMetaData::SampleRate, convertValue(var));
|
||||
} else if (key == PKEY_Music_AlbumTitle) {
|
||||
m_metadata.insert(QMediaMetaData::AlbumTitle, convertValue(var));
|
||||
} else if (key == PKEY_Music_AlbumArtist) {
|
||||
m_metadata.insert(QMediaMetaData::AlbumArtist, convertValue(var));
|
||||
} else if (key == PKEY_Music_Artist) {
|
||||
m_metadata.insert(QMediaMetaData::ContributingArtist, convertValue(var));
|
||||
} else if (key == PKEY_Music_Composer) {
|
||||
m_metadata.insert(QMediaMetaData::Composer, convertValue(var));
|
||||
} else if (key == PKEY_Music_Conductor) {
|
||||
m_metadata.insert(QMediaMetaData::Conductor, convertValue(var));
|
||||
} else if (key == PKEY_Music_Lyrics) {
|
||||
m_metadata.insert(QMediaMetaData::Lyrics, convertValue(var));
|
||||
} else if (key == PKEY_Music_Mood) {
|
||||
m_metadata.insert(QMediaMetaData::Mood, convertValue(var));
|
||||
} else if (key == PKEY_Music_TrackNumber) {
|
||||
m_metadata.insert(QMediaMetaData::TrackNumber, convertValue(var));
|
||||
} else if (key == PKEY_Music_Genre) {
|
||||
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
|
||||
} else if (key == PKEY_ThumbnailStream) {
|
||||
m_metadata.insert(QMediaMetaData::ThumbnailImage, convertValue(var));
|
||||
} else if (key == PKEY_Video_FrameHeight) {
|
||||
QSize res;
|
||||
res.setHeight(convertValue(var).toUInt());
|
||||
if (SUCCEEDED(pStore->GetValue(PKEY_Video_FrameWidth, &var)))
|
||||
res.setWidth(convertValue(var).toUInt());
|
||||
m_metadata.insert(QMediaMetaData::Resolution, res);
|
||||
} else if (key == PKEY_Video_HorizontalAspectRatio) {
|
||||
QSize aspectRatio;
|
||||
aspectRatio.setWidth(convertValue(var).toUInt());
|
||||
if (SUCCEEDED(pStore->GetValue(PKEY_Video_VerticalAspectRatio, &var)))
|
||||
aspectRatio.setHeight(convertValue(var).toUInt());
|
||||
m_metadata.insert(QMediaMetaData::PixelAspectRatio, aspectRatio);
|
||||
} else if (key == PKEY_Video_FrameRate) {
|
||||
m_metadata.insert(QMediaMetaData::VideoFrameRate,
|
||||
convertValue(var).toReal() / 1000);
|
||||
} else if (key == PKEY_Video_EncodingBitrate) {
|
||||
m_metadata.insert(QMediaMetaData::VideoBitRate, convertValue(var));
|
||||
} else if (key == PKEY_Video_Director) {
|
||||
m_metadata.insert(QMediaMetaData::Director, convertValue(var));
|
||||
} else if (key == PKEY_Media_Writer) {
|
||||
m_metadata.insert(QMediaMetaData::Writer, convertValue(var));
|
||||
}
|
||||
|
||||
PropVariantClear(&var);
|
||||
}
|
||||
}
|
||||
|
||||
pStore->Release();
|
||||
}
|
||||
|
||||
shellItem->Release();
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_metadata.isEmpty())
|
||||
goto send_event;
|
||||
#endif
|
||||
|
||||
#ifndef QT_NO_WMSDK
|
||||
IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
|
||||
|
||||
if (info) {
|
||||
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
|
||||
for (int i = 0; i < count; ++i) {
|
||||
QVariant var = getValue(info, qt_wmMetaDataKeys[i].token);
|
||||
if (var.isValid()) {
|
||||
QString key = qt_wmMetaDataKeys[i].key;
|
||||
|
||||
if (key == QMediaMetaData::Duration) {
|
||||
// duration is provided in 100-nanosecond units, convert to milliseconds
|
||||
var = (var.toLongLong() + 10000) / 10000;
|
||||
} else if (key == QMediaMetaData::Resolution) {
|
||||
QSize res;
|
||||
res.setHeight(var.toUInt());
|
||||
res.setWidth(getValue(info, L"WM/VideoWidth").toUInt());
|
||||
var = res;
|
||||
} else if (key == QMediaMetaData::VideoFrameRate) {
|
||||
var = var.toReal() / 1000.f;
|
||||
} else if (key == QMediaMetaData::PixelAspectRatio) {
|
||||
QSize aspectRatio;
|
||||
aspectRatio.setWidth(var.toUInt());
|
||||
aspectRatio.setHeight(getValue(info, L"AspectRatioY").toUInt());
|
||||
var = aspectRatio;
|
||||
} else if (key == QMediaMetaData::UserRating) {
|
||||
var = (var.toUInt() - 1) / qreal(98) * 100;
|
||||
}
|
||||
|
||||
m_metadata.insert(key, var);
|
||||
}
|
||||
}
|
||||
|
||||
info->Release();
|
||||
}
|
||||
|
||||
if (!m_metadata.isEmpty())
|
||||
goto send_event;
|
||||
#endif
|
||||
{
|
||||
IAMMediaContent *content = 0;
|
||||
|
||||
if ((!graph || graph->QueryInterface(
|
||||
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)
|
||||
&& (!source || source->QueryInterface(
|
||||
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)) {
|
||||
content = 0;
|
||||
}
|
||||
|
||||
if (content) {
|
||||
BSTR string = 0;
|
||||
|
||||
if (content->get_AuthorName(&string) == S_OK)
|
||||
m_metadata.insert(QMediaMetaData::Author, convertBSTR(&string));
|
||||
|
||||
if (content->get_Title(&string) == S_OK)
|
||||
m_metadata.insert(QMediaMetaData::Title, convertBSTR(&string));
|
||||
|
||||
if (content->get_Description(&string) == S_OK)
|
||||
m_metadata.insert(QMediaMetaData::Description, convertBSTR(&string));
|
||||
|
||||
if (content->get_Rating(&string) == S_OK)
|
||||
m_metadata.insert(QMediaMetaData::UserRating, convertBSTR(&string));
|
||||
|
||||
if (content->get_Copyright(&string) == S_OK)
|
||||
m_metadata.insert(QMediaMetaData::Copyright, convertBSTR(&string));
|
||||
|
||||
content->Release();
|
||||
}
|
||||
}
|
||||
|
||||
send_event:
|
||||
// DirectShowMediaPlayerService holds a lock at this point so defer emitting signals to a later
|
||||
// time.
|
||||
QCoreApplication::postEvent(this, new QEvent(QEvent::Type(MetaDataChanged)));
|
||||
@@ -321,12 +569,12 @@ void DirectShowMetaDataControl::customEvent(QEvent *event)
|
||||
if (event->type() == QEvent::Type(MetaDataChanged)) {
|
||||
event->accept();
|
||||
|
||||
bool oldAvailable = m_available;
|
||||
m_available = !m_metadata.isEmpty();
|
||||
if (m_available != oldAvailable)
|
||||
emit metaDataAvailableChanged(m_available);
|
||||
|
||||
emit metaDataChanged();
|
||||
#ifndef QT_NO_WMSDK
|
||||
emit metaDataAvailableChanged(m_content || m_headerInfo);
|
||||
#else
|
||||
emit metaDataAvailableChanged(m_content);
|
||||
#endif
|
||||
} else {
|
||||
QMetaDataReaderControl::customEvent(event);
|
||||
}
|
||||
|
||||
@@ -46,12 +46,6 @@
|
||||
|
||||
#include "directshowglobal.h"
|
||||
|
||||
#include <qnetwork.h>
|
||||
|
||||
#ifndef QT_NO_WMSDK
|
||||
#include <wmsdk.h>
|
||||
#endif
|
||||
|
||||
#include <QtCore/qcoreevent.h>
|
||||
|
||||
class DirectShowPlayerService;
|
||||
@@ -70,7 +64,8 @@ public:
|
||||
QVariant metaData(const QString &key) const;
|
||||
QStringList availableMetaData() const;
|
||||
|
||||
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source);
|
||||
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source,
|
||||
const QString &fileSrc = QString());
|
||||
|
||||
protected:
|
||||
void customEvent(QEvent *event);
|
||||
@@ -81,10 +76,8 @@ private:
|
||||
MetaDataChanged = QEvent::User
|
||||
};
|
||||
|
||||
IAMMediaContent *m_content;
|
||||
#ifndef QT_NO_WMSDK
|
||||
IWMHeaderInfo *m_headerInfo;
|
||||
#endif
|
||||
QVariantMap m_metadata;
|
||||
bool m_available;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
@@ -50,6 +50,10 @@
|
||||
#include "vmr9videowindowcontrol.h"
|
||||
#endif
|
||||
|
||||
#ifndef QT_NO_WMSDK
|
||||
#include <wmsdk.h>
|
||||
#endif
|
||||
|
||||
#include "qmediacontent.h"
|
||||
|
||||
#include <QtCore/qcoreapplication.h>
|
||||
@@ -268,11 +272,10 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
|
||||
IBaseFilter *source = 0;
|
||||
|
||||
QMediaResource resource = m_resources.takeFirst();
|
||||
QUrl url = resource.url();
|
||||
m_url = resource.url();
|
||||
|
||||
HRESULT hr = E_FAIL;
|
||||
|
||||
if (url.scheme() == QLatin1String("http") || url.scheme() == QLatin1String("https")) {
|
||||
if (m_url.scheme() == QLatin1String("http") || m_url.scheme() == QLatin1String("https")) {
|
||||
static const GUID clsid_WMAsfReader = {
|
||||
0x187463a0, 0x5bb7, 0x11d3, {0xac, 0xbe, 0x00, 0x80, 0xc7, 0x5e, 0x24, 0x6e} };
|
||||
|
||||
@@ -283,7 +286,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
|
||||
if (IFileSourceFilter *fileSource = com_new<IFileSourceFilter>(
|
||||
clsid_WMAsfReader, iid_IFileSourceFilter)) {
|
||||
locker->unlock();
|
||||
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(url.toString().utf16()), 0);
|
||||
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), 0);
|
||||
|
||||
if (SUCCEEDED(hr)) {
|
||||
source = com_cast<IBaseFilter>(fileSource, IID_IBaseFilter);
|
||||
@@ -296,11 +299,11 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
|
||||
fileSource->Release();
|
||||
locker->relock();
|
||||
}
|
||||
} else if (url.scheme() == QLatin1String("qrc")) {
|
||||
} else if (m_url.scheme() == QLatin1String("qrc")) {
|
||||
DirectShowRcSource *rcSource = new DirectShowRcSource(m_loop);
|
||||
|
||||
locker->unlock();
|
||||
if (rcSource->open(url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
|
||||
if (rcSource->open(m_url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
|
||||
source = rcSource;
|
||||
else
|
||||
rcSource->Release();
|
||||
@@ -310,7 +313,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
|
||||
if (!SUCCEEDED(hr)) {
|
||||
locker->unlock();
|
||||
hr = m_graph->AddSourceFilter(
|
||||
reinterpret_cast<const OLECHAR *>(url.toString().utf16()), L"Source", &source);
|
||||
reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), L"Source", &source);
|
||||
locker->relock();
|
||||
}
|
||||
|
||||
@@ -1128,7 +1131,7 @@ void DirectShowPlayerService::customEvent(QEvent *event)
|
||||
QMutexLocker locker(&m_mutex);
|
||||
|
||||
m_playerControl->updateMediaInfo(m_duration, m_streamTypes, m_seekable);
|
||||
m_metaDataControl->updateGraph(m_graph, m_source);
|
||||
m_metaDataControl->updateGraph(m_graph, m_source, m_url.toString());
|
||||
|
||||
updateStatus();
|
||||
} else if (event->type() == QEvent::Type(Error)) {
|
||||
|
||||
@@ -43,5 +43,11 @@ qtHaveModule(widgets):!simulator {
$$PWD/vmr9videowindowcontrol.cpp
}

config_wshellitem {
QT += core-private
} else {
DEFINES += QT_NO_SHELLITEM
}

LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32 -lgdi32

@@ -17,7 +17,7 @@ blackberry {
}

qnx {
SUBDIRS += qnx
SUBDIRS += audiocapture qnx
}

win32 {

@@ -276,7 +276,6 @@ void tst_QMediaPlaylist::currentItem()
QCOMPARE(playlist.previousIndex(), 0);
QCOMPARE(playlist.previousIndex(2), -1);

QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
playlist.setCurrentIndex(2);

QCOMPARE(playlist.currentIndex(), -1);

@@ -115,8 +115,7 @@ void tst_QMediaPlaylistNavigator::linearPlayback()
QMediaPlaylistNavigator navigator(&playlist);

navigator.setPlaybackMode(QMediaPlaylist::Sequential);
QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
navigator.jump(0);//it's ok to have warning here
navigator.jump(0);
QVERIFY(navigator.currentItem().isNull());
QCOMPARE(navigator.currentIndex(), -1);

@@ -169,7 +168,6 @@ void tst_QMediaPlaylistNavigator::loopPlayback()
QMediaPlaylistNavigator navigator(&playlist);

navigator.setPlaybackMode(QMediaPlaylist::Loop);
QTest::ignoreMessage(QtWarningMsg, "QMediaPlaylistNavigator: Jump outside playlist range ");
navigator.jump(0);
QVERIFY(navigator.currentItem().isNull());
QCOMPARE(navigator.currentIndex(), -1);