Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: Ie6817b576fd9f2571d315f31d5635d026a2b5430
This commit is contained in:
Liang Qi
2015-04-19 00:01:55 +02:00
63 changed files with 1529 additions and 720 deletions

View File

@@ -65,7 +65,7 @@ Rectangle {
loops: Animation.Infinite loops: Animation.Infinite
running: true running: true
NumberAnimation { NumberAnimation {
duration: 8000 duration: 12000
from: 0 from: 0
to: 1 to: 1
} }
@@ -113,7 +113,7 @@ Rectangle {
} }
velocity: { velocity: {
var speed = root.twoPi * root.radius / 4; var speed = root.twoPi * root.radius / 4;
return shipSound.direction * speed; return shipSound.direction.times(speed);
} }
Component.onCompleted: shipSound.play() Component.onCompleted: shipSound.play()
@@ -137,7 +137,7 @@ Rectangle {
color: "lightgreen" color: "lightgreen"
} }
Text { Text {
text: " volume:" + volumeBar.volumeCtrl.volume * 100 +"%"; text: " volume:" + Math.round(volumeBar.volumeCtrl.volume * 100) +"%";
font.pointSize: 16; font.pointSize: 16;
font.italic: true; font.italic: true;
color: "black" color: "black"

View File

@@ -68,22 +68,18 @@ AudioEngine {
} }
AudioSample { AudioSample {
name:"fire" name:"engine"
source: "fire-03-loop.wav" source: "engine-loop.wav"
preloaded:true preloaded:true
} }
AudioSample { AudioSample {
name:"explosion" name:"horn"
source: "explosion-02.wav" source: "horn.wav"
} }
AudioSample { AudioSample {
name:"lava" name:"whistle"
source: "lava-bubbling-01.wav" source: "whistle.wav"
}
AudioSample {
name:"water"
source: "running-water-01.wav"
} }
Sound { Sound {
name:"shipengine" name:"shipengine"
@@ -91,7 +87,7 @@ AudioEngine {
category:"sfx" category:"sfx"
PlayVariation { PlayVariation {
looping:true looping:true
sample:"fire" sample:"engine"
maxGain:0.9 maxGain:0.9
minGain:0.8 minGain:0.8
} }
@@ -101,23 +97,14 @@ AudioEngine {
name:"effects" name:"effects"
category:"sfx" category:"sfx"
PlayVariation { PlayVariation {
sample:"lava" sample:"horn"
maxGain:1.5 maxGain:2.0
minGain:1.2 minGain:0.9
maxPitch:2.0
minPitch:0.5
} }
PlayVariation { PlayVariation {
sample:"explosion" sample:"whistle"
maxGain:1.1 maxGain:1.0
minGain:0.7 minGain:0.8
maxPitch:1.5
minPitch:0.5
}
PlayVariation {
sample:"water"
maxGain:1.5
minGain:1.2
} }
} }

Binary file not shown.

View File

@@ -159,7 +159,7 @@ qint64 Generator::bytesAvailable() const
} }
AudioTest::AudioTest() AudioTest::AudioTest()
: m_pullTimer(new QTimer(this)) : m_pushTimer(new QTimer(this))
, m_modeButton(0) , m_modeButton(0)
, m_suspendResumeButton(0) , m_suspendResumeButton(0)
, m_deviceBox(0) , m_deviceBox(0)
@@ -220,7 +220,7 @@ void AudioTest::initializeWindow()
void AudioTest::initializeAudio() void AudioTest::initializeAudio()
{ {
connect(m_pullTimer, SIGNAL(timeout()), SLOT(pullTimerExpired())); connect(m_pushTimer, SIGNAL(timeout()), SLOT(pushTimerExpired()));
m_pullMode = true; m_pullMode = true;
@@ -259,7 +259,7 @@ AudioTest::~AudioTest()
void AudioTest::deviceChanged(int index) void AudioTest::deviceChanged(int index)
{ {
m_pullTimer->stop(); m_pushTimer->stop();
m_generator->stop(); m_generator->stop();
m_audioOutput->stop(); m_audioOutput->stop();
m_audioOutput->disconnect(this); m_audioOutput->disconnect(this);
@@ -273,7 +273,7 @@ void AudioTest::volumeChanged(int value)
m_audioOutput->setVolume(qreal(value/100.0f)); m_audioOutput->setVolume(qreal(value/100.0f));
} }
void AudioTest::pullTimerExpired() void AudioTest::pushTimerExpired()
{ {
if (m_audioOutput && m_audioOutput->state() != QAudio::StoppedState) { if (m_audioOutput && m_audioOutput->state() != QAudio::StoppedState) {
int chunks = m_audioOutput->bytesFree()/m_audioOutput->periodSize(); int chunks = m_audioOutput->bytesFree()/m_audioOutput->periodSize();
@@ -290,15 +290,17 @@ void AudioTest::pullTimerExpired()
void AudioTest::toggleMode() void AudioTest::toggleMode()
{ {
m_pullTimer->stop(); m_pushTimer->stop();
m_audioOutput->stop(); m_audioOutput->stop();
if (m_pullMode) { if (m_pullMode) {
//switch to push mode (periodically push to QAudioOutput using a timer)
m_modeButton->setText(tr(PULL_MODE_LABEL)); m_modeButton->setText(tr(PULL_MODE_LABEL));
m_output = m_audioOutput->start(); m_output = m_audioOutput->start();
m_pullMode = false; m_pullMode = false;
m_pullTimer->start(20); m_pushTimer->start(20);
} else { } else {
//switch to pull mode (QAudioOutput pulls from Generator as needed)
m_modeButton->setText(tr(PUSH_MODE_LABEL)); m_modeButton->setText(tr(PUSH_MODE_LABEL));
m_pullMode = true; m_pullMode = true;
m_audioOutput->start(m_generator); m_audioOutput->start(m_generator);

View File

@@ -91,7 +91,7 @@ private:
void createAudioOutput(); void createAudioOutput();
private: private:
QTimer *m_pullTimer; QTimer *m_pushTimer;
// Owned by layout // Owned by layout
QPushButton *m_modeButton; QPushButton *m_modeButton;
@@ -110,7 +110,7 @@ private:
QByteArray m_buffer; QByteArray m_buffer;
private slots: private slots:
void pullTimerExpired(); void pushTimerExpired();
void toggleMode(); void toggleMode();
void toggleSuspendResume(); void toggleSuspendResume();
void deviceChanged(int index); void deviceChanged(int index);

View File

@@ -100,9 +100,6 @@ QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent)
{ {
m_videoSink = gst_element_factory_make ("xvimagesink", NULL); m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
if (!m_videoSink)
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
if (m_videoSink) { if (m_videoSink) {
// Check if the xv sink is usable // Check if the xv sink is usable
if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) { if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {

View File

@@ -31,16 +31,15 @@
** **
****************************************************************************/ ****************************************************************************/
#include "qaudioengine_openal_p.h"
#include <QtCore/QMutex>
#include <QtCore/QThread> #include <QtCore/QThread>
#include <QtNetwork/QNetworkRequest> #include <QtNetwork/QNetworkRequest>
#include <QtNetwork/QNetworkReply> #include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkAccessManager> #include <QtNetwork/QNetworkAccessManager>
#include <QtCore/QUrl>
#include <QtCore/QThread>
#include <QtCore/QMutex>
#include "qsamplecache_p.h" #include "qsamplecache_p.h"
#include "qaudioengine_openal_p.h"
#include "qdebug.h" #include "qdebug.h"
@@ -48,155 +47,146 @@
QT_USE_NAMESPACE QT_USE_NAMESPACE
class StaticSoundBufferAL : public QSoundBufferPrivateAL
{
Q_OBJECT
public:
StaticSoundBufferAL(QObject *parent, const QUrl& url, QSampleCache *sampleLoader)
: QSoundBufferPrivateAL(parent)
, m_ref(1)
, m_url(url)
, m_alBuffer(0)
, m_isReady(false)
, m_sample(0)
, m_sampleLoader(sampleLoader)
{
#ifdef DEBUG_AUDIOENGINE
qDebug() << "creating new StaticSoundBufferOpenAL";
#endif
}
void load()
{
if (m_sample)
return;
m_sample = m_sampleLoader->requestSample(m_url);
connect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
connect(m_sample, SIGNAL(ready()), this, SLOT(sampleReady()));
switch (m_sample->state()) {
case QSample::Ready:
sampleReady();
break;
case QSample::Error:
decoderError();
break;
default:
break;
}
}
~StaticSoundBufferAL()
{
if (m_sample)
m_sample->release();
alDeleteBuffers(1, &m_alBuffer);
}
void bindToSource(ALuint alSource)
{
Q_ASSERT(m_alBuffer != 0);
alSourcei(alSource, AL_BUFFER, m_alBuffer);
}
void unbindFromSource(ALuint alSource)
{
alSourcei(alSource, AL_BUFFER, 0);
}
//called in application
bool isReady() const
{
return m_isReady;
}
long addRef()
{
return ++m_ref;
}
long release()
{
return --m_ref;
}
long refCount() const
{
return m_ref;
}
public Q_SLOTS:
void sampleReady()
{
#ifdef DEBUG_AUDIOENGINE
qDebug() << "StaticSoundBufferOpenAL:sample[" << m_url << "] loaded";
#endif
disconnect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
disconnect(m_sample, SIGNAL(ready()), this, SLOT(sampleReady()));
if (m_sample->data().size() > 1024 * 1024 * 4) {
qWarning() << "source [" << m_url << "] size too large!";
decoderError();
return;
}
if (m_sample->format().channelCount() > 2) {
qWarning() << "source [" << m_url << "] channel > 2!";
decoderError();
return;
}
ALenum alFormat = 0;
if (m_sample->format().sampleSize() == 8) {
alFormat = m_sample->format().channelCount() == 1 ? AL_FORMAT_MONO8 : AL_FORMAT_STEREO8;
} else if (m_sample->format().sampleSize() == 16) {
alFormat = m_sample->format().channelCount() == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16;
} else {
qWarning() << "source [" << m_url << "] invalid sample size:"
<< m_sample->format().sampleSize() << "(should be 8 or 16)";
decoderError();
return;
}
alGenBuffers(1, &m_alBuffer);
if (!QAudioEnginePrivate::checkNoError("create buffer")) {
return;
}
alBufferData(m_alBuffer, alFormat, m_sample->data().data(),
m_sample->data().size(), m_sample->format().sampleRate());
if (!QAudioEnginePrivate::checkNoError("fill buffer")) {
return;
}
m_isReady = true;
emit ready();
m_sample->release();
m_sample = 0;
}
void decoderError()
{
qWarning() << "loading [" << m_url << "] failed";
disconnect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
emit error();
}
private:
long m_ref;
QUrl m_url;
ALuint m_alBuffer;
bool m_isReady;
QSample *m_sample;
QSampleCache *m_sampleLoader;
};
QSoundBufferPrivateAL::QSoundBufferPrivateAL(QObject *parent) QSoundBufferPrivateAL::QSoundBufferPrivateAL(QObject *parent)
: QSoundBuffer(parent) : QSoundBuffer(parent)
{ {
} }
StaticSoundBufferAL::StaticSoundBufferAL(QObject *parent, const QUrl &url, QSampleCache *sampleLoader)
: QSoundBufferPrivateAL(parent),
m_ref(1),
m_url(url),
m_alBuffer(0),
m_state(Creating),
m_sample(0),
m_sampleLoader(sampleLoader)
{
#ifdef DEBUG_AUDIOENGINE
qDebug() << "creating new StaticSoundBufferOpenAL";
#endif
}
StaticSoundBufferAL::~StaticSoundBufferAL()
{
if (m_sample)
m_sample->release();
if (m_alBuffer != 0) {
alGetError(); // clear error
alDeleteBuffers(1, &m_alBuffer);
QAudioEnginePrivate::checkNoError("delete buffer");
}
}
QSoundBuffer::State StaticSoundBufferAL::state() const
{
return m_state;
}
void StaticSoundBufferAL::load()
{
if (m_state == Loading || m_state == Ready)
return;
m_state = Loading;
emit stateChanged(m_state);
m_sample = m_sampleLoader->requestSample(m_url);
connect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
connect(m_sample, SIGNAL(ready()), this, SLOT(sampleReady()));
switch (m_sample->state()) {
case QSample::Ready:
sampleReady();
break;
case QSample::Error:
decoderError();
break;
default:
break;
}
}
void StaticSoundBufferAL::bindToSource(ALuint alSource)
{
Q_ASSERT(m_alBuffer != 0);
alSourcei(alSource, AL_BUFFER, m_alBuffer);
}
void StaticSoundBufferAL::unbindFromSource(ALuint alSource)
{
alSourcei(alSource, AL_BUFFER, 0);
}
void StaticSoundBufferAL::sampleReady()
{
#ifdef DEBUG_AUDIOENGINE
qDebug() << "StaticSoundBufferOpenAL:sample[" << m_url << "] loaded";
#endif
disconnect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
disconnect(m_sample, SIGNAL(ready()), this, SLOT(sampleReady()));
if (m_sample->data().size() > 1024 * 1024 * 4) {
qWarning() << "source [" << m_url << "] size too large!";
decoderError();
return;
}
if (m_sample->format().channelCount() > 2) {
qWarning() << "source [" << m_url << "] channel > 2!";
decoderError();
return;
}
ALenum alFormat = 0;
if (m_sample->format().sampleSize() == 8) {
alFormat = m_sample->format().channelCount() == 1 ? AL_FORMAT_MONO8 : AL_FORMAT_STEREO8;
} else if (m_sample->format().sampleSize() == 16) {
alFormat = m_sample->format().channelCount() == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16;
} else {
qWarning() << "source [" << m_url << "] invalid sample size:"
<< m_sample->format().sampleSize() << "(should be 8 or 16)";
decoderError();
return;
}
alGenBuffers(1, &m_alBuffer);
if (!QAudioEnginePrivate::checkNoError("create buffer")) {
decoderError();
return;
}
alBufferData(m_alBuffer, alFormat, m_sample->data().data(),
m_sample->data().size(), m_sample->format().sampleRate());
if (!QAudioEnginePrivate::checkNoError("fill buffer")) {
decoderError();
return;
}
m_sample->release();
m_sample = 0;
m_state = Ready;
emit stateChanged(m_state);
emit ready();
}
void StaticSoundBufferAL::decoderError()
{
qWarning() << "loading [" << m_url << "] failed";
disconnect(m_sample, SIGNAL(error()), this, SLOT(decoderError()));
disconnect(m_sample, SIGNAL(ready()), this, SLOT(sampleReady()));
m_sample->release();
m_sample = 0;
m_state = Error;
emit stateChanged(m_state);
emit error();
}
///////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////
QAudioEnginePrivate::QAudioEnginePrivate(QObject *parent) QAudioEnginePrivate::QAudioEnginePrivate(QObject *parent)
: QObject(parent) : QObject(parent)
@@ -245,7 +235,6 @@ QAudioEnginePrivate::~QAudioEnginePrivate()
#ifdef DEBUG_AUDIOENGINE #ifdef DEBUG_AUDIOENGINE
qDebug() << "QAudioEnginePrivate::dtor"; qDebug() << "QAudioEnginePrivate::dtor";
#endif #endif
delete m_sampleLoader;
QObjectList children = this->children(); QObjectList children = this->children();
foreach (QObject *child, children) { foreach (QObject *child, children) {
QSoundSourcePrivate* s = qobject_cast<QSoundSourcePrivate*>(child); QSoundSourcePrivate* s = qobject_cast<QSoundSourcePrivate*>(child);
@@ -259,6 +248,8 @@ QAudioEnginePrivate::~QAudioEnginePrivate()
} }
m_staticBufferPool.clear(); m_staticBufferPool.clear();
delete m_sampleLoader;
ALCcontext* context = alcGetCurrentContext(); ALCcontext* context = alcGetCurrentContext();
ALCdevice *device = alcGetContextsDevice(context); ALCdevice *device = alcGetContextsDevice(context);
alcDestroyContext(context); alcDestroyContext(context);
@@ -319,8 +310,7 @@ void QAudioEnginePrivate::releaseSoundBuffer(QSoundBuffer *buffer)
#ifdef DEBUG_AUDIOENGINE #ifdef DEBUG_AUDIOENGINE
qDebug() << "QAudioEnginePrivate: recycle sound buffer"; qDebug() << "QAudioEnginePrivate: recycle sound buffer";
#endif #endif
if (buffer->inherits("StaticSoundBufferAL")) { if (StaticSoundBufferAL *staticBuffer = qobject_cast<StaticSoundBufferAL *>(buffer)) {
StaticSoundBufferAL *staticBuffer = static_cast<StaticSoundBufferAL*>(buffer);
//decrement the reference count, still kept in memory for reuse //decrement the reference count, still kept in memory for reuse
staticBuffer->release(); staticBuffer->release();
//TODO implement some resource recycle strategy //TODO implement some resource recycle strategy
@@ -435,6 +425,3 @@ void QAudioEnginePrivate::updateSoundSources()
m_updateTimer.stop(); m_updateTimer.stop();
} }
} }
#include "qaudioengine_openal_p.moc"
//#include "moc_qaudioengine_openal_p.cpp"

View File

@@ -38,6 +38,7 @@
#include <QList> #include <QList>
#include <QMap> #include <QMap>
#include <QTimer> #include <QTimer>
#include <QUrl>
#if defined(HEADER_OPENAL_PREFIX) #if defined(HEADER_OPENAL_PREFIX)
#include <OpenAL/al.h> #include <OpenAL/al.h>
@@ -52,6 +53,9 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QSample;
class QSampleCache;
class QSoundBufferPrivateAL : public QSoundBuffer class QSoundBufferPrivateAL : public QSoundBuffer
{ {
Q_OBJECT Q_OBJECT
@@ -61,6 +65,40 @@ public:
virtual void unbindFromSource(ALuint alSource) = 0; virtual void unbindFromSource(ALuint alSource) = 0;
}; };
class StaticSoundBufferAL : public QSoundBufferPrivateAL
{
Q_OBJECT
public:
StaticSoundBufferAL(QObject *parent, const QUrl &url, QSampleCache *sampleLoader);
~StaticSoundBufferAL();
State state() const Q_DECL_OVERRIDE;
void load() Q_DECL_OVERRIDE;
void bindToSource(ALuint alSource) Q_DECL_OVERRIDE;
void unbindFromSource(ALuint alSource) Q_DECL_OVERRIDE;
inline long addRef() { return ++m_ref; }
inline long release() { return --m_ref; }
inline long refCount() const { return m_ref; }
public Q_SLOTS:
void sampleReady();
void decoderError();
private:
long m_ref;
QUrl m_url;
ALuint m_alBuffer;
State m_state;
QSample *m_sample;
QSampleCache *m_sampleLoader;
};
class QSoundSourcePrivate : public QSoundSource class QSoundSourcePrivate : public QSoundSource
{ {
Q_OBJECT Q_OBJECT
@@ -113,7 +151,7 @@ private:
qreal m_coneOuterGain; qreal m_coneOuterGain;
}; };
class QSampleCache;
class QAudioEnginePrivate : public QObject class QAudioEnginePrivate : public QObject
{ {
Q_OBJECT Q_OBJECT

View File

@@ -106,6 +106,11 @@ void QDeclarativeAudioSample::componentComplete()
m_complete = true; m_complete = true;
} }
/*!
\qmlproperty url QtAudioEngine::AudioSample::source
This property holds the source URL of the audio sample.
*/
QUrl QDeclarativeAudioSample::source() const QUrl QDeclarativeAudioSample::source() const
{ {
return m_url; return m_url;
@@ -148,7 +153,7 @@ bool QDeclarativeAudioSample::isLoaded() const
{ {
if (!m_soundBuffer) if (!m_soundBuffer)
return false; return false;
return m_soundBuffer->isReady(); return m_soundBuffer->state() == QSoundBuffer::Ready;
} }
/*! /*!
@@ -158,13 +163,12 @@ bool QDeclarativeAudioSample::isLoaded() const
*/ */
void QDeclarativeAudioSample::load() void QDeclarativeAudioSample::load()
{ {
if (isLoaded())
return;
if (!m_soundBuffer) { if (!m_soundBuffer) {
m_preloaded = true; m_preloaded = true;
return; return;
} }
m_soundBuffer->load(); if (m_soundBuffer->state() != QSoundBuffer::Loading && m_soundBuffer->state() != QSoundBuffer::Ready)
m_soundBuffer->load();
} }
void QDeclarativeAudioSample::setPreloaded(bool preloaded) void QDeclarativeAudioSample::setPreloaded(bool preloaded)
@@ -213,7 +217,7 @@ void QDeclarativeAudioSample::init()
} else { } else {
m_soundBuffer = m_soundBuffer =
qobject_cast<QDeclarativeAudioEngine*>(parent())->engine()->getStaticSoundBuffer(m_url); qobject_cast<QDeclarativeAudioEngine*>(parent())->engine()->getStaticSoundBuffer(m_url);
if (m_soundBuffer->isReady()) { if (m_soundBuffer->state() == QSoundBuffer::Ready) {
emit loadedChanged(); emit loadedChanged();
} else { } else {
connect(m_soundBuffer, SIGNAL(ready()), this, SIGNAL(loadedChanged())); connect(m_soundBuffer, SIGNAL(ready()), this, SIGNAL(loadedChanged()));

View File

@@ -81,13 +81,14 @@ QT_USE_NAMESPACE
Sound { Sound {
name:"explosion" name:"explosion"
playType: Sound.Random
PlayVariation { PlayVariation {
sample:"explosion01" sample:"explosion01"
minPitch: 0.8 minPitch: 0.8
maxPitch: 1.1 maxPitch: 1.1
} }
PlayVariation { PlayVariation {
sample:"explosion01" sample:"explosion02"
minGain: 1.1 minGain: 1.1
maxGain: 1.5 maxGain: 1.5
} }

View File

@@ -169,13 +169,14 @@ void QDeclarativeSoundCone::componentComplete()
Sound { Sound {
name:"explosion" name:"explosion"
playType: Sound.Random
PlayVariation { PlayVariation {
sample:"explosion01" sample:"explosion01"
minPitch: 0.8 minPitch: 0.8
maxPitch: 1.1 maxPitch: 1.1
} }
PlayVariation { PlayVariation {
sample:"explosion01" sample:"explosion02"
minGain: 1.1 minGain: 1.1
maxGain: 1.5 maxGain: 1.5
} }
@@ -319,7 +320,7 @@ QDeclarativePlayVariation* QDeclarativeSound::getVariation(int index)
return m_playlist[index]; return m_playlist[index];
} }
void QDeclarativeSound::setAttenuationModel(QString attenuationModel) void QDeclarativeSound::setAttenuationModel(const QString &attenuationModel)
{ {
if (m_complete) { if (m_complete) {
qWarning("Sound: attenuationModel not changeable after initialization."); qWarning("Sound: attenuationModel not changeable after initialization.");

View File

@@ -112,7 +112,7 @@ public:
void setName(const QString& name); void setName(const QString& name);
QString attenuationModel() const; QString attenuationModel() const;
void setAttenuationModel(QString attenuationModel); void setAttenuationModel(const QString &attenuationModel);
QDeclarativeSoundCone* cone() const; QDeclarativeSoundCone* cone() const;

View File

@@ -327,7 +327,7 @@ void QDeclarativeSoundInstance::play()
} }
/*! /*!
\qmlmethod QtAudioEngine::SoundInstance::play() \qmlmethod QtAudioEngine::SoundInstance::stop()
Stops current playback. Stops current playback.
*/ */
@@ -343,7 +343,7 @@ void QDeclarativeSoundInstance::stop()
} }
/*! /*!
\qmlmethod QtAudioEngine::SoundInstance::play() \qmlmethod QtAudioEngine::SoundInstance::pause()
Pauses current playback. Pauses current playback.
*/ */

View File

@@ -41,11 +41,22 @@ QT_BEGIN_NAMESPACE
class QSoundBuffer : public QObject class QSoundBuffer : public QObject
{ {
Q_OBJECT Q_OBJECT
public: public:
virtual bool isReady() const = 0; enum State
{
Creating,
Loading,
Error,
Ready
};
virtual State state() const = 0;
virtual void load() = 0; virtual void load() = 0;
Q_SIGNALS: Q_SIGNALS:
void stateChanged(State state);
void ready(); void ready();
void error(); void error();

View File

@@ -144,7 +144,7 @@ void QSoundInstance::prepareNewVariation()
detach(); detach();
m_bindBuffer = playVar->sampleObject()->soundBuffer(); m_bindBuffer = playVar->sampleObject()->soundBuffer();
if (m_bindBuffer->isReady()) { if (m_bindBuffer->state() == QSoundBuffer::Ready) {
Q_ASSERT(m_soundSource); Q_ASSERT(m_soundSource);
m_soundSource->bindBuffer(m_bindBuffer); m_soundSource->bindBuffer(m_bindBuffer);
m_isReady = true; m_isReady = true;

View File

@@ -85,7 +85,7 @@ void QSoundSourcePrivate::release()
void QSoundSourcePrivate::bindBuffer(QSoundBuffer* soundBuffer) void QSoundSourcePrivate::bindBuffer(QSoundBuffer* soundBuffer)
{ {
unbindBuffer(); unbindBuffer();
Q_ASSERT(soundBuffer->isReady()); Q_ASSERT(soundBuffer->state() == QSoundBuffer::Ready);
m_bindBuffer = qobject_cast<QSoundBufferPrivateAL*>(soundBuffer); m_bindBuffer = qobject_cast<QSoundBufferPrivateAL*>(soundBuffer);
m_bindBuffer->bindToSource(m_alSource); m_bindBuffer->bindToSource(m_alSource);
m_isReady = true; m_isReady = true;

View File

@@ -58,6 +58,12 @@ QML_DECLARE_TYPE(QSoundEffect)
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
static QObject *multimedia_global_object(QQmlEngine *qmlEngine, QJSEngine *jsEngine)
{
Q_UNUSED(qmlEngine)
return new QDeclarativeMultimediaGlobal(jsEngine);
}
class QMultimediaDeclarativeModule : public QQmlExtensionPlugin class QMultimediaDeclarativeModule : public QQmlExtensionPlugin
{ {
Q_OBJECT Q_OBJECT
@@ -99,7 +105,7 @@ public:
qmlRegisterType<QSoundEffect>(uri, 5, 3, "SoundEffect"); qmlRegisterType<QSoundEffect>(uri, 5, 3, "SoundEffect");
// 5.4 types // 5.4 types
qmlRegisterSingletonType(uri, 5, 4, "QtMultimedia", QDeclarativeMultimedia::initGlobalObject); qmlRegisterSingletonType<QDeclarativeMultimediaGlobal>(uri, 5, 4, "QtMultimedia", multimedia_global_object);
qmlRegisterRevision<QDeclarativeCamera, 1>(uri, 5, 4); qmlRegisterRevision<QDeclarativeCamera, 1>(uri, 5, 4);
qmlRegisterUncreatableType<QDeclarativeCameraViewfinder>(uri, 5, 4, "CameraViewfinder", qmlRegisterUncreatableType<QDeclarativeCameraViewfinder>(uri, 5, 4, "CameraViewfinder",
trUtf8("CameraViewfinder is provided by Camera")); trUtf8("CameraViewfinder is provided by Camera"));

View File

@@ -3,9 +3,15 @@ import QtQuick.tooling 1.1
// This file describes the plugin-supplied types contained in the library. // This file describes the plugin-supplied types contained in the library.
// It is used for QML tooling purposes only. // It is used for QML tooling purposes only.
// //
// This file was auto-generated with the command 'qmlplugindump -notrelocatable QtMultimedia 5.0'. // This file was auto-generated by:
// 'qmlplugindump -nonrelocatable QtMultimedia 5.5'
Module { Module {
Component {
name: "QAbstractVideoFilter"
prototype: "QObject"
Property { name: "active"; type: "bool" }
}
Component { Component {
name: "QCamera" name: "QCamera"
prototype: "QMediaObject" prototype: "QMediaObject"
@@ -76,6 +82,14 @@ Module {
"LockFocus": 4 "LockFocus": 4
} }
} }
Enum {
name: "Position"
values: {
"UnspecifiedPosition": 0,
"BackFace": 1,
"FrontFace": 2
}
}
Property { name: "state"; type: "QCamera::State"; isReadonly: true } Property { name: "state"; type: "QCamera::State"; isReadonly: true }
Property { name: "status"; type: "QCamera::Status"; isReadonly: true } Property { name: "status"; type: "QCamera::Status"; isReadonly: true }
Property { name: "captureMode"; type: "QCamera::CaptureModes" } Property { name: "captureMode"; type: "QCamera::CaptureModes" }
@@ -232,6 +246,14 @@ Module {
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/Camera 5.0"] exports: ["QtMultimedia/Camera 5.0"]
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Enum {
name: "Position"
values: {
"UnspecifiedPosition": 0,
"BackFace": 1,
"FrontFace": 2
}
}
Enum { Enum {
name: "CaptureMode" name: "CaptureMode"
values: { values: {
@@ -309,6 +331,16 @@ Module {
"ExposureBeach": 8, "ExposureBeach": 8,
"ExposureLargeAperture": 9, "ExposureLargeAperture": 9,
"ExposureSmallAperture": 10, "ExposureSmallAperture": 10,
"ExposureAction": 11,
"ExposureLandscape": 12,
"ExposureNightPortrait": 13,
"ExposureTheatre": 14,
"ExposureSunset": 15,
"ExposureSteadyPhoto": 16,
"ExposureFireworks": 17,
"ExposureParty": 18,
"ExposureCandlelight": 19,
"ExposureBarcode": 20,
"ExposureModeVendor": 1000 "ExposureModeVendor": 1000
} }
} }
@@ -357,6 +389,10 @@ Module {
"ResourceMissing": 3 "ResourceMissing": 3
} }
} }
Property { name: "deviceId"; revision: 1; type: "string" }
Property { name: "position"; revision: 1; type: "Position" }
Property { name: "displayName"; revision: 1; type: "string"; isReadonly: true }
Property { name: "orientation"; revision: 1; type: "int"; isReadonly: true }
Property { name: "captureMode"; type: "CaptureMode" } Property { name: "captureMode"; type: "CaptureMode" }
Property { name: "cameraState"; type: "State" } Property { name: "cameraState"; type: "State" }
Property { name: "cameraStatus"; type: "Status"; isReadonly: true } Property { name: "cameraStatus"; type: "Status"; isReadonly: true }
@@ -395,12 +431,30 @@ Module {
isReadonly: true isReadonly: true
isPointer: true isPointer: true
} }
Property {
name: "metaData"
revision: 1
type: "QDeclarativeMediaMetaData"
isReadonly: true
isPointer: true
}
Property {
name: "viewfinder"
revision: 1
type: "QDeclarativeCameraViewfinder"
isReadonly: true
isPointer: true
}
Signal { name: "errorChanged" } Signal { name: "errorChanged" }
Signal { Signal {
name: "error" name: "error"
Parameter { name: "errorCode"; type: "QDeclarativeCamera::Error" } Parameter { name: "errorCode"; type: "QDeclarativeCamera::Error" }
Parameter { name: "errorString"; type: "string" } Parameter { name: "errorString"; type: "string" }
} }
Signal { name: "deviceIdChanged"; revision: 1 }
Signal { name: "positionChanged"; revision: 1 }
Signal { name: "displayNameChanged"; revision: 1 }
Signal { name: "orientationChanged"; revision: 1 }
Signal { Signal {
name: "cameraStateChanged" name: "cameraStateChanged"
Parameter { type: "QDeclarativeCamera::State" } Parameter { type: "QDeclarativeCamera::State" }
@@ -445,11 +499,33 @@ Module {
name: "setDigitalZoom" name: "setDigitalZoom"
Parameter { type: "double" } Parameter { type: "double" }
} }
Method {
name: "supportedViewfinderResolutions"
revision: 2
type: "QJSValue"
Parameter { name: "minimumFrameRate"; type: "double" }
Parameter { name: "maximumFrameRate"; type: "double" }
}
Method {
name: "supportedViewfinderResolutions"
revision: 2
type: "QJSValue"
Parameter { name: "minimumFrameRate"; type: "double" }
}
Method { name: "supportedViewfinderResolutions"; revision: 2; type: "QJSValue" }
Method {
name: "supportedViewfinderFrameRateRanges"
revision: 2
type: "QJSValue"
Parameter { name: "resolution"; type: "QSize" }
}
Method { name: "supportedViewfinderFrameRateRanges"; revision: 2; type: "QJSValue" }
} }
Component { Component {
name: "QDeclarativeCameraCapture" name: "QDeclarativeCameraCapture"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraCapture 5.0"] exports: ["QtMultimedia/CameraCapture 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Property { name: "ready"; type: "bool"; isReadonly: true } Property { name: "ready"; type: "bool"; isReadonly: true }
Property { name: "capturedImagePath"; type: "string"; isReadonly: true } Property { name: "capturedImagePath"; type: "string"; isReadonly: true }
@@ -509,7 +585,43 @@ Module {
name: "QDeclarativeCameraExposure" name: "QDeclarativeCameraExposure"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraExposure 5.0"] exports: ["QtMultimedia/CameraExposure 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Enum {
name: "ExposureMode"
values: {
"ExposureAuto": 0,
"ExposureManual": 1,
"ExposurePortrait": 2,
"ExposureNight": 3,
"ExposureBacklight": 4,
"ExposureSpotlight": 5,
"ExposureSports": 6,
"ExposureSnow": 7,
"ExposureBeach": 8,
"ExposureLargeAperture": 9,
"ExposureSmallAperture": 10,
"ExposureAction": 11,
"ExposureLandscape": 12,
"ExposureNightPortrait": 13,
"ExposureTheatre": 14,
"ExposureSunset": 15,
"ExposureSteadyPhoto": 16,
"ExposureFireworks": 17,
"ExposureParty": 18,
"ExposureCandlelight": 19,
"ExposureBarcode": 20,
"ExposureModeVendor": 1000
}
}
Enum {
name: "MeteringMode"
values: {
"MeteringMatrix": 1,
"MeteringAverage": 2,
"MeteringSpot": 3
}
}
Property { name: "exposureCompensation"; type: "double" } Property { name: "exposureCompensation"; type: "double" }
Property { name: "iso"; type: "int"; isReadonly: true } Property { name: "iso"; type: "int"; isReadonly: true }
Property { name: "shutterSpeed"; type: "double"; isReadonly: true } Property { name: "shutterSpeed"; type: "double"; isReadonly: true }
@@ -517,9 +629,9 @@ Module {
Property { name: "manualShutterSpeed"; type: "double" } Property { name: "manualShutterSpeed"; type: "double" }
Property { name: "manualAperture"; type: "double" } Property { name: "manualAperture"; type: "double" }
Property { name: "manualIso"; type: "double" } Property { name: "manualIso"; type: "double" }
Property { name: "exposureMode"; type: "QDeclarativeCamera::ExposureMode" } Property { name: "exposureMode"; type: "ExposureMode" }
Property { name: "spotMeteringPoint"; type: "QPointF" } Property { name: "spotMeteringPoint"; type: "QPointF" }
Property { name: "meteringMode"; type: "QDeclarativeCamera::MeteringMode" } Property { name: "meteringMode"; type: "MeteringMode" }
Signal { Signal {
name: "isoSensitivityChanged" name: "isoSensitivityChanged"
Parameter { type: "int" } Parameter { type: "int" }
@@ -550,11 +662,11 @@ Module {
} }
Signal { Signal {
name: "exposureModeChanged" name: "exposureModeChanged"
Parameter { type: "QDeclarativeCamera::ExposureMode" } Parameter { type: "ExposureMode" }
} }
Signal { Signal {
name: "meteringModeChanged" name: "meteringModeChanged"
Parameter { type: "QDeclarativeCamera::MeteringMode" } Parameter { type: "MeteringMode" }
} }
Signal { Signal {
name: "spotMeteringPointChanged" name: "spotMeteringPointChanged"
@@ -562,7 +674,7 @@ Module {
} }
Method { Method {
name: "setExposureMode" name: "setExposureMode"
Parameter { type: "QDeclarativeCamera::ExposureMode" } Parameter { type: "ExposureMode" }
} }
Method { Method {
name: "setExposureCompensation" name: "setExposureCompensation"
@@ -588,38 +700,75 @@ Module {
name: "QDeclarativeCameraFlash" name: "QDeclarativeCameraFlash"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraFlash 5.0"] exports: ["QtMultimedia/CameraFlash 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Enum {
name: "FlashMode"
values: {
"FlashAuto": 1,
"FlashOff": 2,
"FlashOn": 4,
"FlashRedEyeReduction": 8,
"FlashFill": 16,
"FlashTorch": 32,
"FlashVideoLight": 64,
"FlashSlowSyncFrontCurtain": 128,
"FlashSlowSyncRearCurtain": 256,
"FlashManual": 512
}
}
Property { name: "ready"; type: "bool"; isReadonly: true } Property { name: "ready"; type: "bool"; isReadonly: true }
Property { name: "mode"; type: "int" } Property { name: "mode"; type: "FlashMode" }
Signal { Signal {
name: "flashReady" name: "flashReady"
Parameter { name: "status"; type: "bool" } Parameter { name: "status"; type: "bool" }
} }
Signal { Signal {
name: "flashModeChanged" name: "flashModeChanged"
Parameter { type: "int" } Parameter { type: "FlashMode" }
} }
Method { Method {
name: "setFlashMode" name: "setFlashMode"
Parameter { type: "int" } Parameter { type: "FlashMode" }
} }
} }
Component { Component {
name: "QDeclarativeCameraFocus" name: "QDeclarativeCameraFocus"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraFocus 5.0"] exports: ["QtMultimedia/CameraFocus 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Property { name: "focusMode"; type: "QDeclarativeCamera::FocusMode" } Enum {
Property { name: "focusPointMode"; type: "QDeclarativeCamera::FocusPointMode" } name: "FocusMode"
values: {
"FocusManual": 1,
"FocusHyperfocal": 2,
"FocusInfinity": 4,
"FocusAuto": 8,
"FocusContinuous": 16,
"FocusMacro": 32
}
}
Enum {
name: "FocusPointMode"
values: {
"FocusPointAuto": 0,
"FocusPointCenter": 1,
"FocusPointFaceDetection": 2,
"FocusPointCustom": 3
}
}
Property { name: "focusMode"; type: "FocusMode" }
Property { name: "focusPointMode"; type: "FocusPointMode" }
Property { name: "customFocusPoint"; type: "QPointF" } Property { name: "customFocusPoint"; type: "QPointF" }
Property { name: "focusZones"; type: "QObject"; isReadonly: true; isPointer: true } Property { name: "focusZones"; type: "QObject"; isReadonly: true; isPointer: true }
Signal { Signal {
name: "focusModeChanged" name: "focusModeChanged"
Parameter { type: "QDeclarativeCamera::FocusMode" } Parameter { type: "FocusMode" }
} }
Signal { Signal {
name: "focusPointModeChanged" name: "focusPointModeChanged"
Parameter { type: "QDeclarativeCamera::FocusPointMode" } Parameter { type: "FocusPointMode" }
} }
Signal { Signal {
name: "customFocusPointChanged" name: "customFocusPointChanged"
@@ -627,11 +776,11 @@ Module {
} }
Method { Method {
name: "setFocusMode" name: "setFocusMode"
Parameter { type: "QDeclarativeCamera::FocusMode" } Parameter { type: "FocusMode" }
} }
Method { Method {
name: "setFocusPointMode" name: "setFocusPointMode"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusPointMode" } Parameter { name: "mode"; type: "FocusPointMode" }
} }
Method { Method {
name: "setCustomFocusPoint" name: "setCustomFocusPoint"
@@ -640,19 +789,23 @@ Module {
Method { Method {
name: "isFocusModeSupported" name: "isFocusModeSupported"
type: "bool" type: "bool"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusMode" } Parameter { name: "mode"; type: "FocusMode" }
} }
Method { Method {
name: "isFocusPointModeSupported" name: "isFocusPointModeSupported"
type: "bool" type: "bool"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusPointMode" } Parameter { name: "mode"; type: "FocusPointMode" }
} }
} }
Component { Component {
name: "QDeclarativeCameraImageProcessing" name: "QDeclarativeCameraImageProcessing"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraImageProcessing 5.0"] exports: [
exportMetaObjectRevisions: [0] "QtMultimedia/CameraImageProcessing 5.0",
"QtMultimedia/CameraImageProcessing 5.5"
]
isCreatable: false
exportMetaObjectRevisions: [0, 1]
Enum { Enum {
name: "WhiteBalanceMode" name: "WhiteBalanceMode"
values: { values: {
@@ -668,12 +821,28 @@ Module {
"WhiteBalanceVendor": 1000 "WhiteBalanceVendor": 1000
} }
} }
Enum {
name: "ColorFilter"
values: {
"ColorFilterNone": 0,
"ColorFilterGrayscale": 1,
"ColorFilterNegative": 2,
"ColorFilterSolarize": 3,
"ColorFilterSepia": 4,
"ColorFilterPosterize": 5,
"ColorFilterWhiteboard": 6,
"ColorFilterBlackboard": 7,
"ColorFilterAqua": 8,
"ColorFilterVendor": 1000
}
}
Property { name: "whiteBalanceMode"; type: "WhiteBalanceMode" } Property { name: "whiteBalanceMode"; type: "WhiteBalanceMode" }
Property { name: "manualWhiteBalance"; type: "double" } Property { name: "manualWhiteBalance"; type: "double" }
Property { name: "contrast"; type: "double" } Property { name: "contrast"; type: "double" }
Property { name: "saturation"; type: "double" } Property { name: "saturation"; type: "double" }
Property { name: "sharpeningLevel"; type: "double" } Property { name: "sharpeningLevel"; type: "double" }
Property { name: "denoisingLevel"; type: "double" } Property { name: "denoisingLevel"; type: "double" }
Property { name: "colorFilter"; revision: 1; type: "ColorFilter" }
Signal { Signal {
name: "whiteBalanceModeChanged" name: "whiteBalanceModeChanged"
Parameter { type: "QDeclarativeCameraImageProcessing::WhiteBalanceMode" } Parameter { type: "QDeclarativeCameraImageProcessing::WhiteBalanceMode" }
@@ -722,11 +891,16 @@ Module {
name: "setDenoisingLevel" name: "setDenoisingLevel"
Parameter { name: "value"; type: "double" } Parameter { name: "value"; type: "double" }
} }
Method {
name: "setColorFilter"
Parameter { name: "colorFilter"; type: "ColorFilter" }
}
} }
Component { Component {
name: "QDeclarativeCameraRecorder" name: "QDeclarativeCameraRecorder"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/CameraRecorder 5.0"] exports: ["QtMultimedia/CameraRecorder 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0] exportMetaObjectRevisions: [0]
Enum { Enum {
name: "RecorderState" name: "RecorderState"
@@ -783,7 +957,7 @@ Module {
Property { name: "actualLocation"; type: "string"; isReadonly: true } Property { name: "actualLocation"; type: "string"; isReadonly: true }
Property { name: "muted"; type: "bool" } Property { name: "muted"; type: "bool" }
Property { name: "errorString"; type: "string"; isReadonly: true } Property { name: "errorString"; type: "string"; isReadonly: true }
Property { name: "errorCode"; type: "string"; isReadonly: true } Property { name: "errorCode"; type: "Error"; isReadonly: true }
Signal { Signal {
name: "recorderStateChanged" name: "recorderStateChanged"
Parameter { name: "state"; type: "QDeclarativeCameraRecorder::RecorderState" } Parameter { name: "state"; type: "QDeclarativeCameraRecorder::RecorderState" }
@@ -922,6 +1096,119 @@ Module {
Parameter { name: "encodingMode"; type: "EncodingMode" } Parameter { name: "encodingMode"; type: "EncodingMode" }
} }
} }
Component {
name: "QDeclarativeCameraViewfinder"
prototype: "QObject"
exports: ["QtMultimedia/CameraViewfinder 5.4"]
isCreatable: false
exportMetaObjectRevisions: [0]
Property { name: "resolution"; type: "QSize" }
Property { name: "minimumFrameRate"; type: "double" }
Property { name: "maximumFrameRate"; type: "double" }
}
Component {
name: "QDeclarativeMediaMetaData"
prototype: "QObject"
Property { name: "title"; type: "QVariant" }
Property { name: "subTitle"; type: "QVariant" }
Property { name: "author"; type: "QVariant" }
Property { name: "comment"; type: "QVariant" }
Property { name: "description"; type: "QVariant" }
Property { name: "category"; type: "QVariant" }
Property { name: "genre"; type: "QVariant" }
Property { name: "year"; type: "QVariant" }
Property { name: "date"; type: "QVariant" }
Property { name: "userRating"; type: "QVariant" }
Property { name: "keywords"; type: "QVariant" }
Property { name: "language"; type: "QVariant" }
Property { name: "publisher"; type: "QVariant" }
Property { name: "copyright"; type: "QVariant" }
Property { name: "parentalRating"; type: "QVariant" }
Property { name: "ratingOrganization"; type: "QVariant" }
Property { name: "size"; type: "QVariant" }
Property { name: "mediaType"; type: "QVariant" }
Property { name: "duration"; type: "QVariant" }
Property { name: "audioBitRate"; type: "QVariant" }
Property { name: "audioCodec"; type: "QVariant" }
Property { name: "averageLevel"; type: "QVariant" }
Property { name: "channelCount"; type: "QVariant" }
Property { name: "peakValue"; type: "QVariant" }
Property { name: "sampleRate"; type: "QVariant" }
Property { name: "albumTitle"; type: "QVariant" }
Property { name: "albumArtist"; type: "QVariant" }
Property { name: "contributingArtist"; type: "QVariant" }
Property { name: "composer"; type: "QVariant" }
Property { name: "conductor"; type: "QVariant" }
Property { name: "lyrics"; type: "QVariant" }
Property { name: "mood"; type: "QVariant" }
Property { name: "trackNumber"; type: "QVariant" }
Property { name: "trackCount"; type: "QVariant" }
Property { name: "coverArtUrlSmall"; type: "QVariant" }
Property { name: "coverArtUrlLarge"; type: "QVariant" }
Property { name: "resolution"; type: "QVariant" }
Property { name: "pixelAspectRatio"; type: "QVariant" }
Property { name: "videoFrameRate"; type: "QVariant" }
Property { name: "videoBitRate"; type: "QVariant" }
Property { name: "videoCodec"; type: "QVariant" }
Property { name: "posterUrl"; type: "QVariant" }
Property { name: "chapterNumber"; type: "QVariant" }
Property { name: "director"; type: "QVariant" }
Property { name: "leadPerformer"; type: "QVariant" }
Property { name: "writer"; type: "QVariant" }
Property { name: "cameraManufacturer"; type: "QVariant" }
Property { name: "cameraModel"; type: "QVariant" }
Property { name: "event"; type: "QVariant" }
Property { name: "subject"; type: "QVariant" }
Property { name: "orientation"; type: "QVariant" }
Property { name: "exposureTime"; type: "QVariant" }
Property { name: "fNumber"; type: "QVariant" }
Property { name: "exposureProgram"; type: "QVariant" }
Property { name: "isoSpeedRatings"; type: "QVariant" }
Property { name: "exposureBiasValue"; type: "QVariant" }
Property { name: "dateTimeOriginal"; type: "QVariant" }
Property { name: "dateTimeDigitized"; type: "QVariant" }
Property { name: "subjectDistance"; type: "QVariant" }
Property { name: "meteringMode"; type: "QVariant" }
Property { name: "lightSource"; type: "QVariant" }
Property { name: "flash"; type: "QVariant" }
Property { name: "focalLength"; type: "QVariant" }
Property { name: "exposureMode"; type: "QVariant" }
Property { name: "whiteBalance"; type: "QVariant" }
Property { name: "digitalZoomRatio"; type: "QVariant" }
Property { name: "focalLengthIn35mmFilm"; type: "QVariant" }
Property { name: "sceneCaptureType"; type: "QVariant" }
Property { name: "gainControl"; type: "QVariant" }
Property { name: "contrast"; type: "QVariant" }
Property { name: "saturation"; type: "QVariant" }
Property { name: "sharpness"; type: "QVariant" }
Property { name: "deviceSettingDescription"; type: "QVariant" }
Property { name: "gpsLatitude"; type: "QVariant" }
Property { name: "gpsLongitude"; type: "QVariant" }
Property { name: "gpsAltitude"; type: "QVariant" }
Property { name: "gpsTimeStamp"; type: "QVariant" }
Property { name: "gpsSatellites"; type: "QVariant" }
Property { name: "gpsStatus"; type: "QVariant" }
Property { name: "gpsDOP"; type: "QVariant" }
Property { name: "gpsSpeed"; type: "QVariant" }
Property { name: "gpsTrack"; type: "QVariant" }
Property { name: "gpsTrackRef"; type: "QVariant" }
Property { name: "gpsImgDirection"; type: "QVariant" }
Property { name: "gpsImgDirectionRef"; type: "QVariant" }
Property { name: "gpsMapDatum"; type: "QVariant" }
Property { name: "gpsProcessingMethod"; type: "QVariant" }
Property { name: "gpsAreaInformation"; type: "QVariant" }
Signal { name: "metaDataChanged" }
}
Component {
name: "QDeclarativeMultimediaGlobal"
prototype: "QObject"
exports: ["QtMultimedia/QtMultimedia 5.4"]
isCreatable: false
isSingleton: true
exportMetaObjectRevisions: [0]
Property { name: "defaultCamera"; type: "QJSValue"; isReadonly: true }
Property { name: "availableCameras"; type: "QJSValue"; isReadonly: true }
}
Component { Component {
name: "QDeclarativeRadio" name: "QDeclarativeRadio"
prototype: "QObject" prototype: "QObject"
@@ -1223,9 +1510,10 @@ Module {
Property { name: "source"; type: "QObject"; isPointer: true } Property { name: "source"; type: "QObject"; isPointer: true }
Property { name: "fillMode"; type: "FillMode" } Property { name: "fillMode"; type: "FillMode" }
Property { name: "orientation"; type: "int" } Property { name: "orientation"; type: "int" }
Property { name: "autoOrientation"; revision: 2; type: "bool" }
Property { name: "sourceRect"; type: "QRectF"; isReadonly: true } Property { name: "sourceRect"; type: "QRectF"; isReadonly: true }
Property { name: "contentRect"; type: "QRectF"; isReadonly: true } Property { name: "contentRect"; type: "QRectF"; isReadonly: true }
Property { name: "filters"; isList: true; isReadonly: true } Property { name: "filters"; type: "QAbstractVideoFilter"; isList: true; isReadonly: true }
Signal { Signal {
name: "fillModeChanged" name: "fillModeChanged"
Parameter { type: "QDeclarativeVideoOutput::FillMode" } Parameter { type: "QDeclarativeVideoOutput::FillMode" }
@@ -1301,8 +1589,11 @@ Module {
Component { Component {
name: "QSoundEffect" name: "QSoundEffect"
prototype: "QObject" prototype: "QObject"
exports: ["QtMultimedia/SoundEffect 5.0"] exports: [
exportMetaObjectRevisions: [0] "QtMultimedia/SoundEffect 5.0",
"QtMultimedia/SoundEffect 5.3"
]
exportMetaObjectRevisions: [0, 0]
Enum { Enum {
name: "Loop" name: "Loop"
values: { values: {

View File

@@ -83,7 +83,7 @@ class QDeclarativeCameraRecorder : public QObject
Q_PROPERTY(QString actualLocation READ actualLocation NOTIFY actualLocationChanged) Q_PROPERTY(QString actualLocation READ actualLocation NOTIFY actualLocationChanged)
Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged) Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
Q_PROPERTY(QString errorString READ errorString NOTIFY error) Q_PROPERTY(QString errorString READ errorString NOTIFY error)
Q_PROPERTY(QString errorCode READ errorCode NOTIFY error) Q_PROPERTY(Error errorCode READ errorCode NOTIFY error)
public: public:
enum RecorderState enum RecorderState

View File

@@ -153,27 +153,6 @@ Camera {
\endqml \endqml
*/ */
namespace QDeclarativeMultimedia {
#define FREEZE_SOURCE "(function deepFreeze(o) { "\
" var prop, propKey;" \
" Object.freeze(o);" \
" for (propKey in o) {" \
" prop = o[propKey];" \
" if (!o.hasOwnProperty(propKey) || !(typeof prop === \"object\") || " \
" Object.isFrozen(prop)) {" \
" continue;" \
" }" \
" deepFreeze(prop);" \
" }" \
"})"
static void deepFreeze(QJSEngine *jsEngine, const QJSValue &obj)
{
QJSValue freezeFunc = jsEngine->evaluate(QString::fromUtf8(FREEZE_SOURCE));
freezeFunc.call(QJSValueList() << obj);
}
static QJSValue cameraInfoToJSValue(QJSEngine *jsEngine, const QCameraInfo &camera) static QJSValue cameraInfoToJSValue(QJSEngine *jsEngine, const QCameraInfo &camera)
{ {
QJSValue o = jsEngine->newObject(); QJSValue o = jsEngine->newObject();
@@ -184,29 +163,24 @@ static QJSValue cameraInfoToJSValue(QJSEngine *jsEngine, const QCameraInfo &came
return o; return o;
} }
QJSValue initGlobalObject(QQmlEngine *qmlEngine, QJSEngine *jsEngine) QDeclarativeMultimediaGlobal::QDeclarativeMultimediaGlobal(QJSEngine *engine, QObject *parent)
: QObject(parent)
, m_engine(engine)
{ {
Q_UNUSED(qmlEngine)
QJSValue globalObject = jsEngine->newObject();
// property object defaultCamera
globalObject.setProperty(QStringLiteral("defaultCamera"),
cameraInfoToJSValue(jsEngine, QCameraInfo::defaultCamera()));
// property list<object> availableCameras
QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
QJSValue availableCameras = jsEngine->newArray(cameras.count());
for (int i = 0; i < cameras.count(); ++i)
availableCameras.setProperty(i, cameraInfoToJSValue(jsEngine, cameras.at(i)));
globalObject.setProperty(QStringLiteral("availableCameras"), availableCameras);
// freeze global object to prevent properties to be modified from QML
deepFreeze(jsEngine, globalObject);
return globalObject;
} }
QJSValue QDeclarativeMultimediaGlobal::defaultCamera() const
{
return cameraInfoToJSValue(m_engine, QCameraInfo::defaultCamera());
}
QJSValue QDeclarativeMultimediaGlobal::availableCameras() const
{
QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
QJSValue availableCameras = m_engine->newArray(cameras.count());
for (int i = 0; i < cameras.count(); ++i)
availableCameras.setProperty(i, cameraInfoToJSValue(m_engine, cameras.at(i)));
return availableCameras;
} }
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -46,12 +46,32 @@
// //
#include <QtQml/qqml.h> #include <QtQml/qqml.h>
#include <QtQml/qjsvalue.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
namespace QDeclarativeMultimedia { class QDeclarativeMultimediaGlobal : public QObject
QJSValue initGlobalObject(QQmlEngine *, QJSEngine *); {
} Q_OBJECT
Q_PROPERTY(QJSValue defaultCamera READ defaultCamera NOTIFY defaultCameraChanged)
Q_PROPERTY(QJSValue availableCameras READ availableCameras NOTIFY availableCamerasChanged)
public:
explicit QDeclarativeMultimediaGlobal(QJSEngine *engine, QObject *parent = 0);
QJSValue defaultCamera() const;
QJSValue availableCameras() const;
Q_SIGNALS:
// Unused at the moment. QCameraInfo doesn't notify when cameras are added or removed,
// but it might change in the future.
void defaultCameraChanged();
void availableCamerasChanged();
private:
QJSEngine *m_engine;
};
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -74,7 +74,7 @@ QDeclarativeTorch::QDeclarativeTorch(QObject *parent)
m_flash = service ? service->requestControl<QCameraFlashControl*>() : 0; m_flash = service ? service->requestControl<QCameraFlashControl*>() : 0;
if (m_exposure) if (m_exposure)
connect(m_exposure, SIGNAL(valueChanged(int)), SLOT(parameterChanged(int))); connect(m_exposure, SIGNAL(actualValueChanged(int)), SLOT(parameterChanged(int)));
// XXX There's no signal for flash mode changed // XXX There's no signal for flash mode changed
} }

View File

@@ -37,6 +37,7 @@
#include <QtNetwork/QNetworkReply> #include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest> #include <QtNetwork/QNetworkRequest>
#include "qmediaobject_p.h" #include "qmediaobject_p.h"
#include <private/qobject_p.h>
#include "qmediametadata.h" #include "qmediametadata.h"
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -240,10 +241,9 @@ Version=2
///////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////
class QPlaylistFileParserPrivate : public QObject class QPlaylistFileParserPrivate : public QObjectPrivate
{ {
Q_OBJECT Q_DECLARE_PUBLIC(QPlaylistFileParser)
Q_DECLARE_NON_CONST_PUBLIC(QPlaylistFileParser)
public: public:
QPlaylistFileParserPrivate() QPlaylistFileParserPrivate()
: m_source(0) : m_source(0)
@@ -270,8 +270,6 @@ public:
ParserBase *m_currentParser; ParserBase *m_currentParser;
QNetworkAccessManager m_mgr; QNetworkAccessManager m_mgr;
QPlaylistFileParser *q_ptr;
private: private:
void processLine(int startIndex, int length); void processLine(int startIndex, int length);
}; };
@@ -292,25 +290,25 @@ void QPlaylistFileParserPrivate::processLine(int startIndex, int length)
switch (m_type) { switch (m_type) {
case QPlaylistFileParser::UNKNOWN: case QPlaylistFileParser::UNKNOWN:
emit q->error(QPlaylistFileParser::FormatError, QString(tr("%1 playlist type is unknown")).arg(m_root.toString())); emit q->error(QPlaylistFileParser::FormatError, QString(QObject::tr("%1 playlist type is unknown")).arg(m_root.toString()));
q->stop(); q->stop();
return; return;
case QPlaylistFileParser::M3U: case QPlaylistFileParser::M3U:
m_currentParser = new M3UParser(this); m_currentParser = new M3UParser(q);
break; break;
case QPlaylistFileParser::M3U8: case QPlaylistFileParser::M3U8:
m_currentParser = new M3UParser(this); m_currentParser = new M3UParser(q);
m_utf8 = true; m_utf8 = true;
break; break;
case QPlaylistFileParser::PLS: case QPlaylistFileParser::PLS:
m_currentParser = new PLSParser(this); m_currentParser = new PLSParser(q);
break; break;
} }
Q_ASSERT(m_currentParser); Q_ASSERT(m_currentParser);
connect(m_currentParser, SIGNAL(newItem(QVariant)), q, SIGNAL(newItem(QVariant))); QObject::connect(m_currentParser, SIGNAL(newItem(QVariant)), q, SIGNAL(newItem(QVariant)));
connect(m_currentParser, SIGNAL(finished()), q, SLOT(_q_handleParserFinished())); QObject::connect(m_currentParser, SIGNAL(finished()), q, SLOT(_q_handleParserFinished()));
connect(m_currentParser, SIGNAL(error(QPlaylistFileParser::ParserError,QString)), QObject::connect(m_currentParser, SIGNAL(error(QPlaylistFileParser::ParserError,QString)),
q, SLOT(_q_handleParserError(QPlaylistFileParser::ParserError,QString))); q, SLOT(_q_handleParserError(QPlaylistFileParser::ParserError,QString)));
} }
QString line; QString line;
@@ -352,7 +350,7 @@ void QPlaylistFileParserPrivate::_q_handleData()
if (m_buffer.length() - processedBytes >= LINE_LIMIT) { if (m_buffer.length() - processedBytes >= LINE_LIMIT) {
qWarning() << "error parsing playlist["<< m_root << "] with line content >= 4096 bytes."; qWarning() << "error parsing playlist["<< m_root << "] with line content >= 4096 bytes.";
emit q->error(QPlaylistFileParser::FormatError, tr("invalid line in playlist file")); emit q->error(QPlaylistFileParser::FormatError, QObject::tr("invalid line in playlist file"));
q->stop(); q->stop();
return; return;
} }
@@ -400,7 +398,7 @@ void QPlaylistFileParserPrivate::_q_handleParserFinished()
Q_Q(QPlaylistFileParser); Q_Q(QPlaylistFileParser);
bool isParserValid = (m_currentParser != 0); bool isParserValid = (m_currentParser != 0);
if (!isParserValid) if (!isParserValid)
emit q->error(QPlaylistFileParser::FormatNotSupportedError, tr("Empty file provided")); emit q->error(QPlaylistFileParser::FormatNotSupportedError, QObject::tr("Empty file provided"));
q->stop(); q->stop();
@@ -410,9 +408,9 @@ void QPlaylistFileParserPrivate::_q_handleParserFinished()
QPlaylistFileParser::QPlaylistFileParser(QObject *parent) QPlaylistFileParser::QPlaylistFileParser(QObject *parent)
:QObject(parent), d_ptr(new QPlaylistFileParserPrivate) : QObject(*new QPlaylistFileParserPrivate, parent)
{ {
d_func()->q_ptr = this;
} }
QPlaylistFileParser::FileType QPlaylistFileParser::findPlaylistType(const QString& uri, const QString& mime, const void *data, quint32 size) QPlaylistFileParser::FileType QPlaylistFileParser::findPlaylistType(const QString& uri, const QString& mime, const void *data, quint32 size)

View File

@@ -84,9 +84,6 @@ Q_SIGNALS:
void finished(); void finished();
void error(QPlaylistFileParser::ParserError err, const QString& errorMsg); void error(QPlaylistFileParser::ParserError err, const QString& errorMsg);
protected:
QPlaylistFileParserPrivate *d_ptr;
private: private:
Q_DISABLE_COPY(QPlaylistFileParser) Q_DISABLE_COPY(QPlaylistFileParser)
Q_DECLARE_PRIVATE(QPlaylistFileParser) Q_DECLARE_PRIVATE(QPlaylistFileParser)

View File

@@ -340,7 +340,7 @@ void QMediaPlayerPrivate::setMedia(const QMediaContent &media, QIODevice *stream
if (!file->open(QFile::ReadOnly)) { if (!file->open(QFile::ReadOnly)) {
QMetaObject::invokeMethod(q, "_q_error", Qt::QueuedConnection, QMetaObject::invokeMethod(q, "_q_error", Qt::QueuedConnection,
Q_ARG(int, QMediaPlayer::ResourceError), Q_ARG(int, QMediaPlayer::ResourceError),
Q_ARG(QString, QObject::tr("Attempting to play invalid Qt resource"))); Q_ARG(QString, QMediaPlayer::tr("Attempting to play invalid Qt resource")));
QMetaObject::invokeMethod(q, "_q_mediaStatusChanged", Qt::QueuedConnection, QMetaObject::invokeMethod(q, "_q_mediaStatusChanged", Qt::QueuedConnection,
Q_ARG(QMediaPlayer::MediaStatus, QMediaPlayer::InvalidMedia)); Q_ARG(QMediaPlayer::MediaStatus, QMediaPlayer::InvalidMedia));
file.reset(); file.reset();

View File

@@ -58,25 +58,25 @@ int AVFCameraDeviceControl::deviceCount() const
QString AVFCameraDeviceControl::deviceName(int index) const QString AVFCameraDeviceControl::deviceName(int index) const
{ {
const QList<QByteArray> &devices = AVFCameraSession::availableCameraDevices(); const QList<AVFCameraInfo> &devices = AVFCameraSession::availableCameraDevices();
if (index < 0 || index >= devices.count()) if (index < 0 || index >= devices.count())
return QString(); return QString();
return QString::fromUtf8(devices.at(index)); return QString::fromUtf8(devices.at(index).deviceId);
} }
QString AVFCameraDeviceControl::deviceDescription(int index) const QString AVFCameraDeviceControl::deviceDescription(int index) const
{ {
const QList<QByteArray> &devices = AVFCameraSession::availableCameraDevices(); const QList<AVFCameraInfo> &devices = AVFCameraSession::availableCameraDevices();
if (index < 0 || index >= devices.count()) if (index < 0 || index >= devices.count())
return QString(); return QString();
return AVFCameraSession::cameraDeviceInfo(devices.at(index)).description; return devices.at(index).description;
} }
int AVFCameraDeviceControl::defaultDevice() const int AVFCameraDeviceControl::defaultDevice() const
{ {
return AVFCameraSession::availableCameraDevices().indexOf(AVFCameraSession::defaultCameraDevice()); return AVFCameraSession::defaultCameraIndex();
} }
int AVFCameraDeviceControl::selectedDevice() const int AVFCameraDeviceControl::selectedDevice() const

View File

@@ -0,0 +1,77 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef AVFCAMERAFLASHCONTROL_H
#define AVFCAMERAFLASHCONTROL_H
#include <QtMultimedia/qcameraflashcontrol.h>
#include <QtMultimedia/qcamera.h>
#include <QtCore/qlist.h>
QT_BEGIN_NAMESPACE
class AVFCameraService;
class AVFCameraSession;
class AVFCameraFlashControl : public QCameraFlashControl
{
Q_OBJECT
public:
AVFCameraFlashControl(AVFCameraService *service);
QCameraExposure::FlashModes flashMode() const Q_DECL_OVERRIDE;
void setFlashMode(QCameraExposure::FlashModes mode) Q_DECL_OVERRIDE;
bool isFlashModeSupported(QCameraExposure::FlashModes mode) const Q_DECL_OVERRIDE;
bool isFlashReady() const Q_DECL_OVERRIDE;
private Q_SLOTS:
void cameraStateChanged(QCamera::State newState);
private:
bool applyFlashSettings();
AVFCameraService *m_service;
AVFCameraSession *m_session;
// Set of bits:
QCameraExposure::FlashModes m_supportedModes;
// Only one bit set actually:
QCameraExposure::FlashModes m_flashMode;
};
QT_END_NAMESPACE
#endif // AVFCAMERAFLASHCONTROL_H

View File

@@ -0,0 +1,227 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfcameraflashcontrol.h"
#include "avfcamerautility.h"
#include "avfcamerasession.h"
#include "avfcameraservice.h"
#include "avfcameradebug.h"
#include <QtCore/qdebug.h>
#include <AVFoundation/AVFoundation.h>
AVFCameraFlashControl::AVFCameraFlashControl(AVFCameraService *service)
: m_service(service)
, m_session(0)
, m_supportedModes(QCameraExposure::FlashOff)
, m_flashMode(QCameraExposure::FlashOff)
{
Q_ASSERT(service);
m_session = m_service->session();
Q_ASSERT(m_session);
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(cameraStateChanged(QCamera::State)));
}
QCameraExposure::FlashModes AVFCameraFlashControl::flashMode() const
{
return m_flashMode;
}
void AVFCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
{
if (m_flashMode == mode)
return;
if (m_session->state() == QCamera::ActiveState && !isFlashModeSupported(mode)) {
qDebugCamera() << Q_FUNC_INFO << "unsupported mode" << mode;
return;
}
m_flashMode = mode;
if (m_session->state() != QCamera::ActiveState)
return;
applyFlashSettings();
}
bool AVFCameraFlashControl::isFlashModeSupported(QCameraExposure::FlashModes mode) const
{
// From what QCameraExposure has, we can support only these:
// FlashAuto = 0x1,
// FlashOff = 0x2,
// FlashOn = 0x4,
// AVCaptureDevice has these flash modes:
// AVCaptureFlashModeAuto
// AVCaptureFlashModeOff
// AVCaptureFlashModeOn
// QCameraExposure also has:
// FlashTorch = 0x20, --> "Constant light source."
// FlashVideoLight = 0x40. --> "Constant light source."
// AVCaptureDevice:
// AVCaptureTorchModeOff (no mapping)
// AVCaptureTorchModeOn --> FlashVideoLight
// AVCaptureTorchModeAuto (no mapping)
return m_supportedModes & mode;
}
bool AVFCameraFlashControl::isFlashReady() const
{
if (m_session->state() != QCamera::ActiveState)
return false;
AVCaptureDevice *captureDevice = m_session->videoCaptureDevice();
if (!captureDevice)
return false;
if (!captureDevice.hasFlash && !captureDevice.hasTorch)
return false;
if (!isFlashModeSupported(m_flashMode))
return false;
#ifdef Q_OS_IOS
// AVCaptureDevice's docs:
// "The flash may become unavailable if, for example,
// the device overheats and needs to cool off."
if (m_flashMode != QCameraExposure::FlashVideoLight)
return [captureDevice isFlashAvailable];
return [captureDevice isTorchAvailable];
#endif
return true;
}
// Tracks the camera session state: on unload the capability mask is reset
// to FlashOff and flashReady(false) is emitted; on activation the mask is
// rebuilt by probing the capture device and the pending flash mode is
// applied, with the result reported through flashReady().
void AVFCameraFlashControl::cameraStateChanged(QCamera::State newState)
{
    if (newState == QCamera::UnloadedState) {
        m_supportedModes = QCameraExposure::FlashOff;
        Q_EMIT flashReady(false);
        return;
    }

    if (newState != QCamera::ActiveState)
        return;

    // Rebuild the capability mask from scratch.
    m_supportedModes = QCameraExposure::FlashOff;

    AVCaptureDevice *device = m_session->videoCaptureDevice();
    if (!device) {
        qDebugCamera() << Q_FUNC_INFO << "no capture device in 'Active' state";
        Q_EMIT flashReady(false);
        return;
    }

    if (device.hasFlash) {
        if ([device isFlashModeSupported:AVCaptureFlashModeOn])
            m_supportedModes |= QCameraExposure::FlashOn;
        if ([device isFlashModeSupported:AVCaptureFlashModeAuto])
            m_supportedModes |= QCameraExposure::FlashAuto;
    }

    // The torch maps to QCameraExposure::FlashVideoLight (constant light).
    if (device.hasTorch && [device isTorchModeSupported:AVCaptureTorchModeOn])
        m_supportedModes |= QCameraExposure::FlashVideoLight;

    Q_EMIT flashReady(applyFlashSettings());
}
// Pushes m_flashMode onto the capture device. Must only be called while
// the session is active. Returns true if the mode was applied (i.e. the
// flash is ready), false otherwise. The torch and flash units are mutually
// exclusive here: switching to one first turns the other off.
bool AVFCameraFlashControl::applyFlashSettings()
{
    Q_ASSERT(m_session->state() == QCamera::ActiveState);
    AVCaptureDevice *captureDevice = m_session->videoCaptureDevice();
    if (!captureDevice) {
        qDebugCamera() << Q_FUNC_INFO << "no capture device found";
        return false;
    }
    if (!isFlashModeSupported(m_flashMode)) {
        qDebugCamera() << Q_FUNC_INFO << "unsupported mode" << m_flashMode;
        return false;
    }
    if (!captureDevice.hasFlash && !captureDevice.hasTorch) {
        // FlashOff is the only mode we support.
        // Return false - flash is not ready.
        return false;
    }
    // NOTE(review): RAII-style guard - presumably locks the device for
    // configuration for the rest of this scope; confirm in AVFConfigurationLock.
    const AVFConfigurationLock lock(captureDevice);
    if (m_flashMode != QCameraExposure::FlashVideoLight) {
        // A real flash mode was requested: make sure the torch is off first,
        // then verify the flash unit is momentarily available (iOS only).
        if (captureDevice.torchMode != AVCaptureTorchModeOff) {
#ifdef Q_OS_IOS
            if (![captureDevice isTorchAvailable]) {
                qDebugCamera() << Q_FUNC_INFO << "torch is not available at the moment";
                return false;
            }
#endif
            captureDevice.torchMode = AVCaptureTorchModeOff;
        }
#ifdef Q_OS_IOS
        if (![captureDevice isFlashAvailable]) {
            // We'd like to switch flash (into some mode), but it's not available:
            qDebugCamera() << Q_FUNC_INFO << "flash is not available at the moment";
            return false;
        }
#endif
    } else {
        // Video light (torch) was requested: mirror image of the branch
        // above - turn the flash off first, then check torch availability.
        if (captureDevice.flashMode != AVCaptureFlashModeOff) {
#ifdef Q_OS_IOS
            if (![captureDevice isFlashAvailable]) {
                qDebugCamera() << Q_FUNC_INFO << "flash is not available at the moment";
                return false;
            }
#endif
            captureDevice.flashMode = AVCaptureFlashModeOff;
        }
#ifdef Q_OS_IOS
        if (![captureDevice isTorchAvailable]) {
            qDebugCamera() << Q_FUNC_INFO << "torch is not available at the moment";
            return false;
        }
#endif
    }
    // Finally apply the requested mode to the appropriate unit.
    if (m_flashMode == QCameraExposure::FlashOff)
        captureDevice.flashMode = AVCaptureFlashModeOff;
    else if (m_flashMode == QCameraExposure::FlashOn)
        captureDevice.flashMode = AVCaptureFlashModeOn;
    else if (m_flashMode == QCameraExposure::FlashAuto)
        captureDevice.flashMode = AVCaptureFlashModeAuto;
    else if (m_flashMode == QCameraExposure::FlashVideoLight)
        captureDevice.torchMode = AVCaptureTorchModeOn;
    return true;
}

View File

@@ -58,6 +58,7 @@ class AVFCameraZoomControl;
class AVFCameraViewfinderSettingsControl2; class AVFCameraViewfinderSettingsControl2;
class AVFCameraViewfinderSettingsControl; class AVFCameraViewfinderSettingsControl;
class AVFImageEncoderControl; class AVFImageEncoderControl;
class AVFCameraFlashControl;
class AVFCameraService : public QMediaService class AVFCameraService : public QMediaService
{ {
@@ -83,6 +84,7 @@ public:
AVFCameraViewfinderSettingsControl2 *viewfinderSettingsControl2() const {return m_viewfinderSettingsControl2; } AVFCameraViewfinderSettingsControl2 *viewfinderSettingsControl2() const {return m_viewfinderSettingsControl2; }
AVFCameraViewfinderSettingsControl *viewfinderSettingsControl() const {return m_viewfinderSettingsControl; } AVFCameraViewfinderSettingsControl *viewfinderSettingsControl() const {return m_viewfinderSettingsControl; }
AVFImageEncoderControl *imageEncoderControl() const {return m_imageEncoderControl; } AVFImageEncoderControl *imageEncoderControl() const {return m_imageEncoderControl; }
AVFCameraFlashControl *flashControl() const {return m_flashControl; }
private: private:
AVFCameraSession *m_session; AVFCameraSession *m_session;
@@ -100,6 +102,7 @@ private:
AVFCameraViewfinderSettingsControl2 *m_viewfinderSettingsControl2; AVFCameraViewfinderSettingsControl2 *m_viewfinderSettingsControl2;
AVFCameraViewfinderSettingsControl *m_viewfinderSettingsControl; AVFCameraViewfinderSettingsControl *m_viewfinderSettingsControl;
AVFImageEncoderControl *m_imageEncoderControl; AVFImageEncoderControl *m_imageEncoderControl;
AVFCameraFlashControl *m_flashControl;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -52,6 +52,7 @@
#include "avfcameraexposurecontrol.h" #include "avfcameraexposurecontrol.h"
#include "avfcameraviewfindersettingscontrol.h" #include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h" #include "avfimageencodercontrol.h"
#include "avfcameraflashcontrol.h"
#ifdef Q_OS_IOS #ifdef Q_OS_IOS
#include "avfcamerazoomcontrol.h" #include "avfcamerazoomcontrol.h"
@@ -89,6 +90,7 @@ AVFCameraService::AVFCameraService(QObject *parent):
m_viewfinderSettingsControl2 = new AVFCameraViewfinderSettingsControl2(this); m_viewfinderSettingsControl2 = new AVFCameraViewfinderSettingsControl2(this);
m_viewfinderSettingsControl = new AVFCameraViewfinderSettingsControl(this); m_viewfinderSettingsControl = new AVFCameraViewfinderSettingsControl(this);
m_imageEncoderControl = new AVFImageEncoderControl(this); m_imageEncoderControl = new AVFImageEncoderControl(this);
m_flashControl = new AVFCameraFlashControl(this);
} }
AVFCameraService::~AVFCameraService() AVFCameraService::~AVFCameraService()
@@ -115,6 +117,7 @@ AVFCameraService::~AVFCameraService()
delete m_viewfinderSettingsControl2; delete m_viewfinderSettingsControl2;
delete m_viewfinderSettingsControl; delete m_viewfinderSettingsControl;
delete m_imageEncoderControl; delete m_imageEncoderControl;
delete m_flashControl;
delete m_session; delete m_session;
} }
@@ -158,6 +161,9 @@ QMediaControl *AVFCameraService::requestControl(const char *name)
if (qstrcmp(name, QImageEncoderControl_iid) == 0) if (qstrcmp(name, QImageEncoderControl_iid) == 0)
return m_imageEncoderControl; return m_imageEncoderControl;
if (qstrcmp(name, QCameraFlashControl_iid) == 0)
return m_flashControl;
if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) { if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) {
AVFMediaVideoProbeControl *videoProbe = 0; AVFMediaVideoProbeControl *videoProbe = 0;
videoProbe = new AVFMediaVideoProbeControl(this); videoProbe = new AVFMediaVideoProbeControl(this);

View File

@@ -63,18 +63,26 @@ void AVFServicePlugin::release(QMediaService *service)
QByteArray AVFServicePlugin::defaultDevice(const QByteArray &service) const QByteArray AVFServicePlugin::defaultDevice(const QByteArray &service) const
{ {
if (service == Q_MEDIASERVICE_CAMERA) if (service == Q_MEDIASERVICE_CAMERA) {
return AVFCameraSession::defaultCameraDevice(); int i = AVFCameraSession::defaultCameraIndex();
if (i != -1)
return AVFCameraSession::availableCameraDevices().at(i).deviceId;
}
return QByteArray(); return QByteArray();
} }
QList<QByteArray> AVFServicePlugin::devices(const QByteArray &service) const QList<QByteArray> AVFServicePlugin::devices(const QByteArray &service) const
{ {
if (service == Q_MEDIASERVICE_CAMERA) QList<QByteArray> devs;
return AVFCameraSession::availableCameraDevices();
return QList<QByteArray>(); if (service == Q_MEDIASERVICE_CAMERA) {
const QList<AVFCameraInfo> &cameras = AVFCameraSession::availableCameraDevices();
Q_FOREACH (const AVFCameraInfo &info, cameras)
devs.append(info.deviceId);
}
return devs;
} }
QString AVFServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device) QString AVFServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device)

View File

@@ -54,6 +54,7 @@ struct AVFCameraInfo
AVFCameraInfo() : position(QCamera::UnspecifiedPosition), orientation(0) AVFCameraInfo() : position(QCamera::UnspecifiedPosition), orientation(0)
{ } { }
QByteArray deviceId;
QString description; QString description;
QCamera::Position position; QCamera::Position position;
int orientation; int orientation;
@@ -66,8 +67,8 @@ public:
AVFCameraSession(AVFCameraService *service, QObject *parent = 0); AVFCameraSession(AVFCameraService *service, QObject *parent = 0);
~AVFCameraSession(); ~AVFCameraSession();
static const QByteArray &defaultCameraDevice(); static int defaultCameraIndex();
static const QList<QByteArray> &availableCameraDevices(); static const QList<AVFCameraInfo> &availableCameraDevices();
static AVFCameraInfo cameraDeviceInfo(const QByteArray &device); static AVFCameraInfo cameraDeviceInfo(const QByteArray &device);
void setVideoOutput(AVFCameraRendererControl *output); void setVideoOutput(AVFCameraRendererControl *output);
@@ -102,9 +103,8 @@ private:
void applyImageEncoderSettings(); void applyImageEncoderSettings();
void applyViewfinderSettings(); void applyViewfinderSettings();
static QByteArray m_defaultCameraDevice; static int m_defaultCameraIndex;
static QList<QByteArray> m_cameraDevices; static QList<AVFCameraInfo> m_cameraDevices;
static QMap<QByteArray, AVFCameraInfo> m_cameraInfo;
AVFCameraService *m_service; AVFCameraService *m_service;
AVFCameraRendererControl *m_videoOutput; AVFCameraRendererControl *m_videoOutput;

View File

@@ -48,14 +48,14 @@
#include <QtCore/qdatetime.h> #include <QtCore/qdatetime.h>
#include <QtCore/qurl.h> #include <QtCore/qurl.h>
#include <QtCore/qelapsedtimer.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
QT_USE_NAMESPACE QT_USE_NAMESPACE
QByteArray AVFCameraSession::m_defaultCameraDevice; int AVFCameraSession::m_defaultCameraIndex;
QList<QByteArray> AVFCameraSession::m_cameraDevices; QList<AVFCameraInfo> AVFCameraSession::m_cameraDevices;
QMap<QByteArray, AVFCameraInfo> AVFCameraSession::m_cameraInfo;
@interface AVFCameraSessionObserver : NSObject @interface AVFCameraSessionObserver : NSObject
{ {
@@ -169,45 +169,55 @@ AVFCameraSession::~AVFCameraSession()
[m_captureSession release]; [m_captureSession release];
} }
const QByteArray &AVFCameraSession::defaultCameraDevice() int AVFCameraSession::defaultCameraIndex()
{ {
if (m_cameraDevices.isEmpty()) updateCameraDevices();
updateCameraDevices(); return m_defaultCameraIndex;
return m_defaultCameraDevice;
} }
const QList<QByteArray> &AVFCameraSession::availableCameraDevices() const QList<AVFCameraInfo> &AVFCameraSession::availableCameraDevices()
{ {
if (m_cameraDevices.isEmpty()) updateCameraDevices();
updateCameraDevices();
return m_cameraDevices; return m_cameraDevices;
} }
AVFCameraInfo AVFCameraSession::cameraDeviceInfo(const QByteArray &device) AVFCameraInfo AVFCameraSession::cameraDeviceInfo(const QByteArray &device)
{ {
if (m_cameraDevices.isEmpty()) updateCameraDevices();
updateCameraDevices();
return m_cameraInfo.value(device); Q_FOREACH (const AVFCameraInfo &info, m_cameraDevices) {
if (info.deviceId == device)
return info;
}
return AVFCameraInfo();
} }
void AVFCameraSession::updateCameraDevices() void AVFCameraSession::updateCameraDevices()
{ {
m_defaultCameraDevice.clear(); #ifdef Q_OS_IOS
// Cameras can't change dynamically on iOS. Update only once.
if (!m_cameraDevices.isEmpty())
return;
#else
// On OS X, cameras can be added or removed. Update the list every time, but not more than
// once every 500 ms
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
#endif
m_defaultCameraIndex = -1;
m_cameraDevices.clear(); m_cameraDevices.clear();
m_cameraInfo.clear();
AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (defaultDevice)
m_defaultCameraDevice = QByteArray([[defaultDevice uniqueID] UTF8String]);
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in videoDevices) { for (AVCaptureDevice *device in videoDevices) {
QByteArray deviceId([[device uniqueID] UTF8String]); if (defaultDevice && [defaultDevice.uniqueID isEqualToString:device.uniqueID])
m_defaultCameraIndex = m_cameraDevices.count();
AVFCameraInfo info; AVFCameraInfo info;
info.deviceId = QByteArray([[device uniqueID] UTF8String]);
info.description = QString::fromNSString([device localizedName]); info.description = QString::fromNSString([device localizedName]);
// There is no API to get the camera sensor orientation, however, cameras are always // There is no API to get the camera sensor orientation, however, cameras are always
@@ -232,9 +242,12 @@ void AVFCameraSession::updateCameraDevices()
break; break;
} }
m_cameraDevices << deviceId; m_cameraDevices.append(info);
m_cameraInfo.insert(deviceId, info);
} }
#ifndef Q_OS_IOS
timer.restart();
#endif
} }
void AVFCameraSession::setVideoOutput(AVFCameraRendererControl *output) void AVFCameraSession::setVideoOutput(AVFCameraRendererControl *output)

View File

@@ -41,7 +41,8 @@ HEADERS += \
avfcameraexposurecontrol.h \ avfcameraexposurecontrol.h \
avfcamerautility.h \ avfcamerautility.h \
avfcameraviewfindersettingscontrol.h \ avfcameraviewfindersettingscontrol.h \
avfimageencodercontrol.h avfimageencodercontrol.h \
avfcameraflashcontrol.h
OBJECTIVE_SOURCES += \ OBJECTIVE_SOURCES += \
avfcameraserviceplugin.mm \ avfcameraserviceplugin.mm \
@@ -62,7 +63,8 @@ OBJECTIVE_SOURCES += \
avfcameraexposurecontrol.mm \ avfcameraexposurecontrol.mm \
avfcamerautility.mm \ avfcamerautility.mm \
avfcameraviewfindersettingscontrol.mm \ avfcameraviewfindersettingscontrol.mm \
avfimageencodercontrol.mm avfimageencodercontrol.mm \
avfcameraflashcontrol.mm
ios { ios {

View File

@@ -36,6 +36,10 @@
#import <AVFoundation/AVAudioSession.h> #import <AVFoundation/AVAudioSession.h>
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
#include <AudioToolbox/AudioToolbox.h>
#endif
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@interface CoreAudioSessionObserver : NSObject @interface CoreAudioSessionObserver : NSObject
@@ -71,19 +75,24 @@ QT_BEGIN_NAMESPACE
self->m_sessionManager = sessionManager; self->m_sessionManager = sessionManager;
self->m_audioSession = [AVAudioSession sharedInstance]; self->m_audioSession = [AVAudioSession sharedInstance];
//Set up observers #if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
[[NSNotificationCenter defaultCenter] addObserver:self if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
selector:@selector(audioSessionInterruption:) #endif
name:AVAudioSessionInterruptionNotification {
object:self->m_audioSession]; //Set up observers
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionMediaServicesWereReset:) selector:@selector(audioSessionInterruption:)
name:AVAudioSessionMediaServicesWereResetNotification name:AVAudioSessionInterruptionNotification
object:self->m_audioSession]; object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionRouteChange:) selector:@selector(audioSessionMediaServicesWereReset:)
name:AVAudioSessionRouteChangeNotification name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession]; object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionRouteChange:)
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
}
return self; return self;
} }
@@ -93,15 +102,22 @@ QT_BEGIN_NAMESPACE
#ifdef QT_DEBUG_COREAUDIO #ifdef QT_DEBUG_COREAUDIO
qDebug() << Q_FUNC_INFO; qDebug() << Q_FUNC_INFO;
#endif #endif
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionInterruptionNotification #if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
object:self->m_audioSession]; if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
[[NSNotificationCenter defaultCenter] removeObserver:self #endif
name:AVAudioSessionMediaServicesWereResetNotification {
object:self->m_audioSession]; [[NSNotificationCenter defaultCenter] removeObserver:self
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVAudioSessionInterruptionNotification
name:AVAudioSessionRouteChangeNotification object:self->m_audioSession];
object:self->m_audioSession]; [[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
}
[super dealloc]; [super dealloc];
} }
@@ -261,6 +277,9 @@ bool CoreAudioSessionManager::setCategory(CoreAudioSessionManager::AudioSessionC
targetCategory = AVAudioSessionCategoryAudioProcessing; targetCategory = AVAudioSessionCategoryAudioProcessing;
break; break;
case CoreAudioSessionManager::MultiRoute: case CoreAudioSessionManager::MultiRoute:
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
targetCategory = AVAudioSessionCategoryMultiRoute; targetCategory = AVAudioSessionCategoryMultiRoute;
break; break;
} }
@@ -268,9 +287,16 @@ bool CoreAudioSessionManager::setCategory(CoreAudioSessionManager::AudioSessionC
if (targetCategory == nil) if (targetCategory == nil)
return false; return false;
return [[m_sessionObserver audioSession] setCategory:targetCategory #if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
withOptions:(AVAudioSessionCategoryOptions)options if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
error:nil]; return [[m_sessionObserver audioSession] setCategory:targetCategory error:nil];
} else
#endif
{
return [[m_sessionObserver audioSession] setCategory:targetCategory
withOptions:(AVAudioSessionCategoryOptions)options
error:nil];
}
} }
bool CoreAudioSessionManager::setMode(CoreAudioSessionManager::AudioSessionModes mode) bool CoreAudioSessionManager::setMode(CoreAudioSessionManager::AudioSessionModes mode)
@@ -293,6 +319,9 @@ bool CoreAudioSessionManager::setMode(CoreAudioSessionManager::AudioSessionModes
targetMode = AVAudioSessionModeMeasurement; targetMode = AVAudioSessionModeMeasurement;
break; break;
case CoreAudioSessionManager::MoviePlayback: case CoreAudioSessionManager::MoviePlayback:
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
targetMode = AVAudioSessionModeMoviePlayback; targetMode = AVAudioSessionModeMoviePlayback;
break; break;
} }
@@ -321,7 +350,11 @@ CoreAudioSessionManager::AudioSessionCategorys CoreAudioSessionManager::category
localCategory = PlayAndRecord; localCategory = PlayAndRecord;
} else if (category == AVAudioSessionCategoryAudioProcessing) { } else if (category == AVAudioSessionCategoryAudioProcessing) {
localCategory = AudioProcessing; localCategory = AudioProcessing;
} else if (category == AVAudioSessionCategoryMultiRoute) { } else if (
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0 &&
#endif
category == AVAudioSessionCategoryMultiRoute) {
localCategory = MultiRoute; localCategory = MultiRoute;
} }
@@ -343,7 +376,11 @@ CoreAudioSessionManager::AudioSessionModes CoreAudioSessionManager::mode()
localMode = VideoRecording; localMode = VideoRecording;
} else if (mode == AVAudioSessionModeMeasurement) { } else if (mode == AVAudioSessionModeMeasurement) {
localMode = Measurement; localMode = Measurement;
} else if (mode == AVAudioSessionModeMoviePlayback) { } else if (
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0 &&
#endif
mode == AVAudioSessionModeMoviePlayback) {
localMode = MoviePlayback; localMode = MoviePlayback;
} }
@@ -372,12 +409,32 @@ QList<QByteArray> CoreAudioSessionManager::outputDevices()
float CoreAudioSessionManager::currentIOBufferDuration() float CoreAudioSessionManager::currentIOBufferDuration()
{ {
return [[m_sessionObserver audioSession] IOBufferDuration]; #if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
Float32 duration;
UInt32 size = sizeof(duration);
AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareIOBufferDuration, &size, &duration);
return duration;
} else
#endif
{
return [[m_sessionObserver audioSession] IOBufferDuration];
}
} }
float CoreAudioSessionManager::preferredSampleRate() float CoreAudioSessionManager::preferredSampleRate()
{ {
return [[m_sessionObserver audioSession] preferredSampleRate]; #if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
Float64 sampleRate;
UInt32 size = sizeof(sampleRate);
AudioSessionGetProperty(kAudioSessionProperty_PreferredHardwareSampleRate, &size, &sampleRate);
return sampleRate;
} else
#endif
{
return [[m_sessionObserver audioSession] preferredSampleRate];
}
} }
#ifdef QT_DEBUG_COREAUDIO #ifdef QT_DEBUG_COREAUDIO

View File

@@ -32,7 +32,6 @@
****************************************************************************/ ****************************************************************************/
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <QWidget>
#include <QFile> #include <QFile>
#include <QtConcurrent/QtConcurrentRun> #include <QtConcurrent/QtConcurrentRun>
#include <QtMultimedia/qabstractvideobuffer.h> #include <QtMultimedia/qabstractvideobuffer.h>

View File

@@ -33,6 +33,7 @@
#include <QDebug> #include <QDebug>
#include <QFile> #include <QFile>
#include <qelapsedtimer.h>
#include "dsvideodevicecontrol.h" #include "dsvideodevicecontrol.h"
#include "dscamerasession.h" #include "dscamerasession.h"
@@ -48,33 +49,37 @@ extern const CLSID CLSID_VideoInputDeviceCategory;
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
Q_GLOBAL_STATIC(QList<DSVideoDeviceInfo>, deviceList)
DSVideoDeviceControl::DSVideoDeviceControl(QObject *parent) DSVideoDeviceControl::DSVideoDeviceControl(QObject *parent)
: QVideoDeviceSelectorControl(parent) : QVideoDeviceSelectorControl(parent)
{ {
m_session = qobject_cast<DSCameraSession*>(parent); m_session = qobject_cast<DSCameraSession*>(parent);
enumerateDevices(&m_devices, &m_descriptions);
selected = 0; selected = 0;
} }
int DSVideoDeviceControl::deviceCount() const int DSVideoDeviceControl::deviceCount() const
{ {
return m_devices.count(); updateDevices();
return deviceList->count();
} }
QString DSVideoDeviceControl::deviceName(int index) const QString DSVideoDeviceControl::deviceName(int index) const
{ {
if (index >= 0 && index <= m_devices.count()) updateDevices();
return QString::fromUtf8(m_devices.at(index).constData());
if (index >= 0 && index <= deviceList->count())
return QString::fromUtf8(deviceList->at(index).first.constData());
return QString(); return QString();
} }
QString DSVideoDeviceControl::deviceDescription(int index) const QString DSVideoDeviceControl::deviceDescription(int index) const
{ {
if (index >= 0 && index <= m_descriptions.count()) updateDevices();
return m_descriptions.at(index);
if (index >= 0 && index <= deviceList->count())
return deviceList->at(index).second;
return QString(); return QString();
} }
@@ -89,10 +94,34 @@ int DSVideoDeviceControl::selectedDevice() const
return selected; return selected;
} }
void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringList *descriptions) void DSVideoDeviceControl::setSelectedDevice(int index)
{ {
devices->clear(); updateDevices();
descriptions->clear();
if (index >= 0 && index < deviceList->count()) {
if (m_session) {
QString device = deviceList->at(index).first;
if (device.startsWith("ds:"))
device.remove(0,3);
m_session->setDevice(device);
}
selected = index;
}
}
const QList<DSVideoDeviceInfo> &DSVideoDeviceControl::availableDevices()
{
updateDevices();
return *deviceList;
}
void DSVideoDeviceControl::updateDevices()
{
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
deviceList->clear();
ICreateDevEnum* pDevEnum = NULL; ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL; IEnumMoniker* pEnum = NULL;
@@ -116,7 +145,9 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
if (SUCCEEDED(hr)) { if (SUCCEEDED(hr)) {
QString output(QString::fromWCharArray(strName)); QString output(QString::fromWCharArray(strName));
mallocInterface->Free(strName); mallocInterface->Free(strName);
devices->append(output.toUtf8().constData());
DSVideoDeviceInfo devInfo;
devInfo.first = output.toUtf8();
IPropertyBag *pPropBag; IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag)); hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
@@ -130,7 +161,9 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
} }
pPropBag->Release(); pPropBag->Release();
} }
descriptions->append(output); devInfo.second = output;
deviceList->append(devInfo);
} }
pMoniker->Release(); pMoniker->Release();
} }
@@ -139,19 +172,8 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
} }
pDevEnum->Release(); pDevEnum->Release();
} }
}
void DSVideoDeviceControl::setSelectedDevice(int index) timer.restart();
{
if (index >= 0 && index < m_devices.count()) {
if (m_session) {
QString device = m_devices.at(index);
if (device.startsWith("ds:"))
device.remove(0,3);
m_session->setDevice(device);
}
selected = index;
}
} }
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -42,6 +42,8 @@ class DSCameraSession;
//QTM_USE_NAMESPACE //QTM_USE_NAMESPACE
typedef QPair<QByteArray, QString> DSVideoDeviceInfo;
class DSVideoDeviceControl : public QVideoDeviceSelectorControl class DSVideoDeviceControl : public QVideoDeviceSelectorControl
{ {
Q_OBJECT Q_OBJECT
@@ -54,17 +56,15 @@ public:
int defaultDevice() const; int defaultDevice() const;
int selectedDevice() const; int selectedDevice() const;
static void enumerateDevices(QList<QByteArray> *devices, QStringList *descriptions); static const QList<DSVideoDeviceInfo> &availableDevices();
public Q_SLOTS: public Q_SLOTS:
void setSelectedDevice(int index); void setSelectedDevice(int index);
private: private:
static void updateDevices();
DSCameraSession* m_session; DSCameraSession* m_session;
QList<QByteArray> m_devices;
QStringList m_descriptions;
int selected; int selected;
}; };

View File

@@ -39,7 +39,6 @@
#include "dsvideodevicecontrol.h" #include "dsvideodevicecontrol.h"
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
#include <QtCore/QElapsedTimer>
#include <dshow.h> #include <dshow.h>
#include "dscameraservice.h" #include "dscameraservice.h"
#endif #endif
@@ -122,9 +121,9 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices(); const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
if (!devs.isEmpty())
return m_defaultCameraDevice; return devs.first().first;
} }
#endif #endif
@@ -133,52 +132,29 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
QList<QByteArray> DSServicePlugin::devices(const QByteArray &service) const QList<QByteArray> DSServicePlugin::devices(const QByteArray &service) const
{ {
QList<QByteArray> result;
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices(); const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
Q_FOREACH (const DSVideoDeviceInfo &info, devs)
return m_cameraDevices; result.append(info.first);
} }
#endif #endif
return QList<QByteArray>(); return result;
} }
QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device) QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device)
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) { if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices(); const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
Q_FOREACH (const DSVideoDeviceInfo &info, devs) {
for (int i=0; i<m_cameraDevices.count(); i++) if (info.first == device)
if (m_cameraDevices[i] == device) return info.second;
return m_cameraDescriptions[i]; }
} }
#endif #endif
return QString(); return QString();
} }
#ifdef QMEDIA_DIRECTSHOW_CAMERA
void DSServicePlugin::updateDevices() const
{
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
addRefCount();
m_defaultCameraDevice.clear();
DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
if (m_cameraDevices.isEmpty()) {
qWarning() << "No camera devices found";
} else {
m_defaultCameraDevice = m_cameraDevices.first();
}
releaseRefCount();
timer.restart();
}
#endif

View File

@@ -65,15 +65,6 @@ public:
QByteArray defaultDevice(const QByteArray &service) const; QByteArray defaultDevice(const QByteArray &service) const;
QList<QByteArray> devices(const QByteArray &service) const; QList<QByteArray> devices(const QByteArray &service) const;
QString deviceDescription(const QByteArray &service, const QByteArray &device); QString deviceDescription(const QByteArray &service, const QByteArray &device);
private:
#ifdef QMEDIA_DIRECTSHOW_CAMERA
void updateDevices() const;
mutable QByteArray m_defaultCameraDevice;
mutable QList<QByteArray> m_cameraDevices;
mutable QStringList m_cameraDescriptions;
#endif
}; };
#endif // DSSERVICEPLUGIN_H #endif // DSSERVICEPLUGIN_H

View File

@@ -99,6 +99,9 @@ void DirectShowIOSource::setDevice(QIODevice *device)
void DirectShowIOSource::setAllocator(IMemAllocator *allocator) void DirectShowIOSource::setAllocator(IMemAllocator *allocator)
{ {
if (m_allocator == allocator)
return;
if (m_allocator) if (m_allocator)
m_allocator->Release(); m_allocator->Release();

View File

@@ -49,7 +49,7 @@ class DirectShowIOSource
{ {
public: public:
DirectShowIOSource(DirectShowEventLoop *loop); DirectShowIOSource(DirectShowEventLoop *loop);
~DirectShowIOSource(); virtual ~DirectShowIOSource();
void setDevice(QIODevice *device); void setDevice(QIODevice *device);
void setAllocator(IMemAllocator *allocator); void setAllocator(IMemAllocator *allocator);

View File

@@ -41,7 +41,7 @@ class DirectShowMediaTypeEnum : public IEnumMediaTypes
{ {
public: public:
DirectShowMediaTypeEnum(DirectShowMediaTypeList *list, int token, int index = 0); DirectShowMediaTypeEnum(DirectShowMediaTypeList *list, int token, int index = 0);
~DirectShowMediaTypeEnum(); virtual ~DirectShowMediaTypeEnum();
// IUnknown // IUnknown
HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject); HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject);
@@ -143,6 +143,10 @@ DirectShowMediaTypeList::DirectShowMediaTypeList()
{ {
} }
DirectShowMediaTypeList::~DirectShowMediaTypeList()
{
}
IEnumMediaTypes *DirectShowMediaTypeList::createMediaTypeEnum() IEnumMediaTypes *DirectShowMediaTypeList::createMediaTypeEnum()
{ {
return new DirectShowMediaTypeEnum(this, m_mediaTypeToken, 0); return new DirectShowMediaTypeEnum(this, m_mediaTypeToken, 0);

View File

@@ -42,6 +42,7 @@ class DirectShowMediaTypeList : public IUnknown
{ {
public: public:
DirectShowMediaTypeList(); DirectShowMediaTypeList();
virtual ~DirectShowMediaTypeList();
IEnumMediaTypes *createMediaTypeEnum(); IEnumMediaTypes *createMediaTypeEnum();

View File

@@ -42,7 +42,7 @@ class DirectShowPinEnum : public IEnumPins
{ {
public: public:
DirectShowPinEnum(const QList<IPin *> &pins); DirectShowPinEnum(const QList<IPin *> &pins);
~DirectShowPinEnum(); virtual ~DirectShowPinEnum();
// IUnknown // IUnknown
HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject); HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject);

View File

@@ -160,3 +160,5 @@ void MmRendererMetaDataReaderControl::setMetaData(const MmRendererMetaData &data
if (metaDataAvailable != oldMetaDataAvailable) if (metaDataAvailable != oldMetaDataAvailable)
emit metaDataAvailableChanged(metaDataAvailable); emit metaDataAvailableChanged(metaDataAvailable);
} }
QT_END_NAMESPACE

View File

@@ -47,6 +47,7 @@
#include <QtCore/QDataStream> #include <QtCore/QDataStream>
#include <mmsystem.h> #include <mmsystem.h>
#include "qwindowsaudiodeviceinfo.h" #include "qwindowsaudiodeviceinfo.h"
#include "qwindowsaudioutils.h"
#if defined(Q_CC_MINGW) && !defined(__MINGW64_VERSION_MAJOR) #if defined(Q_CC_MINGW) && !defined(__MINGW64_VERSION_MAJOR)
struct IBaseFilter; // Needed for strmif.h from stock MinGW. struct IBaseFilter; // Needed for strmif.h from stock MinGW.
@@ -167,8 +168,7 @@ QString QWindowsAudioDeviceInfo::deviceName() const
QStringList QWindowsAudioDeviceInfo::supportedCodecs() QStringList QWindowsAudioDeviceInfo::supportedCodecs()
{ {
updateLists(); return QStringList() << QStringLiteral("audio/pcm");
return codecz;
} }
QList<int> QWindowsAudioDeviceInfo::supportedSampleRates() QList<int> QWindowsAudioDeviceInfo::supportedSampleRates()
@@ -191,8 +191,7 @@ QList<int> QWindowsAudioDeviceInfo::supportedSampleSizes()
QList<QAudioFormat::Endian> QWindowsAudioDeviceInfo::supportedByteOrders() QList<QAudioFormat::Endian> QWindowsAudioDeviceInfo::supportedByteOrders()
{ {
updateLists(); return QList<QAudioFormat::Endian>() << QAudioFormat::LittleEndian;
return byteOrderz;
} }
QList<QAudioFormat::SampleType> QWindowsAudioDeviceInfo::supportedSampleTypes() QList<QAudioFormat::SampleType> QWindowsAudioDeviceInfo::supportedSampleTypes()
@@ -213,118 +212,50 @@ void QWindowsAudioDeviceInfo::close()
bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const
{ {
// Set nearest to closest settings that do work. WAVEFORMATEXTENSIBLE wfx;
// See if what is in settings will work (return value). if (qt_convertFormat(format, &wfx)) {
// query only, do not open device
bool failed = false; if (mode == QAudio::AudioOutput) {
bool match = false; return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
// check codec } else { // AudioInput
for( int i = 0; i < codecz.count(); i++) { return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
if (format.codec() == codecz.at(i)) WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
match = true;
}
if (!match) failed = true;
// check channel
match = false;
if (!failed) {
for (int i = 0; i < channelz.count(); i++) {
if (format.channelCount() == channelz.at(i)) {
match = true;
break;
}
} }
if (!match)
failed = true;
} }
// check sampleRate
match = false;
if (!failed) {
for (int i = 0; i < sampleRatez.count(); i++) {
if (format.sampleRate() == sampleRatez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check sample size
match = false;
if (!failed) {
for( int i = 0; i < sizez.count(); i++) {
if (format.sampleSize() == sizez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check byte order
match = false;
if (!failed) {
for( int i = 0; i < byteOrderz.count(); i++) {
if (format.byteOrder() == byteOrderz.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check sample type
match = false;
if (!failed) {
for( int i = 0; i < typez.count(); i++) {
if (format.sampleType() == typez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
if(!failed) {
// settings work
return true;
}
return false; return false;
} }
void QWindowsAudioDeviceInfo::updateLists() void QWindowsAudioDeviceInfo::updateLists()
{ {
// redo all lists based on current settings if (!sizez.isEmpty())
bool match = false; return;
bool hasCaps = false;
DWORD fmt = 0; DWORD fmt = 0;
if(mode == QAudio::AudioOutput) { if(mode == QAudio::AudioOutput) {
WAVEOUTCAPS woc; WAVEOUTCAPS woc;
if (waveOutGetDevCaps(devId, &woc, sizeof(WAVEOUTCAPS)) == MMSYSERR_NOERROR) { if (waveOutGetDevCaps(devId, &woc, sizeof(WAVEOUTCAPS)) == MMSYSERR_NOERROR) {
match = true; hasCaps = true;
fmt = woc.dwFormats; fmt = woc.dwFormats;
} }
} else { } else {
WAVEINCAPS woc; WAVEINCAPS woc;
if (waveInGetDevCaps(devId, &woc, sizeof(WAVEINCAPS)) == MMSYSERR_NOERROR) { if (waveInGetDevCaps(devId, &woc, sizeof(WAVEINCAPS)) == MMSYSERR_NOERROR) {
match = true; hasCaps = true;
fmt = woc.dwFormats; fmt = woc.dwFormats;
} }
} }
sizez.clear(); sizez.clear();
sampleRatez.clear(); sampleRatez.clear();
channelz.clear(); channelz.clear();
byteOrderz.clear();
typez.clear(); typez.clear();
codecz.clear();
if(match) { if (hasCaps) {
// Check sample size
if ((fmt & WAVE_FORMAT_1M08) if ((fmt & WAVE_FORMAT_1M08)
|| (fmt & WAVE_FORMAT_1S08) || (fmt & WAVE_FORMAT_1S08)
|| (fmt & WAVE_FORMAT_2M08) || (fmt & WAVE_FORMAT_2M08)
@@ -334,8 +265,7 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_48M08) || (fmt & WAVE_FORMAT_48M08)
|| (fmt & WAVE_FORMAT_48S08) || (fmt & WAVE_FORMAT_48S08)
|| (fmt & WAVE_FORMAT_96M08) || (fmt & WAVE_FORMAT_96M08)
|| (fmt & WAVE_FORMAT_96S08) || (fmt & WAVE_FORMAT_96S08)) {
) {
sizez.append(8); sizez.append(8);
} }
if ((fmt & WAVE_FORMAT_1M16) if ((fmt & WAVE_FORMAT_1M16)
@@ -347,10 +277,11 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_48M16) || (fmt & WAVE_FORMAT_48M16)
|| (fmt & WAVE_FORMAT_48S16) || (fmt & WAVE_FORMAT_48S16)
|| (fmt & WAVE_FORMAT_96M16) || (fmt & WAVE_FORMAT_96M16)
|| (fmt & WAVE_FORMAT_96S16) || (fmt & WAVE_FORMAT_96S16)) {
) {
sizez.append(16); sizez.append(16);
} }
// Check sample rate
if ((fmt & WAVE_FORMAT_1M08) if ((fmt & WAVE_FORMAT_1M08)
|| (fmt & WAVE_FORMAT_1S08) || (fmt & WAVE_FORMAT_1S08)
|| (fmt & WAVE_FORMAT_1M16) || (fmt & WAVE_FORMAT_1M16)
@@ -381,23 +312,81 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_96S16)) { || (fmt & WAVE_FORMAT_96S16)) {
sampleRatez.append(96000); sampleRatez.append(96000);
} }
channelz.append(1);
channelz.append(2);
if (mode == QAudio::AudioOutput) {
channelz.append(4);
channelz.append(6);
channelz.append(8);
}
byteOrderz.append(QAudioFormat::LittleEndian); // Check channel count
if (fmt & WAVE_FORMAT_1M08
|| fmt & WAVE_FORMAT_1M16
|| fmt & WAVE_FORMAT_2M08
|| fmt & WAVE_FORMAT_2M16
|| fmt & WAVE_FORMAT_4M08
|| fmt & WAVE_FORMAT_4M16
|| fmt & WAVE_FORMAT_48M08
|| fmt & WAVE_FORMAT_48M16
|| fmt & WAVE_FORMAT_96M08
|| fmt & WAVE_FORMAT_96M16) {
channelz.append(1);
}
if (fmt & WAVE_FORMAT_1S08
|| fmt & WAVE_FORMAT_1S16
|| fmt & WAVE_FORMAT_2S08
|| fmt & WAVE_FORMAT_2S16
|| fmt & WAVE_FORMAT_4S08
|| fmt & WAVE_FORMAT_4S16
|| fmt & WAVE_FORMAT_48S08
|| fmt & WAVE_FORMAT_48S16
|| fmt & WAVE_FORMAT_96S08
|| fmt & WAVE_FORMAT_96S16) {
channelz.append(2);
}
typez.append(QAudioFormat::SignedInt); typez.append(QAudioFormat::SignedInt);
typez.append(QAudioFormat::UnSignedInt); typez.append(QAudioFormat::UnSignedInt);
codecz.append(QLatin1String("audio/pcm")); // WAVEOUTCAPS and WAVEINCAPS contains information only for the previously tested parameters.
// WaveOut and WaveInt might actually support more formats, the only way to know is to try
// opening the device with it.
QAudioFormat testFormat;
testFormat.setCodec(QStringLiteral("audio/pcm"));
testFormat.setByteOrder(QAudioFormat::LittleEndian);
testFormat.setSampleType(QAudioFormat::SignedInt);
testFormat.setChannelCount(channelz.first());
testFormat.setSampleRate(sampleRatez.at(sampleRatez.size() / 2));
testFormat.setSampleSize(sizez.last());
const QAudioFormat defaultTestFormat(testFormat);
// Check if float samples are supported
testFormat.setSampleType(QAudioFormat::Float);
testFormat.setSampleSize(32);
if (testSettings(testFormat))
typez.append(QAudioFormat::Float);
// Check channel counts > 2
testFormat = defaultTestFormat;
for (int i = 3; i < 19; ++i) { // <mmreg.h> defines 18 different channels
testFormat.setChannelCount(i);
if (testSettings(testFormat))
channelz.append(i);
}
// Check more sample sizes
testFormat = defaultTestFormat;
QList<int> testSampleSizes = QList<int>() << 24 << 32 << 48 << 64;
Q_FOREACH (int s, testSampleSizes) {
testFormat.setSampleSize(s);
if (testSettings(testFormat))
sizez.append(s);
}
// Check more sample rates
testFormat = defaultTestFormat;
QList<int> testSampleRates = QList<int>() << 8000 << 16000 << 32000 << 88200 << 192000;
Q_FOREACH (int r, testSampleRates) {
testFormat.setSampleRate(r);
if (testSettings(testFormat))
sampleRatez.append(r);
}
std::sort(sampleRatez.begin(), sampleRatez.end());
} }
if (sampleRatez.count() > 0)
sampleRatez.prepend(8000);
} }
QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode) QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)

View File

@@ -57,7 +57,6 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
const unsigned int MAX_SAMPLE_RATES = 5; const unsigned int MAX_SAMPLE_RATES = 5;
const unsigned int SAMPLE_RATES[] = { 8000, 11025, 22050, 44100, 48000 }; const unsigned int SAMPLE_RATES[] = { 8000, 11025, 22050, 44100, 48000 };
@@ -91,15 +90,14 @@ private:
QAudio::Mode mode; QAudio::Mode mode;
QString device; QString device;
quint32 devId; quint32 devId;
QAudioFormat nearest;
QList<int> sampleRatez; QList<int> sampleRatez;
QList<int> channelz; QList<int> channelz;
QList<int> sizez; QList<int> sizez;
QList<QAudioFormat::Endian> byteOrderz;
QStringList codecz;
QList<QAudioFormat::SampleType> typez; QList<QAudioFormat::SampleType> typez;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -298,18 +298,9 @@ bool QWindowsAudioInput::open()
period_size = 0; period_size = 0;
if (!settings.isValid()) { if (!qt_convertFormat(settings, &wfx)) {
qWarning("QAudioInput: open error, invalid format."); qWarning("QAudioInput: open error, invalid format.");
} else if (settings.channelCount() <= 0) {
qWarning("QAudioInput: open error, invalid number of channels (%d).",
settings.channelCount());
} else if (settings.sampleSize() <= 0) {
qWarning("QAudioInput: open error, invalid sample size (%d).",
settings.sampleSize());
} else if (settings.sampleRate() < 8000 || settings.sampleRate() > 96000) {
qWarning("QAudioInput: open error, sample rate out of range (%d).", settings.sampleRate());
} else if (buffer_size == 0) { } else if (buffer_size == 0) {
buffer_size buffer_size
= (settings.sampleRate() = (settings.sampleRate()
* settings.channelCount() * settings.channelCount()
@@ -329,20 +320,12 @@ bool QWindowsAudioInput::open()
timeStamp.restart(); timeStamp.restart();
elapsedTimeOffset = 0; elapsedTimeOffset = 0;
wfx.nSamplesPerSec = settings.sampleRate();
wfx.wBitsPerSample = settings.sampleSize();
wfx.nChannels = settings.channelCount();
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_PCM;
wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
QDataStream ds(&m_device, QIODevice::ReadOnly); QDataStream ds(&m_device, QIODevice::ReadOnly);
quint32 deviceId; quint32 deviceId;
ds >> deviceId; ds >> deviceId;
if (waveInOpen(&hWaveIn, UINT_PTR(deviceId), &wfx, if (waveInOpen(&hWaveIn, UINT_PTR(deviceId), &wfx.Format,
(DWORD_PTR)&waveInProc, (DWORD_PTR)&waveInProc,
(DWORD_PTR) this, (DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) { CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {

View File

@@ -45,8 +45,7 @@
#ifndef QWINDOWSAUDIOINPUT_H #ifndef QWINDOWSAUDIOINPUT_H
#define QWINDOWSAUDIOINPUT_H #define QWINDOWSAUDIOINPUT_H
#include <QtCore/qt_windows.h> #include "qwindowsaudioutils.h"
#include <mmsystem.h>
#include <QtCore/qfile.h> #include <QtCore/qfile.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
@@ -121,7 +120,7 @@ private:
qint64 totalTimeValue; qint64 totalTimeValue;
bool pullMode; bool pullMode;
bool resuming; bool resuming;
WAVEFORMATEX wfx; WAVEFORMATEXTENSIBLE wfx;
HWAVEIN hWaveIn; HWAVEIN hWaveIn;
MMRESULT result; MMRESULT result;
WAVEHDR* waveBlocks; WAVEHDR* waveBlocks;

View File

@@ -43,56 +43,11 @@
// //
#include "qwindowsaudiooutput.h" #include "qwindowsaudiooutput.h"
#include "qwindowsaudiodeviceinfo.h"
#include "qwindowsaudioutils.h"
#include <QtEndian> #include <QtEndian>
#include <QtCore/QDataStream> #include <QtCore/QDataStream>
#ifndef SPEAKER_FRONT_LEFT
#define SPEAKER_FRONT_LEFT 0x00000001
#define SPEAKER_FRONT_RIGHT 0x00000002
#define SPEAKER_FRONT_CENTER 0x00000004
#define SPEAKER_LOW_FREQUENCY 0x00000008
#define SPEAKER_BACK_LEFT 0x00000010
#define SPEAKER_BACK_RIGHT 0x00000020
#define SPEAKER_FRONT_LEFT_OF_CENTER 0x00000040
#define SPEAKER_FRONT_RIGHT_OF_CENTER 0x00000080
#define SPEAKER_BACK_CENTER 0x00000100
#define SPEAKER_SIDE_LEFT 0x00000200
#define SPEAKER_SIDE_RIGHT 0x00000400
#define SPEAKER_TOP_CENTER 0x00000800
#define SPEAKER_TOP_FRONT_LEFT 0x00001000
#define SPEAKER_TOP_FRONT_CENTER 0x00002000
#define SPEAKER_TOP_FRONT_RIGHT 0x00004000
#define SPEAKER_TOP_BACK_LEFT 0x00008000
#define SPEAKER_TOP_BACK_CENTER 0x00010000
#define SPEAKER_TOP_BACK_RIGHT 0x00020000
#define SPEAKER_RESERVED 0x7FFC0000
#define SPEAKER_ALL 0x80000000
#endif
#ifndef _WAVEFORMATEXTENSIBLE_
#define _WAVEFORMATEXTENSIBLE_
typedef struct
{
WAVEFORMATEX Format; // Base WAVEFORMATEX data
union
{
WORD wValidBitsPerSample; // Valid bits in each sample container
WORD wSamplesPerBlock; // Samples per block of audio data; valid
// if wBitsPerSample=0 (but rarely used).
WORD wReserved; // Zero if neither case above applies.
} Samples;
DWORD dwChannelMask; // Positions of the audio channels
GUID SubFormat; // Format identifier GUID
} WAVEFORMATEXTENSIBLE, *PWAVEFORMATEXTENSIBLE, *LPPWAVEFORMATEXTENSIBLE;
typedef const WAVEFORMATEXTENSIBLE* LPCWAVEFORMATEXTENSIBLE;
#endif
#if !defined(WAVE_FORMAT_EXTENSIBLE)
#define WAVE_FORMAT_EXTENSIBLE 0xFFFE
#endif
//#define DEBUG_AUDIO 1 //#define DEBUG_AUDIO 1
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -265,16 +220,8 @@ bool QWindowsAudioOutput::open()
period_size = 0; period_size = 0;
if (!settings.isValid()) { if (!qt_convertFormat(settings, &wfx)) {
qWarning("QAudioOutput: open error, invalid format."); qWarning("QAudioOutput: open error, invalid format.");
} else if (settings.channelCount() <= 0) {
qWarning("QAudioOutput: open error, invalid number of channels (%d).",
settings.channelCount());
} else if (settings.sampleSize() <= 0) {
qWarning("QAudioOutput: open error, invalid sample size (%d).",
settings.sampleSize());
} else if (settings.sampleRate() < 8000 || settings.sampleRate() > 96000) {
qWarning("QAudioOutput: open error, sample rate out of range (%d).", settings.sampleRate());
} else if (buffer_size == 0) { } else if (buffer_size == 0) {
// Default buffer size, 200ms, default period size is 40ms // Default buffer size, 200ms, default period size is 40ms
buffer_size buffer_size
@@ -308,67 +255,19 @@ bool QWindowsAudioOutput::open()
timeStamp.restart(); timeStamp.restart();
elapsedTimeOffset = 0; elapsedTimeOffset = 0;
wfx.nSamplesPerSec = settings.sampleRate();
wfx.wBitsPerSample = settings.sampleSize();
wfx.nChannels = settings.channelCount();
wfx.cbSize = 0;
bool surround = false;
if (settings.channelCount() > 2)
surround = true;
wfx.wFormatTag = WAVE_FORMAT_PCM;
wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
QDataStream ds(&m_device, QIODevice::ReadOnly); QDataStream ds(&m_device, QIODevice::ReadOnly);
quint32 deviceId; quint32 deviceId;
ds >> deviceId; ds >> deviceId;
if (!surround) { if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfx.Format,
if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfx,
(DWORD_PTR)&waveOutProc, (DWORD_PTR)&waveOutProc,
(DWORD_PTR) this, (DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) { CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {
errorState = QAudio::OpenError; errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState; deviceState = QAudio::StoppedState;
emit stateChanged(deviceState); emit stateChanged(deviceState);
qWarning("QAudioOutput: open error"); qWarning("QAudioOutput: open error");
return false; return false;
}
} else {
WAVEFORMATEXTENSIBLE wfex;
wfex.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
wfex.Format.nChannels = settings.channelCount();
wfex.Format.wBitsPerSample = settings.sampleSize();
wfex.Format.nSamplesPerSec = settings.sampleRate();
wfex.Format.nBlockAlign = wfex.Format.nChannels*wfex.Format.wBitsPerSample/8;
wfex.Format.nAvgBytesPerSec=wfex.Format.nSamplesPerSec*wfex.Format.nBlockAlign;
wfex.Samples.wValidBitsPerSample=wfex.Format.wBitsPerSample;
static const GUID _KSDATAFORMAT_SUBTYPE_PCM = {
0x00000001, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
wfex.SubFormat=_KSDATAFORMAT_SUBTYPE_PCM;
wfex.Format.cbSize=22;
wfex.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT;
if (settings.channelCount() >= 4)
wfex.dwChannelMask |= SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT;
if (settings.channelCount() >= 6)
wfex.dwChannelMask |= SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY;
if (settings.channelCount() == 8)
wfex.dwChannelMask |= SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT;
if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfex.Format,
(DWORD_PTR)&waveOutProc,
(DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioOutput: open error");
return false;
}
} }
totalTimeValue = 0; totalTimeValue = 0;

View File

@@ -45,8 +45,7 @@
#ifndef QWINDOWSAUDIOOUTPUT_H #ifndef QWINDOWSAUDIOOUTPUT_H
#define QWINDOWSAUDIOOUTPUT_H #define QWINDOWSAUDIOOUTPUT_H
#include <QtCore/qt_windows.h> #include "qwindowsaudioutils.h"
#include <mmsystem.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <QtCore/qtimer.h> #include <QtCore/qtimer.h>
@@ -132,7 +131,7 @@ private:
bool open(); bool open();
void close(); void close();
WAVEFORMATEX wfx; WAVEFORMATEXTENSIBLE wfx;
HWAVEOUT hWaveOut; HWAVEOUT hWaveOut;
MMRESULT result; MMRESULT result;
WAVEHDR header; WAVEHDR header;

View File

@@ -0,0 +1,111 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qwindowsaudioutils.h"
#ifndef SPEAKER_FRONT_LEFT
#define SPEAKER_FRONT_LEFT 0x00000001
#define SPEAKER_FRONT_RIGHT 0x00000002
#define SPEAKER_FRONT_CENTER 0x00000004
#define SPEAKER_LOW_FREQUENCY 0x00000008
#define SPEAKER_BACK_LEFT 0x00000010
#define SPEAKER_BACK_RIGHT 0x00000020
#define SPEAKER_FRONT_LEFT_OF_CENTER 0x00000040
#define SPEAKER_FRONT_RIGHT_OF_CENTER 0x00000080
#define SPEAKER_BACK_CENTER 0x00000100
#define SPEAKER_SIDE_LEFT 0x00000200
#define SPEAKER_SIDE_RIGHT 0x00000400
#define SPEAKER_TOP_CENTER 0x00000800
#define SPEAKER_TOP_FRONT_LEFT 0x00001000
#define SPEAKER_TOP_FRONT_CENTER 0x00002000
#define SPEAKER_TOP_FRONT_RIGHT 0x00004000
#define SPEAKER_TOP_BACK_LEFT 0x00008000
#define SPEAKER_TOP_BACK_CENTER 0x00010000
#define SPEAKER_TOP_BACK_RIGHT 0x00020000
#define SPEAKER_RESERVED 0x7FFC0000
#define SPEAKER_ALL 0x80000000
#endif
#ifndef WAVE_FORMAT_EXTENSIBLE
#define WAVE_FORMAT_EXTENSIBLE 0xFFFE
#endif
#ifndef WAVE_FORMAT_IEEE_FLOAT
#define WAVE_FORMAT_IEEE_FLOAT 0x0003
#endif
static const GUID _KSDATAFORMAT_SUBTYPE_PCM = {
0x00000001, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
static const GUID _KSDATAFORMAT_SUBTYPE_IEEE_FLOAT = {
0x00000003, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
QT_BEGIN_NAMESPACE
bool qt_convertFormat(const QAudioFormat &format, WAVEFORMATEXTENSIBLE *wfx)
{
if (!wfx
|| !format.isValid()
|| format.codec() != QStringLiteral("audio/pcm")
|| format.sampleRate() <= 0
|| format.channelCount() <= 0
|| format.sampleSize() <= 0
|| format.byteOrder() != QAudioFormat::LittleEndian) {
return false;
}
wfx->Format.nSamplesPerSec = format.sampleRate();
wfx->Format.wBitsPerSample = wfx->Samples.wValidBitsPerSample = format.sampleSize();
wfx->Format.nChannels = format.channelCount();
wfx->Format.nBlockAlign = (wfx->Format.wBitsPerSample / 8) * wfx->Format.nChannels;
wfx->Format.nAvgBytesPerSec = wfx->Format.nBlockAlign * wfx->Format.nSamplesPerSec;
wfx->Format.cbSize = 0;
if (format.sampleType() == QAudioFormat::Float) {
wfx->Format.wFormatTag = WAVE_FORMAT_IEEE_FLOAT;
wfx->SubFormat = _KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
} else {
wfx->Format.wFormatTag = WAVE_FORMAT_PCM;
wfx->SubFormat = _KSDATAFORMAT_SUBTYPE_PCM;
}
if (format.channelCount() > 2) {
wfx->Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
wfx->Format.cbSize = 22;
wfx->dwChannelMask = 0xFFFFFFFF >> (32 - format.channelCount());
}
return true;
}
QT_END_NAMESPACE

View File

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QWINDOWSAUDIOUTILS_H
#define QWINDOWSAUDIOUTILS_H
#include <qaudioformat.h>
#include <QtCore/qt_windows.h>
#include <mmsystem.h>
#ifndef _WAVEFORMATEXTENSIBLE_
#define _WAVEFORMATEXTENSIBLE_
typedef struct
{
WAVEFORMATEX Format; // Base WAVEFORMATEX data
union
{
WORD wValidBitsPerSample; // Valid bits in each sample container
WORD wSamplesPerBlock; // Samples per block of audio data; valid
// if wBitsPerSample=0 (but rarely used).
WORD wReserved; // Zero if neither case above applies.
} Samples;
DWORD dwChannelMask; // Positions of the audio channels
GUID SubFormat; // Format identifier GUID
} WAVEFORMATEXTENSIBLE, *PWAVEFORMATEXTENSIBLE, *LPPWAVEFORMATEXTENSIBLE;
typedef const WAVEFORMATEXTENSIBLE* LPCWAVEFORMATEXTENSIBLE;
#endif
QT_BEGIN_NAMESPACE
bool qt_convertFormat(const QAudioFormat &format, WAVEFORMATEXTENSIBLE *wfx);
QT_END_NAMESPACE
#endif // QWINDOWSAUDIOUTILS_H

View File

@@ -12,13 +12,15 @@ HEADERS += \
qwindowsaudioplugin.h \ qwindowsaudioplugin.h \
qwindowsaudiodeviceinfo.h \ qwindowsaudiodeviceinfo.h \
qwindowsaudioinput.h \ qwindowsaudioinput.h \
qwindowsaudiooutput.h qwindowsaudiooutput.h \
qwindowsaudioutils.h
SOURCES += \ SOURCES += \
qwindowsaudioplugin.cpp \ qwindowsaudioplugin.cpp \
qwindowsaudiodeviceinfo.cpp \ qwindowsaudiodeviceinfo.cpp \
qwindowsaudioinput.cpp \ qwindowsaudioinput.cpp \
qwindowsaudiooutput.cpp qwindowsaudiooutput.cpp \
qwindowsaudioutils.cpp
OTHER_FILES += \ OTHER_FILES += \
windowsaudio.json windowsaudio.json

View File

@@ -48,6 +48,11 @@
#include <windows.media.capture.h> #include <windows.media.capture.h>
#include <windows.storage.streams.h> #include <windows.storage.streams.h>
#ifdef Q_OS_WINPHONE
#include <Windows.Security.ExchangeActiveSyncProvisioning.h>
using namespace ABI::Windows::Security::ExchangeActiveSyncProvisioning;
#endif
using namespace Microsoft::WRL; using namespace Microsoft::WRL;
using namespace Microsoft::WRL::Wrappers; using namespace Microsoft::WRL::Wrappers;
using namespace ABI::Windows::Devices::Enumeration; using namespace ABI::Windows::Devices::Enumeration;
@@ -85,9 +90,21 @@ private:
class MediaStream : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>, IMFStreamSink, IMFMediaEventGenerator, IMFMediaTypeHandler> class MediaStream : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>, IMFStreamSink, IMFMediaEventGenerator, IMFMediaTypeHandler>
{ {
enum Flags { NoFlag = 0, BufferLockRequired = 1 };
template <int n>
static Flags bufferLockRequired(const wchar_t (&blackListName)[n], const HString &deviceModel)
{
quint32 deviceNameLength;
const wchar_t *deviceName = deviceModel.GetRawBuffer(&deviceNameLength);
if (n - 1 <= deviceNameLength && !wmemcmp(blackListName, deviceName, n - 1))
return BufferLockRequired;
return NoFlag;
}
public: public:
MediaStream(IMFMediaType *type, IMFMediaSink *mediaSink, QWinRTCameraVideoRendererControl *videoRenderer) MediaStream(IMFMediaType *type, IMFMediaSink *mediaSink, QWinRTCameraVideoRendererControl *videoRenderer)
: m_type(type), m_sink(mediaSink), m_videoRenderer(videoRenderer) : m_type(type), m_sink(mediaSink), m_videoRenderer(videoRenderer), m_flags(NoFlag)
{ {
Q_ASSERT(m_videoRenderer); Q_ASSERT(m_videoRenderer);
@@ -98,6 +115,18 @@ public:
Q_ASSERT_SUCCEEDED(hr); Q_ASSERT_SUCCEEDED(hr);
hr = MFAllocateSerialWorkQueue(MFASYNC_CALLBACK_QUEUE_STANDARD, &m_workQueueId); hr = MFAllocateSerialWorkQueue(MFASYNC_CALLBACK_QUEUE_STANDARD, &m_workQueueId);
Q_ASSERT_SUCCEEDED(hr); Q_ASSERT_SUCCEEDED(hr);
#ifdef Q_OS_WINPHONE
// Workaround for certain devices which fail to blit software buffers without first mapping them
ComPtr<IEasClientDeviceInformation> deviceInfo;
hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Security_ExchangeActiveSyncProvisioning_EasClientDeviceInformation).Get(),
&deviceInfo);
Q_ASSERT_SUCCEEDED(hr);
HString deviceModel;
hr = deviceInfo->get_SystemSku(deviceModel.GetAddressOf());
Q_ASSERT_SUCCEEDED(hr);
m_flags |= bufferLockRequired(L"NOKIA RM-976", deviceModel);
#endif
} }
~MediaStream() ~MediaStream()
@@ -171,6 +200,16 @@ public:
hr = buffer.As(&buffer2d); hr = buffer.As(&buffer2d);
RETURN_HR_IF_FAILED("Failed to cast camera sample buffer to 2D buffer"); RETURN_HR_IF_FAILED("Failed to cast camera sample buffer to 2D buffer");
#ifdef Q_OS_WINPHONE
if (m_flags & BufferLockRequired) {
BYTE *bytes;
LONG stride;
hr = buffer2d->Lock2D(&bytes, &stride);
RETURN_HR_IF_FAILED("Failed to lock camera frame buffer");
hr = buffer2d->Unlock2D();
RETURN_HR_IF_FAILED("Failed to unlock camera frame buffer");
}
#endif
m_pendingSamples.deref(); m_pendingSamples.deref();
m_videoRenderer->queueBuffer(buffer2d.Get()); m_videoRenderer->queueBuffer(buffer2d.Get());
@@ -244,6 +283,7 @@ private:
QWinRTCameraVideoRendererControl *m_videoRenderer; QWinRTCameraVideoRendererControl *m_videoRenderer;
QAtomicInt m_pendingSamples; QAtomicInt m_pendingSamples;
quint32 m_flags;
}; };
class MediaSink : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>, IMediaExtension, IMFMediaSink, IMFClockStateSink> class MediaSink : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>, IMediaExtension, IMFMediaSink, IMFClockStateSink>

View File

@@ -195,17 +195,6 @@ void MFAudioDecoderControl::handleMediaSourceReady()
if (mediaType) { if (mediaType) {
m_sourceOutputFormat = m_audioFormat; m_sourceOutputFormat = m_audioFormat;
QAudioFormat af = m_audioFormat; QAudioFormat af = m_audioFormat;
GUID subType;
if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subType))) {
if (subType == MFAudioFormat_Float) {
m_sourceOutputFormat.setSampleType(QAudioFormat::Float);
} else {
m_sourceOutputFormat.setSampleType(QAudioFormat::SignedInt);
}
}
if (m_sourceOutputFormat.sampleType() != QAudioFormat::Float) {
m_sourceOutputFormat.setByteOrder(QAudioFormat::LittleEndian);
}
UINT32 val = 0; UINT32 val = 0;
if (SUCCEEDED(mediaType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &val))) { if (SUCCEEDED(mediaType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &val))) {
@@ -218,6 +207,20 @@ void MFAudioDecoderControl::handleMediaSourceReady()
m_sourceOutputFormat.setSampleSize(int(val)); m_sourceOutputFormat.setSampleSize(int(val));
} }
GUID subType;
if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subType))) {
if (subType == MFAudioFormat_Float) {
m_sourceOutputFormat.setSampleType(QAudioFormat::Float);
} else if (m_sourceOutputFormat.sampleSize() == 8) {
m_sourceOutputFormat.setSampleType(QAudioFormat::UnSignedInt);
} else {
m_sourceOutputFormat.setSampleType(QAudioFormat::SignedInt);
}
}
if (m_sourceOutputFormat.sampleType() != QAudioFormat::Float) {
m_sourceOutputFormat.setByteOrder(QAudioFormat::LittleEndian);
}
if (m_audioFormat.sampleType() != QAudioFormat::Float if (m_audioFormat.sampleType() != QAudioFormat::Float
&& m_audioFormat.sampleType() != QAudioFormat::SignedInt) { && m_audioFormat.sampleType() != QAudioFormat::SignedInt) {
af.setSampleType(m_sourceOutputFormat.sampleType()); af.setSampleType(m_sourceOutputFormat.sampleType());

View File

@@ -50,7 +50,7 @@
MFPlayerService::MFPlayerService(QObject *parent) MFPlayerService::MFPlayerService(QObject *parent)
: QMediaService(parent) : QMediaService(parent)
, m_session(0) , m_session(0)
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
, m_videoWindowControl(0) , m_videoWindowControl(0)
#endif #endif
, m_videoRendererControl(0) , m_videoRendererControl(0)
@@ -65,7 +65,7 @@ MFPlayerService::~MFPlayerService()
{ {
m_session->close(); m_session->close();
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
if (m_videoWindowControl) if (m_videoWindowControl)
delete m_videoWindowControl; delete m_videoWindowControl;
#endif #endif

View File

@@ -43,7 +43,7 @@
#include <QtCore/qbuffer.h> #include <QtCore/qbuffer.h>
#include "mfplayercontrol.h" #include "mfplayercontrol.h"
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
#include "evr9videowindowcontrol.h" #include "evr9videowindowcontrol.h"
#endif #endif
#include "mfvideorenderercontrol.h" #include "mfvideorenderercontrol.h"
@@ -140,7 +140,7 @@ void MFPlayerSession::close()
if (m_playerService->videoRendererControl()) { if (m_playerService->videoRendererControl()) {
m_playerService->videoRendererControl()->releaseActivate(); m_playerService->videoRendererControl()->releaseActivate();
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
} else if (m_playerService->videoWindowControl()) { } else if (m_playerService->videoWindowControl()) {
m_playerService->videoWindowControl()->releaseActivate(); m_playerService->videoWindowControl()->releaseActivate();
#endif #endif
@@ -404,7 +404,7 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(IMFStreamDescriptor *streamDesc,
mediaType = Video; mediaType = Video;
if (m_playerService->videoRendererControl()) { if (m_playerService->videoRendererControl()) {
activate = m_playerService->videoRendererControl()->createActivate(); activate = m_playerService->videoRendererControl()->createActivate();
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
} else if (m_playerService->videoWindowControl()) { } else if (m_playerService->videoWindowControl()) {
activate = m_playerService->videoWindowControl()->createActivate(); activate = m_playerService->videoWindowControl()->createActivate();
#endif #endif
@@ -556,7 +556,10 @@ QAudioFormat MFPlayerSession::audioFormatForMFMediaType(IMFMediaType *mediaType)
format.setSampleSize(wfx->wBitsPerSample); format.setSampleSize(wfx->wBitsPerSample);
format.setCodec("audio/pcm"); format.setCodec("audio/pcm");
format.setByteOrder(QAudioFormat::LittleEndian); format.setByteOrder(QAudioFormat::LittleEndian);
format.setSampleType(QAudioFormat::SignedInt); if (format.sampleSize() == 8)
format.setSampleType(QAudioFormat::UnSignedInt);
else
format.setSampleType(QAudioFormat::SignedInt);
CoTaskMemFree(wfx); CoTaskMemFree(wfx);
return format; return format;
@@ -1577,7 +1580,7 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
} }
updatePendingCommands(CmdStart); updatePendingCommands(CmdStart);
#ifndef Q_WS_SIMULATOR #if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
// playback started, we can now set again the procAmpValues if they have been // playback started, we can now set again the procAmpValues if they have been
// changed previously (these are lost when loading a new media) // changed previously (these are lost when loading a new media)
if (m_playerService->videoWindowControl()) { if (m_playerService->videoWindowControl()) {
@@ -1721,10 +1724,17 @@ void MFPlayerSession::updatePendingCommands(Command command)
if (m_state.command != command || m_pendingState == NoPending) if (m_state.command != command || m_pendingState == NoPending)
return; return;
// The current pending command has completed. // Seek while paused completed
if (m_pendingState == SeekPending && m_state.prevCmd == CmdPause) { if (m_pendingState == SeekPending && m_state.prevCmd == CmdPause) {
m_pendingState = NoPending; m_pendingState = NoPending;
m_state.setCommand(CmdPause); // A seek operation actually restarts playback. If scrubbing is possible, playback rate
// is set to 0.0 at this point and we just need to reset the current state to Pause.
// If scrubbing is not possible, the playback rate was not changed and we explicitly need
// to re-pause playback.
if (!canScrub())
pause();
else
m_state.setCommand(CmdPause);
} }
m_pendingState = NoPending; m_pendingState = NoPending;

View File

@@ -813,7 +813,7 @@ namespace
case QVideoFrame::Format_RGB32: case QVideoFrame::Format_RGB32:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32); mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
break; break;
case QVideoFrame::Format_RGB24: case QVideoFrame::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
break; break;
case QVideoFrame::Format_RGB565: case QVideoFrame::Format_RGB565:
@@ -842,8 +842,11 @@ namespace
mediaType->Release(); mediaType->Release();
continue; continue;
} }
m_pixelFormats.push_back(format); // QAbstractVideoSurface::supportedPixelFormats() returns formats in descending
m_mediaTypes.push_back(mediaType); // order of preference, while IMFMediaTypeHandler is supposed to return supported
// formats in ascending order of preference. We need to reverse the list.
m_pixelFormats.prepend(format);
m_mediaTypes.prepend(mediaType);
} }
} }
@@ -1082,6 +1085,7 @@ namespace
return format.frameWidth() * 4; return format.frameWidth() * 4;
// 24 bpp packed formats. // 24 bpp packed formats.
case QVideoFrame::Format_RGB24: case QVideoFrame::Format_RGB24:
case QVideoFrame::Format_BGR24:
return PAD_TO_DWORD(format.frameWidth() * 3); return PAD_TO_DWORD(format.frameWidth() * 3);
// 16 bpp packed formats. // 16 bpp packed formats.
case QVideoFrame::Format_RGB565: case QVideoFrame::Format_RGB565:

View File

@@ -46,11 +46,11 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_RGB::supportedPixelFormats(
QList<QVideoFrame::PixelFormat> pixelFormats; QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == QAbstractVideoBuffer::NoHandle) { if (handleType == QAbstractVideoBuffer::NoHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32); pixelFormats.append(QVideoFrame::Format_RGB32);
pixelFormats.append(QVideoFrame::Format_ARGB32); pixelFormats.append(QVideoFrame::Format_ARGB32);
pixelFormats.append(QVideoFrame::Format_BGR32); pixelFormats.append(QVideoFrame::Format_BGR32);
pixelFormats.append(QVideoFrame::Format_BGRA32); pixelFormats.append(QVideoFrame::Format_BGRA32);
pixelFormats.append(QVideoFrame::Format_RGB565);
} }
return pixelFormats; return pixelFormats;