Merge remote-tracking branch 'origin/5.4' into 5.5

Conflicts:
	src/multimedia/playback/playlistfileparser.cpp
	src/plugins/windowsaudio/qwindowsaudiodeviceinfo.cpp

Change-Id: I52950def2b8283ae15797d05d4ead6a1256eba19
This commit is contained in:
Liang Qi
2015-04-15 09:26:14 +02:00
40 changed files with 871 additions and 533 deletions

View File

@@ -65,7 +65,7 @@ Rectangle {
loops: Animation.Infinite
running: true
NumberAnimation {
duration: 8000
duration: 12000
from: 0
to: 1
}
@@ -113,7 +113,7 @@ Rectangle {
}
velocity: {
var speed = root.twoPi * root.radius / 4;
return shipSound.direction * speed;
return shipSound.direction.times(speed);
}
Component.onCompleted: shipSound.play()
@@ -137,7 +137,7 @@ Rectangle {
color: "lightgreen"
}
Text {
text: " volume:" + volumeBar.volumeCtrl.volume * 100 +"%";
text: " volume:" + Math.round(volumeBar.volumeCtrl.volume * 100) +"%";
font.pointSize: 16;
font.italic: true;
color: "black"

View File

@@ -68,22 +68,18 @@ AudioEngine {
}
AudioSample {
name:"fire"
source: "fire-03-loop.wav"
name:"engine"
source: "engine-loop.wav"
preloaded:true
}
AudioSample {
name:"explosion"
source: "explosion-02.wav"
name:"horn"
source: "horn.wav"
}
AudioSample {
name:"lava"
source: "lava-bubbling-01.wav"
}
AudioSample {
name:"water"
source: "running-water-01.wav"
name:"whistle"
source: "whistle.wav"
}
Sound {
name:"shipengine"
@@ -91,7 +87,7 @@ AudioEngine {
category:"sfx"
PlayVariation {
looping:true
sample:"fire"
sample:"engine"
maxGain:0.9
minGain:0.8
}
@@ -101,23 +97,14 @@ AudioEngine {
name:"effects"
category:"sfx"
PlayVariation {
sample:"lava"
maxGain:1.5
minGain:1.2
maxPitch:2.0
minPitch:0.5
sample:"horn"
maxGain:2.0
minGain:0.9
}
PlayVariation {
sample:"explosion"
maxGain:1.1
minGain:0.7
maxPitch:1.5
minPitch:0.5
}
PlayVariation {
sample:"water"
maxGain:1.5
minGain:1.2
sample:"whistle"
maxGain:1.0
minGain:0.8
}
}

Binary file not shown.

View File

@@ -100,9 +100,6 @@ QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent)
{
m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
if (!m_videoSink)
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
if (m_videoSink) {
// Check if the xv sink is usable
if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {

View File

@@ -106,6 +106,11 @@ void QDeclarativeAudioSample::componentComplete()
m_complete = true;
}
/*!
\qmlproperty url QtAudioEngine::AudioSample::source
This property holds the source URL of the audio sample.
*/
QUrl QDeclarativeAudioSample::source() const
{
return m_url;

View File

@@ -58,6 +58,12 @@ QML_DECLARE_TYPE(QSoundEffect)
QT_BEGIN_NAMESPACE
static QObject *multimedia_global_object(QQmlEngine *qmlEngine, QJSEngine *jsEngine)
{
Q_UNUSED(qmlEngine)
return new QDeclarativeMultimediaGlobal(jsEngine);
}
class QMultimediaDeclarativeModule : public QQmlExtensionPlugin
{
Q_OBJECT
@@ -99,7 +105,7 @@ public:
qmlRegisterType<QSoundEffect>(uri, 5, 3, "SoundEffect");
// 5.4 types
qmlRegisterSingletonType(uri, 5, 4, "QtMultimedia", QDeclarativeMultimedia::initGlobalObject);
qmlRegisterSingletonType<QDeclarativeMultimediaGlobal>(uri, 5, 4, "QtMultimedia", multimedia_global_object);
qmlRegisterRevision<QDeclarativeCamera, 1>(uri, 5, 4);
qmlRegisterUncreatableType<QDeclarativeCameraViewfinder>(uri, 5, 4, "CameraViewfinder",
trUtf8("CameraViewfinder is provided by Camera"));

View File

@@ -3,7 +3,8 @@ import QtQuick.tooling 1.1
// This file describes the plugin-supplied types contained in the library.
// It is used for QML tooling purposes only.
//
// This file was auto-generated with the command 'qmlplugindump -notrelocatable QtMultimedia 5.0'.
// This file was auto-generated by:
// 'qmlplugindump -nonrelocatable QtMultimedia 5.4'
Module {
Component {
@@ -76,6 +77,14 @@ Module {
"LockFocus": 4
}
}
Enum {
name: "Position"
values: {
"UnspecifiedPosition": 0,
"BackFace": 1,
"FrontFace": 2
}
}
Property { name: "state"; type: "QCamera::State"; isReadonly: true }
Property { name: "status"; type: "QCamera::Status"; isReadonly: true }
Property { name: "captureMode"; type: "QCamera::CaptureModes" }
@@ -232,6 +241,14 @@ Module {
prototype: "QObject"
exports: ["QtMultimedia/Camera 5.0"]
exportMetaObjectRevisions: [0]
Enum {
name: "Position"
values: {
"UnspecifiedPosition": 0,
"BackFace": 1,
"FrontFace": 2
}
}
Enum {
name: "CaptureMode"
values: {
@@ -357,6 +374,10 @@ Module {
"ResourceMissing": 3
}
}
Property { name: "deviceId"; revision: 1; type: "string" }
Property { name: "position"; revision: 1; type: "Position" }
Property { name: "displayName"; revision: 1; type: "string"; isReadonly: true }
Property { name: "orientation"; revision: 1; type: "int"; isReadonly: true }
Property { name: "captureMode"; type: "CaptureMode" }
Property { name: "cameraState"; type: "State" }
Property { name: "cameraStatus"; type: "Status"; isReadonly: true }
@@ -395,12 +416,30 @@ Module {
isReadonly: true
isPointer: true
}
Property {
name: "metaData"
revision: 1
type: "QDeclarativeMediaMetaData"
isReadonly: true
isPointer: true
}
Property {
name: "viewfinder"
revision: 1
type: "QDeclarativeCameraViewfinder"
isReadonly: true
isPointer: true
}
Signal { name: "errorChanged" }
Signal {
name: "error"
Parameter { name: "errorCode"; type: "QDeclarativeCamera::Error" }
Parameter { name: "errorString"; type: "string" }
}
Signal { name: "deviceIdChanged"; revision: 1 }
Signal { name: "positionChanged"; revision: 1 }
Signal { name: "displayNameChanged"; revision: 1 }
Signal { name: "orientationChanged"; revision: 1 }
Signal {
name: "cameraStateChanged"
Parameter { type: "QDeclarativeCamera::State" }
@@ -450,6 +489,7 @@ Module {
name: "QDeclarativeCameraCapture"
prototype: "QObject"
exports: ["QtMultimedia/CameraCapture 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Property { name: "ready"; type: "bool"; isReadonly: true }
Property { name: "capturedImagePath"; type: "string"; isReadonly: true }
@@ -509,7 +549,33 @@ Module {
name: "QDeclarativeCameraExposure"
prototype: "QObject"
exports: ["QtMultimedia/CameraExposure 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Enum {
name: "ExposureMode"
values: {
"ExposureAuto": 0,
"ExposureManual": 1,
"ExposurePortrait": 2,
"ExposureNight": 3,
"ExposureBacklight": 4,
"ExposureSpotlight": 5,
"ExposureSports": 6,
"ExposureSnow": 7,
"ExposureBeach": 8,
"ExposureLargeAperture": 9,
"ExposureSmallAperture": 10,
"ExposureModeVendor": 1000
}
}
Enum {
name: "MeteringMode"
values: {
"MeteringMatrix": 1,
"MeteringAverage": 2,
"MeteringSpot": 3
}
}
Property { name: "exposureCompensation"; type: "double" }
Property { name: "iso"; type: "int"; isReadonly: true }
Property { name: "shutterSpeed"; type: "double"; isReadonly: true }
@@ -517,9 +583,9 @@ Module {
Property { name: "manualShutterSpeed"; type: "double" }
Property { name: "manualAperture"; type: "double" }
Property { name: "manualIso"; type: "double" }
Property { name: "exposureMode"; type: "QDeclarativeCamera::ExposureMode" }
Property { name: "exposureMode"; type: "ExposureMode" }
Property { name: "spotMeteringPoint"; type: "QPointF" }
Property { name: "meteringMode"; type: "QDeclarativeCamera::MeteringMode" }
Property { name: "meteringMode"; type: "MeteringMode" }
Signal {
name: "isoSensitivityChanged"
Parameter { type: "int" }
@@ -550,11 +616,11 @@ Module {
}
Signal {
name: "exposureModeChanged"
Parameter { type: "QDeclarativeCamera::ExposureMode" }
Parameter { type: "ExposureMode" }
}
Signal {
name: "meteringModeChanged"
Parameter { type: "QDeclarativeCamera::MeteringMode" }
Parameter { type: "MeteringMode" }
}
Signal {
name: "spotMeteringPointChanged"
@@ -562,7 +628,7 @@ Module {
}
Method {
name: "setExposureMode"
Parameter { type: "QDeclarativeCamera::ExposureMode" }
Parameter { type: "ExposureMode" }
}
Method {
name: "setExposureCompensation"
@@ -588,38 +654,75 @@ Module {
name: "QDeclarativeCameraFlash"
prototype: "QObject"
exports: ["QtMultimedia/CameraFlash 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Enum {
name: "FlashMode"
values: {
"FlashAuto": 1,
"FlashOff": 2,
"FlashOn": 4,
"FlashRedEyeReduction": 8,
"FlashFill": 16,
"FlashTorch": 32,
"FlashVideoLight": 64,
"FlashSlowSyncFrontCurtain": 128,
"FlashSlowSyncRearCurtain": 256,
"FlashManual": 512
}
}
Property { name: "ready"; type: "bool"; isReadonly: true }
Property { name: "mode"; type: "int" }
Property { name: "mode"; type: "FlashMode" }
Signal {
name: "flashReady"
Parameter { name: "status"; type: "bool" }
}
Signal {
name: "flashModeChanged"
Parameter { type: "int" }
Parameter { type: "FlashMode" }
}
Method {
name: "setFlashMode"
Parameter { type: "int" }
Parameter { type: "FlashMode" }
}
}
Component {
name: "QDeclarativeCameraFocus"
prototype: "QObject"
exports: ["QtMultimedia/CameraFocus 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Property { name: "focusMode"; type: "QDeclarativeCamera::FocusMode" }
Property { name: "focusPointMode"; type: "QDeclarativeCamera::FocusPointMode" }
Enum {
name: "FocusMode"
values: {
"FocusManual": 1,
"FocusHyperfocal": 2,
"FocusInfinity": 4,
"FocusAuto": 8,
"FocusContinuous": 16,
"FocusMacro": 32
}
}
Enum {
name: "FocusPointMode"
values: {
"FocusPointAuto": 0,
"FocusPointCenter": 1,
"FocusPointFaceDetection": 2,
"FocusPointCustom": 3
}
}
Property { name: "focusMode"; type: "FocusMode" }
Property { name: "focusPointMode"; type: "FocusPointMode" }
Property { name: "customFocusPoint"; type: "QPointF" }
Property { name: "focusZones"; type: "QObject"; isReadonly: true; isPointer: true }
Signal {
name: "focusModeChanged"
Parameter { type: "QDeclarativeCamera::FocusMode" }
Parameter { type: "FocusMode" }
}
Signal {
name: "focusPointModeChanged"
Parameter { type: "QDeclarativeCamera::FocusPointMode" }
Parameter { type: "FocusPointMode" }
}
Signal {
name: "customFocusPointChanged"
@@ -627,11 +730,11 @@ Module {
}
Method {
name: "setFocusMode"
Parameter { type: "QDeclarativeCamera::FocusMode" }
Parameter { type: "FocusMode" }
}
Method {
name: "setFocusPointMode"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusPointMode" }
Parameter { name: "mode"; type: "FocusPointMode" }
}
Method {
name: "setCustomFocusPoint"
@@ -640,18 +743,19 @@ Module {
Method {
name: "isFocusModeSupported"
type: "bool"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusMode" }
Parameter { name: "mode"; type: "FocusMode" }
}
Method {
name: "isFocusPointModeSupported"
type: "bool"
Parameter { name: "mode"; type: "QDeclarativeCamera::FocusPointMode" }
Parameter { name: "mode"; type: "FocusPointMode" }
}
}
Component {
name: "QDeclarativeCameraImageProcessing"
prototype: "QObject"
exports: ["QtMultimedia/CameraImageProcessing 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Enum {
name: "WhiteBalanceMode"
@@ -727,6 +831,7 @@ Module {
name: "QDeclarativeCameraRecorder"
prototype: "QObject"
exports: ["QtMultimedia/CameraRecorder 5.0"]
isCreatable: false
exportMetaObjectRevisions: [0]
Enum {
name: "RecorderState"
@@ -922,6 +1027,119 @@ Module {
Parameter { name: "encodingMode"; type: "EncodingMode" }
}
}
Component {
name: "QDeclarativeCameraViewfinder"
prototype: "QObject"
exports: ["QtMultimedia/CameraViewfinder 5.4"]
isCreatable: false
exportMetaObjectRevisions: [0]
Property { name: "resolution"; type: "QSize" }
Property { name: "minimumFrameRate"; type: "double" }
Property { name: "maximumFrameRate"; type: "double" }
}
Component {
name: "QDeclarativeMediaMetaData"
prototype: "QObject"
Property { name: "title"; type: "QVariant" }
Property { name: "subTitle"; type: "QVariant" }
Property { name: "author"; type: "QVariant" }
Property { name: "comment"; type: "QVariant" }
Property { name: "description"; type: "QVariant" }
Property { name: "category"; type: "QVariant" }
Property { name: "genre"; type: "QVariant" }
Property { name: "year"; type: "QVariant" }
Property { name: "date"; type: "QVariant" }
Property { name: "userRating"; type: "QVariant" }
Property { name: "keywords"; type: "QVariant" }
Property { name: "language"; type: "QVariant" }
Property { name: "publisher"; type: "QVariant" }
Property { name: "copyright"; type: "QVariant" }
Property { name: "parentalRating"; type: "QVariant" }
Property { name: "ratingOrganization"; type: "QVariant" }
Property { name: "size"; type: "QVariant" }
Property { name: "mediaType"; type: "QVariant" }
Property { name: "duration"; type: "QVariant" }
Property { name: "audioBitRate"; type: "QVariant" }
Property { name: "audioCodec"; type: "QVariant" }
Property { name: "averageLevel"; type: "QVariant" }
Property { name: "channelCount"; type: "QVariant" }
Property { name: "peakValue"; type: "QVariant" }
Property { name: "sampleRate"; type: "QVariant" }
Property { name: "albumTitle"; type: "QVariant" }
Property { name: "albumArtist"; type: "QVariant" }
Property { name: "contributingArtist"; type: "QVariant" }
Property { name: "composer"; type: "QVariant" }
Property { name: "conductor"; type: "QVariant" }
Property { name: "lyrics"; type: "QVariant" }
Property { name: "mood"; type: "QVariant" }
Property { name: "trackNumber"; type: "QVariant" }
Property { name: "trackCount"; type: "QVariant" }
Property { name: "coverArtUrlSmall"; type: "QVariant" }
Property { name: "coverArtUrlLarge"; type: "QVariant" }
Property { name: "resolution"; type: "QVariant" }
Property { name: "pixelAspectRatio"; type: "QVariant" }
Property { name: "videoFrameRate"; type: "QVariant" }
Property { name: "videoBitRate"; type: "QVariant" }
Property { name: "videoCodec"; type: "QVariant" }
Property { name: "posterUrl"; type: "QVariant" }
Property { name: "chapterNumber"; type: "QVariant" }
Property { name: "director"; type: "QVariant" }
Property { name: "leadPerformer"; type: "QVariant" }
Property { name: "writer"; type: "QVariant" }
Property { name: "cameraManufacturer"; type: "QVariant" }
Property { name: "cameraModel"; type: "QVariant" }
Property { name: "event"; type: "QVariant" }
Property { name: "subject"; type: "QVariant" }
Property { name: "orientation"; type: "QVariant" }
Property { name: "exposureTime"; type: "QVariant" }
Property { name: "fNumber"; type: "QVariant" }
Property { name: "exposureProgram"; type: "QVariant" }
Property { name: "isoSpeedRatings"; type: "QVariant" }
Property { name: "exposureBiasValue"; type: "QVariant" }
Property { name: "dateTimeOriginal"; type: "QVariant" }
Property { name: "dateTimeDigitized"; type: "QVariant" }
Property { name: "subjectDistance"; type: "QVariant" }
Property { name: "meteringMode"; type: "QVariant" }
Property { name: "lightSource"; type: "QVariant" }
Property { name: "flash"; type: "QVariant" }
Property { name: "focalLength"; type: "QVariant" }
Property { name: "exposureMode"; type: "QVariant" }
Property { name: "whiteBalance"; type: "QVariant" }
Property { name: "digitalZoomRatio"; type: "QVariant" }
Property { name: "focalLengthIn35mmFilm"; type: "QVariant" }
Property { name: "sceneCaptureType"; type: "QVariant" }
Property { name: "gainControl"; type: "QVariant" }
Property { name: "contrast"; type: "QVariant" }
Property { name: "saturation"; type: "QVariant" }
Property { name: "sharpness"; type: "QVariant" }
Property { name: "deviceSettingDescription"; type: "QVariant" }
Property { name: "gpsLatitude"; type: "QVariant" }
Property { name: "gpsLongitude"; type: "QVariant" }
Property { name: "gpsAltitude"; type: "QVariant" }
Property { name: "gpsTimeStamp"; type: "QVariant" }
Property { name: "gpsSatellites"; type: "QVariant" }
Property { name: "gpsStatus"; type: "QVariant" }
Property { name: "gpsDOP"; type: "QVariant" }
Property { name: "gpsSpeed"; type: "QVariant" }
Property { name: "gpsTrack"; type: "QVariant" }
Property { name: "gpsTrackRef"; type: "QVariant" }
Property { name: "gpsImgDirection"; type: "QVariant" }
Property { name: "gpsImgDirectionRef"; type: "QVariant" }
Property { name: "gpsMapDatum"; type: "QVariant" }
Property { name: "gpsProcessingMethod"; type: "QVariant" }
Property { name: "gpsAreaInformation"; type: "QVariant" }
Signal { name: "metaDataChanged" }
}
Component {
name: "QDeclarativeMultimediaGlobal"
prototype: "QObject"
exports: ["QtMultimedia/QtMultimedia 5.4"]
isCreatable: false
isSingleton: true
exportMetaObjectRevisions: [0]
Property { name: "defaultCamera"; type: "QJSValue"; isReadonly: true }
Property { name: "availableCameras"; type: "QJSValue"; isReadonly: true }
}
Component {
name: "QDeclarativeRadio"
prototype: "QObject"
@@ -1223,6 +1441,7 @@ Module {
Property { name: "source"; type: "QObject"; isPointer: true }
Property { name: "fillMode"; type: "FillMode" }
Property { name: "orientation"; type: "int" }
Property { name: "autoOrientation"; revision: 2; type: "bool" }
Property { name: "sourceRect"; type: "QRectF"; isReadonly: true }
Property { name: "contentRect"; type: "QRectF"; isReadonly: true }
Property { name: "filters"; isList: true; isReadonly: true }
@@ -1301,8 +1520,11 @@ Module {
Component {
name: "QSoundEffect"
prototype: "QObject"
exports: ["QtMultimedia/SoundEffect 5.0"]
exportMetaObjectRevisions: [0]
exports: [
"QtMultimedia/SoundEffect 5.0",
"QtMultimedia/SoundEffect 5.3"
]
exportMetaObjectRevisions: [0, 0]
Enum {
name: "Loop"
values: {

View File

@@ -83,7 +83,7 @@ class QDeclarativeCameraRecorder : public QObject
Q_PROPERTY(QString actualLocation READ actualLocation NOTIFY actualLocationChanged)
Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
Q_PROPERTY(QString errorString READ errorString NOTIFY error)
Q_PROPERTY(QString errorCode READ errorCode NOTIFY error)
Q_PROPERTY(Error errorCode READ errorCode NOTIFY error)
public:
enum RecorderState

View File

@@ -153,27 +153,6 @@ Camera {
\endqml
*/
namespace QDeclarativeMultimedia {
#define FREEZE_SOURCE "(function deepFreeze(o) { "\
" var prop, propKey;" \
" Object.freeze(o);" \
" for (propKey in o) {" \
" prop = o[propKey];" \
" if (!o.hasOwnProperty(propKey) || !(typeof prop === \"object\") || " \
" Object.isFrozen(prop)) {" \
" continue;" \
" }" \
" deepFreeze(prop);" \
" }" \
"})"
static void deepFreeze(QJSEngine *jsEngine, const QJSValue &obj)
{
QJSValue freezeFunc = jsEngine->evaluate(QString::fromUtf8(FREEZE_SOURCE));
freezeFunc.call(QJSValueList() << obj);
}
static QJSValue cameraInfoToJSValue(QJSEngine *jsEngine, const QCameraInfo &camera)
{
QJSValue o = jsEngine->newObject();
@@ -184,29 +163,24 @@ static QJSValue cameraInfoToJSValue(QJSEngine *jsEngine, const QCameraInfo &came
return o;
}
QJSValue initGlobalObject(QQmlEngine *qmlEngine, QJSEngine *jsEngine)
QDeclarativeMultimediaGlobal::QDeclarativeMultimediaGlobal(QJSEngine *engine, QObject *parent)
: QObject(parent)
, m_engine(engine)
{
Q_UNUSED(qmlEngine)
QJSValue globalObject = jsEngine->newObject();
// property object defaultCamera
globalObject.setProperty(QStringLiteral("defaultCamera"),
cameraInfoToJSValue(jsEngine, QCameraInfo::defaultCamera()));
// property list<object> availableCameras
QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
QJSValue availableCameras = jsEngine->newArray(cameras.count());
for (int i = 0; i < cameras.count(); ++i)
availableCameras.setProperty(i, cameraInfoToJSValue(jsEngine, cameras.at(i)));
globalObject.setProperty(QStringLiteral("availableCameras"), availableCameras);
// freeze global object to prevent properties to be modified from QML
deepFreeze(jsEngine, globalObject);
return globalObject;
}
QJSValue QDeclarativeMultimediaGlobal::defaultCamera() const
{
return cameraInfoToJSValue(m_engine, QCameraInfo::defaultCamera());
}
QJSValue QDeclarativeMultimediaGlobal::availableCameras() const
{
QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
QJSValue availableCameras = m_engine->newArray(cameras.count());
for (int i = 0; i < cameras.count(); ++i)
availableCameras.setProperty(i, cameraInfoToJSValue(m_engine, cameras.at(i)));
return availableCameras;
}
QT_END_NAMESPACE

View File

@@ -46,12 +46,32 @@
//
#include <QtQml/qqml.h>
#include <QtQml/qjsvalue.h>
QT_BEGIN_NAMESPACE
namespace QDeclarativeMultimedia {
QJSValue initGlobalObject(QQmlEngine *, QJSEngine *);
}
class QDeclarativeMultimediaGlobal : public QObject
{
Q_OBJECT
Q_PROPERTY(QJSValue defaultCamera READ defaultCamera NOTIFY defaultCameraChanged)
Q_PROPERTY(QJSValue availableCameras READ availableCameras NOTIFY availableCamerasChanged)
public:
explicit QDeclarativeMultimediaGlobal(QJSEngine *engine, QObject *parent = 0);
QJSValue defaultCamera() const;
QJSValue availableCameras() const;
Q_SIGNALS:
// Unused at the moment. QCameraInfo doesn't notify when cameras are added or removed,
// but it might change in the future.
void defaultCameraChanged();
void availableCamerasChanged();
private:
QJSEngine *m_engine;
};
QT_END_NAMESPACE

View File

@@ -74,7 +74,7 @@ QDeclarativeTorch::QDeclarativeTorch(QObject *parent)
m_flash = service ? service->requestControl<QCameraFlashControl*>() : 0;
if (m_exposure)
connect(m_exposure, SIGNAL(valueChanged(int)), SLOT(parameterChanged(int)));
connect(m_exposure, SIGNAL(actualValueChanged(int)), SLOT(parameterChanged(int)));
// XXX There's no signal for flash mode changed
}

View File

@@ -37,6 +37,7 @@
#include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>
#include "qmediaobject_p.h"
#include <private/qobject_p.h>
#include "qmediametadata.h"
QT_BEGIN_NAMESPACE
@@ -240,10 +241,9 @@ Version=2
/////////////////////////////////////////////////////////////////////////////////////////////////
class QPlaylistFileParserPrivate : public QObject
class QPlaylistFileParserPrivate : public QObjectPrivate
{
Q_OBJECT
Q_DECLARE_NON_CONST_PUBLIC(QPlaylistFileParser)
Q_DECLARE_PUBLIC(QPlaylistFileParser)
public:
QPlaylistFileParserPrivate()
: m_source(0)
@@ -270,8 +270,6 @@ public:
ParserBase *m_currentParser;
QNetworkAccessManager m_mgr;
QPlaylistFileParser *q_ptr;
private:
void processLine(int startIndex, int length);
};
@@ -292,25 +290,25 @@ void QPlaylistFileParserPrivate::processLine(int startIndex, int length)
switch (m_type) {
case QPlaylistFileParser::UNKNOWN:
emit q->error(QPlaylistFileParser::FormatError, QString(tr("%1 playlist type is unknown")).arg(m_root.toString()));
emit q->error(QPlaylistFileParser::FormatError, QString(QObject::tr("%1 playlist type is unknown")).arg(m_root.toString()));
q->stop();
return;
case QPlaylistFileParser::M3U:
m_currentParser = new M3UParser(this);
m_currentParser = new M3UParser(q);
break;
case QPlaylistFileParser::M3U8:
m_currentParser = new M3UParser(this);
m_currentParser = new M3UParser(q);
m_utf8 = true;
break;
case QPlaylistFileParser::PLS:
m_currentParser = new PLSParser(this);
m_currentParser = new PLSParser(q);
break;
}
Q_ASSERT(m_currentParser);
connect(m_currentParser, SIGNAL(newItem(QVariant)), q, SIGNAL(newItem(QVariant)));
connect(m_currentParser, SIGNAL(finished()), q, SLOT(_q_handleParserFinished()));
connect(m_currentParser, SIGNAL(error(QPlaylistFileParser::ParserError,QString)),
q, SLOT(_q_handleParserError(QPlaylistFileParser::ParserError,QString)));
QObject::connect(m_currentParser, SIGNAL(newItem(QVariant)), q, SIGNAL(newItem(QVariant)));
QObject::connect(m_currentParser, SIGNAL(finished()), q, SLOT(_q_handleParserFinished()));
QObject::connect(m_currentParser, SIGNAL(error(QPlaylistFileParser::ParserError,QString)),
q, SLOT(_q_handleParserError(QPlaylistFileParser::ParserError,QString)));
}
QString line;
@@ -352,7 +350,7 @@ void QPlaylistFileParserPrivate::_q_handleData()
if (m_buffer.length() - processedBytes >= LINE_LIMIT) {
qWarning() << "error parsing playlist["<< m_root << "] with line content >= 4096 bytes.";
emit q->error(QPlaylistFileParser::FormatError, tr("invalid line in playlist file"));
emit q->error(QPlaylistFileParser::FormatError, QObject::tr("invalid line in playlist file"));
q->stop();
return;
}
@@ -400,7 +398,7 @@ void QPlaylistFileParserPrivate::_q_handleParserFinished()
Q_Q(QPlaylistFileParser);
bool isParserValid = (m_currentParser != 0);
if (!isParserValid)
emit q->error(QPlaylistFileParser::FormatNotSupportedError, tr("Empty file provided"));
emit q->error(QPlaylistFileParser::FormatNotSupportedError, QObject::tr("Empty file provided"));
q->stop();
@@ -410,9 +408,9 @@ void QPlaylistFileParserPrivate::_q_handleParserFinished()
QPlaylistFileParser::QPlaylistFileParser(QObject *parent)
:QObject(parent), d_ptr(new QPlaylistFileParserPrivate)
: QObject(*new QPlaylistFileParserPrivate, parent)
{
d_func()->q_ptr = this;
}
QPlaylistFileParser::FileType QPlaylistFileParser::findPlaylistType(const QString& uri, const QString& mime, const void *data, quint32 size)

View File

@@ -84,9 +84,6 @@ Q_SIGNALS:
void finished();
void error(QPlaylistFileParser::ParserError err, const QString& errorMsg);
protected:
QPlaylistFileParserPrivate *d_ptr;
private:
Q_DISABLE_COPY(QPlaylistFileParser)
Q_DECLARE_PRIVATE(QPlaylistFileParser)

View File

@@ -58,25 +58,25 @@ int AVFCameraDeviceControl::deviceCount() const
QString AVFCameraDeviceControl::deviceName(int index) const
{
const QList<QByteArray> &devices = AVFCameraSession::availableCameraDevices();
const QList<AVFCameraInfo> &devices = AVFCameraSession::availableCameraDevices();
if (index < 0 || index >= devices.count())
return QString();
return QString::fromUtf8(devices.at(index));
return QString::fromUtf8(devices.at(index).deviceId);
}
QString AVFCameraDeviceControl::deviceDescription(int index) const
{
const QList<QByteArray> &devices = AVFCameraSession::availableCameraDevices();
const QList<AVFCameraInfo> &devices = AVFCameraSession::availableCameraDevices();
if (index < 0 || index >= devices.count())
return QString();
return AVFCameraSession::cameraDeviceInfo(devices.at(index)).description;
return devices.at(index).description;
}
int AVFCameraDeviceControl::defaultDevice() const
{
return AVFCameraSession::availableCameraDevices().indexOf(AVFCameraSession::defaultCameraDevice());
return AVFCameraSession::defaultCameraIndex();
}
int AVFCameraDeviceControl::selectedDevice() const

View File

@@ -63,18 +63,26 @@ void AVFServicePlugin::release(QMediaService *service)
QByteArray AVFServicePlugin::defaultDevice(const QByteArray &service) const
{
if (service == Q_MEDIASERVICE_CAMERA)
return AVFCameraSession::defaultCameraDevice();
if (service == Q_MEDIASERVICE_CAMERA) {
int i = AVFCameraSession::defaultCameraIndex();
if (i != -1)
return AVFCameraSession::availableCameraDevices().at(i).deviceId;
}
return QByteArray();
}
QList<QByteArray> AVFServicePlugin::devices(const QByteArray &service) const
{
if (service == Q_MEDIASERVICE_CAMERA)
return AVFCameraSession::availableCameraDevices();
QList<QByteArray> devs;
return QList<QByteArray>();
if (service == Q_MEDIASERVICE_CAMERA) {
const QList<AVFCameraInfo> &cameras = AVFCameraSession::availableCameraDevices();
Q_FOREACH (const AVFCameraInfo &info, cameras)
devs.append(info.deviceId);
}
return devs;
}
QString AVFServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device)

View File

@@ -54,6 +54,7 @@ struct AVFCameraInfo
AVFCameraInfo() : position(QCamera::UnspecifiedPosition), orientation(0)
{ }
QByteArray deviceId;
QString description;
QCamera::Position position;
int orientation;
@@ -66,8 +67,8 @@ public:
AVFCameraSession(AVFCameraService *service, QObject *parent = 0);
~AVFCameraSession();
static const QByteArray &defaultCameraDevice();
static const QList<QByteArray> &availableCameraDevices();
static int defaultCameraIndex();
static const QList<AVFCameraInfo> &availableCameraDevices();
static AVFCameraInfo cameraDeviceInfo(const QByteArray &device);
void setVideoOutput(AVFCameraRendererControl *output);
@@ -102,9 +103,8 @@ private:
void applyImageEncoderSettings();
void applyViewfinderSettings();
static QByteArray m_defaultCameraDevice;
static QList<QByteArray> m_cameraDevices;
static QMap<QByteArray, AVFCameraInfo> m_cameraInfo;
static int m_defaultCameraIndex;
static QList<AVFCameraInfo> m_cameraDevices;
AVFCameraService *m_service;
AVFCameraRendererControl *m_videoOutput;

View File

@@ -48,14 +48,14 @@
#include <QtCore/qdatetime.h>
#include <QtCore/qurl.h>
#include <QtCore/qelapsedtimer.h>
#include <QtCore/qdebug.h>
QT_USE_NAMESPACE
QByteArray AVFCameraSession::m_defaultCameraDevice;
QList<QByteArray> AVFCameraSession::m_cameraDevices;
QMap<QByteArray, AVFCameraInfo> AVFCameraSession::m_cameraInfo;
int AVFCameraSession::m_defaultCameraIndex;
QList<AVFCameraInfo> AVFCameraSession::m_cameraDevices;
@interface AVFCameraSessionObserver : NSObject
{
@@ -169,45 +169,55 @@ AVFCameraSession::~AVFCameraSession()
[m_captureSession release];
}
const QByteArray &AVFCameraSession::defaultCameraDevice()
int AVFCameraSession::defaultCameraIndex()
{
if (m_cameraDevices.isEmpty())
updateCameraDevices();
return m_defaultCameraDevice;
updateCameraDevices();
return m_defaultCameraIndex;
}
const QList<QByteArray> &AVFCameraSession::availableCameraDevices()
const QList<AVFCameraInfo> &AVFCameraSession::availableCameraDevices()
{
if (m_cameraDevices.isEmpty())
updateCameraDevices();
updateCameraDevices();
return m_cameraDevices;
}
AVFCameraInfo AVFCameraSession::cameraDeviceInfo(const QByteArray &device)
{
if (m_cameraDevices.isEmpty())
updateCameraDevices();
updateCameraDevices();
return m_cameraInfo.value(device);
Q_FOREACH (const AVFCameraInfo &info, m_cameraDevices) {
if (info.deviceId == device)
return info;
}
return AVFCameraInfo();
}
void AVFCameraSession::updateCameraDevices()
{
m_defaultCameraDevice.clear();
#ifdef Q_OS_IOS
// Cameras can't change dynamically on iOS. Update only once.
if (!m_cameraDevices.isEmpty())
return;
#else
// On OS X, cameras can be added or removed. Update the list every time, but not more than
// once every 500 ms
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
#endif
m_defaultCameraIndex = -1;
m_cameraDevices.clear();
m_cameraInfo.clear();
AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (defaultDevice)
m_defaultCameraDevice = QByteArray([[defaultDevice uniqueID] UTF8String]);
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in videoDevices) {
QByteArray deviceId([[device uniqueID] UTF8String]);
if (defaultDevice && [defaultDevice.uniqueID isEqualToString:device.uniqueID])
m_defaultCameraIndex = m_cameraDevices.count();
AVFCameraInfo info;
info.deviceId = QByteArray([[device uniqueID] UTF8String]);
info.description = QString::fromNSString([device localizedName]);
// There is no API to get the camera sensor orientation, however, cameras are always
@@ -232,9 +242,12 @@ void AVFCameraSession::updateCameraDevices()
break;
}
m_cameraDevices << deviceId;
m_cameraInfo.insert(deviceId, info);
m_cameraDevices.append(info);
}
#ifndef Q_OS_IOS
timer.restart();
#endif
}
void AVFCameraSession::setVideoOutput(AVFCameraRendererControl *output)

View File

@@ -36,6 +36,10 @@
#import <AVFoundation/AVAudioSession.h>
#import <Foundation/Foundation.h>
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
#include <AudioToolbox/AudioToolbox.h>
#endif
QT_BEGIN_NAMESPACE
@interface CoreAudioSessionObserver : NSObject
@@ -71,19 +75,24 @@ QT_BEGIN_NAMESPACE
self->m_sessionManager = sessionManager;
self->m_audioSession = [AVAudioSession sharedInstance];
//Set up observers
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionInterruption:)
name:AVAudioSessionInterruptionNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionMediaServicesWereReset:)
name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionRouteChange:)
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
{
//Set up observers
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionInterruption:)
name:AVAudioSessionInterruptionNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionMediaServicesWereReset:)
name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioSessionRouteChange:)
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
}
return self;
}
@@ -93,15 +102,22 @@ QT_BEGIN_NAMESPACE
#ifdef QT_DEBUG_COREAUDIO
qDebug() << Q_FUNC_INFO;
#endif
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionInterruptionNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
{
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionInterruptionNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionMediaServicesWereResetNotification
object:self->m_audioSession];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVAudioSessionRouteChangeNotification
object:self->m_audioSession];
}
[super dealloc];
}
@@ -261,6 +277,9 @@ bool CoreAudioSessionManager::setCategory(CoreAudioSessionManager::AudioSessionC
targetCategory = AVAudioSessionCategoryAudioProcessing;
break;
case CoreAudioSessionManager::MultiRoute:
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
targetCategory = AVAudioSessionCategoryMultiRoute;
break;
}
@@ -268,9 +287,16 @@ bool CoreAudioSessionManager::setCategory(CoreAudioSessionManager::AudioSessionC
if (targetCategory == nil)
return false;
return [[m_sessionObserver audioSession] setCategory:targetCategory
withOptions:(AVAudioSessionCategoryOptions)options
error:nil];
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
return [[m_sessionObserver audioSession] setCategory:targetCategory error:nil];
} else
#endif
{
return [[m_sessionObserver audioSession] setCategory:targetCategory
withOptions:(AVAudioSessionCategoryOptions)options
error:nil];
}
}
bool CoreAudioSessionManager::setMode(CoreAudioSessionManager::AudioSessionModes mode)
@@ -293,6 +319,9 @@ bool CoreAudioSessionManager::setMode(CoreAudioSessionManager::AudioSessionModes
targetMode = AVAudioSessionModeMeasurement;
break;
case CoreAudioSessionManager::MoviePlayback:
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0)
#endif
targetMode = AVAudioSessionModeMoviePlayback;
break;
}
@@ -321,7 +350,11 @@ CoreAudioSessionManager::AudioSessionCategorys CoreAudioSessionManager::category
localCategory = PlayAndRecord;
} else if (category == AVAudioSessionCategoryAudioProcessing) {
localCategory = AudioProcessing;
} else if (category == AVAudioSessionCategoryMultiRoute) {
} else if (
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0 &&
#endif
category == AVAudioSessionCategoryMultiRoute) {
localCategory = MultiRoute;
}
@@ -343,7 +376,11 @@ CoreAudioSessionManager::AudioSessionModes CoreAudioSessionManager::mode()
localMode = VideoRecording;
} else if (mode == AVAudioSessionModeMeasurement) {
localMode = Measurement;
} else if (mode == AVAudioSessionModeMoviePlayback) {
} else if (
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_6_0 &&
#endif
mode == AVAudioSessionModeMoviePlayback) {
localMode = MoviePlayback;
}
@@ -372,12 +409,32 @@ QList<QByteArray> CoreAudioSessionManager::outputDevices()
float CoreAudioSessionManager::currentIOBufferDuration()
{
return [[m_sessionObserver audioSession] IOBufferDuration];
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
Float32 duration;
UInt32 size = sizeof(duration);
AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareIOBufferDuration, &size, &duration);
return duration;
} else
#endif
{
return [[m_sessionObserver audioSession] IOBufferDuration];
}
}
float CoreAudioSessionManager::preferredSampleRate()
{
return [[m_sessionObserver audioSession] preferredSampleRate];
#if QT_IOS_DEPLOYMENT_TARGET_BELOW(__IPHONE_6_0)
if (QSysInfo::MacintoshVersion < QSysInfo::MV_IOS_6_0) {
Float64 sampleRate;
UInt32 size = sizeof(sampleRate);
AudioSessionGetProperty(kAudioSessionProperty_PreferredHardwareSampleRate, &size, &sampleRate);
return sampleRate;
} else
#endif
{
return [[m_sessionObserver audioSession] preferredSampleRate];
}
}
#ifdef QT_DEBUG_COREAUDIO

View File

@@ -32,7 +32,6 @@
****************************************************************************/
#include <QtCore/qdebug.h>
#include <QWidget>
#include <QFile>
#include <QtConcurrent/QtConcurrentRun>
#include <QtMultimedia/qabstractvideobuffer.h>

View File

@@ -33,6 +33,7 @@
#include <QDebug>
#include <QFile>
#include <qelapsedtimer.h>
#include "dsvideodevicecontrol.h"
#include "dscamerasession.h"
@@ -48,33 +49,37 @@ extern const CLSID CLSID_VideoInputDeviceCategory;
QT_BEGIN_NAMESPACE
Q_GLOBAL_STATIC(QList<DSVideoDeviceInfo>, deviceList)
DSVideoDeviceControl::DSVideoDeviceControl(QObject *parent)
: QVideoDeviceSelectorControl(parent)
{
m_session = qobject_cast<DSCameraSession*>(parent);
enumerateDevices(&m_devices, &m_descriptions);
selected = 0;
}
int DSVideoDeviceControl::deviceCount() const
{
return m_devices.count();
updateDevices();
return deviceList->count();
}
QString DSVideoDeviceControl::deviceName(int index) const
{
if (index >= 0 && index <= m_devices.count())
return QString::fromUtf8(m_devices.at(index).constData());
updateDevices();
if (index >= 0 && index <= deviceList->count())
return QString::fromUtf8(deviceList->at(index).first.constData());
return QString();
}
QString DSVideoDeviceControl::deviceDescription(int index) const
{
if (index >= 0 && index <= m_descriptions.count())
return m_descriptions.at(index);
updateDevices();
if (index >= 0 && index <= deviceList->count())
return deviceList->at(index).second;
return QString();
}
@@ -89,10 +94,34 @@ int DSVideoDeviceControl::selectedDevice() const
return selected;
}
void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringList *descriptions)
void DSVideoDeviceControl::setSelectedDevice(int index)
{
devices->clear();
descriptions->clear();
updateDevices();
if (index >= 0 && index < deviceList->count()) {
if (m_session) {
QString device = deviceList->at(index).first;
if (device.startsWith("ds:"))
device.remove(0,3);
m_session->setDevice(device);
}
selected = index;
}
}
const QList<DSVideoDeviceInfo> &DSVideoDeviceControl::availableDevices()
{
updateDevices();
return *deviceList;
}
void DSVideoDeviceControl::updateDevices()
{
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
deviceList->clear();
ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL;
@@ -116,7 +145,9 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
if (SUCCEEDED(hr)) {
QString output(QString::fromWCharArray(strName));
mallocInterface->Free(strName);
devices->append(output.toUtf8().constData());
DSVideoDeviceInfo devInfo;
devInfo.first = output.toUtf8();
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
@@ -130,7 +161,9 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
}
pPropBag->Release();
}
descriptions->append(output);
devInfo.second = output;
deviceList->append(devInfo);
}
pMoniker->Release();
}
@@ -139,19 +172,8 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
}
pDevEnum->Release();
}
}
void DSVideoDeviceControl::setSelectedDevice(int index)
{
if (index >= 0 && index < m_devices.count()) {
if (m_session) {
QString device = m_devices.at(index);
if (device.startsWith("ds:"))
device.remove(0,3);
m_session->setDevice(device);
}
selected = index;
}
timer.restart();
}
QT_END_NAMESPACE

View File

@@ -42,6 +42,8 @@ class DSCameraSession;
//QTM_USE_NAMESPACE
typedef QPair<QByteArray, QString> DSVideoDeviceInfo;
class DSVideoDeviceControl : public QVideoDeviceSelectorControl
{
Q_OBJECT
@@ -54,17 +56,15 @@ public:
int defaultDevice() const;
int selectedDevice() const;
static void enumerateDevices(QList<QByteArray> *devices, QStringList *descriptions);
static const QList<DSVideoDeviceInfo> &availableDevices();
public Q_SLOTS:
void setSelectedDevice(int index);
private:
static void updateDevices();
DSCameraSession* m_session;
QList<QByteArray> m_devices;
QStringList m_descriptions;
int selected;
};

View File

@@ -39,7 +39,6 @@
#include "dsvideodevicecontrol.h"
#ifdef QMEDIA_DIRECTSHOW_CAMERA
#include <QtCore/QElapsedTimer>
#include <dshow.h>
#include "dscameraservice.h"
#endif
@@ -122,9 +121,9 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices();
return m_defaultCameraDevice;
const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
if (!devs.isEmpty())
return devs.first().first;
}
#endif
@@ -133,52 +132,29 @@ QByteArray DSServicePlugin::defaultDevice(const QByteArray &service) const
QList<QByteArray> DSServicePlugin::devices(const QByteArray &service) const
{
QList<QByteArray> result;
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices();
return m_cameraDevices;
const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
Q_FOREACH (const DSVideoDeviceInfo &info, devs)
result.append(info.first);
}
#endif
return QList<QByteArray>();
return result;
}
QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByteArray &device)
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (service == Q_MEDIASERVICE_CAMERA) {
updateDevices();
for (int i=0; i<m_cameraDevices.count(); i++)
if (m_cameraDevices[i] == device)
return m_cameraDescriptions[i];
const QList<DSVideoDeviceInfo> &devs = DSVideoDeviceControl::availableDevices();
Q_FOREACH (const DSVideoDeviceInfo &info, devs) {
if (info.first == device)
return info.second;
}
}
#endif
return QString();
}
#ifdef QMEDIA_DIRECTSHOW_CAMERA
void DSServicePlugin::updateDevices() const
{
static QElapsedTimer timer;
if (timer.isValid() && timer.elapsed() < 500) // ms
return;
addRefCount();
m_defaultCameraDevice.clear();
DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
if (m_cameraDevices.isEmpty()) {
qWarning() << "No camera devices found";
} else {
m_defaultCameraDevice = m_cameraDevices.first();
}
releaseRefCount();
timer.restart();
}
#endif

View File

@@ -65,15 +65,6 @@ public:
QByteArray defaultDevice(const QByteArray &service) const;
QList<QByteArray> devices(const QByteArray &service) const;
QString deviceDescription(const QByteArray &service, const QByteArray &device);
private:
#ifdef QMEDIA_DIRECTSHOW_CAMERA
void updateDevices() const;
mutable QByteArray m_defaultCameraDevice;
mutable QList<QByteArray> m_cameraDevices;
mutable QStringList m_cameraDescriptions;
#endif
};
#endif // DSSERVICEPLUGIN_H

View File

@@ -160,3 +160,5 @@ void MmRendererMetaDataReaderControl::setMetaData(const MmRendererMetaData &data
if (metaDataAvailable != oldMetaDataAvailable)
emit metaDataAvailableChanged(metaDataAvailable);
}
QT_END_NAMESPACE

View File

@@ -47,6 +47,7 @@
#include <QtCore/QDataStream>
#include <mmsystem.h>
#include "qwindowsaudiodeviceinfo.h"
#include "qwindowsaudioutils.h"
#if defined(Q_CC_MINGW) && !defined(__MINGW64_VERSION_MAJOR)
struct IBaseFilter; // Needed for strmif.h from stock MinGW.
@@ -167,8 +168,7 @@ QString QWindowsAudioDeviceInfo::deviceName() const
QStringList QWindowsAudioDeviceInfo::supportedCodecs()
{
updateLists();
return codecz;
return QStringList() << QStringLiteral("audio/pcm");
}
QList<int> QWindowsAudioDeviceInfo::supportedSampleRates()
@@ -191,8 +191,7 @@ QList<int> QWindowsAudioDeviceInfo::supportedSampleSizes()
QList<QAudioFormat::Endian> QWindowsAudioDeviceInfo::supportedByteOrders()
{
updateLists();
return byteOrderz;
return QList<QAudioFormat::Endian>() << QAudioFormat::LittleEndian;
}
QList<QAudioFormat::SampleType> QWindowsAudioDeviceInfo::supportedSampleTypes()
@@ -213,118 +212,50 @@ void QWindowsAudioDeviceInfo::close()
bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const
{
// Set nearest to closest settings that do work.
// See if what is in settings will work (return value).
bool failed = false;
bool match = false;
// check codec
for( int i = 0; i < codecz.count(); i++) {
if (format.codec() == codecz.at(i))
match = true;
}
if (!match) failed = true;
// check channel
match = false;
if (!failed) {
for (int i = 0; i < channelz.count(); i++) {
if (format.channelCount() == channelz.at(i)) {
match = true;
break;
}
WAVEFORMATEXTENSIBLE wfx;
if (qt_convertFormat(format, &wfx)) {
// query only, do not open device
if (mode == QAudio::AudioOutput) {
return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
} else { // AudioInput
return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
}
if (!match)
failed = true;
}
// check sampleRate
match = false;
if (!failed) {
for (int i = 0; i < sampleRatez.count(); i++) {
if (format.sampleRate() == sampleRatez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check sample size
match = false;
if (!failed) {
for( int i = 0; i < sizez.count(); i++) {
if (format.sampleSize() == sizez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check byte order
match = false;
if (!failed) {
for( int i = 0; i < byteOrderz.count(); i++) {
if (format.byteOrder() == byteOrderz.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
// check sample type
match = false;
if (!failed) {
for( int i = 0; i < typez.count(); i++) {
if (format.sampleType() == typez.at(i)) {
match = true;
break;
}
}
if (!match)
failed = true;
}
if(!failed) {
// settings work
return true;
}
return false;
}
void QWindowsAudioDeviceInfo::updateLists()
{
// redo all lists based on current settings
bool match = false;
if (!sizez.isEmpty())
return;
bool hasCaps = false;
DWORD fmt = 0;
if(mode == QAudio::AudioOutput) {
WAVEOUTCAPS woc;
if (waveOutGetDevCaps(devId, &woc, sizeof(WAVEOUTCAPS)) == MMSYSERR_NOERROR) {
match = true;
hasCaps = true;
fmt = woc.dwFormats;
}
} else {
WAVEINCAPS woc;
if (waveInGetDevCaps(devId, &woc, sizeof(WAVEINCAPS)) == MMSYSERR_NOERROR) {
match = true;
hasCaps = true;
fmt = woc.dwFormats;
}
}
sizez.clear();
sampleRatez.clear();
channelz.clear();
byteOrderz.clear();
typez.clear();
codecz.clear();
if(match) {
if (hasCaps) {
// Check sample size
if ((fmt & WAVE_FORMAT_1M08)
|| (fmt & WAVE_FORMAT_1S08)
|| (fmt & WAVE_FORMAT_2M08)
@@ -334,8 +265,7 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_48M08)
|| (fmt & WAVE_FORMAT_48S08)
|| (fmt & WAVE_FORMAT_96M08)
|| (fmt & WAVE_FORMAT_96S08)
) {
|| (fmt & WAVE_FORMAT_96S08)) {
sizez.append(8);
}
if ((fmt & WAVE_FORMAT_1M16)
@@ -347,10 +277,11 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_48M16)
|| (fmt & WAVE_FORMAT_48S16)
|| (fmt & WAVE_FORMAT_96M16)
|| (fmt & WAVE_FORMAT_96S16)
) {
|| (fmt & WAVE_FORMAT_96S16)) {
sizez.append(16);
}
// Check sample rate
if ((fmt & WAVE_FORMAT_1M08)
|| (fmt & WAVE_FORMAT_1S08)
|| (fmt & WAVE_FORMAT_1M16)
@@ -381,23 +312,81 @@ void QWindowsAudioDeviceInfo::updateLists()
|| (fmt & WAVE_FORMAT_96S16)) {
sampleRatez.append(96000);
}
channelz.append(1);
channelz.append(2);
if (mode == QAudio::AudioOutput) {
channelz.append(4);
channelz.append(6);
channelz.append(8);
}
byteOrderz.append(QAudioFormat::LittleEndian);
// Check channel count
if (fmt & WAVE_FORMAT_1M08
|| fmt & WAVE_FORMAT_1M16
|| fmt & WAVE_FORMAT_2M08
|| fmt & WAVE_FORMAT_2M16
|| fmt & WAVE_FORMAT_4M08
|| fmt & WAVE_FORMAT_4M16
|| fmt & WAVE_FORMAT_48M08
|| fmt & WAVE_FORMAT_48M16
|| fmt & WAVE_FORMAT_96M08
|| fmt & WAVE_FORMAT_96M16) {
channelz.append(1);
}
if (fmt & WAVE_FORMAT_1S08
|| fmt & WAVE_FORMAT_1S16
|| fmt & WAVE_FORMAT_2S08
|| fmt & WAVE_FORMAT_2S16
|| fmt & WAVE_FORMAT_4S08
|| fmt & WAVE_FORMAT_4S16
|| fmt & WAVE_FORMAT_48S08
|| fmt & WAVE_FORMAT_48S16
|| fmt & WAVE_FORMAT_96S08
|| fmt & WAVE_FORMAT_96S16) {
channelz.append(2);
}
typez.append(QAudioFormat::SignedInt);
typez.append(QAudioFormat::UnSignedInt);
codecz.append(QLatin1String("audio/pcm"));
// WAVEOUTCAPS and WAVEINCAPS contains information only for the previously tested parameters.
// WaveOut and WaveInt might actually support more formats, the only way to know is to try
// opening the device with it.
QAudioFormat testFormat;
testFormat.setCodec(QStringLiteral("audio/pcm"));
testFormat.setByteOrder(QAudioFormat::LittleEndian);
testFormat.setSampleType(QAudioFormat::SignedInt);
testFormat.setChannelCount(channelz.first());
testFormat.setSampleRate(sampleRatez.at(sampleRatez.size() / 2));
testFormat.setSampleSize(sizez.last());
const QAudioFormat defaultTestFormat(testFormat);
// Check if float samples are supported
testFormat.setSampleType(QAudioFormat::Float);
testFormat.setSampleSize(32);
if (testSettings(testFormat))
typez.append(QAudioFormat::Float);
// Check channel counts > 2
testFormat = defaultTestFormat;
for (int i = 3; i < 19; ++i) { // <mmreg.h> defines 18 different channels
testFormat.setChannelCount(i);
if (testSettings(testFormat))
channelz.append(i);
}
// Check more sample sizes
testFormat = defaultTestFormat;
QList<int> testSampleSizes = QList<int>() << 24 << 32 << 48 << 64;
Q_FOREACH (int s, testSampleSizes) {
testFormat.setSampleSize(s);
if (testSettings(testFormat))
sizez.append(s);
}
// Check more sample rates
testFormat = defaultTestFormat;
QList<int> testSampleRates = QList<int>() << 8000 << 16000 << 32000 << 88200 << 192000;
Q_FOREACH (int r, testSampleRates) {
testFormat.setSampleRate(r);
if (testSettings(testFormat))
sampleRatez.append(r);
}
std::sort(sampleRatez.begin(), sampleRatez.end());
}
if (sampleRatez.count() > 0)
sampleRatez.prepend(8000);
}
QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)

View File

@@ -57,7 +57,6 @@
QT_BEGIN_NAMESPACE
const unsigned int MAX_SAMPLE_RATES = 5;
const unsigned int SAMPLE_RATES[] = { 8000, 11025, 22050, 44100, 48000 };
@@ -91,15 +90,14 @@ private:
QAudio::Mode mode;
QString device;
quint32 devId;
QAudioFormat nearest;
QList<int> sampleRatez;
QList<int> channelz;
QList<int> sizez;
QList<QAudioFormat::Endian> byteOrderz;
QStringList codecz;
QList<QAudioFormat::SampleType> typez;
};
QT_END_NAMESPACE

View File

@@ -298,18 +298,9 @@ bool QWindowsAudioInput::open()
period_size = 0;
if (!settings.isValid()) {
if (!qt_convertFormat(settings, &wfx)) {
qWarning("QAudioInput: open error, invalid format.");
} else if (settings.channelCount() <= 0) {
qWarning("QAudioInput: open error, invalid number of channels (%d).",
settings.channelCount());
} else if (settings.sampleSize() <= 0) {
qWarning("QAudioInput: open error, invalid sample size (%d).",
settings.sampleSize());
} else if (settings.sampleRate() < 8000 || settings.sampleRate() > 96000) {
qWarning("QAudioInput: open error, sample rate out of range (%d).", settings.sampleRate());
} else if (buffer_size == 0) {
buffer_size
= (settings.sampleRate()
* settings.channelCount()
@@ -329,20 +320,12 @@ bool QWindowsAudioInput::open()
timeStamp.restart();
elapsedTimeOffset = 0;
wfx.nSamplesPerSec = settings.sampleRate();
wfx.wBitsPerSample = settings.sampleSize();
wfx.nChannels = settings.channelCount();
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_PCM;
wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
QDataStream ds(&m_device, QIODevice::ReadOnly);
quint32 deviceId;
ds >> deviceId;
if (waveInOpen(&hWaveIn, UINT_PTR(deviceId), &wfx,
if (waveInOpen(&hWaveIn, UINT_PTR(deviceId), &wfx.Format,
(DWORD_PTR)&waveInProc,
(DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {

View File

@@ -45,8 +45,7 @@
#ifndef QWINDOWSAUDIOINPUT_H
#define QWINDOWSAUDIOINPUT_H
#include <QtCore/qt_windows.h>
#include <mmsystem.h>
#include "qwindowsaudioutils.h"
#include <QtCore/qfile.h>
#include <QtCore/qdebug.h>
@@ -121,7 +120,7 @@ private:
qint64 totalTimeValue;
bool pullMode;
bool resuming;
WAVEFORMATEX wfx;
WAVEFORMATEXTENSIBLE wfx;
HWAVEIN hWaveIn;
MMRESULT result;
WAVEHDR* waveBlocks;

View File

@@ -43,56 +43,11 @@
//
#include "qwindowsaudiooutput.h"
#include "qwindowsaudiodeviceinfo.h"
#include "qwindowsaudioutils.h"
#include <QtEndian>
#include <QtCore/QDataStream>
#ifndef SPEAKER_FRONT_LEFT
#define SPEAKER_FRONT_LEFT 0x00000001
#define SPEAKER_FRONT_RIGHT 0x00000002
#define SPEAKER_FRONT_CENTER 0x00000004
#define SPEAKER_LOW_FREQUENCY 0x00000008
#define SPEAKER_BACK_LEFT 0x00000010
#define SPEAKER_BACK_RIGHT 0x00000020
#define SPEAKER_FRONT_LEFT_OF_CENTER 0x00000040
#define SPEAKER_FRONT_RIGHT_OF_CENTER 0x00000080
#define SPEAKER_BACK_CENTER 0x00000100
#define SPEAKER_SIDE_LEFT 0x00000200
#define SPEAKER_SIDE_RIGHT 0x00000400
#define SPEAKER_TOP_CENTER 0x00000800
#define SPEAKER_TOP_FRONT_LEFT 0x00001000
#define SPEAKER_TOP_FRONT_CENTER 0x00002000
#define SPEAKER_TOP_FRONT_RIGHT 0x00004000
#define SPEAKER_TOP_BACK_LEFT 0x00008000
#define SPEAKER_TOP_BACK_CENTER 0x00010000
#define SPEAKER_TOP_BACK_RIGHT 0x00020000
#define SPEAKER_RESERVED 0x7FFC0000
#define SPEAKER_ALL 0x80000000
#endif
#ifndef _WAVEFORMATEXTENSIBLE_
#define _WAVEFORMATEXTENSIBLE_
typedef struct
{
WAVEFORMATEX Format; // Base WAVEFORMATEX data
union
{
WORD wValidBitsPerSample; // Valid bits in each sample container
WORD wSamplesPerBlock; // Samples per block of audio data; valid
// if wBitsPerSample=0 (but rarely used).
WORD wReserved; // Zero if neither case above applies.
} Samples;
DWORD dwChannelMask; // Positions of the audio channels
GUID SubFormat; // Format identifier GUID
} WAVEFORMATEXTENSIBLE, *PWAVEFORMATEXTENSIBLE, *LPPWAVEFORMATEXTENSIBLE;
typedef const WAVEFORMATEXTENSIBLE* LPCWAVEFORMATEXTENSIBLE;
#endif
#if !defined(WAVE_FORMAT_EXTENSIBLE)
#define WAVE_FORMAT_EXTENSIBLE 0xFFFE
#endif
//#define DEBUG_AUDIO 1
QT_BEGIN_NAMESPACE
@@ -265,16 +220,8 @@ bool QWindowsAudioOutput::open()
period_size = 0;
if (!settings.isValid()) {
if (!qt_convertFormat(settings, &wfx)) {
qWarning("QAudioOutput: open error, invalid format.");
} else if (settings.channelCount() <= 0) {
qWarning("QAudioOutput: open error, invalid number of channels (%d).",
settings.channelCount());
} else if (settings.sampleSize() <= 0) {
qWarning("QAudioOutput: open error, invalid sample size (%d).",
settings.sampleSize());
} else if (settings.sampleRate() < 8000 || settings.sampleRate() > 96000) {
qWarning("QAudioOutput: open error, sample rate out of range (%d).", settings.sampleRate());
} else if (buffer_size == 0) {
// Default buffer size, 200ms, default period size is 40ms
buffer_size
@@ -308,67 +255,19 @@ bool QWindowsAudioOutput::open()
timeStamp.restart();
elapsedTimeOffset = 0;
wfx.nSamplesPerSec = settings.sampleRate();
wfx.wBitsPerSample = settings.sampleSize();
wfx.nChannels = settings.channelCount();
wfx.cbSize = 0;
bool surround = false;
if (settings.channelCount() > 2)
surround = true;
wfx.wFormatTag = WAVE_FORMAT_PCM;
wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
QDataStream ds(&m_device, QIODevice::ReadOnly);
quint32 deviceId;
ds >> deviceId;
if (!surround) {
if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfx,
if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfx.Format,
(DWORD_PTR)&waveOutProc,
(DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioOutput: open error");
return false;
}
} else {
WAVEFORMATEXTENSIBLE wfex;
wfex.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
wfex.Format.nChannels = settings.channelCount();
wfex.Format.wBitsPerSample = settings.sampleSize();
wfex.Format.nSamplesPerSec = settings.sampleRate();
wfex.Format.nBlockAlign = wfex.Format.nChannels*wfex.Format.wBitsPerSample/8;
wfex.Format.nAvgBytesPerSec=wfex.Format.nSamplesPerSec*wfex.Format.nBlockAlign;
wfex.Samples.wValidBitsPerSample=wfex.Format.wBitsPerSample;
static const GUID _KSDATAFORMAT_SUBTYPE_PCM = {
0x00000001, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
wfex.SubFormat=_KSDATAFORMAT_SUBTYPE_PCM;
wfex.Format.cbSize=22;
wfex.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT;
if (settings.channelCount() >= 4)
wfex.dwChannelMask |= SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT;
if (settings.channelCount() >= 6)
wfex.dwChannelMask |= SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY;
if (settings.channelCount() == 8)
wfex.dwChannelMask |= SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT;
if (waveOutOpen(&hWaveOut, UINT_PTR(deviceId), &wfex.Format,
(DWORD_PTR)&waveOutProc,
(DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioOutput: open error");
return false;
}
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioOutput: open error");
return false;
}
totalTimeValue = 0;

View File

@@ -45,8 +45,7 @@
#ifndef QWINDOWSAUDIOOUTPUT_H
#define QWINDOWSAUDIOOUTPUT_H
#include <QtCore/qt_windows.h>
#include <mmsystem.h>
#include "qwindowsaudioutils.h"
#include <QtCore/qdebug.h>
#include <QtCore/qtimer.h>
@@ -132,7 +131,7 @@ private:
bool open();
void close();
WAVEFORMATEX wfx;
WAVEFORMATEXTENSIBLE wfx;
HWAVEOUT hWaveOut;
MMRESULT result;
WAVEHDR header;

View File

@@ -0,0 +1,111 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qwindowsaudioutils.h"
#ifndef SPEAKER_FRONT_LEFT
#define SPEAKER_FRONT_LEFT 0x00000001
#define SPEAKER_FRONT_RIGHT 0x00000002
#define SPEAKER_FRONT_CENTER 0x00000004
#define SPEAKER_LOW_FREQUENCY 0x00000008
#define SPEAKER_BACK_LEFT 0x00000010
#define SPEAKER_BACK_RIGHT 0x00000020
#define SPEAKER_FRONT_LEFT_OF_CENTER 0x00000040
#define SPEAKER_FRONT_RIGHT_OF_CENTER 0x00000080
#define SPEAKER_BACK_CENTER 0x00000100
#define SPEAKER_SIDE_LEFT 0x00000200
#define SPEAKER_SIDE_RIGHT 0x00000400
#define SPEAKER_TOP_CENTER 0x00000800
#define SPEAKER_TOP_FRONT_LEFT 0x00001000
#define SPEAKER_TOP_FRONT_CENTER 0x00002000
#define SPEAKER_TOP_FRONT_RIGHT 0x00004000
#define SPEAKER_TOP_BACK_LEFT 0x00008000
#define SPEAKER_TOP_BACK_CENTER 0x00010000
#define SPEAKER_TOP_BACK_RIGHT 0x00020000
#define SPEAKER_RESERVED 0x7FFC0000
#define SPEAKER_ALL 0x80000000
#endif
#ifndef WAVE_FORMAT_EXTENSIBLE
#define WAVE_FORMAT_EXTENSIBLE 0xFFFE
#endif
#ifndef WAVE_FORMAT_IEEE_FLOAT
#define WAVE_FORMAT_IEEE_FLOAT 0x0003
#endif
static const GUID _KSDATAFORMAT_SUBTYPE_PCM = {
0x00000001, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
static const GUID _KSDATAFORMAT_SUBTYPE_IEEE_FLOAT = {
0x00000003, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
QT_BEGIN_NAMESPACE
/// Translates a QAudioFormat into the WAVEFORMATEXTENSIBLE layout expected
/// by waveInOpen()/waveOutOpen().
///
/// Returns false (leaving *wfx untouched) when the format cannot be
/// expressed by the waveform API: null destination, invalid format,
/// non-PCM codec, non-positive rate/channels/sample size, or big-endian
/// samples. Returns true after filling *wfx.
bool qt_convertFormat(const QAudioFormat &format, WAVEFORMATEXTENSIBLE *wfx)
{
    // Guard clause: everything the wave API cannot represent is rejected
    // up front, so the fill-in code below can assume sane values.
    const bool representable = wfx
            && format.isValid()
            && format.codec() == QStringLiteral("audio/pcm")
            && format.sampleRate() > 0
            && format.channelCount() > 0
            && format.sampleSize() > 0
            && format.byteOrder() == QAudioFormat::LittleEndian;
    if (!representable)
        return false;

    const int channelCount = format.channelCount();
    const int bitsPerSample = format.sampleSize();

    wfx->Format.nSamplesPerSec = format.sampleRate();
    wfx->Format.wBitsPerSample = bitsPerSample;
    wfx->Samples.wValidBitsPerSample = bitsPerSample; // container fully used
    wfx->Format.nChannels = channelCount;
    wfx->Format.nBlockAlign = (bitsPerSample / 8) * channelCount;
    wfx->Format.nAvgBytesPerSec = wfx->Format.nBlockAlign * wfx->Format.nSamplesPerSec;
    wfx->Format.cbSize = 0;

    // Pick the tag/subformat pair matching the sample representation.
    const bool wantFloat = (format.sampleType() == QAudioFormat::Float);
    if (wantFloat) {
        wfx->Format.wFormatTag = WAVE_FORMAT_IEEE_FLOAT;
        wfx->SubFormat = _KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
    } else {
        wfx->Format.wFormatTag = WAVE_FORMAT_PCM;
        wfx->SubFormat = _KSDATAFORMAT_SUBTYPE_PCM;
    }

    // More than stereo requires the extensible header: switch the tag,
    // announce the 22 extra bytes, and mark the first N speaker positions.
    if (channelCount > 2) {
        wfx->Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
        wfx->Format.cbSize = 22;
        wfx->dwChannelMask = 0xFFFFFFFF >> (32 - channelCount);
    }

    return true;
}
QT_END_NAMESPACE

View File

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QWINDOWSAUDIOUTILS_H
#define QWINDOWSAUDIOUTILS_H
#include <qaudioformat.h>
#include <QtCore/qt_windows.h>
#include <mmsystem.h>
#ifndef _WAVEFORMATEXTENSIBLE_
#define _WAVEFORMATEXTENSIBLE_
// Fallback definition of WAVEFORMATEXTENSIBLE, presumably for toolchains
// (e.g. older MinGW) whose <mmreg.h> does not provide it — TODO confirm.
// The member order, types and the anonymous union must stay byte-identical
// to the Windows SDK layout: this struct is passed directly to
// waveInOpen()/waveOutOpen(), so any deviation breaks the ABI.
typedef struct
{
WAVEFORMATEX Format;          // Base WAVEFORMATEX data
union
{
WORD wValidBitsPerSample; // Valid bits in each sample container
WORD wSamplesPerBlock;    // Samples per block of audio data; valid
// if wBitsPerSample=0 (but rarely used).
WORD wReserved;           // Zero if neither case above applies.
} Samples;
DWORD dwChannelMask;          // Positions of the audio channels
GUID SubFormat;               // Format identifier GUID
} WAVEFORMATEXTENSIBLE, *PWAVEFORMATEXTENSIBLE, *LPPWAVEFORMATEXTENSIBLE;
typedef const WAVEFORMATEXTENSIBLE* LPCWAVEFORMATEXTENSIBLE;
#endif
QT_BEGIN_NAMESPACE
bool qt_convertFormat(const QAudioFormat &format, WAVEFORMATEXTENSIBLE *wfx);
QT_END_NAMESPACE
#endif // QWINDOWSAUDIOUTILS_H

View File

@@ -12,13 +12,15 @@ HEADERS += \
qwindowsaudioplugin.h \
qwindowsaudiodeviceinfo.h \
qwindowsaudioinput.h \
qwindowsaudiooutput.h
qwindowsaudiooutput.h \
qwindowsaudioutils.h
SOURCES += \
qwindowsaudioplugin.cpp \
qwindowsaudiodeviceinfo.cpp \
qwindowsaudioinput.cpp \
qwindowsaudiooutput.cpp
qwindowsaudiooutput.cpp \
qwindowsaudioutils.cpp
OTHER_FILES += \
windowsaudio.json

View File

@@ -195,17 +195,6 @@ void MFAudioDecoderControl::handleMediaSourceReady()
if (mediaType) {
m_sourceOutputFormat = m_audioFormat;
QAudioFormat af = m_audioFormat;
GUID subType;
if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subType))) {
if (subType == MFAudioFormat_Float) {
m_sourceOutputFormat.setSampleType(QAudioFormat::Float);
} else {
m_sourceOutputFormat.setSampleType(QAudioFormat::SignedInt);
}
}
if (m_sourceOutputFormat.sampleType() != QAudioFormat::Float) {
m_sourceOutputFormat.setByteOrder(QAudioFormat::LittleEndian);
}
UINT32 val = 0;
if (SUCCEEDED(mediaType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &val))) {
@@ -218,6 +207,20 @@ void MFAudioDecoderControl::handleMediaSourceReady()
m_sourceOutputFormat.setSampleSize(int(val));
}
GUID subType;
if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subType))) {
if (subType == MFAudioFormat_Float) {
m_sourceOutputFormat.setSampleType(QAudioFormat::Float);
} else if (m_sourceOutputFormat.sampleSize() == 8) {
m_sourceOutputFormat.setSampleType(QAudioFormat::UnSignedInt);
} else {
m_sourceOutputFormat.setSampleType(QAudioFormat::SignedInt);
}
}
if (m_sourceOutputFormat.sampleType() != QAudioFormat::Float) {
m_sourceOutputFormat.setByteOrder(QAudioFormat::LittleEndian);
}
if (m_audioFormat.sampleType() != QAudioFormat::Float
&& m_audioFormat.sampleType() != QAudioFormat::SignedInt) {
af.setSampleType(m_sourceOutputFormat.sampleType());

View File

@@ -50,7 +50,7 @@
MFPlayerService::MFPlayerService(QObject *parent)
: QMediaService(parent)
, m_session(0)
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
, m_videoWindowControl(0)
#endif
, m_videoRendererControl(0)
@@ -65,7 +65,7 @@ MFPlayerService::~MFPlayerService()
{
m_session->close();
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
if (m_videoWindowControl)
delete m_videoWindowControl;
#endif

View File

@@ -43,7 +43,7 @@
#include <QtCore/qbuffer.h>
#include "mfplayercontrol.h"
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
#include "evr9videowindowcontrol.h"
#endif
#include "mfvideorenderercontrol.h"
@@ -140,7 +140,7 @@ void MFPlayerSession::close()
if (m_playerService->videoRendererControl()) {
m_playerService->videoRendererControl()->releaseActivate();
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
} else if (m_playerService->videoWindowControl()) {
m_playerService->videoWindowControl()->releaseActivate();
#endif
@@ -404,7 +404,7 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(IMFStreamDescriptor *streamDesc,
mediaType = Video;
if (m_playerService->videoRendererControl()) {
activate = m_playerService->videoRendererControl()->createActivate();
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
} else if (m_playerService->videoWindowControl()) {
activate = m_playerService->videoWindowControl()->createActivate();
#endif
@@ -556,7 +556,10 @@ QAudioFormat MFPlayerSession::audioFormatForMFMediaType(IMFMediaType *mediaType)
format.setSampleSize(wfx->wBitsPerSample);
format.setCodec("audio/pcm");
format.setByteOrder(QAudioFormat::LittleEndian);
format.setSampleType(QAudioFormat::SignedInt);
if (format.sampleSize() == 8)
format.setSampleType(QAudioFormat::UnSignedInt);
else
format.setSampleType(QAudioFormat::SignedInt);
CoTaskMemFree(wfx);
return format;
@@ -1577,7 +1580,7 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
}
updatePendingCommands(CmdStart);
#ifndef Q_WS_SIMULATOR
#if defined(HAVE_WIDGETS) && !defined(Q_WS_SIMULATOR)
// playback started, we can now set again the procAmpValues if they have been
// changed previously (these are lost when loading a new media)
if (m_playerService->videoWindowControl()) {
@@ -1721,10 +1724,17 @@ void MFPlayerSession::updatePendingCommands(Command command)
if (m_state.command != command || m_pendingState == NoPending)
return;
// The current pending command has completed.
// Seek while paused completed
if (m_pendingState == SeekPending && m_state.prevCmd == CmdPause) {
m_pendingState = NoPending;
m_state.setCommand(CmdPause);
// A seek operation actually restarts playback. If scrubbing is possible, playback rate
// is set to 0.0 at this point and we just need to reset the current state to Pause.
// If scrubbing is not possible, the playback rate was not changed and we explicitly need
// to re-pause playback.
if (!canScrub())
pause();
else
m_state.setCommand(CmdPause);
}
m_pendingState = NoPending;

View File

@@ -813,7 +813,7 @@ namespace
case QVideoFrame::Format_RGB32:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
break;
case QVideoFrame::Format_RGB24:
case QVideoFrame::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
break;
case QVideoFrame::Format_RGB565:
@@ -842,8 +842,11 @@ namespace
mediaType->Release();
continue;
}
m_pixelFormats.push_back(format);
m_mediaTypes.push_back(mediaType);
// QAbstractVideoSurface::supportedPixelFormats() returns formats in descending
// order of preference, while IMFMediaTypeHandler is supposed to return supported
// formats in ascending order of preference. We need to reverse the list.
m_pixelFormats.prepend(format);
m_mediaTypes.prepend(mediaType);
}
}
@@ -1082,6 +1085,7 @@ namespace
return format.frameWidth() * 4;
// 24 bpp packed formats.
case QVideoFrame::Format_RGB24:
case QVideoFrame::Format_BGR24:
return PAD_TO_DWORD(format.frameWidth() * 3);
// 16 bpp packed formats.
case QVideoFrame::Format_RGB565:

View File

@@ -46,11 +46,11 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_RGB::supportedPixelFormats(
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == QAbstractVideoBuffer::NoHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32);
pixelFormats.append(QVideoFrame::Format_ARGB32);
pixelFormats.append(QVideoFrame::Format_BGR32);
pixelFormats.append(QVideoFrame::Format_BGRA32);
pixelFormats.append(QVideoFrame::Format_RGB565);
}
return pixelFormats;