Merge remote-tracking branch 'origin/5.3' into dev

Change-Id: If8e5050901a320f1ff692e842f173c0062bed5fe
Author: Frederik Gladhorn
Date: 2014-07-22 20:23:12 +02:00
34 changed files with 1314 additions and 1947 deletions

View File

@@ -63,6 +63,9 @@ QAndroidCameraExposureControl::QAndroidCameraExposureControl(QAndroidCameraSessi
 bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter parameter) const
 {
+    if (!m_session->camera())
+        return false;
+
     switch (parameter) {
     case QCameraExposureControl::ISO:
         return false;
@@ -71,7 +74,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
     case QCameraExposureControl::ShutterSpeed:
         return false;
     case QCameraExposureControl::ExposureCompensation:
-        return true;
+        return !m_supportedExposureCompensations.isEmpty();
     case QCameraExposureControl::FlashPower:
         return false;
     case QCameraExposureControl::FlashCompensation:
@@ -81,7 +84,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
     case QCameraExposureControl::SpotMeteringPoint:
         return false;
     case QCameraExposureControl::ExposureMode:
-        return true;
+        return !m_supportedExposureModes.isEmpty();
    case QCameraExposureControl::MeteringMode:
        return false;
    default:
@@ -127,27 +130,41 @@ QVariant QAndroidCameraExposureControl::actualValue(ExposureParameter parameter)
 bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)
 {
-    if (!m_session->camera() || !value.isValid())
+    if (!value.isValid())
         return false;
 
     if (parameter == QCameraExposureControl::ExposureCompensation) {
-        m_requestedExposureCompensation = value.toReal();
-        emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
+        qreal expComp = value.toReal();
+        if (!qFuzzyCompare(m_requestedExposureCompensation, expComp)) {
+            m_requestedExposureCompensation = expComp;
+            emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
+        }
+
+        if (!m_session->camera())
+            return true;
 
         int expCompIndex = qRound(m_requestedExposureCompensation / m_exposureCompensationStep);
         if (expCompIndex >= m_minExposureCompensationIndex
                 && expCompIndex <= m_maxExposureCompensationIndex) {
+            qreal comp = expCompIndex * m_exposureCompensationStep;
             m_session->camera()->setExposureCompensation(expCompIndex);
-            m_actualExposureCompensation = expCompIndex * m_exposureCompensationStep;
-            emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
+            if (!qFuzzyCompare(m_actualExposureCompensation, comp)) {
+                m_actualExposureCompensation = expCompIndex * m_exposureCompensationStep;
+                emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
+            }
 
             return true;
         }
     } else if (parameter == QCameraExposureControl::ExposureMode) {
-        m_requestedExposureMode = value.value<QCameraExposure::ExposureMode>();
-        emit requestedValueChanged(QCameraExposureControl::ExposureMode);
+        QCameraExposure::ExposureMode expMode = value.value<QCameraExposure::ExposureMode>();
+        if (m_requestedExposureMode != expMode) {
+            m_requestedExposureMode = expMode;
+            emit requestedValueChanged(QCameraExposureControl::ExposureMode);
+        }
+
+        if (!m_session->camera())
+            return true;
 
         if (!m_supportedExposureModes.isEmpty()) {
             m_actualExposureMode = m_requestedExposureMode;
@@ -190,38 +207,39 @@ bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const
 void QAndroidCameraExposureControl::onCameraOpened()
 {
-    m_requestedExposureCompensation = m_actualExposureCompensation = 0.0;
-    m_requestedExposureMode = m_actualExposureMode = QCameraExposure::ExposureAuto;
-    emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
-    emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
-    emit requestedValueChanged(QCameraExposureControl::ExposureMode);
-    emit actualValueChanged(QCameraExposureControl::ExposureMode);
+    m_supportedExposureCompensations.clear();
     m_minExposureCompensationIndex = m_session->camera()->getMinExposureCompensation();
     m_maxExposureCompensationIndex = m_session->camera()->getMaxExposureCompensation();
     m_exposureCompensationStep = m_session->camera()->getExposureCompensationStep();
-    for (int i = m_minExposureCompensationIndex; i <= m_maxExposureCompensationIndex; ++i)
-        m_supportedExposureCompensations.append(i * m_exposureCompensationStep);
-    emit parameterRangeChanged(QCameraExposureControl::ExposureCompensation);
+    if (m_minExposureCompensationIndex != 0 || m_maxExposureCompensationIndex != 0) {
+        for (int i = m_minExposureCompensationIndex; i <= m_maxExposureCompensationIndex; ++i)
+            m_supportedExposureCompensations.append(i * m_exposureCompensationStep);
+        emit parameterRangeChanged(QCameraExposureControl::ExposureCompensation);
+    }
 
     m_supportedExposureModes.clear();
     QStringList sceneModes = m_session->camera()->getSupportedSceneModes();
-    for (int i = 0; i < sceneModes.size(); ++i) {
-        const QString &sceneMode = sceneModes.at(i);
-        if (sceneMode == QLatin1String("auto"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureAuto);
-        else if (sceneMode == QLatin1String("beach"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureBeach);
-        else if (sceneMode == QLatin1String("night"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureNight);
-        else if (sceneMode == QLatin1String("portrait"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposurePortrait);
-        else if (sceneMode == QLatin1String("snow"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSnow);
-        else if (sceneMode == QLatin1String("sports"))
-            m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSports);
+    if (!sceneModes.isEmpty()) {
+        for (int i = 0; i < sceneModes.size(); ++i) {
+            const QString &sceneMode = sceneModes.at(i);
+            if (sceneMode == QLatin1String("auto"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureAuto);
+            else if (sceneMode == QLatin1String("beach"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureBeach);
+            else if (sceneMode == QLatin1String("night"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureNight);
+            else if (sceneMode == QLatin1String("portrait"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposurePortrait);
+            else if (sceneMode == QLatin1String("snow"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSnow);
+            else if (sceneMode == QLatin1String("sports"))
+                m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSports);
+        }
+        emit parameterRangeChanged(QCameraExposureControl::ExposureMode);
     }
-    emit parameterRangeChanged(QCameraExposureControl::ExposureMode);
+
+    setValue(QCameraExposureControl::ExposureCompensation, QVariant::fromValue(m_requestedExposureCompensation));
+    setValue(QCameraExposureControl::ExposureMode, QVariant::fromValue(m_requestedExposureMode));
 }
 
 QT_END_NAMESPACE

View File

@@ -62,7 +62,12 @@ QCameraExposure::FlashModes QAndroidCameraFlashControl::flashMode() const
 void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
 {
-    if (m_flashMode == mode || !m_session->camera() || !isFlashModeSupported(mode))
+    if (!m_session->camera()) {
+        m_flashMode = mode;
+        return;
+    }
+
+    if (!isFlashModeSupported(mode))
         return;
 
     // if torch was enabled, it first needs to be turned off before setting another mode
@@ -88,7 +93,7 @@ void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
 bool QAndroidCameraFlashControl::isFlashModeSupported(QCameraExposure::FlashModes mode) const
 {
-    return m_supportedFlashModes.contains(mode);
+    return m_session->camera() ? m_supportedFlashModes.contains(mode) : false;
 }
 
 bool QAndroidCameraFlashControl::isFlashReady() const
@@ -115,6 +120,11 @@ void QAndroidCameraFlashControl::onCameraOpened()
         else if (flashMode == QLatin1String("torch"))
             m_supportedFlashModes << QCameraExposure::FlashVideoLight;
     }
+
+    if (!m_supportedFlashModes.contains(m_flashMode))
+        m_flashMode = QCameraExposure::FlashOff;
+
+    setFlashMode(m_flashMode);
 }
 
 QT_END_NAMESPACE

View File

@@ -80,42 +80,45 @@ QCameraFocus::FocusModes QAndroidCameraFocusControl::focusMode() const
 void QAndroidCameraFocusControl::setFocusMode(QCameraFocus::FocusModes mode)
 {
-    if (m_focusMode == mode || !m_session->camera() || !isFocusModeSupported(mode))
+    if (!m_session->camera()) {
+        setFocusModeHelper(mode);
         return;
+    }
 
-    QString focusMode = QLatin1String("fixed");
-
-    if (mode.testFlag(QCameraFocus::HyperfocalFocus)) {
-        focusMode = QLatin1String("edof");
-    } else if (mode.testFlag(QCameraFocus::ManualFocus)) {
-        focusMode = QLatin1String("fixed");
-    } else if (mode.testFlag(QCameraFocus::AutoFocus)) {
-        focusMode = QLatin1String("auto");
-    } else if (mode.testFlag(QCameraFocus::MacroFocus)) {
-        focusMode = QLatin1String("macro");
-    } else if (mode.testFlag(QCameraFocus::ContinuousFocus)) {
-        if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
-                || !m_continuousPictureFocusSupported) {
-            focusMode = QLatin1String("continuous-video");
-        } else {
-            focusMode = QLatin1String("continuous-picture");
-        }
-    } else if (mode.testFlag(QCameraFocus::InfinityFocus)) {
-        focusMode = QLatin1String("infinity");
-    }
-
-    m_session->camera()->setFocusMode(focusMode);
-
-    // reset focus position
-    m_session->camera()->cancelAutoFocus();
-
-    m_focusMode = mode;
-    emit focusModeChanged(m_focusMode);
+    if (isFocusModeSupported(mode)) {
+        QString focusMode = QLatin1String("fixed");
+
+        if (mode.testFlag(QCameraFocus::HyperfocalFocus)) {
+            focusMode = QLatin1String("edof");
+        } else if (mode.testFlag(QCameraFocus::ManualFocus)) {
+            focusMode = QLatin1String("fixed");
+        } else if (mode.testFlag(QCameraFocus::AutoFocus)) {
+            focusMode = QLatin1String("auto");
+        } else if (mode.testFlag(QCameraFocus::MacroFocus)) {
+            focusMode = QLatin1String("macro");
+        } else if (mode.testFlag(QCameraFocus::ContinuousFocus)) {
+            if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
+                    || !m_continuousPictureFocusSupported) {
+                focusMode = QLatin1String("continuous-video");
+            } else {
+                focusMode = QLatin1String("continuous-picture");
+            }
+        } else if (mode.testFlag(QCameraFocus::InfinityFocus)) {
+            focusMode = QLatin1String("infinity");
+        }
+
+        m_session->camera()->setFocusMode(focusMode);
+
+        // reset focus position
+        m_session->camera()->cancelAutoFocus();
+
+        setFocusModeHelper(mode);
+    }
 }
 
 bool QAndroidCameraFocusControl::isFocusModeSupported(QCameraFocus::FocusModes mode) const
 {
-    return m_supportedFocusModes.contains(mode);
+    return m_session->camera() ? m_supportedFocusModes.contains(mode) : false;
 }
 
 QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
@@ -125,29 +128,31 @@ QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
 void QAndroidCameraFocusControl::setFocusPointMode(QCameraFocus::FocusPointMode mode)
 {
-    if (!m_session->camera() || m_focusPointMode == mode || !isFocusPointModeSupported(mode))
+    if (!m_session->camera()) {
+        setFocusPointModeHelper(mode);
         return;
+    }
 
-    m_focusPointMode = mode;
-
-    if (mode == QCameraFocus::FocusPointCustom) {
-        m_actualFocusPoint = m_customFocusPoint;
-    } else {
-        // FocusPointAuto | FocusPointCenter
-        // note: there is no way to know the actual focus point in FocusPointAuto mode,
-        // so just report the focus point to be at the center of the frame
-        m_actualFocusPoint = QPointF(0.5, 0.5);
-    }
+    if (isFocusPointModeSupported(mode)) {
+        if (mode == QCameraFocus::FocusPointCustom) {
+            m_actualFocusPoint = m_customFocusPoint;
+        } else {
+            // FocusPointAuto | FocusPointCenter
+            // note: there is no way to know the actual focus point in FocusPointAuto mode,
+            // so just report the focus point to be at the center of the frame
+            m_actualFocusPoint = QPointF(0.5, 0.5);
+        }
 
-    updateFocusZones();
-    setCameraFocusArea();
+        setFocusPointModeHelper(mode);
 
-    emit focusPointModeChanged(mode);
+        updateFocusZones();
+        setCameraFocusArea();
+    }
 }
 
 bool QAndroidCameraFocusControl::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode) const
 {
-    return m_supportedFocusPointModes.contains(mode);
+    return m_session->camera() ? m_supportedFocusPointModes.contains(mode) : false;
 }
 
 QPointF QAndroidCameraFocusControl::customFocusPoint() const
@@ -157,13 +162,12 @@ QPointF QAndroidCameraFocusControl::customFocusPoint() const
 void QAndroidCameraFocusControl::setCustomFocusPoint(const QPointF &point)
 {
-    if (m_customFocusPoint == point)
-        return;
-
-    m_customFocusPoint = point;
-    emit customFocusPointChanged(m_customFocusPoint);
+    if (m_customFocusPoint != point) {
+        m_customFocusPoint = point;
+        emit customFocusPointChanged(m_customFocusPoint);
+    }
 
-    if (m_focusPointMode == QCameraFocus::FocusPointCustom) {
+    if (m_session->camera() && m_focusPointMode == QCameraFocus::FocusPointCustom) {
         m_actualFocusPoint = m_customFocusPoint;
         updateFocusZones();
         setCameraFocusArea();
@@ -187,12 +191,7 @@ void QAndroidCameraFocusControl::onCameraOpened()
     m_supportedFocusModes.clear();
     m_continuousPictureFocusSupported = false;
     m_continuousVideoFocusSupported = false;
-    m_focusPointMode = QCameraFocus::FocusPointAuto;
-    m_actualFocusPoint = QPointF(0.5, 0.5);
-    m_customFocusPoint = QPointF();
     m_supportedFocusPointModes.clear();
-    m_focusZones.clear();
 
     QStringList focusModes = m_session->camera()->getSupportedFocusModes();
     for (int i = 0; i < focusModes.size(); ++i) {
@@ -220,10 +219,14 @@ void QAndroidCameraFocusControl::onCameraOpened()
     if (m_session->camera()->getMaxNumFocusAreas() > 0)
         m_supportedFocusPointModes << QCameraFocus::FocusPointCenter << QCameraFocus::FocusPointCustom;
 
-    emit focusModeChanged(focusMode());
-    emit focusPointModeChanged(m_focusPointMode);
-    emit customFocusPointChanged(m_customFocusPoint);
-    emit focusZonesChanged();
+    if (!m_supportedFocusModes.contains(m_focusMode))
+        setFocusModeHelper(QCameraFocus::AutoFocus);
+    if (!m_supportedFocusPointModes.contains(m_focusPointMode))
+        setFocusPointModeHelper(QCameraFocus::FocusPointAuto);
+
+    setFocusMode(m_focusMode);
+    setCustomFocusPoint(m_customFocusPoint);
+    setFocusPointMode(m_focusPointMode);
 }
 
 void QAndroidCameraFocusControl::updateFocusZones(QCameraFocusZone::FocusZoneStatus status)
@@ -276,11 +279,12 @@ void QAndroidCameraFocusControl::onViewportSizeChanged()
     if (!m_focusZones.isEmpty())
         status = m_focusZones.at(0).status();
     updateFocusZones(status);
+    setCameraFocusArea();
 }
 
 void QAndroidCameraFocusControl::onCameraCaptureModeChanged()
 {
-    if (m_focusMode == QCameraFocus::ContinuousFocus) {
+    if (m_session->camera() && m_focusMode == QCameraFocus::ContinuousFocus) {
         QString focusMode;
         if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
                 || !m_continuousPictureFocusSupported) {

View File

@@ -72,6 +72,22 @@ private Q_SLOTS:
     void onAutoFocusComplete(bool success);
 
 private:
+    inline void setFocusModeHelper(QCameraFocus::FocusModes mode)
+    {
+        if (m_focusMode != mode) {
+            m_focusMode = mode;
+            emit focusModeChanged(mode);
+        }
+    }
+
+    inline void setFocusPointModeHelper(QCameraFocus::FocusPointMode mode)
+    {
+        if (m_focusPointMode != mode) {
+            m_focusPointMode = mode;
+            emit focusPointModeChanged(mode);
+        }
+    }
+
     void updateFocusZones(QCameraFocusZone::FocusZoneStatus status = QCameraFocusZone::Selected);
     void setCameraFocusArea();

View File

@@ -49,6 +49,7 @@ QT_BEGIN_NAMESPACE
 QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndroidCameraSession *session)
     : QCameraImageProcessingControl()
     , m_session(session)
+    , m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
 {
     connect(m_session, SIGNAL(opened()),
             this, SLOT(onCameraOpened()));
@@ -56,19 +57,17 @@ QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndr
 bool QAndroidCameraImageProcessingControl::isParameterSupported(ProcessingParameter parameter) const
 {
-    return (parameter == QCameraImageProcessingControl::WhiteBalancePreset);
+    return parameter == QCameraImageProcessingControl::WhiteBalancePreset
+            && m_session->camera()
+            && !m_supportedWhiteBalanceModes.isEmpty();
 }
 
 bool QAndroidCameraImageProcessingControl::isParameterValueSupported(ProcessingParameter parameter,
                                                                      const QVariant &value) const
 {
-    if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
-        return false;
-
-    if (!m_session->camera())
-        return false;
-
-    return m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
+    return parameter == QCameraImageProcessingControl::WhiteBalancePreset
+            && m_session->camera()
+            && m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
 }
 
 QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter parameter) const
@@ -76,13 +75,7 @@ QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter par
     if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
         return QVariant();
 
-    if (!m_session->camera())
-        return QVariant();
-
-    QString wb = m_session->camera()->getWhiteBalance();
-    QCameraImageProcessing::WhiteBalanceMode mode = m_supportedWhiteBalanceModes.key(wb, QCameraImageProcessing::WhiteBalanceAuto);
-
-    return QVariant::fromValue(mode);
+    return QVariant::fromValue(m_whiteBalanceMode);
 }
 
 void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter parameter, const QVariant &value)
@@ -90,12 +83,21 @@ void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter para
     if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
         return;
 
-    if (!m_session->camera())
-        return;
+    QCameraImageProcessing::WhiteBalanceMode mode = value.value<QCameraImageProcessing::WhiteBalanceMode>();
 
-    QString wb = m_supportedWhiteBalanceModes.value(value.value<QCameraImageProcessing::WhiteBalanceMode>(), QString());
-    if (!wb.isEmpty())
+    if (m_session->camera())
+        setWhiteBalanceModeHelper(mode);
+    else
+        m_whiteBalanceMode = mode;
+}
+
+void QAndroidCameraImageProcessingControl::setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode)
+{
+    QString wb = m_supportedWhiteBalanceModes.value(mode, QString());
+    if (!wb.isEmpty()) {
         m_session->camera()->setWhiteBalance(wb);
+        m_whiteBalanceMode = mode;
+    }
 }
 
 void QAndroidCameraImageProcessingControl::onCameraOpened()
@@ -130,6 +132,11 @@ void QAndroidCameraImageProcessingControl::onCameraOpened()
                                                 QStringLiteral("warm-fluorescent"));
         }
     }
+
+    if (!m_supportedWhiteBalanceModes.contains(m_whiteBalanceMode))
+        m_whiteBalanceMode = QCameraImageProcessing::WhiteBalanceAuto;
+
+    setWhiteBalanceModeHelper(m_whiteBalanceMode);
 }
 
 QT_END_NAMESPACE

View File

@@ -63,9 +63,13 @@ private Q_SLOTS:
     void onCameraOpened();
 
 private:
+    void setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode);
+
     QAndroidCameraSession *m_session;
 
-    QHash<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
+    QCameraImageProcessing::WhiteBalanceMode m_whiteBalanceMode;
+    QMap<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
 };
 
 QT_END_NAMESPACE

View File

@@ -66,13 +66,13 @@ QAndroidCameraLocksControl::QAndroidCameraLocksControl(QAndroidCameraSession *se
 QCamera::LockTypes QAndroidCameraLocksControl::supportedLocks() const
 {
-    return (QCamera::LockExposure | QCamera::LockWhiteBalance | QCamera::LockFocus);
+    return m_supportedLocks;
 }
 
 QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType lock) const
 {
     if (!m_supportedLocks.testFlag(lock) || !m_session->camera())
-        return QCamera::Locked;
+        return QCamera::Unlocked;
 
     if (lock == QCamera::LockFocus)
         return m_focusLockStatus;
@@ -83,7 +83,7 @@ QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType loc
     if (lock == QCamera::LockWhiteBalance)
         return m_whiteBalanceLockStatus;
 
-    return QCamera::Locked;
+    return QCamera::Unlocked;
 }
 
 void QAndroidCameraLocksControl::searchAndLock(QCamera::LockTypes locks)

View File

@@ -331,11 +331,12 @@ bool QAndroidCameraSession::startPreview()
     if (m_previewStarted)
         return true;
 
-    if (m_videoOutput->isReady())
-        m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
-    else
+    if (!m_videoOutput->isReady())
         return true; // delay starting until the video output is ready
 
+    if (!m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+        return false;
+
     m_status = QCamera::StartingStatus;
     emit statusChanged(m_status);

View File

@@ -96,32 +96,25 @@ void QAndroidCameraZoomControl::zoomTo(qreal optical, qreal digital)
 {
     Q_UNUSED(optical);
 
-    if (!m_cameraSession->camera() ||
-            qFuzzyCompare(m_requestedZoom, digital) ||
-            qFuzzyCompare(m_maximumZoom, qreal(1))) {
-        return;
+    if (!qFuzzyCompare(m_requestedZoom, digital)) {
+        m_requestedZoom = digital;
+        emit requestedDigitalZoomChanged(m_requestedZoom);
     }
 
-    m_requestedZoom = digital;
-    emit requestedDigitalZoomChanged(m_requestedZoom);
-
-    digital = qBound(qreal(1), digital, m_maximumZoom);
-    int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(digital * 100));
-    qreal newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
-    if (!qFuzzyCompare(m_currentZoom, newZoom)) {
-        m_cameraSession->camera()->setZoom(validZoomIndex);
-        m_currentZoom = newZoom;
-        emit currentDigitalZoomChanged(m_currentZoom);
+    if (m_cameraSession->camera()) {
+        digital = qBound(qreal(1), digital, m_maximumZoom);
+        int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(digital * 100));
+        qreal newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
+        if (!qFuzzyCompare(m_currentZoom, newZoom)) {
+            m_cameraSession->camera()->setZoom(validZoomIndex);
+            m_currentZoom = newZoom;
+            emit currentDigitalZoomChanged(m_currentZoom);
+        }
     }
 }
 
 void QAndroidCameraZoomControl::onCameraOpened()
 {
-    m_requestedZoom = 1.0;
-    m_currentZoom = 1.0;
-    emit requestedDigitalZoomChanged(m_requestedZoom);
-    emit currentDigitalZoomChanged(m_currentZoom);
-
     if (m_cameraSession->camera()->isZoomSupported()) {
         m_zoomRatios = m_cameraSession->camera()->getZoomRatios();
         qreal maxZoom = m_zoomRatios.last() / qreal(100);
@@ -129,6 +122,7 @@ void QAndroidCameraZoomControl::onCameraOpened()
             m_maximumZoom = maxZoom;
             emit maximumDigitalZoomChanged(m_maximumZoom);
         }
+        zoomTo(1, m_requestedZoom);
     } else {
         m_zoomRatios.clear();
         if (!qFuzzyCompare(m_maximumZoom, qreal(1))) {

View File

@@ -56,6 +56,19 @@ static QMutex g_cameraMapMutex;
 typedef QMap<int, AndroidCamera *> CameraMap;
 Q_GLOBAL_STATIC(CameraMap, g_cameraMap)
 
+static inline bool exceptionCheckAndClear(JNIEnv *env)
+{
+    if (Q_UNLIKELY(env->ExceptionCheck())) {
+#ifdef QT_DEBUG
+        env->ExceptionDescribe();
+#endif // QT_DEBUG
+        env->ExceptionClear();
+        return true;
+    }
+
+    return false;
+}
+
 static QRect areaToRect(jobject areaObj)
 {
     QJNIObjectPrivate area(areaObj);
@@ -132,9 +145,9 @@ public:
     Q_INVOKABLE bool init(int cameraId);
     Q_INVOKABLE void release();
 
-    Q_INVOKABLE void lock();
-    Q_INVOKABLE void unlock();
-    Q_INVOKABLE void reconnect();
+    Q_INVOKABLE bool lock();
+    Q_INVOKABLE bool unlock();
+    Q_INVOKABLE bool reconnect();
     Q_INVOKABLE AndroidCamera::CameraFacing getFacing();
     Q_INVOKABLE int getNativeOrientation();
@@ -147,7 +160,7 @@ public:
     Q_INVOKABLE QSize previewSize() const { return m_previewSize; }
     Q_INVOKABLE void updatePreviewSize();
-    Q_INVOKABLE void setPreviewTexture(void *surfaceTexture);
+    Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture);
 
     Q_INVOKABLE bool isZoomSupported();
     Q_INVOKABLE int getMaxZoom();
@@ -266,7 +279,7 @@ AndroidCamera *AndroidCamera::open(int cameraId)
     worker->start();
     d->moveToThread(worker);
     connect(worker, &QThread::finished, d, &AndroidCameraPrivate::deleteLater);
-    bool ok = false;
+    bool ok = true;
     QMetaObject::invokeMethod(d, "init", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok), Q_ARG(int, cameraId));
     if (!ok) {
         worker->quit();
@@ -289,22 +302,28 @@ int AndroidCamera::cameraId() const
     return d->m_cameraId;
 }
 
-void AndroidCamera::lock()
+bool AndroidCamera::lock()
 {
     Q_D(AndroidCamera);
-    QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection);
+    bool ok = true;
+    QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+    return ok;
 }
 
-void AndroidCamera::unlock()
+bool AndroidCamera::unlock()
 {
     Q_D(AndroidCamera);
-    QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection);
+    bool ok = true;
+    QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+    return ok;
 }
 
-void AndroidCamera::reconnect()
+bool AndroidCamera::reconnect()
 {
     Q_D(AndroidCamera);
-    QMetaObject::invokeMethod(d, "reconnect");
+    bool ok = true;
+    QMetaObject::invokeMethod(d, "reconnect", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+    return ok;
 }
 
 void AndroidCamera::release()
@@ -368,13 +387,16 @@ void AndroidCamera::setPreviewSize(const QSize &size)
     QMetaObject::invokeMethod(d, "updatePreviewSize");
 }
 
-void AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
+bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
 {
     Q_D(AndroidCamera);
+    bool ok = true;
     QMetaObject::invokeMethod(d,
                               "setPreviewTexture",
                               Qt::BlockingQueuedConnection,
+                              Q_RETURN_ARG(bool, ok),
                               Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : 0));
+    return ok;
 }
 
 bool AndroidCamera::isZoomSupported()
@@ -698,12 +720,12 @@ AndroidCameraPrivate::~AndroidCameraPrivate()
 bool AndroidCameraPrivate::init(int cameraId)
 {
     m_cameraId = cameraId;
+    QJNIEnvironmentPrivate env;
     m_camera = QJNIObjectPrivate::callStaticObjectMethod("android/hardware/Camera",
                                                          "open",
                                                          "(I)Landroid/hardware/Camera;",
                                                          cameraId);
 
-    if (!m_camera.isValid())
+    if (exceptionCheckAndClear(env) || !m_camera.isValid())
         return false;
 
     m_cameraListener = QJNIObjectPrivate(g_qtCameraListenerClass, "(I)V", m_cameraId);
@@ -731,26 +753,25 @@ void AndroidCameraPrivate::release()
     m_camera.callMethod<void>("release");
 }
 
-void AndroidCameraPrivate::lock()
+bool AndroidCameraPrivate::lock()
 {
+    QJNIEnvironmentPrivate env;
     m_camera.callMethod<void>("lock");
+    return !exceptionCheckAndClear(env);
 }
 
-void AndroidCameraPrivate::unlock()
+bool AndroidCameraPrivate::unlock()
 {
+    QJNIEnvironmentPrivate env;
     m_camera.callMethod<void>("unlock");
+    return !exceptionCheckAndClear(env);
 }
 
-void AndroidCameraPrivate::reconnect()
+bool AndroidCameraPrivate::reconnect()
 {
     QJNIEnvironmentPrivate env;
     m_camera.callMethod<void>("reconnect");
-    if (env->ExceptionCheck()) {
-#ifdef QT_DEBUG
-        env->ExceptionDescribe();
-#endif // QT_DEBUG
-        env->ExceptionClear();
-    }
+    return !exceptionCheckAndClear(env);
 }
 
 AndroidCamera::CameraFacing AndroidCameraPrivate::getFacing()
@@ -832,11 +853,13 @@ void AndroidCameraPrivate::updatePreviewSize()
     emit previewSizeChanged();
 }
 
-void AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
+bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
 {
+    QJNIEnvironmentPrivate env;
     m_camera.callMethod<void>("setPreviewTexture",
                               "(Landroid/graphics/SurfaceTexture;)V",
                               static_cast<jobject>(surfaceTexture));
+    return !exceptionCheckAndClear(env);
 }
 
 bool AndroidCameraPrivate::isZoomSupported()
@@ -1020,8 +1043,7 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
             arrayList.callMethod<jboolean>("add",
                                            "(Ljava/lang/Object;)Z",
                                            rectToArea(areas.at(i)).object());
-            if (env->ExceptionCheck())
-                env->ExceptionClear();
+            exceptionCheckAndClear(env);
         }
         list = arrayList;
     }
@@ -1347,9 +1369,11 @@ void AndroidCameraPrivate::fetchLastPreviewFrame()
 void AndroidCameraPrivate::applyParameters()
 {
+    QJNIEnvironmentPrivate env;
     m_camera.callMethod<void>("setParameters",
                               "(Landroid/hardware/Camera$Parameters;)V",
                               m_parameters.object());
+    exceptionCheckAndClear(env);
 }
 
 QStringList AndroidCameraPrivate::callParametersStringListMethod(const QByteArray &methodName)
@@ -1386,10 +1410,8 @@ static JNINativeMethod methods[] = {
 bool AndroidCamera::initJNI(JNIEnv *env)
 {
     jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtCameraListener");
-    if (env->ExceptionCheck())
-        env->ExceptionClear();
 
-    if (clazz) {
+    if (!exceptionCheckAndClear(env) && clazz) {
         g_qtCameraListenerClass = static_cast<jclass>(env->NewGlobalRef(clazz));
         if (env->RegisterNatives(g_qtCameraListenerClass,
                                  methods,

View File

@@ -90,9 +90,9 @@ public:
     int cameraId() const;
 
-    void lock();
-    void unlock();
-    void reconnect();
+    bool lock();
+    bool unlock();
+    bool reconnect();
     void release();
 
     CameraFacing getFacing();
@@ -106,7 +106,7 @@ public:
     QSize previewSize() const;
     void setPreviewSize(const QSize &size);
-    void setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
+    bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
 
     bool isZoomSupported();
     int getMaxZoom();

View File

@@ -250,10 +250,8 @@ void AVFMediaRecorderControl::setState(QMediaRecorder::State state)
             qDebugCamera() << "Video capture location:" << actualLocation.toString();
 
-            NSString *urlString = [NSString stringWithUTF8String:actualLocation.toString().toUtf8().constData()];
-            NSURL *fileURL = [NSURL URLWithString:urlString];
-            [m_movieOutput startRecordingToOutputFileURL:fileURL recordingDelegate:m_recorderDelagate];
+            [m_movieOutput startRecordingToOutputFileURL:actualLocation.toNSURL()
+                           recordingDelegate:m_recorderDelagate];
 
             Q_EMIT actualLocationChanged(actualLocation);
         } else {

View File

@@ -216,8 +216,9 @@ CoreAudioSessionManager::CoreAudioSessionManager() :
 {
     m_sessionObserver = [[CoreAudioSessionObserver alloc] initWithAudioSessionManager:this];
     setActive(true);
-    //set default category to just Playback and only switch if we need more permissions
-    setCategory(CoreAudioSessionManager::Playback, CoreAudioSessionManager::MixWithOthers);
+    // Set default category to Ambient (implies MixWithOthers). This makes sure audio stops playing
+    // if the screen is locked or if the Silent switch is toggled.
+    setCategory(CoreAudioSessionManager::Ambient, CoreAudioSessionManager::None);
 }
 
 CoreAudioSessionManager::~CoreAudioSessionManager()

View File

@@ -23,10 +23,5 @@ SOURCES += \
     $$PWD/dsimagecapturecontrol.cpp \
     $$PWD/dscamerasession.cpp
 
-qtHaveModule(widgets) {
-    HEADERS += $$PWD/dsvideowidgetcontrol.h
-    SOURCES += $$PWD/dsvideowidgetcontrol.cpp
-}
-
 *-msvc*:INCLUDEPATH += $$(DXSDK_DIR)/include
 
 LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32

View File

@@ -48,10 +48,13 @@
 QT_BEGIN_NAMESPACE
 
 DSCameraControl::DSCameraControl(QObject *parent)
-    :QCameraControl(parent), m_captureMode(QCamera::CaptureStillImage)
+    : QCameraControl(parent)
+    , m_state(QCamera::UnloadedState)
+    , m_captureMode(QCamera::CaptureStillImage)
 {
     m_session = qobject_cast<DSCameraSession*>(parent);
-    connect(m_session, SIGNAL(stateChanged(QCamera::State)),this, SIGNAL(stateChanged(QCamera::State)));
+    connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
+            this, SIGNAL(statusChanged(QCamera::Status)));
 }
 
 DSCameraControl::~DSCameraControl()
@@ -60,14 +63,30 @@ DSCameraControl::~DSCameraControl()
 void DSCameraControl::setState(QCamera::State state)
 {
+    if (m_state == state)
+        return;
+
+    bool succeeded = false;
     switch (state) {
-    case QCamera::ActiveState:
-        start();
+    case QCamera::UnloadedState:
+        succeeded = m_session->unload();
         break;
-    case QCamera::UnloadedState: /* fall through */
-    case QCamera::LoadedState:
-        stop();
-        break;
+    case QCamera::LoadedState:
+    case QCamera::ActiveState:
+        if (m_state == QCamera::UnloadedState && !m_session->load())
+            return;
+
+        if (state == QCamera::ActiveState)
+            succeeded = m_session->startPreview();
+        else
+            succeeded = m_session->stopPreview();
+
+        break;
+    }
+
+    if (succeeded) {
+        m_state = state;
+        emit stateChanged(m_state);
     }
 }
@@ -85,19 +104,17 @@ bool DSCameraControl::isCaptureModeSupported(QCamera::CaptureModes mode) const
     return bCaptureSupported;
 }
 
-void DSCameraControl::start()
+void DSCameraControl::setCaptureMode(QCamera::CaptureModes mode)
 {
-    m_session->record();
+    if (m_captureMode != mode && isCaptureModeSupported(mode)) {
+        m_captureMode = mode;
+        emit captureModeChanged(mode);
+    }
 }
 
-void DSCameraControl::stop()
+QCamera::Status DSCameraControl::status() const
 {
-    m_session->stop();
-}
-
-QCamera::State DSCameraControl::state() const
-{
-    return (QCamera::State)m_session->state();
+    return m_session->status();
 }
 
 QT_END_NAMESPACE

View File

@@ -58,28 +58,21 @@ public:
     DSCameraControl(QObject *parent = 0);
     ~DSCameraControl();
 
-    void start();
-    void stop();
-    QCamera::State state() const;
+    QCamera::State state() const { return m_state; }
 
     QCamera::CaptureModes captureMode() const { return m_captureMode; }
-    void setCaptureMode(QCamera::CaptureModes mode)
-    {
-        if (m_captureMode != mode) {
-            m_captureMode = mode;
-            emit captureModeChanged(mode);
-        }
-    }
+    void setCaptureMode(QCamera::CaptureModes mode);
 
     void setState(QCamera::State state);
-    QCamera::Status status() const { return QCamera::UnavailableStatus; }
+    QCamera::Status status() const;
     bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
     bool canChangeProperty(PropertyChangeType /* changeType */, QCamera::Status /* status */) const {return false; }
 
 private:
     DSCameraSession *m_session;
     DSCameraService *m_service;
+    QCamera::State m_state;
     QCamera::CaptureModes m_captureMode;
 };

View File

@@ -42,11 +42,6 @@
 #include <QtCore/qvariant.h>
 #include <QtCore/qdebug.h>
 
-#if defined(HAVE_WIDGETS)
-#include <QtWidgets/qwidget.h>
-#include <QVideoWidgetControl>
-#endif
-
 #include "dscameraservice.h"
 #include "dscameracontrol.h"
 #include "dscamerasession.h"
@@ -54,28 +49,16 @@
 #include "dsvideodevicecontrol.h"
 #include "dsimagecapturecontrol.h"
 
-#if defined(HAVE_WIDGETS)
-#include "dsvideowidgetcontrol.h"
-#endif
-
 QT_BEGIN_NAMESPACE
 
 DSCameraService::DSCameraService(QObject *parent):
     QMediaService(parent)
-#if defined(HAVE_WIDGETS)
-  , m_viewFinderWidget(0)
-#endif
   , m_videoRenderer(0)
 {
     m_session = new DSCameraSession(this);
     m_control = new DSCameraControl(m_session);
     m_videoDevice = new DSVideoDeviceControl(m_session);
     m_imageCapture = new DSImageCaptureControl(m_session);
-
-    m_device = QByteArray("default");
 }
 
 DSCameraService::~DSCameraService()
@@ -84,9 +67,6 @@ DSCameraService::~DSCameraService()
     delete m_videoDevice;
     delete m_videoRenderer;
     delete m_imageCapture;
-#if defined(HAVE_WIDGETS)
-    delete m_viewFinderWidget;
-#endif
     delete m_session;
 }
@@ -98,21 +78,8 @@ QMediaControl* DSCameraService::requestControl(const char *name)
     if (qstrcmp(name, QCameraImageCaptureControl_iid) == 0)
         return m_imageCapture;
 
-#if defined(HAVE_WIDGETS)
-    if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
-        if (!m_viewFinderWidget && !m_videoRenderer) {
-            m_viewFinderWidget = new DSVideoWidgetControl(m_session);
-            return m_viewFinderWidget;
-        }
-    }
-#endif
-
     if (qstrcmp(name,QVideoRendererControl_iid) == 0) {
-#if defined(HAVE_WIDGETS)
-        if (!m_videoRenderer && !m_viewFinderWidget) {
-#else
         if (!m_videoRenderer) {
-#endif
             m_videoRenderer = new DSVideoRendererControl(m_session, this);
             return m_videoRenderer;
         }
@@ -131,14 +98,6 @@ void DSCameraService::releaseControl(QMediaControl *control)
         m_videoRenderer = 0;
         return;
     }
-
-#if defined(HAVE_WIDGETS)
-    if (control == m_viewFinderWidget) {
-        delete m_viewFinderWidget;
-        m_viewFinderWidget = 0;
-        return;
-    }
-#endif
 }
 
 QT_END_NAMESPACE

View File

@@ -70,13 +70,9 @@ private:
     DSCameraControl *m_control;
     DSCameraSession *m_session;
     DSVideoOutputControl *m_videoOutput;
-#if defined(HAVE_WIDGETS)
-    QMediaControl *m_viewFinderWidget;
-#endif
     DSVideoDeviceControl *m_videoDevice;
     QMediaControl *m_videoRenderer;
     DSImageCaptureControl *m_imageCapture;
-    QByteArray m_device;
 };
 
 QT_END_NAMESPACE

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
 /****************************************************************************
 **
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
 ** Contact: http://www.qt-project.org/legal
 **
 ** This file is part of the Qt Toolkit.
@@ -51,6 +51,7 @@
 #include <QtMultimedia/qvideoframe.h>
 #include <QtMultimedia/qabstractvideosurface.h>
 #include <QtMultimedia/qvideosurfaceformat.h>
+#include <private/qmediastoragelocation_p.h>
 
 #include <tchar.h>
 #include <dshow.h>
@@ -75,18 +76,8 @@ struct ISampleGrabber;
 
 QT_BEGIN_NAMESPACE
 
-class DSVideoRenderer;
 class SampleGrabberCallbackPrivate;
 
-struct video_buffer {
-    unsigned char* buffer;
-    int length;
-    qint64 time;
-};
-
-typedef QMap<unsigned int, QList<QSize> > FormatResolutionMap;
-
 class DSCameraSession : public QObject
 {
     Q_OBJECT
@@ -94,113 +85,82 @@ public:
     DSCameraSession(QObject *parent = 0);
     ~DSCameraSession();
 
-    bool deviceReady();
-    bool pictureInProgress();
-
-    // camera controls
-    int framerate() const;
-    void setFrameRate(int rate);
-    int brightness() const;
-    void setBrightness(int b);
-    int contrast() const;
-    void setContrast(int c);
-    int saturation() const;
-    void setSaturation(int s);
-    int hue() const;
-    void setHue(int h);
-    int sharpness() const;
-    void setSharpness(int s);
-    int zoom() const;
-    void setZoom(int z);
-    bool backlightCompensation() const;
-    void setBacklightCompensation(bool);
-    int whitelevel() const;
-    void setWhitelevel(int w);
-    int rotation() const;
-    void setRotation(int r);
-    bool flash() const;
-    void setFlash(bool f);
-    bool autofocus() const;
-    void setAutofocus(bool f);
-    QSize frameSize() const;
-    void setFrameSize(const QSize& s);
+    QCamera::Status status() const { return m_status; }
 
     void setDevice(const QString &device);
 
-    QList<QVideoFrame::PixelFormat> supportedPixelFormats();
-    QVideoFrame::PixelFormat pixelFormat() const;
-    void setPixelFormat(QVideoFrame::PixelFormat fmt);
-    QList<QSize> supportedResolutions(QVideoFrame::PixelFormat format);
-
-    // media control
-    bool setOutputLocation(const QUrl &sink);
-    QUrl outputLocation() const;
-    qint64 position() const;
-    int state() const;
-    void record();
-    void pause();
-    void stop();
+    bool load();
+    bool unload();
+    bool startPreview();
+    bool stopPreview();
+
+    bool isReadyForCapture();
+    int captureImage(const QString &fileName);
 
     void setSurface(QAbstractVideoSurface* surface);
 
-    int captureImage(const QString &fileName);
-
-    AM_MEDIA_TYPE StillMediaType;
-    QList<video_buffer*> frames;
-    SampleGrabberCallbackPrivate* StillCapCB;
-
-    QMutex mutex;
-
 Q_SIGNALS:
-    void stateChanged(QCamera::State);
+    void statusChanged(QCamera::Status);
 
+    void imageExposed(int id);
     void imageCaptured(int id, const QImage &preview);
     void imageSaved(int id, const QString &fileName);
     void readyForCaptureChanged(bool);
+    void captureError(int id, int error, const QString &errorString);
 
 private Q_SLOTS:
-    void captureFrame();
+    void presentFrame();
+    void updateReadyForCapture();
 
 private:
-    QVideoSurfaceFormat actualFormat;
-    QList<QVideoFrame::PixelFormat> types;
-    QTime timeStamp;
-    bool graph;
-    bool active;
-    bool opened;
-    bool available;
-    QCamera::State m_state;
-    QByteArray m_device;
-    QUrl m_sink;
-    DSVideoRenderer* m_output;
-    QAbstractVideoSurface* m_surface;
-    QVideoFrame::PixelFormat pixelF;
-    QSize m_windowSize;
-    FormatResolutionMap resolutions;
-    ICaptureGraphBuilder2* pBuild;
-    IGraphBuilder* pGraph;
-    IBaseFilter* pCap;
-    IBaseFilter* pSG_Filter;
-    ISampleGrabber *pSG;
-    QString m_snapshot;
-    int m_currentImageId;
-    bool needsHorizontalMirroring;
-    bool needsVerticalMirroring;
-
-protected:
-    HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin);
+    void setStatus(QCamera::Status status);
+
+    void populateCommonResolutions();
+
+    void onFrameAvailable(const char *frameData, long len);
+    void saveCapturedImage(int id, const QImage &image, const QString &path);
+
     bool createFilterGraph();
-    void updateProperties();
-    bool setProperties();
-    bool openStream();
-    void closeStream();
-    bool startStream();
-    void stopStream();
-    void suspendStream();
-    void resumeStream();
+    bool connectGraph();
+    void disconnectGraph();
+    void updateSourceCapabilities();
+    bool configurePreviewFormat();
+
+    QMutex m_presentMutex;
+    QMutex m_captureMutex;
+
+    // Capture Graph
+    ICaptureGraphBuilder2* m_graphBuilder;
+    IGraphBuilder* m_filterGraph;
+
+    // Source (camera)
+    QString m_sourceDeviceName;
+    IBaseFilter* m_sourceFilter;
+    AM_MEDIA_TYPE m_sourcePreferredFormat;
+    QSize m_sourcePreferredResolution;
+    bool m_needsHorizontalMirroring;
+
+    // Preview
+    IBaseFilter *m_previewFilter;
+    ISampleGrabber *m_previewSampleGrabber;
+    IBaseFilter *m_nullRendererFilter;
+    QVideoFrame m_currentFrame;
+    bool m_previewStarted;
+    QAbstractVideoSurface* m_surface;
+    QVideoSurfaceFormat m_previewSurfaceFormat;
+    QVideoFrame::PixelFormat m_previewPixelFormat;
+    QSize m_previewSize;
+
+    // Image capture
+    QString m_imageCaptureFileName;
+    QMediaStorageLocation m_fileNameGenerator;
+    bool m_readyForCapture;
+    int m_imageIdCounter;
+    int m_currentImageId;
+    QVideoFrame m_capturedFrame;
+
+    // Internal state
+    QCamera::Status m_status;
+
+    friend class SampleGrabberCallbackPrivate;
 };
 
 QT_END_NAMESPACE

View File

@@ -46,15 +46,19 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
DSImageCaptureControl::DSImageCaptureControl(DSCameraSession *session) DSImageCaptureControl::DSImageCaptureControl(DSCameraSession *session)
:QCameraImageCaptureControl(session), m_session(session), m_ready(false) : QCameraImageCaptureControl(session)
, m_session(session)
{ {
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState())); connect(m_session, SIGNAL(imageExposed(int)),
this, SIGNAL(imageExposed(int)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)), connect(m_session, SIGNAL(imageCaptured(int,QImage)),
this, SIGNAL(imageCaptured(int,QImage))); this, SIGNAL(imageCaptured(int,QImage)));
connect(m_session, SIGNAL(imageSaved(int,QString)), connect(m_session, SIGNAL(imageSaved(int,QString)),
this, SIGNAL(imageSaved(int,QString))); this, SIGNAL(imageSaved(int,QString)));
connect(m_session, SIGNAL(readyForCaptureChanged(bool)), connect(m_session, SIGNAL(readyForCaptureChanged(bool)),
this, SIGNAL(readyForCaptureChanged(bool))); this, SIGNAL(readyForCaptureChanged(bool)));
connect(m_session, SIGNAL(captureError(int,int,QString)),
this, SIGNAL(error(int,int,QString)));
} }
DSImageCaptureControl::~DSImageCaptureControl() DSImageCaptureControl::~DSImageCaptureControl()
@@ -63,7 +67,7 @@ DSImageCaptureControl::~DSImageCaptureControl()
bool DSImageCaptureControl::isReadyForCapture() const bool DSImageCaptureControl::isReadyForCapture() const
{ {
return m_ready; return m_session->isReadyForCapture();
} }
int DSImageCaptureControl::capture(const QString &fileName) int DSImageCaptureControl::capture(const QString &fileName)
@@ -71,12 +75,15 @@ int DSImageCaptureControl::capture(const QString &fileName)
return m_session->captureImage(fileName); return m_session->captureImage(fileName);
} }
void DSImageCaptureControl::updateState() QCameraImageCapture::DriveMode DSImageCaptureControl::driveMode() const
{ {
bool ready = (m_session->state() == QCamera::ActiveState) && return QCameraImageCapture::SingleImageCapture;
!m_session->pictureInProgress(); }
if(m_ready != ready)
emit readyForCaptureChanged(m_ready = ready); void DSImageCaptureControl::setDriveMode(QCameraImageCapture::DriveMode mode)
{
if (mode != QCameraImageCapture::SingleImageCapture)
qWarning("Drive mode not supported.");
} }
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -52,23 +52,18 @@ class DSImageCaptureControl : public QCameraImageCaptureControl
     Q_OBJECT
 public:
     DSImageCaptureControl(DSCameraSession *session);
-    virtual ~DSImageCaptureControl();
+    ~DSImageCaptureControl();
 
     bool isReadyForCapture() const;
     int capture(const QString &fileName);
 
-    virtual QCameraImageCapture::DriveMode driveMode() const { return QCameraImageCapture::SingleImageCapture; }
-    virtual void setDriveMode(QCameraImageCapture::DriveMode mode) { Q_UNUSED(mode) }
-    virtual void cancelCapture() {}
-
-private slots:
-    void updateState();
+    QCameraImageCapture::DriveMode driveMode() const;
+    void setDriveMode(QCameraImageCapture::DriveMode mode);
+    void cancelCapture() {}
 
 private:
     DSCameraSession *m_session;
-    bool m_ready;
 };
 
 QT_END_NAMESPACE

View File

@@ -102,7 +102,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
     devices->clear();
     descriptions->clear();
 
-    CoInitialize(NULL);
     ICreateDevEnum* pDevEnum = NULL;
     IEnumMoniker* pEnum = NULL;
     // Create the System device enumerator
@@ -148,7 +147,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
         }
         pDevEnum->Release();
     }
-    CoUninitialize();
 }
 
 void DSVideoDeviceControl::setSelectedDevice(int index)

View File

@@ -1,253 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QtCore/qcoreevent.h>
#include <QtCore/qtimer.h>
#include "dsvideowidgetcontrol.h"
#include "dscamerasession.h"
QT_BEGIN_NAMESPACE
DSVideoWidgetSurface::DSVideoWidgetSurface(QLabel *pWidget, QObject *parent)
: QAbstractVideoSurface(parent)
{
widget = pWidget;
myPixmap = 0;
}
QList<QVideoFrame::PixelFormat> DSVideoWidgetSurface::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
if (handleType == QAbstractVideoBuffer::NoHandle) {
return QList<QVideoFrame::PixelFormat>()
<< QVideoFrame::Format_RGB32
<< QVideoFrame::Format_RGB24;
} else {
return QList<QVideoFrame::PixelFormat>();
}
}
bool DSVideoWidgetSurface::present(const QVideoFrame &frame)
{
QVideoFrame myFrame = frame;
myFrame.map(QAbstractVideoBuffer::ReadOnly);
QImage image(
frame.bits(),
frame.width(),
frame.height(),
frame.bytesPerLine(),
imageFormat);
if (image.isNull())
{
// Try to adapt
QImage image2(
frame.bits(),
frame.width(),
frame.height(),
frame.bytesPerLine(),
QImage::Format_RGB888);
image = image2;
}
myFrame.unmap();
delete myPixmap;
myPixmap = new QPixmap(QPixmap::fromImage(image).scaled(widget->size()));
widget->setPixmap(*myPixmap);
widget->repaint();
return true;
}
void DSVideoWidgetSurface::setImageFormat(QImage::Format fmt)
{
imageFormat = fmt;
}
void DSVideoWidgetSurface::updateVideoRect()
{
}
void DSVideoWidgetSurface::paint(QPainter *painter)
{
Q_UNUSED(painter)
}
DSVideoWidgetControl::DSVideoWidgetControl(DSCameraSession* session, QObject *parent) :
QVideoWidgetControl(parent),
m_session(session),
m_widget(new QLabel()),
m_fullScreen(false)
{
m_widget->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::MinimumExpanding);
m_widget->setAlignment(Qt::AlignCenter);
m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
surface = new DSVideoWidgetSurface(m_widget);
QPalette palette;
palette.setColor(QPalette::Background, Qt::black);
m_widget->setPalette(palette);
m_widget->setAutoFillBackground( true );
// Request QEvents
m_widget->installEventFilter(this);
m_windowId = m_widget->effectiveWinId();
surface->setImageFormat(QImage::Format_RGB888);
session->setSurface(surface);
}
DSVideoWidgetControl::~DSVideoWidgetControl()
{
delete m_widget;
}
bool DSVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
{
if (object == m_widget) {
switch (e->type()) {
case QEvent::ParentChange:
case QEvent::WinIdChange:
case QEvent::Show:
m_windowId = m_widget->effectiveWinId();
emit widgetUpdated();
break;
case QEvent::Resize:
emit widgetResized(m_widget->size());
break;
case QEvent::PolishRequest:
m_widget->ensurePolished();
break;
default:
// Do nothing
break;
}
}
return false;
}
QWidget *DSVideoWidgetControl::videoWidget()
{
return m_widget;
}
Qt::AspectRatioMode DSVideoWidgetControl::aspectRatioMode() const
{
return m_aspectRatioMode;
}
void DSVideoWidgetControl::setAspectRatioMode(Qt::AspectRatioMode ratio)
{
if (m_aspectRatioMode==ratio) {
return;
}
m_aspectRatioMode = ratio;
if (m_aspectRatioMode == Qt::KeepAspectRatio)
m_widget->setScaledContents(false);
else {
m_widget->setScaledContents(true);
}
}
bool DSVideoWidgetControl::isFullScreen() const
{
return m_fullScreen;
}
void DSVideoWidgetControl::setFullScreen(bool fullScreen)
{
if (m_widget && !fullScreen && m_fullScreen) {
m_widget->showNormal();
m_fullScreen = false;
} else if (m_widget && fullScreen) {
m_widget->showFullScreen();
m_fullScreen = true;
}
emit fullScreenChanged(fullScreen);
}
int DSVideoWidgetControl::brightness() const
{
return 0;
}
void DSVideoWidgetControl::setBrightness(int brightness)
{
Q_UNUSED(brightness);
}
int DSVideoWidgetControl::contrast() const
{
return 0;
}
void DSVideoWidgetControl::setContrast(int contrast)
{
Q_UNUSED(contrast);
}
int DSVideoWidgetControl::hue() const
{
return 0;
}
void DSVideoWidgetControl::setHue(int hue)
{
Q_UNUSED(hue);
}
int DSVideoWidgetControl::saturation() const
{
return 0;
}
void DSVideoWidgetControl::setSaturation(int saturation)
{
Q_UNUSED(saturation);
}
QT_END_NAMESPACE
// End of file
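One detail worth noting in the removed present(): it maps a copy of the frame (myFrame) but then builds the QImage from the original frame's bits(). That only works because QVideoFrame is explicitly shared, so both objects see the same mapped buffer. A minimal sketch of the more conventional map/read/unmap pattern, assuming an RGB image format is already known; the function name is illustrative and this is not a replacement the patch adds:

#include <QtMultimedia/QVideoFrame>
#include <QtMultimedia/QAbstractVideoBuffer>
#include <QImage>
#include <QPixmap>
#include <QLabel>

// Map the frame, copy the pixels out, unmap, then hand the detached pixmap
// to the widget. The copy() keeps the image valid after unmap().
bool presentFrame(const QVideoFrame &frame, QLabel *widget, QImage::Format imageFormat)
{
    QVideoFrame mapped(frame);
    if (!mapped.map(QAbstractVideoBuffer::ReadOnly))
        return false;

    QImage image(mapped.bits(), mapped.width(), mapped.height(),
                 mapped.bytesPerLine(), imageFormat);
    QPixmap pixmap = QPixmap::fromImage(image.copy()).scaled(widget->size());
    mapped.unmap();

    widget->setPixmap(pixmap);
    return true;
}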
View File
@@ -1,150 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef DSVIDEOWIDGETCONTROL_H
#define DSVIDEOWIDGETCONTROL_H
#include <QtCore/qobject.h>
#include <QtWidgets>
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <qvideowidgetcontrol.h>
#include "dscameracontrol.h"
QT_BEGIN_NAMESPACE
class DSVideoWidgetSurface : public QAbstractVideoSurface
{
Q_OBJECT
public:
DSVideoWidgetSurface(QLabel *pWidget, QObject *parent = 0);
QList<QVideoFrame::PixelFormat> supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const;
bool present(const QVideoFrame &frame);
QRect videoRect() const { return targetRect; }
void updateVideoRect();
void paint(QPainter *painter);
void setImageFormat(QImage::Format fmt);
private:
QLabel *widget;
QImage::Format imageFormat;
QRect targetRect;
QSize imageSize;
QRect sourceRect;
QPixmap* myPixmap;
};
class DSVideoWidgetControl : public QVideoWidgetControl
{
Q_OBJECT
DSVideoWidgetSurface* surface;
public: // Constructor & Destructor
DSVideoWidgetControl(DSCameraSession* session, QObject *parent = 0);
virtual ~DSVideoWidgetControl();
public: // QVideoWidgetControl
QWidget *videoWidget();
// Aspect Ratio
Qt::AspectRatioMode aspectRatioMode() const;
void setAspectRatioMode(Qt::AspectRatioMode ratio);
// Full Screen
bool isFullScreen() const;
void setFullScreen(bool fullScreen);
// Brightness
int brightness() const;
void setBrightness(int brightness);
// Contrast
int contrast() const;
void setContrast(int contrast);
// Hue
int hue() const;
void setHue(int hue);
// Saturation
int saturation() const;
void setSaturation(int saturation);
public: // Internal
bool eventFilter(QObject *object, QEvent *event);
/*
Q_SIGNALS: // QVideoWidgetControl
void fullScreenChanged(bool fullScreen);
void brightnessChanged(int brightness);
void contrastChanged(int contrast);
void hueChanged(int hue);
void saturationChanged(int saturation);
*/
Q_SIGNALS: // Internal Signals
void widgetResized(QSize size);
void widgetUpdated();
private: // Data
DSCameraSession* m_session;
QLabel *m_widget;
WId m_windowId;
Qt::AspectRatioMode m_aspectRatioMode;
bool m_fullScreen;
};
QT_END_NAMESPACE
#endif // DSVideoWidgetControl_H
View File
@@ -4,7 +4,7 @@ PLUGIN_TYPE=mediaservice
PLUGIN_CLASS_NAME = DSServicePlugin PLUGIN_CLASS_NAME = DSServicePlugin
load(qt_plugin) load(qt_plugin)
QT += multimedia QT += multimedia-private
HEADERS += dsserviceplugin.h HEADERS += dsserviceplugin.h
SOURCES += dsserviceplugin.cpp SOURCES += dsserviceplugin.cpp
View File
@@ -79,15 +79,32 @@ extern const CLSID CLSID_VideoInputDeviceCategory;
QT_USE_NAMESPACE QT_USE_NAMESPACE
static int g_refCount = 0;
void addRefCount()
{
if (++g_refCount == 1)
CoInitialize(NULL);
}
void releaseRefCount()
{
if (--g_refCount == 0)
CoUninitialize();
}
QMediaService* DSServicePlugin::create(QString const& key) QMediaService* DSServicePlugin::create(QString const& key)
{ {
#ifdef QMEDIA_DIRECTSHOW_CAMERA #ifdef QMEDIA_DIRECTSHOW_CAMERA
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA)) if (key == QLatin1String(Q_MEDIASERVICE_CAMERA)) {
addRefCount();
return new DSCameraService; return new DSCameraService;
}
#endif #endif
#ifdef QMEDIA_DIRECTSHOW_PLAYER #ifdef QMEDIA_DIRECTSHOW_PLAYER
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER)) if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER)) {
addRefCount();
return new DirectShowPlayerService; return new DirectShowPlayerService;
}
#endif #endif
return 0; return 0;
@@ -96,6 +113,7 @@ QMediaService* DSServicePlugin::create(QString const& key)
void DSServicePlugin::release(QMediaService *service) void DSServicePlugin::release(QMediaService *service)
{ {
delete service; delete service;
releaseRefCount();
} }
QMediaServiceProviderHint::Features DSServicePlugin::supportedFeatures( QMediaServiceProviderHint::Features DSServicePlugin::supportedFeatures(
@@ -154,6 +172,8 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
void DSServicePlugin::updateDevices() const void DSServicePlugin::updateDevices() const
{ {
addRefCount();
m_defaultCameraDevice.clear(); m_defaultCameraDevice.clear();
DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions); DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
@@ -162,6 +182,8 @@ void DSServicePlugin::updateDevices() const
} else { } else {
m_defaultCameraDevice = m_cameraDevices.first(); m_defaultCameraDevice = m_cameraDevices.first();
} }
releaseRefCount();
} }
#endif #endif
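The plugin now keeps COM alive with a small global reference count: the first service created (or device enumeration started) calls CoInitialize(), and the last release calls CoUninitialize(). That is why the per-call CoInitialize()/CoUninitialize() pairs were dropped from DSVideoDeviceControl and DirectShowPlayerService above. A hedged RAII sketch of the same lifetime rule, shown only as an illustration of the idea rather than the approach the patch uses:

#include <objbase.h>

// Scoped COM initialization: COM stays initialized for exactly as long as
// one of these objects is alive, mirroring the addRefCount()/releaseRefCount()
// pairing around each service's lifetime.
class ScopedComInit
{
public:
    ScopedComInit() : m_ok(SUCCEEDED(CoInitialize(NULL))) {}
    ~ScopedComInit() { if (m_ok) CoUninitialize(); }
    bool isOk() const { return m_ok; }
private:
    bool m_ok;
    ScopedComInit(const ScopedComInit &);            // non-copyable
    ScopedComInit &operator=(const ScopedComInit &);
};

// Usage sketch: keep one member per COM-using service object, e.g.
// class SomeDirectShowService { ScopedComInit m_com; /* ... */ };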
View File
@@ -111,7 +111,6 @@ DirectShowPlayerService::DirectShowPlayerService(QObject *parent)
, m_seekable(false) , m_seekable(false)
, m_atEnd(false) , m_atEnd(false)
{ {
CoInitialize(NULL);
m_playerControl = new DirectShowPlayerControl(this); m_playerControl = new DirectShowPlayerControl(this);
m_metaDataControl = new DirectShowMetaDataControl(this); m_metaDataControl = new DirectShowMetaDataControl(this);
m_audioEndpointControl = new DirectShowAudioEndpointControl(this); m_audioEndpointControl = new DirectShowAudioEndpointControl(this);
@@ -153,7 +152,6 @@ DirectShowPlayerService::~DirectShowPlayerService()
#endif #endif
::CloseHandle(m_taskHandle); ::CloseHandle(m_taskHandle);
CoUninitialize();
} }
QMediaControl *DirectShowPlayerService::requestControl(const char *name) QMediaControl *DirectShowPlayerService::requestControl(const char *name)
View File
@@ -1,6 +1,6 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -54,10 +54,6 @@ QT_BEGIN_NAMESPACE
const int PeriodTimeMs = 50; const int PeriodTimeMs = 50;
// Map from void* (for userdata) to QPulseAudioInput instance
// protected by pulse mainloop lock
QMap<void *, QPulseAudioInput*> QPulseAudioInput::s_inputsMap;
static void inputStreamReadCallback(pa_stream *stream, size_t length, void *userdata) static void inputStreamReadCallback(pa_stream *stream, size_t length, void *userdata)
{ {
Q_UNUSED(userdata); Q_UNUSED(userdata);
@@ -136,8 +132,8 @@ void QPulseAudioInput::sourceInfoCallback(pa_context *context, const pa_source_i
Q_UNUSED(eol); Q_UNUSED(eol);
Q_ASSERT(userdata); Q_ASSERT(userdata);
QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata); if (i) {
if (that && i) { QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);
that->m_volume = pa_sw_volume_to_linear(pa_cvolume_avg(&i->volume)); that->m_volume = pa_sw_volume_to_linear(pa_cvolume_avg(&i->volume));
} }
} }
@@ -149,12 +145,11 @@ void QPulseAudioInput::inputVolumeCallback(pa_context *context, int success, voi
if (!success) if (!success)
qWarning() << "QAudioInput: failed to set input volume"; qWarning() << "QAudioInput: failed to set input volume";
QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata); QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);
// Regardless of success or failure, we update the volume property // Regardless of success or failure, we update the volume property
if (that && that->m_stream) { if (that->m_stream)
pa_context_get_source_info_by_index(context, pa_stream_get_device_index(that->m_stream), sourceInfoCallback, userdata); pa_context_get_source_info_by_index(context, pa_stream_get_device_index(that->m_stream), sourceInfoCallback, userdata);
}
} }
QPulseAudioInput::QPulseAudioInput(const QByteArray &device) QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
@@ -175,31 +170,39 @@ QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
{ {
m_timer = new QTimer(this); m_timer = new QTimer(this);
connect(m_timer, SIGNAL(timeout()), SLOT(userFeed())); connect(m_timer, SIGNAL(timeout()), SLOT(userFeed()));
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
s_inputsMap.insert(this, this);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
} }
QPulseAudioInput::~QPulseAudioInput() QPulseAudioInput::~QPulseAudioInput()
{ {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
s_inputsMap.remove(this);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
close(); close();
disconnect(m_timer, SIGNAL(timeout())); disconnect(m_timer, SIGNAL(timeout()));
QCoreApplication::processEvents(); QCoreApplication::processEvents();
delete m_timer; delete m_timer;
} }
void QPulseAudioInput::setError(QAudio::Error error)
{
if (m_errorState == error)
return;
m_errorState = error;
emit errorChanged(error);
}
QAudio::Error QPulseAudioInput::error() const QAudio::Error QPulseAudioInput::error() const
{ {
return m_errorState; return m_errorState;
} }
void QPulseAudioInput::setState(QAudio::State state)
{
if (m_deviceState == state)
return;
m_deviceState = state;
emit stateChanged(state);
}
QAudio::State QPulseAudioInput::state() const QAudio::State QPulseAudioInput::state() const
{ {
return m_deviceState; return m_deviceState;
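The new setError()/setState() helpers centralize the compare-then-emit logic, so the call sites rewritten in the hunks below can set state unconditionally without producing duplicate signals. A self-contained sketch of the same guard-then-emit pattern; StateTracker is an illustrative class, not part of the plugin:

#include <QObject>
#include <QtMultimedia/qaudio.h>

class StateTracker : public QObject
{
    Q_OBJECT
public:
    explicit StateTracker(QObject *parent = 0)
        : QObject(parent), m_state(QAudio::StoppedState) {}

    QAudio::State state() const { return m_state; }

    void setState(QAudio::State state)
    {
        if (m_state == state)
            return;                    // no change: skip the redundant emission
        m_state = state;
        emit stateChanged(m_state);    // emitted exactly once per real transition
    }

Q_SIGNALS:
    void stateChanged(QAudio::State state);

private:
    QAudio::State m_state;
};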
@@ -218,41 +221,45 @@ QAudioFormat QPulseAudioInput::format() const
void QPulseAudioInput::start(QIODevice *device) void QPulseAudioInput::start(QIODevice *device)
{ {
if (m_deviceState != QAudio::StoppedState) setState(QAudio::StoppedState);
close(); setError(QAudio::NoError);
if (!m_pullMode && m_audioSource) if (!m_pullMode && m_audioSource) {
delete m_audioSource; delete m_audioSource;
m_audioSource = 0;
}
m_pullMode = true; close();
m_audioSource = device;
m_deviceState = QAudio::ActiveState;
if (!open()) if (!open())
return; return;
emit stateChanged(m_deviceState); m_pullMode = true;
m_audioSource = device;
setState(QAudio::ActiveState);
} }
QIODevice *QPulseAudioInput::start() QIODevice *QPulseAudioInput::start()
{ {
if (m_deviceState != QAudio::StoppedState) setState(QAudio::StoppedState);
close(); setError(QAudio::NoError);
if (!m_pullMode && m_audioSource) if (!m_pullMode && m_audioSource) {
delete m_audioSource; delete m_audioSource;
m_audioSource = 0;
}
close();
if (!open())
return Q_NULLPTR;
m_pullMode = false; m_pullMode = false;
m_audioSource = new InputPrivate(this); m_audioSource = new InputPrivate(this);
m_audioSource->open(QIODevice::ReadOnly | QIODevice::Unbuffered); m_audioSource->open(QIODevice::ReadOnly | QIODevice::Unbuffered);
m_deviceState = QAudio::IdleState; setState(QAudio::IdleState);
if (!open())
return 0;
emit stateChanged(m_deviceState);
return m_audioSource; return m_audioSource;
} }
@@ -262,40 +269,43 @@ void QPulseAudioInput::stop()
if (m_deviceState == QAudio::StoppedState) if (m_deviceState == QAudio::StoppedState)
return; return;
m_errorState = QAudio::NoError;
m_deviceState = QAudio::StoppedState;
close(); close();
emit stateChanged(m_deviceState);
setError(QAudio::NoError);
setState(QAudio::StoppedState);
} }
bool QPulseAudioInput::open() bool QPulseAudioInput::open()
{ {
if (m_opened) if (m_opened)
return true;
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
return false; return false;
}
pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);
if (!pa_sample_spec_valid(&spec)) {
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false;
}
m_spec = spec;
#ifdef DEBUG_PULSE #ifdef DEBUG_PULSE
// QTime now(QTime::currentTime()); // QTime now(QTime::currentTime());
// qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()"; // qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
#endif #endif
m_clockStamp.restart();
m_timeStamp.restart();
m_elapsedTimeOffset = 0;
if (m_streamName.isNull()) if (m_streamName.isNull())
m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8(); m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);
if (!pa_sample_spec_valid(&spec)) {
m_errorState = QAudio::OpenError;
m_deviceState = QAudio::StoppedState;
emit stateChanged(m_deviceState);
return false;
}
m_spec = spec;
#ifdef DEBUG_PULSE #ifdef DEBUG_PULSE
qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format); qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
qDebug() << "Rate: " << spec.rate; qDebug() << "Rate: " << spec.rate;
@@ -303,15 +313,13 @@ bool QPulseAudioInput::open()
qDebug() << "Frame size: " << pa_frame_size(&spec); qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif #endif
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); pulseEngine->lock();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pa_channel_map channel_map; pa_channel_map channel_map;
pa_channel_map_init_extend(&channel_map, spec.channels, PA_CHANNEL_MAP_DEFAULT); pa_channel_map_init_extend(&channel_map, spec.channels, PA_CHANNEL_MAP_DEFAULT);
if (!pa_channel_map_compatible(&channel_map, &spec)) { if (!pa_channel_map_compatible(&channel_map, &spec))
qWarning() << "Channel map doesn't match sample specification!"; qWarning() << "Channel map doesn't match sample specification!";
}
m_stream = pa_stream_new(pulseEngine->context(), m_streamName.constData(), &spec, &channel_map); m_stream = pa_stream_new(pulseEngine->context(), m_streamName.constData(), &spec, &channel_map);
@@ -338,13 +346,16 @@ bool QPulseAudioInput::open()
if (pa_stream_connect_record(m_stream, m_device.data(), &buffer_attr, (pa_stream_flags_t)flags) < 0) { if (pa_stream_connect_record(m_stream, m_device.data(), &buffer_attr, (pa_stream_flags_t)flags) < 0) {
qWarning() << "pa_stream_connect_record() failed!"; qWarning() << "pa_stream_connect_record() failed!";
m_errorState = QAudio::FatalError; pa_stream_unref(m_stream);
m_stream = 0;
pulseEngine->unlock();
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false; return false;
} }
while (pa_stream_get_state(m_stream) != PA_STREAM_READY) { while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop()); pa_threaded_mainloop_wait(pulseEngine->mainloop());
}
const pa_buffer_attr *actualBufferAttr = pa_stream_get_buffer_attr(m_stream); const pa_buffer_attr *actualBufferAttr = pa_stream_get_buffer_attr(m_stream);
m_periodSize = actualBufferAttr->fragsize; m_periodSize = actualBufferAttr->fragsize;
@@ -354,12 +365,16 @@ bool QPulseAudioInput::open()
setPulseVolume(); setPulseVolume();
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);
m_opened = true; m_opened = true;
m_timer->start(m_periodTime); m_timer->start(m_periodTime);
m_errorState = QAudio::NoError;
m_clockStamp.restart();
m_timeStamp.restart();
m_elapsedTimeOffset = 0;
m_totalTimeValue = 0; m_totalTimeValue = 0;
return true; return true;
@@ -367,21 +382,30 @@ bool QPulseAudioInput::open()
void QPulseAudioInput::close() void QPulseAudioInput::close()
{ {
if (!m_opened)
return;
m_timer->stop(); m_timer->stop();
if (m_stream) { QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
if (m_stream) {
pulseEngine->lock();
pa_stream_set_state_callback(m_stream, 0, 0);
pa_stream_set_read_callback(m_stream, 0, 0); pa_stream_set_read_callback(m_stream, 0, 0);
pa_stream_set_underflow_callback(m_stream, 0, 0);
pa_stream_set_overflow_callback(m_stream, 0, 0);
pa_stream_disconnect(m_stream); pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream); pa_stream_unref(m_stream);
m_stream = 0; m_stream = 0;
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);
if (!m_pullMode && m_audioSource) { if (!m_pullMode && m_audioSource) {
delete m_audioSource; delete m_audioSource;
m_audioSource = 0; m_audioSource = 0;
@@ -393,6 +417,7 @@ void QPulseAudioInput::close()
void QPulseAudioInput::setPulseVolume() void QPulseAudioInput::setPulseVolume()
{ {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
Q_ASSERT(pulseEngine->context() != 0);
pa_cvolume cvolume; pa_cvolume cvolume;
@@ -434,11 +459,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
{ {
m_bytesAvailable = checkBytesReady(); m_bytesAvailable = checkBytesReady();
if (m_deviceState != QAudio::ActiveState) { setError(QAudio::NoError);
m_errorState = QAudio::NoError; setState(QAudio::ActiveState);
m_deviceState = QAudio::ActiveState;
emit stateChanged(m_deviceState);
}
int readBytes = 0; int readBytes = 0;
@@ -463,7 +485,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
#endif #endif
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
const void *audioBuffer; const void *audioBuffer;
// Second and third parameters (audioBuffer and length) to pa_stream_peek are output parameters, // Second and third parameters (audioBuffer and length) to pa_stream_peek are output parameters,
@@ -471,7 +494,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
// and the length is set to the length of this data. // and the length is set to the length of this data.
if (pa_stream_peek(m_stream, &audioBuffer, &readLength) < 0) { if (pa_stream_peek(m_stream, &audioBuffer, &readLength) < 0) {
qWarning() << QString("pa_stream_peek() failed: %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream)))); qWarning() << QString("pa_stream_peek() failed: %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
return 0; return 0;
} }
@@ -480,11 +503,10 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
actualLength = m_audioSource->write(static_cast<const char *>(audioBuffer), readLength); actualLength = m_audioSource->write(static_cast<const char *>(audioBuffer), readLength);
if (actualLength < qint64(readLength)) { if (actualLength < qint64(readLength)) {
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
m_errorState = QAudio::UnderrunError; setError(QAudio::UnderrunError);
m_deviceState = QAudio::IdleState; setState(QAudio::IdleState);
emit stateChanged(m_deviceState);
return actualLength; return actualLength;
} }
@@ -509,7 +531,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
readBytes += actualLength; readBytes += actualLength;
pa_stream_drop(m_stream); pa_stream_drop(m_stream);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
if (!m_pullMode && readBytes >= len) if (!m_pullMode && readBytes >= len)
break; break;
@@ -534,22 +556,18 @@ void QPulseAudioInput::resume()
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation; pa_operation *operation;
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
operation = pa_stream_cork(m_stream, 0, inputStreamSuccessCallback, 0); operation = pa_stream_cork(m_stream, 0, inputStreamSuccessCallback, 0);
pulseEngine->wait(operation);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
m_timer->start(m_periodTime); m_timer->start(m_periodTime);
m_deviceState = QAudio::ActiveState; setState(QAudio::ActiveState);
setError(QAudio::NoError);
emit stateChanged(m_deviceState);
} }
} }
@@ -557,23 +575,23 @@ void QPulseAudioInput::setVolume(qreal vol)
{ {
if (vol >= 0.0 && vol <= 1.0) { if (vol >= 0.0 && vol <= 1.0) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
if (!qFuzzyCompare(m_volume, vol)) { if (!qFuzzyCompare(m_volume, vol)) {
m_volume = vol; m_volume = vol;
if (m_opened) { if (m_opened) {
setPulseVolume(); setPulseVolume();
} }
} }
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
} }
qreal QPulseAudioInput::volume() const qreal QPulseAudioInput::volume() const
{ {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
qreal vol = m_volume; qreal vol = m_volume;
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
return vol; return vol;
} }
@@ -614,23 +632,21 @@ qint64 QPulseAudioInput::processedUSecs() const
void QPulseAudioInput::suspend() void QPulseAudioInput::suspend()
{ {
if (m_deviceState == QAudio::ActiveState) { if (m_deviceState == QAudio::ActiveState) {
setError(QAudio::NoError);
setState(QAudio::SuspendedState);
m_timer->stop(); m_timer->stop();
m_deviceState = QAudio::SuspendedState;
emit stateChanged(m_deviceState);
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation; pa_operation *operation;
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
operation = pa_stream_cork(m_stream, 1, inputStreamSuccessCallback, 0); operation = pa_stream_cork(m_stream, 1, inputStreamSuccessCallback, 0);
pulseEngine->wait(operation);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
} }
@@ -685,6 +701,14 @@ void QPulseAudioInput::reset()
m_bytesAvailable = 0; m_bytesAvailable = 0;
} }
void QPulseAudioInput::onPulseContextFailed()
{
close();
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
}
InputPrivate::InputPrivate(QPulseAudioInput *audio) InputPrivate::InputPrivate(QPulseAudioInput *audio)
{ {
m_audioDevice = qobject_cast<QPulseAudioInput*>(audio); m_audioDevice = qobject_cast<QPulseAudioInput*>(audio);
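Throughout the rewritten open(), failures are reported early: a missing or non-ready Pulse context maps to QAudio::FatalError, an invalid sample spec to QAudio::OpenError, both before any stream is created. A standalone sketch of the sample-spec check; the concrete format values are assumptions for illustration only:

#include <pulse/sample.h>
#include <cstdio>

// Build a sample spec and bail out if PulseAudio cannot represent it,
// mirroring the early pa_sample_spec_valid() check in open().
bool specIsUsable()
{
    pa_sample_spec spec;
    spec.format   = PA_SAMPLE_S16LE;   // assumed: 16-bit signed little-endian
    spec.rate     = 44100;             // assumed: 44.1 kHz
    spec.channels = 2;                 // assumed: stereo

    if (!pa_sample_spec_valid(&spec)) {
        std::fprintf(stderr, "unsupported sample spec\n");
        return false;                  // the plugin maps this to QAudio::OpenError
    }
    return true;
}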
View File
@@ -1,6 +1,6 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -112,8 +112,12 @@ public:
private slots: private slots:
void userFeed(); void userFeed();
bool deviceReady(); bool deviceReady();
void onPulseContextFailed();
private: private:
void setState(QAudio::State state);
void setError(QAudio::Error error);
int checkBytesReady(); int checkBytesReady();
bool open(); bool open();
void close(); void close();
View File
@@ -1,6 +1,6 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -170,11 +170,29 @@ QPulseAudioOutput::~QPulseAudioOutput()
QCoreApplication::processEvents(); QCoreApplication::processEvents();
} }
void QPulseAudioOutput::setError(QAudio::Error error)
{
if (m_errorState == error)
return;
m_errorState = error;
emit errorChanged(error);
}
QAudio::Error QPulseAudioOutput::error() const QAudio::Error QPulseAudioOutput::error() const
{ {
return m_errorState; return m_errorState;
} }
void QPulseAudioOutput::setState(QAudio::State state)
{
if (m_deviceState == state)
return;
m_deviceState = state;
emit stateChanged(state);
}
QAudio::State QPulseAudioOutput::state() const QAudio::State QPulseAudioOutput::state() const
{ {
return m_deviceState; return m_deviceState;
@@ -183,19 +201,15 @@ QAudio::State QPulseAudioOutput::state() const
void QPulseAudioOutput::streamUnderflowCallback() void QPulseAudioOutput::streamUnderflowCallback()
{ {
if (m_deviceState != QAudio::IdleState && !m_resuming) { if (m_deviceState != QAudio::IdleState && !m_resuming) {
m_errorState = QAudio::UnderrunError; setError(QAudio::UnderrunError);
emit errorChanged(m_errorState); setState(QAudio::IdleState);
m_deviceState = QAudio::IdleState;
emit stateChanged(m_deviceState);
} }
} }
void QPulseAudioOutput::start(QIODevice *device) void QPulseAudioOutput::start(QIODevice *device)
{ {
if (m_deviceState != QAudio::StoppedState) setState(QAudio::StoppedState);
m_deviceState = QAudio::StoppedState; setError(QAudio::NoError);
m_errorState = QAudio::NoError;
// Handle change of mode // Handle change of mode
if (m_audioSource && !m_pullMode) { if (m_audioSource && !m_pullMode) {
@@ -205,22 +219,19 @@ void QPulseAudioOutput::start(QIODevice *device)
close(); close();
if (!open())
return;
m_pullMode = true; m_pullMode = true;
m_audioSource = device; m_audioSource = device;
m_deviceState = QAudio::ActiveState; setState(QAudio::ActiveState);
open();
emit stateChanged(m_deviceState);
} }
QIODevice *QPulseAudioOutput::start() QIODevice *QPulseAudioOutput::start()
{ {
if (m_deviceState != QAudio::StoppedState) setState(QAudio::StoppedState);
m_deviceState = QAudio::StoppedState; setError(QAudio::NoError);
m_errorState = QAudio::NoError;
// Handle change of mode // Handle change of mode
if (m_audioSource && !m_pullMode) { if (m_audioSource && !m_pullMode) {
@@ -230,15 +241,14 @@ QIODevice *QPulseAudioOutput::start()
close(); close();
if (!open())
return Q_NULLPTR;
m_audioSource = new OutputPrivate(this); m_audioSource = new OutputPrivate(this);
m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered); m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered);
m_pullMode = false; m_pullMode = false;
m_deviceState = QAudio::IdleState; setState(QAudio::IdleState);
open();
emit stateChanged(m_deviceState);
return m_audioSource; return m_audioSource;
} }
@@ -246,33 +256,38 @@ QIODevice *QPulseAudioOutput::start()
bool QPulseAudioOutput::open() bool QPulseAudioOutput::open()
{ {
if (m_opened) if (m_opened)
return true;
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
return false; return false;
}
pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format); pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);
if (!pa_sample_spec_valid(&spec)) { if (!pa_sample_spec_valid(&spec)) {
m_errorState = QAudio::OpenError; setError(QAudio::OpenError);
m_deviceState = QAudio::StoppedState; setState(QAudio::StoppedState);
return false; return false;
} }
m_spec = spec; m_spec = spec;
m_totalTimeValue = 0; m_totalTimeValue = 0;
m_elapsedTimeOffset = 0;
m_timeStamp.restart();
if (m_streamName.isNull()) if (m_streamName.isNull())
m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8(); m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
#ifdef DEBUG_PULSE #ifdef DEBUG_PULSE
qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format); qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
qDebug() << "Rate: " << spec.rate; qDebug() << "Rate: " << spec.rate;
qDebug() << "Channels: " << spec.channels; qDebug() << "Channels: " << spec.channels;
qDebug() << "Frame size: " << pa_frame_size(&spec); qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif #endif
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); pulseEngine->lock();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
qint64 bytesPerSecond = m_format.sampleRate() * m_format.channelCount() * m_format.sampleSize() / 8; qint64 bytesPerSecond = m_format.sampleRate() * m_format.channelCount() * m_format.sampleSize() / 8;
@@ -280,7 +295,7 @@ bool QPulseAudioOutput::open()
if (!m_category.isNull()) if (!m_category.isNull())
pa_proplist_sets(propList, PA_PROP_MEDIA_ROLE, m_category.toLatin1().constData()); pa_proplist_sets(propList, PA_PROP_MEDIA_ROLE, m_category.toLatin1().constData());
m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &spec, 0, propList); m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &m_spec, 0, propList);
pa_proplist_free(propList); pa_proplist_free(propList);
pa_stream_set_state_callback(m_stream, outputStreamStateCallback, this); pa_stream_set_state_callback(m_stream, outputStreamStateCallback, this);
@@ -312,15 +327,20 @@ bool QPulseAudioOutput::open()
if (pa_stream_connect_playback(m_stream, m_device.data(), (m_bufferSize > 0) ? &requestedBuffer : NULL, (pa_stream_flags_t)0, &m_chVolume, NULL) < 0) { if (pa_stream_connect_playback(m_stream, m_device.data(), (m_bufferSize > 0) ? &requestedBuffer : NULL, (pa_stream_flags_t)0, &m_chVolume, NULL) < 0) {
qWarning() << "pa_stream_connect_playback() failed!"; qWarning() << "pa_stream_connect_playback() failed!";
pa_stream_unref(m_stream);
m_stream = 0;
pulseEngine->unlock();
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false; return false;
} }
while (pa_stream_get_state(m_stream) != PA_STREAM_READY) { while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop()); pa_threaded_mainloop_wait(pulseEngine->mainloop());
}
const pa_buffer_attr *buffer = pa_stream_get_buffer_attr(m_stream); const pa_buffer_attr *buffer = pa_stream_get_buffer_attr(m_stream);
m_periodTime = (m_category == LOW_LATENCY_CATEGORY_NAME) ? LowLatencyPeriodTimeMs : PeriodTimeMs; m_periodTime = (m_category == LOW_LATENCY_CATEGORY_NAME) ? LowLatencyPeriodTimeMs : PeriodTimeMs;
m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &spec); m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &m_spec);
m_bufferSize = buffer->tlength; m_bufferSize = buffer->tlength;
m_maxBufferSize = buffer->maxlength; m_maxBufferSize = buffer->maxlength;
m_audioBuffer = new char[m_maxBufferSize]; m_audioBuffer = new char[m_maxBufferSize];
@@ -333,9 +353,12 @@ bool QPulseAudioOutput::open()
qDebug() << "\tFragment size: " << buffer->fragsize; qDebug() << "\tFragment size: " << buffer->fragsize;
#endif #endif
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);
m_opened = true; m_opened = true;
m_tickTimer->start(m_periodTime); m_tickTimer->start(m_periodTime);
m_elapsedTimeOffset = 0; m_elapsedTimeOffset = 0;
@@ -347,28 +370,35 @@ bool QPulseAudioOutput::open()
void QPulseAudioOutput::close() void QPulseAudioOutput::close()
{ {
if (!m_opened)
return;
m_tickTimer->stop(); m_tickTimer->stop();
if (m_stream) { QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pa_stream_set_write_callback(m_stream, NULL, NULL); if (m_stream) {
pulseEngine->lock();
pa_stream_set_state_callback(m_stream, 0, 0);
pa_stream_set_write_callback(m_stream, 0, 0);
pa_stream_set_underflow_callback(m_stream, 0, 0);
pa_stream_set_overflow_callback(m_stream, 0, 0);
pa_stream_set_latency_update_callback(m_stream, 0, 0);
pa_operation *o = pa_stream_drain(m_stream, outputStreamDrainComplete, NULL); pa_operation *o = pa_stream_drain(m_stream, outputStreamDrainComplete, NULL);
if (!o) { if (o)
qWarning() << QString("pa_stream_drain(): %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
} else {
pa_operation_unref(o); pa_operation_unref(o);
}
pa_stream_disconnect(m_stream); pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream); pa_stream_unref(m_stream);
m_stream = NULL; m_stream = NULL;
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);
if (!m_pullMode && m_audioSource) { if (!m_pullMode && m_audioSource) {
delete m_audioSource; delete m_audioSource;
m_audioSource = 0; m_audioSource = 0;
@@ -430,17 +460,14 @@ qint64 QPulseAudioOutput::write(const char *data, qint64 len)
{ {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream))); len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream)));
pa_stream_write(m_stream, data, len, 0, 0, PA_SEEK_RELATIVE); pa_stream_write(m_stream, data, len, 0, 0, PA_SEEK_RELATIVE);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
m_totalTimeValue += len; m_totalTimeValue += len;
m_errorState = QAudio::NoError; setError(QAudio::NoError);
if (m_deviceState != QAudio::ActiveState) { setState(QAudio::ActiveState);
m_deviceState = QAudio::ActiveState;
emit stateChanged(m_deviceState);
}
return len; return len;
} }
@@ -450,10 +477,10 @@ void QPulseAudioOutput::stop()
if (m_deviceState == QAudio::StoppedState) if (m_deviceState == QAudio::StoppedState)
return; return;
m_errorState = QAudio::NoError;
m_deviceState = QAudio::StoppedState;
close(); close();
emit stateChanged(m_deviceState);
setError(QAudio::NoError);
setState(QAudio::StoppedState);
} }
int QPulseAudioOutput::bytesFree() const int QPulseAudioOutput::bytesFree() const
@@ -462,9 +489,9 @@ int QPulseAudioOutput::bytesFree() const
return 0; return 0;
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
int writableSize = pa_stream_writable_size(m_stream); int writableSize = pa_stream_writable_size(m_stream);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
return writableSize; return writableSize;
} }
@@ -509,30 +536,22 @@ void QPulseAudioOutput::resume()
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
pa_operation *operation = pa_stream_cork(m_stream, 0, outputStreamSuccessCallback, NULL); pa_operation *operation = pa_stream_cork(m_stream, 0, outputStreamSuccessCallback, NULL);
pulseEngine->wait(operation);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
pa_operation_unref(operation); pa_operation_unref(operation);
operation = pa_stream_trigger(m_stream, outputStreamSuccessCallback, NULL); operation = pa_stream_trigger(m_stream, outputStreamSuccessCallback, NULL);
pulseEngine->wait(operation);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
m_deviceState = QAudio::ActiveState;
m_errorState = QAudio::NoError;
m_tickTimer->start(m_periodTime); m_tickTimer->start(m_periodTime);
emit stateChanged(m_deviceState); setState(QAudio::ActiveState);
setError(QAudio::NoError);
} }
} }
@@ -549,24 +568,21 @@ QAudioFormat QPulseAudioOutput::format() const
void QPulseAudioOutput::suspend() void QPulseAudioOutput::suspend()
{ {
if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) { if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) {
setError(QAudio::NoError);
setState(QAudio::SuspendedState);
m_tickTimer->stop(); m_tickTimer->stop();
m_deviceState = QAudio::SuspendedState;
m_errorState = QAudio::NoError;
emit stateChanged(m_deviceState);
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation; pa_operation *operation;
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
operation = pa_stream_cork(m_stream, 1, outputStreamSuccessCallback, NULL); operation = pa_stream_cork(m_stream, 1, outputStreamSuccessCallback, NULL);
pulseEngine->wait(operation);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
} }
@@ -601,8 +617,8 @@ qint64 OutputPrivate::writeData(const char *data, qint64 len)
int retry = 0; int retry = 0;
qint64 written = 0; qint64 written = 0;
if ((m_audioDevice->m_deviceState == QAudio::ActiveState) if ((m_audioDevice->m_deviceState == QAudio::ActiveState
||(m_audioDevice->m_deviceState == QAudio::IdleState)) { || m_audioDevice->m_deviceState == QAudio::IdleState)) {
while(written < len) { while(written < len) {
int chunk = m_audioDevice->write(data+written, (len-written)); int chunk = m_audioDevice->write(data+written, (len-written));
if (chunk <= 0) if (chunk <= 0)
@@ -623,7 +639,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
m_volume = vol; m_volume = vol;
if (m_opened) { if (m_opened) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance(); QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop()); pulseEngine->lock();
pa_volume_t paVolume; pa_volume_t paVolume;
if (qFuzzyCompare(vol, 0.0)) { if (qFuzzyCompare(vol, 0.0)) {
pa_cvolume_mute(&m_chVolume, m_spec.channels); pa_cvolume_mute(&m_chVolume, m_spec.channels);
@@ -641,7 +657,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
qWarning()<<"QAudioOutput: Failed to set volume"; qWarning()<<"QAudioOutput: Failed to set volume";
else else
pa_operation_unref(op); pa_operation_unref(op);
pa_threaded_mainloop_unlock(pulseEngine->mainloop()); pulseEngine->unlock();
} }
} }
} }
@@ -664,6 +680,14 @@ QString QPulseAudioOutput::category() const
return m_category; return m_category;
} }
void QPulseAudioOutput::onPulseContextFailed()
{
close();
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
}
QT_END_NAMESPACE QT_END_NAMESPACE
#include "moc_qaudiooutput_pulse.cpp" #include "moc_qaudiooutput_pulse.cpp"
View File
@@ -1,6 +1,6 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -105,12 +105,16 @@ public:
void streamUnderflowCallback(); void streamUnderflowCallback();
private: private:
void setState(QAudio::State state);
void setError(QAudio::Error error);
bool open(); bool open();
void close(); void close();
qint64 write(const char *data, qint64 len); qint64 write(const char *data, qint64 len);
private Q_SLOTS: private Q_SLOTS:
void userFeed(); void userFeed();
void onPulseContextFailed();
private: private:
QByteArray m_device; QByteArray m_device;

View File
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -170,15 +170,17 @@ static void contextStateCallbackInit(pa_context *context, void *userdata)
pa_threaded_mainloop_signal(pulseEngine->mainloop(), 0); pa_threaded_mainloop_signal(pulseEngine->mainloop(), 0);
} }
static void contextStateCallback(pa_context *context, void *userdata) static void contextStateCallback(pa_context *c, void *userdata)
{ {
Q_UNUSED(userdata); QPulseAudioEngine *self = reinterpret_cast<QPulseAudioEngine*>(userdata);
Q_UNUSED(context); pa_context_state_t state = pa_context_get_state(c);
#ifdef DEBUG_PULSE #ifdef DEBUG_PULSE
pa_context_state_t state = pa_context_get_state(context);
qDebug() << QPulseAudioInternal::stateToQString(state); qDebug() << QPulseAudioInternal::stateToQString(state);
#endif #endif
if (state == PA_CONTEXT_FAILED)
QMetaObject::invokeMethod(self, "onContextFailed", Qt::QueuedConnection);
} }
Q_GLOBAL_STATIC(QPulseAudioEngine, pulseEngine); Q_GLOBAL_STATIC(QPulseAudioEngine, pulseEngine);
@@ -187,40 +189,59 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
: QObject(parent) : QObject(parent)
, m_mainLoopApi(0) , m_mainLoopApi(0)
, m_context(0) , m_context(0)
, m_prepared(false)
{
prepare();
}
QPulseAudioEngine::~QPulseAudioEngine()
{
if (m_prepared)
release();
}
void QPulseAudioEngine::prepare()
{ {
bool keepGoing = true; bool keepGoing = true;
bool ok = true; bool ok = true;
m_mainLoop = pa_threaded_mainloop_new(); m_mainLoop = pa_threaded_mainloop_new();
if (m_mainLoop == 0) { if (m_mainLoop == 0) {
qWarning("Unable to create pulseaudio mainloop"); qWarning("PulseAudioService: unable to create pulseaudio mainloop");
return; return;
} }
if (pa_threaded_mainloop_start(m_mainLoop) != 0) { if (pa_threaded_mainloop_start(m_mainLoop) != 0) {
qWarning("Unable to start pulseaudio mainloop"); qWarning("PulseAudioService: unable to start pulseaudio mainloop");
pa_threaded_mainloop_free(m_mainLoop); pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
return; return;
} }
m_mainLoopApi = pa_threaded_mainloop_get_api(m_mainLoop); m_mainLoopApi = pa_threaded_mainloop_get_api(m_mainLoop);
pa_threaded_mainloop_lock(m_mainLoop); lock();
m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtmPulseContext:%1")).arg(::getpid()).toLatin1().constData()); m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtPulseAudio:%1")).arg(::getpid()).toLatin1().constData());
pa_context_set_state_callback(m_context, contextStateCallbackInit, this);
if (!m_context) { if (m_context == 0) {
qWarning("Unable to create new pulseaudio context"); qWarning("PulseAudioService: Unable to create new pulseaudio context");
pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop); pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
onContextFailed();
return; return;
} }
if (pa_context_connect(m_context, NULL, (pa_context_flags_t)0, NULL) < 0) { pa_context_set_state_callback(m_context, contextStateCallbackInit, this);
qWarning("Unable to create a connection to the pulseaudio context");
if (pa_context_connect(m_context, 0, (pa_context_flags_t)0, 0) < 0) {
qWarning("PulseAudioService: pa_context_connect() failed");
pa_context_unref(m_context); pa_context_unref(m_context);
pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop); pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
m_context = 0;
return; return;
} }
@@ -241,47 +262,49 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
break; break;
case PA_CONTEXT_TERMINATED: case PA_CONTEXT_TERMINATED:
qCritical("Context terminated."); qCritical("PulseAudioService: Context terminated.");
keepGoing = false; keepGoing = false;
ok = false; ok = false;
break; break;
case PA_CONTEXT_FAILED: case PA_CONTEXT_FAILED:
default: default:
qCritical() << QString("Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context))); qCritical() << QString("PulseAudioService: Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context)));
keepGoing = false; keepGoing = false;
ok = false; ok = false;
} }
if (keepGoing) { if (keepGoing)
pa_threaded_mainloop_wait(m_mainLoop); pa_threaded_mainloop_wait(m_mainLoop);
}
} }
if (ok) { if (ok) {
pa_context_set_state_callback(m_context, contextStateCallback, this); pa_context_set_state_callback(m_context, contextStateCallback, this);
} else { } else {
if (m_context) { pa_context_unref(m_context);
pa_context_unref(m_context); m_context = 0;
m_context = 0;
}
} }
pa_threaded_mainloop_unlock(m_mainLoop); unlock();
if (ok) { if (ok) {
serverInfo(); updateDevices();
sinks(); m_prepared = true;
sources(); } else {
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
onContextFailed();
} }
} }
QPulseAudioEngine::~QPulseAudioEngine() void QPulseAudioEngine::release()
{ {
if (!m_prepared)
return;
if (m_context) { if (m_context) {
pa_threaded_mainloop_lock(m_mainLoop);
pa_context_disconnect(m_context); pa_context_disconnect(m_context);
pa_threaded_mainloop_unlock(m_mainLoop); pa_context_unref(m_context);
m_context = 0; m_context = 0;
} }
@@ -290,64 +313,54 @@ QPulseAudioEngine::~QPulseAudioEngine()
pa_threaded_mainloop_free(m_mainLoop); pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0; m_mainLoop = 0;
} }
m_prepared = false;
} }
void QPulseAudioEngine::serverInfo() void QPulseAudioEngine::updateDevices()
{ {
pa_operation *operation; lock();
pa_threaded_mainloop_lock(m_mainLoop);
operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
// Get default input and output devices
pa_operation *operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING) while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop); pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(m_mainLoop); // Get output devices
}
void QPulseAudioEngine::sinks()
{
pa_operation *operation;
pa_threaded_mainloop_lock(m_mainLoop);
operation = pa_context_get_sink_info_list(m_context, sinkInfoCallback, this); operation = pa_context_get_sink_info_list(m_context, sinkInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING) while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop); pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation); pa_operation_unref(operation);
pa_threaded_mainloop_unlock(m_mainLoop); // Get input devices
operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation);
// Swap the default sink to index 0 unlock();
// Swap the default output to index 0
m_sinks.removeOne(m_defaultSink); m_sinks.removeOne(m_defaultSink);
m_sinks.prepend(m_defaultSink); m_sinks.prepend(m_defaultSink);
}
void QPulseAudioEngine::sources() // Swap the default input to index 0
{
pa_operation *operation;
pa_threaded_mainloop_lock(m_mainLoop);
operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation);
pa_threaded_mainloop_unlock(m_mainLoop);
// Swap the default source to index 0
m_sources.removeOne(m_defaultSource); m_sources.removeOne(m_defaultSource);
m_sources.prepend(m_defaultSource); m_sources.prepend(m_defaultSource);
} }
void QPulseAudioEngine::onContextFailed()
{
// Give a chance to the connected slots to still use the Pulse main loop before releasing it.
emit contextFailed();
release();
// Try to reconnect later
QTimer::singleShot(3000, this, SLOT(prepare()));
}
QPulseAudioEngine *QPulseAudioEngine::instance() QPulseAudioEngine *QPulseAudioEngine::instance()
{ {
return pulseEngine(); return pulseEngine();
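When the context reports PA_CONTEXT_FAILED, the engine now emits contextFailed() so inputs and outputs can detach, releases the main loop, and schedules prepare() again three seconds later. A compact sketch of that notify/tear-down/retry idiom; Reconnector and its members are illustrative names, not part of the patch:

#include <QObject>
#include <QTimer>

class Reconnector : public QObject
{
    Q_OBJECT
public slots:
    void prepare() { /* (re)create the backend connection here */ }

private slots:
    void onBackendFailed()
    {
        emit backendFailed();                            // let dependents release their handles first
        release();                                       // then drop the failed connection
        QTimer::singleShot(3000, this, SLOT(prepare())); // and retry a little later
    }

Q_SIGNALS:
    void backendFailed();

private:
    void release() { /* free the main loop / context */ }
};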
View File
@@ -1,6 +1,6 @@
/**************************************************************************** /****************************************************************************
** **
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal ** Contact: http://www.qt-project.org/legal
** **
** This file is part of the Qt Toolkit. ** This file is part of the Qt Toolkit.
@@ -74,12 +74,36 @@ public:
pa_threaded_mainloop *mainloop() { return m_mainLoop; } pa_threaded_mainloop *mainloop() { return m_mainLoop; }
pa_context *context() { return m_context; } pa_context *context() { return m_context; }
inline void lock()
{
if (m_mainLoop)
pa_threaded_mainloop_lock(m_mainLoop);
}
inline void unlock()
{
if (m_mainLoop)
pa_threaded_mainloop_unlock(m_mainLoop);
}
inline void wait(pa_operation *op)
{
while (m_mainLoop && pa_operation_get_state(op) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
}
QList<QByteArray> availableDevices(QAudio::Mode mode) const; QList<QByteArray> availableDevices(QAudio::Mode mode) const;
Q_SIGNALS:
void contextFailed();
private Q_SLOTS:
void prepare();
void onContextFailed();
private: private:
void serverInfo(); void updateDevices();
void sinks(); void release();
void sources();
public: public:
QList<QByteArray> m_sinks; QList<QByteArray> m_sinks;
@@ -93,6 +117,7 @@ private:
pa_mainloop_api *m_mainLoopApi; pa_mainloop_api *m_mainLoopApi;
pa_threaded_mainloop *m_mainLoop; pa_threaded_mainloop *m_mainLoop;
pa_context *m_context; pa_context *m_context;
bool m_prepared;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE