Merge remote-tracking branch 'origin/5.3' into dev
Change-Id: If8e5050901a320f1ff692e842f173c0062bed5fe
This commit is contained in:
@@ -63,6 +63,9 @@ QAndroidCameraExposureControl::QAndroidCameraExposureControl(QAndroidCameraSessi
|
||||
|
||||
bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter parameter) const
|
||||
{
|
||||
if (!m_session->camera())
|
||||
return false;
|
||||
|
||||
switch (parameter) {
|
||||
case QCameraExposureControl::ISO:
|
||||
return false;
|
||||
@@ -71,7 +74,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
|
||||
case QCameraExposureControl::ShutterSpeed:
|
||||
return false;
|
||||
case QCameraExposureControl::ExposureCompensation:
|
||||
return true;
|
||||
return !m_supportedExposureCompensations.isEmpty();
|
||||
case QCameraExposureControl::FlashPower:
|
||||
return false;
|
||||
case QCameraExposureControl::FlashCompensation:
|
||||
@@ -81,7 +84,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
|
||||
case QCameraExposureControl::SpotMeteringPoint:
|
||||
return false;
|
||||
case QCameraExposureControl::ExposureMode:
|
||||
return true;
|
||||
return !m_supportedExposureModes.isEmpty();
|
||||
case QCameraExposureControl::MeteringMode:
|
||||
return false;
|
||||
default:
|
||||
@@ -127,27 +130,41 @@ QVariant QAndroidCameraExposureControl::actualValue(ExposureParameter parameter)
|
||||
|
||||
bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)
|
||||
{
|
||||
if (!m_session->camera() || !value.isValid())
|
||||
if (!value.isValid())
|
||||
return false;
|
||||
|
||||
if (parameter == QCameraExposureControl::ExposureCompensation) {
|
||||
m_requestedExposureCompensation = value.toReal();
|
||||
qreal expComp = value.toReal();
|
||||
if (!qFuzzyCompare(m_requestedExposureCompensation, expComp)) {
|
||||
m_requestedExposureCompensation = expComp;
|
||||
emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
|
||||
}
|
||||
|
||||
if (!m_session->camera())
|
||||
return true;
|
||||
|
||||
int expCompIndex = qRound(m_requestedExposureCompensation / m_exposureCompensationStep);
|
||||
if (expCompIndex >= m_minExposureCompensationIndex
|
||||
&& expCompIndex <= m_maxExposureCompensationIndex) {
|
||||
qreal comp = expCompIndex * m_exposureCompensationStep;
|
||||
m_session->camera()->setExposureCompensation(expCompIndex);
|
||||
|
||||
if (!qFuzzyCompare(m_actualExposureCompensation, comp)) {
|
||||
m_actualExposureCompensation = expCompIndex * m_exposureCompensationStep;
|
||||
emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} else if (parameter == QCameraExposureControl::ExposureMode) {
|
||||
m_requestedExposureMode = value.value<QCameraExposure::ExposureMode>();
|
||||
QCameraExposure::ExposureMode expMode = value.value<QCameraExposure::ExposureMode>();
|
||||
if (m_requestedExposureMode != expMode) {
|
||||
m_requestedExposureMode = expMode;
|
||||
emit requestedValueChanged(QCameraExposureControl::ExposureMode);
|
||||
}
|
||||
|
||||
if (!m_session->camera())
|
||||
return true;
|
||||
|
||||
if (!m_supportedExposureModes.isEmpty()) {
|
||||
m_actualExposureMode = m_requestedExposureMode;
|
||||
@@ -190,22 +207,19 @@ bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const
|
||||
|
||||
void QAndroidCameraExposureControl::onCameraOpened()
|
||||
{
|
||||
m_requestedExposureCompensation = m_actualExposureCompensation = 0.0;
|
||||
m_requestedExposureMode = m_actualExposureMode = QCameraExposure::ExposureAuto;
|
||||
emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
|
||||
emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
|
||||
emit requestedValueChanged(QCameraExposureControl::ExposureMode);
|
||||
emit actualValueChanged(QCameraExposureControl::ExposureMode);
|
||||
|
||||
m_supportedExposureCompensations.clear();
|
||||
m_minExposureCompensationIndex = m_session->camera()->getMinExposureCompensation();
|
||||
m_maxExposureCompensationIndex = m_session->camera()->getMaxExposureCompensation();
|
||||
m_exposureCompensationStep = m_session->camera()->getExposureCompensationStep();
|
||||
if (m_minExposureCompensationIndex != 0 || m_maxExposureCompensationIndex != 0) {
|
||||
for (int i = m_minExposureCompensationIndex; i <= m_maxExposureCompensationIndex; ++i)
|
||||
m_supportedExposureCompensations.append(i * m_exposureCompensationStep);
|
||||
emit parameterRangeChanged(QCameraExposureControl::ExposureCompensation);
|
||||
}
|
||||
|
||||
m_supportedExposureModes.clear();
|
||||
QStringList sceneModes = m_session->camera()->getSupportedSceneModes();
|
||||
if (!sceneModes.isEmpty()) {
|
||||
for (int i = 0; i < sceneModes.size(); ++i) {
|
||||
const QString &sceneMode = sceneModes.at(i);
|
||||
if (sceneMode == QLatin1String("auto"))
|
||||
@@ -224,4 +238,8 @@ void QAndroidCameraExposureControl::onCameraOpened()
|
||||
emit parameterRangeChanged(QCameraExposureControl::ExposureMode);
|
||||
}
|
||||
|
||||
setValue(QCameraExposureControl::ExposureCompensation, QVariant::fromValue(m_requestedExposureCompensation));
|
||||
setValue(QCameraExposureControl::ExposureMode, QVariant::fromValue(m_requestedExposureMode));
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -62,7 +62,12 @@ QCameraExposure::FlashModes QAndroidCameraFlashControl::flashMode() const
|
||||
|
||||
void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
|
||||
{
|
||||
if (m_flashMode == mode || !m_session->camera() || !isFlashModeSupported(mode))
|
||||
if (!m_session->camera()) {
|
||||
m_flashMode = mode;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isFlashModeSupported(mode))
|
||||
return;
|
||||
|
||||
// if torch was enabled, it first needs to be turned off before setting another mode
|
||||
@@ -88,7 +93,7 @@ void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
|
||||
|
||||
bool QAndroidCameraFlashControl::isFlashModeSupported(QCameraExposure::FlashModes mode) const
|
||||
{
|
||||
return m_supportedFlashModes.contains(mode);
|
||||
return m_session->camera() ? m_supportedFlashModes.contains(mode) : false;
|
||||
}
|
||||
|
||||
bool QAndroidCameraFlashControl::isFlashReady() const
|
||||
@@ -115,6 +120,11 @@ void QAndroidCameraFlashControl::onCameraOpened()
|
||||
else if (flashMode == QLatin1String("torch"))
|
||||
m_supportedFlashModes << QCameraExposure::FlashVideoLight;
|
||||
}
|
||||
|
||||
if (!m_supportedFlashModes.contains(m_flashMode))
|
||||
m_flashMode = QCameraExposure::FlashOff;
|
||||
|
||||
setFlashMode(m_flashMode);
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -80,9 +80,12 @@ QCameraFocus::FocusModes QAndroidCameraFocusControl::focusMode() const
|
||||
|
||||
void QAndroidCameraFocusControl::setFocusMode(QCameraFocus::FocusModes mode)
|
||||
{
|
||||
if (m_focusMode == mode || !m_session->camera() || !isFocusModeSupported(mode))
|
||||
if (!m_session->camera()) {
|
||||
setFocusModeHelper(mode);
|
||||
return;
|
||||
}
|
||||
|
||||
if (isFocusModeSupported(mode)) {
|
||||
QString focusMode = QLatin1String("fixed");
|
||||
|
||||
if (mode.testFlag(QCameraFocus::HyperfocalFocus)) {
|
||||
@@ -109,13 +112,13 @@ void QAndroidCameraFocusControl::setFocusMode(QCameraFocus::FocusModes mode)
|
||||
// reset focus position
|
||||
m_session->camera()->cancelAutoFocus();
|
||||
|
||||
m_focusMode = mode;
|
||||
emit focusModeChanged(m_focusMode);
|
||||
setFocusModeHelper(mode);
|
||||
}
|
||||
}
|
||||
|
||||
bool QAndroidCameraFocusControl::isFocusModeSupported(QCameraFocus::FocusModes mode) const
|
||||
{
|
||||
return m_supportedFocusModes.contains(mode);
|
||||
return m_session->camera() ? m_supportedFocusModes.contains(mode) : false;
|
||||
}
|
||||
|
||||
QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
|
||||
@@ -125,11 +128,12 @@ QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
|
||||
|
||||
void QAndroidCameraFocusControl::setFocusPointMode(QCameraFocus::FocusPointMode mode)
|
||||
{
|
||||
if (!m_session->camera() || m_focusPointMode == mode || !isFocusPointModeSupported(mode))
|
||||
if (!m_session->camera()) {
|
||||
setFocusPointModeHelper(mode);
|
||||
return;
|
||||
}
|
||||
|
||||
m_focusPointMode = mode;
|
||||
|
||||
if (isFocusPointModeSupported(mode)) {
|
||||
if (mode == QCameraFocus::FocusPointCustom) {
|
||||
m_actualFocusPoint = m_customFocusPoint;
|
||||
} else {
|
||||
@@ -139,15 +143,16 @@ void QAndroidCameraFocusControl::setFocusPointMode(QCameraFocus::FocusPointMode
|
||||
m_actualFocusPoint = QPointF(0.5, 0.5);
|
||||
}
|
||||
|
||||
setFocusPointModeHelper(mode);
|
||||
|
||||
updateFocusZones();
|
||||
setCameraFocusArea();
|
||||
|
||||
emit focusPointModeChanged(mode);
|
||||
}
|
||||
}
|
||||
|
||||
bool QAndroidCameraFocusControl::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode) const
|
||||
{
|
||||
return m_supportedFocusPointModes.contains(mode);
|
||||
return m_session->camera() ? m_supportedFocusPointModes.contains(mode) : false;
|
||||
}
|
||||
|
||||
QPointF QAndroidCameraFocusControl::customFocusPoint() const
|
||||
@@ -157,13 +162,12 @@ QPointF QAndroidCameraFocusControl::customFocusPoint() const
|
||||
|
||||
void QAndroidCameraFocusControl::setCustomFocusPoint(const QPointF &point)
|
||||
{
|
||||
if (m_customFocusPoint == point)
|
||||
return;
|
||||
|
||||
if (m_customFocusPoint != point) {
|
||||
m_customFocusPoint = point;
|
||||
emit customFocusPointChanged(m_customFocusPoint);
|
||||
}
|
||||
|
||||
if (m_focusPointMode == QCameraFocus::FocusPointCustom) {
|
||||
if (m_session->camera() && m_focusPointMode == QCameraFocus::FocusPointCustom) {
|
||||
m_actualFocusPoint = m_customFocusPoint;
|
||||
updateFocusZones();
|
||||
setCameraFocusArea();
|
||||
@@ -187,12 +191,7 @@ void QAndroidCameraFocusControl::onCameraOpened()
|
||||
m_supportedFocusModes.clear();
|
||||
m_continuousPictureFocusSupported = false;
|
||||
m_continuousVideoFocusSupported = false;
|
||||
|
||||
m_focusPointMode = QCameraFocus::FocusPointAuto;
|
||||
m_actualFocusPoint = QPointF(0.5, 0.5);
|
||||
m_customFocusPoint = QPointF();
|
||||
m_supportedFocusPointModes.clear();
|
||||
m_focusZones.clear();
|
||||
|
||||
QStringList focusModes = m_session->camera()->getSupportedFocusModes();
|
||||
for (int i = 0; i < focusModes.size(); ++i) {
|
||||
@@ -220,10 +219,14 @@ void QAndroidCameraFocusControl::onCameraOpened()
|
||||
if (m_session->camera()->getMaxNumFocusAreas() > 0)
|
||||
m_supportedFocusPointModes << QCameraFocus::FocusPointCenter << QCameraFocus::FocusPointCustom;
|
||||
|
||||
emit focusModeChanged(focusMode());
|
||||
emit focusPointModeChanged(m_focusPointMode);
|
||||
emit customFocusPointChanged(m_customFocusPoint);
|
||||
emit focusZonesChanged();
|
||||
if (!m_supportedFocusModes.contains(m_focusMode))
|
||||
setFocusModeHelper(QCameraFocus::AutoFocus);
|
||||
if (!m_supportedFocusPointModes.contains(m_focusPointMode))
|
||||
setFocusPointModeHelper(QCameraFocus::FocusPointAuto);
|
||||
|
||||
setFocusMode(m_focusMode);
|
||||
setCustomFocusPoint(m_customFocusPoint);
|
||||
setFocusPointMode(m_focusPointMode);
|
||||
}
|
||||
|
||||
void QAndroidCameraFocusControl::updateFocusZones(QCameraFocusZone::FocusZoneStatus status)
|
||||
@@ -276,11 +279,12 @@ void QAndroidCameraFocusControl::onViewportSizeChanged()
|
||||
if (!m_focusZones.isEmpty())
|
||||
status = m_focusZones.at(0).status();
|
||||
updateFocusZones(status);
|
||||
setCameraFocusArea();
|
||||
}
|
||||
|
||||
void QAndroidCameraFocusControl::onCameraCaptureModeChanged()
|
||||
{
|
||||
if (m_focusMode == QCameraFocus::ContinuousFocus) {
|
||||
if (m_session->camera() && m_focusMode == QCameraFocus::ContinuousFocus) {
|
||||
QString focusMode;
|
||||
if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
|
||||
|| !m_continuousPictureFocusSupported) {
|
||||
|
||||
@@ -72,6 +72,22 @@ private Q_SLOTS:
|
||||
void onAutoFocusComplete(bool success);
|
||||
|
||||
private:
|
||||
inline void setFocusModeHelper(QCameraFocus::FocusModes mode)
|
||||
{
|
||||
if (m_focusMode != mode) {
|
||||
m_focusMode = mode;
|
||||
emit focusModeChanged(mode);
|
||||
}
|
||||
}
|
||||
|
||||
inline void setFocusPointModeHelper(QCameraFocus::FocusPointMode mode)
|
||||
{
|
||||
if (m_focusPointMode != mode) {
|
||||
m_focusPointMode = mode;
|
||||
emit focusPointModeChanged(mode);
|
||||
}
|
||||
}
|
||||
|
||||
void updateFocusZones(QCameraFocusZone::FocusZoneStatus status = QCameraFocusZone::Selected);
|
||||
void setCameraFocusArea();
|
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ QT_BEGIN_NAMESPACE
|
||||
QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndroidCameraSession *session)
|
||||
: QCameraImageProcessingControl()
|
||||
, m_session(session)
|
||||
, m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
|
||||
{
|
||||
connect(m_session, SIGNAL(opened()),
|
||||
this, SLOT(onCameraOpened()));
|
||||
@@ -56,19 +57,17 @@ QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndr
|
||||
|
||||
bool QAndroidCameraImageProcessingControl::isParameterSupported(ProcessingParameter parameter) const
|
||||
{
|
||||
return (parameter == QCameraImageProcessingControl::WhiteBalancePreset);
|
||||
return parameter == QCameraImageProcessingControl::WhiteBalancePreset
|
||||
&& m_session->camera()
|
||||
&& !m_supportedWhiteBalanceModes.isEmpty();
|
||||
}
|
||||
|
||||
bool QAndroidCameraImageProcessingControl::isParameterValueSupported(ProcessingParameter parameter,
|
||||
const QVariant &value) const
|
||||
{
|
||||
if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
|
||||
return false;
|
||||
|
||||
if (!m_session->camera())
|
||||
return false;
|
||||
|
||||
return m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
|
||||
return parameter == QCameraImageProcessingControl::WhiteBalancePreset
|
||||
&& m_session->camera()
|
||||
&& m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
|
||||
}
|
||||
|
||||
QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter parameter) const
|
||||
@@ -76,13 +75,7 @@ QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter par
|
||||
if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
|
||||
return QVariant();
|
||||
|
||||
if (!m_session->camera())
|
||||
return QVariant();
|
||||
|
||||
QString wb = m_session->camera()->getWhiteBalance();
|
||||
QCameraImageProcessing::WhiteBalanceMode mode = m_supportedWhiteBalanceModes.key(wb, QCameraImageProcessing::WhiteBalanceAuto);
|
||||
|
||||
return QVariant::fromValue(mode);
|
||||
return QVariant::fromValue(m_whiteBalanceMode);
|
||||
}
|
||||
|
||||
void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter parameter, const QVariant &value)
|
||||
@@ -90,12 +83,21 @@ void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter para
|
||||
if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
|
||||
return;
|
||||
|
||||
if (!m_session->camera())
|
||||
return;
|
||||
QCameraImageProcessing::WhiteBalanceMode mode = value.value<QCameraImageProcessing::WhiteBalanceMode>();
|
||||
|
||||
QString wb = m_supportedWhiteBalanceModes.value(value.value<QCameraImageProcessing::WhiteBalanceMode>(), QString());
|
||||
if (!wb.isEmpty())
|
||||
if (m_session->camera())
|
||||
setWhiteBalanceModeHelper(mode);
|
||||
else
|
||||
m_whiteBalanceMode = mode;
|
||||
}
|
||||
|
||||
void QAndroidCameraImageProcessingControl::setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode)
|
||||
{
|
||||
QString wb = m_supportedWhiteBalanceModes.value(mode, QString());
|
||||
if (!wb.isEmpty()) {
|
||||
m_session->camera()->setWhiteBalance(wb);
|
||||
m_whiteBalanceMode = mode;
|
||||
}
|
||||
}
|
||||
|
||||
void QAndroidCameraImageProcessingControl::onCameraOpened()
|
||||
@@ -130,6 +132,11 @@ void QAndroidCameraImageProcessingControl::onCameraOpened()
|
||||
QStringLiteral("warm-fluorescent"));
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_supportedWhiteBalanceModes.contains(m_whiteBalanceMode))
|
||||
m_whiteBalanceMode = QCameraImageProcessing::WhiteBalanceAuto;
|
||||
|
||||
setWhiteBalanceModeHelper(m_whiteBalanceMode);
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -63,9 +63,13 @@ private Q_SLOTS:
|
||||
void onCameraOpened();
|
||||
|
||||
private:
|
||||
void setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode);
|
||||
|
||||
QAndroidCameraSession *m_session;
|
||||
|
||||
QHash<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
|
||||
QCameraImageProcessing::WhiteBalanceMode m_whiteBalanceMode;
|
||||
|
||||
QMap<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -66,13 +66,13 @@ QAndroidCameraLocksControl::QAndroidCameraLocksControl(QAndroidCameraSession *se
|
||||
|
||||
QCamera::LockTypes QAndroidCameraLocksControl::supportedLocks() const
|
||||
{
|
||||
return (QCamera::LockExposure | QCamera::LockWhiteBalance | QCamera::LockFocus);
|
||||
return m_supportedLocks;
|
||||
}
|
||||
|
||||
QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType lock) const
|
||||
{
|
||||
if (!m_supportedLocks.testFlag(lock) || !m_session->camera())
|
||||
return QCamera::Locked;
|
||||
return QCamera::Unlocked;
|
||||
|
||||
if (lock == QCamera::LockFocus)
|
||||
return m_focusLockStatus;
|
||||
@@ -83,7 +83,7 @@ QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType loc
|
||||
if (lock == QCamera::LockWhiteBalance)
|
||||
return m_whiteBalanceLockStatus;
|
||||
|
||||
return QCamera::Locked;
|
||||
return QCamera::Unlocked;
|
||||
}
|
||||
|
||||
void QAndroidCameraLocksControl::searchAndLock(QCamera::LockTypes locks)
|
||||
|
||||
@@ -331,11 +331,12 @@ bool QAndroidCameraSession::startPreview()
|
||||
if (m_previewStarted)
|
||||
return true;
|
||||
|
||||
if (m_videoOutput->isReady())
|
||||
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
|
||||
else
|
||||
if (!m_videoOutput->isReady())
|
||||
return true; // delay starting until the video output is ready
|
||||
|
||||
if (!m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
|
||||
return false;
|
||||
|
||||
m_status = QCamera::StartingStatus;
|
||||
emit statusChanged(m_status);
|
||||
|
||||
|
||||
@@ -96,15 +96,12 @@ void QAndroidCameraZoomControl::zoomTo(qreal optical, qreal digital)
|
||||
{
|
||||
Q_UNUSED(optical);
|
||||
|
||||
if (!m_cameraSession->camera() ||
|
||||
qFuzzyCompare(m_requestedZoom, digital) ||
|
||||
qFuzzyCompare(m_maximumZoom, qreal(1))) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!qFuzzyCompare(m_requestedZoom, digital)) {
|
||||
m_requestedZoom = digital;
|
||||
emit requestedDigitalZoomChanged(m_requestedZoom);
|
||||
}
|
||||
|
||||
if (m_cameraSession->camera()) {
|
||||
digital = qBound(qreal(1), digital, m_maximumZoom);
|
||||
int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(digital * 100));
|
||||
qreal newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
|
||||
@@ -114,14 +111,10 @@ void QAndroidCameraZoomControl::zoomTo(qreal optical, qreal digital)
|
||||
emit currentDigitalZoomChanged(m_currentZoom);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void QAndroidCameraZoomControl::onCameraOpened()
|
||||
{
|
||||
m_requestedZoom = 1.0;
|
||||
m_currentZoom = 1.0;
|
||||
emit requestedDigitalZoomChanged(m_requestedZoom);
|
||||
emit currentDigitalZoomChanged(m_currentZoom);
|
||||
|
||||
if (m_cameraSession->camera()->isZoomSupported()) {
|
||||
m_zoomRatios = m_cameraSession->camera()->getZoomRatios();
|
||||
qreal maxZoom = m_zoomRatios.last() / qreal(100);
|
||||
@@ -129,6 +122,7 @@ void QAndroidCameraZoomControl::onCameraOpened()
|
||||
m_maximumZoom = maxZoom;
|
||||
emit maximumDigitalZoomChanged(m_maximumZoom);
|
||||
}
|
||||
zoomTo(1, m_requestedZoom);
|
||||
} else {
|
||||
m_zoomRatios.clear();
|
||||
if (!qFuzzyCompare(m_maximumZoom, qreal(1))) {
|
||||
|
||||
@@ -56,6 +56,19 @@ static QMutex g_cameraMapMutex;
|
||||
typedef QMap<int, AndroidCamera *> CameraMap;
|
||||
Q_GLOBAL_STATIC(CameraMap, g_cameraMap)
|
||||
|
||||
static inline bool exceptionCheckAndClear(JNIEnv *env)
|
||||
{
|
||||
if (Q_UNLIKELY(env->ExceptionCheck())) {
|
||||
#ifdef QT_DEBUG
|
||||
env->ExceptionDescribe();
|
||||
#endif // QT_DEBUG
|
||||
env->ExceptionClear();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static QRect areaToRect(jobject areaObj)
|
||||
{
|
||||
QJNIObjectPrivate area(areaObj);
|
||||
@@ -132,9 +145,9 @@ public:
|
||||
Q_INVOKABLE bool init(int cameraId);
|
||||
|
||||
Q_INVOKABLE void release();
|
||||
Q_INVOKABLE void lock();
|
||||
Q_INVOKABLE void unlock();
|
||||
Q_INVOKABLE void reconnect();
|
||||
Q_INVOKABLE bool lock();
|
||||
Q_INVOKABLE bool unlock();
|
||||
Q_INVOKABLE bool reconnect();
|
||||
|
||||
Q_INVOKABLE AndroidCamera::CameraFacing getFacing();
|
||||
Q_INVOKABLE int getNativeOrientation();
|
||||
@@ -147,7 +160,7 @@ public:
|
||||
|
||||
Q_INVOKABLE QSize previewSize() const { return m_previewSize; }
|
||||
Q_INVOKABLE void updatePreviewSize();
|
||||
Q_INVOKABLE void setPreviewTexture(void *surfaceTexture);
|
||||
Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture);
|
||||
|
||||
Q_INVOKABLE bool isZoomSupported();
|
||||
Q_INVOKABLE int getMaxZoom();
|
||||
@@ -266,7 +279,7 @@ AndroidCamera *AndroidCamera::open(int cameraId)
|
||||
worker->start();
|
||||
d->moveToThread(worker);
|
||||
connect(worker, &QThread::finished, d, &AndroidCameraPrivate::deleteLater);
|
||||
bool ok = false;
|
||||
bool ok = true;
|
||||
QMetaObject::invokeMethod(d, "init", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok), Q_ARG(int, cameraId));
|
||||
if (!ok) {
|
||||
worker->quit();
|
||||
@@ -289,22 +302,28 @@ int AndroidCamera::cameraId() const
|
||||
return d->m_cameraId;
|
||||
}
|
||||
|
||||
void AndroidCamera::lock()
|
||||
bool AndroidCamera::lock()
|
||||
{
|
||||
Q_D(AndroidCamera);
|
||||
QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection);
|
||||
bool ok = true;
|
||||
QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
|
||||
return ok;
|
||||
}
|
||||
|
||||
void AndroidCamera::unlock()
|
||||
bool AndroidCamera::unlock()
|
||||
{
|
||||
Q_D(AndroidCamera);
|
||||
QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection);
|
||||
bool ok = true;
|
||||
QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
|
||||
return ok;
|
||||
}
|
||||
|
||||
void AndroidCamera::reconnect()
|
||||
bool AndroidCamera::reconnect()
|
||||
{
|
||||
Q_D(AndroidCamera);
|
||||
QMetaObject::invokeMethod(d, "reconnect");
|
||||
bool ok = true;
|
||||
QMetaObject::invokeMethod(d, "reconnect", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
|
||||
return ok;
|
||||
}
|
||||
|
||||
void AndroidCamera::release()
|
||||
@@ -368,13 +387,16 @@ void AndroidCamera::setPreviewSize(const QSize &size)
|
||||
QMetaObject::invokeMethod(d, "updatePreviewSize");
|
||||
}
|
||||
|
||||
void AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
|
||||
bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
|
||||
{
|
||||
Q_D(AndroidCamera);
|
||||
bool ok = true;
|
||||
QMetaObject::invokeMethod(d,
|
||||
"setPreviewTexture",
|
||||
Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, ok),
|
||||
Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : 0));
|
||||
return ok;
|
||||
}
|
||||
|
||||
bool AndroidCamera::isZoomSupported()
|
||||
@@ -698,12 +720,12 @@ AndroidCameraPrivate::~AndroidCameraPrivate()
|
||||
bool AndroidCameraPrivate::init(int cameraId)
|
||||
{
|
||||
m_cameraId = cameraId;
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera = QJNIObjectPrivate::callStaticObjectMethod("android/hardware/Camera",
|
||||
"open",
|
||||
"(I)Landroid/hardware/Camera;",
|
||||
cameraId);
|
||||
|
||||
if (!m_camera.isValid())
|
||||
if (exceptionCheckAndClear(env) || !m_camera.isValid())
|
||||
return false;
|
||||
|
||||
m_cameraListener = QJNIObjectPrivate(g_qtCameraListenerClass, "(I)V", m_cameraId);
|
||||
@@ -731,26 +753,25 @@ void AndroidCameraPrivate::release()
|
||||
m_camera.callMethod<void>("release");
|
||||
}
|
||||
|
||||
void AndroidCameraPrivate::lock()
|
||||
bool AndroidCameraPrivate::lock()
|
||||
{
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera.callMethod<void>("lock");
|
||||
return !exceptionCheckAndClear(env);
|
||||
}
|
||||
|
||||
void AndroidCameraPrivate::unlock()
|
||||
bool AndroidCameraPrivate::unlock()
|
||||
{
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera.callMethod<void>("unlock");
|
||||
return !exceptionCheckAndClear(env);
|
||||
}
|
||||
|
||||
void AndroidCameraPrivate::reconnect()
|
||||
bool AndroidCameraPrivate::reconnect()
|
||||
{
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera.callMethod<void>("reconnect");
|
||||
if (env->ExceptionCheck()) {
|
||||
#ifdef QT_DEBUG
|
||||
env->ExceptionDescribe();
|
||||
#endif // QT_DEBUG
|
||||
env->ExceptionDescribe();
|
||||
}
|
||||
return !exceptionCheckAndClear(env);
|
||||
}
|
||||
|
||||
AndroidCamera::CameraFacing AndroidCameraPrivate::getFacing()
|
||||
@@ -832,11 +853,13 @@ void AndroidCameraPrivate::updatePreviewSize()
|
||||
emit previewSizeChanged();
|
||||
}
|
||||
|
||||
void AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
|
||||
bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
|
||||
{
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera.callMethod<void>("setPreviewTexture",
|
||||
"(Landroid/graphics/SurfaceTexture;)V",
|
||||
static_cast<jobject>(surfaceTexture));
|
||||
return !exceptionCheckAndClear(env);
|
||||
}
|
||||
|
||||
bool AndroidCameraPrivate::isZoomSupported()
|
||||
@@ -1020,8 +1043,7 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
|
||||
arrayList.callMethod<jboolean>("add",
|
||||
"(Ljava/lang/Object;)Z",
|
||||
rectToArea(areas.at(i)).object());
|
||||
if (env->ExceptionCheck())
|
||||
env->ExceptionClear();
|
||||
exceptionCheckAndClear(env);
|
||||
}
|
||||
list = arrayList;
|
||||
}
|
||||
@@ -1347,9 +1369,11 @@ void AndroidCameraPrivate::fetchLastPreviewFrame()
|
||||
|
||||
void AndroidCameraPrivate::applyParameters()
|
||||
{
|
||||
QJNIEnvironmentPrivate env;
|
||||
m_camera.callMethod<void>("setParameters",
|
||||
"(Landroid/hardware/Camera$Parameters;)V",
|
||||
m_parameters.object());
|
||||
exceptionCheckAndClear(env);
|
||||
}
|
||||
|
||||
QStringList AndroidCameraPrivate::callParametersStringListMethod(const QByteArray &methodName)
|
||||
@@ -1386,10 +1410,8 @@ static JNINativeMethod methods[] = {
|
||||
bool AndroidCamera::initJNI(JNIEnv *env)
|
||||
{
|
||||
jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtCameraListener");
|
||||
if (env->ExceptionCheck())
|
||||
env->ExceptionClear();
|
||||
|
||||
if (clazz) {
|
||||
if (!exceptionCheckAndClear(env) && clazz) {
|
||||
g_qtCameraListenerClass = static_cast<jclass>(env->NewGlobalRef(clazz));
|
||||
if (env->RegisterNatives(g_qtCameraListenerClass,
|
||||
methods,
|
||||
|
||||
@@ -90,9 +90,9 @@ public:
|
||||
|
||||
int cameraId() const;
|
||||
|
||||
void lock();
|
||||
void unlock();
|
||||
void reconnect();
|
||||
bool lock();
|
||||
bool unlock();
|
||||
bool reconnect();
|
||||
void release();
|
||||
|
||||
CameraFacing getFacing();
|
||||
@@ -106,7 +106,7 @@ public:
|
||||
|
||||
QSize previewSize() const;
|
||||
void setPreviewSize(const QSize &size);
|
||||
void setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
|
||||
bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
|
||||
|
||||
bool isZoomSupported();
|
||||
int getMaxZoom();
|
||||
|
||||
@@ -250,10 +250,8 @@ void AVFMediaRecorderControl::setState(QMediaRecorder::State state)
|
||||
|
||||
qDebugCamera() << "Video capture location:" << actualLocation.toString();
|
||||
|
||||
NSString *urlString = [NSString stringWithUTF8String:actualLocation.toString().toUtf8().constData()];
|
||||
NSURL *fileURL = [NSURL URLWithString:urlString];
|
||||
|
||||
[m_movieOutput startRecordingToOutputFileURL:fileURL recordingDelegate:m_recorderDelagate];
|
||||
[m_movieOutput startRecordingToOutputFileURL:actualLocation.toNSURL()
|
||||
recordingDelegate:m_recorderDelagate];
|
||||
|
||||
Q_EMIT actualLocationChanged(actualLocation);
|
||||
} else {
|
||||
|
||||
@@ -216,8 +216,9 @@ CoreAudioSessionManager::CoreAudioSessionManager() :
|
||||
{
|
||||
m_sessionObserver = [[CoreAudioSessionObserver alloc] initWithAudioSessionManager:this];
|
||||
setActive(true);
|
||||
//set default category to just Playback and only switch if we need more permissions
|
||||
setCategory(CoreAudioSessionManager::Playback, CoreAudioSessionManager::MixWithOthers);
|
||||
// Set default category to Ambient (implies MixWithOthers). This makes sure audio stops playing
|
||||
// if the screen is locked or if the Silent switch is toggled.
|
||||
setCategory(CoreAudioSessionManager::Ambient, CoreAudioSessionManager::None);
|
||||
}
|
||||
|
||||
CoreAudioSessionManager::~CoreAudioSessionManager()
|
||||
|
||||
@@ -23,10 +23,5 @@ SOURCES += \
|
||||
$$PWD/dsimagecapturecontrol.cpp \
|
||||
$$PWD/dscamerasession.cpp
|
||||
|
||||
qtHaveModule(widgets) {
|
||||
HEADERS += $$PWD/dsvideowidgetcontrol.h
|
||||
SOURCES += $$PWD/dsvideowidgetcontrol.cpp
|
||||
}
|
||||
|
||||
*-msvc*:INCLUDEPATH += $$(DXSDK_DIR)/include
|
||||
LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32
|
||||
|
||||
@@ -48,10 +48,13 @@
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
DSCameraControl::DSCameraControl(QObject *parent)
|
||||
:QCameraControl(parent), m_captureMode(QCamera::CaptureStillImage)
|
||||
: QCameraControl(parent)
|
||||
, m_state(QCamera::UnloadedState)
|
||||
, m_captureMode(QCamera::CaptureStillImage)
|
||||
{
|
||||
m_session = qobject_cast<DSCameraSession*>(parent);
|
||||
connect(m_session, SIGNAL(stateChanged(QCamera::State)),this, SIGNAL(stateChanged(QCamera::State)));
|
||||
connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
|
||||
this, SIGNAL(statusChanged(QCamera::Status)));
|
||||
}
|
||||
|
||||
DSCameraControl::~DSCameraControl()
|
||||
@@ -60,15 +63,31 @@ DSCameraControl::~DSCameraControl()
|
||||
|
||||
void DSCameraControl::setState(QCamera::State state)
|
||||
{
|
||||
if (m_state == state)
|
||||
return;
|
||||
|
||||
bool succeeded = false;
|
||||
switch (state) {
|
||||
case QCamera::ActiveState:
|
||||
start();
|
||||
case QCamera::UnloadedState:
|
||||
succeeded = m_session->unload();
|
||||
break;
|
||||
case QCamera::UnloadedState: /* fall through */
|
||||
case QCamera::LoadedState:
|
||||
stop();
|
||||
case QCamera::ActiveState:
|
||||
if (m_state == QCamera::UnloadedState && !m_session->load())
|
||||
return;
|
||||
|
||||
if (state == QCamera::ActiveState)
|
||||
succeeded = m_session->startPreview();
|
||||
else
|
||||
succeeded = m_session->stopPreview();
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (succeeded) {
|
||||
m_state = state;
|
||||
emit stateChanged(m_state);
|
||||
}
|
||||
}
|
||||
|
||||
bool DSCameraControl::isCaptureModeSupported(QCamera::CaptureModes mode) const
|
||||
@@ -85,19 +104,17 @@ bool DSCameraControl::isCaptureModeSupported(QCamera::CaptureModes mode) const
|
||||
return bCaptureSupported;
|
||||
}
|
||||
|
||||
void DSCameraControl::start()
|
||||
void DSCameraControl::setCaptureMode(QCamera::CaptureModes mode)
|
||||
{
|
||||
m_session->record();
|
||||
if (m_captureMode != mode && isCaptureModeSupported(mode)) {
|
||||
m_captureMode = mode;
|
||||
emit captureModeChanged(mode);
|
||||
}
|
||||
}
|
||||
|
||||
void DSCameraControl::stop()
|
||||
QCamera::Status DSCameraControl::status() const
|
||||
{
|
||||
m_session->stop();
|
||||
}
|
||||
|
||||
QCamera::State DSCameraControl::state() const
|
||||
{
|
||||
return (QCamera::State)m_session->state();
|
||||
return m_session->status();
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -58,28 +58,21 @@ public:
|
||||
DSCameraControl(QObject *parent = 0);
|
||||
~DSCameraControl();
|
||||
|
||||
void start();
|
||||
void stop();
|
||||
QCamera::State state() const;
|
||||
QCamera::State state() const { return m_state; }
|
||||
|
||||
QCamera::CaptureModes captureMode() const { return m_captureMode; }
|
||||
void setCaptureMode(QCamera::CaptureModes mode)
|
||||
{
|
||||
if (m_captureMode != mode) {
|
||||
m_captureMode = mode;
|
||||
emit captureModeChanged(mode);
|
||||
}
|
||||
}
|
||||
void setCaptureMode(QCamera::CaptureModes mode);
|
||||
|
||||
void setState(QCamera::State state);
|
||||
|
||||
QCamera::Status status() const { return QCamera::UnavailableStatus; }
|
||||
QCamera::Status status() const;
|
||||
bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
|
||||
bool canChangeProperty(PropertyChangeType /* changeType */, QCamera::Status /* status */) const {return false; }
|
||||
|
||||
private:
|
||||
DSCameraSession *m_session;
|
||||
DSCameraService *m_service;
|
||||
QCamera::State m_state;
|
||||
QCamera::CaptureModes m_captureMode;
|
||||
};
|
||||
|
||||
|
||||
@@ -42,11 +42,6 @@
|
||||
#include <QtCore/qvariant.h>
|
||||
#include <QtCore/qdebug.h>
|
||||
|
||||
#if defined(HAVE_WIDGETS)
|
||||
#include <QtWidgets/qwidget.h>
|
||||
#include <QVideoWidgetControl>
|
||||
#endif
|
||||
|
||||
#include "dscameraservice.h"
|
||||
#include "dscameracontrol.h"
|
||||
#include "dscamerasession.h"
|
||||
@@ -54,28 +49,16 @@
|
||||
#include "dsvideodevicecontrol.h"
|
||||
#include "dsimagecapturecontrol.h"
|
||||
|
||||
#if defined(HAVE_WIDGETS)
|
||||
#include "dsvideowidgetcontrol.h"
|
||||
#endif
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
DSCameraService::DSCameraService(QObject *parent):
|
||||
QMediaService(parent)
|
||||
#if defined(HAVE_WIDGETS)
|
||||
, m_viewFinderWidget(0)
|
||||
#endif
|
||||
, m_videoRenderer(0)
|
||||
{
|
||||
m_session = new DSCameraSession(this);
|
||||
|
||||
m_control = new DSCameraControl(m_session);
|
||||
|
||||
m_videoDevice = new DSVideoDeviceControl(m_session);
|
||||
|
||||
m_imageCapture = new DSImageCaptureControl(m_session);
|
||||
|
||||
m_device = QByteArray("default");
|
||||
}
|
||||
|
||||
DSCameraService::~DSCameraService()
|
||||
@@ -84,9 +67,6 @@ DSCameraService::~DSCameraService()
|
||||
delete m_videoDevice;
|
||||
delete m_videoRenderer;
|
||||
delete m_imageCapture;
|
||||
#if defined(HAVE_WIDGETS)
|
||||
delete m_viewFinderWidget;
|
||||
#endif
|
||||
delete m_session;
|
||||
}
|
||||
|
||||
@@ -98,21 +78,8 @@ QMediaControl* DSCameraService::requestControl(const char *name)
|
||||
if (qstrcmp(name, QCameraImageCaptureControl_iid) == 0)
|
||||
return m_imageCapture;
|
||||
|
||||
#if defined(HAVE_WIDGETS)
|
||||
if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
|
||||
if (!m_viewFinderWidget && !m_videoRenderer) {
|
||||
m_viewFinderWidget = new DSVideoWidgetControl(m_session);
|
||||
return m_viewFinderWidget;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if (qstrcmp(name,QVideoRendererControl_iid) == 0) {
|
||||
#if defined(HAVE_WIDGETS)
|
||||
if (!m_videoRenderer && !m_viewFinderWidget) {
|
||||
#else
|
||||
if (!m_videoRenderer) {
|
||||
#endif
|
||||
m_videoRenderer = new DSVideoRendererControl(m_session, this);
|
||||
return m_videoRenderer;
|
||||
}
|
||||
@@ -131,14 +98,6 @@ void DSCameraService::releaseControl(QMediaControl *control)
|
||||
m_videoRenderer = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
#if defined(HAVE_WIDGETS)
|
||||
if (control == m_viewFinderWidget) {
|
||||
delete m_viewFinderWidget;
|
||||
m_viewFinderWidget = 0;
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -70,13 +70,9 @@ private:
|
||||
DSCameraControl *m_control;
|
||||
DSCameraSession *m_session;
|
||||
DSVideoOutputControl *m_videoOutput;
|
||||
#if defined(HAVE_WIDGETS)
|
||||
QMediaControl *m_viewFinderWidget;
|
||||
#endif
|
||||
DSVideoDeviceControl *m_videoDevice;
|
||||
QMediaControl *m_videoRenderer;
|
||||
DSImageCaptureControl *m_imageCapture;
|
||||
QByteArray m_device;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
@@ -51,6 +51,7 @@
|
||||
#include <QtMultimedia/qvideoframe.h>
|
||||
#include <QtMultimedia/qabstractvideosurface.h>
|
||||
#include <QtMultimedia/qvideosurfaceformat.h>
|
||||
#include <private/qmediastoragelocation_p.h>
|
||||
|
||||
#include <tchar.h>
|
||||
#include <dshow.h>
|
||||
@@ -75,18 +76,8 @@ struct ISampleGrabber;
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
class DSVideoRenderer;
|
||||
class SampleGrabberCallbackPrivate;
|
||||
|
||||
|
||||
struct video_buffer {
|
||||
unsigned char* buffer;
|
||||
int length;
|
||||
qint64 time;
|
||||
};
|
||||
|
||||
typedef QMap<unsigned int, QList<QSize> > FormatResolutionMap;
|
||||
|
||||
class DSCameraSession : public QObject
|
||||
{
|
||||
Q_OBJECT
|
||||
@@ -94,113 +85,82 @@ public:
|
||||
DSCameraSession(QObject *parent = 0);
|
||||
~DSCameraSession();
|
||||
|
||||
bool deviceReady();
|
||||
bool pictureInProgress();
|
||||
QCamera::Status status() const { return m_status; }
|
||||
|
||||
// camera controls
|
||||
|
||||
int framerate() const;
|
||||
void setFrameRate(int rate);
|
||||
int brightness() const;
|
||||
void setBrightness(int b);
|
||||
int contrast() const;
|
||||
void setContrast(int c);
|
||||
int saturation() const;
|
||||
void setSaturation(int s);
|
||||
int hue() const;
|
||||
void setHue(int h);
|
||||
int sharpness() const;
|
||||
void setSharpness(int s);
|
||||
int zoom() const;
|
||||
void setZoom(int z);
|
||||
bool backlightCompensation() const;
|
||||
void setBacklightCompensation(bool);
|
||||
int whitelevel() const;
|
||||
void setWhitelevel(int w);
|
||||
int rotation() const;
|
||||
void setRotation(int r);
|
||||
bool flash() const;
|
||||
void setFlash(bool f);
|
||||
bool autofocus() const;
|
||||
void setAutofocus(bool f);
|
||||
|
||||
QSize frameSize() const;
|
||||
void setFrameSize(const QSize& s);
|
||||
void setDevice(const QString &device);
|
||||
QList<QVideoFrame::PixelFormat> supportedPixelFormats();
|
||||
QVideoFrame::PixelFormat pixelFormat() const;
|
||||
void setPixelFormat(QVideoFrame::PixelFormat fmt);
|
||||
QList<QSize> supportedResolutions(QVideoFrame::PixelFormat format);
|
||||
|
||||
// media control
|
||||
bool load();
|
||||
bool unload();
|
||||
bool startPreview();
|
||||
bool stopPreview();
|
||||
|
||||
bool setOutputLocation(const QUrl &sink);
|
||||
QUrl outputLocation() const;
|
||||
qint64 position() const;
|
||||
int state() const;
|
||||
void record();
|
||||
void pause();
|
||||
void stop();
|
||||
bool isReadyForCapture();
|
||||
int captureImage(const QString &fileName);
|
||||
|
||||
void setSurface(QAbstractVideoSurface* surface);
|
||||
|
||||
int captureImage(const QString &fileName);
|
||||
|
||||
AM_MEDIA_TYPE StillMediaType;
|
||||
QList<video_buffer*> frames;
|
||||
SampleGrabberCallbackPrivate* StillCapCB;
|
||||
|
||||
QMutex mutex;
|
||||
|
||||
Q_SIGNALS:
|
||||
void stateChanged(QCamera::State);
|
||||
void statusChanged(QCamera::Status);
|
||||
void imageExposed(int id);
|
||||
void imageCaptured(int id, const QImage &preview);
|
||||
void imageSaved(int id, const QString &fileName);
|
||||
void readyForCaptureChanged(bool);
|
||||
void captureError(int id, int error, const QString &errorString);
|
||||
|
||||
private Q_SLOTS:
|
||||
void captureFrame();
|
||||
void presentFrame();
|
||||
void updateReadyForCapture();
|
||||
|
||||
private:
|
||||
QVideoSurfaceFormat actualFormat;
|
||||
QList<QVideoFrame::PixelFormat> types;
|
||||
void setStatus(QCamera::Status status);
|
||||
void populateCommonResolutions();
|
||||
|
||||
QTime timeStamp;
|
||||
bool graph;
|
||||
bool active;
|
||||
bool opened;
|
||||
bool available;
|
||||
QCamera::State m_state;
|
||||
QByteArray m_device;
|
||||
QUrl m_sink;
|
||||
DSVideoRenderer* m_output;
|
||||
QAbstractVideoSurface* m_surface;
|
||||
QVideoFrame::PixelFormat pixelF;
|
||||
QSize m_windowSize;
|
||||
FormatResolutionMap resolutions;
|
||||
void onFrameAvailable(const char *frameData, long len);
|
||||
void saveCapturedImage(int id, const QImage &image, const QString &path);
|
||||
|
||||
ICaptureGraphBuilder2* pBuild;
|
||||
IGraphBuilder* pGraph;
|
||||
IBaseFilter* pCap;
|
||||
IBaseFilter* pSG_Filter;
|
||||
ISampleGrabber *pSG;
|
||||
|
||||
|
||||
QString m_snapshot;
|
||||
int m_currentImageId;
|
||||
bool needsHorizontalMirroring;
|
||||
bool needsVerticalMirroring;
|
||||
protected:
|
||||
HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin);
|
||||
bool createFilterGraph();
|
||||
void updateProperties();
|
||||
bool setProperties();
|
||||
bool openStream();
|
||||
void closeStream();
|
||||
bool startStream();
|
||||
void stopStream();
|
||||
void suspendStream();
|
||||
void resumeStream();
|
||||
bool connectGraph();
|
||||
void disconnectGraph();
|
||||
void updateSourceCapabilities();
|
||||
bool configurePreviewFormat();
|
||||
|
||||
QMutex m_presentMutex;
|
||||
QMutex m_captureMutex;
|
||||
|
||||
// Capture Graph
|
||||
ICaptureGraphBuilder2* m_graphBuilder;
|
||||
IGraphBuilder* m_filterGraph;
|
||||
|
||||
// Source (camera)
|
||||
QString m_sourceDeviceName;
|
||||
IBaseFilter* m_sourceFilter;
|
||||
AM_MEDIA_TYPE m_sourcePreferredFormat;
|
||||
QSize m_sourcePreferredResolution;
|
||||
bool m_needsHorizontalMirroring;
|
||||
|
||||
// Preview
|
||||
IBaseFilter *m_previewFilter;
|
||||
ISampleGrabber *m_previewSampleGrabber;
|
||||
IBaseFilter *m_nullRendererFilter;
|
||||
QVideoFrame m_currentFrame;
|
||||
bool m_previewStarted;
|
||||
QAbstractVideoSurface* m_surface;
|
||||
QVideoSurfaceFormat m_previewSurfaceFormat;
|
||||
QVideoFrame::PixelFormat m_previewPixelFormat;
|
||||
QSize m_previewSize;
|
||||
|
||||
// Image capture
|
||||
QString m_imageCaptureFileName;
|
||||
QMediaStorageLocation m_fileNameGenerator;
|
||||
bool m_readyForCapture;
|
||||
int m_imageIdCounter;
|
||||
int m_currentImageId;
|
||||
QVideoFrame m_capturedFrame;
|
||||
|
||||
// Internal state
|
||||
QCamera::Status m_status;
|
||||
|
||||
friend class SampleGrabberCallbackPrivate;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -46,15 +46,19 @@
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
DSImageCaptureControl::DSImageCaptureControl(DSCameraSession *session)
|
||||
:QCameraImageCaptureControl(session), m_session(session), m_ready(false)
|
||||
: QCameraImageCaptureControl(session)
|
||||
, m_session(session)
|
||||
{
|
||||
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
|
||||
connect(m_session, SIGNAL(imageExposed(int)),
|
||||
this, SIGNAL(imageExposed(int)));
|
||||
connect(m_session, SIGNAL(imageCaptured(int,QImage)),
|
||||
this, SIGNAL(imageCaptured(int,QImage)));
|
||||
connect(m_session, SIGNAL(imageSaved(int,QString)),
|
||||
this, SIGNAL(imageSaved(int,QString)));
|
||||
connect(m_session, SIGNAL(readyForCaptureChanged(bool)),
|
||||
this, SIGNAL(readyForCaptureChanged(bool)));
|
||||
connect(m_session, SIGNAL(captureError(int,int,QString)),
|
||||
this, SIGNAL(error(int,int,QString)));
|
||||
}
|
||||
|
||||
DSImageCaptureControl::~DSImageCaptureControl()
|
||||
@@ -63,7 +67,7 @@ DSImageCaptureControl::~DSImageCaptureControl()
|
||||
|
||||
bool DSImageCaptureControl::isReadyForCapture() const
|
||||
{
|
||||
return m_ready;
|
||||
return m_session->isReadyForCapture();
|
||||
}
|
||||
|
||||
int DSImageCaptureControl::capture(const QString &fileName)
|
||||
@@ -71,12 +75,15 @@ int DSImageCaptureControl::capture(const QString &fileName)
|
||||
return m_session->captureImage(fileName);
|
||||
}
|
||||
|
||||
void DSImageCaptureControl::updateState()
|
||||
QCameraImageCapture::DriveMode DSImageCaptureControl::driveMode() const
|
||||
{
|
||||
bool ready = (m_session->state() == QCamera::ActiveState) &&
|
||||
!m_session->pictureInProgress();
|
||||
if(m_ready != ready)
|
||||
emit readyForCaptureChanged(m_ready = ready);
|
||||
return QCameraImageCapture::SingleImageCapture;
|
||||
}
|
||||
|
||||
void DSImageCaptureControl::setDriveMode(QCameraImageCapture::DriveMode mode)
|
||||
{
|
||||
if (mode != QCameraImageCapture::SingleImageCapture)
|
||||
qWarning("Drive mode not supported.");
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -52,23 +52,18 @@ class DSImageCaptureControl : public QCameraImageCaptureControl
|
||||
Q_OBJECT
|
||||
public:
|
||||
DSImageCaptureControl(DSCameraSession *session);
|
||||
virtual ~DSImageCaptureControl();
|
||||
~DSImageCaptureControl();
|
||||
|
||||
bool isReadyForCapture() const;
|
||||
int capture(const QString &fileName);
|
||||
|
||||
virtual QCameraImageCapture::DriveMode driveMode() const { return QCameraImageCapture::SingleImageCapture; }
|
||||
virtual void setDriveMode(QCameraImageCapture::DriveMode mode) { Q_UNUSED(mode) }
|
||||
|
||||
virtual void cancelCapture() {}
|
||||
|
||||
private slots:
|
||||
void updateState();
|
||||
QCameraImageCapture::DriveMode driveMode() const;
|
||||
void setDriveMode(QCameraImageCapture::DriveMode mode);
|
||||
|
||||
void cancelCapture() {}
|
||||
|
||||
private:
|
||||
DSCameraSession *m_session;
|
||||
bool m_ready;
|
||||
};
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
@@ -102,7 +102,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
|
||||
devices->clear();
|
||||
descriptions->clear();
|
||||
|
||||
CoInitialize(NULL);
|
||||
ICreateDevEnum* pDevEnum = NULL;
|
||||
IEnumMoniker* pEnum = NULL;
|
||||
// Create the System device enumerator
|
||||
@@ -148,7 +147,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
|
||||
}
|
||||
pDevEnum->Release();
|
||||
}
|
||||
CoUninitialize();
|
||||
}
|
||||
|
||||
void DSVideoDeviceControl::setSelectedDevice(int index)
|
||||
|
||||
@@ -1,253 +0,0 @@
|
||||
/****************************************************************************
|
||||
**
|
||||
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
|
||||
** Contact: http://www.qt-project.org/legal
|
||||
**
|
||||
** This file is part of the Qt Toolkit.
|
||||
**
|
||||
** $QT_BEGIN_LICENSE:LGPL$
|
||||
** Commercial License Usage
|
||||
** Licensees holding valid commercial Qt licenses may use this file in
|
||||
** accordance with the commercial license agreement provided with the
|
||||
** Software or, alternatively, in accordance with the terms contained in
|
||||
** a written agreement between you and Digia. For licensing terms and
|
||||
** conditions see http://qt.digia.com/licensing. For further information
|
||||
** use the contact form at http://qt.digia.com/contact-us.
|
||||
**
|
||||
** GNU Lesser General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU Lesser
|
||||
** General Public License version 2.1 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.LGPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU Lesser General Public License version 2.1 requirements
|
||||
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
|
||||
**
|
||||
** In addition, as a special exception, Digia gives you certain additional
|
||||
** rights. These rights are described in the Digia Qt LGPL Exception
|
||||
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
|
||||
**
|
||||
** GNU General Public License Usage
|
||||
** Alternatively, this file may be used under the terms of the GNU
|
||||
** General Public License version 3.0 as published by the Free Software
|
||||
** Foundation and appearing in the file LICENSE.GPL included in the
|
||||
** packaging of this file. Please review the following information to
|
||||
** ensure the GNU General Public License version 3.0 requirements will be
|
||||
** met: http://www.gnu.org/copyleft/gpl.html.
|
||||
**
|
||||
**
|
||||
** $QT_END_LICENSE$
|
||||
**
|
||||
****************************************************************************/
|
||||
|
||||
#include <QtCore/qcoreevent.h>
|
||||
#include <QtCore/qtimer.h>
|
||||
|
||||
#include "dsvideowidgetcontrol.h"
|
||||
#include "dscamerasession.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
DSVideoWidgetSurface::DSVideoWidgetSurface(QLabel *pWidget, QObject *parent)
|
||||
: QAbstractVideoSurface(parent)
|
||||
{
|
||||
widget = pWidget;
|
||||
myPixmap = 0;
|
||||
}
|
||||
|
||||
QList<QVideoFrame::PixelFormat> DSVideoWidgetSurface::supportedPixelFormats(
|
||||
QAbstractVideoBuffer::HandleType handleType) const
|
||||
{
|
||||
if (handleType == QAbstractVideoBuffer::NoHandle) {
|
||||
return QList<QVideoFrame::PixelFormat>()
|
||||
<< QVideoFrame::Format_RGB32
|
||||
<< QVideoFrame::Format_RGB24;
|
||||
} else {
|
||||
return QList<QVideoFrame::PixelFormat>();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool DSVideoWidgetSurface::present(const QVideoFrame &frame)
|
||||
{
|
||||
QVideoFrame myFrame = frame;
|
||||
myFrame.map(QAbstractVideoBuffer::ReadOnly);
|
||||
QImage image(
|
||||
frame.bits(),
|
||||
frame.width(),
|
||||
frame.height(),
|
||||
frame.bytesPerLine(),
|
||||
imageFormat);
|
||||
if (image.isNull())
|
||||
{
|
||||
// Try to adapt
|
||||
QImage image2(
|
||||
frame.bits(),
|
||||
frame.width(),
|
||||
frame.height(),
|
||||
frame.bytesPerLine(),
|
||||
QImage::Format_RGB888);
|
||||
image = image2;
|
||||
}
|
||||
myFrame.unmap();
|
||||
delete myPixmap;
|
||||
myPixmap = new QPixmap(QPixmap::fromImage(image).scaled(widget->size()));
|
||||
widget->setPixmap(*myPixmap);
|
||||
widget->repaint();
|
||||
return true;
|
||||
}
|
||||
|
||||
void DSVideoWidgetSurface::setImageFormat(QImage::Format fmt)
|
||||
{
|
||||
imageFormat = fmt;
|
||||
}
|
||||
|
||||
void DSVideoWidgetSurface::updateVideoRect()
|
||||
{
|
||||
}
|
||||
|
||||
void DSVideoWidgetSurface::paint(QPainter *painter)
|
||||
{
|
||||
Q_UNUSED(painter)
|
||||
}
|
||||
|
||||
|
||||
DSVideoWidgetControl::DSVideoWidgetControl(DSCameraSession* session, QObject *parent) :
|
||||
QVideoWidgetControl(parent),
|
||||
m_session(session),
|
||||
m_widget(new QLabel()),
|
||||
m_fullScreen(false)
|
||||
{
|
||||
m_widget->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::MinimumExpanding);
|
||||
m_widget->setAlignment(Qt::AlignCenter);
|
||||
m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
|
||||
|
||||
surface = new DSVideoWidgetSurface(m_widget);
|
||||
|
||||
QPalette palette;
|
||||
palette.setColor(QPalette::Background, Qt::black);
|
||||
m_widget->setPalette(palette);
|
||||
m_widget->setAutoFillBackground( true );
|
||||
|
||||
// Request QEvents
|
||||
m_widget->installEventFilter(this);
|
||||
m_windowId = m_widget->effectiveWinId();
|
||||
|
||||
surface->setImageFormat(QImage::Format_RGB888);
|
||||
session->setSurface(surface);
|
||||
}
|
||||
|
||||
DSVideoWidgetControl::~DSVideoWidgetControl()
|
||||
{
|
||||
delete m_widget;
|
||||
}
|
||||
|
||||
bool DSVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
|
||||
{
|
||||
if (object == m_widget) {
|
||||
switch (e->type()) {
|
||||
case QEvent::ParentChange:
|
||||
case QEvent::WinIdChange:
|
||||
case QEvent::Show:
|
||||
m_windowId = m_widget->effectiveWinId();
|
||||
emit widgetUpdated();
|
||||
break;
|
||||
case QEvent::Resize:
|
||||
emit widgetResized(m_widget->size());
|
||||
break;
|
||||
case QEvent::PolishRequest:
|
||||
m_widget->ensurePolished();
|
||||
break;
|
||||
|
||||
default:
|
||||
// Do nothing
|
||||
break;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
QWidget *DSVideoWidgetControl::videoWidget()
|
||||
{
|
||||
return m_widget;
|
||||
}
|
||||
|
||||
Qt::AspectRatioMode DSVideoWidgetControl::aspectRatioMode() const
|
||||
{
|
||||
return m_aspectRatioMode;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setAspectRatioMode(Qt::AspectRatioMode ratio)
|
||||
{
|
||||
if (m_aspectRatioMode==ratio) {
|
||||
return;
|
||||
}
|
||||
m_aspectRatioMode = ratio;
|
||||
|
||||
if (m_aspectRatioMode == Qt::KeepAspectRatio)
|
||||
m_widget->setScaledContents(false);
|
||||
else {
|
||||
m_widget->setScaledContents(true);
|
||||
}
|
||||
}
|
||||
|
||||
bool DSVideoWidgetControl::isFullScreen() const
|
||||
{
|
||||
return m_fullScreen;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setFullScreen(bool fullScreen)
|
||||
{
|
||||
if (m_widget && !fullScreen && m_fullScreen) {
|
||||
m_widget->showNormal();
|
||||
m_fullScreen = false;
|
||||
} else if (m_widget && fullScreen) {
|
||||
m_widget->showFullScreen();
|
||||
m_fullScreen = true;
|
||||
}
|
||||
|
||||
emit fullScreenChanged(fullScreen);
|
||||
}
|
||||
|
||||
int DSVideoWidgetControl::brightness() const
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setBrightness(int brightness)
|
||||
{
|
||||
Q_UNUSED(brightness);
|
||||
}
|
||||
|
||||
int DSVideoWidgetControl::contrast() const
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setContrast(int contrast)
|
||||
{
|
||||
Q_UNUSED(contrast);
|
||||
}
|
||||
|
||||
int DSVideoWidgetControl::hue() const
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setHue(int hue)
|
||||
{
|
||||
Q_UNUSED(hue);
|
||||
}
|
||||
|
||||
int DSVideoWidgetControl::saturation() const
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
void DSVideoWidgetControl::setSaturation(int saturation)
|
||||
{
|
||||
Q_UNUSED(saturation);
|
||||
}
|
||||
|
||||
QT_END_NAMESPACE
|
||||
|
||||
// End of file
|
||||
@@ -1,150 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#ifndef DSVIDEOWIDGETCONTROL_H
#define DSVIDEOWIDGETCONTROL_H

#include <QtCore/qobject.h>
#include <QtWidgets>
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qvideosurfaceformat.h>

#include <qvideowidgetcontrol.h>
#include "dscameracontrol.h"

QT_BEGIN_NAMESPACE

class DSVideoWidgetSurface : public QAbstractVideoSurface
{
Q_OBJECT
public:
DSVideoWidgetSurface(QLabel *pWidget, QObject *parent = 0);

QList<QVideoFrame::PixelFormat> supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const;

bool present(const QVideoFrame &frame);

QRect videoRect() const { return targetRect; }
void updateVideoRect();

void paint(QPainter *painter);
void setImageFormat(QImage::Format fmt);

private:
QLabel *widget;
QImage::Format imageFormat;
QRect targetRect;
QSize imageSize;
QRect sourceRect;
QPixmap* myPixmap;
};

class DSVideoWidgetControl : public QVideoWidgetControl
{
Q_OBJECT

DSVideoWidgetSurface* surface;
public: // Constructor & Destructor

DSVideoWidgetControl(DSCameraSession* session, QObject *parent = 0);
virtual ~DSVideoWidgetControl();

public: // QVideoWidgetControl

QWidget *videoWidget();

// Aspect Ratio
Qt::AspectRatioMode aspectRatioMode() const;
void setAspectRatioMode(Qt::AspectRatioMode ratio);

// Full Screen
bool isFullScreen() const;
void setFullScreen(bool fullScreen);

// Brightness
int brightness() const;
void setBrightness(int brightness);

// Contrast
int contrast() const;
void setContrast(int contrast);

// Hue
int hue() const;
void setHue(int hue);

// Saturation
int saturation() const;
void setSaturation(int saturation);

public: // Internal

bool eventFilter(QObject *object, QEvent *event);

/*
Q_SIGNALS: // QVideoWidgetControl

void fullScreenChanged(bool fullScreen);
void brightnessChanged(int brightness);
void contrastChanged(int contrast);
void hueChanged(int hue);
void saturationChanged(int saturation);
*/

Q_SIGNALS: // Internal Signals

void widgetResized(QSize size);
void widgetUpdated();

private: // Data

DSCameraSession* m_session;
QLabel *m_widget;
WId m_windowId;
Qt::AspectRatioMode m_aspectRatioMode;
bool m_fullScreen;
};

QT_END_NAMESPACE

#endif // DSVideoWidgetControl_H
@@ -4,7 +4,7 @@ PLUGIN_TYPE=mediaservice
PLUGIN_CLASS_NAME = DSServicePlugin
load(qt_plugin)

QT += multimedia
QT += multimedia-private

HEADERS += dsserviceplugin.h
SOURCES += dsserviceplugin.cpp

@@ -79,15 +79,32 @@ extern const CLSID CLSID_VideoInputDeviceCategory;

QT_USE_NAMESPACE

static int g_refCount = 0;
void addRefCount()
{
if (++g_refCount == 1)
CoInitialize(NULL);
}

void releaseRefCount()
{
if (--g_refCount == 0)
CoUninitialize();
}

QMediaService* DSServicePlugin::create(QString const& key)
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA))
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA)) {
addRefCount();
return new DSCameraService;
}
#endif
#ifdef QMEDIA_DIRECTSHOW_PLAYER
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER))
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER)) {
addRefCount();
return new DirectShowPlayerService;
}
#endif

return 0;
@@ -96,6 +113,7 @@ QMediaService* DSServicePlugin::create(QString const& key)
void DSServicePlugin::release(QMediaService *service)
{
delete service;
releaseRefCount();
}

QMediaServiceProviderHint::Features DSServicePlugin::supportedFeatures(
@@ -154,6 +172,8 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt

void DSServicePlugin::updateDevices() const
{
addRefCount();

m_defaultCameraDevice.clear();
DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);

@@ -162,6 +182,8 @@ void DSServicePlugin::updateDevices() const
} else {
m_defaultCameraDevice = m_cameraDevices.first();
}

releaseRefCount();
}
#endif


@@ -111,7 +111,6 @@ DirectShowPlayerService::DirectShowPlayerService(QObject *parent)
, m_seekable(false)
, m_atEnd(false)
{
CoInitialize(NULL);
m_playerControl = new DirectShowPlayerControl(this);
m_metaDataControl = new DirectShowMetaDataControl(this);
m_audioEndpointControl = new DirectShowAudioEndpointControl(this);
@@ -153,7 +152,6 @@ DirectShowPlayerService::~DirectShowPlayerService()
#endif

::CloseHandle(m_taskHandle);
CoUninitialize();
}

QMediaControl *DirectShowPlayerService::requestControl(const char *name)
@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -54,10 +54,6 @@ QT_BEGIN_NAMESPACE

const int PeriodTimeMs = 50;

// Map from void* (for userdata) to QPulseAudioInput instance
// protected by pulse mainloop lock
QMap<void *, QPulseAudioInput*> QPulseAudioInput::s_inputsMap;

static void inputStreamReadCallback(pa_stream *stream, size_t length, void *userdata)
{
Q_UNUSED(userdata);
@@ -136,8 +132,8 @@ void QPulseAudioInput::sourceInfoCallback(pa_context *context, const pa_source_i
Q_UNUSED(eol);

Q_ASSERT(userdata);
QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata);
if (that && i) {
if (i) {
QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);
that->m_volume = pa_sw_volume_to_linear(pa_cvolume_avg(&i->volume));
}
}
@@ -149,13 +145,12 @@ void QPulseAudioInput::inputVolumeCallback(pa_context *context, int success, voi
if (!success)
qWarning() << "QAudioInput: failed to set input volume";

QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata);
QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);

// Regardless of success or failure, we update the volume property
if (that && that->m_stream) {
if (that->m_stream)
pa_context_get_source_info_by_index(context, pa_stream_get_device_index(that->m_stream), sourceInfoCallback, userdata);
}
}

QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
: m_totalTimeValue(0)
@@ -175,31 +170,39 @@ QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
{
m_timer = new QTimer(this);
connect(m_timer, SIGNAL(timeout()), SLOT(userFeed()));

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
s_inputsMap.insert(this, this);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
}

QPulseAudioInput::~QPulseAudioInput()
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
s_inputsMap.remove(this);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());

close();
disconnect(m_timer, SIGNAL(timeout()));
QCoreApplication::processEvents();
delete m_timer;
}

void QPulseAudioInput::setError(QAudio::Error error)
{
if (m_errorState == error)
return;

m_errorState = error;
emit errorChanged(error);
}

QAudio::Error QPulseAudioInput::error() const
{
return m_errorState;
}

void QPulseAudioInput::setState(QAudio::State state)
{
if (m_deviceState == state)
return;

m_deviceState = state;
emit stateChanged(state);
}

QAudio::State QPulseAudioInput::state() const
{
return m_deviceState;
@@ -218,41 +221,45 @@ QAudioFormat QPulseAudioInput::format() const

void QPulseAudioInput::start(QIODevice *device)
{
if (m_deviceState != QAudio::StoppedState)
close();
setState(QAudio::StoppedState);
setError(QAudio::NoError);

if (!m_pullMode && m_audioSource)
if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
}

m_pullMode = true;
m_audioSource = device;

m_deviceState = QAudio::ActiveState;
close();

if (!open())
return;

emit stateChanged(m_deviceState);
m_pullMode = true;
m_audioSource = device;

setState(QAudio::ActiveState);
}

QIODevice *QPulseAudioInput::start()
{
if (m_deviceState != QAudio::StoppedState)
setState(QAudio::StoppedState);
setError(QAudio::NoError);

if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
}

close();

if (!m_pullMode && m_audioSource)
delete m_audioSource;
if (!open())
return Q_NULLPTR;

m_pullMode = false;
m_audioSource = new InputPrivate(this);
m_audioSource->open(QIODevice::ReadOnly | QIODevice::Unbuffered);

m_deviceState = QAudio::IdleState;

if (!open())
return 0;

emit stateChanged(m_deviceState);
setState(QAudio::IdleState);

return m_audioSource;
}
@@ -262,40 +269,43 @@ void QPulseAudioInput::stop()
if (m_deviceState == QAudio::StoppedState)
return;

m_errorState = QAudio::NoError;
m_deviceState = QAudio::StoppedState;

close();
emit stateChanged(m_deviceState);

setError(QAudio::NoError);
setState(QAudio::StoppedState);
}

bool QPulseAudioInput::open()
{
if (m_opened)
return true;

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();

if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
return false;
}

pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);

if (!pa_sample_spec_valid(&spec)) {
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false;
}

m_spec = spec;

#ifdef DEBUG_PULSE
// QTime now(QTime::currentTime());
// qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
#endif
m_clockStamp.restart();
m_timeStamp.restart();
m_elapsedTimeOffset = 0;

if (m_streamName.isNull())
m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();

pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);

if (!pa_sample_spec_valid(&spec)) {
m_errorState = QAudio::OpenError;
m_deviceState = QAudio::StoppedState;
emit stateChanged(m_deviceState);
return false;
}

m_spec = spec;

#ifdef DEBUG_PULSE
qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
qDebug() << "Rate: " << spec.rate;
@@ -303,15 +313,13 @@ bool QPulseAudioInput::open()
qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
pa_channel_map channel_map;

pa_channel_map_init_extend(&channel_map, spec.channels, PA_CHANNEL_MAP_DEFAULT);

if (!pa_channel_map_compatible(&channel_map, &spec)) {
if (!pa_channel_map_compatible(&channel_map, &spec))
qWarning() << "Channel map doesn't match sample specification!";
}

m_stream = pa_stream_new(pulseEngine->context(), m_streamName.constData(), &spec, &channel_map);

@@ -338,13 +346,16 @@ bool QPulseAudioInput::open()

if (pa_stream_connect_record(m_stream, m_device.data(), &buffer_attr, (pa_stream_flags_t)flags) < 0) {
qWarning() << "pa_stream_connect_record() failed!";
m_errorState = QAudio::FatalError;
pa_stream_unref(m_stream);
m_stream = 0;
pulseEngine->unlock();
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false;
}

while (pa_stream_get_state(m_stream) != PA_STREAM_READY) {
while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
}

const pa_buffer_attr *actualBufferAttr = pa_stream_get_buffer_attr(m_stream);
m_periodSize = actualBufferAttr->fragsize;
@@ -354,12 +365,16 @@ bool QPulseAudioInput::open()

setPulseVolume();

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);

m_opened = true;
m_timer->start(m_periodTime);
m_errorState = QAudio::NoError;

m_clockStamp.restart();
m_timeStamp.restart();
m_elapsedTimeOffset = 0;
m_totalTimeValue = 0;

return true;
@@ -367,21 +382,30 @@ bool QPulseAudioInput::open()
void QPulseAudioInput::close()
{
if (!m_opened)
return;

m_timer->stop();

if (m_stream) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());

if (m_stream) {
pulseEngine->lock();

pa_stream_set_state_callback(m_stream, 0, 0);
pa_stream_set_read_callback(m_stream, 0, 0);
pa_stream_set_underflow_callback(m_stream, 0, 0);
pa_stream_set_overflow_callback(m_stream, 0, 0);

pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream);
m_stream = 0;

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}

disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);

if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
@@ -393,6 +417,7 @@ void QPulseAudioInput::close()
void QPulseAudioInput::setPulseVolume()
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
Q_ASSERT(pulseEngine->context() != 0);

pa_cvolume cvolume;

@@ -434,11 +459,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
{
m_bytesAvailable = checkBytesReady();

if (m_deviceState != QAudio::ActiveState) {
m_errorState = QAudio::NoError;
m_deviceState = QAudio::ActiveState;
emit stateChanged(m_deviceState);
}
setError(QAudio::NoError);
setState(QAudio::ActiveState);

int readBytes = 0;

@@ -463,7 +485,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
#endif

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

const void *audioBuffer;

// Second and third parameters (audioBuffer and length) to pa_stream_peek are output parameters,
@@ -471,7 +494,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
// and the length is set to the length of this data.
if (pa_stream_peek(m_stream, &audioBuffer, &readLength) < 0) {
qWarning() << QString("pa_stream_peek() failed: %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
return 0;
}

@@ -480,11 +503,10 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
actualLength = m_audioSource->write(static_cast<const char *>(audioBuffer), readLength);

if (actualLength < qint64(readLength)) {
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

m_errorState = QAudio::UnderrunError;
m_deviceState = QAudio::IdleState;
emit stateChanged(m_deviceState);
setError(QAudio::UnderrunError);
setState(QAudio::IdleState);

return actualLength;
}
@@ -509,7 +531,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
readBytes += actualLength;

pa_stream_drop(m_stream);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

if (!m_pullMode && readBytes >= len)
break;
@@ -534,22 +556,18 @@ void QPulseAudioInput::resume()
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;

pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

operation = pa_stream_cork(m_stream, 0, inputStreamSuccessCallback, 0);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());

pulseEngine->wait(operation);
pa_operation_unref(operation);

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

m_timer->start(m_periodTime);

m_deviceState = QAudio::ActiveState;

emit stateChanged(m_deviceState);
setState(QAudio::ActiveState);
setError(QAudio::NoError);
}
}

@@ -557,23 +575,23 @@ void QPulseAudioInput::setVolume(qreal vol)
{
if (vol >= 0.0 && vol <= 1.0) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
if (!qFuzzyCompare(m_volume, vol)) {
m_volume = vol;
if (m_opened) {
setPulseVolume();
}
}
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}
}

qreal QPulseAudioInput::volume() const
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
qreal vol = m_volume;
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
return vol;
}

@@ -614,23 +632,21 @@ qint64 QPulseAudioInput::processedUSecs() const
void QPulseAudioInput::suspend()
{
if (m_deviceState == QAudio::ActiveState) {
setError(QAudio::NoError);
setState(QAudio::SuspendedState);

m_timer->stop();
m_deviceState = QAudio::SuspendedState;
emit stateChanged(m_deviceState);

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;

pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

operation = pa_stream_cork(m_stream, 1, inputStreamSuccessCallback, 0);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());

pulseEngine->wait(operation);
pa_operation_unref(operation);

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}
}

@@ -685,6 +701,14 @@ void QPulseAudioInput::reset()
m_bytesAvailable = 0;
}

void QPulseAudioInput::onPulseContextFailed()
{
close();

setError(QAudio::FatalError);
setState(QAudio::StoppedState);
}

InputPrivate::InputPrivate(QPulseAudioInput *audio)
{
m_audioDevice = qobject_cast<QPulseAudioInput*>(audio);
@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -112,8 +112,12 @@ public:
private slots:
void userFeed();
bool deviceReady();
void onPulseContextFailed();

private:
void setState(QAudio::State state);
void setError(QAudio::Error error);

int checkBytesReady();
bool open();
void close();

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -170,11 +170,29 @@ QPulseAudioOutput::~QPulseAudioOutput()
QCoreApplication::processEvents();
}

void QPulseAudioOutput::setError(QAudio::Error error)
{
if (m_errorState == error)
return;

m_errorState = error;
emit errorChanged(error);
}

QAudio::Error QPulseAudioOutput::error() const
{
return m_errorState;
}

void QPulseAudioOutput::setState(QAudio::State state)
{
if (m_deviceState == state)
return;

m_deviceState = state;
emit stateChanged(state);
}

QAudio::State QPulseAudioOutput::state() const
{
return m_deviceState;
@@ -183,19 +201,15 @@ QAudio::State QPulseAudioOutput::state() const
void QPulseAudioOutput::streamUnderflowCallback()
{
if (m_deviceState != QAudio::IdleState && !m_resuming) {
m_errorState = QAudio::UnderrunError;
emit errorChanged(m_errorState);
m_deviceState = QAudio::IdleState;
emit stateChanged(m_deviceState);
setError(QAudio::UnderrunError);
setState(QAudio::IdleState);
}
}

void QPulseAudioOutput::start(QIODevice *device)
{
if (m_deviceState != QAudio::StoppedState)
m_deviceState = QAudio::StoppedState;

m_errorState = QAudio::NoError;
setState(QAudio::StoppedState);
setError(QAudio::NoError);

// Handle change of mode
if (m_audioSource && !m_pullMode) {
@@ -205,22 +219,19 @@ void QPulseAudioOutput::start(QIODevice *device)

close();

if (!open())
return;

m_pullMode = true;
m_audioSource = device;

m_deviceState = QAudio::ActiveState;

open();

emit stateChanged(m_deviceState);
setState(QAudio::ActiveState);
}

QIODevice *QPulseAudioOutput::start()
{
if (m_deviceState != QAudio::StoppedState)
m_deviceState = QAudio::StoppedState;

m_errorState = QAudio::NoError;
setState(QAudio::StoppedState);
setError(QAudio::NoError);

// Handle change of mode
if (m_audioSource && !m_pullMode) {
@@ -230,15 +241,14 @@ QIODevice *QPulseAudioOutput::start()

close();

if (!open())
return Q_NULLPTR;

m_audioSource = new OutputPrivate(this);
m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered);
m_pullMode = false;

m_deviceState = QAudio::IdleState;

open();

emit stateChanged(m_deviceState);
setState(QAudio::IdleState);

return m_audioSource;
}
@@ -246,20 +256,26 @@ QIODevice *QPulseAudioOutput::start()
bool QPulseAudioOutput::open()
{
if (m_opened)
return true;

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();

if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
setError(QAudio::FatalError);
setState(QAudio::StoppedState);
return false;
}

pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);

if (!pa_sample_spec_valid(&spec)) {
m_errorState = QAudio::OpenError;
m_deviceState = QAudio::StoppedState;
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false;
}

m_spec = spec;
m_totalTimeValue = 0;
m_elapsedTimeOffset = 0;
m_timeStamp.restart();

if (m_streamName.isNull())
m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
@@ -271,8 +287,7 @@ bool QPulseAudioOutput::open()
qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

qint64 bytesPerSecond = m_format.sampleRate() * m_format.channelCount() * m_format.sampleSize() / 8;

@@ -280,7 +295,7 @@ bool QPulseAudioOutput::open()
if (!m_category.isNull())
pa_proplist_sets(propList, PA_PROP_MEDIA_ROLE, m_category.toLatin1().constData());

m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &spec, 0, propList);
m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &m_spec, 0, propList);
pa_proplist_free(propList);

pa_stream_set_state_callback(m_stream, outputStreamStateCallback, this);
@@ -312,15 +327,20 @@ bool QPulseAudioOutput::open()

if (pa_stream_connect_playback(m_stream, m_device.data(), (m_bufferSize > 0) ? &requestedBuffer : NULL, (pa_stream_flags_t)0, &m_chVolume, NULL) < 0) {
qWarning() << "pa_stream_connect_playback() failed!";
pa_stream_unref(m_stream);
m_stream = 0;
pulseEngine->unlock();
setError(QAudio::OpenError);
setState(QAudio::StoppedState);
return false;
}

while (pa_stream_get_state(m_stream) != PA_STREAM_READY) {
while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
}

const pa_buffer_attr *buffer = pa_stream_get_buffer_attr(m_stream);
m_periodTime = (m_category == LOW_LATENCY_CATEGORY_NAME) ? LowLatencyPeriodTimeMs : PeriodTimeMs;
m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &spec);
m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &m_spec);
m_bufferSize = buffer->tlength;
m_maxBufferSize = buffer->maxlength;
m_audioBuffer = new char[m_maxBufferSize];
@@ -333,9 +353,12 @@ bool QPulseAudioOutput::open()
qDebug() << "\tFragment size: " << buffer->fragsize;
#endif

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);

m_opened = true;

m_tickTimer->start(m_periodTime);

m_elapsedTimeOffset = 0;
@@ -347,28 +370,35 @@ bool QPulseAudioOutput::open()

void QPulseAudioOutput::close()
{
if (!m_opened)
return;

m_tickTimer->stop();

if (m_stream) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());

pa_stream_set_write_callback(m_stream, NULL, NULL);
if (m_stream) {
pulseEngine->lock();

pa_stream_set_state_callback(m_stream, 0, 0);
pa_stream_set_write_callback(m_stream, 0, 0);
pa_stream_set_underflow_callback(m_stream, 0, 0);
pa_stream_set_overflow_callback(m_stream, 0, 0);
pa_stream_set_latency_update_callback(m_stream, 0, 0);

pa_operation *o = pa_stream_drain(m_stream, outputStreamDrainComplete, NULL);
if (!o) {
qWarning() << QString("pa_stream_drain(): %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
} else {
if (o)
pa_operation_unref(o);
}

pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream);
m_stream = NULL;

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}

disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);

if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
@@ -430,17 +460,14 @@ qint64 QPulseAudioOutput::write(const char *data, qint64 len)
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();

pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream)));
pa_stream_write(m_stream, data, len, 0, 0, PA_SEEK_RELATIVE);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
m_totalTimeValue += len;

m_errorState = QAudio::NoError;
if (m_deviceState != QAudio::ActiveState) {
m_deviceState = QAudio::ActiveState;
emit stateChanged(m_deviceState);
}
setError(QAudio::NoError);
setState(QAudio::ActiveState);

return len;
}
@@ -450,10 +477,10 @@ void QPulseAudioOutput::stop()
if (m_deviceState == QAudio::StoppedState)
return;

m_errorState = QAudio::NoError;
m_deviceState = QAudio::StoppedState;
close();
emit stateChanged(m_deviceState);

setError(QAudio::NoError);
setState(QAudio::StoppedState);
}

int QPulseAudioOutput::bytesFree() const
@@ -462,9 +489,9 @@ int QPulseAudioOutput::bytesFree() const
return 0;

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
int writableSize = pa_stream_writable_size(m_stream);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
return writableSize;
}

@@ -509,30 +536,22 @@ void QPulseAudioOutput::resume()

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();

pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

pa_operation *operation = pa_stream_cork(m_stream, 0, outputStreamSuccessCallback, NULL);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());

pulseEngine->wait(operation);
pa_operation_unref(operation);

operation = pa_stream_trigger(m_stream, outputStreamSuccessCallback, NULL);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());

pulseEngine->wait(operation);
pa_operation_unref(operation);

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();

m_deviceState = QAudio::ActiveState;

m_errorState = QAudio::NoError;
m_tickTimer->start(m_periodTime);

emit stateChanged(m_deviceState);
setState(QAudio::ActiveState);
setError(QAudio::NoError);
}
}

@@ -549,24 +568,21 @@ QAudioFormat QPulseAudioOutput::format() const
void QPulseAudioOutput::suspend()
{
if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) {
setError(QAudio::NoError);
setState(QAudio::SuspendedState);

m_tickTimer->stop();
m_deviceState = QAudio::SuspendedState;
m_errorState = QAudio::NoError;
emit stateChanged(m_deviceState);

QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;

pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();

operation = pa_stream_cork(m_stream, 1, outputStreamSuccessCallback, NULL);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(pulseEngine->mainloop());

pulseEngine->wait(operation);
pa_operation_unref(operation);

pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}
}

@@ -601,8 +617,8 @@ qint64 OutputPrivate::writeData(const char *data, qint64 len)
int retry = 0;
qint64 written = 0;

if ((m_audioDevice->m_deviceState == QAudio::ActiveState)
||(m_audioDevice->m_deviceState == QAudio::IdleState)) {
if ((m_audioDevice->m_deviceState == QAudio::ActiveState
|| m_audioDevice->m_deviceState == QAudio::IdleState)) {
while(written < len) {
int chunk = m_audioDevice->write(data+written, (len-written));
if (chunk <= 0)
@@ -623,7 +639,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
m_volume = vol;
if (m_opened) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_threaded_mainloop_lock(pulseEngine->mainloop());
pulseEngine->lock();
pa_volume_t paVolume;
if (qFuzzyCompare(vol, 0.0)) {
pa_cvolume_mute(&m_chVolume, m_spec.channels);
@@ -641,7 +657,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
qWarning()<<"QAudioOutput: Failed to set volume";
else
pa_operation_unref(op);
pa_threaded_mainloop_unlock(pulseEngine->mainloop());
pulseEngine->unlock();
}
}
}
@@ -664,6 +680,14 @@ QString QPulseAudioOutput::category() const
return m_category;
}

void QPulseAudioOutput::onPulseContextFailed()
{
close();

setError(QAudio::FatalError);
setState(QAudio::StoppedState);
}

QT_END_NAMESPACE

#include "moc_qaudiooutput_pulse.cpp"
@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -105,12 +105,16 @@ public:
void streamUnderflowCallback();

private:
void setState(QAudio::State state);
void setError(QAudio::Error error);

bool open();
void close();
qint64 write(const char *data, qint64 len);

private Q_SLOTS:
void userFeed();
void onPulseContextFailed();

private:
QByteArray m_device;

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -170,15 +170,17 @@ static void contextStateCallbackInit(pa_context *context, void *userdata)
pa_threaded_mainloop_signal(pulseEngine->mainloop(), 0);
}

static void contextStateCallback(pa_context *context, void *userdata)
static void contextStateCallback(pa_context *c, void *userdata)
{
Q_UNUSED(userdata);
Q_UNUSED(context);
QPulseAudioEngine *self = reinterpret_cast<QPulseAudioEngine*>(userdata);
pa_context_state_t state = pa_context_get_state(c);

#ifdef DEBUG_PULSE
pa_context_state_t state = pa_context_get_state(context);
qDebug() << QPulseAudioInternal::stateToQString(state);
#endif

if (state == PA_CONTEXT_FAILED)
QMetaObject::invokeMethod(self, "onContextFailed", Qt::QueuedConnection);
}

Q_GLOBAL_STATIC(QPulseAudioEngine, pulseEngine);
@@ -187,40 +189,59 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
: QObject(parent)
, m_mainLoopApi(0)
, m_context(0)
, m_prepared(false)
{
prepare();
}

QPulseAudioEngine::~QPulseAudioEngine()
{
if (m_prepared)
release();
}

void QPulseAudioEngine::prepare()
{
bool keepGoing = true;
bool ok = true;

m_mainLoop = pa_threaded_mainloop_new();
if (m_mainLoop == 0) {
qWarning("Unable to create pulseaudio mainloop");
qWarning("PulseAudioService: unable to create pulseaudio mainloop");
return;
}

if (pa_threaded_mainloop_start(m_mainLoop) != 0) {
qWarning("Unable to start pulseaudio mainloop");
qWarning("PulseAudioService: unable to start pulseaudio mainloop");
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
return;
}

m_mainLoopApi = pa_threaded_mainloop_get_api(m_mainLoop);

pa_threaded_mainloop_lock(m_mainLoop);
lock();

m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtmPulseContext:%1")).arg(::getpid()).toLatin1().constData());
pa_context_set_state_callback(m_context, contextStateCallbackInit, this);
m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtPulseAudio:%1")).arg(::getpid()).toLatin1().constData());

if (!m_context) {
qWarning("Unable to create new pulseaudio context");
if (m_context == 0) {
qWarning("PulseAudioService: Unable to create new pulseaudio context");
pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
onContextFailed();
return;
}

if (pa_context_connect(m_context, NULL, (pa_context_flags_t)0, NULL) < 0) {
qWarning("Unable to create a connection to the pulseaudio context");
pa_context_set_state_callback(m_context, contextStateCallbackInit, this);

if (pa_context_connect(m_context, 0, (pa_context_flags_t)0, 0) < 0) {
qWarning("PulseAudioService: pa_context_connect() failed");
pa_context_unref(m_context);
pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
m_context = 0;
return;
}
@@ -241,47 +262,49 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
break;

case PA_CONTEXT_TERMINATED:
qCritical("Context terminated.");
qCritical("PulseAudioService: Context terminated.");
keepGoing = false;
ok = false;
break;

case PA_CONTEXT_FAILED:
default:
qCritical() << QString("Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context)));
qCritical() << QString("PulseAudioService: Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context)));
keepGoing = false;
ok = false;
}

if (keepGoing) {
if (keepGoing)
pa_threaded_mainloop_wait(m_mainLoop);
}
}

if (ok) {
pa_context_set_state_callback(m_context, contextStateCallback, this);
} else {
if (m_context) {
pa_context_unref(m_context);
m_context = 0;
}
}

pa_threaded_mainloop_unlock(m_mainLoop);
unlock();

if (ok) {
serverInfo();
sinks();
sources();
updateDevices();
m_prepared = true;
} else {
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
onContextFailed();
}
}

QPulseAudioEngine::~QPulseAudioEngine()
void QPulseAudioEngine::release()
{
if (!m_prepared)
return;

if (m_context) {
pa_threaded_mainloop_lock(m_mainLoop);
pa_context_disconnect(m_context);
pa_threaded_mainloop_unlock(m_mainLoop);
pa_context_unref(m_context);
m_context = 0;
}

@@ -290,64 +313,54 @@ QPulseAudioEngine::~QPulseAudioEngine()
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
}

m_prepared = false;
}

void QPulseAudioEngine::serverInfo()
void QPulseAudioEngine::updateDevices()
{
pa_operation *operation;

pa_threaded_mainloop_lock(m_mainLoop);

operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
lock();

// Get default input and output devices
pa_operation *operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);

pa_operation_unref(operation);

pa_threaded_mainloop_unlock(m_mainLoop);
}

void QPulseAudioEngine::sinks()
{
pa_operation *operation;

pa_threaded_mainloop_lock(m_mainLoop);

// Get output devices
operation = pa_context_get_sink_info_list(m_context, sinkInfoCallback, this);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);

pa_operation_unref(operation);

pa_threaded_mainloop_unlock(m_mainLoop);
// Get input devices
operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation);

// Swap the default sink to index 0
unlock();

// Swap the default output to index 0
m_sinks.removeOne(m_defaultSink);
m_sinks.prepend(m_defaultSink);
}

void QPulseAudioEngine::sources()
{
pa_operation *operation;

pa_threaded_mainloop_lock(m_mainLoop);

operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);

while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);

pa_operation_unref(operation);

pa_threaded_mainloop_unlock(m_mainLoop);

// Swap the default source to index 0
// Swap the default input to index 0
m_sources.removeOne(m_defaultSource);
m_sources.prepend(m_defaultSource);
}

void QPulseAudioEngine::onContextFailed()
{
// Give a chance to the connected slots to still use the Pulse main loop before releasing it.
emit contextFailed();

release();

// Try to reconnect later
QTimer::singleShot(3000, this, SLOT(prepare()));
}

QPulseAudioEngine *QPulseAudioEngine::instance()
{
return pulseEngine();
@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -74,12 +74,36 @@ public:
pa_threaded_mainloop *mainloop() { return m_mainLoop; }
pa_context *context() { return m_context; }

inline void lock()
{
if (m_mainLoop)
pa_threaded_mainloop_lock(m_mainLoop);
}

inline void unlock()
{
if (m_mainLoop)
pa_threaded_mainloop_unlock(m_mainLoop);
}

inline void wait(pa_operation *op)
{
while (m_mainLoop && pa_operation_get_state(op) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
}

QList<QByteArray> availableDevices(QAudio::Mode mode) const;

Q_SIGNALS:
void contextFailed();

private Q_SLOTS:
void prepare();
void onContextFailed();

private:
void serverInfo();
void sinks();
void sources();
void updateDevices();
void release();

public:
QList<QByteArray> m_sinks;
@@ -93,6 +117,7 @@ private:
pa_mainloop_api *m_mainLoopApi;
pa_threaded_mainloop *m_mainLoop;
pa_context *m_context;
bool m_prepared;
};

QT_END_NAMESPACE