Merge remote-tracking branch 'origin/5.5' into 5.6

Conflicts:
	src/plugins/android/src/wrappers/jni/androidcamera.cpp

Change-Id: Ibb34f710b1dfb9a23b378462f31432581c6c26f4
Liang Qi
2016-01-07 11:13:47 +01:00
16 changed files with 243 additions and 156 deletions

View File

@@ -32,7 +32,7 @@
 ****************************************************************************/
 #include <alsa/asoundlib.h>
-#if (!(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 10))
+#if (!(SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 10)))
 #error "Alsa version found too old, require >= 1.0.10"
 #endif
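The corrected guard matters because the old test required the minor version to be exactly 0, so a perfectly recent ALSA 1.1.x installation failed the ">= 1.0.10" check at compile time. A minimal sketch of the new predicate (not part of the patch, names are illustrative):

constexpr bool alsaNewEnough(int major, int minor, int subminor)
{
    // Mirrors the corrected condition: any 1.x with minor > 0, or 1.0.x with x >= 10.
    return major == 1 && (minor > 0 || subminor >= 10);
}

static_assert(alsaNewEnough(1, 0, 10), "1.0.10 is the oldest accepted release");
static_assert(alsaNewEnough(1, 1, 0),  "1.1.0 was wrongly rejected by the old check");
static_assert(!alsaNewEnough(1, 0, 9), "anything below 1.0.10 still fails");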

View File

@@ -141,7 +141,7 @@ bool QAlsaAudioDeviceInfo::open()
 QList<QByteArray> devices = availableDevices(mode);
 if(dev.compare(QLatin1String("default")) == 0) {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 if (devices.size() > 0)
 dev = QLatin1String(devices.first().constData());
 else
@@ -150,7 +150,7 @@ bool QAlsaAudioDeviceInfo::open()
 dev = QLatin1String("hw:0,0");
 #endif
 } else {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 dev = device;
 #else
 int idx = 0;
@@ -194,7 +194,7 @@ bool QAlsaAudioDeviceInfo::testSettings(const QAudioFormat& format) const
 snd_pcm_hw_params_t *params;
 QString dev;
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 dev = device;
 if (dev.compare(QLatin1String("default")) == 0) {
 QList<QByteArray> devices = availableDevices(QAudio::AudioOutput);
@@ -335,7 +335,7 @@ QList<QByteArray> QAlsaAudioDeviceInfo::availableDevices(QAudio::Mode mode)
 QList<QByteArray> devices;
 QByteArray filter;
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 // Create a list of all current audio devices that support mode
 void **hints, **n;
 char *name, *descr, *io;

View File

@@ -303,7 +303,7 @@ bool QAlsaAudioInput::open()
 QString dev = QString(QLatin1String(m_device.constData()));
 QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioInput);
 if(dev.compare(QLatin1String("default")) == 0) {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 if (devices.size() > 0)
 dev = QLatin1String(devices.first());
 else
@@ -312,7 +312,7 @@ bool QAlsaAudioInput::open()
 dev = QLatin1String("hw:0,0");
 #endif
 } else {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 dev = QLatin1String(m_device);
 #else
 int idx = 0;

View File

@@ -306,7 +306,7 @@ bool QAlsaAudioOutput::open()
 QString dev = QString(QLatin1String(m_device.constData()));
 QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioOutput);
 if(dev.compare(QLatin1String("default")) == 0) {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 if (devices.size() > 0)
 dev = QLatin1String(devices.first());
 else
@@ -315,7 +315,7 @@ bool QAlsaAudioOutput::open()
 dev = QLatin1String("hw:0,0");
 #endif
 } else {
-#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
+#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
 dev = QLatin1String(m_device);
 #else
 int idx = 0;

View File

@@ -182,6 +182,8 @@ bool QAndroidCameraSession::open()
 connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
 connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
 connect(m_camera, SIGNAL(previewStopped()), this, SLOT(onCameraPreviewStopped()));
+connect(m_camera, &AndroidCamera::previewFailedToStart, this, &QAndroidCameraSession::onCameraPreviewFailedToStart);
+connect(m_camera, &AndroidCamera::takePictureFailed, this, &QAndroidCameraSession::onCameraTakePictureFailed);
 m_nativeOrientation = m_camera->getNativeOrientation();
@@ -546,6 +548,12 @@ void QAndroidCameraSession::cancelCapture()
 m_captureCanceled = true;
 }
+void QAndroidCameraSession::onCameraTakePictureFailed()
+{
+emit imageCaptureError(m_currentImageCaptureId, QCameraImageCapture::ResourceError,
+tr("Failed to capture image"));
+}
 void QAndroidCameraSession::onCameraPictureExposed()
 {
 if (m_captureCanceled)
@@ -617,6 +625,27 @@ void QAndroidCameraSession::onCameraPreviewStarted()
 setReadyForCapture(true);
 }
+void QAndroidCameraSession::onCameraPreviewFailedToStart()
+{
+if (m_status == QCamera::StartingStatus) {
+Q_EMIT error(QCamera::CameraError, tr("Camera preview failed to start."));
+AndroidMultimediaUtils::enableOrientationListener(false);
+m_camera->setPreviewSize(QSize());
+m_camera->setPreviewTexture(0);
+if (m_videoOutput) {
+m_videoOutput->stop();
+m_videoOutput->reset();
+}
+m_previewStarted = false;
+m_status = QCamera::LoadedStatus;
+emit statusChanged(m_status);
+setReadyForCapture(false);
+}
+}
 void QAndroidCameraSession::onCameraPreviewStopped()
 {
 if (m_status == QCamera::StoppingStatus) {
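The two new slots close a gap where Java-side failures were silently ignored: a preview that fails to start is now reported as QCamera::CameraError, and a failed takePicture() as QCameraImageCapture::ResourceError. Illustrative only, using the public Qt 5 API rather than anything in this patch, application code would observe these errors roughly like this:

#include <QCamera>
#include <QCameraImageCapture>
#include <QDebug>

void wireErrorReporting(QCamera *camera, QCameraImageCapture *capture)
{
    // Emitted by the session when previewFailedToStart arrives from the backend.
    QObject::connect(camera,
                     static_cast<void (QCamera::*)(QCamera::Error)>(&QCamera::error),
                     [camera](QCamera::Error) {
        qWarning() << "camera error:" << camera->errorString();
    });

    // Emitted with ResourceError when takePictureFailed arrives from the backend.
    QObject::connect(capture,
                     static_cast<void (QCameraImageCapture::*)(int, QCameraImageCapture::Error, const QString &)>(&QCameraImageCapture::error),
                     [](int id, QCameraImageCapture::Error, const QString &message) {
        qWarning() << "capture" << id << "failed:" << message;
    });
}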

View File

@@ -121,11 +121,13 @@ private Q_SLOTS:
 void onApplicationStateChanged(Qt::ApplicationState state);
+void onCameraTakePictureFailed();
 void onCameraPictureExposed();
 void onCameraPictureCaptured(const QByteArray &data);
 void onLastPreviewFrameFetched(const QVideoFrame &frame);
 void onNewPreviewFrame(const QVideoFrame &frame);
 void onCameraPreviewStarted();
+void onCameraPreviewFailedToStart();
 void onCameraPreviewStopped();
 private:

View File

@@ -244,12 +244,15 @@ public:
 Q_SIGNALS:
 void previewSizeChanged();
 void previewStarted();
+void previewFailedToStart();
 void previewStopped();
 void autoFocusStarted();
 void whiteBalanceChanged();
+void takePictureFailed();
 void lastPreviewFrameFetched(const QVideoFrame &frame);
 };
@@ -266,9 +269,11 @@ AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
 connect(d, &AndroidCameraPrivate::previewSizeChanged, this, &AndroidCamera::previewSizeChanged);
 connect(d, &AndroidCameraPrivate::previewStarted, this, &AndroidCamera::previewStarted);
+connect(d, &AndroidCameraPrivate::previewFailedToStart, this, &AndroidCamera::previewFailedToStart);
 connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
 connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
 connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
+connect(d, &AndroidCameraPrivate::takePictureFailed, this, &AndroidCamera::takePictureFailed);
 connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
 }
@@ -1131,15 +1136,21 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
 void AndroidCameraPrivate::autoFocus()
 {
+QJNIEnvironmentPrivate env;
 m_camera.callMethod<void>("autoFocus",
 "(Landroid/hardware/Camera$AutoFocusCallback;)V",
 m_cameraListener.object());
-emit autoFocusStarted();
+if (!exceptionCheckAndClear(env))
+emit autoFocusStarted();
 }
 void AndroidCameraPrivate::cancelAutoFocus()
 {
+QJNIEnvironmentPrivate env;
 m_camera.callMethod<void>("cancelAutoFocus");
+exceptionCheckAndClear(env);
 }
 bool AndroidCameraPrivate::isAutoExposureLockSupported()
@@ -1388,25 +1399,40 @@ void AndroidCameraPrivate::setJpegQuality(int quality)
 void AndroidCameraPrivate::startPreview()
 {
+QJNIEnvironmentPrivate env;
 setupPreviewFrameCallback();
 m_camera.callMethod<void>("startPreview");
-emit previewStarted();
+if (exceptionCheckAndClear(env))
+emit previewFailedToStart();
+else
+emit previewStarted();
 }
 void AndroidCameraPrivate::stopPreview()
 {
+QJNIEnvironmentPrivate env;
 m_camera.callMethod<void>("stopPreview");
+exceptionCheckAndClear(env);
 emit previewStopped();
 }
 void AndroidCameraPrivate::takePicture()
 {
+QJNIEnvironmentPrivate env;
 m_camera.callMethod<void>("takePicture", "(Landroid/hardware/Camera$ShutterCallback;"
 "Landroid/hardware/Camera$PictureCallback;"
 "Landroid/hardware/Camera$PictureCallback;)V",
 m_cameraListener.object(),
 jobject(0),
 m_cameraListener.object());
+if (exceptionCheckAndClear(env))
+emit takePictureFailed();
 }
 void AndroidCameraPrivate::setupPreviewFrameCallback()
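Each of these hunks follows the same JNI pattern: obtain an environment, invoke the Java android.hardware.Camera method, then check for and clear any pending Java exception before deciding which signal to emit, because an uncleared exception would poison every later JNI call on that thread. exceptionCheckAndClear() is the helper assumed by the hunks above; a hedged sketch of what such a helper does, expressed with plain JNI calls:

#include <jni.h>

// Returns true when a Java exception was pending (and clears it), false otherwise.
static bool exceptionPendingAndCleared(JNIEnv *env)
{
    if (!env->ExceptionCheck())
        return false;         // nothing was thrown by the last Java call
    env->ExceptionDescribe();  // optional: dump the exception to the log
    env->ExceptionClear();     // mandatory before making further JNI calls
    return true;
}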

View File

@@ -172,6 +172,7 @@ public:
 Q_SIGNALS:
 void previewSizeChanged();
 void previewStarted();
+void previewFailedToStart();
 void previewStopped();
 void autoFocusStarted();
@@ -179,6 +180,7 @@ Q_SIGNALS:
 void whiteBalanceChanged();
+void takePictureFailed();
 void pictureExposed();
 void pictureCaptured(const QByteArray &data);
 void lastPreviewFrameFetched(const QVideoFrame &frame);

View File

@@ -105,8 +105,8 @@ Q_SIGNALS:
 private:
 static void updateCameraDevices();
 void attachVideoInputDevice();
-void applyImageEncoderSettings();
-void applyViewfinderSettings();
+bool applyImageEncoderSettings();
+bool applyViewfinderSettings();
 static int m_defaultCameraIndex;
 static QList<AVFCameraInfo> m_cameraDevices;

View File

@@ -285,10 +285,23 @@ void AVFCameraSession::setState(QCamera::State newState)
 Q_EMIT readyToConfigureConnections();
 m_defaultCodec = 0;
 defaultCodec();
-applyImageEncoderSettings();
-applyViewfinderSettings();
+bool activeFormatSet = applyImageEncoderSettings();
+activeFormatSet |= applyViewfinderSettings();
 [m_captureSession commitConfiguration];
+if (activeFormatSet) {
+// According to the doc, the capture device must be locked before
+// startRunning to prevent the format we set to be overriden by the
+// session preset.
+[videoCaptureDevice() lockForConfiguration:nil];
+}
 [m_captureSession startRunning];
+if (activeFormatSet)
+[videoCaptureDevice() unlockForConfiguration];
 }
 if (oldState == QCamera::ActiveState) {
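Note the use of |= rather than || when combining the two results in the hunk above: with a short-circuiting ||, applyViewfinderSettings() would never run once applyImageEncoderSettings() had already returned true, but both must always be applied, and the lock around startRunning is needed if either of them changed the device's active format. A small illustration of the difference (hypothetical helpers, not code from this patch):

bool applyBoth(bool (*applyImage)(), bool (*applyViewfinder)())
{
    bool activeFormatSet = applyImage();
    // Always evaluated, unlike "activeFormatSet || applyViewfinder()",
    // which would skip the second call once the first returned true.
    activeFormatSet |= applyViewfinder();
    return activeFormatSet;
}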
@@ -357,27 +370,32 @@ void AVFCameraSession::attachVideoInputDevice()
 }
 }
-void AVFCameraSession::applyImageEncoderSettings()
+bool AVFCameraSession::applyImageEncoderSettings()
 {
 if (AVFImageEncoderControl *control = m_service->imageEncoderControl())
-control->applySettings();
+return control->applySettings();
+return false;
 }
-void AVFCameraSession::applyViewfinderSettings()
+bool AVFCameraSession::applyViewfinderSettings()
 {
 if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) {
 QCameraViewfinderSettings vfSettings(vfControl->requestedSettings());
+// Viewfinder and image capture solutions must be the same, if an image capture
+// resolution is set, it takes precedence over the viewfinder resolution.
 if (AVFImageEncoderControl *imControl = m_service->imageEncoderControl()) {
-const QSize imageResolution(imControl->imageSettings().resolution());
+const QSize imageResolution(imControl->requestedSettings().resolution());
 if (!imageResolution.isNull() && imageResolution.isValid()) {
 vfSettings.setResolution(imageResolution);
 vfControl->setViewfinderSettings(vfSettings);
-return;
 }
 }
-vfControl->applySettings();
+return vfControl->applySettings();
 }
+return false;
 }
 void AVFCameraSession::addProbe(AVFMediaVideoProbeControl *probe)

View File

@@ -76,17 +76,13 @@ private:
 AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const;
 QVector<QVideoFrame::PixelFormat> viewfinderPixelFormats() const;
 bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const;
-void applySettings();
+bool applySettings();
 QCameraViewfinderSettings requestedSettings() const;
-// Aux. function to extract things like captureDevice, videoOutput, etc.
-bool updateAVFoundationObjects() const;
+AVCaptureConnection *videoConnection() const;
 AVFCameraService *m_service;
-mutable AVFCameraSession *m_session;
 QCameraViewfinderSettings m_settings;
-mutable AVCaptureDevice *m_captureDevice;
-mutable AVCaptureVideoDataOutput *m_videoOutput;
-mutable AVCaptureConnection *m_videoConnection;
 };
 class AVFCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl

View File

@@ -206,7 +206,6 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice,
 AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
 {
 Q_ASSERT(captureDevice);
-Q_ASSERT(videoConnection);
 AVFPSRange fps;
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
@@ -234,7 +233,8 @@ AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnec
 #else // OSX < 10.7 or iOS < 7.0
 {
 #endif // QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
-fps = qt_connection_framerates(videoConnection);
+if (videoConnection)
+fps = qt_connection_framerates(videoConnection);
 }
 return fps;
@@ -244,24 +244,20 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection
 const QCameraViewfinderSettings &settings)
 {
 Q_ASSERT(captureDevice);
-Q_ASSERT(videoConnection);
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0))
 qt_set_framerate_limits(captureDevice, settings);
 else
-qt_set_framerate_limits(videoConnection, settings);
-#else
-qt_set_framerate_limits(videoConnection, settings);
 #endif
+if (videoConnection)
+qt_set_framerate_limits(videoConnection, settings);
 }
 } // Unnamed namespace.
 AVFCameraViewfinderSettingsControl2::AVFCameraViewfinderSettingsControl2(AVFCameraService *service)
-: m_service(service),
-m_captureDevice(0),
-m_videoOutput(0),
-m_videoConnection(0)
+: m_service(service)
 {
 Q_ASSERT(service);
 }
@@ -270,8 +266,9 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
 {
 QList<QCameraViewfinderSettings> supportedSettings;
-if (!updateAVFoundationObjects()) {
-qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
+AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+if (!captureDevice) {
+qDebugCamera() << Q_FUNC_INFO << "no capture device found";
 return supportedSettings;
 }
@@ -281,15 +278,16 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
 if (!pixelFormats.size())
 pixelFormats << QVideoFrame::Format_Invalid; // The default value.
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-if (!m_captureDevice.formats || !m_captureDevice.formats.count) {
+if (!captureDevice.formats || !captureDevice.formats.count) {
 qDebugCamera() << Q_FUNC_INFO << "no capture device formats found";
 return supportedSettings;
 }
-const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(m_captureDevice,
-m_session->defaultCodec()));
+const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice,
+m_service->session()->defaultCodec()));
 for (int i = 0; i < formats.size(); ++i) {
 AVCaptureDeviceFormat *format = formats[i];
@@ -320,15 +318,18 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
 #else
 {
 #endif
-// TODO: resolution and PAR.
-framerates << qt_connection_framerates(m_videoConnection);
-for (int i = 0; i < pixelFormats.size(); ++i) {
-for (int j = 0; j < framerates.size(); ++j) {
-QCameraViewfinderSettings newSet;
-newSet.setPixelFormat(pixelFormats[i]);
-newSet.setMinimumFrameRate(framerates[j].first);
-newSet.setMaximumFrameRate(framerates[j].second);
-supportedSettings << newSet;
+AVCaptureConnection *connection = videoConnection();
+if (connection) {
+// TODO: resolution and PAR.
+framerates << qt_connection_framerates(connection);
+for (int i = 0; i < pixelFormats.size(); ++i) {
+for (int j = 0; j < framerates.size(); ++j) {
+QCameraViewfinderSettings newSet;
+newSet.setPixelFormat(pixelFormats[i]);
+newSet.setMinimumFrameRate(framerates[j].first);
+newSet.setMaximumFrameRate(framerates[j].second);
+supportedSettings << newSet;
+}
 }
 }
 }
@@ -340,20 +341,21 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
 {
 QCameraViewfinderSettings settings;
-if (!updateAVFoundationObjects()) {
-qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
+AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+if (!captureDevice) {
+qDebugCamera() << Q_FUNC_INFO << "no capture device found";
 return settings;
 }
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-if (!m_captureDevice.activeFormat) {
+if (!captureDevice.activeFormat) {
 qDebugCamera() << Q_FUNC_INFO << "no active capture device format";
 return settings;
 }
-const QSize res(qt_device_format_resolution(m_captureDevice.activeFormat));
-const QSize par(qt_device_format_pixel_aspect_ratio(m_captureDevice.activeFormat));
+const QSize res(qt_device_format_resolution(captureDevice.activeFormat));
+const QSize par(qt_device_format_pixel_aspect_ratio(captureDevice.activeFormat));
 if (res.isNull() || !res.isValid() || par.isNull() || !par.isValid()) {
 qDebugCamera() << Q_FUNC_INFO << "failed to obtain resolution/pixel aspect ratio";
 return settings;
@@ -364,12 +366,14 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
 }
 #endif
 // TODO: resolution and PAR before 7.0.
-const AVFPSRange fps = qt_current_framerates(m_captureDevice, m_videoConnection);
+const AVFPSRange fps = qt_current_framerates(captureDevice, videoConnection());
 settings.setMinimumFrameRate(fps.first);
 settings.setMaximumFrameRate(fps.second);
-if (NSObject *obj = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
-if ([obj isKindOfClass:[NSNumber class]]) {
+AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+if (videoOutput) {
+NSObject *obj = [videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey];
+if (obj && [obj isKindOfClass:[NSNumber class]]) {
 NSNumber *nsNum = static_cast<NSNumber *>(obj);
 settings.setPixelFormat(QtPixelFormatFromCVFormat([nsNum unsignedIntValue]));
 }
@@ -380,11 +384,6 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
 void AVFCameraViewfinderSettingsControl2::setViewfinderSettings(const QCameraViewfinderSettings &settings)
 {
-if (settings.isNull()) {
-qDebugCamera() << Q_FUNC_INFO << "empty viewfinder settings";
-return;
-}
 if (m_settings == settings)
 return;
@@ -449,17 +448,19 @@ bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame:
 AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(const QCameraViewfinderSettings &settings) const
 {
+AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+if (!captureDevice || settings.isNull())
+return nil;
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-Q_ASSERT(m_captureDevice);
-Q_ASSERT(m_session);
 const QSize &resolution = settings.resolution();
 if (!resolution.isNull() && resolution.isValid()) {
 // Either the exact match (including high resolution for images on iOS)
 // or a format with a resolution close to the requested one.
-return qt_find_best_resolution_match(m_captureDevice, resolution,
-m_session->defaultCodec());
+return qt_find_best_resolution_match(captureDevice, resolution,
+m_service->session()->defaultCodec());
 }
 // No resolution requested, what about framerates?
@@ -472,22 +473,28 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(
 const qreal minFPS(settings.minimumFrameRate());
 const qreal maxFPS(settings.maximumFrameRate());
 if (minFPS || maxFPS)
-return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS,
-m_session->defaultCodec());
+return qt_find_best_framerate_match(captureDevice, maxFPS ? maxFPS : minFPS,
+m_service->session()->defaultCodec());
 // Ignore PAR for the moment (PAR without resolution can
 // pick a format with really bad resolution).
 // No need to test pixel format, just return settings.
 }
 #endif
 return nil;
 }
 QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinderPixelFormats() const
 {
-Q_ASSERT(m_videoOutput);
 QVector<QVideoFrame::PixelFormat> qtFormats;
-NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes];
+AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+if (!videoOutput) {
+qDebugCamera() << Q_FUNC_INFO << "no video output found";
+return qtFormats;
+}
+NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes];
 for (NSObject *obj in pixelFormats) {
 if (![obj isKindOfClass:[NSNumber class]])
@@ -508,17 +515,19 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde
 bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat,
 unsigned &avfFormat)const
 {
-Q_ASSERT(m_videoOutput);
+AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+if (!videoOutput)
+return false;
 unsigned conv = 0;
 if (!CVPixelFormatFromQtFormat(qtFormat, conv))
 return false;
-NSArray *formats = [m_videoOutput availableVideoCVPixelFormatTypes];
+NSArray *formats = [videoOutput availableVideoCVPixelFormatTypes];
 if (!formats || !formats.count)
 return false;
-if (m_service->videoOutput() && m_service->videoOutput()->surface()) {
+if (m_service->videoOutput()->surface()) {
 const QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
 if (!surface->supportedPixelFormats().contains(qtFormat))
 return false;
@@ -539,31 +548,30 @@ bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFr
 return found;
 }
-void AVFCameraViewfinderSettingsControl2::applySettings()
+bool AVFCameraViewfinderSettingsControl2::applySettings()
 {
-if (m_settings.isNull())
-return;
-if (!updateAVFoundationObjects())
-return;
-if (m_session->state() != QCamera::LoadedState &&
-m_session->state() != QCamera::ActiveState) {
-return;
+if (m_service->session()->state() != QCamera::LoadedState &&
+m_service->session()->state() != QCamera::ActiveState) {
+return false;
 }
-NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
+AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+if (!captureDevice)
+return false;
+bool activeFormatChanged = false;
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 AVCaptureDeviceFormat *match = findBestFormatMatch(m_settings);
 if (match) {
-if (match != m_captureDevice.activeFormat) {
-const AVFConfigurationLock lock(m_captureDevice);
-if (!lock) {
+if (match != captureDevice.activeFormat) {
+const AVFConfigurationLock lock(captureDevice);
+if (lock) {
+captureDevice.activeFormat = match;
+activeFormatChanged = true;
+} else {
 qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
-return;
 }
-m_captureDevice.activeFormat = match;
 }
 } else {
 qDebugCamera() << Q_FUNC_INFO << "matching device format not found";
@@ -571,43 +579,48 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
 }
 #endif
+AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+if (videoOutput) {
 unsigned avfPixelFormat = 0;
 if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
 // If the the pixel format is not specified or invalid, pick the preferred video surface
 // format, or if no surface is set, the preferred capture device format
 const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
 QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
-QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
-: 0;
+QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
 if (surface) {
 if (m_service->videoOutput()->supportsTextures()) {
 pickedFormat = QVideoFrame::Format_ARGB32;
 } else {
-QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
+QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats();
 for (int i = 0; i < surfaceFormats.count(); ++i) {
 const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
 if (deviceFormats.contains(surfaceFormat)) {
 pickedFormat = surfaceFormat;
 break;
 }
 }
 }
 }
 CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
 }
 if (avfPixelFormat != 0) {
-[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
-forKey:(id)kCVPixelBufferPixelFormatTypeKey];
-m_videoOutput.videoSettings = videoSettings;
-}
-qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings);
+NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
+[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
+forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+videoOutput.videoSettings = videoSettings;
+}
+}
+qt_set_framerate_limits(captureDevice, videoConnection(), m_settings);
+return activeFormatChanged;
 }
 QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings() const
@@ -615,33 +628,12 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings
 return m_settings;
 }
-bool AVFCameraViewfinderSettingsControl2::updateAVFoundationObjects() const
+AVCaptureConnection *AVFCameraViewfinderSettingsControl2::videoConnection() const
 {
-m_session = 0;
-m_captureDevice = 0;
-m_videoOutput = 0;
-m_videoConnection = 0;
-if (!m_service->session())
-return false;
-if (!m_service->session()->videoCaptureDevice())
-return false;
 if (!m_service->videoOutput() || !m_service->videoOutput()->videoDataOutput())
-return false;
-AVCaptureVideoDataOutput *output = m_service->videoOutput()->videoDataOutput();
-AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
-if (!connection)
-return false;
-m_session = m_service->session();
-m_captureDevice = m_session->videoCaptureDevice();
-m_videoOutput = output;
-m_videoConnection = connection;
-return true;
+return nil;
+return [m_service->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
 }
 AVFCameraViewfinderSettingsControl::AVFCameraViewfinderSettingsControl(AVFCameraService *service)

View File

@@ -62,11 +62,13 @@ public:
 QImageEncoderSettings imageSettings() const Q_DECL_OVERRIDE;
 void setImageSettings(const QImageEncoderSettings &settings) Q_DECL_OVERRIDE;
+QImageEncoderSettings requestedSettings() const;
 private:
 AVFCameraService *m_service;
 QImageEncoderSettings m_settings;
-void applySettings();
+bool applySettings();
 bool videoCaptureDeviceIsValid() const;
 };

View File

@@ -115,6 +115,11 @@ QList<QSize> AVFImageEncoderControl::supportedResolutions(const QImageEncoderSet
 return resolutions;
 }
+QImageEncoderSettings AVFImageEncoderControl::requestedSettings() const
+{
+return m_settings;
+}
 QImageEncoderSettings AVFImageEncoderControl::imageSettings() const
 {
 QImageEncoderSettings settings;
@@ -163,40 +168,40 @@ QImageEncoderSettings AVFImageEncoderControl::imageSettings() const
 void AVFImageEncoderControl::setImageSettings(const QImageEncoderSettings &settings)
 {
-if (m_settings == settings || settings.isNull())
+if (m_settings == settings)
 return;
 m_settings = settings;
 applySettings();
 }
-void AVFImageEncoderControl::applySettings()
+bool AVFImageEncoderControl::applySettings()
 {
 if (!videoCaptureDeviceIsValid())
-return;
+return false;
 AVFCameraSession *session = m_service->session();
 if (!session || (session->state() != QCamera::ActiveState
 && session->state() != QCamera::LoadedState)) {
-return;
+return false;
 }
 if (!m_service->imageCaptureControl()
 || !m_service->imageCaptureControl()->stillImageOutput()) {
 qDebugCamera() << Q_FUNC_INFO << "no still image output";
-return;
+return false;
 }
 if (m_settings.codec().size()
 && m_settings.codec() != QLatin1String("jpeg")) {
 qDebugCamera() << Q_FUNC_INFO << "unsupported codec:" << m_settings.codec();
-return;
+return false;
 }
 QSize res(m_settings.resolution());
 if (res.isNull()) {
 qDebugCamera() << Q_FUNC_INFO << "invalid resolution:" << res;
-return;
+return false;
 }
 if (!res.isValid()) {
@@ -204,9 +209,11 @@ void AVFImageEncoderControl::applySettings()
 // Here we could choose the best format available, but
 // activeFormat is already equal to 'preset high' by default,
 // which is good enough, otherwise we can end in some format with low framerates.
-return;
+return false;
 }
+bool activeFormatChanged = false;
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
 if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
 AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
@@ -215,16 +222,17 @@ void AVFImageEncoderControl::applySettings()
 if (!match) {
 qDebugCamera() << Q_FUNC_INFO << "unsupported resolution:" << res;
-return;
+return false;
 }
 if (match != captureDevice.activeFormat) {
 const AVFConfigurationLock lock(captureDevice);
 if (!lock) {
 qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
-return;
+return false;
 }
 captureDevice.activeFormat = match;
+activeFormatChanged = true;
 }
 #if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
@@ -242,6 +250,8 @@ void AVFImageEncoderControl::applySettings()
 #endif
 // TODO: resolution without capture device format ...
 }
+return activeFormatChanged;
 }
 bool AVFImageEncoderControl::videoCaptureDeviceIsValid() const

View File

@@ -37,6 +37,7 @@
 #include "avfcameraservice.h"
 #include "avfcameracontrol.h"
 #include "avfaudioinputselectorcontrol.h"
+#include "avfcamerautility.h"
 #include <QtCore/qurl.h>
 #include <QtCore/qfileinfo.h>
@@ -330,6 +331,9 @@ void AVFMediaRecorderControl::setupSessionForCapture()
 && m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
 && m_session->state() != QCamera::UnloadedState) {
+// Lock the video capture device to make sure the active format is not reset
+const AVFConfigurationLock lock(m_session->videoCaptureDevice());
 // Add audio input
 // Allow recording even if something wrong happens with the audio input initialization
 AVCaptureDevice *audioDevice = m_audioInputControl->createCaptureDevice();
@@ -359,7 +363,10 @@ void AVFMediaRecorderControl::setupSessionForCapture()
 }
 } else if (m_connected
 && (!m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
-|| m_session->state() != QCamera::ActiveState)) {
+|| m_session->state() == QCamera::UnloadedState)) {
+// Lock the video capture device to make sure the active format is not reset
+const AVFConfigurationLock lock(m_session->videoCaptureDevice());
 [captureSession removeOutput:m_movieOutput];

View File

@@ -508,6 +508,9 @@ void MmRendererMediaPlayerControl::play()
 return;
 }
+if (m_mediaStatus == QMediaPlayer::EndOfMedia)
+m_position = 0;
 setPositionInternal(m_position);
 setVolumeInternal(m_muted ? 0 : m_volume);
 setPlaybackRateInternal(m_rate);
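With this guard in place, a play() issued after the MmRenderer (QNX) backend reports EndOfMedia restarts from position 0 instead of trying to resume at the stale end position. Illustrative only, using the public QMediaPlayer API rather than anything in this patch:

#include <QMediaPlayer>

// Loop a clip forever: once EndOfMedia is reported, play() starts over from the beginning.
void loopForever(QMediaPlayer *player)
{
    QObject::connect(player, &QMediaPlayer::mediaStatusChanged,
                     [player](QMediaPlayer::MediaStatus status) {
        if (status == QMediaPlayer::EndOfMedia)
            player->play();
    });
}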