Merge remote-tracking branch 'origin/5.5' into 5.6

Conflicts:
	src/plugins/android/src/wrappers/jni/androidcamera.cpp

Change-Id: Ibb34f710b1dfb9a23b378462f31432581c6c26f4
This commit is contained in:
Liang Qi
2016-01-07 11:13:47 +01:00
16 changed files with 243 additions and 156 deletions

View File

@@ -32,7 +32,7 @@
****************************************************************************/
#include <alsa/asoundlib.h>
#if (!(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 10))
#if (!(SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 10)))
#error "Alsa version found too old, require >= 1.0.10"
#endif

View File

@@ -141,7 +141,7 @@ bool QAlsaAudioDeviceInfo::open()
QList<QByteArray> devices = availableDevices(mode);
if(dev.compare(QLatin1String("default")) == 0) {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
if (devices.size() > 0)
dev = QLatin1String(devices.first().constData());
else
@@ -150,7 +150,7 @@ bool QAlsaAudioDeviceInfo::open()
dev = QLatin1String("hw:0,0");
#endif
} else {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
dev = device;
#else
int idx = 0;
@@ -194,7 +194,7 @@ bool QAlsaAudioDeviceInfo::testSettings(const QAudioFormat& format) const
snd_pcm_hw_params_t *params;
QString dev;
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
dev = device;
if (dev.compare(QLatin1String("default")) == 0) {
QList<QByteArray> devices = availableDevices(QAudio::AudioOutput);
@@ -335,7 +335,7 @@ QList<QByteArray> QAlsaAudioDeviceInfo::availableDevices(QAudio::Mode mode)
QList<QByteArray> devices;
QByteArray filter;
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
// Create a list of all current audio devices that support mode
void **hints, **n;
char *name, *descr, *io;

View File

@@ -303,7 +303,7 @@ bool QAlsaAudioInput::open()
QString dev = QString(QLatin1String(m_device.constData()));
QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioInput);
if(dev.compare(QLatin1String("default")) == 0) {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
if (devices.size() > 0)
dev = QLatin1String(devices.first());
else
@@ -312,7 +312,7 @@ bool QAlsaAudioInput::open()
dev = QLatin1String("hw:0,0");
#endif
} else {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
dev = QLatin1String(m_device);
#else
int idx = 0;

View File

@@ -306,7 +306,7 @@ bool QAlsaAudioOutput::open()
QString dev = QString(QLatin1String(m_device.constData()));
QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioOutput);
if(dev.compare(QLatin1String("default")) == 0) {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
if (devices.size() > 0)
dev = QLatin1String(devices.first());
else
@@ -315,7 +315,7 @@ bool QAlsaAudioOutput::open()
dev = QLatin1String("hw:0,0");
#endif
} else {
#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14)
#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14))
dev = QLatin1String(m_device);
#else
int idx = 0;

View File

@@ -182,6 +182,8 @@ bool QAndroidCameraSession::open()
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
connect(m_camera, SIGNAL(previewStopped()), this, SLOT(onCameraPreviewStopped()));
connect(m_camera, &AndroidCamera::previewFailedToStart, this, &QAndroidCameraSession::onCameraPreviewFailedToStart);
connect(m_camera, &AndroidCamera::takePictureFailed, this, &QAndroidCameraSession::onCameraTakePictureFailed);
m_nativeOrientation = m_camera->getNativeOrientation();
@@ -546,6 +548,12 @@ void QAndroidCameraSession::cancelCapture()
m_captureCanceled = true;
}
void QAndroidCameraSession::onCameraTakePictureFailed()
{
emit imageCaptureError(m_currentImageCaptureId, QCameraImageCapture::ResourceError,
tr("Failed to capture image"));
}
void QAndroidCameraSession::onCameraPictureExposed()
{
if (m_captureCanceled)
@@ -617,6 +625,27 @@ void QAndroidCameraSession::onCameraPreviewStarted()
setReadyForCapture(true);
}
void QAndroidCameraSession::onCameraPreviewFailedToStart()
{
if (m_status == QCamera::StartingStatus) {
Q_EMIT error(QCamera::CameraError, tr("Camera preview failed to start."));
AndroidMultimediaUtils::enableOrientationListener(false);
m_camera->setPreviewSize(QSize());
m_camera->setPreviewTexture(0);
if (m_videoOutput) {
m_videoOutput->stop();
m_videoOutput->reset();
}
m_previewStarted = false;
m_status = QCamera::LoadedStatus;
emit statusChanged(m_status);
setReadyForCapture(false);
}
}
void QAndroidCameraSession::onCameraPreviewStopped()
{
if (m_status == QCamera::StoppingStatus) {

View File

@@ -121,11 +121,13 @@ private Q_SLOTS:
void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraTakePictureFailed();
void onCameraPictureExposed();
void onCameraPictureCaptured(const QByteArray &data);
void onLastPreviewFrameFetched(const QVideoFrame &frame);
void onNewPreviewFrame(const QVideoFrame &frame);
void onCameraPreviewStarted();
void onCameraPreviewFailedToStart();
void onCameraPreviewStopped();
private:

View File

@@ -244,12 +244,15 @@ public:
Q_SIGNALS:
void previewSizeChanged();
void previewStarted();
void previewFailedToStart();
void previewStopped();
void autoFocusStarted();
void whiteBalanceChanged();
void takePictureFailed();
void lastPreviewFrameFetched(const QVideoFrame &frame);
};
@@ -266,9 +269,11 @@ AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
connect(d, &AndroidCameraPrivate::previewSizeChanged, this, &AndroidCamera::previewSizeChanged);
connect(d, &AndroidCameraPrivate::previewStarted, this, &AndroidCamera::previewStarted);
connect(d, &AndroidCameraPrivate::previewFailedToStart, this, &AndroidCamera::previewFailedToStart);
connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
connect(d, &AndroidCameraPrivate::takePictureFailed, this, &AndroidCamera::takePictureFailed);
connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
}
@@ -1131,15 +1136,21 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
void AndroidCameraPrivate::autoFocus()
{
QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("autoFocus",
"(Landroid/hardware/Camera$AutoFocusCallback;)V",
m_cameraListener.object());
if (!exceptionCheckAndClear(env))
emit autoFocusStarted();
}
void AndroidCameraPrivate::cancelAutoFocus()
{
QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("cancelAutoFocus");
exceptionCheckAndClear(env);
}
bool AndroidCameraPrivate::isAutoExposureLockSupported()
@@ -1388,25 +1399,40 @@ void AndroidCameraPrivate::setJpegQuality(int quality)
void AndroidCameraPrivate::startPreview()
{
QJNIEnvironmentPrivate env;
setupPreviewFrameCallback();
m_camera.callMethod<void>("startPreview");
if (exceptionCheckAndClear(env))
emit previewFailedToStart();
else
emit previewStarted();
}
void AndroidCameraPrivate::stopPreview()
{
QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("stopPreview");
exceptionCheckAndClear(env);
emit previewStopped();
}
void AndroidCameraPrivate::takePicture()
{
QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("takePicture", "(Landroid/hardware/Camera$ShutterCallback;"
"Landroid/hardware/Camera$PictureCallback;"
"Landroid/hardware/Camera$PictureCallback;)V",
m_cameraListener.object(),
jobject(0),
m_cameraListener.object());
if (exceptionCheckAndClear(env))
emit takePictureFailed();
}
void AndroidCameraPrivate::setupPreviewFrameCallback()

View File

@@ -172,6 +172,7 @@ public:
Q_SIGNALS:
void previewSizeChanged();
void previewStarted();
void previewFailedToStart();
void previewStopped();
void autoFocusStarted();
@@ -179,6 +180,7 @@ Q_SIGNALS:
void whiteBalanceChanged();
void takePictureFailed();
void pictureExposed();
void pictureCaptured(const QByteArray &data);
void lastPreviewFrameFetched(const QVideoFrame &frame);

View File

@@ -105,8 +105,8 @@ Q_SIGNALS:
private:
static void updateCameraDevices();
void attachVideoInputDevice();
void applyImageEncoderSettings();
void applyViewfinderSettings();
bool applyImageEncoderSettings();
bool applyViewfinderSettings();
static int m_defaultCameraIndex;
static QList<AVFCameraInfo> m_cameraDevices;

View File

@@ -285,10 +285,23 @@ void AVFCameraSession::setState(QCamera::State newState)
Q_EMIT readyToConfigureConnections();
m_defaultCodec = 0;
defaultCodec();
applyImageEncoderSettings();
applyViewfinderSettings();
bool activeFormatSet = applyImageEncoderSettings();
activeFormatSet |= applyViewfinderSettings();
[m_captureSession commitConfiguration];
if (activeFormatSet) {
// According to the doc, the capture device must be locked before
// startRunning to prevent the format we set from being overridden by the
// session preset.
[videoCaptureDevice() lockForConfiguration:nil];
}
[m_captureSession startRunning];
if (activeFormatSet)
[videoCaptureDevice() unlockForConfiguration];
}
if (oldState == QCamera::ActiveState) {
@@ -357,27 +370,32 @@ void AVFCameraSession::attachVideoInputDevice()
}
}
void AVFCameraSession::applyImageEncoderSettings()
bool AVFCameraSession::applyImageEncoderSettings()
{
if (AVFImageEncoderControl *control = m_service->imageEncoderControl())
control->applySettings();
return control->applySettings();
return false;
}
void AVFCameraSession::applyViewfinderSettings()
bool AVFCameraSession::applyViewfinderSettings()
{
if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) {
QCameraViewfinderSettings vfSettings(vfControl->requestedSettings());
// Viewfinder and image capture resolutions must be the same, if an image capture
// resolution is set, it takes precedence over the viewfinder resolution.
if (AVFImageEncoderControl *imControl = m_service->imageEncoderControl()) {
const QSize imageResolution(imControl->imageSettings().resolution());
const QSize imageResolution(imControl->requestedSettings().resolution());
if (!imageResolution.isNull() && imageResolution.isValid()) {
vfSettings.setResolution(imageResolution);
vfControl->setViewfinderSettings(vfSettings);
return;
}
}
vfControl->applySettings();
return vfControl->applySettings();
}
return false;
}
void AVFCameraSession::addProbe(AVFMediaVideoProbeControl *probe)

View File

@@ -76,17 +76,13 @@ private:
AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const;
QVector<QVideoFrame::PixelFormat> viewfinderPixelFormats() const;
bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const;
void applySettings();
bool applySettings();
QCameraViewfinderSettings requestedSettings() const;
// Aux. function to extract things like captureDevice, videoOutput, etc.
bool updateAVFoundationObjects() const;
AVCaptureConnection *videoConnection() const;
AVFCameraService *m_service;
mutable AVFCameraSession *m_session;
QCameraViewfinderSettings m_settings;
mutable AVCaptureDevice *m_captureDevice;
mutable AVCaptureVideoDataOutput *m_videoOutput;
mutable AVCaptureConnection *m_videoConnection;
};
class AVFCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl

View File

@@ -206,7 +206,6 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice,
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
Q_ASSERT(captureDevice);
Q_ASSERT(videoConnection);
AVFPSRange fps;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
@@ -234,6 +233,7 @@ AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnec
#else // OSX < 10.7 or iOS < 7.0
{
#endif // QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (videoConnection)
fps = qt_connection_framerates(videoConnection);
}
@@ -244,24 +244,20 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection
const QCameraViewfinderSettings &settings)
{
Q_ASSERT(captureDevice);
Q_ASSERT(videoConnection);
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0))
qt_set_framerate_limits(captureDevice, settings);
else
qt_set_framerate_limits(videoConnection, settings);
#else
qt_set_framerate_limits(videoConnection, settings);
#endif
if (videoConnection)
qt_set_framerate_limits(videoConnection, settings);
}
} // Unnamed namespace.
AVFCameraViewfinderSettingsControl2::AVFCameraViewfinderSettingsControl2(AVFCameraService *service)
: m_service(service),
m_captureDevice(0),
m_videoOutput(0),
m_videoConnection(0)
: m_service(service)
{
Q_ASSERT(service);
}
@@ -270,8 +266,9 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
{
QList<QCameraViewfinderSettings> supportedSettings;
if (!updateAVFoundationObjects()) {
qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice) {
qDebugCamera() << Q_FUNC_INFO << "no capture device found";
return supportedSettings;
}
@@ -281,15 +278,16 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
if (!pixelFormats.size())
pixelFormats << QVideoFrame::Format_Invalid; // The default value.
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
if (!m_captureDevice.formats || !m_captureDevice.formats.count) {
if (!captureDevice.formats || !captureDevice.formats.count) {
qDebugCamera() << Q_FUNC_INFO << "no capture device formats found";
return supportedSettings;
}
const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(m_captureDevice,
m_session->defaultCodec()));
const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice,
m_service->session()->defaultCodec()));
for (int i = 0; i < formats.size(); ++i) {
AVCaptureDeviceFormat *format = formats[i];
@@ -320,8 +318,10 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
#else
{
#endif
AVCaptureConnection *connection = videoConnection();
if (connection) {
// TODO: resolution and PAR.
framerates << qt_connection_framerates(m_videoConnection);
framerates << qt_connection_framerates(connection);
for (int i = 0; i < pixelFormats.size(); ++i) {
for (int j = 0; j < framerates.size(); ++j) {
QCameraViewfinderSettings newSet;
@@ -332,6 +332,7 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
}
}
}
}
return supportedSettings;
}
@@ -340,20 +341,21 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
{
QCameraViewfinderSettings settings;
if (!updateAVFoundationObjects()) {
qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice) {
qDebugCamera() << Q_FUNC_INFO << "no capture device found";
return settings;
}
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
if (!m_captureDevice.activeFormat) {
if (!captureDevice.activeFormat) {
qDebugCamera() << Q_FUNC_INFO << "no active capture device format";
return settings;
}
const QSize res(qt_device_format_resolution(m_captureDevice.activeFormat));
const QSize par(qt_device_format_pixel_aspect_ratio(m_captureDevice.activeFormat));
const QSize res(qt_device_format_resolution(captureDevice.activeFormat));
const QSize par(qt_device_format_pixel_aspect_ratio(captureDevice.activeFormat));
if (res.isNull() || !res.isValid() || par.isNull() || !par.isValid()) {
qDebugCamera() << Q_FUNC_INFO << "failed to obtain resolution/pixel aspect ratio";
return settings;
@@ -364,12 +366,14 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
}
#endif
// TODO: resolution and PAR before 7.0.
const AVFPSRange fps = qt_current_framerates(m_captureDevice, m_videoConnection);
const AVFPSRange fps = qt_current_framerates(captureDevice, videoConnection());
settings.setMinimumFrameRate(fps.first);
settings.setMaximumFrameRate(fps.second);
if (NSObject *obj = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
if ([obj isKindOfClass:[NSNumber class]]) {
AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
if (videoOutput) {
NSObject *obj = [videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey];
if (obj && [obj isKindOfClass:[NSNumber class]]) {
NSNumber *nsNum = static_cast<NSNumber *>(obj);
settings.setPixelFormat(QtPixelFormatFromCVFormat([nsNum unsignedIntValue]));
}
@@ -380,11 +384,6 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
void AVFCameraViewfinderSettingsControl2::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
if (settings.isNull()) {
qDebugCamera() << Q_FUNC_INFO << "empty viewfinder settings";
return;
}
if (m_settings == settings)
return;
@@ -449,17 +448,19 @@ bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame:
AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(const QCameraViewfinderSettings &settings) const
{
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice || settings.isNull())
return nil;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
Q_ASSERT(m_captureDevice);
Q_ASSERT(m_session);
const QSize &resolution = settings.resolution();
if (!resolution.isNull() && resolution.isValid()) {
// Either the exact match (including high resolution for images on iOS)
// or a format with a resolution close to the requested one.
return qt_find_best_resolution_match(m_captureDevice, resolution,
m_session->defaultCodec());
return qt_find_best_resolution_match(captureDevice, resolution,
m_service->session()->defaultCodec());
}
// No resolution requested, what about framerates?
@@ -472,22 +473,28 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(
const qreal minFPS(settings.minimumFrameRate());
const qreal maxFPS(settings.maximumFrameRate());
if (minFPS || maxFPS)
return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS,
m_session->defaultCodec());
return qt_find_best_framerate_match(captureDevice, maxFPS ? maxFPS : minFPS,
m_service->session()->defaultCodec());
// Ignore PAR for the moment (PAR without resolution can
// pick a format with really bad resolution).
// No need to test pixel format, just return settings.
}
#endif
return nil;
}
QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinderPixelFormats() const
{
Q_ASSERT(m_videoOutput);
QVector<QVideoFrame::PixelFormat> qtFormats;
NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes];
AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
if (!videoOutput) {
qDebugCamera() << Q_FUNC_INFO << "no video output found";
return qtFormats;
}
NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes];
for (NSObject *obj in pixelFormats) {
if (![obj isKindOfClass:[NSNumber class]])
@@ -508,17 +515,19 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde
bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat,
unsigned &avfFormat)const
{
Q_ASSERT(m_videoOutput);
AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
if (!videoOutput)
return false;
unsigned conv = 0;
if (!CVPixelFormatFromQtFormat(qtFormat, conv))
return false;
NSArray *formats = [m_videoOutput availableVideoCVPixelFormatTypes];
NSArray *formats = [videoOutput availableVideoCVPixelFormatTypes];
if (!formats || !formats.count)
return false;
if (m_service->videoOutput() && m_service->videoOutput()->surface()) {
if (m_service->videoOutput()->surface()) {
const QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
if (!surface->supportedPixelFormats().contains(qtFormat))
return false;
@@ -539,31 +548,30 @@ bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFr
return found;
}
void AVFCameraViewfinderSettingsControl2::applySettings()
bool AVFCameraViewfinderSettingsControl2::applySettings()
{
if (m_settings.isNull())
return;
if (!updateAVFoundationObjects())
return;
if (m_session->state() != QCamera::LoadedState &&
m_session->state() != QCamera::ActiveState) {
return;
if (m_service->session()->state() != QCamera::LoadedState &&
m_service->session()->state() != QCamera::ActiveState) {
return false;
}
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice)
return false;
bool activeFormatChanged = false;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
AVCaptureDeviceFormat *match = findBestFormatMatch(m_settings);
if (match) {
if (match != m_captureDevice.activeFormat) {
const AVFConfigurationLock lock(m_captureDevice);
if (!lock) {
if (match != captureDevice.activeFormat) {
const AVFConfigurationLock lock(captureDevice);
if (lock) {
captureDevice.activeFormat = match;
activeFormatChanged = true;
} else {
qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
return;
}
m_captureDevice.activeFormat = match;
}
} else {
qDebugCamera() << Q_FUNC_INFO << "matching device format not found";
@@ -571,6 +579,8 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
}
#endif
AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
if (videoOutput) {
unsigned avfPixelFormat = 0;
if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
// If the pixel format is not specified or invalid, pick the preferred video surface
@@ -579,13 +589,12 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
: 0;
QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
if (surface) {
if (m_service->videoOutput()->supportsTextures()) {
pickedFormat = QVideoFrame::Format_ARGB32;
} else {
QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats();
for (int i = 0; i < surfaceFormats.count(); ++i) {
const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
@@ -601,13 +610,17 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
}
if (avfPixelFormat != 0) {
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
m_videoOutput.videoSettings = videoSettings;
videoOutput.videoSettings = videoSettings;
}
}
qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings);
qt_set_framerate_limits(captureDevice, videoConnection(), m_settings);
return activeFormatChanged;
}
QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings() const
@@ -615,33 +628,12 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings
return m_settings;
}
bool AVFCameraViewfinderSettingsControl2::updateAVFoundationObjects() const
AVCaptureConnection *AVFCameraViewfinderSettingsControl2::videoConnection() const
{
m_session = 0;
m_captureDevice = 0;
m_videoOutput = 0;
m_videoConnection = 0;
if (!m_service->session())
return false;
if (!m_service->session()->videoCaptureDevice())
return false;
if (!m_service->videoOutput() || !m_service->videoOutput()->videoDataOutput())
return false;
return nil;
AVCaptureVideoDataOutput *output = m_service->videoOutput()->videoDataOutput();
AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
if (!connection)
return false;
m_session = m_service->session();
m_captureDevice = m_session->videoCaptureDevice();
m_videoOutput = output;
m_videoConnection = connection;
return true;
return [m_service->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
}
AVFCameraViewfinderSettingsControl::AVFCameraViewfinderSettingsControl(AVFCameraService *service)

View File

@@ -62,11 +62,13 @@ public:
QImageEncoderSettings imageSettings() const Q_DECL_OVERRIDE;
void setImageSettings(const QImageEncoderSettings &settings) Q_DECL_OVERRIDE;
QImageEncoderSettings requestedSettings() const;
private:
AVFCameraService *m_service;
QImageEncoderSettings m_settings;
void applySettings();
bool applySettings();
bool videoCaptureDeviceIsValid() const;
};

View File

@@ -115,6 +115,11 @@ QList<QSize> AVFImageEncoderControl::supportedResolutions(const QImageEncoderSet
return resolutions;
}
QImageEncoderSettings AVFImageEncoderControl::requestedSettings() const
{
return m_settings;
}
QImageEncoderSettings AVFImageEncoderControl::imageSettings() const
{
QImageEncoderSettings settings;
@@ -163,40 +168,40 @@ QImageEncoderSettings AVFImageEncoderControl::imageSettings() const
void AVFImageEncoderControl::setImageSettings(const QImageEncoderSettings &settings)
{
if (m_settings == settings || settings.isNull())
if (m_settings == settings)
return;
m_settings = settings;
applySettings();
}
void AVFImageEncoderControl::applySettings()
bool AVFImageEncoderControl::applySettings()
{
if (!videoCaptureDeviceIsValid())
return;
return false;
AVFCameraSession *session = m_service->session();
if (!session || (session->state() != QCamera::ActiveState
&& session->state() != QCamera::LoadedState)) {
return;
return false;
}
if (!m_service->imageCaptureControl()
|| !m_service->imageCaptureControl()->stillImageOutput()) {
qDebugCamera() << Q_FUNC_INFO << "no still image output";
return;
return false;
}
if (m_settings.codec().size()
&& m_settings.codec() != QLatin1String("jpeg")) {
qDebugCamera() << Q_FUNC_INFO << "unsupported codec:" << m_settings.codec();
return;
return false;
}
QSize res(m_settings.resolution());
if (res.isNull()) {
qDebugCamera() << Q_FUNC_INFO << "invalid resolution:" << res;
return;
return false;
}
if (!res.isValid()) {
@@ -204,9 +209,11 @@ void AVFImageEncoderControl::applySettings()
// Here we could choose the best format available, but
// activeFormat is already equal to 'preset high' by default,
// which is good enough, otherwise we can end in some format with low framerates.
return;
return false;
}
bool activeFormatChanged = false;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
@@ -215,16 +222,17 @@ void AVFImageEncoderControl::applySettings()
if (!match) {
qDebugCamera() << Q_FUNC_INFO << "unsupported resolution:" << res;
return;
return false;
}
if (match != captureDevice.activeFormat) {
const AVFConfigurationLock lock(captureDevice);
if (!lock) {
qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
return;
return false;
}
captureDevice.activeFormat = match;
activeFormatChanged = true;
}
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
@@ -242,6 +250,8 @@ void AVFImageEncoderControl::applySettings()
#endif
// TODO: resolution without capture device format ...
}
return activeFormatChanged;
}
bool AVFImageEncoderControl::videoCaptureDeviceIsValid() const

View File

@@ -37,6 +37,7 @@
#include "avfcameraservice.h"
#include "avfcameracontrol.h"
#include "avfaudioinputselectorcontrol.h"
#include "avfcamerautility.h"
#include <QtCore/qurl.h>
#include <QtCore/qfileinfo.h>
@@ -330,6 +331,9 @@ void AVFMediaRecorderControl::setupSessionForCapture()
&& m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
&& m_session->state() != QCamera::UnloadedState) {
// Lock the video capture device to make sure the active format is not reset
const AVFConfigurationLock lock(m_session->videoCaptureDevice());
// Add audio input
// Allow recording even if something wrong happens with the audio input initialization
AVCaptureDevice *audioDevice = m_audioInputControl->createCaptureDevice();
@@ -359,7 +363,10 @@ void AVFMediaRecorderControl::setupSessionForCapture()
}
} else if (m_connected
&& (!m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
|| m_session->state() != QCamera::ActiveState)) {
|| m_session->state() == QCamera::UnloadedState)) {
// Lock the video capture device to make sure the active format is not reset
const AVFConfigurationLock lock(m_session->videoCaptureDevice());
[captureSession removeOutput:m_movieOutput];

View File

@@ -508,6 +508,9 @@ void MmRendererMediaPlayerControl::play()
return;
}
if (m_mediaStatus == QMediaPlayer::EndOfMedia)
m_position = 0;
setPositionInternal(m_position);
setVolumeInternal(m_muted ? 0 : m_volume);
setPlaybackRateInternal(m_rate);