AVFoundation: improve viewfinder settings.

Retrieving the supported viewfinder settings doesn't actually require
a video surface; only a capture device is needed. The supported settings
can now be retrieved without calling QCamera::setSurface().
More generally, all viewfinder settings that don't require a video
surface can now be retrieved and set before calling setSurface().

Task-number: QTBUG-49170
Change-Id: I39b14eeb40517a9ba399748b5778be8bbc8cfcda
Reviewed-by: Timur Pocheptsov <timur.pocheptsov@theqtcompany.com>
Authored by Yoann Lopes on 2015-11-12 15:40:21 +01:00
Committed by Timur Pocheptsov

parent 5135ffaf2a
commit 9b5a67d810

2 changed files with 101 additions and 109 deletions
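For context, a minimal client-side sketch of the behaviour this change enables: querying the supported viewfinder settings once the camera is loaded, before any viewfinder or surface is attached. The QCamera/QCameraViewfinderSettings calls are the public Qt 5 Multimedia API this backend serves; the helper function itself is hypothetical, not part of this change.

// Sketch only: query viewfinder settings with no surface set (Qt 5 Multimedia).
#include <QCamera>
#include <QCameraViewfinderSettings>
#include <QList>
#include <QDebug>

static void dumpSupportedViewfinderSettings()
{
    QCamera camera;   // default capture device
    camera.load();    // in real code, wait for statusChanged() -> QCamera::LoadedStatus

    // With this change the AVFoundation backend only needs the capture device
    // here; previously nothing was reported until a video surface was set.
    const QList<QCameraViewfinderSettings> all = camera.supportedViewfinderSettings();
    for (const QCameraViewfinderSettings &s : all) {
        qDebug() << s.resolution()
                 << s.minimumFrameRate() << s.maximumFrameRate()
                 << int(s.pixelFormat());
    }
}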


@@ -78,15 +78,11 @@ private:
     bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const;
     void applySettings();
     QCameraViewfinderSettings requestedSettings() const;
-    // Aux. function to extract things like captureDevice, videoOutput, etc.
-    bool updateAVFoundationObjects() const;
+
+    AVCaptureConnection *videoConnection() const;
 
     AVFCameraService *m_service;
-    mutable AVFCameraSession *m_session;
     QCameraViewfinderSettings m_settings;
-    mutable AVCaptureDevice *m_captureDevice;
-    mutable AVCaptureVideoDataOutput *m_videoOutput;
-    mutable AVCaptureConnection *m_videoConnection;
 };
 
 class AVFCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl


@@ -206,7 +206,6 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice,
 AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
 {
     Q_ASSERT(captureDevice);
-    Q_ASSERT(videoConnection);
 
     AVFPSRange fps;
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
@@ -234,7 +233,8 @@ AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnec
 #else // OSX < 10.7 or iOS < 7.0
     {
 #endif // QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
-        fps = qt_connection_framerates(videoConnection);
+        if (videoConnection)
+            fps = qt_connection_framerates(videoConnection);
     }
 
     return fps;
@@ -244,24 +244,20 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection
                              const QCameraViewfinderSettings &settings)
 {
     Q_ASSERT(captureDevice);
-    Q_ASSERT(videoConnection);
 
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
     if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0))
         qt_set_framerate_limits(captureDevice, settings);
     else
-        qt_set_framerate_limits(videoConnection, settings);
-#else
-    qt_set_framerate_limits(videoConnection, settings);
 #endif
+    if (videoConnection)
+        qt_set_framerate_limits(videoConnection, settings);
 }
 
 } // Unnamed namespace.
 
 AVFCameraViewfinderSettingsControl2::AVFCameraViewfinderSettingsControl2(AVFCameraService *service)
-    : m_service(service),
-      m_captureDevice(0),
-      m_videoOutput(0),
-      m_videoConnection(0)
+    : m_service(service)
 {
     Q_ASSERT(service);
 }
@@ -270,8 +266,9 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
 {
     QList<QCameraViewfinderSettings> supportedSettings;
 
-    if (!updateAVFoundationObjects()) {
-        qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice) {
+        qDebugCamera() << Q_FUNC_INFO << "no capture device found";
         return supportedSettings;
     }
@@ -281,15 +278,16 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
     if (!pixelFormats.size())
         pixelFormats << QVideoFrame::Format_Invalid; // The default value.
 
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
     if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-        if (!m_captureDevice.formats || !m_captureDevice.formats.count) {
+        if (!captureDevice.formats || !captureDevice.formats.count) {
             qDebugCamera() << Q_FUNC_INFO << "no capture device formats found";
             return supportedSettings;
         }
 
-        const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(m_captureDevice,
-                                                                                m_session->defaultCodec()));
+        const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice,
+                                                                                m_service->session()->defaultCodec()));
 
         for (int i = 0; i < formats.size(); ++i) {
             AVCaptureDeviceFormat *format = formats[i];
@@ -320,15 +318,18 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
 #else
     {
 #endif
-        // TODO: resolution and PAR.
-        framerates << qt_connection_framerates(m_videoConnection);
-        for (int i = 0; i < pixelFormats.size(); ++i) {
-            for (int j = 0; j < framerates.size(); ++j) {
-                QCameraViewfinderSettings newSet;
-                newSet.setPixelFormat(pixelFormats[i]);
-                newSet.setMinimumFrameRate(framerates[j].first);
-                newSet.setMaximumFrameRate(framerates[j].second);
-                supportedSettings << newSet;
+        AVCaptureConnection *connection = videoConnection();
+        if (connection) {
+            // TODO: resolution and PAR.
+            framerates << qt_connection_framerates(connection);
+            for (int i = 0; i < pixelFormats.size(); ++i) {
+                for (int j = 0; j < framerates.size(); ++j) {
+                    QCameraViewfinderSettings newSet;
+                    newSet.setPixelFormat(pixelFormats[i]);
+                    newSet.setMinimumFrameRate(framerates[j].first);
+                    newSet.setMaximumFrameRate(framerates[j].second);
+                    supportedSettings << newSet;
+                }
             }
         }
     }
@@ -340,20 +341,21 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
 {
     QCameraViewfinderSettings settings;
 
-    if (!updateAVFoundationObjects()) {
-        qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice) {
+        qDebugCamera() << Q_FUNC_INFO << "no capture device found";
         return settings;
     }
 
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
     if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-        if (!m_captureDevice.activeFormat) {
+        if (!captureDevice.activeFormat) {
             qDebugCamera() << Q_FUNC_INFO << "no active capture device format";
             return settings;
         }
 
-        const QSize res(qt_device_format_resolution(m_captureDevice.activeFormat));
-        const QSize par(qt_device_format_pixel_aspect_ratio(m_captureDevice.activeFormat));
+        const QSize res(qt_device_format_resolution(captureDevice.activeFormat));
+        const QSize par(qt_device_format_pixel_aspect_ratio(captureDevice.activeFormat));
         if (res.isNull() || !res.isValid() || par.isNull() || !par.isValid()) {
             qDebugCamera() << Q_FUNC_INFO << "failed to obtain resolution/pixel aspect ratio";
             return settings;
@@ -364,12 +366,14 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
     }
 #endif
     // TODO: resolution and PAR before 7.0.
-    const AVFPSRange fps = qt_current_framerates(m_captureDevice, m_videoConnection);
+    const AVFPSRange fps = qt_current_framerates(captureDevice, videoConnection());
     settings.setMinimumFrameRate(fps.first);
     settings.setMaximumFrameRate(fps.second);
 
-    if (NSObject *obj = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
-        if ([obj isKindOfClass:[NSNumber class]]) {
+    AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+    if (videoOutput) {
+        NSObject *obj = [videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey];
+        if (obj && [obj isKindOfClass:[NSNumber class]]) {
             NSNumber *nsNum = static_cast<NSNumber *>(obj);
             settings.setPixelFormat(QtPixelFormatFromCVFormat([nsNum unsignedIntValue]));
         }
@@ -449,17 +453,19 @@ bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame:
 AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(const QCameraViewfinderSettings &settings) const
 {
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice)
+        return nil;
+
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
     if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
-        Q_ASSERT(m_captureDevice);
-        Q_ASSERT(m_session);
-
         const QSize &resolution = settings.resolution();
         if (!resolution.isNull() && resolution.isValid()) {
             // Either the exact match (including high resolution for images on iOS)
             // or a format with a resolution close to the requested one.
-            return qt_find_best_resolution_match(m_captureDevice, resolution,
-                                                 m_session->defaultCodec());
+            return qt_find_best_resolution_match(captureDevice, resolution,
+                                                 m_service->session()->defaultCodec());
         }
 
         // No resolution requested, what about framerates?
// No resolution requested, what about framerates? // No resolution requested, what about framerates?
@@ -472,22 +478,28 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(
         const qreal minFPS(settings.minimumFrameRate());
         const qreal maxFPS(settings.maximumFrameRate());
         if (minFPS || maxFPS)
-            return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS,
-                                                m_session->defaultCodec());
+            return qt_find_best_framerate_match(captureDevice, maxFPS ? maxFPS : minFPS,
+                                                m_service->session()->defaultCodec());
         // Ignore PAR for the moment (PAR without resolution can
         // pick a format with really bad resolution).
         // No need to test pixel format, just return settings.
     }
 #endif
 
     return nil;
 }
 
 QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinderPixelFormats() const
 {
-    Q_ASSERT(m_videoOutput);
-
     QVector<QVideoFrame::PixelFormat> qtFormats;
-    NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes];
+
+    AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+    if (!videoOutput) {
+        qDebugCamera() << Q_FUNC_INFO << "no video output found";
+        return qtFormats;
+    }
+
+    NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes];
 
     for (NSObject *obj in pixelFormats) {
         if (![obj isKindOfClass:[NSNumber class]])
@@ -508,17 +520,19 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde
 bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat,
                                                                         unsigned &avfFormat)const
 {
-    Q_ASSERT(m_videoOutput);
+    AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+    if (!videoOutput)
+        return false;
 
     unsigned conv = 0;
     if (!CVPixelFormatFromQtFormat(qtFormat, conv))
         return false;
 
-    NSArray *formats = [m_videoOutput availableVideoCVPixelFormatTypes];
+    NSArray *formats = [videoOutput availableVideoCVPixelFormatTypes];
     if (!formats || !formats.count)
         return false;
 
-    if (m_service->videoOutput() && m_service->videoOutput()->surface()) {
+    if (m_service->videoOutput()->surface()) {
         const QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
         if (!surface->supportedPixelFormats().contains(qtFormat))
             return false;
@@ -544,26 +558,27 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
     if (m_settings.isNull())
         return;
 
-    if (!updateAVFoundationObjects())
-        return;
-
-    if (m_session->state() != QCamera::LoadedState &&
-        m_session->state() != QCamera::ActiveState) {
+    if (m_service->session()->state() != QCamera::LoadedState &&
+        m_service->session()->state() != QCamera::ActiveState) {
         return;
     }
 
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice)
+        return;
+
     NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
 #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
     AVCaptureDeviceFormat *match = findBestFormatMatch(m_settings);
     if (match) {
-        if (match != m_captureDevice.activeFormat) {
-            const AVFConfigurationLock lock(m_captureDevice);
+        if (match != captureDevice.activeFormat) {
+            const AVFConfigurationLock lock(captureDevice);
             if (!lock) {
                 qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
                 return;
             }
-            m_captureDevice.activeFormat = match;
+            captureDevice.activeFormat = match;
         }
     } else {
         qDebugCamera() << Q_FUNC_INFO << "matching device format not found";
@@ -571,43 +586,45 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
     }
 #endif
 
-    unsigned avfPixelFormat = 0;
-    if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
-        // If the the pixel format is not specified or invalid, pick the preferred video surface
-        // format, or if no surface is set, the preferred capture device format
-        const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
-        QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
-
-        QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
-                                                                  : 0;
-        if (surface) {
-            if (m_service->videoOutput()->supportsTextures()) {
-                pickedFormat = QVideoFrame::Format_ARGB32;
-            } else {
-                QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
-                for (int i = 0; i < surfaceFormats.count(); ++i) {
-                    const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
-                    if (deviceFormats.contains(surfaceFormat)) {
-                        pickedFormat = surfaceFormat;
-                        break;
-                    }
-                }
-            }
-        }
-
-        CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
-    }
-
-    if (avfPixelFormat != 0) {
-        [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
-                          forKey:(id)kCVPixelBufferPixelFormatTypeKey];
-        m_videoOutput.videoSettings = videoSettings;
-    }
-
-    qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings);
+    AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0;
+    if (videoOutput) {
+        unsigned avfPixelFormat = 0;
+        if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
+            // If the the pixel format is not specified or invalid, pick the preferred video surface
+            // format, or if no surface is set, the preferred capture device format
+            const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
+            QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
+
+            QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
+            if (surface) {
+                if (m_service->videoOutput()->supportsTextures()) {
+                    pickedFormat = QVideoFrame::Format_ARGB32;
+                } else {
+                    QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats();
+                    for (int i = 0; i < surfaceFormats.count(); ++i) {
+                        const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
+                        if (deviceFormats.contains(surfaceFormat)) {
+                            pickedFormat = surfaceFormat;
+                            break;
+                        }
+                    }
+                }
+            }
+
+            CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
+        }
+
+        if (avfPixelFormat != 0) {
+            [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
+                              forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+            videoOutput.videoSettings = videoSettings;
+        }
+    }
+
+    qt_set_framerate_limits(captureDevice, videoConnection(), m_settings);
 }
 
 QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings() const
@@ -615,33 +632,12 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings
     return m_settings;
 }
 
-bool AVFCameraViewfinderSettingsControl2::updateAVFoundationObjects() const
+AVCaptureConnection *AVFCameraViewfinderSettingsControl2::videoConnection() const
 {
-    m_session = 0;
-    m_captureDevice = 0;
-    m_videoOutput = 0;
-    m_videoConnection = 0;
-
-    if (!m_service->session())
-        return false;
-
-    if (!m_service->session()->videoCaptureDevice())
-        return false;
-
     if (!m_service->videoOutput() || !m_service->videoOutput()->videoDataOutput())
-        return false;
-
-    AVCaptureVideoDataOutput *output = m_service->videoOutput()->videoDataOutput();
-    AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
-    if (!connection)
-        return false;
-
-    m_session = m_service->session();
-    m_captureDevice = m_session->videoCaptureDevice();
-    m_videoOutput = output;
-    m_videoConnection = connection;
-
-    return true;
+        return nil;
+
+    return [m_service->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
 }
 
 AVFCameraViewfinderSettingsControl::AVFCameraViewfinderSettingsControl(AVFCameraService *service)