AVFoundation: fix default camera viewfinder pixel format.

It was hardcoded to ARGB32, which is not a good idea, at least on iOS
where the necessary conversion is slow.
We now pick the QAbstractVideoSurface's preferred format, or if no
surface is set, we pick the default from AVFoundation.

As a result, the QML VideoOutput will now always use the NV12 format.

Change-Id: I65205c706455502883b8098f0b5c0577b4106e01
Reviewed-by: Timur Pocheptsov <Timur.Pocheptsov@digia.com>
This commit is contained in:
Yoann Lopes
2015-05-28 16:40:08 +02:00
parent 8143aff1b2
commit aeb79d4a8b
3 changed files with 33 additions and 41 deletions

View File

@@ -167,19 +167,13 @@ private:
int width = CVPixelBufferGetWidth(imageBuffer);
int height = CVPixelBufferGetHeight(imageBuffer);
QVideoFrame::PixelFormat format =
AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
QAbstractVideoBuffer *buffer = new CVPixelBufferVideoBuffer(imageBuffer);
if (format == QVideoFrame::Format_Invalid)
return;
QVideoFrame::PixelFormat format = QVideoFrame::Format_RGB32;
if ([captureOutput isKindOfClass:[AVCaptureVideoDataOutput class]]) {
NSDictionary *settings = ((AVCaptureVideoDataOutput *)captureOutput).videoSettings;
if (settings && [settings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
NSNumber *avf = [settings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey];
format = AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat([avf unsignedIntValue]);
}
}
QVideoFrame frame(buffer, QSize(width, height), format);
QVideoFrame frame(new CVPixelBufferVideoBuffer(imageBuffer), QSize(width, height), format);
m_renderer->syncHandleViewfinderFrame(frame);
}
@end
@@ -229,12 +223,6 @@ void AVFCameraRendererControl::configureAVCaptureSession(AVFCameraSession *camer
queue:queue];
dispatch_release(queue);
// Specify the pixel format
m_videoDataOutput.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
[m_cameraSession->captureSession() addOutput:m_videoDataOutput];
}

View File

@@ -283,12 +283,12 @@ void AVFCameraSession::setState(QCamera::State newState)
if (m_state == QCamera::ActiveState) {
Q_EMIT readyToConfigureConnections();
[m_captureSession commitConfiguration];
[m_captureSession startRunning];
m_defaultCodec = 0;
defaultCodec();
applyImageEncoderSettings();
applyViewfinderSettings();
[m_captureSession commitConfiguration];
[m_captureSession startRunning];
}
if (oldState == QCamera::ActiveState) {
@@ -374,8 +374,7 @@ void AVFCameraSession::applyViewfinderSettings()
}
}
if (!vfSettings.isNull())
vfControl->applySettings();
vfControl->applySettings();
}
}

View File

@@ -485,13 +485,7 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde
Q_ASSERT(m_videoOutput);
QVector<QVideoFrame::PixelFormat> qtFormats;
QList<QVideoFrame::PixelFormat> filter;
NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes];
const QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface() : 0;
if (surface)
filter = surface->supportedPixelFormats();
for (NSObject *obj in pixelFormats) {
if (![obj isKindOfClass:[NSNumber class]])
@@ -500,8 +494,8 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde
NSNumber *formatAsNSNumber = static_cast<NSNumber *>(obj);
// It's actually FourCharCode (== UInt32):
const QVideoFrame::PixelFormat qtFormat(QtPixelFormatFromCVFormat([formatAsNSNumber unsignedIntValue]));
if (qtFormat != QVideoFrame::Format_Invalid && (!surface || filter.contains(qtFormat))
&& !qtFormats.contains(qtFormat)) { // Can happen, for example, with 8BiPlanar existing in video/full range.
if (qtFormat != QVideoFrame::Format_Invalid
&& !qtFormats.contains(qtFormat)) { // Can happen, for example, with 8BiPlanar existing in video/full range.
qtFormats << qtFormat;
}
}
@@ -576,22 +570,33 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
#endif
unsigned avfPixelFormat = 0;
if (m_settings.pixelFormat() != QVideoFrame::Format_Invalid &&
convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
} else {
// We have to set the pixel format, otherwise AVFoundation can change it to something we do not support.
if (NSObject *oldFormat = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
[videoSettings setObject:oldFormat forKey:(id)kCVPixelBufferPixelFormatTypeKey];
} else {
[videoSettings setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
// If the pixel format is not specified or invalid, pick the preferred video surface
// format, or if no surface is set, the preferred capture device format
const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
QList<QVideoFrame::PixelFormat> surfaceFormats;
if (m_service->videoOutput() && m_service->videoOutput()->surface())
surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
QVideoFrame::PixelFormat format = deviceFormats.first();
for (int i = 0; i < surfaceFormats.count(); ++i) {
const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
if (deviceFormats.contains(surfaceFormat)) {
format = surfaceFormat;
break;
}
}
CVPixelFormatFromQtFormat(format, avfPixelFormat);
}
if (videoSettings.count)
if (avfPixelFormat != 0) {
[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
m_videoOutput.videoSettings = videoSettings;
}
qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings);
}