qtmultimedia/src/plugins/avfoundation/camera/avfcamerasession.mm
Timur Pocheptsov f839f9e3eb AVCaptureDeviceFormat - avoid duplicates (OS X/iOS)
Excluding the video-range format (iOS) is not the right way to avoid "duplicates": with
other devices there can also be duplicates (formats with the same resolution)
but completely different pixel formats. Since we do not know in advance what they will be,
we take the media subtype from the capture device's initial preset and use it
as a filter. Also update the viewfinder and image encoder settings controls.

Change-Id: If20aea24b19b43574d5c3e9bf2ba85f50fc08916
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
2015-03-10 16:35:59 +00:00
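To illustrate the approach described in the commit message, here is a minimal sketch (not the actual Qt helper; the function name is hypothetical) of how a device's formats might be filtered by the media subtype taken from the initial preset, so that formats differing only in pixel format are not reported as resolution duplicates:

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Hypothetical helper, for illustration only.
static NSArray *qt_filtered_device_formats(AVCaptureDevice *device, FourCharCode subtypeFilter)
{
    // Keep only the formats whose media subtype matches the filter taken from
    // the capture device's initial preset (0 means "accept any subtype").
    NSMutableArray *result = [NSMutableArray array];
    for (AVCaptureDeviceFormat *format in device.formats) {
        if (!format.formatDescription)
            continue;
        const FourCharCode subtype = CMFormatDescriptionGetMediaSubType(format.formatDescription);
        if (subtypeFilter && subtype != subtypeFilter)
            continue;
        [result addObject:format];
    }
    return result;
}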


/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd and/or its subsidiary(-ies).
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfcameradebug.h"
#include "avfcamerasession.h"
#include "avfcameraservice.h"
#include "avfcameracontrol.h"
#include "avfcamerarenderercontrol.h"
#include "avfcameradevicecontrol.h"
#include "avfaudioinputselectorcontrol.h"
#include "avfmediavideoprobecontrol.h"
#include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h"
#include "avfcamerautility.h"
#include <CoreFoundation/CoreFoundation.h>
#include <Foundation/Foundation.h>
#include <QtCore/qdatetime.h>
#include <QtCore/qurl.h>
#include <QtCore/qdebug.h>
QT_USE_NAMESPACE
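
//Camera device information is enumerated lazily and cached in these statics,
//shared by all AVFCameraSession instances.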
QByteArray AVFCameraSession::m_defaultCameraDevice;
QList<QByteArray> AVFCameraSession::m_cameraDevices;
QMap<QByteArray, AVFCameraInfo> AVFCameraSession::m_cameraInfo;

@interface AVFCameraSessionObserver : NSObject
{
@private
    AVFCameraSession *m_session;
    AVCaptureSession *m_captureSession;
}

- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session;
- (void) processRuntimeError:(NSNotification *)notification;
- (void) processSessionStarted:(NSNotification *)notification;
- (void) processSessionStopped:(NSNotification *)notification;

@end

@implementation AVFCameraSessionObserver

- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session
{
    if (!(self = [super init]))
        return nil;

    self->m_session = session;
    self->m_captureSession = session->captureSession();

    [m_captureSession retain];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(processRuntimeError:)
                                                 name:AVCaptureSessionRuntimeErrorNotification
                                               object:m_captureSession];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(processSessionStarted:)
                                                 name:AVCaptureSessionDidStartRunningNotification
                                               object:m_captureSession];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(processSessionStopped:)
                                                 name:AVCaptureSessionDidStopRunningNotification
                                               object:m_captureSession];

    return self;
}

- (void) dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionRuntimeErrorNotification
                                                  object:m_captureSession];

    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStartRunningNotification
                                                  object:m_captureSession];

    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStopRunningNotification
                                                  object:m_captureSession];
    [m_captureSession release];
    [super dealloc];
}

- (void) processRuntimeError:(NSNotification *)notification
{
    Q_UNUSED(notification);
    QMetaObject::invokeMethod(m_session, "processRuntimeError", Qt::AutoConnection);
}

- (void) processSessionStarted:(NSNotification *)notification
{
    Q_UNUSED(notification);
    QMetaObject::invokeMethod(m_session, "processSessionStarted", Qt::AutoConnection);
}

- (void) processSessionStopped:(NSNotification *)notification
{
    Q_UNUSED(notification);
    QMetaObject::invokeMethod(m_session, "processSessionStopped", Qt::AutoConnection);
}

@end

AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
    : QObject(parent)
    , m_service(service)
    , m_state(QCamera::UnloadedState)
    , m_active(false)
    , m_videoInput(nil)
    , m_audioInput(nil)
    , m_defaultCodec(0)
{
    m_captureSession = [[AVCaptureSession alloc] init];
    m_observer = [[AVFCameraSessionObserver alloc] initWithCameraSession:this];

    //configuration is committed during the transition to the Active state
    [m_captureSession beginConfiguration];
}

AVFCameraSession::~AVFCameraSession()
{
    if (m_videoInput) {
        [m_captureSession removeInput:m_videoInput];
        [m_videoInput release];
    }

    if (m_audioInput) {
        [m_captureSession removeInput:m_audioInput];
        [m_audioInput release];
    }

    [m_observer release];
    [m_captureSession release];
}

const QByteArray &AVFCameraSession::defaultCameraDevice()
{
    if (m_cameraDevices.isEmpty())
        updateCameraDevices();

    return m_defaultCameraDevice;
}

const QList<QByteArray> &AVFCameraSession::availableCameraDevices()
{
    if (m_cameraDevices.isEmpty())
        updateCameraDevices();

    return m_cameraDevices;
}

AVFCameraInfo AVFCameraSession::cameraDeviceInfo(const QByteArray &device)
{
    if (m_cameraDevices.isEmpty())
        updateCameraDevices();

    return m_cameraInfo.value(device);
}

void AVFCameraSession::updateCameraDevices()
{
    m_defaultCameraDevice.clear();
    m_cameraDevices.clear();
    m_cameraInfo.clear();

    AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (defaultDevice)
        m_defaultCameraDevice = QByteArray([[defaultDevice uniqueID] UTF8String]);

    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in videoDevices) {
        QByteArray deviceId([[device uniqueID] UTF8String]);

        AVFCameraInfo info;
        info.description = QString::fromNSString([device localizedName]);

        // There is no API to get the camera sensor orientation, however, cameras are always
        // mounted in landscape on iDevices.
        //   - Back-facing cameras have the top side of the sensor aligned with the right side of
        //     the screen when held in portrait ==> 270 degrees clockwise angle
        //   - Front-facing cameras have the top side of the sensor aligned with the left side of
        //     the screen when held in portrait ==> 270 degrees clockwise angle
        // On Mac OS, the position will always be unspecified and the sensor orientation unknown.
        switch (device.position) {
        case AVCaptureDevicePositionBack:
            info.position = QCamera::BackFace;
            info.orientation = 270;
            break;
        case AVCaptureDevicePositionFront:
            info.position = QCamera::FrontFace;
            info.orientation = 270;
            break;
        default:
            info.position = QCamera::UnspecifiedPosition;
            info.orientation = 0;
            break;
        }

        m_cameraDevices << deviceId;
        m_cameraInfo.insert(deviceId, info);
    }
}

void AVFCameraSession::setVideoOutput(AVFCameraRendererControl *output)
{
    m_videoOutput = output;
    if (output)
        output->configureAVCaptureSession(this);
}

AVCaptureDevice *AVFCameraSession::videoCaptureDevice() const
{
    if (m_videoInput)
        return m_videoInput.device;

    return 0;
}

QCamera::State AVFCameraSession::state() const
{
    if (m_active)
        return QCamera::ActiveState;

    return m_state == QCamera::ActiveState ? QCamera::LoadedState : m_state;
}

void AVFCameraSession::setState(QCamera::State newState)
{
    if (m_state == newState)
        return;

    qDebugCamera() << Q_FUNC_INFO << m_state << " -> " << newState;

    QCamera::State oldState = m_state;
    m_state = newState;

    //attach audio and video inputs during Unloaded->Loaded transition
    if (oldState == QCamera::UnloadedState) {
        attachInputDevices();
    }

    if (m_state == QCamera::ActiveState) {
        Q_EMIT readyToConfigureConnections();
        [m_captureSession commitConfiguration];
        [m_captureSession startRunning];
        m_defaultCodec = 0;
        defaultCodec();
        applyImageEncoderSettings();
        applyViewfinderSettings();
    }

    if (oldState == QCamera::ActiveState) {
        [m_captureSession stopRunning];
        [m_captureSession beginConfiguration];
    }

    Q_EMIT stateChanged(m_state);
}

void AVFCameraSession::processRuntimeError()
{
    qWarning() << tr("Runtime camera error");
    Q_EMIT error(QCamera::CameraError, tr("Runtime camera error"));
}

void AVFCameraSession::processSessionStarted()
{
    qDebugCamera() << Q_FUNC_INFO;
    if (!m_active) {
        m_active = true;
        Q_EMIT activeChanged(m_active);
        Q_EMIT stateChanged(state());
    }
}

void AVFCameraSession::processSessionStopped()
{
    qDebugCamera() << Q_FUNC_INFO;
    if (m_active) {
        m_active = false;
        Q_EMIT activeChanged(m_active);
        Q_EMIT stateChanged(state());
    }
}
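
//Recreate and attach the video and audio capture inputs whenever the
//corresponding device controls report a changed (dirty) device selection.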
void AVFCameraSession::attachInputDevices()
{
    //Attach video input device:
    if (m_service->videoDeviceControl()->isDirty()) {
        if (m_videoInput) {
            [m_captureSession removeInput:m_videoInput];
            [m_videoInput release];
            m_videoInput = 0;
        }

        AVCaptureDevice *videoDevice = m_service->videoDeviceControl()->createCaptureDevice();

        NSError *error = nil;
        m_videoInput = [AVCaptureDeviceInput
                deviceInputWithDevice:videoDevice
                error:&error];

        if (!m_videoInput) {
            qWarning() << "Failed to create video device input";
        } else {
            if ([m_captureSession canAddInput:m_videoInput]) {
                [m_videoInput retain];
                [m_captureSession addInput:m_videoInput];
            } else {
                qWarning() << "Failed to connect video device input";
            }
        }
    }

    //Attach audio input device:
    if (m_service->audioInputSelectorControl()->isDirty()) {
        if (m_audioInput) {
            [m_captureSession removeInput:m_audioInput];
            [m_audioInput release];
            m_audioInput = 0;
        }

        AVCaptureDevice *audioDevice = m_service->audioInputSelectorControl()->createCaptureDevice();

        NSError *error = nil;
        m_audioInput = [AVCaptureDeviceInput
                deviceInputWithDevice:audioDevice
                error:&error];

        if (!m_audioInput) {
            qWarning() << "Failed to create audio device input";
        } else {
            [m_audioInput retain];
            [m_captureSession addInput:m_audioInput];
        }
    }
}

void AVFCameraSession::applyImageEncoderSettings()
{
    if (AVFImageEncoderControl *control = m_service->imageEncoderControl())
        control->applySettings();
}
void AVFCameraSession::applyViewfinderSettings()
{
    if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) {
        QCameraViewfinderSettings vfSettings(vfControl->requestedSettings());
        if (AVFImageEncoderControl *imControl = m_service->imageEncoderControl()) {
            const QSize imageResolution(imControl->imageSettings().resolution());
            if (!imageResolution.isNull() && imageResolution.isValid()) {
                vfSettings.setResolution(imageResolution);
                vfControl->setViewfinderSettings(vfSettings);
                return;
            }
        }

        if (!vfSettings.isNull())
            vfControl->applySettings();
    }
}

void AVFCameraSession::addProbe(AVFMediaVideoProbeControl *probe)
{
    m_videoProbesMutex.lock();
    if (probe)
        m_videoProbes << probe;
    m_videoProbesMutex.unlock();
}

void AVFCameraSession::removeProbe(AVFMediaVideoProbeControl *probe)
{
    m_videoProbesMutex.lock();
    m_videoProbes.remove(probe);
    m_videoProbesMutex.unlock();
}
FourCharCode AVFCameraSession::defaultCodec()
{
    if (!m_defaultCodec) {
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
        if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
            if (AVCaptureDevice *device = videoCaptureDevice()) {
                AVCaptureDeviceFormat *format = device.activeFormat;
                if (!format || !format.formatDescription)
                    return m_defaultCodec;
                m_defaultCodec = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
            }
        }
#else
        // TODO: extract media subtype.
#endif
    }

    return m_defaultCodec;
}

void AVFCameraSession::onCameraFrameFetched(const QVideoFrame &frame)
{
    m_videoProbesMutex.lock();
    QSet<AVFMediaVideoProbeControl *>::const_iterator i = m_videoProbes.constBegin();
    while (i != m_videoProbes.constEnd()) {
        (*i)->newFrameProbed(frame);
        ++i;
    }
    m_videoProbesMutex.unlock();
}

#include "moc_avfcamerasession.cpp"