Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: Idd1e7cf9f11d05b8d1a19ba349474eda67bc705a
Frederik Gladhorn
2015-03-03 18:13:51 +01:00
39 changed files with 2170 additions and 159 deletions

View File

@@ -1,18 +1,18 @@
-This example performs some simple OpenCL operations on camera or video input which
-is assumed to be provided in RGB format. The OpenCL operation is done on an
-OpenGL texture using CL-GL interop, without any further readbacks or copies
-(except for the initial texture upload, when necessary).
-
-Currently only OS X and Windows with desktop OpenGL (opengl32.dll) are supported.
-On Windows you may need to edit testplugin.pro to specify the location of the OpenCL
-headers and libraries.
-
-Note that an OpenCL implementation with GPU support is required.
-The platform and device selection logic supports NVIDIA and Intel.
-Porting to other platforms is probably simple, see clCreateContextFromType.
-
-Note however that YUV formats, that are commonly used also for camera input
-on some platforms, are not supported in this example.
+This example performs some simple OpenCL operations on camera or video input
+which is assumed to be provided in RGB format. The OpenCL operation is done on
+an OpenGL texture using CL-GL interop, without any further readbacks or copies
+(except for the initial texture upload, when necessary).
+
+Currently OS X, Windows with real OpenGL (opengl32.dll) and Linux (GLX only) are
+supported. Note that an OpenCL implementation with GPU support is required. The
+platform and device selection logic supports NVIDIA, AMD and Intel. Porting to
+other platforms is probably simple, see clCreateContextFromType.
+
+On Windows you may need to edit testplugin.pro to specify the location of the
+OpenCL headers and libraries.
+
+YUV formats are not supported in this example. This is probably not an issue on
+OS X and Windows, but will most likely disable the example on Linux.
 
 Pass the name of a video file to perform video playback or launch without
 arguments to use the camera.
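
For readers who have not used CL-GL interop before, the readback-free path described above amounts to wrapping the already-uploaded OpenGL texture in an OpenCL image object and bracketing the kernel launch with acquire/release calls. The following is only a rough sketch under the assumption that a GL-sharing context, a command queue, a kernel and the texture id already exist; it is not the example's actual code.

    // Sketch only; clContext, queue, kernel, texId, width and height are assumed
    // to exist, and error checking is trimmed.
    cl_int err = CL_SUCCESS;
    // Wrap the GL texture; no pixel data is copied by this call.
    cl_mem image = clCreateFromGLTexture2D(clContext, CL_MEM_READ_WRITE,
                                           GL_TEXTURE_2D, 0, texId, &err);
    glFinish(); // GL must be done with the texture before OpenCL acquires it.
    clEnqueueAcquireGLObjects(queue, 1, &image, 0, NULL, NULL);
    clSetKernelArg(kernel, 0, sizeof(cl_mem), &image);
    const size_t globalSize[2] = { size_t(width), size_t(height) };
    clEnqueueNDRangeKernel(queue, kernel, 2, NULL, globalSize, NULL, 0, NULL, NULL);
    clEnqueueReleaseGLObjects(queue, 1, &image, 0, NULL, NULL);
    clFinish(queue); // Hand the texture back to OpenGL before rendering it.
    clReleaseMemObject(image);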

View File

@@ -46,6 +46,10 @@
 #include <CL/opencl.h>
 #endif
+#ifdef Q_OS_LINUX
+#include <QtPlatformHeaders/QGLXNativeContext>
+#endif
 #include "rgbframehelper.h"
 
 static const char *openclSrc =
@@ -119,10 +123,17 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
     // Set up OpenCL.
     QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
-    cl_int err;
     cl_uint n;
-    if (clGetPlatformIDs(0, 0, &n) != CL_SUCCESS) {
-        qWarning("Failed to get platform ID count");
+    cl_int err = clGetPlatformIDs(0, 0, &n);
+    if (err != CL_SUCCESS) {
+        qWarning("Failed to get platform ID count (error %d)", err);
+        if (err == -1001) {
+            qDebug("Could not find OpenCL implementation. ICD missing?"
+#ifdef Q_OS_LINUX
+                   " Check /etc/OpenCL/vendors."
+#endif
+                   );
+        }
         return;
     }
     if (n == 0) {
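
The -1001 literal checked above matches CL_PLATFORM_NOT_FOUND_KHR, the status the cl_khr_icd loader returns when no vendor OpenCL implementation is installed. Assuming the Khronos extension header is available, the check could equally be written symbolically; a small sketch:

    // Sketch: CL/cl_ext.h defines CL_PLATFORM_NOT_FOUND_KHR as -1001 (cl_khr_icd).
    #include <CL/cl_ext.h>

    static bool clPlatformMissing(cl_int err)
    {
        return err == CL_PLATFORM_NOT_FOUND_KHR;
    }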
@@ -140,6 +151,7 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
qDebug("GL_VENDOR: %s", vendor); qDebug("GL_VENDOR: %s", vendor);
const bool isNV = vendor && strstr(vendor, "NVIDIA"); const bool isNV = vendor && strstr(vendor, "NVIDIA");
const bool isIntel = vendor && strstr(vendor, "Intel"); const bool isIntel = vendor && strstr(vendor, "Intel");
const bool isAMD = vendor && strstr(vendor, "ATI");
qDebug("Found %u OpenCL platforms:", n); qDebug("Found %u OpenCL platforms:", n);
for (cl_uint i = 0; i < n; ++i) { for (cl_uint i = 0; i < n; ++i) {
QByteArray name; QByteArray name;
@@ -153,6 +165,8 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
             platform = platformIds[i];
         else if (isIntel && name.contains(QByteArrayLiteral("Intel")))
             platform = platformIds[i];
+        else if (isAMD && name.contains(QByteArrayLiteral("AMD")))
+            platform = platformIds[i];
     }
     qDebug("Using platform %p", platform);
@@ -166,6 +180,18 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
                                              CL_GL_CONTEXT_KHR, (cl_context_properties) wglGetCurrentContext(),
                                              CL_WGL_HDC_KHR, (cl_context_properties) wglGetCurrentDC(),
                                              0 };
+#elif defined(Q_OS_LINUX)
+    // An elegant alternative to glXGetCurrentContext. This will even survive
+    // (without interop) when using something other than GLX.
+    QVariant nativeGLXHandle = QOpenGLContext::currentContext()->nativeHandle();
+    QGLXNativeContext nativeGLXContext;
+    if (!nativeGLXHandle.isNull() && nativeGLXHandle.canConvert<QGLXNativeContext>())
+        nativeGLXContext = nativeGLXHandle.value<QGLXNativeContext>();
+    else
+        qWarning("Failed to get the underlying GLX context from the current QOpenGLContext");
+    cl_context_properties contextProps[] = { CL_CONTEXT_PLATFORM, (cl_context_properties) platform,
+                                             CL_GL_CONTEXT_KHR, (cl_context_properties) nativeGLXContext.context(),
+                                             0 };
 #endif
     m_clContext = clCreateContextFromType(contextProps, CL_DEVICE_TYPE_GPU, 0, 0, &err);
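
clCreateContextFromType with CL_GL_CONTEXT_KHR only succeeds when the selected device actually supports CL-GL sharing. A hedged pre-flight check, not part of the example, might look like this ('device' is a cl_device_id obtained from the chosen platform; on OS X Apple exposes its own share-group extension instead):

    // Sketch only: warn early if the device does not advertise cl_khr_gl_sharing.
    char extensions[8192] = { 0 };
    if (clGetDeviceInfo(device, CL_DEVICE_EXTENSIONS, sizeof(extensions), extensions, 0) == CL_SUCCESS
        && !strstr(extensions, "cl_khr_gl_sharing")) {
        qWarning("cl_khr_gl_sharing not advertised; CL-GL interop will likely fail");
    }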

View File

@@ -12,10 +12,10 @@ OTHER_FILES = main.qml
 target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/video/qmlvideofilter_opencl
 INSTALLS += target
 
-# Edit these as necessary
-osx {
-    LIBS += -framework OpenCL
-} else {
+osx: LIBS += -framework OpenCL
+unix: !osx: LIBS += -lOpenCL
+win32:!winrt {
+    # Edit these as necessary
     INCLUDEPATH += c:/cuda/include
     LIBPATH += c:/cuda/lib/x64
     LIBS += -lopengl32 -lOpenCL

View File

@@ -15,8 +15,8 @@ win32 {
 } else:qnx {
     qtCompileTest(mmrenderer)
 } else {
-    qtCompileTest(alsa)
-    qtCompileTest(pulseaudio)
+    contains(QT_CONFIG, alsa):qtCompileTest(alsa)
+    contains(QT_CONFIG, pulseaudio):qtCompileTest(pulseaudio)
     isEmpty(GST_VERSION) {
         contains(QT_CONFIG, gstreamer-0.10) {

View File

@@ -98,6 +98,22 @@ QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent)
     , m_widget(0)
     , m_fullScreen(false)
 {
+    m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
+    if (!m_videoSink)
+        m_videoSink = gst_element_factory_make ("ximagesink", NULL);
+
+    if (m_videoSink) {
+        // Check if the xv sink is usable
+        if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
+            gst_object_unref(GST_OBJECT(m_videoSink));
+            m_videoSink = 0;
+        } else {
+            gst_element_set_state(m_videoSink, GST_STATE_NULL);
+            g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
+            qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
+        }
+    }
 }
 
 QGstreamerVideoWidgetControl::~QGstreamerVideoWidgetControl()
@@ -110,36 +126,17 @@ QGstreamerVideoWidgetControl::~QGstreamerVideoWidgetControl()
 void QGstreamerVideoWidgetControl::createVideoWidget()
 {
-    if (m_widget)
+    if (!m_videoSink || m_widget)
         return;
 
     m_widget = new QGstreamerVideoWidget;
     m_widget->installEventFilter(this);
     m_windowId = m_widget->winId();
-
-    m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
-    if (m_videoSink) {
-        // Check if the xv sink is usable
-        if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
-            gst_object_unref(GST_OBJECT(m_videoSink));
-            m_videoSink = 0;
-        } else {
-            gst_element_set_state(m_videoSink, GST_STATE_NULL);
-            g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
-        }
-    }
-    if (!m_videoSink)
-        m_videoSink = gst_element_factory_make ("ximagesink", NULL);
-    qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
 }
 
 GstElement *QGstreamerVideoWidgetControl::videoSink()
 {
-    createVideoWidget();
     return m_videoSink;
 }

View File

@@ -850,19 +850,19 @@ QMediaServiceProvider *QMediaServiceProvider::defaultServiceProvider()
 /*!
     \since 5.3
-    \fn QMediaServiceSupportedDevicesInterface::defaultDevice(const QByteArray &service) const
+    \fn QByteArray QMediaServiceSupportedDevicesInterface::defaultDevice(const QByteArray &service) const
 
     Returns the default device for a \a service type.
 */
 
 /*!
-    \fn QMediaServiceSupportedDevicesInterface::devices(const QByteArray &service) const
+    \fn QList<QByteArray> QMediaServiceSupportedDevicesInterface::devices(const QByteArray &service) const
 
     Returns a list of devices available for a \a service type.
 */
 
 /*!
-    \fn QMediaServiceSupportedDevicesInterface::deviceDescription(const QByteArray &service, const QByteArray &device)
+    \fn QString QMediaServiceSupportedDevicesInterface::deviceDescription(const QByteArray &service, const QByteArray &device)
 
     Returns the description of a \a device available for a \a service type.
 */

View File

@@ -259,7 +259,7 @@ QVideoFilterRunnable::~QVideoFilterRunnable()
 }
 
 /*!
-    Constructs a new QAbstractVideoFilter instance.
+    Constructs a new QAbstractVideoFilter instance with parent object \a parent.
  */
 QAbstractVideoFilter::QAbstractVideoFilter(QObject *parent) :
     QObject(parent),

View File

@@ -32,7 +32,7 @@
 ****************************************************************************/
 
 #include "avfcameraexposurecontrol.h"
-#include "avfconfigurationlock.h"
+#include "avfcamerautility.h"
 #include "avfcamerasession.h"
 #include "avfcameraservice.h"
 #include "avfcameradebug.h"

View File

@@ -32,7 +32,7 @@
 ****************************************************************************/
 
 #include "avfcamerafocuscontrol.h"
-#include "avfconfigurationlock.h"
+#include "avfcamerautility.h"
 #include "avfcameraservice.h"
 #include "avfcamerasession.h"
 #include "avfcameradebug.h"

View File

@@ -61,6 +61,8 @@ public:
     void configureAVCaptureSession(AVFCameraSession *cameraSession);
     void syncHandleViewfinderFrame(const QVideoFrame &frame);
 
+    AVCaptureVideoDataOutput *videoDataOutput() const;
+
 Q_SIGNALS:
     void surfaceChanged(QAbstractVideoSurface *surface);

View File

@@ -31,6 +31,7 @@
 **
 ****************************************************************************/
 
+#include "avfcameraviewfindersettingscontrol.h"
 #include "avfcamerarenderercontrol.h"
 #include "avfcamerasession.h"
 #include "avfcameraservice.h"
@@ -129,7 +130,17 @@ private:
     int height = CVPixelBufferGetHeight(imageBuffer);
     QAbstractVideoBuffer *buffer = new CVPixelBufferVideoBuffer(imageBuffer);
-    QVideoFrame frame(buffer, QSize(width, height), QVideoFrame::Format_RGB32);
+    QVideoFrame::PixelFormat format = QVideoFrame::Format_RGB32;
+    if ([captureOutput isKindOfClass:[AVCaptureVideoDataOutput class]]) {
+        NSDictionary *settings = ((AVCaptureVideoDataOutput *)captureOutput).videoSettings;
+        if (settings && [settings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
+            NSNumber *avf = [settings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey];
+            format = AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat([avf unsignedIntValue]);
+        }
+    }
+    QVideoFrame frame(buffer, QSize(width, height), format);
     m_renderer->syncHandleViewfinderFrame(frame);
 }
 
 @end
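
The hunk above makes the renderer honor whatever CoreVideo pixel format the viewfinder settings negotiated instead of hard-coding Format_RGB32. The converter it calls is added later in this commit; an illustrative (not normative) use of the mapping:

    // Per the converter added below, the BGRA/ARGB swap is intentional:
    // kCVPixelFormatType_32BGRA maps to QVideoFrame::Format_ARGB32.
    const QVideoFrame::PixelFormat fmt =
        AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(kCVPixelFormatType_32BGRA);
    Q_ASSERT(fmt == QVideoFrame::Format_ARGB32);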
@@ -236,6 +247,11 @@ void AVFCameraRendererControl::syncHandleViewfinderFrame(const QVideoFrame &fram
     m_cameraSession->onCameraFrameFetched(m_lastViewfinderFrame);
 }
 
+AVCaptureVideoDataOutput *AVFCameraRendererControl::videoDataOutput() const
+{
+    return m_videoDataOutput;
+}
+
 void AVFCameraRendererControl::handleViewfinderFrame()
 {
     QVideoFrame frame;

View File

@@ -55,6 +55,9 @@ class AVFAudioInputSelectorControl;
 class AVFCameraFocusControl;
 class AVFCameraExposureControl;
 class AVFCameraZoomControl;
+class AVFCameraViewfinderSettingsControl2;
+class AVFCameraViewfinderSettingsControl;
+class AVFImageEncoderControl;
 
 class AVFCameraService : public QMediaService
 {
@@ -76,6 +79,10 @@ public:
     AVFCameraFocusControl *cameraFocusControl() const { return m_cameraFocusControl; }
     AVFCameraExposureControl *cameraExposureControl() const {return m_cameraExposureControl; }
     AVFCameraZoomControl *cameraZoomControl() const {return m_cameraZoomControl; }
+    AVFCameraRendererControl *videoOutput() const {return m_videoOutput; }
+    AVFCameraViewfinderSettingsControl2 *viewfinderSettingsControl2() const {return m_viewfinderSettingsControl2; }
+    AVFCameraViewfinderSettingsControl *viewfinderSettingsControl() const {return m_viewfinderSettingsControl; }
+    AVFImageEncoderControl *imageEncoderControl() const {return m_imageEncoderControl; }
 
 private:
     AVFCameraSession *m_session;
@@ -90,6 +97,9 @@ private:
     AVFCameraFocusControl *m_cameraFocusControl;
     AVFCameraExposureControl *m_cameraExposureControl;
     AVFCameraZoomControl *m_cameraZoomControl;
+    AVFCameraViewfinderSettingsControl2 *m_viewfinderSettingsControl2;
+    AVFCameraViewfinderSettingsControl *m_viewfinderSettingsControl;
+    AVFImageEncoderControl *m_imageEncoderControl;
 };
 
 QT_END_NAMESPACE

View File

@@ -50,6 +50,8 @@
#include "avfmediavideoprobecontrol.h" #include "avfmediavideoprobecontrol.h"
#include "avfcamerafocuscontrol.h" #include "avfcamerafocuscontrol.h"
#include "avfcameraexposurecontrol.h" #include "avfcameraexposurecontrol.h"
#include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h"
#ifdef Q_OS_IOS #ifdef Q_OS_IOS
#include "avfcamerazoomcontrol.h" #include "avfcamerazoomcontrol.h"
@@ -84,6 +86,9 @@ AVFCameraService::AVFCameraService(QObject *parent):
 #ifdef Q_OS_IOS
     m_cameraZoomControl = new AVFCameraZoomControl(this);
 #endif
+    m_viewfinderSettingsControl2 = new AVFCameraViewfinderSettingsControl2(this);
+    m_viewfinderSettingsControl = new AVFCameraViewfinderSettingsControl(this);
+    m_imageEncoderControl = new AVFImageEncoderControl(this);
 }
 
 AVFCameraService::~AVFCameraService()
@@ -107,6 +112,10 @@ AVFCameraService::~AVFCameraService()
 #ifdef Q_OS_IOS
     delete m_cameraZoomControl;
 #endif
+    delete m_viewfinderSettingsControl2;
+    delete m_viewfinderSettingsControl;
+    delete m_imageEncoderControl;
     delete m_session;
 }
@@ -140,6 +149,15 @@ QMediaControl *AVFCameraService::requestControl(const char *name)
     if (qstrcmp(name, QCameraFocusControl_iid) == 0)
         return m_cameraFocusControl;
 
+    if (qstrcmp(name, QCameraViewfinderSettingsControl2_iid) == 0)
+        return m_viewfinderSettingsControl2;
+
+    if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0)
+        return m_viewfinderSettingsControl;
+
+    if (qstrcmp(name, QImageEncoderControl_iid) == 0)
+        return m_imageEncoderControl;
+
     if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) {
         AVFMediaVideoProbeControl *videoProbe = 0;
         videoProbe = new AVFMediaVideoProbeControl(this);

View File

@@ -98,6 +98,8 @@ Q_SIGNALS:
 private:
     static void updateCameraDevices();
     void attachInputDevices();
+    void applyImageEncoderSettings();
+    void applyViewfinderSettings();
 
     static QByteArray m_defaultCameraDevice;
     static QList<QByteArray> m_cameraDevices;

View File

@@ -39,6 +39,8 @@
#include "avfcameradevicecontrol.h" #include "avfcameradevicecontrol.h"
#include "avfaudioinputselectorcontrol.h" #include "avfaudioinputselectorcontrol.h"
#include "avfmediavideoprobecontrol.h" #include "avfmediavideoprobecontrol.h"
#include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h"
#include <CoreFoundation/CoreFoundation.h> #include <CoreFoundation/CoreFoundation.h>
#include <Foundation/Foundation.h> #include <Foundation/Foundation.h>
@@ -275,6 +277,8 @@ void AVFCameraSession::setState(QCamera::State newState)
         Q_EMIT readyToConfigureConnections();
         [m_captureSession commitConfiguration];
         [m_captureSession startRunning];
+        applyImageEncoderSettings();
+        applyViewfinderSettings();
     }
 
     if (oldState == QCamera::ActiveState) {
@@ -364,6 +368,30 @@ void AVFCameraSession::attachInputDevices()
     }
 }
 
+void AVFCameraSession::applyImageEncoderSettings()
+{
+    if (AVFImageEncoderControl *control = m_service->imageEncoderControl())
+        control->applySettings();
+}
+
+void AVFCameraSession::applyViewfinderSettings()
+{
+    if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) {
+        QCameraViewfinderSettings vfSettings(vfControl->requestedSettings());
+        if (AVFImageEncoderControl *imControl = m_service->imageEncoderControl()) {
+            const QSize imageResolution(imControl->imageSettings().resolution());
+            if (!imageResolution.isNull() && imageResolution.isValid()) {
+                vfSettings.setResolution(imageResolution);
+                vfControl->setViewfinderSettings(vfSettings);
+                return;
+            }
+        }
+        if (!vfSettings.isNull())
+            vfControl->applySettings();
+    }
+}
+
 void AVFCameraSession::addProbe(AVFMediaVideoProbeControl *probe)
 {
     m_videoProbesMutex.lock();
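
These additions are the AVFoundation backend for the viewfinder-settings API introduced with Qt 5.5; applications drive it through QCamera rather than through the AVF classes directly. A hedged sketch of the calling side, with illustrative values only:

    // Sketch: the requested settings are forwarded to AVFCameraViewfinderSettingsControl2
    // and applied by applyViewfinderSettings() once the capture session starts.
    #include <QCamera>
    #include <QCameraViewfinderSettings>

    QCamera camera;
    QCameraViewfinderSettings settings;
    settings.setResolution(1280, 720);
    settings.setMinimumFrameRate(15.0);
    settings.setMaximumFrameRate(30.0);
    camera.setViewfinderSettings(settings);
    camera.start();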

View File

@@ -0,0 +1,113 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef AVFCAMERAUTILITY_H
#define AVFCAMERAUTILITY_H
#include <QtCore/qsysinfo.h>
#include <QtCore/qglobal.h>
#include <QtCore/qvector.h>
#include <QtCore/qdebug.h>
#include <QtCore/qsize.h>
#include <QtCore/qpair.h>
#include <AVFoundation/AVFoundation.h>
// In case we have SDK below 10.7/7.0:
@class AVCaptureDeviceFormat;
QT_BEGIN_NAMESPACE
class AVFConfigurationLock
{
public:
explicit AVFConfigurationLock(AVCaptureDevice *captureDevice)
: m_captureDevice(captureDevice),
m_locked(false)
{
Q_ASSERT(m_captureDevice);
NSError *error = nil;
m_locked = [m_captureDevice lockForConfiguration:&error];
}
~AVFConfigurationLock()
{
if (m_locked)
[m_captureDevice unlockForConfiguration];
}
operator bool() const
{
return m_locked;
}
private:
Q_DISABLE_COPY(AVFConfigurationLock)
AVCaptureDevice *m_captureDevice;
bool m_locked;
};
inline QSysInfo::MacVersion qt_OS_limit(QSysInfo::MacVersion osxVersion,
QSysInfo::MacVersion iosVersion)
{
#ifdef Q_OS_OSX
Q_UNUSED(iosVersion)
return osxVersion;
#else
Q_UNUSED(osxVersion)
return iosVersion;
#endif
}
typedef QPair<qreal, qreal> AVFPSRange;
AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection);
typedef QPair<int, int> AVFRational;
AVFRational qt_float_to_rational(qreal par, int limit);
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
bool qt_is_video_range_subtype(AVCaptureDeviceFormat *format);
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format);
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format);
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format);
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format);
AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res);
AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, Float64 fps);
AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps);
#endif
QT_END_NAMESPACE
#endif
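
For orientation, the two helpers from this header most used by the rest of the commit are AVFConfigurationLock (an RAII guard around lockForConfiguration/unlockForConfiguration) and qt_OS_limit (picks the OS X or iOS version constant for the current platform). A hedged usage sketch, assuming an AVCaptureDevice pointer named captureDevice is in scope:

    // Sketch: take the configuration lock before mutating the capture device.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }
    // ... change activeFormat, frame durations, etc.; the destructor unlocks.

    // Sketch: gate version-dependent code in a way that compiles on both platforms.
    if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0)) {
        // use the 10.9 / 7.0 code path
    }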

View File

@@ -0,0 +1,386 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfcamerautility.h"
#include "avfcameradebug.h"
#include <QtCore/qvector.h>
#include <QtCore/qpair.h>
#include <algorithm>
#include <limits>
QT_BEGIN_NAMESPACE
AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
{
Q_ASSERT(videoConnection);
AVFPSRange newRange;
// "The value in the videoMinFrameDuration is equivalent to the reciprocal
// of the maximum framerate, the value in the videoMaxFrameDuration is equivalent
// to the reciprocal of the minimum framerate."
if (videoConnection.supportsVideoMinFrameDuration) {
const CMTime cmMin = videoConnection.videoMinFrameDuration;
if (CMTimeCompare(cmMin, kCMTimeInvalid)) { // Has some non-default value:
if (const Float64 minSeconds = CMTimeGetSeconds(cmMin))
newRange.second = 1. / minSeconds;
}
}
if (videoConnection.supportsVideoMaxFrameDuration) {
const CMTime cmMax = videoConnection.videoMaxFrameDuration;
if (CMTimeCompare(cmMax, kCMTimeInvalid)) {
if (const Float64 maxSeconds = CMTimeGetSeconds(cmMax))
newRange.first = 1. / maxSeconds;
}
}
return newRange;
}
AVFRational qt_float_to_rational(qreal par, int limit)
{
Q_ASSERT(limit > 0);
// In Qt we represent pixel aspect ratio
// as a rational number (we use QSize).
// AVFoundation describes dimensions in pixels
// and in pixels with width multiplied by PAR.
// Represent this PAR as a ratio.
int a = 0, b = 1, c = 1, d = 1;
qreal mid = 0.;
while (b <= limit && d <= limit) {
mid = qreal(a + c) / (b + d);
if (qAbs(par - mid) < 0.000001) {
if (b + d <= limit)
return AVFRational(a + c, b + d);
else if (d > b)
return AVFRational(c, d);
else
return AVFRational(a, b);
} else if (par > mid) {
a = a + c;
b = b + d;
} else {
c = a + c;
d = b + d;
}
}
if (b > limit)
return AVFRational(c, d);
return AVFRational(a, b);
}
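
qt_float_to_rational above is a mediant (Stern-Brocot) search over [0, 1]; the caller further down normalises the pixel aspect ratio so the smaller width is divided by the larger one before calling it. A tiny illustrative check, assuming the function is in scope:

    // Illustrative only: 0.75 hits the exact fraction 3/4 well inside the
    // denominator limit, so the mediant loop terminates early.
    const AVFRational r = qt_float_to_rational(0.75, 200);
    Q_ASSERT(r.first == 3 && r.second == 4);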
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
bool qt_is_video_range_subtype(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
#ifdef Q_OS_IOS
// Use only 420f on iOS, not 420v.
const FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
return subType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
#else
Q_UNUSED(format)
#endif
return false;
}
namespace {
inline bool qt_area_sane(const QSize &size)
{
return !size.isNull() && size.isValid()
&& std::numeric_limits<int>::max() / size.width() >= size.height();
}
inline bool avf_format_compare(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
{
Q_ASSERT(f1);
Q_ASSERT(f2);
const QSize r1(qt_device_format_resolution(f1));
const QSize r2(qt_device_format_resolution(f2));
return r1.width() > r2.width() && r1.height() > r2.height();
}
QVector<AVCaptureDeviceFormat *> qt_sort_device_formats(AVCaptureDevice *captureDevice)
{
// Select only formats with framerate ranges and sort them by resolution.
Q_ASSERT(captureDevice);
QVector<AVCaptureDeviceFormat *>sorted;
NSArray *formats = captureDevice.formats;
if (!formats || !formats.count)
return sorted;
sorted.reserve(formats.count);
for (AVCaptureDeviceFormat *format in formats) {
if (qt_is_video_range_subtype(format))
continue;
if (format.videoSupportedFrameRateRanges && format.videoSupportedFrameRateRanges.count) {
const QSize resolution(qt_device_format_resolution(format));
if (!resolution.isNull() && resolution.isValid())
sorted << format;
}
}
std::sort(sorted.begin(), sorted.end(), avf_format_compare);
return sorted;
}
Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
{
Q_ASSERT(format && format.videoSupportedFrameRateRanges
&& format.videoSupportedFrameRateRanges.count);
AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:0];
Float64 distance = qAbs(range.maxFrameRate - fps);
for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
distance = qMin(distance, qAbs(range.maxFrameRate - fps));
}
return distance;
}
} // Unnamed namespace.
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
if (!format.formatDescription)
return QSize();
const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
return QSize(res.width, res.height);
}
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
QSize res;
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) {
const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
res.setWidth(hrDim.width);
res.setHeight(hrDim.height);
}
#endif
return res;
}
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
QVector<AVFPSRange> qtRanges;
if (!format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count)
return qtRanges;
qtRanges.reserve(format.videoSupportedFrameRateRanges.count);
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
qtRanges << AVFPSRange(range.minFrameRate, range.maxFrameRate);
return qtRanges;
}
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
if (!format.formatDescription) {
qDebugCamera() << Q_FUNC_INFO << "no format description found";
return QSize();
}
const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);
if (qAbs(resPAR.width - res.width) < 1.) {
// "Pixel aspect ratio is used to adjust the width, leaving the height alone."
return QSize(1, 1);
}
if (!res.width || !resPAR.width)
return QSize();
const AVFRational asRatio(qt_float_to_rational(resPAR.width > res.width
? res.width / qreal(resPAR.width)
: resPAR.width / qreal(res.width), 200));
return QSize(asRatio.first, asRatio.second);
}
AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &request)
{
Q_ASSERT(captureDevice);
Q_ASSERT(!request.isNull() && request.isValid());
if (!captureDevice.formats || !captureDevice.formats.count)
return 0;
for (AVCaptureDeviceFormat *format in captureDevice.formats) {
if (qt_is_video_range_subtype(format))
continue;
if (qt_device_format_resolution(format) == request)
return format;
// iOS only (still images).
if (qt_device_format_high_resolution(format) == request)
return format;
}
if (!qt_area_sane(request))
return 0;
typedef QPair<QSize, AVCaptureDeviceFormat *> FormatPair;
QVector<FormatPair> formats;
formats.reserve(captureDevice.formats.count);
for (AVCaptureDeviceFormat *format in captureDevice.formats) {
if (qt_is_video_range_subtype(format))
continue;
const QSize res(qt_device_format_resolution(format));
if (!res.isNull() && res.isValid() && qt_area_sane(res))
formats << FormatPair(res, format);
const QSize highRes(qt_device_format_high_resolution(format));
if (!highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
formats << FormatPair(highRes, format);
}
if (!formats.size())
return 0;
AVCaptureDeviceFormat *best = formats[0].second;
QSize next(formats[0].first);
int wDiff = qAbs(request.width() - next.width());
int hDiff = qAbs(request.height() - next.height());
const int area = request.width() * request.height();
int areaDiff = qAbs(area - next.width() * next.height());
for (int i = 1; i < formats.size(); ++i) {
next = formats[i].first;
const int newWDiff = qAbs(next.width() - request.width());
const int newHDiff = qAbs(next.height() - request.height());
const int newAreaDiff = qAbs(area - next.width() * next.height());
if ((newWDiff < wDiff && newHDiff < hDiff)
|| ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
wDiff = newWDiff;
hDiff = newHDiff;
best = formats[i].second;
areaDiff = newAreaDiff;
}
}
return best;
}
AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, Float64 fps)
{
Q_ASSERT(captureDevice);
Q_ASSERT(fps > 0.);
const qreal epsilon = 0.1;
// Sort formats by their resolution.
const QVector<AVCaptureDeviceFormat *> sorted(qt_sort_device_formats(captureDevice));
if (!sorted.size())
return nil;
for (int i = 0; i < sorted.size(); ++i) {
AVCaptureDeviceFormat *format = sorted[i];
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
if (range.maxFrameRate - range.minFrameRate < epsilon) {
// On OS X ranges are points (built-in camera).
if (qAbs(fps - range.maxFrameRate) < epsilon)
return format;
}
if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
return format;
}
}
Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
AVCaptureDeviceFormat *match = sorted[0];
for (int i = 1; i < sorted.size(); ++i) {
const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
if (newDistance < distance) {
distance = newDistance;
match = sorted[i];
}
}
return match;
}
AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
{
Q_ASSERT(format && format.videoSupportedFrameRateRanges
&& format.videoSupportedFrameRateRanges.count);
const qreal epsilon = 0.1;
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
if (range.maxFrameRate - range.minFrameRate < epsilon) {
// On OS X ranges are points (built-in camera).
if (qAbs(fps - range.maxFrameRate) < epsilon)
return range;
}
if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
return range;
}
AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
Float64 distance = qAbs(match.maxFrameRate - fps);
for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
const Float64 newDistance = qAbs(range.maxFrameRate - fps);
if (newDistance < distance) {
distance = newDistance;
match = range;
}
}
return match;
}
#endif // SDK
QT_END_NAMESPACE

View File

@@ -0,0 +1,114 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef AVFCAMERAVIEWFINDERSETTINGSCONTROL_H
#define AVFCAMERAVIEWFINDERSETTINGSCONTROL_H
#include <QtMultimedia/qcameraviewfindersettingscontrol.h>
#include <QtMultimedia/qcameraviewfindersettings.h>
#include <QtMultimedia/qvideoframe.h>
#include <QtCore/qpointer.h>
#include <QtCore/qglobal.h>
#include <QtCore/qsize.h>
@class AVCaptureDevice;
@class AVCaptureVideoDataOutput;
@class AVCaptureConnection;
@class AVCaptureDeviceFormat;
QT_BEGIN_NAMESPACE
class AVFCameraSession;
class AVFCameraService;
class AVFCameraViewfinderSettingsControl2 : public QCameraViewfinderSettingsControl2
{
Q_OBJECT
friend class AVFCameraSession;
friend class AVFCameraViewfinderSettingsControl;
public:
AVFCameraViewfinderSettingsControl2(AVFCameraService *service);
QList<QCameraViewfinderSettings> supportedViewfinderSettings() const Q_DECL_OVERRIDE;
QCameraViewfinderSettings viewfinderSettings() const Q_DECL_OVERRIDE;
void setViewfinderSettings(const QCameraViewfinderSettings &settings) Q_DECL_OVERRIDE;
// "Converters":
static QVideoFrame::PixelFormat QtPixelFormatFromCVFormat(unsigned avPixelFormat);
static bool CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv);
private:
void setResolution(const QSize &resolution);
void setFramerate(qreal minFPS, qreal maxFPS, bool useActive);
void setPixelFormat(QVideoFrame::PixelFormat newFormat);
AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const;
bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const;
void applySettings();
QCameraViewfinderSettings requestedSettings() const;
// Aux. function to extract things like captureDevice, videoOutput, etc.
bool updateAVFoundationObjects() const;
AVFCameraService *m_service;
mutable AVFCameraSession *m_session;
QCameraViewfinderSettings m_settings;
mutable AVCaptureDevice *m_captureDevice;
mutable AVCaptureVideoDataOutput *m_videoOutput;
mutable AVCaptureConnection *m_videoConnection;
};
class AVFCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl
{
Q_OBJECT
public:
AVFCameraViewfinderSettingsControl(AVFCameraService *service);
bool isViewfinderParameterSupported(ViewfinderParameter parameter) const Q_DECL_OVERRIDE;
QVariant viewfinderParameter(ViewfinderParameter parameter) const Q_DECL_OVERRIDE;
void setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value) Q_DECL_OVERRIDE;
private:
void setResolution(const QVariant &resolution);
void setAspectRatio(const QVariant &aspectRatio);
void setFrameRate(const QVariant &fps, bool max);
void setPixelFormat(const QVariant &pf);
bool initSettingsControl() const;
AVFCameraService *m_service;
mutable QPointer<AVFCameraViewfinderSettingsControl2> m_settingsControl;
};
QT_END_NAMESPACE
#endif

View File

@@ -0,0 +1,732 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfcameraviewfindersettingscontrol.h"
#include "avfcamerarenderercontrol.h"
#include "avfcamerautility.h"
#include "avfcamerasession.h"
#include "avfcameraservice.h"
#include "avfcameradebug.h"
#include <QtCore/qvariant.h>
#include <QtCore/qsysinfo.h>
#include <QtCore/qvector.h>
#include <QtCore/qdebug.h>
#include <QtCore/qlist.h>
#include <algorithm>
#include <AVFoundation/AVFoundation.h>
QT_BEGIN_NAMESPACE
namespace {
QVector<QVideoFrame::PixelFormat> qt_viewfinder_pixel_formats(AVCaptureVideoDataOutput *videoOutput)
{
Q_ASSERT(videoOutput);
QVector<QVideoFrame::PixelFormat> qtFormats;
NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes];
for (NSObject *obj in pixelFormats) {
if (![obj isKindOfClass:[NSNumber class]])
continue;
NSNumber *formatAsNSNumber = static_cast<NSNumber *>(obj);
// It's actually FourCharCode (== UInt32):
const QVideoFrame::PixelFormat qtFormat(AVFCameraViewfinderSettingsControl2::
QtPixelFormatFromCVFormat([formatAsNSNumber unsignedIntValue]));
if (qtFormat != QVideoFrame::Format_Invalid)
qtFormats << qtFormat;
}
return qtFormats;
}
bool qt_framerates_sane(const QCameraViewfinderSettings &settings)
{
const qreal minFPS = settings.minimumFrameRate();
const qreal maxFPS = settings.maximumFrameRate();
if (minFPS < 0. || maxFPS < 0.)
return false;
return !maxFPS || maxFPS >= minFPS;
}
void qt_set_framerate_limits(AVCaptureConnection *videoConnection,
const QCameraViewfinderSettings &settings)
{
Q_ASSERT(videoConnection);
if (!qt_framerates_sane(settings)) {
qDebugCamera() << Q_FUNC_INFO << "invalid framerate (min, max):"
<< settings.minimumFrameRate() << settings.maximumFrameRate();
return;
}
const qreal minFPS = settings.minimumFrameRate();
const qreal maxFPS = settings.maximumFrameRate();
CMTime minDuration = kCMTimeInvalid;
CMTime maxDuration = kCMTimeInvalid;
if (minFPS > 0. || maxFPS > 0.) {
if (maxFPS) {
if (!videoConnection.supportsVideoMinFrameDuration)
qDebugCamera() << Q_FUNC_INFO << "maximum framerate is not supported";
else
minDuration = CMTimeMake(1, maxFPS);
}
if (minFPS) {
if (!videoConnection.supportsVideoMaxFrameDuration)
qDebugCamera() << Q_FUNC_INFO << "minimum framerate is not supported";
else
maxDuration = CMTimeMake(1, minFPS);
}
}
if (videoConnection.supportsVideoMinFrameDuration)
videoConnection.videoMinFrameDuration = minDuration;
if (videoConnection.supportsVideoMaxFrameDuration)
videoConnection.videoMaxFrameDuration = maxDuration;
}
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
{
Q_ASSERT(range);
Q_ASSERT(fps > 0.);
if (range.maxFrameRate - range.minFrameRate < 0.1) {
// Can happen on OS X.
return range.minFrameDuration;
}
if (fps <= range.minFrameRate)
return range.maxFrameDuration;
if (fps >= range.maxFrameRate)
return range.minFrameDuration;
const AVFRational timeAsRational(qt_float_to_rational(1. / fps, 1000));
return CMTimeMake(timeAsRational.first, timeAsRational.second);
}
void qt_set_framerate_limits(AVCaptureDevice *captureDevice,
const QCameraViewfinderSettings &settings)
{
Q_ASSERT(captureDevice);
if (!captureDevice.activeFormat) {
qDebugCamera() << Q_FUNC_INFO << "no active capture device format";
return;
}
const qreal minFPS = settings.minimumFrameRate();
const qreal maxFPS = settings.maximumFrameRate();
if (!qt_framerates_sane(settings)) {
qDebugCamera() << Q_FUNC_INFO << "invalid framerates (min, max):"
<< minFPS << maxFPS;
return;
}
CMTime minFrameDuration = kCMTimeInvalid;
CMTime maxFrameDuration = kCMTimeInvalid;
if (maxFPS || minFPS) {
AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
maxFPS ? maxFPS : minFPS);
if (!range) {
qDebugCamera() << Q_FUNC_INFO << "no framerate range found, (min, max):"
<< minFPS << maxFPS;
return;
}
if (maxFPS)
minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
if (minFPS)
maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
}
const AVFConfigurationLock lock(captureDevice);
if (!lock) {
qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
return;
}
// While Apple's docs say kCMTimeInvalid will end in default
// settings for this format, kCMTimeInvalid on OS X ends with a runtime
// exception:
// "The activeVideoMinFrameDuration passed is not supported by the device."
#ifdef Q_OS_IOS
[captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
[captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#else
if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
[captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
[captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#endif
}
#endif // Platform SDK >= 10.9, >= 7.0.
// 'Dispatchers':
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
Q_ASSERT(captureDevice);
Q_ASSERT(videoConnection);
AVFPSRange fps;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
if (const Float64 minSeconds = CMTimeGetSeconds(minDuration))
fps.second = 1. / minSeconds; // Max FPS = 1 / MinDuration.
}
const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
if (const Float64 maxSeconds = CMTimeGetSeconds(maxDuration))
fps.first = 1. / maxSeconds; // Min FPS = 1 / MaxDuration.
}
} else {
#else
{
#endif
fps = qt_connection_framerates(videoConnection);
}
return fps;
}
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
const QCameraViewfinderSettings &settings)
{
Q_ASSERT(captureDevice);
Q_ASSERT(videoConnection);
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0))
qt_set_framerate_limits(captureDevice, settings);
else
qt_set_framerate_limits(videoConnection, settings);
#else
qt_set_framerate_limits(videoConnection, settings);
#endif
}
} // Unnamed namespace.
AVFCameraViewfinderSettingsControl2::AVFCameraViewfinderSettingsControl2(AVFCameraService *service)
: m_service(service),
m_captureDevice(0),
m_videoOutput(0),
m_videoConnection(0)
{
Q_ASSERT(service);
}
QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedViewfinderSettings() const
{
QList<QCameraViewfinderSettings> supportedSettings;
if (!updateAVFoundationObjects()) {
qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
return supportedSettings;
}
QVector<AVFPSRange> framerates;
QVector<QVideoFrame::PixelFormat> pixelFormats(qt_viewfinder_pixel_formats(m_videoOutput));
if (!pixelFormats.size())
pixelFormats << QVideoFrame::Format_Invalid; // The default value.
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
if (!m_captureDevice.formats || !m_captureDevice.formats.count) {
qDebugCamera() << Q_FUNC_INFO << "no capture device formats found";
return supportedSettings;
}
for (AVCaptureDeviceFormat *format in m_captureDevice.formats) {
if (qt_is_video_range_subtype(format))
continue;
const QSize res(qt_device_format_resolution(format));
if (res.isNull() || !res.isValid())
continue;
const QSize par(qt_device_format_pixel_aspect_ratio(format));
if (par.isNull() || !par.isValid())
continue;
framerates = qt_device_format_framerates(format);
if (!framerates.size())
framerates << AVFPSRange(); // The default value.
for (int i = 0; i < pixelFormats.size(); ++i) {
for (int j = 0; j < framerates.size(); ++j) {
QCameraViewfinderSettings newSet;
newSet.setResolution(res);
newSet.setPixelAspectRatio(par);
newSet.setPixelFormat(pixelFormats[i]);
newSet.setMinimumFrameRate(framerates[j].first);
newSet.setMaximumFrameRate(framerates[j].second);
supportedSettings << newSet;
}
}
}
} else {
#else
{
#endif
// TODO: resolution and PAR.
framerates << qt_connection_framerates(m_videoConnection);
for (int i = 0; i < pixelFormats.size(); ++i) {
for (int j = 0; j < framerates.size(); ++j) {
QCameraViewfinderSettings newSet;
newSet.setPixelFormat(pixelFormats[i]);
newSet.setMinimumFrameRate(framerates[j].first);
newSet.setMaximumFrameRate(framerates[j].second);
supportedSettings << newSet;
}
}
}
return supportedSettings;
}
QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSettings() const
{
QCameraViewfinderSettings settings;
if (!updateAVFoundationObjects()) {
qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found";
return settings;
}
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
if (!m_captureDevice.activeFormat) {
qDebugCamera() << Q_FUNC_INFO << "no active capture device format";
return settings;
}
const QSize res(qt_device_format_resolution(m_captureDevice.activeFormat));
const QSize par(qt_device_format_pixel_aspect_ratio(m_captureDevice.activeFormat));
if (res.isNull() || !res.isValid() || par.isNull() || !par.isValid()) {
qDebugCamera() << Q_FUNC_INFO << "failed to obtain resolution/pixel aspect ratio";
return settings;
}
settings.setResolution(res);
settings.setPixelAspectRatio(par);
}
#endif
// TODO: resolution and PAR before 7.0.
const AVFPSRange fps = qt_current_framerates(m_captureDevice, m_videoConnection);
settings.setMinimumFrameRate(fps.first);
settings.setMaximumFrameRate(fps.second);
if (NSObject *obj = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
if ([obj isKindOfClass:[NSNumber class]]) {
NSNumber *nsNum = static_cast<NSNumber *>(obj);
settings.setPixelFormat(QtPixelFormatFromCVFormat([nsNum unsignedIntValue]));
}
}
return settings;
}
void AVFCameraViewfinderSettingsControl2::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
if (settings.isNull()) {
qDebugCamera() << Q_FUNC_INFO << "empty viewfinder settings";
return;
}
if (m_settings == settings)
return;
m_settings = settings;
applySettings();
}
QVideoFrame::PixelFormat AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (avPixelFormat) {
case kCVPixelFormatType_32ARGB:
return QVideoFrame::Format_BGRA32;
case kCVPixelFormatType_32BGRA:
return QVideoFrame::Format_ARGB32;
case kCVPixelFormatType_24RGB:
return QVideoFrame::Format_RGB24;
case kCVPixelFormatType_24BGR:
return QVideoFrame::Format_BGR24;
default:
return QVideoFrame::Format_Invalid;
}
}
bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (qtFormat) {
case QVideoFrame::Format_ARGB32:
conv = kCVPixelFormatType_32BGRA;
break;
case QVideoFrame::Format_BGRA32:
conv = kCVPixelFormatType_32ARGB;
break;
// These two formats below are not supported
// by QSGVideoNodeFactory_RGB, so for now I have to
// disable them.
/*
case QVideoFrame::Format_RGB24:
conv = kCVPixelFormatType_24RGB;
break;
case QVideoFrame::Format_BGR24:
conv = kCVPixelFormatType_24BGR;
break;
*/
default:
return false;
}
return true;
}
AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(const QCameraViewfinderSettings &settings) const
{
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
Q_ASSERT(m_captureDevice);
const QSize &resolution = settings.resolution();
if (!resolution.isNull() && resolution.isValid()) {
// Either the exact match (including high resolution for images on iOS)
// or a format with a resolution close to the requested one.
return qt_find_best_resolution_match(m_captureDevice, resolution);
}
// No resolution requested, what about framerates?
if (!qt_framerates_sane(settings)) {
qDebugCamera() << Q_FUNC_INFO << "invalid framerate requested (min/max):"
<< settings.minimumFrameRate() << settings.maximumFrameRate();
return nil;
}
const qreal minFPS(settings.minimumFrameRate());
const qreal maxFPS(settings.maximumFrameRate());
if (minFPS || maxFPS)
return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS);
// Ignore PAR for the moment (PAR without resolution can
// pick a format with really bad resolution).
// No need to test pixel format, just return settings.
}
#endif
return nil;
}
bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat, unsigned &avfFormat) const
{
Q_ASSERT(m_videoOutput);
unsigned conv = 0;
if (!CVPixelFormatFromQtFormat(qtFormat, conv))
return false;
NSArray *formats = [m_videoOutput availableVideoCVPixelFormatTypes];
if (!formats || !formats.count)
return false;
for (NSObject *obj in formats) {
if (![obj isKindOfClass:[NSNumber class]])
continue;
NSNumber *nsNum = static_cast<NSNumber *>(obj);
if ([nsNum unsignedIntValue] == conv) {
avfFormat = conv;
return true;
}
}
return false;
}
void AVFCameraViewfinderSettingsControl2::applySettings()
{
if (m_settings.isNull())
return;
if (!updateAVFoundationObjects())
return;
if (m_session->state() != QCamera::LoadedState &&
m_session->state() != QCamera::ActiveState) {
return;
}
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
AVCaptureDeviceFormat *match = findBestFormatMatch(m_settings);
if (match) {
if (match != m_captureDevice.activeFormat) {
const AVFConfigurationLock lock(m_captureDevice);
if (!lock) {
qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
return;
}
m_captureDevice.activeFormat = match;
}
} else {
qDebugCamera() << Q_FUNC_INFO << "matching device format not found";
// We still can update the pixel format at least.
}
#endif
unsigned avfPixelFormat = 0;
if (m_settings.pixelFormat() != QVideoFrame::Format_Invalid &&
convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
[videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
} else {
// We have to set the pixel format, otherwise AVFoundation can change it to something we do not support.
if (NSObject *oldFormat = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) {
[videoSettings setObject:oldFormat forKey:(id)kCVPixelBufferPixelFormatTypeKey];
} else {
[videoSettings setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
}
if (videoSettings.count)
m_videoOutput.videoSettings = videoSettings;
qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings);
}
QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings() const
{
return m_settings;
}
bool AVFCameraViewfinderSettingsControl2::updateAVFoundationObjects() const
{
m_session = 0;
m_captureDevice = 0;
m_videoOutput = 0;
m_videoConnection = 0;
if (!m_service->session())
return false;
if (!m_service->session()->videoCaptureDevice())
return false;
if (!m_service->videoOutput() || !m_service->videoOutput()->videoDataOutput())
return false;
AVCaptureVideoDataOutput *output = m_service->videoOutput()->videoDataOutput();
AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
if (!connection)
return false;
m_session = m_service->session();
m_captureDevice = m_session->videoCaptureDevice();
m_videoOutput = output;
m_videoConnection = connection;
return true;
}
AVFCameraViewfinderSettingsControl::AVFCameraViewfinderSettingsControl(AVFCameraService *service)
: m_service(service)
{
// Legacy viewfinder settings control.
Q_ASSERT(service);
initSettingsControl();
}
bool AVFCameraViewfinderSettingsControl::isViewfinderParameterSupported(ViewfinderParameter parameter) const
{
return parameter == Resolution
|| parameter == PixelAspectRatio
|| parameter == MinimumFrameRate
|| parameter == MaximumFrameRate
|| parameter == PixelFormat;
}
QVariant AVFCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParameter parameter) const
{
if (!isViewfinderParameterSupported(parameter)) {
qDebugCamera() << Q_FUNC_INFO << "parameter is not supported";
return QVariant();
}
if (!initSettingsControl()) {
qDebugCamera() << Q_FUNC_INFO << "initialization failed";
return QVariant();
}
const QCameraViewfinderSettings settings(m_settingsControl->viewfinderSettings());
if (parameter == Resolution)
return settings.resolution();
if (parameter == PixelAspectRatio)
return settings.pixelAspectRatio();
if (parameter == MinimumFrameRate)
return settings.minimumFrameRate();
if (parameter == MaximumFrameRate)
return settings.maximumFrameRate();
if (parameter == PixelFormat)
return QVariant::fromValue(settings.pixelFormat());
return QVariant();
}
void AVFCameraViewfinderSettingsControl::setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value)
{
if (!isViewfinderParameterSupported(parameter)) {
qDebugCamera() << Q_FUNC_INFO << "parameter is not supported";
return;
}
if (parameter == Resolution)
setResolution(value);
if (parameter == PixelAspectRatio)
setAspectRatio(value);
if (parameter == MinimumFrameRate)
setFrameRate(value, false);
if (parameter == MaximumFrameRate)
setFrameRate(value, true);
if (parameter == PixelFormat)
setPixelFormat(value);
}
void AVFCameraViewfinderSettingsControl::setResolution(const QVariant &newValue)
{
if (!newValue.canConvert<QSize>()) {
qDebugCamera() << Q_FUNC_INFO << "QSize type expected";
return;
}
if (!initSettingsControl()) {
qDebugCamera() << Q_FUNC_INFO << "initialization failed";
return;
}
const QSize res(newValue.toSize());
if (res.isNull() || !res.isValid()) {
qDebugCamera() << Q_FUNC_INFO << "invalid resolution:" << res;
return;
}
QCameraViewfinderSettings settings(m_settingsControl->viewfinderSettings());
settings.setResolution(res);
m_settingsControl->setViewfinderSettings(settings);
}
void AVFCameraViewfinderSettingsControl::setAspectRatio(const QVariant &newValue)
{
if (!newValue.canConvert<QSize>()) {
qDebugCamera() << Q_FUNC_INFO << "QSize type expected";
return;
}
if (!initSettingsControl()) {
qDebugCamera() << Q_FUNC_INFO << "initialization failed";
return;
}
const QSize par(newValue.value<QSize>());
if (par.isNull() || !par.isValid()) {
qDebugCamera() << Q_FUNC_INFO << "invalid pixel aspect ratio:" << par;
return;
}
QCameraViewfinderSettings settings(m_settingsControl->viewfinderSettings());
settings.setPixelAspectRatio(par);
m_settingsControl->setViewfinderSettings(settings);
}
void AVFCameraViewfinderSettingsControl::setFrameRate(const QVariant &newValue, bool max)
{
if (!newValue.canConvert<qreal>()) {
qDebugCamera() << Q_FUNC_INFO << "qreal type expected";
return;
}
if (!initSettingsControl()) {
qDebugCamera() << Q_FUNC_INFO << "initialization failed";
return;
}
const qreal fps(newValue.toReal());
QCameraViewfinderSettings settings(m_settingsControl->viewfinderSettings());
max ? settings.setMaximumFrameRate(fps) : settings.setMinimumFrameRate(fps);
m_settingsControl->setViewfinderSettings(settings);
}
void AVFCameraViewfinderSettingsControl::setPixelFormat(const QVariant &newValue)
{
if (!newValue.canConvert<QVideoFrame::PixelFormat>()) {
qDebugCamera() << Q_FUNC_INFO
<< "QVideoFrame::PixelFormat type expected";
return;
}
if (!initSettingsControl()) {
qDebugCamera() << Q_FUNC_INFO << "initialization failed";
return;
}
QCameraViewfinderSettings settings(m_settingsControl->viewfinderSettings());
settings.setPixelFormat(newValue.value<QVideoFrame::PixelFormat>());
m_settingsControl->setViewfinderSettings(settings);
}
bool AVFCameraViewfinderSettingsControl::initSettingsControl() const
{
if (!m_settingsControl)
m_settingsControl = m_service->viewfinderSettingsControl2();
return !m_settingsControl.isNull();
}
QT_END_NAMESPACE
#include "moc_avfcameraviewfindersettingscontrol.cpp"

View File

@@ -32,8 +32,8 @@
 ****************************************************************************/
 
 #include "avfcamerazoomcontrol.h"
-#include "avfconfigurationlock.h"
 #include "avfcameraservice.h"
+#include "avfcamerautility.h"
 #include "avfcamerasession.h"
 #include "avfcameracontrol.h"
 #include "avfcameradebug.h"

View File

@@ -56,6 +56,7 @@ public:
     QCameraImageCapture::DriveMode driveMode() const { return QCameraImageCapture::SingleImageCapture; }
     void setDriveMode(QCameraImageCapture::DriveMode ) {}
+    AVCaptureStillImageOutput *stillImageOutput() const {return m_stillImageOutput;}
 
     int capture(const QString &fileName);
     void cancelCapture();

View File

@@ -0,0 +1,77 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef AVFIMAGEENCODERCONTROL_H
#define AVFIMAGEENCODERCONTROL_H
#include <QtMultimedia/qmediaencodersettings.h>
#include <QtMultimedia/qimageencodercontrol.h>
#include <QtCore/qglobal.h>
#include <QtCore/qstring.h>
#include <QtCore/qlist.h>
@class AVCaptureDeviceFormat;
QT_BEGIN_NAMESPACE
class AVFCameraService;
class AVFImageEncoderControl : public QImageEncoderControl
{
Q_OBJECT
friend class AVFCameraSession;
public:
AVFImageEncoderControl(AVFCameraService *service);
QStringList supportedImageCodecs() const Q_DECL_OVERRIDE;
QString imageCodecDescription(const QString &codecName) const Q_DECL_OVERRIDE;
QList<QSize> supportedResolutions(const QImageEncoderSettings &settings,
bool *continuous) const Q_DECL_OVERRIDE;
QImageEncoderSettings imageSettings() const Q_DECL_OVERRIDE;
void setImageSettings(const QImageEncoderSettings &settings) Q_DECL_OVERRIDE;
private:
AVFCameraService *m_service;
QImageEncoderSettings m_settings;
void applySettings();
bool videoCaptureDeviceIsValid() const;
};
QSize qt_image_high_resolution(AVCaptureDeviceFormat *format);
QT_END_NAMESPACE
#endif

View File

@@ -0,0 +1,273 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h"
#include "avfimagecapturecontrol.h"
#include "avfcamerautility.h"
#include "avfcamerasession.h"
#include "avfcameraservice.h"
#include "avfcameradebug.h"
#include <QtMultimedia/qmediaencodersettings.h>
#include <QtCore/qsysinfo.h>
#include <QtCore/qdebug.h>
#include <AVFoundation/AVFoundation.h>
QT_BEGIN_NAMESPACE
QSize qt_image_high_resolution(AVCaptureDeviceFormat *format)
{
Q_ASSERT(format);
QSize res;
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) {
const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
res.setWidth(hrDim.width);
res.setHeight(hrDim.height);
}
#endif
return res;
}
AVFImageEncoderControl::AVFImageEncoderControl(AVFCameraService *service)
: m_service(service)
{
Q_ASSERT(service);
}
QStringList AVFImageEncoderControl::supportedImageCodecs() const
{
return QStringList() << QLatin1String("jpeg");
}
QString AVFImageEncoderControl::imageCodecDescription(const QString &codecName) const
{
if (codecName == QLatin1String("jpeg"))
return tr("JPEG image");
return QString();
}
QList<QSize> AVFImageEncoderControl::supportedResolutions(const QImageEncoderSettings &settings,
bool *continuous) const
{
Q_UNUSED(settings)
QList<QSize> resolutions;
if (!videoCaptureDeviceIsValid())
return resolutions;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
for (AVCaptureDeviceFormat *format in captureDevice.formats) {
if (qt_is_video_range_subtype(format))
continue;
const QSize res(qt_device_format_resolution(format));
if (!res.isNull() && res.isValid())
resolutions << res;
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) {
// From Apple's docs (iOS):
// By default, AVCaptureStillImageOutput emits images with the same dimensions as
// its source AVCaptureDevice instance's activeFormat.formatDescription. However,
// if you set this property to YES, the receiver emits still images at the capture
// device's highResolutionStillImageDimensions value.
const QSize hrRes(qt_image_high_resolution(format));
if (!hrRes.isNull() && hrRes.isValid())
resolutions << hrRes;
}
#endif
}
} else {
#else
{
#endif
// TODO: resolutions without AVCaptureDeviceFormat ...
}
if (continuous)
*continuous = false;
return resolutions;
}
QImageEncoderSettings AVFImageEncoderControl::imageSettings() const
{
QImageEncoderSettings settings;
if (!videoCaptureDeviceIsValid())
return settings;
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice.activeFormat) {
qDebugCamera() << Q_FUNC_INFO << "no active format";
return settings;
}
QSize res(qt_device_format_resolution(captureDevice.activeFormat));
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) {
if (!m_service->imageCaptureControl() || !m_service->imageCaptureControl()->stillImageOutput()) {
qDebugCamera() << Q_FUNC_INFO << "no still image output";
return settings;
}
AVCaptureStillImageOutput *stillImageOutput = m_service->imageCaptureControl()->stillImageOutput();
if (stillImageOutput.highResolutionStillImageOutputEnabled)
res = qt_image_high_resolution(captureDevice.activeFormat);
}
#endif
if (res.isNull() || !res.isValid()) {
qDebugCamera() << Q_FUNC_INFO << "failed to exctract the image resolution";
return settings;
}
settings.setResolution(res);
} else {
#else
{
#endif
// TODO: resolution without AVCaptureDeviceFormat.
}
settings.setCodec(QLatin1String("jpeg"));
return settings;
}
void AVFImageEncoderControl::setImageSettings(const QImageEncoderSettings &settings)
{
if (m_settings == settings || settings.isNull())
return;
m_settings = settings;
applySettings();
}
void AVFImageEncoderControl::applySettings()
{
if (!videoCaptureDeviceIsValid())
return;
AVFCameraSession *session = m_service->session();
if (!session || (session->state() != QCamera::ActiveState
&& session->state() != QCamera::LoadedState)) {
return;
}
if (!m_service->imageCaptureControl()
|| !m_service->imageCaptureControl()->stillImageOutput()) {
qDebugCamera() << Q_FUNC_INFO << "no still image output";
return;
}
if (m_settings.codec().size()
&& m_settings.codec() != QLatin1String("jpeg")) {
qDebugCamera() << Q_FUNC_INFO << "unsupported codec:" << m_settings.codec();
return;
}
QSize res(m_settings.resolution());
if (res.isNull()) {
qDebugCamera() << Q_FUNC_INFO << "invalid resolution:" << res;
return;
}
if (!res.isValid()) {
// Invalid == default value.
// Here we could choose the best format available, but
// activeFormat is already equal to 'preset high' by default,
// which is good enough; otherwise we could end up with a format that has low frame rates.
return;
}
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res);
if (!match) {
qDebugCamera() << Q_FUNC_INFO << "unsupported resolution:" << res;
return;
}
if (match != captureDevice.activeFormat) {
const AVFConfigurationLock lock(captureDevice);
if (!lock) {
qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration";
return;
}
captureDevice.activeFormat = match;
}
#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0)
if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) {
AVCaptureStillImageOutput *imageOutput = m_service->imageCaptureControl()->stillImageOutput();
if (res == qt_image_high_resolution(captureDevice.activeFormat))
imageOutput.highResolutionStillImageOutputEnabled = YES;
else
imageOutput.highResolutionStillImageOutputEnabled = NO;
}
#endif
} else {
#else
{
#endif
// TODO: resolution without capture device format ...
}
}
bool AVFImageEncoderControl::videoCaptureDeviceIsValid() const
{
if (!m_service->session() || !m_service->session()->videoCaptureDevice())
return false;
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice.formats || !captureDevice.formats.count)
return false;
return true;
}
QT_END_NAMESPACE
#include "moc_avfimageencodercontrol.cpp"

View File

@@ -39,7 +39,9 @@ HEADERS += \
     avfcameradevicecontrol.h \
     avfcamerafocuscontrol.h \
     avfcameraexposurecontrol.h \
-    avfconfigurationlock.h
+    avfcamerautility.h \
+    avfcameraviewfindersettingscontrol.h \
+    avfimageencodercontrol.h
 
 OBJECTIVE_SOURCES += \
     avfcameraserviceplugin.mm \
@@ -57,7 +59,10 @@ OBJECTIVE_SOURCES += \
     avfcameradevicecontrol.mm \
     avfcamerarenderercontrol.mm \
     avfcamerafocuscontrol.mm \
-    avfcameraexposurecontrol.mm
+    avfcameraexposurecontrol.mm \
+    avfcamerautility.mm \
+    avfcameraviewfindersettingscontrol.mm \
+    avfimageencodercontrol.mm
 
 ios {

View File

@@ -719,14 +719,17 @@ void AVFMediaPlayerSession::setVolume(int volume)
     if (m_volume == volume)
         return;
 
-    m_volume = volume;
-
-#if defined(Q_OS_OSX)
     AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
-    if (player) {
-        [[(AVFMediaPlayerSessionObserver*)m_observer player] setVolume:m_volume / 100.0f];
+    if (!player)
+        return;
+
+    if (![player respondsToSelector:@selector(setVolume:)]) {
+        qWarning("%s not implemented, requires iOS 7 or later", Q_FUNC_INFO);
+        return;
     }
-#endif
+
+    [player setVolume:m_volume / 100.0f];
+    m_volume = volume;
 
     Q_EMIT volumeChanged(m_volume);
 }
@@ -739,10 +742,19 @@ void AVFMediaPlayerSession::setMuted(bool muted)
     if (m_muted == muted)
         return;
 
+    AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
+    if (!player)
+        return;
+
+    // iOS: setMuted exists since iOS 7.0, thus check if it exists
+    if (![player respondsToSelector:@selector(setMuted:)]) {
+        qWarning("%s not implemented, requires iOS 7 or later", Q_FUNC_INFO);
+        return;
+    }
+
+    [player setMuted:m_muted];
     m_muted = muted;
-#if defined(Q_OS_OSX)
-    [[(AVFMediaPlayerSessionObserver*)m_observer player] setMuted:m_muted];
-#endif
 
     Q_EMIT mutedChanged(muted);
 }

View File

@@ -240,14 +240,16 @@ void AVFVideoFrameRenderer::initRenderer()
     //Need current context
     m_glContext->makeCurrent(m_offscreenSurface);
 
-    // Create a new open gl texture cache
-    CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, NULL,
-                                           [EAGLContext currentContext],
-                                           NULL, &m_textureCache);
-    if (err) {
+    if (!m_textureCache) {
+        // Create a new open gl texture cache
+        CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, NULL,
+                                               [EAGLContext currentContext],
+                                               NULL, &m_textureCache);
+        if (err) {
 #ifdef QT_DEBUG_AVF
-        qWarning("Error at CVOGLTextureCacheCreate %d", err);
+            qWarning("Error at CVOGLTextureCacheCreate %d", err);
 #endif
+        }
     }
 }

View File

@@ -13,7 +13,8 @@ HEADERS += \
     $$PWD/dsvideodevicecontrol.h \
    $$PWD/dsimagecapturecontrol.h \
    $$PWD/dscamerasession.h \
-    $$PWD/directshowglobal.h
+    $$PWD/directshowglobal.h \
+    $$PWD/dscameraviewfindersettingscontrol.h
 
 SOURCES += \
     $$PWD/dscameraservice.cpp \
@@ -21,7 +22,8 @@ SOURCES += \
     $$PWD/dsvideorenderer.cpp \
     $$PWD/dsvideodevicecontrol.cpp \
     $$PWD/dsimagecapturecontrol.cpp \
-    $$PWD/dscamerasession.cpp
+    $$PWD/dscamerasession.cpp \
+    $$PWD/dscameraviewfindersettingscontrol.cpp
 
 *-msvc*:INCLUDEPATH += $$(DXSDK_DIR)/include
 LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32

View File

@@ -40,6 +40,7 @@
 #include "dsvideorenderer.h"
 #include "dsvideodevicecontrol.h"
 #include "dsimagecapturecontrol.h"
+#include "dscameraviewfindersettingscontrol.h"
 
 QT_BEGIN_NAMESPACE
 
@@ -51,11 +52,13 @@ DSCameraService::DSCameraService(QObject *parent):
     m_control = new DSCameraControl(m_session);
     m_videoDevice = new DSVideoDeviceControl(m_session);
     m_imageCapture = new DSImageCaptureControl(m_session);
+    m_viewfinderSettings = new DSCameraViewfinderSettingsControl(m_session);
 }
 
 DSCameraService::~DSCameraService()
 {
     delete m_control;
+    delete m_viewfinderSettings;
     delete m_videoDevice;
     delete m_videoRenderer;
     delete m_imageCapture;
@@ -80,6 +83,9 @@ QMediaControl* DSCameraService::requestControl(const char *name)
     if (qstrcmp(name,QVideoDeviceSelectorControl_iid) == 0)
         return m_videoDevice;
 
+    if (qstrcmp(name, QCameraViewfinderSettingsControl2_iid) == 0)
+        return m_viewfinderSettings;
+
     return 0;
 }

View File

@@ -45,7 +45,7 @@ class DSCameraSession;
 class DSVideoOutputControl;
 class DSVideoDeviceControl;
 class DSImageCaptureControl;
+class DSCameraViewfinderSettingsControl;
 
 class DSCameraService : public QMediaService
 {
@@ -65,6 +65,7 @@ private:
     DSVideoDeviceControl *m_videoDevice;
     QMediaControl *m_videoRenderer;
     DSImageCaptureControl *m_imageCapture;
+    DSCameraViewfinderSettingsControl *m_viewfinderSettings;
 };
 
 QT_END_NAMESPACE

View File

@@ -78,9 +78,6 @@ void _FreeMediaType(AM_MEDIA_TYPE& mt)
} }
} // end namespace } // end namespace
typedef QList<QSize> SizeList;
Q_GLOBAL_STATIC(SizeList, commonPreviewResolutions)
static HRESULT getPin(IBaseFilter *filter, PIN_DIRECTION pinDir, IPin **pin); static HRESULT getPin(IBaseFilter *filter, PIN_DIRECTION pinDir, IPin **pin);
@@ -148,6 +145,42 @@ private:
DSCameraSession *m_session; DSCameraSession *m_session;
}; };
QVideoFrame::PixelFormat pixelFormatFromMediaSubtype(GUID uid)
{
if (uid == MEDIASUBTYPE_ARGB32)
return QVideoFrame::Format_ARGB32;
else if (uid == MEDIASUBTYPE_RGB32)
return QVideoFrame::Format_RGB32;
else if (uid == MEDIASUBTYPE_RGB24)
return QVideoFrame::Format_RGB24;
else if (uid == MEDIASUBTYPE_RGB565)
return QVideoFrame::Format_RGB565;
else if (uid == MEDIASUBTYPE_RGB555)
return QVideoFrame::Format_RGB555;
else if (uid == MEDIASUBTYPE_AYUV)
return QVideoFrame::Format_AYUV444;
else if (uid == MEDIASUBTYPE_I420 || uid == MEDIASUBTYPE_IYUV)
return QVideoFrame::Format_YUV420P;
else if (uid == MEDIASUBTYPE_YV12)
return QVideoFrame::Format_YV12;
else if (uid == MEDIASUBTYPE_UYVY)
return QVideoFrame::Format_UYVY;
else if (uid == MEDIASUBTYPE_YUYV || uid == MEDIASUBTYPE_YUY2)
return QVideoFrame::Format_YUYV;
else if (uid == MEDIASUBTYPE_NV12)
return QVideoFrame::Format_NV12;
else if (uid == MEDIASUBTYPE_IMC1)
return QVideoFrame::Format_IMC1;
else if (uid == MEDIASUBTYPE_IMC2)
return QVideoFrame::Format_IMC2;
else if (uid == MEDIASUBTYPE_IMC3)
return QVideoFrame::Format_IMC3;
else if (uid == MEDIASUBTYPE_IMC4)
return QVideoFrame::Format_IMC4;
else
return QVideoFrame::Format_Invalid;
}
DSCameraSession::DSCameraSession(QObject *parent) DSCameraSession::DSCameraSession(QObject *parent)
: QObject(parent) : QObject(parent)
@@ -167,7 +200,7 @@ DSCameraSession::DSCameraSession(QObject *parent)
, m_currentImageId(-1) , m_currentImageId(-1)
, m_status(QCamera::UnloadedStatus) , m_status(QCamera::UnloadedStatus)
{ {
ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat)); ZeroMemory(&m_sourceFormat, sizeof(m_sourceFormat));
connect(this, SIGNAL(statusChanged(QCamera::Status)), connect(this, SIGNAL(statusChanged(QCamera::Status)),
this, SLOT(updateReadyForCapture())); this, SLOT(updateReadyForCapture()));
@@ -188,6 +221,16 @@ void DSCameraSession::setDevice(const QString &device)
m_sourceDeviceName = device; m_sourceDeviceName = device;
} }
QCameraViewfinderSettings DSCameraSession::viewfinderSettings() const
{
return m_status == QCamera::ActiveStatus ? m_actualViewfinderSettings : m_viewfinderSettings;
}
void DSCameraSession::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
m_viewfinderSettings = settings;
}
bool DSCameraSession::load() bool DSCameraSession::load()
{ {
unload(); unload();
@@ -214,9 +257,10 @@ bool DSCameraSession::unload()
setStatus(QCamera::UnloadingStatus); setStatus(QCamera::UnloadingStatus);
m_needsHorizontalMirroring = false; m_needsHorizontalMirroring = false;
m_sourcePreferredResolution = QSize(); m_supportedViewfinderSettings.clear();
_FreeMediaType(m_sourcePreferredFormat); Q_FOREACH (AM_MEDIA_TYPE f, m_supportedFormats)
ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat)); _FreeMediaType(f);
m_supportedFormats.clear();
SAFE_RELEASE(m_sourceFilter); SAFE_RELEASE(m_sourceFilter);
SAFE_RELEASE(m_previewSampleGrabber); SAFE_RELEASE(m_previewSampleGrabber);
SAFE_RELEASE(m_previewFilter); SAFE_RELEASE(m_previewFilter);
@@ -302,6 +346,9 @@ bool DSCameraSession::stopPreview()
disconnectGraph(); disconnectGraph();
_FreeMediaType(m_sourceFormat);
ZeroMemory(&m_sourceFormat, sizeof(m_sourceFormat));
m_previewStarted = false; m_previewStarted = false;
setStatus(QCamera::LoadedStatus); setStatus(QCamera::LoadedStatus);
return true; return true;
@@ -581,9 +628,6 @@ bool DSCameraSession::createFilterGraph()
failed: failed:
m_needsHorizontalMirroring = false; m_needsHorizontalMirroring = false;
m_sourcePreferredResolution = QSize();
_FreeMediaType(m_sourcePreferredFormat);
ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat));
SAFE_RELEASE(m_sourceFilter); SAFE_RELEASE(m_sourceFilter);
SAFE_RELEASE(m_previewSampleGrabber); SAFE_RELEASE(m_previewSampleGrabber);
SAFE_RELEASE(m_previewFilter); SAFE_RELEASE(m_previewFilter);
@@ -596,6 +640,34 @@ failed:
bool DSCameraSession::configurePreviewFormat() bool DSCameraSession::configurePreviewFormat()
{ {
// Resolve viewfinder settings
int settingsIndex = 0;
QCameraViewfinderSettings resolvedViewfinderSettings;
Q_FOREACH (const QCameraViewfinderSettings &s, m_supportedViewfinderSettings) {
if ((m_viewfinderSettings.resolution().isEmpty() || m_viewfinderSettings.resolution() == s.resolution())
&& (qFuzzyIsNull(m_viewfinderSettings.minimumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.minimumFrameRate(), (float)s.minimumFrameRate()))
&& (qFuzzyIsNull(m_viewfinderSettings.maximumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.maximumFrameRate(), (float)s.maximumFrameRate()))
&& (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid || m_viewfinderSettings.pixelFormat() == s.pixelFormat())) {
resolvedViewfinderSettings = s;
break;
}
++settingsIndex;
}
if (resolvedViewfinderSettings.isNull()) {
qWarning("Invalid viewfinder settings");
return false;
}
m_actualViewfinderSettings = resolvedViewfinderSettings;
_CopyMediaType(&m_sourceFormat, &m_supportedFormats[settingsIndex]);
// Set frame rate.
// We don't care about the minimumFrameRate; DirectShow only allows setting an
// average frame rate, so set that to the maximumFrameRate.
VIDEOINFOHEADER *videoInfo = reinterpret_cast<VIDEOINFOHEADER*>(m_sourceFormat.pbFormat);
videoInfo->AvgTimePerFrame = 10000000 / resolvedViewfinderSettings.maximumFrameRate();
// We only support RGB32, if the capture source doesn't support // We only support RGB32, if the capture source doesn't support
// that format, the graph builder will automatically insert a // that format, the graph builder will automatically insert a
// converter. // converter.
@@ -607,7 +679,7 @@ bool DSCameraSession::configurePreviewFormat()
} }
m_previewPixelFormat = QVideoFrame::Format_RGB32; m_previewPixelFormat = QVideoFrame::Format_RGB32;
m_previewSize = m_sourcePreferredResolution; m_previewSize = resolvedViewfinderSettings.resolution();
m_previewSurfaceFormat = QVideoSurfaceFormat(m_previewSize, m_previewSurfaceFormat = QVideoSurfaceFormat(m_previewSize,
m_previewPixelFormat, m_previewPixelFormat,
QAbstractVideoBuffer::NoHandle); QAbstractVideoBuffer::NoHandle);
@@ -624,7 +696,7 @@ bool DSCameraSession::configurePreviewFormat()
return false; return false;
} }
hr = pConfig->SetFormat(&m_sourcePreferredFormat); hr = pConfig->SetFormat(&m_sourceFormat);
pConfig->Release(); pConfig->Release();
@@ -716,6 +788,11 @@ void DSCameraSession::disconnectGraph()
m_filterGraph->RemoveFilter(m_sourceFilter); m_filterGraph->RemoveFilter(m_sourceFilter);
} }
static bool qt_frameRateRangeGreaterThan(const QCamera::FrameRateRange &r1, const QCamera::FrameRateRange &r2)
{
return r1.second > r2.second;
}
void DSCameraSession::updateSourceCapabilities() void DSCameraSession::updateSourceCapabilities()
{ {
HRESULT hr; HRESULT hr;
@@ -724,10 +801,11 @@ void DSCameraSession::updateSourceCapabilities()
VIDEO_STREAM_CONFIG_CAPS scc; VIDEO_STREAM_CONFIG_CAPS scc;
IAMStreamConfig* pConfig = 0; IAMStreamConfig* pConfig = 0;
m_supportedViewfinderSettings.clear();
m_needsHorizontalMirroring = false; m_needsHorizontalMirroring = false;
m_sourcePreferredResolution = QSize(); Q_FOREACH (AM_MEDIA_TYPE f, m_supportedFormats)
_FreeMediaType(m_sourcePreferredFormat); _FreeMediaType(f);
ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat)); m_supportedFormats.clear();
IAMVideoControl *pVideoControl = 0; IAMVideoControl *pVideoControl = 0;
hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
@@ -774,53 +852,68 @@ void DSCameraSession::updateSourceCapabilities()
return; return;
} }
// Use preferred pixel format (first in the list)
// Then, pick the highest available resolution among the typical resolutions
// used for camera preview.
if (commonPreviewResolutions->isEmpty())
populateCommonResolutions();
long maxPixelCount = 0;
for (int iIndex = 0; iIndex < iCount; ++iIndex) { for (int iIndex = 0; iIndex < iCount; ++iIndex) {
hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc)); hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc));
if (hr == S_OK) { if (hr == S_OK) {
if ((pmt->majortype == MEDIATYPE_Video) && QVideoFrame::PixelFormat pixelFormat = pixelFormatFromMediaSubtype(pmt->subtype);
(pmt->formattype == FORMAT_VideoInfo) &&
(!m_sourcePreferredFormat.cbFormat || if (pmt->majortype == MEDIATYPE_Video
m_sourcePreferredFormat.subtype == pmt->subtype)) { && pmt->formattype == FORMAT_VideoInfo
&& pixelFormat != QVideoFrame::Format_Invalid) {
pvi = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat); pvi = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
QSize resolution(pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight); QSize resolution(pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight);
long pixelCount = resolution.width() * resolution.height();
if (!m_sourcePreferredFormat.cbFormat || QList<QCamera::FrameRateRange> frameRateRanges;
(pixelCount > maxPixelCount && commonPreviewResolutions->contains(resolution))) {
_FreeMediaType(m_sourcePreferredFormat); if (pVideoControl) {
_CopyMediaType(&m_sourcePreferredFormat, pmt); IPin *pPin = 0;
m_sourcePreferredResolution = resolution; hr = getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin);
maxPixelCount = pixelCount; if (FAILED(hr)) {
qWarning() << "Failed to get the pin for the video control";
} else {
long listSize = 0;
LONGLONG *frameRates = 0;
SIZE size = { resolution.width(), resolution.height() };
if (SUCCEEDED(pVideoControl->GetFrameRateList(pPin, iIndex, size,
&listSize, &frameRates))) {
for (long i = 0; i < listSize; ++i) {
qreal fr = qreal(10000000) / frameRates[i];
frameRateRanges.append(QCamera::FrameRateRange(fr, fr));
}
// Make sure higher frame rates come first
std::sort(frameRateRanges.begin(), frameRateRanges.end(), qt_frameRateRangeGreaterThan);
}
pPin->Release();
}
} }
if (frameRateRanges.isEmpty()) {
frameRateRanges.append(QCamera::FrameRateRange(qreal(10000000) / scc.MaxFrameInterval,
qreal(10000000) / scc.MinFrameInterval));
}
Q_FOREACH (const QCamera::FrameRateRange &frameRateRange, frameRateRanges) {
QCameraViewfinderSettings settings;
settings.setResolution(resolution);
settings.setMinimumFrameRate(frameRateRange.first);
settings.setMaximumFrameRate(frameRateRange.second);
settings.setPixelFormat(pixelFormat);
m_supportedViewfinderSettings.append(settings);
AM_MEDIA_TYPE format;
_CopyMediaType(&format, pmt);
m_supportedFormats.append(format);
}
} }
_FreeMediaType(*pmt); _FreeMediaType(*pmt);
} }
} }
pConfig->Release(); pConfig->Release();
if (!m_sourcePreferredResolution.isValid())
m_sourcePreferredResolution = QSize(640, 480);
}
void DSCameraSession::populateCommonResolutions()
{
commonPreviewResolutions->append(QSize(1920, 1080)); // 1080p
commonPreviewResolutions->append(QSize(1280, 720)); // 720p
commonPreviewResolutions->append(QSize(1024, 576)); // WSVGA
commonPreviewResolutions->append(QSize(720, 480)); // 480p (16:9)
commonPreviewResolutions->append(QSize(640, 480)); // 480p (4:3)
commonPreviewResolutions->append(QSize(352, 288)); // CIF
commonPreviewResolutions->append(QSize(320, 240)); // QVGA
} }
HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin) HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
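
// Illustrative sketch (not part of the patch): the 10000000 factors used above come
// from DirectShow expressing frame intervals (AvgTimePerFrame, MinFrameInterval,
// MaxFrameInterval) in REFERENCE_TIME units of 100 nanoseconds, so 1 second equals
// 10,000,000 units. The conversions in both directions look like this:
static qreal sketchFrameRateFromInterval(qint64 interval100ns)
{
    // e.g. an interval of 333333 units is roughly 30 frames per second
    return interval100ns > 0 ? qreal(10000000) / interval100ns : qreal(0);
}

static qint64 sketchIntervalFromFrameRate(qreal frameRate)
{
    // inverse conversion, as used when filling VIDEOINFOHEADER::AvgTimePerFrame
    return frameRate > 0 ? qint64(10000000 / frameRate) : 0;
}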

View File

@@ -91,6 +91,12 @@ public:
     void setSurface(QAbstractVideoSurface* surface);
 
+    QCameraViewfinderSettings viewfinderSettings() const;
+    void setViewfinderSettings(const QCameraViewfinderSettings &settings);
+
+    QList<QCameraViewfinderSettings> supportedViewfinderSettings() const
+    { return m_supportedViewfinderSettings; }
+
 Q_SIGNALS:
     void statusChanged(QCamera::Status);
     void imageExposed(int id);
@@ -105,7 +111,6 @@ private Q_SLOTS:
 private:
     void setStatus(QCamera::Status status);
 
-    void populateCommonResolutions();
     void onFrameAvailable(const char *frameData, long len);
     void saveCapturedImage(int id, const QImage &image, const QString &path);
@@ -126,9 +131,10 @@ private:
     // Source (camera)
     QString m_sourceDeviceName;
     IBaseFilter* m_sourceFilter;
-    AM_MEDIA_TYPE m_sourcePreferredFormat;
-    QSize m_sourcePreferredResolution;
     bool m_needsHorizontalMirroring;
+    QList<AM_MEDIA_TYPE> m_supportedFormats;
+    QList<QCameraViewfinderSettings> m_supportedViewfinderSettings;
+    AM_MEDIA_TYPE m_sourceFormat;
 
     // Preview
     IBaseFilter *m_previewFilter;
@@ -140,6 +146,8 @@ private:
     QVideoSurfaceFormat m_previewSurfaceFormat;
     QVideoFrame::PixelFormat m_previewPixelFormat;
     QSize m_previewSize;
+    QCameraViewfinderSettings m_viewfinderSettings;
+    QCameraViewfinderSettings m_actualViewfinderSettings;
 
     // Image capture
     QString m_imageCaptureFileName;

View File

@@ -0,0 +1,60 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "dscameraviewfindersettingscontrol.h"
#include "dscamerasession.h"
QT_BEGIN_NAMESPACE
DSCameraViewfinderSettingsControl::DSCameraViewfinderSettingsControl(DSCameraSession *session)
: QCameraViewfinderSettingsControl2(session)
, m_session(session)
{
}
QList<QCameraViewfinderSettings> DSCameraViewfinderSettingsControl::supportedViewfinderSettings() const
{
return m_session->supportedViewfinderSettings();
}
QCameraViewfinderSettings DSCameraViewfinderSettingsControl::viewfinderSettings() const
{
return m_session->viewfinderSettings();
}
void DSCameraViewfinderSettingsControl::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
m_session->setViewfinderSettings(settings);
}
QT_END_NAMESPACE

View File

@@ -31,48 +31,29 @@
** **
****************************************************************************/ ****************************************************************************/
#ifndef AVFCONFIGURATIONLOCK_H #ifndef DSCAMERAVIEWFINDERSETTINGSCONTROL_H
#define AVFCONFIGURATIONLOCK_H #define DSCAMERAVIEWFINDERSETTINGSCONTROL_H
#include <QtCore/qglobal.h> #include <qcameraviewfindersettingscontrol.h>
#include <QtCore/qdebug.h>
#include <AVFoundation/AVFoundation.h>
@class AVCaptureDevice;
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class AVFConfigurationLock class DSCameraSession;
class DSCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl2
{ {
public: public:
explicit AVFConfigurationLock(AVCaptureDevice *captureDevice) DSCameraViewfinderSettingsControl(DSCameraSession *session);
: m_captureDevice(captureDevice),
m_locked(false)
{
Q_ASSERT(m_captureDevice);
NSError *error = nil;
m_locked = [m_captureDevice lockForConfiguration:&error];
}
~AVFConfigurationLock() QList<QCameraViewfinderSettings> supportedViewfinderSettings() const;
{
if (m_locked)
[m_captureDevice unlockForConfiguration];
}
operator bool() const QCameraViewfinderSettings viewfinderSettings() const;
{ void setViewfinderSettings(const QCameraViewfinderSettings &settings);
return m_locked;
}
private: private:
Q_DISABLE_COPY(AVFConfigurationLock) DSCameraSession *m_session;
AVCaptureDevice *m_captureDevice;
bool m_locked;
}; };
QT_END_NAMESPACE QT_END_NAMESPACE
#endif #endif // DSCAMERAVIEWFINDERSETTINGSCONTROL_H

View File

@@ -124,8 +124,22 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
 #else
     m_videoWindow = new QGstreamerVideoWindow(this);
 #endif
+    // If the GStreamer sink element is not available (xvimagesink), don't provide
+    // the video window control since it won't work anyway.
+    if (!m_videoWindow->videoSink()) {
+        delete m_videoWindow;
+        m_videoWindow = 0;
+    }
+
 #if defined(HAVE_WIDGETS)
     m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
+    // If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
+    // the video widget control since it won't work anyway.
+    // QVideoWidget will fall back to QVideoRendererControl in that case.
+    if (!m_videoWidgetControl->videoSink()) {
+        delete m_videoWidgetControl;
+        m_videoWidgetControl = 0;
+    }
 #endif
 
     m_audioInputSelector = new QGstreamerAudioInputSelector(this);

View File

@@ -48,6 +48,7 @@ class CameraBinControl;
 class QGstreamerMessage;
 class QGstreamerBusHelper;
 class QGstreamerVideoRenderer;
+class QGstreamerVideoWindow;
 class QGstreamerVideoWidgetControl;
 class QGstreamerElementFactory;
 class CameraBinMetaData;
@@ -81,7 +82,7 @@ private:
     QMediaControl *m_videoOutput;
     QMediaControl *m_videoRenderer;
-    QMediaControl *m_videoWindow;
+    QGstreamerVideoWindow *m_videoWindow;
 #if defined(HAVE_WIDGETS)
     QGstreamerVideoWidgetControl *m_videoWidgetControl;
 #endif

View File

@@ -100,10 +100,25 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
         m_videoInput->setDevice(m_videoInputDevice->deviceName(m_videoInputDevice->selectedDevice()));
 
     m_videoRenderer = new QGstreamerVideoRenderer(this);
     m_videoWindow = new QGstreamerVideoWindow(this);
+    // If the GStreamer sink element is not available (xvimagesink), don't provide
+    // the video window control since it won't work anyway.
+    if (!m_videoWindow->videoSink()) {
+        delete m_videoWindow;
+        m_videoWindow = 0;
+    }
+
 #if defined(HAVE_WIDGETS)
     m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
+    // If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
+    // the video widget control since it won't work anyway.
+    // QVideoWidget will fall back to QVideoRendererControl in that case.
+    if (!m_videoWidgetControl->videoSink()) {
+        delete m_videoWidgetControl;
+        m_videoWidgetControl = 0;
+    }
 #endif
 
     m_imageCaptureControl = new QGstreamerImageCaptureControl(m_captureSession);
 }

View File

@@ -49,6 +49,7 @@ class QGstreamerCameraControl;
 class QGstreamerMessage;
 class QGstreamerBusHelper;
 class QGstreamerVideoRenderer;
+class QGstreamerVideoWindow;
 class QGstreamerVideoWidgetControl;
 class QGstreamerElementFactory;
 class QGstreamerCaptureMetaDataControl;
@@ -82,9 +83,9 @@ private:
     QMediaControl *m_videoOutput;
     QGstreamerVideoRenderer *m_videoRenderer;
-    QMediaControl *m_videoWindow;
+    QGstreamerVideoWindow *m_videoWindow;
 #if defined(HAVE_WIDGETS)
-    QMediaControl *m_videoWidgetControl;
+    QGstreamerVideoWidgetControl *m_videoWidgetControl;
 #endif
 
     QGstreamerImageCaptureControl *m_imageCaptureControl;

View File

@@ -99,9 +99,23 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
 #else
     m_videoWindow = new QGstreamerVideoWindow(this);
 #endif
+    // If the GStreamer sink element is not available (xvimagesink), don't provide
+    // the video window control since it won't work anyway.
+    if (!m_videoWindow->videoSink()) {
+        delete m_videoWindow;
+        m_videoWindow = 0;
+    }
+
 #if defined(HAVE_WIDGETS)
     m_videoWidget = new QGstreamerVideoWidgetControl(this);
+    // If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
+    // the video widget control since it won't work anyway.
+    // QVideoWidget will fall back to QVideoRendererControl in that case.
+    if (!m_videoWidget->videoSink()) {
+        delete m_videoWidget;
+        m_videoWidget = 0;
+    }
 #endif
 }

View File

@@ -50,6 +50,7 @@ class QGstreamerPlayerSession;
 class QGstreamerMetaDataProvider;
 class QGstreamerStreamsControl;
 class QGstreamerVideoRenderer;
+class QGstreamerVideoWindow;
 class QGstreamerVideoWidgetControl;
 class QGStreamerAvailabilityControl;
 class QGstreamerAudioProbeControl;
@@ -77,9 +78,9 @@ private:
     QMediaControl *m_videoOutput;
     QMediaControl *m_videoRenderer;
-    QMediaControl *m_videoWindow;
+    QGstreamerVideoWindow *m_videoWindow;
 #if defined(HAVE_WIDGETS)
-    QMediaControl *m_videoWidget;
+    QGstreamerVideoWidgetControl *m_videoWidget;
 #endif
 
     void increaseVideoRef();