Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: I957b44c06fca7a3f552edc18f286de043d31e42d
Frederik Gladhorn
2015-08-09 15:27:16 +02:00
24 changed files with 118 additions and 141 deletions

View File

@@ -66,7 +66,7 @@ QAudioSystemFactoryInterface::~QAudioSystemFactoryInterface()
     \sa QAbstractAudioDeviceInfo, QAbstractAudioOutput, QAbstractAudioInput
-    Qt supports win32, linux(alsa) and Mac OS X standard (builtin to the
+    Qt supports win32, linux(alsa) and OS X standard (builtin to the
     QtMultimedia library at compile time).
     You can support other backends other than these predefined ones by

View File

@@ -32,6 +32,6 @@
     \brief Demonstrates the multimedia functionality provided by Qt.
     The \l{Qt Multimedia} module provides low-level audio support on Linux,
-    Windows and Mac OS X. It also provides audio plugin API to allow developers
+    Windows and OS X. It also provides audio plugin API to allow developers
     implement their own audio support for custom devices and platforms.
 */

View File

@@ -92,7 +92,7 @@ int QAbstractVideoBufferPrivate::map(
     \value NoHandle The buffer has no handle, its data can only be accessed by mapping the buffer.
     \value GLTextureHandle The handle of the buffer is an OpenGL texture ID.
     \value XvShmImageHandle The handle contains pointer to shared memory XVideo image.
-    \value CoreImageHandle The handle contains pointer to Mac OS X CIImage.
+    \value CoreImageHandle The handle contains pointer to OS X CIImage.
     \value QPixmapHandle The handle of the buffer is a QPixmap.
     \value EGLImageHandle The handle of the buffer is an EGLImageKHR.
     \value UserHandle Start value for user defined handle types.

View File

@@ -61,6 +61,7 @@ public:
         , pixelAspectRatio(1, 1)
         , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
         , frameRate(0.0)
+        , mirrored(false)
     {
     }
@@ -76,6 +77,7 @@ public:
         , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
         , viewport(QPoint(0, 0), size)
         , frameRate(0.0)
+        , mirrored(false)
     {
     }
@@ -89,6 +91,7 @@ public:
         , ycbcrColorSpace(other.ycbcrColorSpace)
         , viewport(other.viewport)
         , frameRate(other.frameRate)
+        , mirrored(other.mirrored)
         , propertyNames(other.propertyNames)
         , propertyValues(other.propertyValues)
     {
@@ -104,6 +107,7 @@ public:
                 && viewport == other.viewport
                 && frameRatesEqual(frameRate, other.frameRate)
                 && ycbcrColorSpace == other.ycbcrColorSpace
+                && mirrored == other.mirrored
                 && propertyNames.count() == other.propertyNames.count()) {
             for (int i = 0; i < propertyNames.count(); ++i) {
                 int j = other.propertyNames.indexOf(propertyNames.at(i));
@@ -130,6 +134,7 @@ public:
     QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace;
     QRect viewport;
     qreal frameRate;
+    bool mirrored;
     QList<QByteArray> propertyNames;
     QList<QVariant> propertyValues;
 };
@@ -468,7 +473,8 @@ QList<QByteArray> QVideoSurfaceFormat::propertyNames() const
             << "frameRate"
             << "pixelAspectRatio"
             << "sizeHint"
-            << "yCbCrColorSpace")
+            << "yCbCrColorSpace"
+            << "mirrored")
             + d->propertyNames;
 }
@@ -499,6 +505,8 @@ QVariant QVideoSurfaceFormat::property(const char *name) const
         return sizeHint();
     } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
         return qVariantFromValue(d->ycbcrColorSpace);
+    } else if (qstrcmp(name, "mirrored") == 0) {
+        return d->mirrored;
     } else {
         int id = 0;
         for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}
@@ -547,6 +555,9 @@ void QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value)
     } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
         if (value.canConvert<YCbCrColorSpace>())
             d->ycbcrColorSpace = qvariant_cast<YCbCrColorSpace>(value);
+    } else if (qstrcmp(name, "mirrored") == 0) {
+        if (value.canConvert<bool>())
+            d->mirrored = qvariant_cast<bool>(value);
     } else {
         int id = 0;
         for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}
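
A minimal sketch (not part of the patch; the frame size is made up) of how the new "mirrored" flag flows through the generic property interface added above:

    #include <QVideoSurfaceFormat>
    #include <QVideoFrame>
    #include <QSize>

    // Hypothetical producer/consumer round trip for the new flag.
    static bool exampleMirroredFlag()
    {
        QVideoSurfaceFormat format(QSize(1280, 720), QVideoFrame::Format_RGB32);
        format.setProperty("mirrored", true);        // stored in the new d->mirrored member
        return format.property("mirrored").toBool(); // reads back false if never set
    }

Because the flag is exposed through property()/setProperty() rather than a new getter, sinks that never query "mirrored" simply ignore it and keep working unchanged.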

View File

@@ -86,11 +86,13 @@ private:
     QSize m_imageSize;
     QImage::Format m_imageFormat;
     QVideoSurfaceFormat::Direction m_scanLineDirection;
+    bool m_mirrored;
 };
 QVideoSurfaceGenericPainter::QVideoSurfaceGenericPainter()
     : m_imageFormat(QImage::Format_Invalid)
     , m_scanLineDirection(QVideoSurfaceFormat::TopToBottom)
+    , m_mirrored(false)
 {
     m_imagePixelFormats << QVideoFrame::Format_RGB32;
@@ -137,6 +139,7 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::start(const QVideoSurf
     m_imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat());
     m_imageSize = format.frameSize();
     m_scanLineDirection = format.scanLineDirection();
+    m_mirrored = format.property("mirrored").toBool();
     const QAbstractVideoBuffer::HandleType t = format.handleType();
     if (t == QAbstractVideoBuffer::NoHandle) {
@@ -183,17 +186,22 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::paint(
                 m_frame.bytesPerLine(),
                 m_imageFormat);
-        if (m_scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
-            const QTransform oldTransform = painter->transform();
-            painter->scale(1, -1);
-            painter->translate(0, -target.bottom());
-            painter->drawImage(
-                QRectF(target.x(), 0, target.width(), target.height()), image, source);
-            painter->setTransform(oldTransform);
-        } else {
-            painter->drawImage(target, image, source);
-        }
+        const QTransform oldTransform = painter->transform();
+        QTransform transform = oldTransform;
+        QRectF targetRect = target;
+        if (m_scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
+            transform.scale(1, -1);
+            transform.translate(0, -target.bottom());
+            targetRect.setY(0);
+        }
+        if (m_mirrored) {
+            transform.scale(-1, 1);
+            transform.translate(-target.right(), 0);
+            targetRect.setX(0);
+        }
+        painter->setTransform(transform);
+        painter->drawImage(targetRect, image, source);
+        painter->setTransform(oldTransform);
         m_frame.unmap();
     } else if (m_frame.isValid()) {
@@ -281,6 +289,7 @@ protected:
     QGLContext *m_context;
     QAbstractVideoBuffer::HandleType m_handleType;
     QVideoSurfaceFormat::Direction m_scanLineDirection;
+    bool m_mirrored;
     QVideoSurfaceFormat::YCbCrColorSpace m_colorSpace;
     GLenum m_textureFormat;
     GLuint m_textureInternalFormat;
@@ -299,6 +308,7 @@ QVideoSurfaceGLPainter::QVideoSurfaceGLPainter(QGLContext *context)
     : m_context(context)
     , m_handleType(QAbstractVideoBuffer::NoHandle)
     , m_scanLineDirection(QVideoSurfaceFormat::TopToBottom)
+    , m_mirrored(false)
     , m_colorSpace(QVideoSurfaceFormat::YCbCr_BT601)
     , m_textureFormat(0)
     , m_textureInternalFormat(0)
@@ -829,6 +839,7 @@ QAbstractVideoSurface::Error QVideoSurfaceArbFpPainter::start(const QVideoSurfac
     } else {
         m_handleType = format.handleType();
         m_scanLineDirection = format.scanLineDirection();
+        m_mirrored = format.property("mirrored").toBool();
         m_frameSize = format.frameSize();
         m_colorSpace = format.yCbCrColorSpace();
@@ -878,8 +889,10 @@ QAbstractVideoSurface::Error QVideoSurfaceArbFpPainter::paint(
     if (scissorTestEnabled)
         glEnable(GL_SCISSOR_TEST);
-    const float txLeft = source.left() / m_frameSize.width();
-    const float txRight = source.right() / m_frameSize.width();
+    const float txLeft = m_mirrored ? source.right() / m_frameSize.width()
+                                    : source.left() / m_frameSize.width();
+    const float txRight = m_mirrored ? source.left() / m_frameSize.width()
+                                     : source.right() / m_frameSize.width();
     const float txTop = m_scanLineDirection == QVideoSurfaceFormat::TopToBottom
             ? source.top() / m_frameSize.height()
             : source.bottom() / m_frameSize.height();
@@ -1188,6 +1201,7 @@ QAbstractVideoSurface::Error QVideoSurfaceGlslPainter::start(const QVideoSurface
     } else {
         m_handleType = format.handleType();
         m_scanLineDirection = format.scanLineDirection();
+        m_mirrored = format.property("mirrored").toBool();
         m_frameSize = format.frameSize();
         m_colorSpace = format.yCbCrColorSpace();
@@ -1276,8 +1290,10 @@ QAbstractVideoSurface::Error QVideoSurfaceGlslPainter::paint(
         GLfloat(target.right() + 1), GLfloat(target.top())
     };
-    const GLfloat txLeft = source.left() / m_frameSize.width();
-    const GLfloat txRight = source.right() / m_frameSize.width();
+    const GLfloat txLeft = m_mirrored ? source.right() / m_frameSize.width()
+                                      : source.left() / m_frameSize.width();
+    const GLfloat txRight = m_mirrored ? source.left() / m_frameSize.width()
+                                       : source.right() / m_frameSize.width();
     const GLfloat txTop = m_scanLineDirection == QVideoSurfaceFormat::TopToBottom
             ? source.top() / m_frameSize.height()
            : source.bottom() / m_frameSize.height();
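
As a rough illustration of the transform composition used in the generic painter above (not from the patch; the target rectangle values are hypothetical), the flip and mirror steps map the redirected draw rectangle back onto the original target:

    #include <QTransform>
    #include <QRectF>
    #include <QPointF>
    #include <QDebug>

    int main()
    {
        const QRectF target(10.0, 20.0, 640.0, 480.0); // hypothetical target rect

        QTransform flip;                               // BottomToTop: flip vertically about the target
        flip.scale(1, -1);
        flip.translate(0, -target.bottom());
        qDebug() << flip.map(QPointF(target.x(), 0));  // (10, 500): row 0 lands on target.bottom()

        flip.scale(-1, 1);                             // additional horizontal mirror
        flip.translate(-target.right(), 0);
        qDebug() << flip.map(QPointF(0, 0));           // (650, 500): column 0 lands on target.right()
        return 0;
    }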

View File

@@ -65,8 +65,6 @@ public:
     AVCaptureDevice *createCaptureDevice();
 private:
-    AVFCameraService *m_service;
     QString m_activeInput;
     bool m_dirty;
     QString m_defaultDevice;

View File

@@ -41,9 +41,9 @@ QT_USE_NAMESPACE
 AVFAudioInputSelectorControl::AVFAudioInputSelectorControl(AVFCameraService *service, QObject *parent)
     : QAudioInputSelectorControl(parent)
-    , m_service(service)
     , m_dirty(true)
 {
+    Q_UNUSED(service);
     NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
     for (AVCaptureDevice *device in videoDevices) {
         QString deviceId = QString::fromUtf8([[device uniqueID] UTF8String]);

View File

@@ -65,7 +65,6 @@ private Q_SLOTS:
     void updateStatus();
 private:
-    AVFCameraService *m_service;
     AVFCameraSession *m_session;
     QCamera::State m_state;

View File

@@ -40,12 +40,12 @@ QT_USE_NAMESPACE
 AVFCameraControl::AVFCameraControl(AVFCameraService *service, QObject *parent)
     : QCameraControl(parent)
-    , m_service(service)
     , m_session(service->session())
     , m_state(QCamera::UnloadedState)
     , m_lastStatus(QCamera::UnloadedStatus)
     , m_captureMode(QCamera::CaptureStillImage)
 {
+    Q_UNUSED(service);
     connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus()));
 }

View File

@@ -55,7 +55,6 @@ public:
     QStringList availableMetaData() const;
 private:
-    AVFCameraService *m_service;
     QMap<QString, QVariant> m_tags;
 };

View File

@@ -40,8 +40,9 @@ QT_USE_NAMESPACE
 //metadata support is not implemented yet
 AVFCameraMetaDataControl::AVFCameraMetaDataControl(AVFCameraService *service, QObject *parent)
-    :QMetaDataWriterControl(parent), m_service(service)
+    :QMetaDataWriterControl(parent)
 {
+    Q_UNUSED(service);
 }
 AVFCameraMetaDataControl::~AVFCameraMetaDataControl()

View File

@@ -66,7 +66,6 @@ private Q_SLOTS:
     void updateReadyStatus();
 private:
-    AVFCameraService *m_service;
     AVFCameraSession *m_session;
     AVFCameraControl *m_cameraControl;
     bool m_ready;

View File

@@ -46,13 +46,13 @@ QT_USE_NAMESPACE
 AVFImageCaptureControl::AVFImageCaptureControl(AVFCameraService *service, QObject *parent)
     : QCameraImageCaptureControl(parent)
-    , m_service(service)
     , m_session(service->session())
     , m_cameraControl(service->cameraControl())
     , m_ready(false)
     , m_lastCaptureId(0)
     , m_videoConnection(nil)
 {
+    Q_UNUSED(service);
     m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
     NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:

View File

@@ -33,7 +33,6 @@ HEADERS += \
     avfaudioinputselectorcontrol.h \
     avfcamerainfocontrol.h \
     avfmediavideoprobecontrol.h \
-    avfcamerainfocontrol.h \
     avfcamerarenderercontrol.h \
     avfcameradevicecontrol.h \
     avfcamerafocuscontrol.h \
@@ -54,7 +53,6 @@ OBJECTIVE_SOURCES += \
     avfaudioinputselectorcontrol.mm \
     avfcamerainfocontrol.mm \
     avfmediavideoprobecontrol.mm \
-    avfcamerainfocontrol.mm \
     avfcameradevicecontrol.mm \
     avfcamerarenderercontrol.mm \
     avfcamerafocuscontrol.mm \

View File

@@ -48,6 +48,7 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session)
     m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
 {
 #ifdef HAVE_GST_PHOTOGRAPHY
+    if (m_session->photography()) {
     m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_AUTO] = QCameraImageProcessing::WhiteBalanceAuto;
     m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT] = QCameraImageProcessing::WhiteBalanceSunlight;
     m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_CLOUDY] = QCameraImageProcessing::WhiteBalanceCloudy;
@@ -55,6 +56,7 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session)
     m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN] = QCameraImageProcessing::WhiteBalanceTungsten;
     m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT] = QCameraImageProcessing::WhiteBalanceFluorescent;
     unlockWhiteBalance();
+    }
 #if GST_CHECK_VERSION(1, 0, 0)
     m_filterMap.insert(QCameraImageProcessing::ColorFilterNone, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL);
@@ -188,10 +190,19 @@ bool CameraBinImageProcessing::isWhiteBalanceModeSupported(QCameraImageProcessin
 bool CameraBinImageProcessing::isParameterSupported(QCameraImageProcessingControl::ProcessingParameter parameter) const
 {
-    return parameter == QCameraImageProcessingControl::Contrast
-            || parameter == QCameraImageProcessingControl::Brightness
-            || parameter == QCameraImageProcessingControl::Saturation
-            || parameter == QCameraImageProcessingControl::WhiteBalancePreset;
+#ifdef HAVE_GST_PHOTOGRAPHY
+    if (parameter == QCameraImageProcessingControl::WhiteBalancePreset
+            || parameter == QCameraImageProcessingControl::ColorFilter)
+        return m_session->photography();
+#endif
+    if (parameter == QCameraImageProcessingControl::Contrast
+            || parameter == QCameraImageProcessingControl::Brightness
+            || parameter == QCameraImageProcessingControl::Saturation) {
+        return GST_IS_COLOR_BALANCE(m_session->cameraBin());
+    }
+    return false;
 }
 bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingControl::ProcessingParameter parameter, const QVariant &value) const
@@ -200,7 +211,7 @@ bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingC
     case ContrastAdjustment:
     case BrightnessAdjustment:
     case SaturationAdjustment:
-        return qAbs(value.toReal()) <= 1.0;
+        return GST_IS_COLOR_BALANCE(m_session->cameraBin()) && qAbs(value.toReal()) <= 1.0;
     case WhiteBalancePreset:
         return isWhiteBalanceModeSupported(value.value<QCameraImageProcessing::WhiteBalanceMode>());
     case ColorFilter: {

View File

@@ -542,9 +542,10 @@ GstElement *CameraBinSession::buildCameraSource()
 void CameraBinSession::captureImage(int requestId, const QString &fileName)
 {
-    QString actualFileName = fileName;
-    if (actualFileName.isEmpty())
-        actualFileName = generateFileName("img_", defaultDir(QCamera::CaptureStillImage), "jpg");
+    const QString actualFileName = m_mediaStorageLocation.generateFileName(fileName,
+                                                                           QMediaStorageLocation::Pictures,
+                                                                           QLatin1String("IMG_"),
+                                                                           QLatin1String("jpg"));
     m_requestId = requestId;
@@ -592,60 +593,6 @@ bool CameraBinSession::setOutputLocation(const QUrl& sink)
     return true;
 }
-QDir CameraBinSession::defaultDir(QCamera::CaptureModes mode) const
-{
-    QStringList dirCandidates;
-#if defined(Q_WS_MAEMO_6)
-    dirCandidates << QLatin1String("/home/user/MyDocs/DCIM");
-    dirCandidates << QLatin1String("/home/user/MyDocs/");
-#endif
-    if (mode == QCamera::CaptureVideo) {
-        dirCandidates << QStandardPaths::writableLocation(QStandardPaths::MoviesLocation);
-        dirCandidates << QDir::home().filePath("Documents/Video");
-        dirCandidates << QDir::home().filePath("Documents/Videos");
-    } else {
-        dirCandidates << QStandardPaths::writableLocation(QStandardPaths::PicturesLocation);
-        dirCandidates << QDir::home().filePath("Documents/Photo");
-        dirCandidates << QDir::home().filePath("Documents/Photos");
-        dirCandidates << QDir::home().filePath("Documents/photo");
-        dirCandidates << QDir::home().filePath("Documents/photos");
-        dirCandidates << QDir::home().filePath("Documents/Images");
-    }
-    dirCandidates << QDir::home().filePath("Documents");
-    dirCandidates << QDir::home().filePath("My Documents");
-    dirCandidates << QDir::homePath();
-    dirCandidates << QDir::currentPath();
-    dirCandidates << QDir::tempPath();
-    foreach (const QString &path, dirCandidates) {
-        if (QFileInfo(path).isWritable())
-            return QDir(path);
-    }
-    return QDir();
-}
-QString CameraBinSession::generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const
-{
-    int lastClip = 0;
-    foreach(QString fileName, dir.entryList(QStringList() << QString("%1*.%2").arg(prefix).arg(ext))) {
-        int imgNumber = fileName.midRef(prefix.length(), fileName.size()-prefix.length()-ext.length()-1).toInt();
-        lastClip = qMax(lastClip, imgNumber);
-    }
-    QString name = QString("%1%2.%3").arg(prefix)
-                     .arg(lastClip+1,
-                     4, //fieldWidth
-                     10,
-                     QLatin1Char('0'))
-                     .arg(ext);
-    return dir.absoluteFilePath(name);
-}
 void CameraBinSession::setDevice(const QString &device)
 {
     if (m_inputDevice != device) {
@@ -1122,18 +1069,16 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
 void CameraBinSession::recordVideo()
 {
-    m_recordingActive = true;
-    m_actualSink = m_sink;
-    if (m_actualSink.isEmpty()) {
-        QString ext = m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat());
-        m_actualSink = QUrl::fromLocalFile(generateFileName("clip_", defaultDir(QCamera::CaptureVideo), ext));
-    } else {
-        // Output location was rejected in setOutputlocation() if not a local file
-        m_actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(m_actualSink);
-    }
-    QString fileName = m_actualSink.toLocalFile();
-    g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(fileName).constData(), NULL);
+    const QString actualFileName = m_mediaStorageLocation.generateFileName(m_sink.isLocalFile() ? m_sink.toLocalFile()
+                                                                                                : m_sink.toString(),
+                                                                           QMediaStorageLocation::Movies,
+                                                                           QLatin1String("clip_"),
+                                                                           m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat()));
+    m_recordingActive = true;
+    m_actualSink = QUrl::fromLocalFile(actualFileName);
+    g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(actualFileName).constData(), NULL);
     g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
 }
@@ -1304,7 +1249,7 @@ static QPair<int,int> valueRange(const GValue *value, bool *continuous)
 static bool resolutionLessThan(const QSize &r1, const QSize &r2)
 {
-    return r1.width()*r1.height() < r2.width()*r2.height();
+    return qlonglong(r1.width()) * r1.height() < qlonglong(r2.width()) * r2.height();
 }
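
A small aside on the resolutionLessThan() change above (illustrative only; the sizes are hypothetical): GStreamer caps may report width/height bounds as large as INT_MAX, where the plain int product overflows, while widening one operand to qlonglong keeps the comparison well defined.

    #include <QSize>
    #include <QtGlobal>
    #include <QDebug>

    int main()
    {
        const QSize capsUpperBound(2147483647, 2147483647); // e.g. the top of an int range in caps
        const QSize hd(1920, 1080);

        // Widening one operand avoids 32-bit overflow in the area comparison.
        const bool lessThan = qlonglong(capsUpperBound.width()) * capsUpperBound.height()
                            < qlonglong(hd.width()) * hd.height();
        qDebug() << lessThan;                                // false
        return 0;
    }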

View File

@@ -46,6 +46,7 @@
 #include <private/qgstreamerbushelper_p.h>
 #include <private/qgstreamerbufferprobe_p.h>
+#include <private/qmediastoragelocation_p.h>
 #include "qcamera.h"
 QT_BEGIN_NAMESPACE
@@ -102,9 +103,6 @@ public:
     QUrl outputLocation() const;
     bool setOutputLocation(const QUrl& sink);
-    QDir defaultDir(QCamera::CaptureModes mode) const;
-    QString generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const;
     GstElement *buildCameraSource();
     GstElementFactory *sourceFactory() const { return m_sourceFactory; }
@@ -209,6 +207,7 @@ private:
     QString m_inputDevice;
     bool m_muted;
     bool m_busy;
+    QMediaStorageLocation m_mediaStorageLocation;
     QCamera::CaptureModes m_captureMode;
     QMap<QByteArray, QVariant> m_metaData;

View File

@@ -216,10 +216,10 @@ bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const
     if (qt_convertFormat(format, &wfx)) {
         // query only, do not open device
         if (mode == QAudio::AudioOutput) {
-            return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+            return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
                         WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
         } else { // AudioInput
-            return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+            return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
                         WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
         }
     }

View File

@@ -125,21 +125,13 @@ Q_GLOBAL_STATIC(QWinRTVideoRendererControlGlobal, g)
 class QWinRTVideoBuffer : public QAbstractVideoBuffer, public QOpenGLTexture
 {
 public:
-    QWinRTVideoBuffer()
+    QWinRTVideoBuffer(const QSize &size, TextureFormat format)
         : QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle)
         , QOpenGLTexture(QOpenGLTexture::Target2D)
     {
-    }
-    void addRef()
-    {
-        refCount.ref();
-    }
-    void release() Q_DECL_OVERRIDE
-    {
-        if (!refCount.deref())
-            delete this;
+        setSize(size.width(), size.height());
+        setFormat(format);
+        create();
     }
     MapMode mapMode() const Q_DECL_OVERRIDE
@@ -163,9 +155,6 @@ public:
     {
         return QVariant::fromValue(textureId());
     }
-private:
-    QAtomicInt refCount;
 };
 enum DirtyState {
@@ -189,7 +178,7 @@ public:
     EGLConfig eglConfig;
     EGLSurface eglSurface;
-    QWinRTVideoBuffer *videoBuffer;
+    QVideoFrame presentFrame;
     QThread renderThread;
     bool active;
@@ -224,8 +213,6 @@ QWinRTAbstractVideoRendererControl::QWinRTAbstractVideoRendererControl(const QSi
     d->eglSurface = EGL_NO_SURFACE;
     d->active = false;
-    d->videoBuffer = new QWinRTVideoBuffer;
     connect(&d->renderThread, &QThread::started,
             this, &QWinRTAbstractVideoRendererControl::syncAndRender,
             Qt::DirectConnection);
@@ -390,23 +377,19 @@ void QWinRTAbstractVideoRendererControl::present()
             return;
         }
-        d->videoBuffer->setFormat(QOpenGLTexture::RGBAFormat);
-        d->videoBuffer->setSize(d->format.frameWidth(), d->format.frameHeight());
-        if (!d->videoBuffer->isCreated())
-            d->videoBuffer->create();
+        QWinRTVideoBuffer *videoBuffer = new QWinRTVideoBuffer(d->format.frameSize(), QOpenGLTexture::RGBAFormat);
+        d->presentFrame = QVideoFrame(videoBuffer, d->format.frameSize(), d->format.pixelFormat());
         // bind the pbuffer surface to the texture
-        d->videoBuffer->bind();
+        videoBuffer->bind();
         eglBindTexImage(d->eglDisplay, d->eglSurface, EGL_BACK_BUFFER);
-        static_cast<QOpenGLTexture *>(d->videoBuffer)->release();
+        static_cast<QOpenGLTexture *>(videoBuffer)->release();
         d->dirtyState = NotDirty;
     }
     // Present the frame
-    d->videoBuffer->addRef();
-    QVideoFrame frame(d->videoBuffer, d->format.frameSize(), d->format.pixelFormat());
-    d->surface->present(frame);
+    d->surface->present(d->presentFrame);
 }
 QT_END_NAMESPACE

View File

@@ -255,6 +255,12 @@ void QDeclarativeVideoRendererBackend::updateGeometry()
         m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
         m_sourceTextureRect.setBottom(top);
     }
+    if (videoSurface()->surfaceFormat().property("mirrored").toBool()) {
+        qreal left = m_sourceTextureRect.left();
+        m_sourceTextureRect.setLeft(m_sourceTextureRect.right());
+        m_sourceTextureRect.setRight(left);
+    }
 }
 QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,

View File

@@ -145,6 +145,10 @@ public:
     virtual int compare(const QSGMaterial *other) const {
         const QSGVideoMaterial_RGB *m = static_cast<const QSGVideoMaterial_RGB *>(other);
+        if (!m_textureId)
+            return 1;
         return m_textureId - m->m_textureId;
     }

View File

@@ -138,6 +138,10 @@ public:
     virtual int compare(const QSGMaterial *other) const {
         const QSGVideoMaterial_Texture *m = static_cast<const QSGVideoMaterial_Texture *>(other);
+        if (!m_textureId)
+            return 1;
         int diff = m_textureId - m->m_textureId;
         if (diff)
             return diff;

View File

@@ -171,6 +171,9 @@ public:
     virtual int compare(const QSGMaterial *other) const {
         const QSGVideoMaterial_YUV *m = static_cast<const QSGVideoMaterial_YUV *>(other);
+        if (!m_textureIds[0])
+            return 1;
         int d = m_textureIds[0] - m->m_textureIds[0];
         if (d)
             return d;

View File

@@ -521,7 +521,8 @@ void tst_QVideoSurfaceFormat::staticPropertyNames()
     QVERIFY(propertyNames.contains("pixelAspectRatio"));
     QVERIFY(propertyNames.contains("yCbCrColorSpace"));
     QVERIFY(propertyNames.contains("sizeHint"));
-    QCOMPARE(propertyNames.count(), 10);
+    QVERIFY(propertyNames.contains("mirrored"));
+    QCOMPARE(propertyNames.count(), 11);
 }
 void tst_QVideoSurfaceFormat::dynamicProperty()