Merge remote-tracking branch 'origin/5.5' into dev
Change-Id: I957b44c06fca7a3f552edc18f286de043d31e42d
@@ -65,8 +65,6 @@ public:
     AVCaptureDevice *createCaptureDevice();
 
 private:
-    AVFCameraService *m_service;
-
     QString m_activeInput;
     bool m_dirty;
     QString m_defaultDevice;
@@ -41,9 +41,9 @@ QT_USE_NAMESPACE
 
 AVFAudioInputSelectorControl::AVFAudioInputSelectorControl(AVFCameraService *service, QObject *parent)
    : QAudioInputSelectorControl(parent)
-   , m_service(service)
    , m_dirty(true)
 {
+    Q_UNUSED(service);
     NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
     for (AVCaptureDevice *device in videoDevices) {
         QString deviceId = QString::fromUtf8([[device uniqueID] UTF8String]);
@@ -65,7 +65,6 @@ private Q_SLOTS:
     void updateStatus();
 
 private:
-    AVFCameraService *m_service;
     AVFCameraSession *m_session;
 
     QCamera::State m_state;
@@ -40,12 +40,12 @@ QT_USE_NAMESPACE
 
 AVFCameraControl::AVFCameraControl(AVFCameraService *service, QObject *parent)
    : QCameraControl(parent)
-   , m_service(service)
    , m_session(service->session())
    , m_state(QCamera::UnloadedState)
    , m_lastStatus(QCamera::UnloadedStatus)
    , m_captureMode(QCamera::CaptureStillImage)
 {
+    Q_UNUSED(service);
     connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus()));
 }
 
@@ -55,7 +55,6 @@ public:
     QStringList availableMetaData() const;
 
 private:
-    AVFCameraService *m_service;
     QMap<QString, QVariant> m_tags;
 };
 
@@ -40,8 +40,9 @@ QT_USE_NAMESPACE
 //metadata support is not implemented yet
 
 AVFCameraMetaDataControl::AVFCameraMetaDataControl(AVFCameraService *service, QObject *parent)
-    :QMetaDataWriterControl(parent), m_service(service)
+    :QMetaDataWriterControl(parent)
 {
+    Q_UNUSED(service);
 }
 
 AVFCameraMetaDataControl::~AVFCameraMetaDataControl()
@@ -66,7 +66,6 @@ private Q_SLOTS:
     void updateReadyStatus();
 
 private:
-    AVFCameraService *m_service;
     AVFCameraSession *m_session;
     AVFCameraControl *m_cameraControl;
     bool m_ready;
@@ -46,13 +46,13 @@ QT_USE_NAMESPACE
 
 AVFImageCaptureControl::AVFImageCaptureControl(AVFCameraService *service, QObject *parent)
    : QCameraImageCaptureControl(parent)
-   , m_service(service)
    , m_session(service->session())
    , m_cameraControl(service->cameraControl())
    , m_ready(false)
    , m_lastCaptureId(0)
    , m_videoConnection(nil)
 {
+    Q_UNUSED(service);
     m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
 
     NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
@@ -33,7 +33,6 @@ HEADERS += \
     avfaudioinputselectorcontrol.h \
     avfcamerainfocontrol.h \
     avfmediavideoprobecontrol.h \
-    avfcamerainfocontrol.h \
     avfcamerarenderercontrol.h \
     avfcameradevicecontrol.h \
     avfcamerafocuscontrol.h \
@@ -54,7 +53,6 @@ OBJECTIVE_SOURCES += \
     avfaudioinputselectorcontrol.mm \
     avfcamerainfocontrol.mm \
     avfmediavideoprobecontrol.mm \
-    avfcamerainfocontrol.mm \
     avfcameradevicecontrol.mm \
     avfcamerarenderercontrol.mm \
     avfcamerafocuscontrol.mm \
@@ -48,13 +48,15 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session)
     m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
 {
 #ifdef HAVE_GST_PHOTOGRAPHY
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_AUTO] = QCameraImageProcessing::WhiteBalanceAuto;
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT] = QCameraImageProcessing::WhiteBalanceSunlight;
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_CLOUDY] = QCameraImageProcessing::WhiteBalanceCloudy;
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_SUNSET] = QCameraImageProcessing::WhiteBalanceSunset;
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN] = QCameraImageProcessing::WhiteBalanceTungsten;
-    m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT] = QCameraImageProcessing::WhiteBalanceFluorescent;
-    unlockWhiteBalance();
+    if (m_session->photography()) {
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_AUTO] = QCameraImageProcessing::WhiteBalanceAuto;
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT] = QCameraImageProcessing::WhiteBalanceSunlight;
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_CLOUDY] = QCameraImageProcessing::WhiteBalanceCloudy;
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_SUNSET] = QCameraImageProcessing::WhiteBalanceSunset;
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN] = QCameraImageProcessing::WhiteBalanceTungsten;
+        m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT] = QCameraImageProcessing::WhiteBalanceFluorescent;
+        unlockWhiteBalance();
+    }
 
 #if GST_CHECK_VERSION(1, 0, 0)
     m_filterMap.insert(QCameraImageProcessing::ColorFilterNone, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL);
@@ -188,10 +190,19 @@ bool CameraBinImageProcessing::isWhiteBalanceModeSupported(QCameraImageProcessin
 
 bool CameraBinImageProcessing::isParameterSupported(QCameraImageProcessingControl::ProcessingParameter parameter) const
 {
-    return parameter == QCameraImageProcessingControl::Contrast
+#ifdef HAVE_GST_PHOTOGRAPHY
+    if (parameter == QCameraImageProcessingControl::WhiteBalancePreset
+            || parameter == QCameraImageProcessingControl::ColorFilter)
+        return m_session->photography();
+#endif
+
+    if (parameter == QCameraImageProcessingControl::Contrast
             || parameter == QCameraImageProcessingControl::Brightness
-            || parameter == QCameraImageProcessingControl::Saturation
-            || parameter == QCameraImageProcessingControl::WhiteBalancePreset;
+            || parameter == QCameraImageProcessingControl::Saturation) {
+        return GST_IS_COLOR_BALANCE(m_session->cameraBin());
+    }
+
+    return false;
 }
 
 bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingControl::ProcessingParameter parameter, const QVariant &value) const
@@ -200,7 +211,7 @@ bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingC
     case ContrastAdjustment:
     case BrightnessAdjustment:
     case SaturationAdjustment:
-        return qAbs(value.toReal()) <= 1.0;
+        return GST_IS_COLOR_BALANCE(m_session->cameraBin()) && qAbs(value.toReal()) <= 1.0;
     case WhiteBalancePreset:
         return isWhiteBalanceModeSupported(value.value<QCameraImageProcessing::WhiteBalanceMode>());
     case ColorFilter: {
@@ -542,9 +542,10 @@ GstElement *CameraBinSession::buildCameraSource()
 
 void CameraBinSession::captureImage(int requestId, const QString &fileName)
 {
-    QString actualFileName = fileName;
-    if (actualFileName.isEmpty())
-        actualFileName = generateFileName("img_", defaultDir(QCamera::CaptureStillImage), "jpg");
+    const QString actualFileName = m_mediaStorageLocation.generateFileName(fileName,
+                                                                           QMediaStorageLocation::Pictures,
+                                                                           QLatin1String("IMG_"),
+                                                                           QLatin1String("jpg"));
 
     m_requestId = requestId;
 
@@ -592,60 +593,6 @@ bool CameraBinSession::setOutputLocation(const QUrl& sink)
     return true;
 }
 
-QDir CameraBinSession::defaultDir(QCamera::CaptureModes mode) const
-{
-    QStringList dirCandidates;
-
-#if defined(Q_WS_MAEMO_6)
-    dirCandidates << QLatin1String("/home/user/MyDocs/DCIM");
-    dirCandidates << QLatin1String("/home/user/MyDocs/");
-#endif
-
-    if (mode == QCamera::CaptureVideo) {
-        dirCandidates << QStandardPaths::writableLocation(QStandardPaths::MoviesLocation);
-        dirCandidates << QDir::home().filePath("Documents/Video");
-        dirCandidates << QDir::home().filePath("Documents/Videos");
-    } else {
-        dirCandidates << QStandardPaths::writableLocation(QStandardPaths::PicturesLocation);
-        dirCandidates << QDir::home().filePath("Documents/Photo");
-        dirCandidates << QDir::home().filePath("Documents/Photos");
-        dirCandidates << QDir::home().filePath("Documents/photo");
-        dirCandidates << QDir::home().filePath("Documents/photos");
-        dirCandidates << QDir::home().filePath("Documents/Images");
-    }
-
-    dirCandidates << QDir::home().filePath("Documents");
-    dirCandidates << QDir::home().filePath("My Documents");
-    dirCandidates << QDir::homePath();
-    dirCandidates << QDir::currentPath();
-    dirCandidates << QDir::tempPath();
-
-    foreach (const QString &path, dirCandidates) {
-        if (QFileInfo(path).isWritable())
-            return QDir(path);
-    }
-
-    return QDir();
-}
-
-QString CameraBinSession::generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const
-{
-    int lastClip = 0;
-    foreach(QString fileName, dir.entryList(QStringList() << QString("%1*.%2").arg(prefix).arg(ext))) {
-        int imgNumber = fileName.midRef(prefix.length(), fileName.size()-prefix.length()-ext.length()-1).toInt();
-        lastClip = qMax(lastClip, imgNumber);
-    }
-
-    QString name = QString("%1%2.%3").arg(prefix)
-                                     .arg(lastClip+1,
-                                     4, //fieldWidth
-                                     10,
-                                     QLatin1Char('0'))
-                                     .arg(ext);
-
-    return dir.absoluteFilePath(name);
-}
-
 void CameraBinSession::setDevice(const QString &device)
 {
     if (m_inputDevice != device) {
@@ -1122,18 +1069,16 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
 
 void CameraBinSession::recordVideo()
 {
-    m_recordingActive = true;
-    m_actualSink = m_sink;
-    if (m_actualSink.isEmpty()) {
-        QString ext = m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat());
-        m_actualSink = QUrl::fromLocalFile(generateFileName("clip_", defaultDir(QCamera::CaptureVideo), ext));
-    } else {
-        // Output location was rejected in setOutputlocation() if not a local file
-        m_actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(m_actualSink);
-    }
+    const QString actualFileName = m_mediaStorageLocation.generateFileName(m_sink.isLocalFile() ? m_sink.toLocalFile()
+                                                                                                 : m_sink.toString(),
+                                                                           QMediaStorageLocation::Movies,
+                                                                           QLatin1String("clip_"),
+                                                                           m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat()));
 
-    QString fileName = m_actualSink.toLocalFile();
-    g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(fileName).constData(), NULL);
+    m_recordingActive = true;
+    m_actualSink = QUrl::fromLocalFile(actualFileName);
+
+    g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(actualFileName).constData(), NULL);
 
     g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
 }
@@ -1304,7 +1249,7 @@ static QPair<int,int> valueRange(const GValue *value, bool *continuous)
 
 static bool resolutionLessThan(const QSize &r1, const QSize &r2)
 {
-    return r1.width()*r1.height() < r2.width()*r2.height();
+    return qlonglong(r1.width()) * r1.height() < qlonglong(r2.width()) * r2.height();
 }
 
 
@@ -46,6 +46,7 @@
 
 #include <private/qgstreamerbushelper_p.h>
 #include <private/qgstreamerbufferprobe_p.h>
+#include <private/qmediastoragelocation_p.h>
 #include "qcamera.h"
 
 QT_BEGIN_NAMESPACE
@@ -102,9 +103,6 @@ public:
     QUrl outputLocation() const;
     bool setOutputLocation(const QUrl& sink);
 
-    QDir defaultDir(QCamera::CaptureModes mode) const;
-    QString generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const;
-
     GstElement *buildCameraSource();
     GstElementFactory *sourceFactory() const { return m_sourceFactory; }
 
@@ -209,6 +207,7 @@ private:
     QString m_inputDevice;
     bool m_muted;
     bool m_busy;
+    QMediaStorageLocation m_mediaStorageLocation;
 
     QCamera::CaptureModes m_captureMode;
     QMap<QByteArray, QVariant> m_metaData;
@@ -216,10 +216,10 @@ bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const
     if (qt_convertFormat(format, &wfx)) {
         // query only, do not open device
         if (mode == QAudio::AudioOutput) {
-            return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+            return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
                                 WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
         } else { // AudioInput
-            return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+            return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
                                WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
         }
     }
@@ -125,21 +125,13 @@ Q_GLOBAL_STATIC(QWinRTVideoRendererControlGlobal, g)
 class QWinRTVideoBuffer : public QAbstractVideoBuffer, public QOpenGLTexture
 {
 public:
-    QWinRTVideoBuffer()
+    QWinRTVideoBuffer(const QSize &size, TextureFormat format)
         : QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle)
         , QOpenGLTexture(QOpenGLTexture::Target2D)
     {
-    }
-
-    void addRef()
-    {
-        refCount.ref();
-    }
-
-    void release() Q_DECL_OVERRIDE
-    {
-        if (!refCount.deref())
-            delete this;
+        setSize(size.width(), size.height());
+        setFormat(format);
+        create();
     }
 
     MapMode mapMode() const Q_DECL_OVERRIDE
@@ -163,9 +155,6 @@ public:
     {
         return QVariant::fromValue(textureId());
     }
-
-private:
-    QAtomicInt refCount;
 };
 
 enum DirtyState {
@@ -189,7 +178,7 @@ public:
     EGLConfig eglConfig;
     EGLSurface eglSurface;
 
-    QWinRTVideoBuffer *videoBuffer;
+    QVideoFrame presentFrame;
 
     QThread renderThread;
     bool active;
@@ -224,8 +213,6 @@ QWinRTAbstractVideoRendererControl::QWinRTAbstractVideoRendererControl(const QSi
     d->eglSurface = EGL_NO_SURFACE;
     d->active = false;
 
-    d->videoBuffer = new QWinRTVideoBuffer;
-
     connect(&d->renderThread, &QThread::started,
             this, &QWinRTAbstractVideoRendererControl::syncAndRender,
             Qt::DirectConnection);
@@ -390,23 +377,19 @@ void QWinRTAbstractVideoRendererControl::present()
             return;
         }
 
-        d->videoBuffer->setFormat(QOpenGLTexture::RGBAFormat);
-        d->videoBuffer->setSize(d->format.frameWidth(), d->format.frameHeight());
-        if (!d->videoBuffer->isCreated())
-            d->videoBuffer->create();
+        QWinRTVideoBuffer *videoBuffer = new QWinRTVideoBuffer(d->format.frameSize(), QOpenGLTexture::RGBAFormat);
+        d->presentFrame = QVideoFrame(videoBuffer, d->format.frameSize(), d->format.pixelFormat());
 
         // bind the pbuffer surface to the texture
-        d->videoBuffer->bind();
+        videoBuffer->bind();
         eglBindTexImage(d->eglDisplay, d->eglSurface, EGL_BACK_BUFFER);
-        static_cast<QOpenGLTexture *>(d->videoBuffer)->release();
+        static_cast<QOpenGLTexture *>(videoBuffer)->release();
 
         d->dirtyState = NotDirty;
     }
 
-    // Present the frame
-    d->videoBuffer->addRef();
-    QVideoFrame frame(d->videoBuffer, d->format.frameSize(), d->format.pixelFormat());
-    d->surface->present(frame);
+    d->surface->present(d->presentFrame);
 }
 
 QT_END_NAMESPACE