Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: I957b44c06fca7a3f552edc18f286de043d31e42d
Author: Frederik Gladhorn
Date:   2015-08-09 15:27:16 +02:00
24 changed files with 118 additions and 141 deletions

View File

@@ -66,7 +66,7 @@ QAudioSystemFactoryInterface::~QAudioSystemFactoryInterface()
 \sa QAbstractAudioDeviceInfo, QAbstractAudioOutput, QAbstractAudioInput
-Qt supports win32, linux(alsa) and Mac OS X standard (builtin to the
+Qt supports win32, linux(alsa) and OS X standard (builtin to the
 QtMultimedia library at compile time).
 You can support other backends other than these predefined ones by

View File

@@ -32,6 +32,6 @@
 \brief Demonstrates the multimedia functionality provided by Qt.
 The \l{Qt Multimedia} module provides low-level audio support on Linux,
-Windows and Mac OS X. It also provides audio plugin API to allow developers
+Windows and OS X. It also provides audio plugin API to allow developers
 implement their own audio support for custom devices and platforms.
 */

View File

@@ -92,7 +92,7 @@ int QAbstractVideoBufferPrivate::map(
 \value NoHandle The buffer has no handle, its data can only be accessed by mapping the buffer.
 \value GLTextureHandle The handle of the buffer is an OpenGL texture ID.
 \value XvShmImageHandle The handle contains pointer to shared memory XVideo image.
-\value CoreImageHandle The handle contains pointer to Mac OS X CIImage.
+\value CoreImageHandle The handle contains pointer to OS X CIImage.
 \value QPixmapHandle The handle of the buffer is a QPixmap.
 \value EGLImageHandle The handle of the buffer is an EGLImageKHR.
 \value UserHandle Start value for user defined handle types.
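
For context on the handle types documented above, a minimal illustrative sketch (not part of this change; consumeFrame() is a hypothetical helper) of how a sink might branch on a frame's handle type:

    #include <QVideoFrame>
    #include <QAbstractVideoBuffer>

    // GLTextureHandle frames expose a texture ID via handle(); NoHandle frames
    // must be mapped before their bytes are readable on the CPU.
    static void consumeFrame(QVideoFrame &frame)
    {
        if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
            const uint textureId = frame.handle().toUInt();  // OpenGL texture ID
            Q_UNUSED(textureId);
        } else if (frame.handleType() == QAbstractVideoBuffer::NoHandle
                   && frame.map(QAbstractVideoBuffer::ReadOnly)) {
            const uchar *pixels = frame.bits();              // CPU-visible pixel data
            Q_UNUSED(pixels);
            frame.unmap();
        }
    }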

View File

@@ -61,6 +61,7 @@ public:
 , pixelAspectRatio(1, 1)
 , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
 , frameRate(0.0)
+, mirrored(false)
 {
 }
@@ -76,6 +77,7 @@ public:
 , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
 , viewport(QPoint(0, 0), size)
 , frameRate(0.0)
+, mirrored(false)
 {
 }
@@ -89,6 +91,7 @@ public:
 , ycbcrColorSpace(other.ycbcrColorSpace)
 , viewport(other.viewport)
 , frameRate(other.frameRate)
+, mirrored(other.mirrored)
 , propertyNames(other.propertyNames)
 , propertyValues(other.propertyValues)
 {
@@ -104,6 +107,7 @@ public:
 && viewport == other.viewport
 && frameRatesEqual(frameRate, other.frameRate)
 && ycbcrColorSpace == other.ycbcrColorSpace
+&& mirrored == other.mirrored
 && propertyNames.count() == other.propertyNames.count()) {
 for (int i = 0; i < propertyNames.count(); ++i) {
 int j = other.propertyNames.indexOf(propertyNames.at(i));
@@ -130,6 +134,7 @@ public:
 QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace;
 QRect viewport;
 qreal frameRate;
+bool mirrored;
 QList<QByteArray> propertyNames;
 QList<QVariant> propertyValues;
 };
@@ -468,7 +473,8 @@ QList<QByteArray> QVideoSurfaceFormat::propertyNames() const
<< "frameRate"
<< "pixelAspectRatio"
<< "sizeHint"
<< "yCbCrColorSpace")
<< "yCbCrColorSpace"
<< "mirrored")
+ d->propertyNames;
}
@@ -499,6 +505,8 @@ QVariant QVideoSurfaceFormat::property(const char *name) const
 return sizeHint();
 } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
 return qVariantFromValue(d->ycbcrColorSpace);
+} else if (qstrcmp(name, "mirrored") == 0) {
+return d->mirrored;
 } else {
 int id = 0;
 for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}
@@ -547,6 +555,9 @@ void QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value)
 } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
 if (value.canConvert<YCbCrColorSpace>())
 d->ycbcrColorSpace = qvariant_cast<YCbCrColorSpace>(value);
+} else if (qstrcmp(name, "mirrored") == 0) {
+if (value.canConvert<bool>())
+d->mirrored = qvariant_cast<bool>(value);
 } else {
 int id = 0;
 for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}
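
Together, the hunks above expose mirroring as a dynamic property of QVideoSurfaceFormat; a minimal usage sketch (illustrative, not from this change):

    #include <QVideoSurfaceFormat>

    // A producer (e.g. a front-camera backend) flags the stream as mirrored;
    // a sink reads the flag back before rendering.
    void tagMirrored()
    {
        QVideoSurfaceFormat format(QSize(1280, 720), QVideoFrame::Format_NV12);
        format.setProperty("mirrored", true);

        const bool mirrored = format.property("mirrored").toBool();  // true
        Q_UNUSED(mirrored);
    }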

View File

@@ -86,11 +86,13 @@ private:
 QSize m_imageSize;
 QImage::Format m_imageFormat;
 QVideoSurfaceFormat::Direction m_scanLineDirection;
+bool m_mirrored;
 };
 QVideoSurfaceGenericPainter::QVideoSurfaceGenericPainter()
 : m_imageFormat(QImage::Format_Invalid)
 , m_scanLineDirection(QVideoSurfaceFormat::TopToBottom)
+, m_mirrored(false)
 {
 m_imagePixelFormats << QVideoFrame::Format_RGB32;
@@ -137,6 +139,7 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::start(const QVideoSurf
 m_imageFormat = QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat());
 m_imageSize = format.frameSize();
 m_scanLineDirection = format.scanLineDirection();
+m_mirrored = format.property("mirrored").toBool();
 const QAbstractVideoBuffer::HandleType t = format.handleType();
 if (t == QAbstractVideoBuffer::NoHandle) {
@@ -183,17 +186,22 @@ QAbstractVideoSurface::Error QVideoSurfaceGenericPainter::paint(
 m_frame.bytesPerLine(),
 m_imageFormat);
+const QTransform oldTransform = painter->transform();
+QTransform transform = oldTransform;
+QRectF targetRect = target;
 if (m_scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
-const QTransform oldTransform = painter->transform();
-painter->scale(1, -1);
-painter->translate(0, -target.bottom());
-painter->drawImage(
-QRectF(target.x(), 0, target.width(), target.height()), image, source);
-painter->setTransform(oldTransform);
-} else {
-painter->drawImage(target, image, source);
+transform.scale(1, -1);
+transform.translate(0, -target.bottom());
+targetRect.setY(0);
 }
+if (m_mirrored) {
+transform.scale(-1, 1);
+transform.translate(-target.right(), 0);
+targetRect.setX(0);
+}
+painter->setTransform(transform);
+painter->drawImage(targetRect, image, source);
+painter->setTransform(oldTransform);
 m_frame.unmap();
 } else if (m_frame.isValid()) {
@@ -281,6 +289,7 @@ protected:
 QGLContext *m_context;
 QAbstractVideoBuffer::HandleType m_handleType;
 QVideoSurfaceFormat::Direction m_scanLineDirection;
+bool m_mirrored;
 QVideoSurfaceFormat::YCbCrColorSpace m_colorSpace;
 GLenum m_textureFormat;
 GLuint m_textureInternalFormat;
@@ -299,6 +308,7 @@ QVideoSurfaceGLPainter::QVideoSurfaceGLPainter(QGLContext *context)
 : m_context(context)
 , m_handleType(QAbstractVideoBuffer::NoHandle)
 , m_scanLineDirection(QVideoSurfaceFormat::TopToBottom)
+, m_mirrored(false)
 , m_colorSpace(QVideoSurfaceFormat::YCbCr_BT601)
 , m_textureFormat(0)
 , m_textureInternalFormat(0)
@@ -829,6 +839,7 @@ QAbstractVideoSurface::Error QVideoSurfaceArbFpPainter::start(const QVideoSurfac
 } else {
 m_handleType = format.handleType();
 m_scanLineDirection = format.scanLineDirection();
+m_mirrored = format.property("mirrored").toBool();
 m_frameSize = format.frameSize();
 m_colorSpace = format.yCbCrColorSpace();
@@ -878,8 +889,10 @@ QAbstractVideoSurface::Error QVideoSurfaceArbFpPainter::paint(
 if (scissorTestEnabled)
 glEnable(GL_SCISSOR_TEST);
-const float txLeft = source.left() / m_frameSize.width();
-const float txRight = source.right() / m_frameSize.width();
+const float txLeft = m_mirrored ? source.right() / m_frameSize.width()
+: source.left() / m_frameSize.width();
+const float txRight = m_mirrored ? source.left() / m_frameSize.width()
+: source.right() / m_frameSize.width();
 const float txTop = m_scanLineDirection == QVideoSurfaceFormat::TopToBottom
 ? source.top() / m_frameSize.height()
 : source.bottom() / m_frameSize.height();
@@ -1188,6 +1201,7 @@ QAbstractVideoSurface::Error QVideoSurfaceGlslPainter::start(const QVideoSurface
 } else {
 m_handleType = format.handleType();
 m_scanLineDirection = format.scanLineDirection();
+m_mirrored = format.property("mirrored").toBool();
 m_frameSize = format.frameSize();
 m_colorSpace = format.yCbCrColorSpace();
@@ -1276,8 +1290,10 @@ QAbstractVideoSurface::Error QVideoSurfaceGlslPainter::paint(
 GLfloat(target.right() + 1), GLfloat(target.top())
 };
-const GLfloat txLeft = source.left() / m_frameSize.width();
-const GLfloat txRight = source.right() / m_frameSize.width();
+const GLfloat txLeft = m_mirrored ? source.right() / m_frameSize.width()
+: source.left() / m_frameSize.width();
+const GLfloat txRight = m_mirrored ? source.left() / m_frameSize.width()
+: source.right() / m_frameSize.width();
 const GLfloat txTop = m_scanLineDirection == QVideoSurfaceFormat::TopToBottom
 ? source.top() / m_frameSize.height()
 : source.bottom() / m_frameSize.height();

View File

@@ -65,8 +65,6 @@ public:
 AVCaptureDevice *createCaptureDevice();
 private:
-AVFCameraService *m_service;
 QString m_activeInput;
-bool m_dirty;
 QString m_defaultDevice;

View File

@@ -41,9 +41,9 @@ QT_USE_NAMESPACE
 AVFAudioInputSelectorControl::AVFAudioInputSelectorControl(AVFCameraService *service, QObject *parent)
 : QAudioInputSelectorControl(parent)
-, m_service(service)
-, m_dirty(true)
 {
+Q_UNUSED(service);
 NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
 for (AVCaptureDevice *device in videoDevices) {
 QString deviceId = QString::fromUtf8([[device uniqueID] UTF8String]);

View File

@@ -65,7 +65,6 @@ private Q_SLOTS:
 void updateStatus();
 private:
-AVFCameraService *m_service;
 AVFCameraSession *m_session;
 QCamera::State m_state;

View File

@@ -40,12 +40,12 @@ QT_USE_NAMESPACE
 AVFCameraControl::AVFCameraControl(AVFCameraService *service, QObject *parent)
 : QCameraControl(parent)
-, m_service(service)
 , m_session(service->session())
 , m_state(QCamera::UnloadedState)
 , m_lastStatus(QCamera::UnloadedStatus)
 , m_captureMode(QCamera::CaptureStillImage)
 {
+Q_UNUSED(service);
 connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus()));
 }

View File

@@ -55,7 +55,6 @@ public:
 QStringList availableMetaData() const;
 private:
-AVFCameraService *m_service;
 QMap<QString, QVariant> m_tags;
 };

View File

@@ -40,8 +40,9 @@ QT_USE_NAMESPACE
 //metadata support is not implemented yet
 AVFCameraMetaDataControl::AVFCameraMetaDataControl(AVFCameraService *service, QObject *parent)
-:QMetaDataWriterControl(parent), m_service(service)
+:QMetaDataWriterControl(parent)
 {
+Q_UNUSED(service);
 }
 AVFCameraMetaDataControl::~AVFCameraMetaDataControl()
AVFCameraMetaDataControl::~AVFCameraMetaDataControl()

View File

@@ -66,7 +66,6 @@ private Q_SLOTS:
 void updateReadyStatus();
 private:
-AVFCameraService *m_service;
 AVFCameraSession *m_session;
 AVFCameraControl *m_cameraControl;
 bool m_ready;

View File

@@ -46,13 +46,13 @@ QT_USE_NAMESPACE
 AVFImageCaptureControl::AVFImageCaptureControl(AVFCameraService *service, QObject *parent)
 : QCameraImageCaptureControl(parent)
-, m_service(service)
 , m_session(service->session())
 , m_cameraControl(service->cameraControl())
 , m_ready(false)
 , m_lastCaptureId(0)
 , m_videoConnection(nil)
 {
+Q_UNUSED(service);
 m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
 NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:

View File

@@ -33,7 +33,6 @@ HEADERS += \
 avfaudioinputselectorcontrol.h \
 avfcamerainfocontrol.h \
 avfmediavideoprobecontrol.h \
-avfcamerainfocontrol.h \
 avfcamerarenderercontrol.h \
 avfcameradevicecontrol.h \
 avfcamerafocuscontrol.h \
@@ -54,7 +53,6 @@ OBJECTIVE_SOURCES += \
 avfaudioinputselectorcontrol.mm \
 avfcamerainfocontrol.mm \
 avfmediavideoprobecontrol.mm \
-avfcamerainfocontrol.mm \
 avfcameradevicecontrol.mm \
 avfcamerarenderercontrol.mm \
 avfcamerafocuscontrol.mm \

View File

@@ -48,13 +48,15 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session)
 m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
 {
 #ifdef HAVE_GST_PHOTOGRAPHY
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_AUTO] = QCameraImageProcessing::WhiteBalanceAuto;
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT] = QCameraImageProcessing::WhiteBalanceSunlight;
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_CLOUDY] = QCameraImageProcessing::WhiteBalanceCloudy;
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_SUNSET] = QCameraImageProcessing::WhiteBalanceSunset;
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN] = QCameraImageProcessing::WhiteBalanceTungsten;
-m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT] = QCameraImageProcessing::WhiteBalanceFluorescent;
-unlockWhiteBalance();
+if (m_session->photography()) {
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_AUTO] = QCameraImageProcessing::WhiteBalanceAuto;
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT] = QCameraImageProcessing::WhiteBalanceSunlight;
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_CLOUDY] = QCameraImageProcessing::WhiteBalanceCloudy;
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_SUNSET] = QCameraImageProcessing::WhiteBalanceSunset;
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN] = QCameraImageProcessing::WhiteBalanceTungsten;
+m_mappedWbValues[GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT] = QCameraImageProcessing::WhiteBalanceFluorescent;
+unlockWhiteBalance();
+}
 #if GST_CHECK_VERSION(1, 0, 0)
 m_filterMap.insert(QCameraImageProcessing::ColorFilterNone, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL);
@@ -188,10 +190,19 @@ bool CameraBinImageProcessing::isWhiteBalanceModeSupported(QCameraImageProcessin
 bool CameraBinImageProcessing::isParameterSupported(QCameraImageProcessingControl::ProcessingParameter parameter) const
 {
-return parameter == QCameraImageProcessingControl::Contrast
+#ifdef HAVE_GST_PHOTOGRAPHY
+if (parameter == QCameraImageProcessingControl::WhiteBalancePreset
+|| parameter == QCameraImageProcessingControl::ColorFilter)
+return m_session->photography();
+#endif
+if (parameter == QCameraImageProcessingControl::Contrast
 || parameter == QCameraImageProcessingControl::Brightness
-|| parameter == QCameraImageProcessingControl::Saturation
-|| parameter == QCameraImageProcessingControl::WhiteBalancePreset;
+|| parameter == QCameraImageProcessingControl::Saturation) {
+return GST_IS_COLOR_BALANCE(m_session->cameraBin());
+}
+return false;
 }
bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingControl::ProcessingParameter parameter, const QVariant &value) const
@@ -200,7 +211,7 @@ bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingC
 case ContrastAdjustment:
 case BrightnessAdjustment:
 case SaturationAdjustment:
-return qAbs(value.toReal()) <= 1.0;
+return GST_IS_COLOR_BALANCE(m_session->cameraBin()) && qAbs(value.toReal()) <= 1.0;
 case WhiteBalancePreset:
 return isWhiteBalanceModeSupported(value.value<QCameraImageProcessing::WhiteBalanceMode>());
 case ColorFilter: {

View File

@@ -542,9 +542,10 @@ GstElement *CameraBinSession::buildCameraSource()
 void CameraBinSession::captureImage(int requestId, const QString &fileName)
 {
-QString actualFileName = fileName;
-if (actualFileName.isEmpty())
-actualFileName = generateFileName("img_", defaultDir(QCamera::CaptureStillImage), "jpg");
+const QString actualFileName = m_mediaStorageLocation.generateFileName(fileName,
+QMediaStorageLocation::Pictures,
+QLatin1String("IMG_"),
+QLatin1String("jpg"));
 m_requestId = requestId;
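
QMediaStorageLocation is a private Qt helper; a rough sketch of the call pattern used above, assuming an empty requested name falls back to a generated file in the default Pictures directory (defaultStillImagePath() is a hypothetical wrapper, behaviour inferred from this diff):

    #include <private/qmediastoragelocation_p.h>
    #include <QString>

    QString defaultStillImagePath()
    {
        QMediaStorageLocation storage;
        // An empty requested name lets the helper pick the directory and build
        // a unique IMG_*.jpg name, mirroring the call in captureImage().
        return storage.generateFileName(QString(),
                                        QMediaStorageLocation::Pictures,
                                        QLatin1String("IMG_"),
                                        QLatin1String("jpg"));
    }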
@@ -592,60 +593,6 @@ bool CameraBinSession::setOutputLocation(const QUrl& sink)
 return true;
 }
-QDir CameraBinSession::defaultDir(QCamera::CaptureModes mode) const
-{
-QStringList dirCandidates;
-#if defined(Q_WS_MAEMO_6)
-dirCandidates << QLatin1String("/home/user/MyDocs/DCIM");
-dirCandidates << QLatin1String("/home/user/MyDocs/");
-#endif
-if (mode == QCamera::CaptureVideo) {
-dirCandidates << QStandardPaths::writableLocation(QStandardPaths::MoviesLocation);
-dirCandidates << QDir::home().filePath("Documents/Video");
-dirCandidates << QDir::home().filePath("Documents/Videos");
-} else {
-dirCandidates << QStandardPaths::writableLocation(QStandardPaths::PicturesLocation);
-dirCandidates << QDir::home().filePath("Documents/Photo");
-dirCandidates << QDir::home().filePath("Documents/Photos");
-dirCandidates << QDir::home().filePath("Documents/photo");
-dirCandidates << QDir::home().filePath("Documents/photos");
-dirCandidates << QDir::home().filePath("Documents/Images");
-}
-dirCandidates << QDir::home().filePath("Documents");
-dirCandidates << QDir::home().filePath("My Documents");
-dirCandidates << QDir::homePath();
-dirCandidates << QDir::currentPath();
-dirCandidates << QDir::tempPath();
-foreach (const QString &path, dirCandidates) {
-if (QFileInfo(path).isWritable())
-return QDir(path);
-}
-return QDir();
-}
-QString CameraBinSession::generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const
-{
-int lastClip = 0;
-foreach(QString fileName, dir.entryList(QStringList() << QString("%1*.%2").arg(prefix).arg(ext))) {
-int imgNumber = fileName.midRef(prefix.length(), fileName.size()-prefix.length()-ext.length()-1).toInt();
-lastClip = qMax(lastClip, imgNumber);
-}
-QString name = QString("%1%2.%3").arg(prefix)
-.arg(lastClip+1,
-4, //fieldWidth
-10,
-QLatin1Char('0'))
-.arg(ext);
-return dir.absoluteFilePath(name);
-}
 void CameraBinSession::setDevice(const QString &device)
 {
 if (m_inputDevice != device) {
@@ -1122,18 +1069,16 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
 void CameraBinSession::recordVideo()
 {
-m_recordingActive = true;
-m_actualSink = m_sink;
-if (m_actualSink.isEmpty()) {
-QString ext = m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat());
-m_actualSink = QUrl::fromLocalFile(generateFileName("clip_", defaultDir(QCamera::CaptureVideo), ext));
-} else {
-// Output location was rejected in setOutputlocation() if not a local file
-m_actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(m_actualSink);
-}
+const QString actualFileName = m_mediaStorageLocation.generateFileName(m_sink.isLocalFile() ? m_sink.toLocalFile()
+: m_sink.toString(),
+QMediaStorageLocation::Movies,
+QLatin1String("clip_"),
+m_mediaContainerControl->suggestedFileExtension(m_mediaContainerControl->actualContainerFormat()));
-QString fileName = m_actualSink.toLocalFile();
-g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(fileName).constData(), NULL);
+m_recordingActive = true;
+m_actualSink = QUrl::fromLocalFile(actualFileName);
+g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(actualFileName).constData(), NULL);
 g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
 }
@@ -1304,7 +1249,7 @@ static QPair<int,int> valueRange(const GValue *value, bool *continuous)
 static bool resolutionLessThan(const QSize &r1, const QSize &r2)
 {
-return r1.width()*r1.height() < r2.width()*r2.height();
+return qlonglong(r1.width()) * r1.height() < qlonglong(r2.width()) * r2.height();
 }
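
The qlonglong casts keep the pixel-count products in 64-bit arithmetic; an illustrative worked example (demoOverflowSafeCompare() is hypothetical):

    #include <QSize>
    #include <QtGlobal>

    // 46341 * 46341 = 2147488281 overflows a signed 32-bit int, so the old
    // int-only comparison was undefined behaviour for such modes; casting one
    // factor to qlonglong promotes the whole product to 64 bits.
    bool demoOverflowSafeCompare()
    {
        const QSize a(46341, 46341);
        const QSize b(8192, 8192);
        return qlonglong(a.width()) * a.height()
             < qlonglong(b.width()) * b.height();   // false, computed without overflow
    }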

View File

@@ -46,6 +46,7 @@
 #include <private/qgstreamerbushelper_p.h>
 #include <private/qgstreamerbufferprobe_p.h>
+#include <private/qmediastoragelocation_p.h>
 #include "qcamera.h"
 QT_BEGIN_NAMESPACE
@@ -102,9 +103,6 @@ public:
 QUrl outputLocation() const;
 bool setOutputLocation(const QUrl& sink);
-QDir defaultDir(QCamera::CaptureModes mode) const;
-QString generateFileName(const QString &prefix, const QDir &dir, const QString &ext) const;
 GstElement *buildCameraSource();
 GstElementFactory *sourceFactory() const { return m_sourceFactory; }
@@ -209,6 +207,7 @@ private:
 QString m_inputDevice;
 bool m_muted;
 bool m_busy;
+QMediaStorageLocation m_mediaStorageLocation;
 QCamera::CaptureModes m_captureMode;
 QMap<QByteArray, QVariant> m_metaData;

View File

@@ -216,10 +216,10 @@ bool QWindowsAudioDeviceInfo::testSettings(const QAudioFormat& format) const
 if (qt_convertFormat(format, &wfx)) {
 // query only, do not open device
 if (mode == QAudio::AudioOutput) {
-return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+return (waveOutOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
 WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
 } else { // AudioInput
-return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, NULL, NULL,
+return (waveInOpen(NULL, UINT_PTR(devId), &wfx.Format, 0, 0,
 WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR);
 }
 }
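
The NULL-to-0 change matters because the fourth and fifth arguments of waveOutOpen()/waveInOpen() are DWORD_PTR callback and instance values rather than pointers; a standalone query sketch with assumed format values:

    #include <windows.h>
    #include <mmsystem.h>

    // dwCallback and dwInstance are DWORD_PTR integers, so 0 is the correct
    // "no callback" value; WAVE_FORMAT_QUERY checks support without actually
    // opening the device.
    bool canPlayStereo16bit44k()
    {
        WAVEFORMATEX fmt = {};
        fmt.wFormatTag = WAVE_FORMAT_PCM;
        fmt.nChannels = 2;
        fmt.nSamplesPerSec = 44100;
        fmt.wBitsPerSample = 16;
        fmt.nBlockAlign = fmt.nChannels * fmt.wBitsPerSample / 8;
        fmt.nAvgBytesPerSec = fmt.nSamplesPerSec * fmt.nBlockAlign;

        return waveOutOpen(NULL, WAVE_MAPPER, &fmt,
                           0 /*dwCallback*/, 0 /*dwInstance*/,
                           WAVE_FORMAT_QUERY) == MMSYSERR_NOERROR;
    }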

View File

@@ -125,21 +125,13 @@ Q_GLOBAL_STATIC(QWinRTVideoRendererControlGlobal, g)
 class QWinRTVideoBuffer : public QAbstractVideoBuffer, public QOpenGLTexture
 {
 public:
-QWinRTVideoBuffer()
+QWinRTVideoBuffer(const QSize &size, TextureFormat format)
 : QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle)
 , QOpenGLTexture(QOpenGLTexture::Target2D)
 {
-}
-void addRef()
-{
-refCount.ref();
-}
-void release() Q_DECL_OVERRIDE
-{
-if (!refCount.deref())
-delete this;
+setSize(size.width(), size.height());
+setFormat(format);
+create();
 }
MapMode mapMode() const Q_DECL_OVERRIDE
@@ -163,9 +155,6 @@ public:
 {
 return QVariant::fromValue(textureId());
 }
-private:
-QAtomicInt refCount;
 };
 enum DirtyState {
@@ -189,7 +178,7 @@ public:
 EGLConfig eglConfig;
 EGLSurface eglSurface;
-QWinRTVideoBuffer *videoBuffer;
+QVideoFrame presentFrame;
 QThread renderThread;
 bool active;
@@ -224,8 +213,6 @@ QWinRTAbstractVideoRendererControl::QWinRTAbstractVideoRendererControl(const QSi
 d->eglSurface = EGL_NO_SURFACE;
 d->active = false;
-d->videoBuffer = new QWinRTVideoBuffer;
 connect(&d->renderThread, &QThread::started,
 this, &QWinRTAbstractVideoRendererControl::syncAndRender,
 Qt::DirectConnection);
@@ -390,23 +377,19 @@ void QWinRTAbstractVideoRendererControl::present()
 return;
 }
-d->videoBuffer->setFormat(QOpenGLTexture::RGBAFormat);
-d->videoBuffer->setSize(d->format.frameWidth(), d->format.frameHeight());
-if (!d->videoBuffer->isCreated())
-d->videoBuffer->create();
+QWinRTVideoBuffer *videoBuffer = new QWinRTVideoBuffer(d->format.frameSize(), QOpenGLTexture::RGBAFormat);
+d->presentFrame = QVideoFrame(videoBuffer, d->format.frameSize(), d->format.pixelFormat());
 // bind the pbuffer surface to the texture
-d->videoBuffer->bind();
+videoBuffer->bind();
 eglBindTexImage(d->eglDisplay, d->eglSurface, EGL_BACK_BUFFER);
-static_cast<QOpenGLTexture *>(d->videoBuffer)->release();
+static_cast<QOpenGLTexture *>(videoBuffer)->release();
 d->dirtyState = NotDirty;
 }
 // Present the frame
-d->videoBuffer->addRef();
-QVideoFrame frame(d->videoBuffer, d->format.frameSize(), d->format.pixelFormat());
-d->surface->present(frame);
+d->surface->present(d->presentFrame);
 }
QT_END_NAMESPACE

View File

@@ -255,6 +255,12 @@ void QDeclarativeVideoRendererBackend::updateGeometry()
 m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
 m_sourceTextureRect.setBottom(top);
 }
+if (videoSurface()->surfaceFormat().property("mirrored").toBool()) {
+qreal left = m_sourceTextureRect.left();
+m_sourceTextureRect.setLeft(m_sourceTextureRect.right());
+m_sourceTextureRect.setRight(left);
+}
 }
QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,

View File

@@ -145,6 +145,10 @@ public:
 virtual int compare(const QSGMaterial *other) const {
 const QSGVideoMaterial_RGB *m = static_cast<const QSGVideoMaterial_RGB *>(other);
+if (!m_textureId)
+return 1;
 return m_textureId - m->m_textureId;
 }

View File

@@ -138,6 +138,10 @@ public:
 virtual int compare(const QSGMaterial *other) const {
 const QSGVideoMaterial_Texture *m = static_cast<const QSGVideoMaterial_Texture *>(other);
+if (!m_textureId)
+return 1;
 int diff = m_textureId - m->m_textureId;
 if (diff)
 return diff;

View File

@@ -171,6 +171,9 @@ public:
 virtual int compare(const QSGMaterial *other) const {
 const QSGVideoMaterial_YUV *m = static_cast<const QSGVideoMaterial_YUV *>(other);
+if (!m_textureIds[0])
+return 1;
 int d = m_textureIds[0] - m->m_textureIds[0];
 if (d)
 return d;

View File

@@ -521,7 +521,8 @@ void tst_QVideoSurfaceFormat::staticPropertyNames()
QVERIFY(propertyNames.contains("pixelAspectRatio"));
QVERIFY(propertyNames.contains("yCbCrColorSpace"));
QVERIFY(propertyNames.contains("sizeHint"));
QCOMPARE(propertyNames.count(), 10);
QVERIFY(propertyNames.contains("mirrored"));
QCOMPARE(propertyNames.count(), 11);
}
void tst_QVideoSurfaceFormat::dynamicProperty()