Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: I0390795fa3d5f2a7ba098b514ee2da4a86e243a6
This commit is contained in:
Liang Qi
2015-05-11 10:01:29 +02:00
27 changed files with 502 additions and 357 deletions

View File

@@ -151,23 +151,19 @@ void QAndroidCaptureSession::setState(QMediaRecorder::State state)
stop();
break;
case QMediaRecorder::RecordingState:
if (!start())
return;
start();
break;
case QMediaRecorder::PausedState:
// Not supported by Android API
qWarning("QMediaRecorder::PausedState is not supported on Android");
return;
break;
}
m_state = state;
emit stateChanged(m_state);
}
bool QAndroidCaptureSession::start()
void QAndroidCaptureSession::start()
{
if (m_state == QMediaRecorder::RecordingState || m_status != QMediaRecorder::LoadedStatus)
return false;
return;
setStatus(QMediaRecorder::StartingStatus);
@@ -225,13 +221,13 @@ bool QAndroidCaptureSession::start()
if (!m_mediaRecorder->prepare()) {
emit error(QMediaRecorder::FormatError, QLatin1String("Unable to prepare the media recorder."));
restartViewfinder();
return false;
return;
}
if (!m_mediaRecorder->start()) {
emit error(QMediaRecorder::FormatError, QLatin1String("Unable to start the media recorder."));
restartViewfinder();
return false;
return;
}
m_elapsedTime.start();
@@ -241,22 +237,21 @@ bool QAndroidCaptureSession::start()
if (m_cameraSession)
m_cameraSession->setReadyForCapture(false);
return true;
m_state = QMediaRecorder::RecordingState;
emit stateChanged(m_state);
}
void QAndroidCaptureSession::stop(bool error)
{
if (m_state == QMediaRecorder::StoppedState)
if (m_state == QMediaRecorder::StoppedState || m_mediaRecorder == 0)
return;
setStatus(QMediaRecorder::FinalizingStatus);
m_mediaRecorder->stop();
m_notifyTimer.stop();
updateDuration();
m_elapsedTime.invalidate();
m_mediaRecorder->release();
delete m_mediaRecorder;
m_mediaRecorder = 0;
@@ -279,6 +274,9 @@ void QAndroidCaptureSession::stop(bool error)
m_actualOutputLocation = m_usedOutputLocation;
emit actualLocationChanged(m_actualOutputLocation);
}
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
}
void QAndroidCaptureSession::setStatus(QMediaRecorder::Status status)
@@ -541,8 +539,6 @@ void QAndroidCaptureSession::onError(int what, int extra)
Q_UNUSED(what)
Q_UNUSED(extra)
stop(true);
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
emit error(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
}

View File

@@ -130,7 +130,7 @@ private:
CaptureProfile getProfile(int id);
bool start();
void start();
void stop(bool error = false);
void setStatus(QMediaRecorder::Status status);

View File

@@ -69,6 +69,7 @@ private:
QString m_activeInput;
bool m_dirty;
QString m_defaultDevice;
QStringList m_devices;
QMap<QString, QString> m_deviceDescriptions;
};

View File

@@ -52,8 +52,11 @@ AVFAudioInputSelectorControl::AVFAudioInputSelectorControl(AVFCameraService *ser
QString::fromUtf8([[device localizedName] UTF8String]));
}
if (m_devices.size() > 0)
m_activeInput = m_devices.first();
AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
if (defaultDevice) {
m_defaultDevice = QString::fromUtf8([defaultDevice.uniqueID UTF8String]);
m_activeInput = m_defaultDevice;
}
}
AVFAudioInputSelectorControl::~AVFAudioInputSelectorControl()
@@ -72,7 +75,7 @@ QString AVFAudioInputSelectorControl::inputDescription(const QString &name) cons
QString AVFAudioInputSelectorControl::defaultInput() const
{
return m_devices.size() > 0 ? m_devices.first() : QString();
return m_defaultDevice;
}
QString AVFAudioInputSelectorControl::activeInput() const

View File

@@ -99,7 +99,7 @@ Q_SIGNALS:
private:
static void updateCameraDevices();
void attachInputDevices();
void attachVideoInputDevice();
void applyImageEncoderSettings();
void applyViewfinderSettings();
@@ -114,7 +114,6 @@ private:
AVCaptureSession *m_captureSession;
AVCaptureDeviceInput *m_videoInput;
AVCaptureDeviceInput *m_audioInput;
AVFCameraSessionObserver *m_observer;
QSet<AVFMediaVideoProbeControl *> m_videoProbes;

View File

@@ -143,7 +143,6 @@ AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
, m_state(QCamera::UnloadedState)
, m_active(false)
, m_videoInput(nil)
, m_audioInput(nil)
, m_defaultCodec(0)
{
m_captureSession = [[AVCaptureSession alloc] init];
@@ -160,11 +159,6 @@ AVFCameraSession::~AVFCameraSession()
[m_videoInput release];
}
if (m_audioInput) {
[m_captureSession removeInput:m_audioInput];
[m_audioInput release];
}
[m_observer release];
[m_captureSession release];
}
@@ -283,10 +277,9 @@ void AVFCameraSession::setState(QCamera::State newState)
QCamera::State oldState = m_state;
m_state = newState;
//attach audio and video inputs during Unloaded->Loaded transition
if (oldState == QCamera::UnloadedState) {
attachInputDevices();
}
//attach video input during Unloaded->Loaded transition
if (oldState == QCamera::UnloadedState)
attachVideoInputDevice();
if (m_state == QCamera::ActiveState) {
Q_EMIT readyToConfigureConnections();
@@ -332,7 +325,7 @@ void AVFCameraSession::processSessionStopped()
}
}
void AVFCameraSession::attachInputDevices()
void AVFCameraSession::attachVideoInputDevice()
{
//Attach video input device:
if (m_service->videoDeviceControl()->isDirty()) {
@@ -360,29 +353,6 @@ void AVFCameraSession::attachInputDevices()
}
}
}
//Attach audio input device:
if (m_service->audioInputSelectorControl()->isDirty()) {
if (m_audioInput) {
[m_captureSession removeInput:m_audioInput];
[m_audioInput release];
m_audioInput = 0;
}
AVCaptureDevice *audioDevice = m_service->audioInputSelectorControl()->createCaptureDevice();
NSError *error = nil;
m_audioInput = [AVCaptureDeviceInput
deviceInputWithDevice:audioDevice
error:&error];
if (!m_audioInput) {
qWarning() << "Failed to create audio device input";
} else {
[m_audioInput retain];
[m_captureSession addInput:m_audioInput];
}
}
}
void AVFCameraSession::applyImageEncoderSettings()

View File

@@ -46,6 +46,7 @@ QT_BEGIN_NAMESPACE
class AVFCameraSession;
class AVFCameraControl;
class AVFAudioInputSelectorControl;
class AVFCameraService;
class AVFMediaRecorderControl : public QMediaRecorderControl
@@ -78,11 +79,12 @@ public Q_SLOTS:
void handleRecordingFailed(const QString &message);
private Q_SLOTS:
void reconnectMovieOutput();
void setupSessionForCapture();
void updateStatus();
private:
AVFCameraControl *m_cameraControl;
AVFAudioInputSelectorControl *m_audioInputControl;
AVFCameraSession *m_session;
bool m_connected;
@@ -96,6 +98,7 @@ private:
bool m_muted;
qreal m_volume;
AVCaptureDeviceInput *m_audioInput;
AVCaptureMovieFileOutput *m_movieOutput;
AVFMediaRecorderDelegate *m_recorderDelagate;
AVFStorageLocation m_storageLocation;

View File

@@ -36,6 +36,7 @@
#include "avfcamerasession.h"
#include "avfcameraservice.h"
#include "avfcameracontrol.h"
#include "avfaudioinputselectorcontrol.h"
#include <QtCore/qurl.h>
#include <QtCore/qfileinfo.h>
@@ -114,6 +115,7 @@ QT_USE_NAMESPACE
AVFMediaRecorderControl::AVFMediaRecorderControl(AVFCameraService *service, QObject *parent)
: QMediaRecorderControl(parent)
, m_cameraControl(service->cameraControl())
, m_audioInputControl(service->audioInputSelectorControl())
, m_session(service->session())
, m_connected(false)
, m_state(QMediaRecorder::StoppedState)
@@ -122,21 +124,29 @@ AVFMediaRecorderControl::AVFMediaRecorderControl(AVFCameraService *service, QObj
, m_recordingFinished(false)
, m_muted(false)
, m_volume(1.0)
, m_audioInput(nil)
{
m_movieOutput = [[AVCaptureMovieFileOutput alloc] init];
m_recorderDelagate = [[AVFMediaRecorderDelegate alloc] initWithRecorder:this];
connect(m_cameraControl, SIGNAL(stateChanged(QCamera::State)), SLOT(updateStatus()));
connect(m_cameraControl, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateStatus()));
connect(m_cameraControl, SIGNAL(captureModeChanged(QCamera::CaptureModes)), SLOT(reconnectMovieOutput()));
reconnectMovieOutput();
connect(m_cameraControl, SIGNAL(captureModeChanged(QCamera::CaptureModes)), SLOT(setupSessionForCapture()));
connect(m_session, SIGNAL(readyToConfigureConnections()), SLOT(setupSessionForCapture()));
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(setupSessionForCapture()));
}
AVFMediaRecorderControl::~AVFMediaRecorderControl()
{
if (m_movieOutput)
if (m_movieOutput) {
[m_session->captureSession() removeOutput:m_movieOutput];
[m_movieOutput release];
}
if (m_audioInput) {
[m_session->captureSession() removeInput:m_audioInput];
[m_audioInput release];
}
[m_recorderDelagate release];
}
@@ -307,13 +317,39 @@ void AVFMediaRecorderControl::handleRecordingFailed(const QString &message)
Q_EMIT error(QMediaRecorder::ResourceError, message);
}
void AVFMediaRecorderControl::reconnectMovieOutput()
void AVFMediaRecorderControl::setupSessionForCapture()
{
//adding movie output causes high CPU usage even when while recording is not active,
//connect it only while video capture mode is enabled
//connect it only while video capture mode is enabled.
// Similarly, connect the Audio input only in that mode, since it's only necessary
// when recording anyway. Adding an Audio input will trigger the microphone permission
// request on iOS, but it shouldn't do so until we actually try to record.
AVCaptureSession *captureSession = m_session->captureSession();
if (!m_connected && m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)) {
if (!m_connected
&& m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
&& m_session->state() != QCamera::UnloadedState) {
// Add audio input
// Allow recording even if something wrong happens with the audio input initialization
AVCaptureDevice *audioDevice = m_audioInputControl->createCaptureDevice();
if (!audioDevice) {
qWarning("No audio input device available");
} else {
NSError *error = nil;
m_audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (!m_audioInput) {
qWarning() << "Failed to create audio device input";
} else if (![captureSession canAddInput:m_audioInput]) {
qWarning() << "Could not connect the audio input";
m_audioInput = 0;
} else {
[m_audioInput retain];
[captureSession addInput:m_audioInput];
}
}
if ([captureSession canAddOutput:m_movieOutput]) {
[captureSession addOutput:m_movieOutput];
m_connected = true;
@@ -321,8 +357,18 @@ void AVFMediaRecorderControl::reconnectMovieOutput()
Q_EMIT error(QMediaRecorder::ResourceError, tr("Could not connect the video recorder"));
qWarning() << "Could not connect the video recorder";
}
} else if (m_connected && !m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)) {
} else if (m_connected
&& (!m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo)
|| m_session->state() != QCamera::ActiveState)) {
[captureSession removeOutput:m_movieOutput];
if (m_audioInput) {
[captureSession removeInput:m_audioInput];
[m_audioInput release];
m_audioInput = nil;
}
m_connected = false;
}

View File

@@ -132,11 +132,14 @@ private:
ComPtr<ID3D11VideoProcessorOutputView> m_outputView;
};
#define CAMERA_SAMPLE_QUEUE_SIZE 5
class QWinRTCameraVideoRendererControlPrivate
{
public:
QScopedPointer<D3DVideoBlitter> blitter;
QVector<ComPtr<IMF2DBuffer>> buffers;
ComPtr<IMF2DBuffer> buffers[CAMERA_SAMPLE_QUEUE_SIZE];
QAtomicInteger<quint16> writeIndex;
QAtomicInteger<quint16> readIndex;
};
QWinRTCameraVideoRendererControl::QWinRTCameraVideoRendererControl(const QSize &size, QObject *parent)
@@ -153,13 +156,17 @@ bool QWinRTCameraVideoRendererControl::render(ID3D11Texture2D *target)
{
Q_D(QWinRTCameraVideoRendererControl);
if (d->buffers.isEmpty()) {
const quint16 readIndex = d->readIndex;
if (readIndex == d->writeIndex) {
emit bufferRequested();
return false;
}
HRESULT hr;
ComPtr<IMF2DBuffer> buffer = d->buffers.takeFirst();
ComPtr<IMF2DBuffer> buffer = d->buffers[readIndex];
Q_ASSERT(buffer);
d->buffers[readIndex].Reset();
d->readIndex = (readIndex + 1) % CAMERA_SAMPLE_QUEUE_SIZE;
ComPtr<ID3D11Texture2D> sourceTexture;
ComPtr<IMFDXGIBuffer> dxgiBuffer;
@@ -186,11 +193,17 @@ void QWinRTCameraVideoRendererControl::queueBuffer(IMF2DBuffer *buffer)
{
Q_D(QWinRTCameraVideoRendererControl);
Q_ASSERT(buffer);
d->buffers.append(buffer);
const quint16 writeIndex = (d->writeIndex + 1) % CAMERA_SAMPLE_QUEUE_SIZE;
if (d->readIndex == writeIndex) // Drop new sample if queue is full
return;
d->buffers[d->writeIndex] = buffer;
d->writeIndex = writeIndex;
}
void QWinRTCameraVideoRendererControl::discardBuffers()
{
Q_D(QWinRTCameraVideoRendererControl);
d->buffers.clear();
d->writeIndex = d->readIndex = 0;
for (ComPtr<IMF2DBuffer> &buffer : d->buffers)
buffer.Reset();
}

View File

@@ -68,14 +68,15 @@ QSGVideoNode *QSGVideoNodeFactory_RGB::createNode(const QVideoSurfaceFormat &for
class QSGVideoMaterialShader_RGB : public QSGMaterialShader
{
public:
QSGVideoMaterialShader_RGB(QVideoFrame::PixelFormat pixelFormat)
QSGVideoMaterialShader_RGB()
: QSGMaterialShader(),
m_id_matrix(-1),
m_id_width(-1),
m_id_rgbTexture(-1),
m_id_opacity(-1),
m_pixelFormat(pixelFormat)
m_id_opacity(-1)
{
setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo_padded.vert"));
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
@@ -90,54 +91,6 @@ public:
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"uniform highp float width; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = qt_VertexTexCoord * vec2(width, 1);\n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
"}";
static const char *colorsSwapShader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = vec4(texture2D(rgbTexture, qt_TexCoord).bgr, 1.0) * opacity;"
"}";
switch (m_pixelFormat) {
case QVideoFrame::Format_RGB32:
case QVideoFrame::Format_ARGB32:
return colorsSwapShader;
default:
return shader;
}
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_width = program()->uniformLocation("width");
@@ -149,7 +102,16 @@ protected:
int m_id_width;
int m_id_rgbTexture;
int m_id_opacity;
QVideoFrame::PixelFormat m_pixelFormat;
};
class QSGVideoMaterialShader_RGB_swizzle : public QSGVideoMaterialShader_RGB
{
public:
QSGVideoMaterialShader_RGB_swizzle()
: QSGVideoMaterialShader_RGB()
{
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo_swizzle.frag"));
}
};
@@ -172,12 +134,13 @@ public:
}
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
static QSGMaterialType normalType, swizzleType;
return needsSwizzling() ? &swizzleType : &normalType;
}
virtual QSGMaterialShader *createShader() const {
return new QSGVideoMaterialShader_RGB(m_format.pixelFormat());
return needsSwizzling() ? new QSGVideoMaterialShader_RGB_swizzle
: new QSGVideoMaterialShader_RGB;
}
virtual int compare(const QSGMaterial *other) const {
@@ -263,6 +226,12 @@ public:
GLuint m_textureId;
qreal m_opacity;
GLfloat m_width;
private:
bool needsSwizzling() const {
return m_format.pixelFormat() == QVideoFrame::Format_RGB32
|| m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
}
};

View File

@@ -69,10 +69,11 @@ QSGVideoNode *QSGVideoNodeFactory_Texture::createNode(const QVideoSurfaceFormat
class QSGVideoMaterialShader_Texture : public QSGMaterialShader
{
public:
QSGVideoMaterialShader_Texture(QVideoFrame::PixelFormat pixelFormat)
: QSGMaterialShader(),
m_pixelFormat(pixelFormat)
QSGVideoMaterialShader_Texture()
: QSGMaterialShader()
{
setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.vert"));
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
@@ -87,56 +88,6 @@ public:
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = qt_VertexTexCoord; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
"}";
static const char *colorsSwapShader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = vec4(texture2D(rgbTexture, qt_TexCoord).bgr, 1.0) * opacity;"
"}";
if (!QMediaOpenGLHelper::isANGLE()) {
switch (m_pixelFormat) {
case QVideoFrame::Format_RGB32:
case QVideoFrame::Format_ARGB32:
return colorsSwapShader;
default:
break;
}
}
return shader;
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_Texture = program()->uniformLocation("rgbTexture");
@@ -146,7 +97,16 @@ protected:
int m_id_matrix;
int m_id_Texture;
int m_id_opacity;
QVideoFrame::PixelFormat m_pixelFormat;
};
class QSGVideoMaterialShader_Texture_swizzle : public QSGVideoMaterialShader_Texture
{
public:
QSGVideoMaterialShader_Texture_swizzle()
: QSGVideoMaterialShader_Texture()
{
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo_swizzle.frag"));
}
};
@@ -167,12 +127,13 @@ public:
}
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
static QSGMaterialType normalType, swizzleType;
return needsSwizzling() ? &swizzleType : &normalType;
}
virtual QSGMaterialShader *createShader() const {
return new QSGVideoMaterialShader_Texture(m_format.pixelFormat());
return needsSwizzling() ? new QSGVideoMaterialShader_Texture_swizzle
: new QSGVideoMaterialShader_Texture;
}
virtual int compare(const QSGMaterial *other) const {
@@ -220,6 +181,13 @@ public:
QVideoSurfaceFormat m_format;
GLuint m_textureId;
qreal m_opacity;
private:
bool needsSwizzling() const {
return !QMediaOpenGLHelper::isANGLE()
&& (m_format.pixelFormat() == QVideoFrame::Format_RGB32
|| m_format.pixelFormat() == QVideoFrame::Format_ARGB32);
}
};

View File

@@ -62,10 +62,17 @@ QSGVideoNode *QSGVideoNodeFactory_YUV::createNode(const QVideoSurfaceFormat &for
}
class QSGVideoMaterialShader_YUV420 : public QSGMaterialShader
class QSGVideoMaterialShader_YUV_BiPlanar : public QSGMaterialShader
{
public:
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
QSGVideoMaterialShader_YUV_BiPlanar()
: QSGMaterialShader()
{
setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo.vert"));
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo.frag"));
}
virtual void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
virtual char const *const *attributeNames() const {
static const char *names[] = {
@@ -77,133 +84,58 @@ public:
}
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"uniform highp float yWidth; \n"
"uniform highp float uvWidth; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 yTexCoord; \n"
"varying highp vec2 uvTexCoord; \n"
"void main() { \n"
" yTexCoord = qt_VertexTexCoord * vec2(yWidth, 1);\n"
" uvTexCoord = qt_VertexTexCoord * vec2(uvWidth, 1);\n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shader =
"uniform sampler2D yTexture;"
"uniform sampler2D uTexture;"
"uniform sampler2D vTexture;"
"uniform mediump mat4 colorMatrix;"
"uniform lowp float opacity;"
""
"varying highp vec2 yTexCoord;"
"varying highp vec2 uvTexCoord;"
""
"void main()"
"{"
" mediump float Y = texture2D(yTexture, yTexCoord).r;"
" mediump float U = texture2D(uTexture, uvTexCoord).r;"
" mediump float V = texture2D(vTexture, uvTexCoord).r;"
" mediump vec4 color = vec4(Y, U, V, 1.);"
" gl_FragColor = colorMatrix * color * opacity;"
"}";
return shader;
}
virtual void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_yWidth = program()->uniformLocation("yWidth");
m_id_uvWidth = program()->uniformLocation("uvWidth");
m_id_yTexture = program()->uniformLocation("yTexture");
m_id_uTexture = program()->uniformLocation("uTexture");
m_id_vTexture = program()->uniformLocation("vTexture");
m_id_plane1Width = program()->uniformLocation("plane1Width");
m_id_plane2Width = program()->uniformLocation("plane2Width");
m_id_plane1Texture = program()->uniformLocation("plane1Texture");
m_id_plane2Texture = program()->uniformLocation("plane2Texture");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_yWidth;
int m_id_uvWidth;
int m_id_yTexture;
int m_id_uTexture;
int m_id_vTexture;
int m_id_plane1Width;
int m_id_plane2Width;
int m_id_plane1Texture;
int m_id_plane2Texture;
int m_id_colorMatrix;
int m_id_opacity;
};
class QSGVideoMaterialShader_NV_12_21 : public QSGVideoMaterialShader_YUV420
class QSGVideoMaterialShader_YUV_BiPlanar_swizzle : public QSGVideoMaterialShader_YUV_BiPlanar
{
public:
QSGVideoMaterialShader_NV_12_21(bool isNV21) : m_isNV21(isNV21) {
QSGVideoMaterialShader_YUV_BiPlanar_swizzle()
: QSGVideoMaterialShader_YUV_BiPlanar()
{
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo_swizzle.frag"));
}
};
class QSGVideoMaterialShader_YUV_TriPlanar : public QSGVideoMaterialShader_YUV_BiPlanar
{
public:
QSGVideoMaterialShader_YUV_TriPlanar()
: QSGVideoMaterialShader_YUV_BiPlanar()
{
setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/triplanaryuvvideo.vert"));
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/triplanaryuvvideo.frag"));
}
virtual void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"uniform highp float yWidth; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 yTexCoord; \n"
"void main() { \n"
" yTexCoord = qt_VertexTexCoord * vec2(yWidth, 1);\n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shaderNV12 =
"uniform sampler2D yTexture; \n"
"uniform sampler2D uvTexture; \n"
"uniform mediump mat4 colorMatrix; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 yTexCoord; \n"
"void main() \n"
"{ \n"
" mediump float Y = texture2D(yTexture, yTexCoord).r; \n"
" mediump vec2 UV = texture2D(uvTexture, yTexCoord).ra; \n"
" mediump vec4 color = vec4(Y, UV.x, UV.y, 1.); \n"
" gl_FragColor = colorMatrix * color * opacity; \n"
"}";
static const char *shaderNV21 =
"uniform sampler2D yTexture; \n"
"uniform sampler2D uvTexture; \n"
"uniform mediump mat4 colorMatrix; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 yTexCoord; \n"
"void main() \n"
"{ \n"
" mediump float Y = texture2D(yTexture, yTexCoord).r; \n"
" mediump vec2 UV = texture2D(uvTexture, yTexCoord).ar; \n"
" mediump vec4 color = vec4(Y, UV.x, UV.y, 1.); \n"
" gl_FragColor = colorMatrix * color * opacity; \n"
"}";
return m_isNV21 ? shaderNV21 : shaderNV12;
}
virtual void initialize() {
m_id_yTexture = program()->uniformLocation("yTexture");
m_id_uTexture = program()->uniformLocation("uvTexture");
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_yWidth = program()->uniformLocation("yWidth");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
m_id_plane3Width = program()->uniformLocation("plane3Width");
m_id_plane3Texture = program()->uniformLocation("plane3Texture");
QSGVideoMaterialShader_YUV_BiPlanar::initialize();
}
private:
bool m_isNV21;
int m_id_plane3Width;
int m_id_plane3Texture;
};
@@ -213,22 +145,28 @@ public:
QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format);
~QSGVideoMaterial_YUV();
bool isNV12_21() const {
const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
return pf == QVideoFrame::Format_NV12 || pf == QVideoFrame::Format_NV21;
}
virtual QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
static QSGMaterialType biPlanarType, biPlanarSwizzleType, triPlanarType;
switch (m_format.pixelFormat()) {
case QVideoFrame::Format_NV12:
return &biPlanarType;
case QVideoFrame::Format_NV21:
return &biPlanarSwizzleType;
default: // Currently: YUV420P and YV12
return &triPlanarType;
}
}
virtual QSGMaterialShader *createShader() const {
const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
if (isNV12_21())
return new QSGVideoMaterialShader_NV_12_21(pf == QVideoFrame::Format_NV21);
return new QSGVideoMaterialShader_YUV420;
switch (m_format.pixelFormat()) {
case QVideoFrame::Format_NV12:
return new QSGVideoMaterialShader_YUV_BiPlanar;
case QVideoFrame::Format_NV21:
return new QSGVideoMaterialShader_YUV_BiPlanar_swizzle;
default: // Currently: YUV420P and YV12
return new QSGVideoMaterialShader_YUV_TriPlanar;
}
}
virtual int compare(const QSGMaterial *other) const {
@@ -236,13 +174,10 @@ public:
int d = m_textureIds[0] - m->m_textureIds[0];
if (d)
return d;
d = m_textureIds[1] - m->m_textureIds[1];
if (m_textureIds.size() == 2 || d != 0)
else if ((d = m_textureIds[1] - m->m_textureIds[1]) != 0)
return d;
return m_textureIds[2] - m->m_textureIds[2];
else
return m_textureIds[2] - m->m_textureIds[2];
}
void updateBlending() {
@@ -259,12 +194,12 @@ public:
QVideoSurfaceFormat m_format;
QSize m_textureSize;
int m_planeCount;
QVector<GLuint> m_textureIds;
GLuint m_textureIds[3];
GLfloat m_planeWidth[3];
qreal m_opacity;
GLfloat m_yWidth;
GLfloat m_uvWidth;
QMatrix4x4 m_colorMatrix;
QVideoFrame m_frame;
@@ -273,11 +208,23 @@ public:
QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
m_format(format),
m_opacity(1.0),
m_yWidth(1.0),
m_uvWidth(1.0)
m_opacity(1.0)
{
m_textureIds.resize(isNV12_21() ? 2 : 3);
memset(m_textureIds, 0, sizeof(m_textureIds));
switch (format.pixelFormat()) {
case QVideoFrame::Format_NV12:
case QVideoFrame::Format_NV21:
m_planeCount = 2;
break;
case QVideoFrame::Format_YUV420P:
case QVideoFrame::Format_YV12:
m_planeCount = 3;
break;
default:
m_planeCount = 1;
break;
}
switch (format.yCbCrColorSpace()) {
case QVideoSurfaceFormat::YCbCr_JPEG:
@@ -310,7 +257,7 @@ QSGVideoMaterial_YUV::~QSGVideoMaterial_YUV()
{
if (!m_textureSize.isEmpty()) {
if (QOpenGLContext *current = QOpenGLContext::currentContext())
current->functions()->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
current->functions()->glDeleteTextures(m_planeCount, m_textureIds);
else
qWarning() << "QSGVideoMaterial_YUV: Cannot obtain GL context, unable to delete textures";
}
@@ -328,8 +275,8 @@ void QSGVideoMaterial_YUV::bind()
// Frame has changed size, recreate textures...
if (m_textureSize != m_frame.size()) {
if (!m_textureSize.isEmpty())
functions->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
functions->glGenTextures(m_textureIds.size(), &m_textureIds[0]);
functions->glDeleteTextures(m_planeCount, m_textureIds);
functions->glGenTextures(m_planeCount, m_textureIds);
m_textureSize = m_frame.size();
}
@@ -337,24 +284,25 @@ void QSGVideoMaterial_YUV::bind()
functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
if (isNV12_21()) {
if (m_format.pixelFormat() == QVideoFrame::Format_NV12
|| m_format.pixelFormat() == QVideoFrame::Format_NV21) {
const int y = 0;
const int uv = 1;
m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
m_uvWidth = m_yWidth;
m_planeWidth[0] = m_planeWidth[1] = qreal(fw) / m_frame.bytesPerLine(y);
functions->glActiveTexture(GL_TEXTURE1);
bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), GL_LUMINANCE_ALPHA);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
} else {
} else { // YUV420P || YV12
const int y = 0;
const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
m_uvWidth = qreal(fw) / (2 * m_frame.bytesPerLine(u));
m_planeWidth[0] = qreal(fw) / m_frame.bytesPerLine(y);
m_planeWidth[1] = m_planeWidth[2] = qreal(fw) / (2 * m_frame.bytesPerLine(u));
functions->glActiveTexture(GL_TEXTURE1);
bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), GL_LUMINANCE);
@@ -370,14 +318,10 @@ void QSGVideoMaterial_YUV::bind()
m_frame = QVideoFrame();
} else {
functions->glActiveTexture(GL_TEXTURE1);
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[1]);
if (!isNV12_21()) {
functions->glActiveTexture(GL_TEXTURE2);
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[2]);
for (int i = 0; i < m_planeCount; ++i) {
functions->glActiveTexture(GL_TEXTURE0 + i);
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[i]);
}
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[0]);
}
}
@@ -411,22 +355,21 @@ void QSGVideoNode_YUV::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
markDirty(DirtyMaterial);
}
void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
void QSGVideoMaterialShader_YUV_BiPlanar::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
program()->setUniformValue(m_id_yTexture, 0);
program()->setUniformValue(m_id_uTexture, 1);
program()->setUniformValue(m_id_vTexture, 2);
program()->setUniformValue(m_id_plane1Texture, 0);
program()->setUniformValue(m_id_plane2Texture, 1);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
program()->setUniformValue(m_id_yWidth, mat->m_yWidth);
program()->setUniformValue(m_id_uvWidth, mat->m_uvWidth);
program()->setUniformValue(m_id_plane1Width, mat->m_planeWidth[0]);
program()->setUniformValue(m_id_plane2Width, mat->m_planeWidth[1]);
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
@@ -435,26 +378,15 @@ void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}
void QSGVideoMaterialShader_NV_12_21::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
void QSGVideoMaterialShader_YUV_TriPlanar::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterialShader_YUV_BiPlanar::updateState(state, newMaterial, oldMaterial);
QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
program()->setUniformValue(m_id_yTexture, 0);
program()->setUniformValue(m_id_uTexture, 1);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
program()->setUniformValue(m_id_yWidth, mat->m_yWidth);
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
program()->setUniformValue(m_id_plane3Texture, 2);
program()->setUniformValue(m_id_plane3Width, mat->m_planeWidth[2]);
}
QT_END_NAMESPACE

View File

@@ -32,3 +32,17 @@ HEADERS += \
qsgvideonode_yuv.h \
qsgvideonode_rgb.h \
qsgvideonode_texture.h
# Bundle the scenegraph video shaders into the binary via the Qt
# resource system (see qtmultimediaquicktools.qrc).
RESOURCES += \
qtmultimediaquicktools.qrc

# Shader sources listed here so IDEs show them in the project tree;
# presumably they are only shipped through the resource file above
# (not installed as standalone files) — confirm against install rules.
OTHER_FILES += \
shaders/rgbvideo.vert \
shaders/rgbvideo_padded.vert \
shaders/rgbvideo.frag \
shaders/rgbvideo_swizzle.frag \
shaders/biplanaryuvvideo.vert \
shaders/biplanaryuvvideo.frag \
shaders/biplanaryuvvideo_swizzle.frag \
shaders/triplanaryuvvideo.vert \
shaders/triplanaryuvvideo.frag

View File

@@ -0,0 +1,13 @@
<!-- Embeds the scenegraph video shader sources so they are loadable at
     runtime as :/qtmultimediaquicktools/shaders/<name>. -->
<RCC>
<qresource prefix="/qtmultimediaquicktools">
<file>shaders/rgbvideo.vert</file>
<file>shaders/rgbvideo.frag</file>
<file>shaders/rgbvideo_swizzle.frag</file>
<file>shaders/rgbvideo_padded.vert</file>
<file>shaders/biplanaryuvvideo.frag</file>
<file>shaders/biplanaryuvvideo.vert</file>
<file>shaders/biplanaryuvvideo_swizzle.frag</file>
<file>shaders/triplanaryuvvideo.frag</file>
<file>shaders/triplanaryuvvideo.vert</file>
</qresource>
</RCC>

View File

@@ -0,0 +1,14 @@
// Fragment shader for bi-planar YUV video: plane 1 carries luma, plane 2
// carries the two interleaved chroma components, read here via the .r and
// .a channels (presumably a luminance-alpha texture — confirm upload format).
uniform sampler2D plane1Texture;
uniform sampler2D plane2Texture;
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
varying highp vec2 plane1TexCoord;
varying highp vec2 plane2TexCoord;

void main()
{
    // Luma from the first plane, both chroma components from the second.
    mediump float luma = texture2D(plane1Texture, plane1TexCoord).r;
    mediump vec2 chroma = texture2D(plane2Texture, plane2TexCoord).ra;
    // YUV -> RGB via the supplied conversion matrix, then fade by opacity.
    gl_FragColor = colorMatrix * vec4(luma, chroma, 1.) * opacity;
}

View File

@@ -0,0 +1,13 @@
// Vertex shader for bi-planar YUV video. Each planeNWidth scales the
// x texture coordinate per plane — presumably the ratio of visible width
// to allocated texture width, compensating for row padding; confirm in
// the material that sets these uniforms.
uniform highp mat4 qt_Matrix;
uniform highp float plane1Width;
uniform highp float plane2Width;
attribute highp vec4 qt_VertexPosition;
attribute highp vec2 qt_VertexTexCoord;
varying highp vec2 plane1TexCoord;
varying highp vec2 plane2TexCoord;
void main() {
// Scale only x; y spans the full texture height.
plane1TexCoord = qt_VertexTexCoord * vec2(plane1Width, 1);
plane2TexCoord = qt_VertexTexCoord * vec2(plane2Width, 1);
gl_Position = qt_Matrix * qt_VertexPosition;
}

View File

@@ -0,0 +1,14 @@
// Fragment shader for bi-planar YUV video with swapped chroma byte order:
// identical to the plain bi-planar shader except the chroma plane is read
// as .ar instead of .ra, reversing the two components (presumably to
// handle NV21 vs NV12 ordering — TODO confirm against the material code).
uniform sampler2D plane1Texture;
uniform sampler2D plane2Texture;
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
varying highp vec2 plane1TexCoord;
varying highp vec2 plane2TexCoord;
void main()
{
// Luma from plane 1; chroma from plane 2 with components swapped (.ar).
mediump float Y = texture2D(plane1Texture, plane1TexCoord).r;
mediump vec2 UV = texture2D(plane2Texture, plane2TexCoord).ar;
// YUV -> RGB via the conversion matrix, then fade by opacity.
mediump vec4 color = vec4(Y, UV.x, UV.y, 1.);
gl_FragColor = colorMatrix * color * opacity;
}

View File

@@ -0,0 +1,8 @@
// Plain RGB(A) video fragment shader: sample the frame texture and
// apply the node's opacity.
uniform sampler2D rgbTexture;
uniform lowp float opacity;
varying highp vec2 qt_TexCoord;

void main()
{
    mediump vec4 texel = texture2D(rgbTexture, qt_TexCoord);
    gl_FragColor = texel * opacity;
}

View File

@@ -0,0 +1,9 @@
// Pass-through vertex shader for RGB video: forwards the texture
// coordinate unchanged and projects the vertex with qt_Matrix.
uniform highp mat4 qt_Matrix;
attribute highp vec4 qt_VertexPosition;
attribute highp vec2 qt_VertexTexCoord;
varying highp vec2 qt_TexCoord;
void main() {
qt_TexCoord = qt_VertexTexCoord;
gl_Position = qt_Matrix * qt_VertexPosition;
}

View File

@@ -0,0 +1,10 @@
// Vertex shader for RGB video with padded rows: `width` scales the x
// texture coordinate — presumably the ratio of visible width to the
// allocated texture width so sampling skips the padding; confirm in the
// material that sets this uniform.
uniform highp mat4 qt_Matrix;
uniform highp float width;
attribute highp vec4 qt_VertexPosition;
attribute highp vec2 qt_VertexTexCoord;
varying highp vec2 qt_TexCoord;
void main() {
// Scale only x; y spans the full texture height.
qt_TexCoord = qt_VertexTexCoord * vec2(width, 1);
gl_Position = qt_Matrix * qt_VertexPosition;
}

View File

@@ -0,0 +1,8 @@
// RGB video fragment shader for sources stored in reversed channel order:
// swizzle the texel with .bgr and force alpha to 1 before applying opacity.
uniform sampler2D rgbTexture;
uniform lowp float opacity;
varying highp vec2 qt_TexCoord;

void main()
{
    mediump vec3 rgb = texture2D(rgbTexture, qt_TexCoord).bgr;
    gl_FragColor = vec4(rgb, 1.0) * opacity;
}

View File

@@ -0,0 +1,18 @@
// Fragment shader for tri-planar YUV video: Y, U and V each live in a
// separate single-channel plane, read via the .r component of its texture.
uniform sampler2D plane1Texture;
uniform sampler2D plane2Texture;
uniform sampler2D plane3Texture;
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
varying highp vec2 plane1TexCoord;
varying highp vec2 plane2TexCoord;
varying highp vec2 plane3TexCoord;

void main()
{
    // Assemble (Y, U, V, 1) from the three planes, convert to RGB with
    // the supplied matrix, then fade by opacity.
    mediump vec4 yuv = vec4(texture2D(plane1Texture, plane1TexCoord).r,
                            texture2D(plane2Texture, plane2TexCoord).r,
                            texture2D(plane3Texture, plane3TexCoord).r,
                            1.);
    gl_FragColor = colorMatrix * yuv * opacity;
}

View File

@@ -0,0 +1,16 @@
// Vertex shader for tri-planar YUV video. Each planeNWidth scales the
// x texture coordinate for its plane — presumably the ratio of visible
// width to allocated texture width, compensating for per-plane row
// padding; confirm in the material that sets these uniforms.
uniform highp mat4 qt_Matrix;
uniform highp float plane1Width;
uniform highp float plane2Width;
uniform highp float plane3Width;
attribute highp vec4 qt_VertexPosition;
attribute highp vec2 qt_VertexTexCoord;
varying highp vec2 plane1TexCoord;
varying highp vec2 plane2TexCoord;
varying highp vec2 plane3TexCoord;
void main() {
// Scale only x; y spans the full texture height.
plane1TexCoord = qt_VertexTexCoord * vec2(plane1Width, 1);
plane2TexCoord = qt_VertexTexCoord * vec2(plane2Width, 1);
plane3TexCoord = qt_VertexTexCoord * vec2(plane3Width, 1);
gl_Position = qt_Matrix * qt_VertexPosition;
}