Android: refactor video renderer.

Removed the overhead of having to create a shared OpenGL context in the GUI
thread and pre-render the frame into an FBO.

We now render the GL_TEXTURE_EXTERNAL_OES texture directly in the QtQuick render
thread, using an Android-specific QSGVideoNode.
We also use a callback from the render thread to create the texture there,
so a separate shared OpenGL context is no longer needed.

Change-Id: I6c8eb94b47d0a03329c912701b8af3fb5ebd1876
Reviewed-by: Christian Stromme <christian.stromme@digia.com>
Authored by Yoann Lopes on 2013-09-27 11:24:10 +02:00, committed by The Qt Project
parent 329d9d4563, commit be7a6241e7
20 changed files with 554 additions and 299 deletions
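The central change is a small handshake that lets the Android video control obtain its OpenGL texture on the QtQuick render thread instead of through a shared context. The snippet below is a condensed, illustrative sketch of that handshake as it appears in the hunks that follow; RenderThreadClient and notifyVideoControl are placeholder names standing in for QAndroidVideoRendererControl and the declarative renderer backend, not classes from the patch.

// Condensed sketch (illustrative, not part of the patch) of the
// "_q_GLThreadCallback" handshake introduced below. RenderThreadClient stands
// in for QAndroidVideoRendererControl; notifyVideoControl stands in for the
// code added to QDeclarativeVideoRendererBackend::updatePaintNode().
#include <QAbstractVideoSurface>
#include <QEvent>
#include <QVariant>
#include <QDebug>

class RenderThreadClient : public QObject
{
public:
    // GUI thread: make this object reachable from the QtQuick render thread.
    void attach(QAbstractVideoSurface *surface)
    {
        surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject *>(this));
    }

protected:
    // Invoked synchronously from the render thread, where an OpenGL context is
    // current, so GL resources (the external texture in the real patch) can be
    // created here.
    void customEvent(QEvent *e)
    {
        if (e->type() == QEvent::User)
            qDebug() << "render thread callback: create the GL texture here";
    }
};

// Render-thread side: fetch the registered object and deliver a QEvent::User.
static void notifyVideoControl(QAbstractVideoSurface *surface)
{
    QObject *obj = surface->property("_q_GLThreadCallback").value<QObject *>();
    if (obj) {
        QEvent ev(QEvent::User);
        obj->event(&ev); // QObject::event() routes User events to customEvent()
    }
}

Because the event is delivered with a direct event() call rather than posted through the event loop, the callback runs synchronously on the render thread, which is the only place the GL_TEXTURE_EXTERNAL_OES texture can be created without a shared context.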

@@ -193,6 +193,13 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
    if (!m_glContext) {
        m_glContext = QOpenGLContext::currentContext();
        m_surface->scheduleOpenGLContextUpdate();
+
+       // Internal mechanism to call back the surface renderer from the QtQuick render thread
+       QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
+       if (obj) {
+           QEvent ev(QEvent::User);
+           obj->event(&ev);
+       }
    }

    if (m_frameChanged) {

@@ -62,7 +62,8 @@ ANDROID_BUNDLED_FILES += \
    lib/libQt5MultimediaQuick_p.so

MODULE_PLUGIN_TYPES = \
    mediaservice \
-   audio
+   audio \
+   video/videonode

win32:LIBS += -luuid

@@ -69,7 +69,7 @@ private:
    int m_orientation;
};

-class QSGVideoNodeFactoryInterface
+class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
{
public:
    virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const = 0;

@@ -79,7 +79,7 @@ public:
#define QSGVideoNodeFactoryInterface_iid "org.qt-project.qt.sgvideonodefactory/5.0"
Q_DECLARE_INTERFACE(QSGVideoNodeFactoryInterface, QSGVideoNodeFactoryInterface_iid)

-class QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface
+class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryPlugin : public QObject, public QSGVideoNodeFactoryInterface
{
    Q_OBJECT
    Q_INTERFACES(QSGVideoNodeFactoryInterface)

@@ -3,3 +3,6 @@ TEMPLATE = subdirs
SUBDIRS += src \
           jar

+qtHaveModule(quick) {
+    SUBDIRS += videonode
+}

@@ -48,26 +48,28 @@
QT_BEGIN_NAMESPACE

-typedef void (*TextureReadyCallback)(void*);
-
class QAndroidVideoOutput
{
public:
+   QAndroidVideoOutput() { }
    virtual ~QAndroidVideoOutput() { }

    virtual jobject surfaceHolder() = 0;
+   virtual jobject surfaceTexture() { return 0; }

-   virtual bool isTextureReady() = 0;
-   virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
-
-   virtual jobject surfaceTexture() = 0;
+   virtual bool isReady() { return true; }

-   virtual void setVideoSize(const QSize &size) = 0;
-   virtual void stop() = 0;
+   virtual void setVideoSize(const QSize &) { }
+   virtual void stop() { }
    virtual QImage toImage() = 0;
+
+   // signals:
+   //     void readyChanged(bool);
};

+#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0"
+Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid)
+
QT_END_NAMESPACE

#endif // QANDROIDVIDEOOUTPUT_H

@@ -44,39 +44,31 @@
#include <QtCore/private/qjni_p.h>
#include "jsurfacetextureholder.h"
#include <QAbstractVideoSurface>
-#include <QOpenGLContext>
-#include <QOffscreenSurface>
-#include <QOpenGLFramebufferObject>
#include <QVideoSurfaceFormat>
-#include <QOpenGLFunctions>
-#include <QOpenGLShaderProgram>
#include <qevent.h>
+#include <qcoreapplication.h>
+#include <qopenglcontext.h>
+#include <qopenglfunctions.h>

QT_BEGIN_NAMESPACE

-static const GLfloat g_vertex_data[] = {
-    -1.f, 1.f,
-    1.f, 1.f,
-    1.f, -1.f,
-    -1.f, -1.f
-};
-
-static const GLfloat g_texture_data[] = {
-    0.f, 0.f,
-    1.f, 0.f,
-    1.f, 1.f,
-    0.f, 1.f
-};
+#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
+
+TextureDeleter::~TextureDeleter()
+{
+    glDeleteTextures(1, &m_id);
+}

-class TextureVideoBuffer : public QAbstractVideoBuffer
+class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
-    TextureVideoBuffer(GLuint textureId)
-        : QAbstractVideoBuffer(GLTextureHandle)
-        , m_textureId(textureId)
-    {}
+    AndroidTextureVideoBuffer(JSurfaceTexture *surface)
+        : QAbstractVideoBuffer(ExternalGLTextureHandle)
+        , m_surfaceTexture(surface)
+    {
+    }

-    virtual ~TextureVideoBuffer() {}
+    virtual ~AndroidTextureVideoBuffer() {}

    MapMode mapMode() const { return NotMapped; }
    uchar *map(MapMode, int*, int*) { return 0; }
@@ -84,67 +76,33 @@ public:
    QVariant handle() const
    {
-        return QVariant::fromValue<unsigned int>(m_textureId);
+        if (m_data.isEmpty()) {
+            // update the video texture (called from the render thread)
+            m_surfaceTexture->updateTexImage();
+            m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
+        }
+        return m_data;
    }

private:
-    GLuint m_textureId;
-};
-
-class ImageVideoBuffer : public QAbstractVideoBuffer
-{
-public:
-    ImageVideoBuffer(const QImage &image)
-        : QAbstractVideoBuffer(NoHandle)
-        , m_image(image)
-        , m_mode(NotMapped)
-    {
-    }
-
-    MapMode mapMode() const { return m_mode; }
-    uchar *map(MapMode mode, int *, int *)
-    {
-        if (mode != NotMapped && m_mode == NotMapped) {
-            m_mode = mode;
-            return m_image.bits();
-        }
-        return 0;
-    }
-
-    void unmap()
-    {
-        m_mode = NotMapped;
-    }
-
-private:
-    QImage m_image;
-    MapMode m_mode;
+    mutable JSurfaceTexture *m_surfaceTexture;
+    mutable QVariantList m_data;
};

QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
    : QVideoRendererControl(parent)
    , m_surface(0)
-    , m_offscreenSurface(0)
-    , m_glContext(0)
-    , m_fbo(0)
-    , m_program(0)
-    , m_useImage(false)
    , m_androidSurface(0)
    , m_surfaceTexture(0)
    , m_surfaceHolder(0)
    , m_externalTex(0)
-    , m_textureReadyCallback(0)
-    , m_textureReadyContext(0)
+    , m_textureDeleter(0)
{
}

QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
{
-    if (m_glContext)
-        m_glContext->makeCurrent(m_offscreenSurface);
-
    if (m_surfaceTexture) {
        m_surfaceTexture->callMethod<void>("release");
        delete m_surfaceTexture;
@@ -159,13 +117,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
        delete m_surfaceHolder;
        m_surfaceHolder = 0;
    }

-    if (m_externalTex)
-        glDeleteTextures(1, &m_externalTex);
-
-    delete m_fbo;
-    delete m_program;
-    delete m_glContext;
-    delete m_offscreenSurface;
+    if (m_textureDeleter)
+        m_textureDeleter->deleteLater();
}

QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
@@ -178,28 +131,23 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
    if (surface == m_surface)
        return;

-    if (m_surface && m_surface->isActive()) {
+    if (m_surface) {
+        if (m_surface->isActive())
            m_surface->stop();
-        m_surface->removeEventFilter(this);
+
+        m_surface->setProperty("_q_GLThreadCallback", QVariant());
    }

    m_surface = surface;

    if (m_surface) {
-        m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
-        m_surface->installEventFilter(this);
+        m_surface->setProperty("_q_GLThreadCallback",
+                               QVariant::fromValue<QObject*>(this));
    }
}

-bool QAndroidVideoRendererControl::isTextureReady()
+bool QAndroidVideoRendererControl::isReady()
{
-    return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid());
-}
-
-void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
-{
-    m_textureReadyCallback = cb;
-    m_textureReadyContext = context;
+    return QOpenGLContext::currentContext() || m_externalTex;
}

bool QAndroidVideoRendererControl::initSurfaceTexture()
@@ -210,45 +158,15 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
    if (!m_surface)
        return false;

-    QOpenGLContext *currContext = QOpenGLContext::currentContext();
-
-    // If we don't have a GL context in the current thread, create one and share it
-    // with the render thread GL context
-    if (!currContext && !m_glContext) {
-        QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
-        if (!shareContext)
-            return false;
-
-        m_offscreenSurface = new QOffscreenSurface;
-        QSurfaceFormat format;
-        format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
-        m_offscreenSurface->setFormat(format);
-        m_offscreenSurface->create();
-
-        m_glContext = new QOpenGLContext;
-        m_glContext->setFormat(m_offscreenSurface->requestedFormat());
-
-        if (shareContext)
-            m_glContext->setShareContext(shareContext);
-
-        if (!m_glContext->create()) {
-            delete m_glContext;
-            m_glContext = 0;
-            delete m_offscreenSurface;
-            m_offscreenSurface = 0;
-            return false;
-        }
-
-        // if sharing contexts is not supported, fallback to image rendering and send the bits
-        // to the video surface
-        if (!m_glContext->shareContext())
-            m_useImage = true;
-    }
-
-    if (m_glContext)
-        m_glContext->makeCurrent(m_offscreenSurface);
-
+    // if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
+    // for the GL render thread to call us back to do it.
+    if (QOpenGLContext::currentContext()) {
        glGenTextures(1, &m_externalTex);
+        m_textureDeleter = new TextureDeleter(m_externalTex);
+    } else if (!m_externalTex) {
+        return false;
+    }

    m_surfaceTexture = new JSurfaceTexture(m_externalTex);

    if (m_surfaceTexture->isValid()) {
@@ -256,7 +174,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
    } else {
        delete m_surfaceTexture;
        m_surfaceTexture = 0;
-        glDeleteTextures(1, &m_externalTex);
+        m_textureDeleter->deleteLater();
+        m_externalTex = 0;
+        m_textureDeleter = 0;
    }

    return m_surfaceTexture != 0;
@@ -294,9 +214,6 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
    stop();

    m_nativeSize = size;
-
-    delete m_fbo;
-    m_fbo = 0;
}

void QAndroidVideoRendererControl::stop()
@@ -308,45 +225,27 @@ void QAndroidVideoRendererControl::stop()
QImage QAndroidVideoRendererControl::toImage()
{
-    if (!m_fbo)
+    // FIXME!!! Since we are not using a FBO anymore, we can't grab the pixels. And glGetTexImage
+    // doesn't work on GL_TEXTURE_EXTERNAL_OES
    return QImage();
-
-    return m_fbo->toImage().mirrored();
}

void QAndroidVideoRendererControl::onFrameAvailable()
{
-    if (m_glContext)
-        m_glContext->makeCurrent(m_offscreenSurface);
-
-    m_surfaceTexture->updateTexImage();
-
-    if (!m_nativeSize.isValid())
+    if (!m_nativeSize.isValid() || !m_surface)
        return;

-    renderFrameToFbo();
-
-    QAbstractVideoBuffer *buffer = 0;
-    QVideoFrame frame;
-
-    if (m_useImage) {
-        buffer = new ImageVideoBuffer(m_fbo->toImage().mirrored());
-        frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_RGB32);
-    } else {
-        buffer = new TextureVideoBuffer(m_fbo->texture());
-        frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
-    }
-
-    if (m_surface && frame.isValid()) {
+    QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
+    QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
+
    if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
                                  || m_surface->nativeResolution() != frame.size())) {
        m_surface->stop();
    }

    if (!m_surface->isActive()) {
-        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(),
-                                   m_useImage ? QAbstractVideoBuffer::NoHandle
-                                              : QAbstractVideoBuffer::GLTextureHandle);
+        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
+        format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
        m_surface->start(format);
    }
@@ -354,85 +253,17 @@ void QAndroidVideoRendererControl::onFrameAvailable()
    if (m_surface->isActive())
        m_surface->present(frame);
    }
-}

-void QAndroidVideoRendererControl::renderFrameToFbo()
-{
-    createGLResources();
-
-    m_fbo->bind();
-
-    glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height());
-
-    m_program->bind();
-    m_program->enableAttributeArray(0);
-    m_program->enableAttributeArray(1);
-    m_program->setUniformValue("frameTexture", GLuint(0));
-    m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());
-
-    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
-    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);
-
-    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
-
-    m_program->disableAttributeArray(0);
-    m_program->disableAttributeArray(1);
-    m_program->release();
-
-    glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
-    m_fbo->release();
-
-    glFinish();
-}
-
-void QAndroidVideoRendererControl::createGLResources()
-{
-    if (!m_fbo)
-        m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
-
-    if (!m_program) {
-        m_program = new QOpenGLShaderProgram;
-
-        QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
-        vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
-                                        "attribute highp vec2 textureCoordArray; \n" \
-                                        "uniform highp mat4 texMatrix; \n" \
-                                        "varying highp vec2 textureCoords; \n" \
-                                        "void main(void) \n" \
-                                        "{ \n" \
-                                        " gl_Position = vertexCoordsArray; \n" \
-                                        " textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
-                                        "}\n");
-        m_program->addShader(vertexShader);
-
-        QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
-        fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
-                                          "varying highp vec2 textureCoords; \n" \
-                                          "uniform samplerExternalOES frameTexture; \n" \
-                                          "void main() \n" \
-                                          "{ \n" \
-                                          " gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
-                                          "}\n");
-        m_program->addShader(fragmentShader);
-
-        m_program->bindAttributeLocation("vertexCoordsArray", 0);
-        m_program->bindAttributeLocation("textureCoordArray", 1);
-        m_program->link();
-    }
-}
-
-bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
-{
-    if (e->type() == QEvent::DynamicPropertyChange) {
-        QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
-        if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
-            m_textureReadyCallback(m_textureReadyContext);
-            m_textureReadyCallback = 0;
-            m_textureReadyContext = 0;
-        }
-    }
-    return false;
-}
+void QAndroidVideoRendererControl::customEvent(QEvent *e)
+{
+    if (e->type() == QEvent::User) {
+        // This is running in the render thread (OpenGL enabled)
+        if (!m_externalTex) {
+            glGenTextures(1, &m_externalTex);
+            m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
+            emit readyChanged(true);
+        }
+    }
+}

QT_END_NAMESPACE

@@ -48,15 +48,23 @@
QT_BEGIN_NAMESPACE

-class QOpenGLContext;
-class QOffscreenSurface;
-class QOpenGLFramebufferObject;
-class QOpenGLShaderProgram;
class JSurfaceTextureHolder;

+class TextureDeleter : public QObject
+{
+    Q_OBJECT
+public:
+    TextureDeleter(uint id) : m_id(id) { }
+    ~TextureDeleter();
+
+private:
+    uint m_id;
+};

class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
{
    Q_OBJECT
+    Q_INTERFACES(QAndroidVideoOutput)
public:
    explicit QAndroidVideoRendererControl(QObject *parent = 0);
    ~QAndroidVideoRendererControl() Q_DECL_OVERRIDE;

@@ -65,38 +73,31 @@ public:
    void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;

    jobject surfaceHolder() Q_DECL_OVERRIDE;
-    bool isTextureReady() Q_DECL_OVERRIDE;
-    void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
-
    jobject surfaceTexture() Q_DECL_OVERRIDE;
+    bool isReady() Q_DECL_OVERRIDE;

    void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
    void stop() Q_DECL_OVERRIDE;
    QImage toImage() Q_DECL_OVERRIDE;

-    bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE;
+    void customEvent(QEvent *) Q_DECL_OVERRIDE;
+
+Q_SIGNALS:
+    void readyChanged(bool);

private Q_SLOTS:
    void onFrameAvailable();

private:
    bool initSurfaceTexture();
-    void renderFrameToFbo();
-    void createGLResources();

    QAbstractVideoSurface *m_surface;
-    QOffscreenSurface *m_offscreenSurface;
-    QOpenGLContext *m_glContext;
-    QOpenGLFramebufferObject *m_fbo;
-    QOpenGLShaderProgram *m_program;
-    bool m_useImage;
    QSize m_nativeSize;

    QJNIObjectPrivate *m_androidSurface;
    JSurfaceTexture *m_surfaceTexture;
    JSurfaceTextureHolder *m_surfaceHolder;
    uint m_externalTex;
-
-    TextureReadyCallback m_textureReadyCallback;
-    void *m_textureReadyContext;
+    TextureDeleter *m_textureDeleter;
};

QT_END_NAMESPACE

@@ -52,12 +52,6 @@
QT_BEGIN_NAMESPACE

-static void textureReadyCallback(void *context)
-{
-    if (context)
-        reinterpret_cast<QAndroidCameraSession *>(context)->onSurfaceTextureReady();
-}
-
QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
    : QObject(parent)
    , m_selectedCamera(0)

@@ -188,12 +182,17 @@ void QAndroidCameraSession::close()
    emit statusChanged(m_status);
}

-void QAndroidCameraSession::setVideoPreview(QAndroidVideoOutput *videoOutput)
+void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
{
    if (m_videoOutput)
        m_videoOutput->stop();

-    m_videoOutput = videoOutput;
+    if (videoOutput) {
+        connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
+        m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
+    } else {
+        m_videoOutput = 0;
+    }
}

void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview)

@@ -243,12 +242,8 @@ void QAndroidCameraSession::startPreview()
    applyImageSettings();
    adjustViewfinderSize(m_imageSettings.resolution());

-    if (m_videoOutput) {
-        if (m_videoOutput->isTextureReady())
-            m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
-        else
-            m_videoOutput->setTextureReadyCallback(textureReadyCallback, this);
-    }
+    if (m_videoOutput && m_videoOutput->isReady())
+        onVideoOutputReady(true);

    JMultimediaUtils::enableOrientationListener(true);

@@ -522,9 +517,9 @@ void QAndroidCameraSession::processCapturedImage(int id,
    }
}

-void QAndroidCameraSession::onSurfaceTextureReady()
+void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
-    if (m_camera && m_videoOutput)
+    if (m_camera && m_videoOutput && ready)
        m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
}

@@ -71,7 +71,7 @@ public:
    void setCaptureMode(QCamera::CaptureModes mode);
    bool isCaptureModeSupported(QCamera::CaptureModes mode) const;

-    void setVideoPreview(QAndroidVideoOutput *videoOutput);
+    void setVideoPreview(QObject *videoOutput);
    void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true);

    QImageEncoderSettings imageSettings() const { return m_imageSettings; }

@@ -88,8 +88,6 @@ public:
    int capture(const QString &fileName);
    void cancelCapture();

-    void onSurfaceTextureReady();
-
    int currentCameraRotation() const;

Q_SIGNALS:

@@ -110,6 +108,8 @@ Q_SIGNALS:
    void imageCaptureError(int id, int error, const QString &errorString);

private Q_SLOTS:
+    void onVideoOutputReady(bool ready);
+
    void onApplicationStateChanged(Qt::ApplicationState state);

    void onCameraPictureExposed();

@@ -88,7 +88,7 @@ private:
    QAndroidVideoDeviceSelectorControl *m_videoInputControl;
    QAndroidAudioInputSelectorControl *m_audioInputControl;
    QAndroidCameraSession *m_cameraSession;
-    QAndroidVideoRendererControl *m_videoRendererControl;
+    QMediaControl *m_videoRendererControl;
    QAndroidCameraZoomControl *m_cameraZoomControl;
    QAndroidCameraExposureControl *m_cameraExposureControl;
    QAndroidCameraFlashControl *m_cameraFlashControl;

@@ -45,12 +45,6 @@
QT_BEGIN_NAMESPACE

-static void textureReadyCallback(void *context)
-{
-    if (context)
-        reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
-}
-
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
    : QMediaPlayerControl(parent),
      mMediaPlayer(new JMediaPlayer),

@@ -241,18 +235,18 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
    setSeekable(true);
}

-void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput)
+void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
{
    if (mVideoOutput)
        mVideoOutput->stop();

-    mVideoOutput = videoOutput;
+    mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);

    if (mVideoOutput && !mMediaPlayer->display()) {
-        if (mVideoOutput->isTextureReady())
+        if (mVideoOutput->isReady())
            mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
        else
-            mVideoOutput->setTextureReadyCallback(textureReadyCallback, this);
+            connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
    }
}

@@ -426,9 +420,9 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
    mVideoOutput->setVideoSize(mVideoSize);
}

-void QAndroidMediaPlayerControl::onSurfaceTextureReady()
+void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
{
-    if (!mMediaPlayer->display() && mVideoOutput) {
+    if (!mMediaPlayer->display() && mVideoOutput && ready) {
        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
        flushPendingStates();
    }

@@ -75,8 +75,7 @@ public:
    const QIODevice *mediaStream() const Q_DECL_OVERRIDE;
    void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;

-    void setVideoOutput(QAndroidVideoOutput *videoOutput);
-    void onSurfaceTextureReady();
+    void setVideoOutput(QObject *videoOutput);

Q_SIGNALS:
    void metaDataUpdated();

@@ -90,6 +89,7 @@ public Q_SLOTS:
    void setMuted(bool muted) Q_DECL_OVERRIDE;

private Q_SLOTS:
+    void onVideoOutputReady(bool ready);
    void onError(qint32 what, qint32 extra);
    void onInfo(qint32 what, qint32 extra);
    void onMediaPlayerInfo(qint32 what, qint32 extra);

@@ -48,7 +48,6 @@ QT_BEGIN_NAMESPACE
class QAndroidMediaPlayerControl;
class QAndroidMetaDataReaderControl;
-class QAndroidVideoRendererControl;

class QAndroidMediaService : public QMediaService
{

@@ -63,7 +62,7 @@ public:
private:
    QAndroidMediaPlayerControl *mMediaControl;
    QAndroidMetaDataReaderControl *mMetadataControl;
-    QAndroidVideoRendererControl *mVideoRendererControl;
+    QMediaControl *mVideoRendererControl;
};

QT_END_NAMESPACE

@@ -56,6 +56,7 @@ public:
    explicit JSurfaceTexture(unsigned int texName);
    ~JSurfaceTexture();

+    int textureID() const { return m_texID; }
    QMatrix4x4 getTransformMatrix();
    void updateTexImage();

@@ -0,0 +1,3 @@
{
"Keys": ["sgvideonodes"]
}

@@ -0,0 +1,204 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonode.h"
#include <qsgmaterial.h>
#include <qmutex.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeMaterialShader : public QSGMaterialShader
{
public:
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
const char *vertexShader() const {
return
"uniform highp mat4 qt_Matrix; \n"
"uniform highp mat4 texMatrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = (texMatrix * vec4(qt_VertexTexCoord, 0.0, 1.0)).xy; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
}
const char *fragmentShader() const {
return
"#extension GL_OES_EGL_image_external : require \n"
"uniform samplerExternalOES videoTexture; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D(videoTexture, qt_TexCoord) * opacity; \n"
"}";
}
void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_texMatrix = program()->uniformLocation("texMatrix");
m_id_texture = program()->uniformLocation("videoTexture");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_texMatrix;
int m_id_texture;
int m_id_opacity;
};
class QAndroidSGVideoNodeMaterial : public QSGMaterial
{
public:
QAndroidSGVideoNodeMaterial()
: m_textureId(0)
{
setFlag(Blending, false);
}
~QAndroidSGVideoNodeMaterial()
{
m_frame = QVideoFrame();
}
QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
QSGMaterialShader *createShader() const {
return new QAndroidSGVideoNodeMaterialShader;
}
int compare(const QSGMaterial *other) const {
const QAndroidSGVideoNodeMaterial *m = static_cast<const QAndroidSGVideoNodeMaterial *>(other);
return m_textureId - m->m_textureId;
}
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
bool updateTexture()
{
QMutexLocker lock(&m_frameMutex);
bool texMatrixDirty = false;
if (m_frame.isValid()) {
QVariantList list = m_frame.handle().toList();
GLuint texId = list.at(0).toUInt();
QMatrix4x4 mat = qvariant_cast<QMatrix4x4>(list.at(1));
texMatrixDirty = texId != m_textureId || mat != m_texMatrix;
m_textureId = texId;
m_texMatrix = mat;
// the texture is already bound and initialized at this point,
// no need to call glTexParams
} else {
m_textureId = 0;
}
return texMatrixDirty;
}
QVideoFrame m_frame;
QMutex m_frameMutex;
GLuint m_textureId;
QMatrix4x4 m_texMatrix;
};
void QAndroidSGVideoNodeMaterialShader::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QAndroidSGVideoNodeMaterial *mat = static_cast<QAndroidSGVideoNodeMaterial *>(newMaterial);
program()->setUniformValue(m_id_texture, 0);
if (mat->updateTexture())
program()->setUniformValue(m_id_texMatrix, mat->m_texMatrix);
if (state.isOpacityDirty())
program()->setUniformValue(m_id_opacity, state.opacity());
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}
QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
: m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QAndroidSGVideoNodeMaterial;
setMaterial(m_material);
}
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
QVideoFrame::PixelFormat QAndroidSGVideoNode::pixelFormat() const
{
return m_format.pixelFormat();
}
QT_END_NAMESPACE

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODE_H
#define QANDROIDSGVIDEONODE_H

#include <private/qsgvideonode_p.h>

QT_BEGIN_NAMESPACE

class QAndroidSGVideoNodeMaterial;

class QAndroidSGVideoNode : public QSGVideoNode
{
public:
    QAndroidSGVideoNode(const QVideoSurfaceFormat &format);

    void setCurrentFrame(const QVideoFrame &frame);
    QVideoFrame::PixelFormat pixelFormat() const;

private:
    QVideoSurfaceFormat m_format;
    QAndroidSGVideoNodeMaterial *m_material;
    QVideoFrame m_frame;
};

QT_END_NAMESPACE

#endif // QANDROIDSGVIDEONODE_H

@@ -0,0 +1,69 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonodeplugin.h"
#include "qandroidsgvideonode.h"
QT_BEGIN_NAMESPACE
#define ExternalGLTextureHandle (QAbstractVideoBuffer::UserHandle + 1)
QList<QVideoFrame::PixelFormat> QAndroidSGVideoNodeFactoryPlugin::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == ExternalGLTextureHandle)
pixelFormats.append(QVideoFrame::Format_BGR32);
return pixelFormats;
}
QSGVideoNode *QAndroidSGVideoNodeFactoryPlugin::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QAndroidSGVideoNode(format);
return 0;
}
QT_END_NAMESPACE

@@ -0,0 +1,62 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODEPLUGIN_H
#define QANDROIDSGVIDEONODEPLUGIN_H

#include <private/qsgvideonode_p.h>

QT_BEGIN_NAMESPACE

class QAndroidSGVideoNodeFactoryPlugin : public QSGVideoNodeFactoryPlugin
{
    Q_OBJECT
    Q_PLUGIN_METADATA(IID QSGVideoNodeFactoryInterface_iid
                      FILE "android_videonode.json")

public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
    QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};

QT_END_NAMESPACE

#endif // QANDROIDSGVIDEONODEPLUGIN_H

@@ -0,0 +1,16 @@
TARGET = qtsgvideonode_android
QT += quick multimedia-private qtmultimediaquicktools-private
PLUGIN_TYPE = video/videonode
PLUGIN_CLASS_NAME = QAndroidSGVideoNodeFactoryPlugin
load(qt_plugin)
HEADERS += \
qandroidsgvideonodeplugin.h \
qandroidsgvideonode.h
SOURCES += \
qandroidsgvideonodeplugin.cpp \
qandroidsgvideonode.cpp
OTHER_FILES += android_videonode.json