Merge remote-tracking branch 'origin/release' into stable
Change-Id: Ifcb0e0bac29c11980ea2a9d67b12e1b79c4bd9f6
@@ -49,22 +49,39 @@
#include <qcoreapplication.h>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
#include <qopenglshaderprogram.h>
#include <qopenglframebufferobject.h>

QT_BEGIN_NAMESPACE

#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
static const GLfloat g_vertex_data[] = {
-1.f, 1.f,
1.f, 1.f,
1.f, -1.f,
-1.f, -1.f
};

TextureDeleter::~TextureDeleter()
static const GLfloat g_texture_data[] = {
0.f, 0.f,
1.f, 0.f,
1.f, 1.f,
0.f, 1.f
};

OpenGLResourcesDeleter::~OpenGLResourcesDeleter()
{
glDeleteTextures(1, &m_id);
glDeleteTextures(1, &m_textureID);
delete m_fbo;
delete m_program;
}

class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
AndroidTextureVideoBuffer(JSurfaceTexture *surface)
: QAbstractVideoBuffer(ExternalGLTextureHandle)
, m_surfaceTexture(surface)
AndroidTextureVideoBuffer(QAndroidVideoRendererControl *control)
: QAbstractVideoBuffer(GLTextureHandle)
, m_control(control)
, m_textureUpdated(false)
{
}
@@ -76,18 +93,18 @@ public:

QVariant handle() const
{
if (m_data.isEmpty()) {
if (!m_textureUpdated) {
// update the video texture (called from the render thread)
m_surfaceTexture->updateTexImage();
m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
m_control->renderFrameToFbo();
m_textureUpdated = true;
}

return m_data;
return m_control->m_fbo->texture();
}

private:
mutable JSurfaceTexture *m_surfaceTexture;
mutable QVariantList m_data;
mutable QAndroidVideoRendererControl *m_control;
mutable bool m_textureUpdated;
};

QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
@@ -97,7 +114,9 @@ QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
, m_surfaceTexture(0)
, m_surfaceHolder(0)
, m_externalTex(0)
, m_textureDeleter(0)
, m_fbo(0)
, m_program(0)
, m_glDeleter(0)
{
}
@@ -117,8 +136,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
delete m_surfaceHolder;
m_surfaceHolder = 0;
}
if (m_textureDeleter)
m_textureDeleter->deleteLater();
if (m_glDeleter)
m_glDeleter->deleteLater();
}

QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
@@ -162,7 +181,8 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
// for the GL render thread to call us back to do it.
if (QOpenGLContext::currentContext()) {
glGenTextures(1, &m_externalTex);
m_textureDeleter = new TextureDeleter(m_externalTex);
m_glDeleter = new OpenGLResourcesDeleter;
m_glDeleter->setTexture(m_externalTex);
} else if (!m_externalTex) {
return false;
}
@@ -174,9 +194,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
m_textureDeleter->deleteLater();
m_glDeleter->deleteLater();
m_externalTex = 0;
m_textureDeleter = 0;
m_glDeleter = 0;
}

return m_surfaceTexture != 0;
@@ -208,6 +228,8 @@ jobject QAndroidVideoRendererControl::surfaceTexture()

void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
{
QMutexLocker locker(&m_mutex);

if (m_nativeSize == size)
return;
@@ -228,7 +250,7 @@ void QAndroidVideoRendererControl::onFrameAvailable()
if (!m_nativeSize.isValid() || !m_surface)
return;

QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(this);
QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);

if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
@@ -237,8 +259,8 @@ void QAndroidVideoRendererControl::onFrameAvailable()
}

if (!m_surface->isActive()) {
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(),
QAbstractVideoBuffer::GLTextureHandle);

m_surface->start(format);
}
@@ -247,13 +269,114 @@ void QAndroidVideoRendererControl::onFrameAvailable()
m_surface->present(frame);
}

void QAndroidVideoRendererControl::renderFrameToFbo()
{
QMutexLocker locker(&m_mutex);

createGLResources();

m_surfaceTexture->updateTexImage();

// save current render states
GLboolean stencilTestEnabled;
GLboolean depthTestEnabled;
GLboolean scissorTestEnabled;
GLboolean blendEnabled;
glGetBooleanv(GL_STENCIL_TEST, &stencilTestEnabled);
glGetBooleanv(GL_DEPTH_TEST, &depthTestEnabled);
glGetBooleanv(GL_SCISSOR_TEST, &scissorTestEnabled);
glGetBooleanv(GL_BLEND, &blendEnabled);

if (stencilTestEnabled)
glDisable(GL_STENCIL_TEST);
if (depthTestEnabled)
glDisable(GL_DEPTH_TEST);
if (scissorTestEnabled)
glDisable(GL_SCISSOR_TEST);
if (blendEnabled)
glDisable(GL_BLEND);

m_fbo->bind();

glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height());

m_program->bind();
m_program->enableAttributeArray(0);
m_program->enableAttributeArray(1);
m_program->setUniformValue("frameTexture", GLuint(0));
m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());

glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);

glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

m_program->disableAttributeArray(0);
m_program->disableAttributeArray(1);

glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
m_fbo->release();

// restore render states
if (stencilTestEnabled)
glEnable(GL_STENCIL_TEST);
if (depthTestEnabled)
glEnable(GL_DEPTH_TEST);
if (scissorTestEnabled)
glEnable(GL_SCISSOR_TEST);
if (blendEnabled)
glEnable(GL_BLEND);
}
void QAndroidVideoRendererControl::createGLResources()
{
if (!m_fbo || m_fbo->size() != m_nativeSize) {
delete m_fbo;
m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
m_glDeleter->setFbo(m_fbo);
}

if (!m_program) {
m_program = new QOpenGLShaderProgram;

QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
"attribute highp vec2 textureCoordArray; \n" \
"uniform highp mat4 texMatrix; \n" \
"varying highp vec2 textureCoords; \n" \
"void main(void) \n" \
"{ \n" \
" gl_Position = vertexCoordsArray; \n" \
" textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
"}\n");
m_program->addShader(vertexShader);

QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
"varying highp vec2 textureCoords; \n" \
"uniform samplerExternalOES frameTexture; \n" \
"void main() \n" \
"{ \n" \
" gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
"}\n");
m_program->addShader(fragmentShader);

m_program->bindAttributeLocation("vertexCoordsArray", 0);
m_program->bindAttributeLocation("textureCoordArray", 1);
m_program->link();

m_glDeleter->setShaderProgram(m_program);
}
}

void QAndroidVideoRendererControl::customEvent(QEvent *e)
{
if (e->type() == QEvent::User) {
// This is running in the render thread (OpenGL enabled)
if (!m_externalTex) {
glGenTextures(1, &m_externalTex);
m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
m_glDeleter = new OpenGLResourcesDeleter; // will cleanup GL resources in the correct thread
m_glDeleter->setTexture(m_externalTex);
emit readyChanged(true);
}
}
@@ -43,22 +43,37 @@
#define QANDROIDVIDEORENDERCONTROL_H

#include <qvideorenderercontrol.h>
#include <qmutex.h>
#include "qandroidvideooutput.h"
#include "jsurfacetexture.h"

QT_BEGIN_NAMESPACE

class JSurfaceTextureHolder;
class QOpenGLTexture;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;

class TextureDeleter : public QObject
class OpenGLResourcesDeleter : public QObject
{
Q_OBJECT
public:
TextureDeleter(uint id) : m_id(id) { }
~TextureDeleter();
OpenGLResourcesDeleter()
: m_textureID(0)
, m_fbo(0)
, m_program(0)
{ }

~OpenGLResourcesDeleter();

void setTexture(quint32 id) { m_textureID = id; }
void setFbo(QOpenGLFramebufferObject *fbo) { m_fbo = fbo; }
void setShaderProgram(QOpenGLShaderProgram *prog) { m_program = prog; }

private:
uint m_id;
quint32 m_textureID;
QOpenGLFramebufferObject *m_fbo;
QOpenGLShaderProgram *m_program;
};

class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
@@ -88,6 +103,10 @@ private Q_SLOTS:

private:
bool initSurfaceTexture();
void renderFrameToFbo();
void createGLResources();

QMutex m_mutex;

QAbstractVideoSurface *m_surface;
QSize m_nativeSize;
@@ -95,8 +114,13 @@ private:
QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture;
JSurfaceTextureHolder *m_surfaceHolder;
uint m_externalTex;
TextureDeleter *m_textureDeleter;

quint32 m_externalTex;
QOpenGLFramebufferObject *m_fbo;
QOpenGLShaderProgram *m_program;
OpenGLResourcesDeleter *m_glDeleter;

friend class AndroidTextureVideoBuffer;
};

QT_END_NAMESPACE
@@ -46,15 +46,36 @@

QT_BEGIN_NAMESPACE

static QRect adjustedArea(const QRectF &area)
static QPointF rotateNormalizedPoint(const QPointF &point, int rotation)
{
const qreal one(1.0f);

switch (rotation) {
case 0:
default:
return point;
case 90:
return QPointF(point.y(), one - point.x());
case 180:
return QPointF(one - point.x(), one - point.y());
case 270:
return QPointF(one - point.y(), point.x());
}
}

static QRect adjustedArea(const QRectF &area, int rotation)
{
// Qt maps focus points in the range (0.0, 0.0) -> (1.0, 1.0)
// Android maps focus points in the range (-1000, -1000) -> (1000, 1000)
// Converts an area in Qt coordinates to Android coordinates
return QRect(-1000 + qRound(area.x() * 2000),
-1000 + qRound(area.y() * 2000),
qRound(area.width() * 2000),
qRound(area.height() * 2000))
// Applies 'rotation' in the counter-clockwise direction
QRectF rotated(rotateNormalizedPoint(area.topLeft(), rotation),
rotateNormalizedPoint(area.bottomRight(), rotation));

return QRect(-1000 + qRound(rotated.x() * 2000),
-1000 + qRound(rotated.y() * 2000),
qRound(rotated.width() * 2000),
qRound(rotated.height() * 2000))
.intersected(QRect(-1000, -1000, 2000, 2000));
}
@@ -242,6 +263,9 @@ void QAndroidCameraFocusControl::updateFocusZones(QCameraFocusZone::FocusZoneSta
if (!viewportSize.isValid())
return;

if (m_session->camera()->getDisplayOrientation() % 180)
viewportSize.transpose();

QSizeF focusSize(50.f / viewportSize.width(), 50.f / viewportSize.height());
float x = qBound(qreal(0),
m_actualFocusPoint.x() - (focusSize.width() / 2),
@@ -264,8 +288,13 @@ void QAndroidCameraFocusControl::setCameraFocusArea()
// in FocusPointAuto mode, leave the area list empty
// to let the driver choose the focus point.

for (int i = 0; i < m_focusZones.size(); ++i)
areas.append(adjustedArea(m_focusZones.at(i).area()));
for (int i = 0; i < m_focusZones.size(); ++i) {
// The area passed to Android should be in sensor orientation.
// What we have in m_focusZones is in viewport orientation, so revert the rotation set
// on the viewport to get sensor coordinates.
areas.append(adjustedArea(m_focusZones.at(i).area(),
m_session->camera()->getDisplayOrientation()));
}

}
m_session->camera()->setFocusAreas(areas);
@@ -151,6 +151,7 @@ class JCameraWorker : public QObject, public QJNIObjectPrivate
friend class JCamera;

JCameraWorker(JCamera *camera, int cameraId, jobject cam, QThread *workerThread);
~JCameraWorker();

Q_INVOKABLE void release();

@@ -230,6 +231,7 @@ class JCameraWorker : public QObject, public QJNIObjectPrivate

QSize m_previewSize;
int m_rotation;
int m_displayOrientation;

bool m_hasAPI14;

@@ -275,9 +277,7 @@ JCamera::~JCamera()
g_objectMap.remove(d->m_cameraId);
g_objectMapMutex.unlock();
}
QThread *workerThread = d->m_workerThread;
d->deleteLater();
workerThread->quit();
}

JCamera *JCamera::open(int cameraId)
@@ -337,8 +337,14 @@ int JCamera::getNativeOrientation()
return d->getNativeOrientation();
}

int JCamera::getDisplayOrientation() const
{
return d->m_displayOrientation;
}

void JCamera::setDisplayOrientation(int degrees)
{
d->m_displayOrientation = degrees;
QMetaObject::invokeMethod(d, "setDisplayOrientation", Q_ARG(int, degrees));
}

@@ -372,7 +378,7 @@ void JCamera::setPreviewSize(const QSize &size)
d->m_parametersMutex.lock();
bool areParametersValid = d->m_parameters.isValid();
d->m_parametersMutex.unlock();
if (!areParametersValid || !size.isValid())
if (!areParametersValid)
return;

d->m_previewSize = size;
@@ -620,6 +626,7 @@ JCameraWorker::JCameraWorker(JCamera *camera, int cameraId, jobject cam, QThread
, QJNIObjectPrivate(cam)
, m_cameraId(cameraId)
, m_rotation(0)
, m_displayOrientation(0)
, m_hasAPI14(false)
, m_parametersMutex(QMutex::Recursive)
{
@@ -661,6 +668,11 @@ JCameraWorker::JCameraWorker(JCamera *camera, int cameraId, jobject cam, QThread
}
}

JCameraWorker::~JCameraWorker()
{
m_workerThread->quit();
}

void JCameraWorker::release()
{
m_previewSize = QSize();

@@ -88,6 +88,7 @@ public:
CameraFacing getFacing();
int getNativeOrientation();

int getDisplayOrientation() const;
void setDisplayOrientation(int degrees);

QSize getPreferredPreviewSizeForVideo();
@@ -61,41 +61,42 @@ public:
}

protected:

const char *vertexShader() const {
return
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"uniform highp mat4 texMatrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = (texMatrix * vec4(qt_VertexTexCoord, 0.0, 1.0)).xy; \n"
" qt_TexCoord = qt_VertexTexCoord; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}

const char *fragmentShader() const {
return
"#extension GL_OES_EGL_image_external : require \n"
"uniform samplerExternalOES videoTexture; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D(videoTexture, qt_TexCoord) * opacity; \n"
static const char *shader =
"uniform sampler2D rgbTexture;"
"uniform lowp float opacity;"
""
"varying highp vec2 qt_TexCoord;"
""
"void main()"
"{"
" gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
"}";
return shader;
}

void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_texMatrix = program()->uniformLocation("texMatrix");
m_id_texture = program()->uniformLocation("videoTexture");
m_id_Texture = program()->uniformLocation("rgbTexture");
m_id_opacity = program()->uniformLocation("opacity");
}

int m_id_matrix;
int m_id_texMatrix;
int m_id_texture;
int m_id_Texture;
int m_id_opacity;
};

@@ -104,15 +105,12 @@ class QAndroidSGVideoNodeMaterial : public QSGMaterial
public:
QAndroidSGVideoNodeMaterial()
: m_textureId(0)
, m_textureUpdated(false)
, m_opacity(1.0)
{
setFlag(Blending, false);
}

~QAndroidSGVideoNodeMaterial()
{
m_frame = QVideoFrame();
}

QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
@@ -124,81 +122,93 @@ public:

int compare(const QSGMaterial *other) const {
const QAndroidSGVideoNodeMaterial *m = static_cast<const QAndroidSGVideoNodeMaterial *>(other);
return m_textureId - m->m_textureId;
int diff = m_textureId - m->m_textureId;
if (diff)
return diff;

return (m_opacity > m->m_opacity) ? 1 : -1;
}

void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
void updateBlending() {
setFlag(Blending, qFuzzyCompare(m_opacity, qreal(1.0)) ? false : true);
}

bool updateTexture()
void updateTexture(GLuint id, const QSize &size) {
if (m_textureId != id || m_textureSize != size) {
m_textureId = id;
m_textureSize = size;
m_textureUpdated = true;
}
}

void bind()
{
glBindTexture(GL_TEXTURE_2D, m_textureId);
if (m_textureUpdated) {
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
m_textureUpdated = false;
}
}

QSize m_textureSize;
GLuint m_textureId;
bool m_textureUpdated;
qreal m_opacity;
};


QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
: m_format(format)
{
setFlags(OwnsMaterial | UsePreprocess);
m_material = new QAndroidSGVideoNodeMaterial;
setMaterial(m_material);
}

QAndroidSGVideoNode::~QAndroidSGVideoNode()
{
m_frame = QVideoFrame();
}

void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
{
QMutexLocker lock(&m_frameMutex);
bool texMatrixDirty = false;

if (m_frame.isValid()) {
QVariantList list = m_frame.handle().toList();

GLuint texId = list.at(0).toUInt();
QMatrix4x4 mat = qvariant_cast<QMatrix4x4>(list.at(1));

texMatrixDirty = texId != m_textureId || mat != m_texMatrix;

m_textureId = texId;
m_texMatrix = mat;

// the texture is already bound and initialized at this point,
// no need to call glTexParams

} else {
m_textureId = 0;
m_frame = frame;
markDirty(DirtyMaterial);
}

return texMatrixDirty;
}

QVideoFrame m_frame;
QMutex m_frameMutex;
GLuint m_textureId;
QMatrix4x4 m_texMatrix;
};

void QAndroidSGVideoNodeMaterialShader::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QAndroidSGVideoNodeMaterial *mat = static_cast<QAndroidSGVideoNodeMaterial *>(newMaterial);
program()->setUniformValue(m_id_texture, 0);
program()->setUniformValue(m_id_Texture, 0);

if (mat->updateTexture())
program()->setUniformValue(m_id_texMatrix, mat->m_texMatrix);
mat->bind();

if (state.isOpacityDirty())
program()->setUniformValue(m_id_opacity, state.opacity());
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
mat->updateBlending();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}

if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
: m_format(format)
void QAndroidSGVideoNode::preprocess()
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QAndroidSGVideoNodeMaterial;
setMaterial(m_material);
}
QMutexLocker lock(&m_frameMutex);

void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
GLuint texId = 0;
if (m_frame.isValid())
texId = m_frame.handle().toUInt();

QVideoFrame::PixelFormat QAndroidSGVideoNode::pixelFormat() const
{
return m_format.pixelFormat();
m_material->updateTexture(texId, m_frame.size());
}

QT_END_NAMESPACE
@@ -43,6 +43,7 @@
#define QANDROIDSGVIDEONODE_H

#include <private/qsgvideonode_p.h>
#include <qmutex.h>

QT_BEGIN_NAMESPACE

@@ -52,14 +53,18 @@ class QAndroidSGVideoNode : public QSGVideoNode
{
public:
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
~QAndroidSGVideoNode();

void setCurrentFrame(const QVideoFrame &frame);
QVideoFrame::PixelFormat pixelFormat() const;
QVideoFrame::PixelFormat pixelFormat() const { return m_format.pixelFormat(); }

void preprocess();

private:
QVideoSurfaceFormat m_format;
QAndroidSGVideoNodeMaterial *m_material;
QMutex m_frameMutex;
QVideoFrame m_frame;
QVideoSurfaceFormat m_format;
};

QT_END_NAMESPACE

@@ -44,14 +44,12 @@

QT_BEGIN_NAMESPACE

#define ExternalGLTextureHandle (QAbstractVideoBuffer::UserHandle + 1)

QList<QVideoFrame::PixelFormat> QAndroidSGVideoNodeFactoryPlugin::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;

if (handleType == ExternalGLTextureHandle)
if (handleType == QAbstractVideoBuffer::GLTextureHandle)
pixelFormats.append(QVideoFrame::Format_BGR32);

return pixelFormats;
@@ -29,7 +29,8 @@ HEADERS += \
$$PWD/camerabinvideoencoder.h \
$$PWD/camerabinresourcepolicy.h \
$$PWD/camerabincapturedestination.h \
$$PWD/camerabincapturebufferformat.h
$$PWD/camerabincapturebufferformat.h \
$$PWD/camerabinviewfindersettings.h

SOURCES += \
$$PWD/camerabinserviceplugin.cpp \
@@ -46,6 +47,7 @@ SOURCES += \
$$PWD/camerabinvideoencoder.cpp \
$$PWD/camerabinresourcepolicy.cpp \
$$PWD/camerabincapturedestination.cpp \
$$PWD/camerabinviewfindersettings.cpp \
$$PWD/camerabincapturebufferformat.cpp

maemo6 {
@@ -106,16 +106,17 @@ void CameraBinAudioEncoder::resetActualSettings()
GstEncodingProfile *CameraBinAudioEncoder::createProfile()
{
QString codec = m_actualAudioSettings.codec();
QString preset = m_actualAudioSettings.encodingOption(QStringLiteral("preset")).toString();
GstCaps *caps;

if (codec.isEmpty())
caps = gst_caps_new_any();
return 0;
else
caps = gst_caps_from_string(codec.toLatin1());

return (GstEncodingProfile *)gst_encoding_audio_profile_new(
caps,
NULL, //preset
!preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
NULL, //restriction
0); //presence
}
@@ -115,10 +115,11 @@ void CameraBinControl::setCaptureMode(QCamera::CaptureModes mode)
captureMode() == QCamera::CaptureStillImage ?
CamerabinResourcePolicy::ImageCaptureResources :
CamerabinResourcePolicy::VideoCaptureResources);

#if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 23))
//due to bug in v4l2src, it's necessary to reload camera on video caps changes
//https://bugzilla.gnome.org/show_bug.cgi?id=649832
reloadLater();
#endif
}
emit captureModeChanged(mode);
}
@@ -54,12 +54,43 @@ struct QGstreamerMetaDataKeyLookup
const char *token;
};

static QVariant fromGStreamerOrientation(const QVariant &value)
{
// Note gstreamer tokens either describe the counter clockwise rotation of the
// image or the clockwise transform to apply to correct the image. The orientation
// value returned is the clockwise rotation of the image.
const QString token = value.toString();
if (token == QStringLiteral("rotate-90"))
return 270;
else if (token == QStringLiteral("rotate-180"))
return 180;
else if (token == QStringLiteral("rotate-270"))
return 90;
else
return 0;
}

static QVariant toGStreamerOrientation(const QVariant &value)
{
switch (value.toInt()) {
case 90:
return QStringLiteral("rotate-270");
case 180:
return QStringLiteral("rotate-180");
case 270:
return QStringLiteral("rotate-90");
default:
return QStringLiteral("rotate-0");
}
}

static const QGstreamerMetaDataKeyLookup qt_gstreamerMetaDataKeys[] =
{
{ QMediaMetaData::Title, GST_TAG_TITLE },
//{ QMediaMetaData::SubTitle, 0 },
//{ QMediaMetaData::Author, 0 },
{ QMediaMetaData::Comment, GST_TAG_COMMENT },
{ QMediaMetaData::Date, GST_TAG_DATE_TIME },
{ QMediaMetaData::Description, GST_TAG_DESCRIPTION },
//{ QMediaMetaData::Category, 0 },
{ QMediaMetaData::Genre, GST_TAG_GENRE },
@@ -120,7 +151,9 @@ static const QGstreamerMetaDataKeyLookup qt_gstreamerMetaDataKeys[] =
//{ QMediaMetaData::CameraManufacturer, 0 },
//{ QMediaMetaData::CameraModel, 0 },
//{ QMediaMetaData::Event, 0 },
//{ QMediaMetaData::Subject, 0 }
//{ QMediaMetaData::Subject, 0 },

{ QMediaMetaData::Orientation, GST_TAG_IMAGE_ORIENTATION }
};

CameraBinMetaData::CameraBinMetaData(QObject *parent)
@@ -130,6 +163,10 @@ CameraBinMetaData::CameraBinMetaData(QObject *parent)

QVariant CameraBinMetaData::metaData(const QString &key) const
{
if (key == QMediaMetaData::Orientation) {
return fromGStreamerOrientation(m_values.value(QByteArray(GST_TAG_IMAGE_ORIENTATION)));
}

static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);

for (int i = 0; i < count; ++i) {
@@ -144,6 +181,15 @@ QVariant CameraBinMetaData::metaData(const QString &key) const

void CameraBinMetaData::setMetaData(const QString &key, const QVariant &value)
{
if (key == QMediaMetaData::Orientation) {
m_values.insert(QByteArray(GST_TAG_IMAGE_ORIENTATION), toGStreamerOrientation(value));

emit QMetaDataWriterControl::metaDataChanged();
emit metaDataChanged(m_values);

return;
}

static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);

for (int i = 0; i < count; ++i) {
@@ -191,7 +191,9 @@ GstEncodingContainerProfile *CameraBinRecorder::videoProfile()
GstEncodingProfile *audioProfile = m_session->audioEncodeControl()->createProfile();
GstEncodingProfile *videoProfile = m_session->videoEncodeControl()->createProfile();

if (audioProfile)
gst_encoding_container_profile_add_profile(containerProfile, audioProfile);
if (videoProfile)
gst_encoding_container_profile_add_profile(containerProfile, videoProfile);
}

@@ -61,6 +61,7 @@
#include "camerabinimageprocessing.h"
#include "camerabincapturebufferformat.h"
#include "camerabincapturedestination.h"
#include "camerabinviewfindersettings.h"
#include <private/qgstreamerbushelper_p.h>

#include <private/qgstreameraudioinputselector_p.h>
@@ -240,6 +241,9 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (qstrcmp(name, QCameraCaptureBufferFormatControl_iid) == 0)
return m_captureSession->captureBufferFormatControl();

if (qstrcmp(name, QCameraViewfinderSettingsControl_iid) == 0)
return m_captureSession->viewfinderSettingsControl();

return 0;
}

@@ -55,6 +55,7 @@
#endif

#include "camerabinimageprocessing.h"
#include "camerabinviewfindersettings.h"

#include "camerabincapturedestination.h"
#include "camerabincapturebufferformat.h"
@@ -75,6 +76,7 @@
#include <QtGui/qdesktopservices.h>

#include <QtGui/qimage.h>
#include <QtCore/qdatetime.h>

//#define CAMERABIN_DEBUG 1
//#define CAMERABIN_DEBUG_DUMP_BIN 1
@@ -91,6 +93,8 @@
#define AUDIO_SOURCE_PROPERTY "audio-source"
#define SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-supported-caps"
#define SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-supported-caps"
#define SUPPORTED_VIEWFINDER_CAPS_PROPERTY "viewfinder-supported-caps"
#define AUDIO_CAPTURE_CAPS_PROPERTY "audio-capture-caps"
#define IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-caps"
#define VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-caps"
#define VIEWFINDER_CAPS_PROPERTY "viewfinder-caps"
@@ -110,10 +114,6 @@
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"

#define VIEWFINDER_RESOLUTION_4x3 QSize(640, 480)
#define VIEWFINDER_RESOLUTION_3x2 QSize(720, 480)
#define VIEWFINDER_RESOLUTION_16x9 QSize(800, 450)

//using GST_STATE_READY for QCamera::LoadedState
//may not work reliably at least with some webcams.
@@ -170,6 +170,7 @@ CameraBinSession::CameraBinSession(QObject *parent)
m_imageProcessingControl = new CameraBinImageProcessing(this);
m_captureDestinationControl = new CameraBinCaptureDestination(this);
m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this);
m_viewfinderSettingsControl = new CameraBinViewfinderSettings(this);

QByteArray envFlags = qgetenv("QT_GSTREAMER_CAMERABIN_FLAGS");
if (!envFlags.isEmpty())
@@ -246,8 +247,7 @@ bool CameraBinSession::setupCameraBin()
return true;
}

static GstCaps *resolutionToCaps(const QSize &resolution,
const QPair<int, int> &rate = qMakePair<int,int>(0,0))
static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int> &rate = qMakePair<int,int>(0,0))
{
if (resolution.isEmpty())
return gst_caps_new_any();
@@ -263,7 +263,23 @@ static GstCaps *resolutionToCaps(const QSize &resolution,
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL), NULL);
NULL),
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
gst_structure_new("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
"framerate", GST_TYPE_FRACTION, rate.first, rate.second,
NULL),
NULL);
} else {
caps = gst_caps_new_full (gst_structure_new ("video/x-raw-yuv",
"width", G_TYPE_INT, resolution.width(),
@@ -271,28 +287,29 @@ static GstCaps *resolutionToCaps(const QSize &resolution,
NULL),
gst_structure_new ("video/x-raw-rgb",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(), NULL), NULL);
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new("video/x-raw-data",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("video/x-android-buffer",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
gst_structure_new ("image/jpeg",
"width", G_TYPE_INT, resolution.width(),
"height", G_TYPE_INT, resolution.height(),
NULL),
NULL);
}

return caps;
}

void CameraBinSession::setupCaptureResolution()
{
if (m_captureMode == QCamera::CaptureStillImage) {
QSize resolution = m_imageEncodeControl->imageSettings().resolution();

//by default select the maximum supported resolution
if (resolution.isEmpty()) {
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
QCamera::CaptureStillImage);
if (!resolutions.isEmpty())
resolution = resolutions.last();
}

QSize viewfinderResolution = VIEWFINDER_RESOLUTION_4x3;

if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
@@ -300,59 +317,62 @@ void CameraBinSession::setupCaptureResolution()
#endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);

if (!resolution.isEmpty()) {
qreal aspectRatio = qreal(resolution.width()) / resolution.height();
if (aspectRatio < 1.4)
viewfinderResolution = VIEWFINDER_RESOLUTION_4x3;
else if (aspectRatio > 1.7)
viewfinderResolution = VIEWFINDER_RESOLUTION_16x9;
else
viewfinderResolution = VIEWFINDER_RESOLUTION_3x2;
}
} else {
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, GST_CAPS_ANY, NULL);
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, NULL, NULL);
}

//on low res cameras the viewfinder resolution should not be bigger
//then capture resolution
if (viewfinderResolution.width() > resolution.width() && !resolution.isEmpty())
viewfinderResolution = resolution;

GstCaps *viewfinderCaps = resolutionToCaps(viewfinderResolution);
#if CAMERABIN_DEBUG
qDebug() << "Set viewfinder resolution" << viewfinderResolution <<gst_caps_to_string(viewfinderCaps);
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, viewfinderCaps, NULL);
gst_caps_unref(viewfinderCaps);
}

if (m_captureMode == QCamera::CaptureVideo) {
QSize resolution = m_videoEncodeControl->actualVideoSettings().resolution();
resolution = m_videoEncodeControl->actualVideoSettings().resolution();
//qreal framerate = m_videoEncodeControl->videoSettings().frameRate();

if (resolution.isEmpty()) {
//select the hightest supported resolution
bool continuous = false;
QList<QSize> resolutions = supportedResolutions(qMakePair<int,int>(0,0),
&continuous,
QCamera::CaptureVideo);
if (!resolutions.isEmpty())
resolution = resolutions.last();
}

if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution /*, framerate*/); //convert to rational
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << gst_caps_to_string(caps);
#endif

//Use the same resolution for viewfinder and video capture
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
} else {
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, NULL, NULL);
}

resolution = m_viewfinderSettingsControl->resolution();
if (!resolution.isEmpty()) {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << gst_caps_to_string(caps);
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
} else {
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, NULL, NULL);
}
}

void CameraBinSession::setAudioCaptureCaps()
{
QAudioEncoderSettings settings = m_audioEncodeControl->audioSettings();
const int sampleRate = settings.sampleRate();
const int channelCount = settings.channelCount();

if (sampleRate == -1 && channelCount == -1)
return;

GstStructure *structure = gst_structure_new(
"audio/x-raw-int",
"endianness", G_TYPE_INT, 1234,
"signed", G_TYPE_BOOLEAN, TRUE,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
NULL);
if (sampleRate != -1)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1)
gst_structure_set(structure, "channels", G_TYPE_INT, channelCount, NULL);

GstCaps *caps = gst_caps_new_full(structure, NULL);
g_object_set(G_OBJECT(m_camerabin), AUDIO_CAPTURE_CAPS_PROPERTY, caps, NULL);
gst_caps_unref(caps);
}

GstElement *CameraBinSession::buildCameraSource()
{
#if CAMERABIN_DEBUG
@@ -658,14 +678,14 @@ void CameraBinSession::setState(QCamera::State newState)
GstState pending = GST_STATE_NULL;
gst_element_get_state(m_camerabin, &binState, &pending, 0);

if (captureMode() == QCamera::CaptureVideo) {
m_recorderControl->applySettings();

g_object_set (G_OBJECT(m_camerabin),
"video-profile",
m_recorderControl->videoProfile(),
NULL);
}

setAudioCaptureCaps();

setupCaptureResolution();

@@ -745,7 +765,7 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
switch(tagValue.type()) {
case QVariant::String:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toString().toUtf8().constData(),
NULL);
@@ -753,18 +773,29 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
case QVariant::Int:
case QVariant::LongLong:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toInt(),
NULL);
break;
case QVariant::Double:
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE_ALL,
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
tagValue.toDouble(),
NULL);
break;
case QVariant::DateTime: {
QDateTime date = tagValue.toDateTime().toLocalTime();
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
GST_TAG_MERGE_REPLACE,
tagName.toUtf8().constData(),
gst_date_time_new_local_time(
date.date().year(), date.date().month(), date.date().day(),
date.time().hour(), date.time().minute(), date.time().second()),
NULL);
break;
}
default:
break;
}
@@ -940,6 +971,7 @@ bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
emit stateChanged(m_state = QCamera::UnloadedState);
break;
case GST_STATE_READY:
setMetaData(m_metaData);
if (m_state != QCamera::LoadedState)
emit stateChanged(m_state = QCamera::LoadedState);
break;
@@ -74,6 +74,7 @@ class CameraBinZoom;
class CameraBinCaptureDestination;
class CameraBinCaptureBufferFormat;
class QGstreamerVideoRendererInterface;
class CameraBinViewfinderSettings;

class QGstreamerElementFactory
{
@@ -136,7 +137,7 @@ public:
CameraBinImageProcessing *imageProcessingControl() const { return m_imageProcessingControl; }
CameraBinCaptureDestination *captureDestinationControl() const { return m_captureDestinationControl; }
CameraBinCaptureBufferFormat *captureBufferFormatControl() const { return m_captureBufferFormatControl; }

CameraBinViewfinderSettings *viewfinderSettingsControl() const { return m_viewfinderSettingsControl; }

CameraBinRecorder *recorderControl() const { return m_recorderControl; }
CameraBinContainer *mediaContainerControl() const { return m_mediaContainerControl; }
@@ -192,6 +193,7 @@ private slots:
private:
bool setupCameraBin();
void setupCaptureResolution();
void setAudioCaptureCaps();
static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d);

QUrl m_sink;
@@ -229,6 +231,7 @@ private:
CameraBinImageProcessing *m_imageProcessingControl;
CameraBinCaptureDestination *m_captureDestinationControl;
CameraBinCaptureBufferFormat *m_captureBufferFormatControl;
CameraBinViewfinderSettings *m_viewfinderSettingsControl;

QGstreamerBusHelper *m_busHelper;
GstBus* m_bus;
@@ -160,18 +160,25 @@ QPair<int,int> CameraBinVideoEncoder::rateAsRational(qreal frameRate) const
GstEncodingProfile *CameraBinVideoEncoder::createProfile()
{
QString codec = m_actualVideoSettings.codec();
QString preset = m_actualVideoSettings.encodingOption(QStringLiteral("preset")).toString();

GstCaps *caps;

if (codec.isEmpty())
caps = gst_caps_new_any();
caps = 0;
else
caps = gst_caps_from_string(codec.toLatin1());

return (GstEncodingProfile *)gst_encoding_video_profile_new(
GstEncodingVideoProfile *profile = gst_encoding_video_profile_new(
caps,
NULL, //preset
!preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
NULL, //restriction
0); //presence
1); //presence

gst_encoding_video_profile_set_pass(profile, 0);
gst_encoding_video_profile_set_variableframerate(profile, TRUE);

return (GstEncodingProfile *)profile;
}

QT_END_NAMESPACE
106
src/plugins/gstreamer/camerabin/camerabinviewfindersettings.cpp
Normal file
@@ -0,0 +1,106 @@
/****************************************************************************
**
** Copyright (C) 2013 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/


#include "camerabinviewfindersettings.h"


QT_BEGIN_NAMESPACE

CameraBinViewfinderSettings::CameraBinViewfinderSettings(QObject *parent)
: QCameraViewfinderSettingsControl(parent)
{
}

CameraBinViewfinderSettings::~CameraBinViewfinderSettings()
{
}

bool CameraBinViewfinderSettings::isViewfinderParameterSupported(ViewfinderParameter parameter) const
{
switch (parameter) {
case Resolution:
return true;
case PixelAspectRatio:
case MinimumFrameRate:
case MaximumFrameRate:
case PixelFormat:
case UserParameter:
return false;
}
return false;
}

QVariant CameraBinViewfinderSettings::viewfinderParameter(ViewfinderParameter parameter) const
{
switch (parameter) {
case Resolution:
return m_resolution;
case PixelAspectRatio:
case MinimumFrameRate:
case MaximumFrameRate:
case PixelFormat:
case UserParameter:
return QVariant();
}
return false;
}

void CameraBinViewfinderSettings::setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value)
{
switch (parameter) {
case Resolution:
m_resolution = value.toSize();
case PixelAspectRatio:
case MinimumFrameRate:
case MaximumFrameRate:
case PixelFormat:
case UserParameter:
break;
}
}

QSize CameraBinViewfinderSettings::resolution() const
{
return m_resolution;
}

QT_END_NAMESPACE
@@ -0,0 +1,70 @@
/****************************************************************************
**
** Copyright (C) 2013 Jolla Ltd.
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#ifndef CAMERABINVIEWFINDERSETTINGS_H
#define CAMERABINVIEWFINDERSETTINGS_H

#include <qcameraviewfindersettingscontrol.h>

#include <QtCore/qsize.h>

QT_BEGIN_NAMESPACE

class CameraBinViewfinderSettings : public QCameraViewfinderSettingsControl
{
Q_OBJECT
public:
CameraBinViewfinderSettings(QObject *parent);
~CameraBinViewfinderSettings();

bool isViewfinderParameterSupported(ViewfinderParameter parameter) const;
QVariant viewfinderParameter(ViewfinderParameter parameter) const;
void setViewfinderParameter(ViewfinderParameter parameter, const QVariant &value);

QSize resolution() const;

private:
QSize m_resolution;
};

QT_END_NAMESPACE

#endif