Merge remote-tracking branch 'origin/stable' into dev

Conflicts:
	src/plugins/blackberry/camera/bbcamerasession.cpp

Change-Id: I7c86e10140ab86fd2a07e2f034dec38ae9112559
This commit is contained in:
Frederik Gladhorn
2013-10-11 16:18:46 +02:00
96 changed files with 2113 additions and 1286 deletions

View File

@@ -3,3 +3,6 @@ TEMPLATE = subdirs
SUBDIRS += src \
jar
qtHaveModule(quick) {
SUBDIRS += videonode
}

View File

@@ -45,7 +45,10 @@ import android.hardware.Camera;
import android.graphics.SurfaceTexture;
import android.util.Log;
public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback, Camera.AutoFocusCallback
public class QtCamera implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
{
private int m_cameraId = -1;
private Camera m_camera = null;
@@ -149,6 +152,11 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
m_camera.cancelAutoFocus();
}
// Requests a single preview frame from the camera. The frame is delivered
// asynchronously through Camera.PreviewCallback.onPreviewFrame() (this class
// registers itself as the one-shot callback).
public void requestPreviewFrame()
{
m_camera.setOneShotPreviewCallback(this);
}
public void takePicture()
{
try {
@@ -170,6 +178,12 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
notifyPictureCaptured(m_cameraId, data);
}
// Camera.PreviewCallback: receives the one-shot preview frame requested by
// requestPreviewFrame() and forwards the raw bytes to native code.
// NOTE(review): the data is presumably in the camera's configured preview
// format (NV21 by default on Android) — confirm against the native consumer.
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
notifyPreviewFrame(m_cameraId, data);
}
@Override
public void onAutoFocus(boolean success, Camera camera)
{
@@ -179,4 +193,5 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
private static native void notifyAutoFocusComplete(int id, boolean success);
private static native void notifyPictureExposed(int id);
private static native void notifyPictureCaptured(int id, byte[] data);
private static native void notifyPreviewFrame(int id, byte[] data);
}

View File

@@ -76,4 +76,29 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
return s1.width() * s1.height() < s2.width() * s2.height();
}
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height)
{
const int frameSize = width * height;
int a = 0;
for (int i = 0, ci = 0; i < height; ++i, ci += 1) {
for (int j = 0, cj = 0; j < width; ++j, cj += 1) {
int y = (0xff & ((int) yuv[ci * width + cj]));
int v = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 0]));
int u = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 1]));
y = y < 16 ? 16 : y;
int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
r = qBound(0, r, 255);
g = qBound(0, g, 255);
b = qBound(0, b, 255);
rgb[a++] = 0xff000000 | (r << 16) | (g << 8) | b;
}
}
}
QT_END_NAMESPACE

View File

@@ -53,6 +53,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height);
QT_END_NAMESPACE
#endif // QANDROIDMULTIMEDIAUTILS_H

View File

@@ -48,26 +48,26 @@
QT_BEGIN_NAMESPACE
typedef void (*TextureReadyCallback)(void*);
class QAndroidVideoOutput
{
public:
QAndroidVideoOutput() { }
virtual ~QAndroidVideoOutput() { }
virtual jobject surfaceHolder() = 0;
virtual jobject surfaceTexture() { return 0; }
virtual bool isTextureReady() = 0;
virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
virtual jobject surfaceTexture() = 0;
virtual bool isReady() { return true; }
virtual void setVideoSize(const QSize &size) = 0;
virtual void stop() = 0;
virtual void setVideoSize(const QSize &) { }
virtual void stop() { }
virtual QImage toImage() = 0;
// signals:
// void readyChanged(bool);
};
#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0"
Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid)
QT_END_NAMESPACE
#endif // QANDROIDVIDEOOUTPUT_H

View File

@@ -44,39 +44,31 @@
#include <QtCore/private/qjni_p.h>
#include "jsurfacetextureholder.h"
#include <QAbstractVideoSurface>
#include <QOpenGLContext>
#include <QOffscreenSurface>
#include <QOpenGLFramebufferObject>
#include <QVideoSurfaceFormat>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <qevent.h>
#include <qcoreapplication.h>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
QT_BEGIN_NAMESPACE
static const GLfloat g_vertex_data[] = {
-1.f, 1.f,
1.f, 1.f,
1.f, -1.f,
-1.f, -1.f
};
#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
static const GLfloat g_texture_data[] = {
0.f, 0.f,
1.f, 0.f,
1.f, 1.f,
0.f, 1.f
};
// Releases the wrapped GL texture when the deleter object is destroyed.
// NOTE(review): glDeleteTextures requires a current GL context — this object
// is expected to be destroyed on the thread owning the texture's context
// (via deleteLater() from the render thread) — confirm against callers.
TextureDeleter::~TextureDeleter()
{
glDeleteTextures(1, &m_id);
}
class TextureVideoBuffer : public QAbstractVideoBuffer
class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
TextureVideoBuffer(GLuint textureId)
: QAbstractVideoBuffer(GLTextureHandle)
, m_textureId(textureId)
{}
AndroidTextureVideoBuffer(JSurfaceTexture *surface)
: QAbstractVideoBuffer(ExternalGLTextureHandle)
, m_surfaceTexture(surface)
{
}
virtual ~TextureVideoBuffer() {}
virtual ~AndroidTextureVideoBuffer() {}
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
@@ -84,67 +76,33 @@ public:
QVariant handle() const
{
return QVariant::fromValue<unsigned int>(m_textureId);
}
private:
GLuint m_textureId;
};
class ImageVideoBuffer : public QAbstractVideoBuffer
{
public:
ImageVideoBuffer(const QImage &image)
: QAbstractVideoBuffer(NoHandle)
, m_image(image)
, m_mode(NotMapped)
{
}
MapMode mapMode() const { return m_mode; }
uchar *map(MapMode mode, int *, int *)
{
if (mode != NotMapped && m_mode == NotMapped) {
m_mode = mode;
return m_image.bits();
if (m_data.isEmpty()) {
// update the video texture (called from the render thread)
m_surfaceTexture->updateTexImage();
m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
}
return 0;
}
void unmap()
{
m_mode = NotMapped;
return m_data;
}
private:
QImage m_image;
MapMode m_mode;
mutable JSurfaceTexture *m_surfaceTexture;
mutable QVariantList m_data;
};
QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(0)
, m_offscreenSurface(0)
, m_glContext(0)
, m_fbo(0)
, m_program(0)
, m_useImage(false)
, m_androidSurface(0)
, m_surfaceTexture(0)
, m_surfaceHolder(0)
, m_externalTex(0)
, m_textureReadyCallback(0)
, m_textureReadyContext(0)
, m_textureDeleter(0)
{
}
QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
{
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
if (m_surfaceTexture) {
m_surfaceTexture->callMethod<void>("release");
delete m_surfaceTexture;
@@ -159,13 +117,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
delete m_surfaceHolder;
m_surfaceHolder = 0;
}
if (m_externalTex)
glDeleteTextures(1, &m_externalTex);
delete m_fbo;
delete m_program;
delete m_glContext;
delete m_offscreenSurface;
if (m_textureDeleter)
m_textureDeleter->deleteLater();
}
QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
@@ -178,28 +131,23 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
if (surface == m_surface)
return;
if (m_surface && m_surface->isActive()) {
m_surface->stop();
m_surface->removeEventFilter(this);
if (m_surface) {
if (m_surface->isActive())
m_surface->stop();
m_surface->setProperty("_q_GLThreadCallback", QVariant());
}
m_surface = surface;
if (m_surface) {
m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
m_surface->installEventFilter(this);
m_surface->setProperty("_q_GLThreadCallback",
QVariant::fromValue<QObject*>(this));
}
}
bool QAndroidVideoRendererControl::isTextureReady()
bool QAndroidVideoRendererControl::isReady()
{
return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid());
}
void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
{
m_textureReadyCallback = cb;
m_textureReadyContext = context;
return QOpenGLContext::currentContext() || m_externalTex;
}
bool QAndroidVideoRendererControl::initSurfaceTexture()
@@ -210,45 +158,15 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
if (!m_surface)
return false;
QOpenGLContext *currContext = QOpenGLContext::currentContext();
// If we don't have a GL context in the current thread, create one and share it
// with the render thread GL context
if (!currContext && !m_glContext) {
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
if (!shareContext)
return false;
m_offscreenSurface = new QOffscreenSurface;
QSurfaceFormat format;
format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
m_offscreenSurface->setFormat(format);
m_offscreenSurface->create();
m_glContext = new QOpenGLContext;
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
if (shareContext)
m_glContext->setShareContext(shareContext);
if (!m_glContext->create()) {
delete m_glContext;
m_glContext = 0;
delete m_offscreenSurface;
m_offscreenSurface = 0;
return false;
}
// if sharing contexts is not supported, fallback to image rendering and send the bits
// to the video surface
if (!m_glContext->shareContext())
m_useImage = true;
// if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
// for the GL render thread to call us back to do it.
if (QOpenGLContext::currentContext()) {
glGenTextures(1, &m_externalTex);
m_textureDeleter = new TextureDeleter(m_externalTex);
} else if (!m_externalTex) {
return false;
}
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
glGenTextures(1, &m_externalTex);
m_surfaceTexture = new JSurfaceTexture(m_externalTex);
if (m_surfaceTexture->isValid()) {
@@ -256,7 +174,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
glDeleteTextures(1, &m_externalTex);
m_textureDeleter->deleteLater();
m_externalTex = 0;
m_textureDeleter = 0;
}
return m_surfaceTexture != 0;
@@ -294,9 +214,6 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
stop();
m_nativeSize = size;
delete m_fbo;
m_fbo = 0;
}
void QAndroidVideoRendererControl::stop()
@@ -306,133 +223,40 @@ void QAndroidVideoRendererControl::stop()
m_nativeSize = QSize();
}
QImage QAndroidVideoRendererControl::toImage()
{
if (!m_fbo)
return QImage();
return m_fbo->toImage().mirrored();
}
void QAndroidVideoRendererControl::onFrameAvailable()
{
if (m_glContext)
m_glContext->makeCurrent(m_offscreenSurface);
m_surfaceTexture->updateTexImage();
if (!m_nativeSize.isValid())
if (!m_nativeSize.isValid() || !m_surface)
return;
renderFrameToFbo();
QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
QAbstractVideoBuffer *buffer = 0;
QVideoFrame frame;
if (m_useImage) {
buffer = new ImageVideoBuffer(m_fbo->toImage().mirrored());
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_RGB32);
} else {
buffer = new TextureVideoBuffer(m_fbo->texture());
frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->nativeResolution() != frame.size())) {
m_surface->stop();
}
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->nativeResolution() != frame.size())) {
m_surface->stop();
}
if (!m_surface->isActive()) {
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
if (!m_surface->isActive()) {
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(),
m_useImage ? QAbstractVideoBuffer::NoHandle
: QAbstractVideoBuffer::GLTextureHandle);
m_surface->start(format);
}
if (m_surface->isActive())
m_surface->present(frame);
m_surface->start(format);
}
if (m_surface->isActive())
m_surface->present(frame);
}
void QAndroidVideoRendererControl::renderFrameToFbo()
void QAndroidVideoRendererControl::customEvent(QEvent *e)
{
createGLResources();
m_fbo->bind();
glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height());
m_program->bind();
m_program->enableAttributeArray(0);
m_program->enableAttributeArray(1);
m_program->setUniformValue("frameTexture", GLuint(0));
m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
m_program->disableAttributeArray(0);
m_program->disableAttributeArray(1);
m_program->release();
glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
m_fbo->release();
glFinish();
}
void QAndroidVideoRendererControl::createGLResources()
{
if (!m_fbo)
m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
if (!m_program) {
m_program = new QOpenGLShaderProgram;
QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
"attribute highp vec2 textureCoordArray; \n" \
"uniform highp mat4 texMatrix; \n" \
"varying highp vec2 textureCoords; \n" \
"void main(void) \n" \
"{ \n" \
" gl_Position = vertexCoordsArray; \n" \
" textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
"}\n");
m_program->addShader(vertexShader);
QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
"varying highp vec2 textureCoords; \n" \
"uniform samplerExternalOES frameTexture; \n" \
"void main() \n" \
"{ \n" \
" gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
"}\n");
m_program->addShader(fragmentShader);
m_program->bindAttributeLocation("vertexCoordsArray", 0);
m_program->bindAttributeLocation("textureCoordArray", 1);
m_program->link();
}
}
bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
{
if (e->type() == QEvent::DynamicPropertyChange) {
QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
m_textureReadyCallback(m_textureReadyContext);
m_textureReadyCallback = 0;
m_textureReadyContext = 0;
if (e->type() == QEvent::User) {
// This is running in the render thread (OpenGL enabled)
if (!m_externalTex) {
glGenTextures(1, &m_externalTex);
m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
emit readyChanged(true);
}
}
return false;
}
QT_END_NAMESPACE

View File

@@ -48,15 +48,23 @@
QT_BEGIN_NAMESPACE
class QOpenGLContext;
class QOffscreenSurface;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class JSurfaceTextureHolder;
// Small QObject wrapper around a GL texture id whose destructor deletes the
// texture. Using a QObject allows deleteLater() to defer the glDeleteTextures
// call to the event loop of the thread the object lives in.
class TextureDeleter : public QObject
{
Q_OBJECT
public:
// Takes the GL texture name to delete; does not create or own a GL context.
TextureDeleter(uint id) : m_id(id) { }
~TextureDeleter();
private:
uint m_id; // GL texture name passed to glDeleteTextures in the destructor
};
class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
{
Q_OBJECT
Q_INTERFACES(QAndroidVideoOutput)
public:
explicit QAndroidVideoRendererControl(QObject *parent = 0);
~QAndroidVideoRendererControl() Q_DECL_OVERRIDE;
@@ -65,38 +73,30 @@ public:
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;
jobject surfaceHolder() Q_DECL_OVERRIDE;
bool isTextureReady() Q_DECL_OVERRIDE;
void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
jobject surfaceTexture() Q_DECL_OVERRIDE;
bool isReady() Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE;
QImage toImage() Q_DECL_OVERRIDE;
bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE;
void customEvent(QEvent *) Q_DECL_OVERRIDE;
Q_SIGNALS:
void readyChanged(bool);
private Q_SLOTS:
void onFrameAvailable();
private:
bool initSurfaceTexture();
void renderFrameToFbo();
void createGLResources();
QAbstractVideoSurface *m_surface;
QOffscreenSurface *m_offscreenSurface;
QOpenGLContext *m_glContext;
QOpenGLFramebufferObject *m_fbo;
QOpenGLShaderProgram *m_program;
bool m_useImage;
QSize m_nativeSize;
QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture;
JSurfaceTextureHolder *m_surfaceHolder;
uint m_externalTex;
TextureReadyCallback m_textureReadyCallback;
void *m_textureReadyContext;
TextureDeleter *m_textureDeleter;
};
QT_END_NAMESPACE

View File

@@ -52,12 +52,6 @@
QT_BEGIN_NAMESPACE
static void textureReadyCallback(void *context)
{
if (context)
reinterpret_cast<QAndroidCameraSession *>(context)->onSurfaceTextureReady();
}
QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
: QObject(parent)
, m_selectedCamera(0)
@@ -153,8 +147,11 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewFrameAvailable(QByteArray)), this, SLOT(onCameraPreviewFrameAvailable(QByteArray)));
m_nativeOrientation = m_camera->getNativeOrientation();
m_status = QCamera::LoadedStatus;
if (m_camera->getPreviewFormat() != JCamera::NV21)
m_camera->setPreviewFormat(JCamera::NV21);
emit opened();
} else {
m_status = QCamera::UnavailableStatus;
@@ -188,12 +185,17 @@ void QAndroidCameraSession::close()
emit statusChanged(m_status);
}
void QAndroidCameraSession::setVideoPreview(QAndroidVideoOutput *videoOutput)
void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
{
if (m_videoOutput)
m_videoOutput->stop();
m_videoOutput = videoOutput;
if (videoOutput) {
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
} else {
m_videoOutput = 0;
}
}
void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview)
@@ -243,12 +245,8 @@ void QAndroidCameraSession::startPreview()
applyImageSettings();
adjustViewfinderSize(m_imageSettings.resolution());
if (m_videoOutput) {
if (m_videoOutput->isTextureReady())
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
else
m_videoOutput->setTextureReadyCallback(textureReadyCallback, this);
}
if (m_videoOutput && m_videoOutput->isReady())
onVideoOutputReady(true);
JMultimediaUtils::enableOrientationListener(true);
@@ -427,6 +425,7 @@ int QAndroidCameraSession::capture(const QString &fileName)
// adjust picture rotation depending on the device orientation
m_camera->setRotation(currentCameraRotation());
m_camera->requestPreviewFrame();
m_camera->takePicture();
} else {
emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError,
@@ -455,10 +454,6 @@ void QAndroidCameraSession::onCameraPictureExposed()
void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
{
if (!m_captureCanceled) {
// generate a preview from the viewport
if (m_videoOutput)
emit imageCaptured(m_currentImageCaptureId, m_videoOutput->toImage());
// Loading and saving the captured image can be slow, do it in a separate thread
QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage,
m_currentImageCaptureId,
@@ -522,9 +517,37 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
}
void QAndroidCameraSession::onSurfaceTextureReady()
void QAndroidCameraSession::onCameraPreviewFrameAvailable(const QByteArray &data)
{
if (m_camera && m_videoOutput)
if (m_captureCanceled || m_readyForCapture)
return;
QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
m_currentImageCaptureId,
data);
}
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data)
{
QSize frameSize = m_camera->previewSize();
QImage preview(frameSize, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)preview.bits(),
frameSize.width(),
frameSize.height());
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == JCamera::CameraFacingFront)
preview = preview.transformed(QTransform().scale(-1, 1));
emit imageCaptured(id, preview);
}
// Slot connected to the video output's readyChanged(bool) signal: once the
// output's SurfaceTexture exists (ready == true), hand it to the camera as
// the preview target.
void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
if (m_camera && m_videoOutput && ready)
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
}

View File

@@ -71,7 +71,7 @@ public:
void setCaptureMode(QCamera::CaptureModes mode);
bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
void setVideoPreview(QAndroidVideoOutput *videoOutput);
void setVideoPreview(QObject *videoOutput);
void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true);
QImageEncoderSettings imageSettings() const { return m_imageSettings; }
@@ -88,8 +88,6 @@ public:
int capture(const QString &fileName);
void cancelCapture();
void onSurfaceTextureReady();
int currentCameraRotation() const;
Q_SIGNALS:
@@ -110,10 +108,13 @@ Q_SIGNALS:
void imageCaptureError(int id, int error, const QString &errorString);
private Q_SLOTS:
void onVideoOutputReady(bool ready);
void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraPictureExposed();
void onCameraPictureCaptured(const QByteArray &data);
void onCameraPreviewFrameAvailable(const QByteArray &data);
private:
bool open();
@@ -123,7 +124,7 @@ private:
void stopPreview();
void applyImageSettings();
void processPreviewImage(int id);
void processPreviewImage(int id, const QByteArray &data);
void processCapturedImage(int id,
const QByteArray &data,
QCameraImageCapture::CaptureDestinations dest,

View File

@@ -88,7 +88,7 @@ private:
QAndroidVideoDeviceSelectorControl *m_videoInputControl;
QAndroidAudioInputSelectorControl *m_audioInputControl;
QAndroidCameraSession *m_cameraSession;
QAndroidVideoRendererControl *m_videoRendererControl;
QMediaControl *m_videoRendererControl;
QAndroidCameraZoomControl *m_cameraZoomControl;
QAndroidCameraExposureControl *m_cameraExposureControl;
QAndroidCameraFlashControl *m_cameraFlashControl;

View File

@@ -110,25 +110,27 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
QUrl QAndroidCaptureSession::outputLocation() const
{
return m_outputLocation;
return m_actualOutputLocation;
}
bool QAndroidCaptureSession::setOutputLocation(const QUrl &location)
{
if (m_outputLocation == location)
if (m_requestedOutputLocation == location)
return false;
m_outputLocation = location;
m_actualOutputLocation = QUrl();
m_requestedOutputLocation = location;
if (m_outputLocation.isEmpty())
if (m_requestedOutputLocation.isEmpty())
return true;
if (m_outputLocation.isValid() && (m_outputLocation.isLocalFile() || m_outputLocation.isRelative())) {
emit actualLocationChanged(m_outputLocation);
if (m_requestedOutputLocation.isValid()
&& (m_requestedOutputLocation.isLocalFile() || m_requestedOutputLocation.isRelative())) {
emit actualLocationChanged(m_requestedOutputLocation);
return true;
}
m_outputLocation = QUrl();
m_requestedOutputLocation = QUrl();
return false;
}
@@ -213,15 +215,18 @@ bool QAndroidCaptureSession::start()
// Set output file
QString filePath = m_mediaStorageLocation.generateFileName(m_outputLocation.isLocalFile() ? m_outputLocation.toLocalFile()
: m_outputLocation.toString(),
m_cameraSession ? QAndroidMediaStorageLocation::Camera
: QAndroidMediaStorageLocation::Audio,
m_cameraSession ? QLatin1String("VID_")
: QLatin1String("REC_"),
m_containerFormat);
m_outputLocation = QUrl::fromLocalFile(filePath);
emit actualLocationChanged(m_outputLocation);
QString filePath = m_mediaStorageLocation.generateFileName(
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
: m_requestedOutputLocation.toString(),
m_cameraSession ? QAndroidMediaStorageLocation::Camera
: QAndroidMediaStorageLocation::Audio,
m_cameraSession ? QLatin1String("VID_")
: QLatin1String("REC_"),
m_containerFormat);
m_actualOutputLocation = QUrl::fromLocalFile(filePath);
if (m_actualOutputLocation != m_requestedOutputLocation)
emit actualLocationChanged(m_actualOutputLocation);
m_mediaRecorder->setOutputFile(filePath);
@@ -280,7 +285,7 @@ void QAndroidCaptureSession::stop(bool error)
// if the media is saved into the standard media location, register it
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
QString mediaPath = m_outputLocation.toLocalFile();
QString mediaPath = m_actualOutputLocation.toLocalFile();
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc))

View File

@@ -160,7 +160,8 @@ private:
QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
QUrl m_outputLocation;
QUrl m_requestedOutputLocation;
QUrl m_actualOutputLocation;
CaptureProfile m_defaultSettings;

View File

@@ -45,12 +45,6 @@
QT_BEGIN_NAMESPACE
static void textureReadyCallback(void *context)
{
if (context)
reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
}
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
: QMediaPlayerControl(parent),
mMediaPlayer(new JMediaPlayer),
@@ -241,18 +235,18 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
setSeekable(true);
}
void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput)
void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
{
if (mVideoOutput)
mVideoOutput->stop();
mVideoOutput = videoOutput;
mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
if (mVideoOutput && !mMediaPlayer->display()) {
if (mVideoOutput->isTextureReady())
if (mVideoOutput->isReady())
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
else
mVideoOutput->setTextureReadyCallback(textureReadyCallback, this);
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
}
}
@@ -426,9 +420,9 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
mVideoOutput->setVideoSize(mVideoSize);
}
void QAndroidMediaPlayerControl::onSurfaceTextureReady()
void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
{
if (!mMediaPlayer->display() && mVideoOutput) {
if (!mMediaPlayer->display() && mVideoOutput && ready) {
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
flushPendingStates();
}

View File

@@ -75,8 +75,7 @@ public:
const QIODevice *mediaStream() const Q_DECL_OVERRIDE;
void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;
void setVideoOutput(QAndroidVideoOutput *videoOutput);
void onSurfaceTextureReady();
void setVideoOutput(QObject *videoOutput);
Q_SIGNALS:
void metaDataUpdated();
@@ -90,6 +89,7 @@ public Q_SLOTS:
void setMuted(bool muted) Q_DECL_OVERRIDE;
private Q_SLOTS:
void onVideoOutputReady(bool ready);
void onError(qint32 what, qint32 extra);
void onInfo(qint32 what, qint32 extra);
void onMediaPlayerInfo(qint32 what, qint32 extra);

View File

@@ -48,7 +48,6 @@ QT_BEGIN_NAMESPACE
class QAndroidMediaPlayerControl;
class QAndroidMetaDataReaderControl;
class QAndroidVideoRendererControl;
class QAndroidMediaService : public QMediaService
{
@@ -63,7 +62,7 @@ public:
private:
QAndroidMediaPlayerControl *mMediaControl;
QAndroidMetaDataReaderControl *mMetadataControl;
QAndroidVideoRendererControl *mVideoRendererControl;
QMediaControl *mVideoRendererControl;
};
QT_END_NAMESPACE

View File

@@ -102,6 +102,18 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
}
}
// JNI entry point invoked from QtCamera.onPreviewFrame(): copies the preview
// frame out of the Java byte array and emits it from the JCamera instance
// registered under `id`. Does nothing if no such instance exists.
static void notifyPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data)
{
    JCamera *camera = g_objectMap.value(id, 0);
    if (!camera)
        return;

    // Copy the frame into Qt-owned memory; the Java array is only valid for
    // the duration of this call.
    const int length = env->GetArrayLength(data);
    QByteArray frame;
    frame.resize(length);
    env->GetByteArrayRegion(data, 0, length, (jbyte *) frame.data());

    Q_EMIT camera->previewFrameAvailable(frame);
}
JCamera::JCamera(int cameraId, jobject cam)
: QObject()
, QJNIObjectPrivate(cam)
@@ -225,6 +237,23 @@ QList<QSize> JCamera::getSupportedPreviewSizes()
return list;
}
// Returns the camera's current preview pixel format (values mirror
// android.graphics.ImageFormat), or Unknown when the cached Camera.Parameters
// object is not valid.
JCamera::ImageFormat JCamera::getPreviewFormat()
{
if (!m_parameters.isValid())
return Unknown;
return JCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
}
// Sets the preview pixel format on the cached Camera.Parameters and pushes
// the updated parameters to the camera via applyParameters(). No-op when the
// parameters object is not valid.
void JCamera::setPreviewFormat(ImageFormat fmt)
{
if (!m_parameters.isValid())
return;
m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
applyParameters();
}
void JCamera::setPreviewSize(const QSize &size)
{
if (!m_parameters.isValid())
@@ -624,6 +653,11 @@ void JCamera::setJpegQuality(int quality)
applyParameters();
}
// Forwards to the Java QtCamera.requestPreviewFrame(), which registers a
// one-shot preview callback; the frame arrives later through the
// previewFrameAvailable() signal.
void JCamera::requestPreviewFrame()
{
callMethod<void>("requestPreviewFrame");
}
void JCamera::takePicture()
{
callMethod<void>("takePicture");
@@ -672,7 +706,8 @@ QStringList JCamera::callStringListMethod(const char *methodName)
static JNINativeMethod methods[] = {
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
{"notifyPreviewFrame", "(I[B)V", (void *)notifyPreviewFrame}
};
bool JCamera::initJNI(JNIEnv *env)

View File

@@ -58,6 +58,16 @@ public:
CameraFacingFront = 1
};
enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
Unknown = 0,
RGB565 = 4,
NV16 = 16,
NV21 = 17,
YUY2 = 20,
JPEG = 256,
YV12 = 842094169
};
~JCamera();
static JCamera *open(int cameraId);
@@ -75,6 +85,9 @@ public:
QSize getPreferredPreviewSizeForVideo();
QList<QSize> getSupportedPreviewSizes();
ImageFormat getPreviewFormat();
void setPreviewFormat(ImageFormat fmt);
QSize previewSize() const { return m_previewSize; }
void setPreviewSize(const QSize &size);
void setPreviewTexture(jobject surfaceTexture);
@@ -131,6 +144,8 @@ public:
void startPreview();
void stopPreview();
void requestPreviewFrame();
void takePicture();
static bool initJNI(JNIEnv *env);
@@ -143,6 +158,8 @@ Q_SIGNALS:
void whiteBalanceChanged();
void previewFrameAvailable(const QByteArray &data);
void pictureExposed();
void pictureCaptured(const QByteArray &data);

View File

@@ -56,6 +56,7 @@ public:
explicit JSurfaceTexture(unsigned int texName);
~JSurfaceTexture();
int textureID() const { return m_texID; }
QMatrix4x4 getTransformMatrix();
void updateTexImage();

View File

@@ -0,0 +1,3 @@
{
"Keys": ["sgvideonodes"]
}

View File

@@ -0,0 +1,204 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonode.h"
#include <qsgmaterial.h>
#include <qmutex.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeMaterialShader : public QSGMaterialShader
{
public:
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
char const *const *attributeNames() const {
static const char *names[] = {
"qt_VertexPosition",
"qt_VertexTexCoord",
0
};
return names;
}
protected:
const char *vertexShader() const {
return
"uniform highp mat4 qt_Matrix; \n"
"uniform highp mat4 texMatrix; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() { \n"
" qt_TexCoord = (texMatrix * vec4(qt_VertexTexCoord, 0.0, 1.0)).xy; \n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
}
const char *fragmentShader() const {
return
"#extension GL_OES_EGL_image_external : require \n"
"uniform samplerExternalOES videoTexture; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 qt_TexCoord; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D(videoTexture, qt_TexCoord) * opacity; \n"
"}";
}
void initialize() {
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_texMatrix = program()->uniformLocation("texMatrix");
m_id_texture = program()->uniformLocation("videoTexture");
m_id_opacity = program()->uniformLocation("opacity");
}
int m_id_matrix;
int m_id_texMatrix;
int m_id_texture;
int m_id_opacity;
};
class QAndroidSGVideoNodeMaterial : public QSGMaterial
{
public:
QAndroidSGVideoNodeMaterial()
: m_textureId(0)
{
setFlag(Blending, false);
}
~QAndroidSGVideoNodeMaterial()
{
m_frame = QVideoFrame();
}
QSGMaterialType *type() const {
static QSGMaterialType theType;
return &theType;
}
QSGMaterialShader *createShader() const {
return new QAndroidSGVideoNodeMaterialShader;
}
int compare(const QSGMaterial *other) const {
const QAndroidSGVideoNodeMaterial *m = static_cast<const QAndroidSGVideoNodeMaterial *>(other);
return m_textureId - m->m_textureId;
}
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
}
bool updateTexture()
{
QMutexLocker lock(&m_frameMutex);
bool texMatrixDirty = false;
if (m_frame.isValid()) {
QVariantList list = m_frame.handle().toList();
GLuint texId = list.at(0).toUInt();
QMatrix4x4 mat = qvariant_cast<QMatrix4x4>(list.at(1));
texMatrixDirty = texId != m_textureId || mat != m_texMatrix;
m_textureId = texId;
m_texMatrix = mat;
// the texture is already bound and initialized at this point,
// no need to call glTexParams
} else {
m_textureId = 0;
}
return texMatrixDirty;
}
QVideoFrame m_frame;
QMutex m_frameMutex;
GLuint m_textureId;
QMatrix4x4 m_texMatrix;
};
void QAndroidSGVideoNodeMaterialShader::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QAndroidSGVideoNodeMaterial *mat = static_cast<QAndroidSGVideoNodeMaterial *>(newMaterial);
program()->setUniformValue(m_id_texture, 0);
if (mat->updateTexture())
program()->setUniformValue(m_id_texMatrix, mat->m_texMatrix);
if (state.isOpacityDirty())
program()->setUniformValue(m_id_opacity, state.opacity());
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}
QAndroidSGVideoNode::QAndroidSGVideoNode(const QVideoSurfaceFormat &format)
: m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
m_material = new QAndroidSGVideoNodeMaterial;
setMaterial(m_material);
}
void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
}
QVideoFrame::PixelFormat QAndroidSGVideoNode::pixelFormat() const
{
return m_format.pixelFormat();
}
QT_END_NAMESPACE

View File

@@ -0,0 +1,67 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODE_H
#define QANDROIDSGVIDEONODE_H
#include <private/qsgvideonode_p.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeMaterial;
class QAndroidSGVideoNode : public QSGVideoNode
{
public:
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
void setCurrentFrame(const QVideoFrame &frame);
QVideoFrame::PixelFormat pixelFormat() const;
private:
QVideoSurfaceFormat m_format;
QAndroidSGVideoNodeMaterial *m_material;
QVideoFrame m_frame;
};
QT_END_NAMESPACE
#endif // QANDROIDSGVIDEONODE_H

View File

@@ -0,0 +1,69 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qandroidsgvideonodeplugin.h"
#include "qandroidsgvideonode.h"
QT_BEGIN_NAMESPACE
#define ExternalGLTextureHandle (QAbstractVideoBuffer::UserHandle + 1)
QList<QVideoFrame::PixelFormat> QAndroidSGVideoNodeFactoryPlugin::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const
{
QList<QVideoFrame::PixelFormat> pixelFormats;
if (handleType == ExternalGLTextureHandle)
pixelFormats.append(QVideoFrame::Format_BGR32);
return pixelFormats;
}
QSGVideoNode *QAndroidSGVideoNodeFactoryPlugin::createNode(const QVideoSurfaceFormat &format)
{
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QAndroidSGVideoNode(format);
return 0;
}
QT_END_NAMESPACE

View File

@@ -0,0 +1,62 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QANDROIDSGVIDEONODEPLUGIN_H
#define QANDROIDSGVIDEONODEPLUGIN_H
#include <private/qsgvideonode_p.h>
QT_BEGIN_NAMESPACE
class QAndroidSGVideoNodeFactoryPlugin : public QSGVideoNodeFactoryPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA(IID QSGVideoNodeFactoryInterface_iid
FILE "android_videonode.json")
public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
QT_END_NAMESPACE
#endif // QANDROIDSGVIDEONODEPLUGIN_H

View File

@@ -0,0 +1,16 @@
TARGET = qtsgvideonode_android
QT += quick multimedia-private qtmultimediaquicktools-private
PLUGIN_TYPE = video/videonode
PLUGIN_CLASS_NAME = QAndroidSGVideoNodeFactoryPlugin
load(qt_plugin)
HEADERS += \
qandroidsgvideonodeplugin.h \
qandroidsgvideonode.h
SOURCES += \
qandroidsgvideonodeplugin.cpp \
qandroidsgvideonode.cpp
OTHER_FILES += android_videonode.json

View File

@@ -41,6 +41,8 @@
#include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
AudioCaptureProbeControl::AudioCaptureProbeControl(QObject *parent):
QMediaAudioProbeControl(parent)
{
@@ -58,3 +60,5 @@ void AudioCaptureProbeControl::bufferProbed(const char *data, quint32 size, cons
QAudioBuffer audioBuffer = QAudioBuffer(QByteArray::fromRawData(data, size), format);
QMetaObject::invokeMethod(this, "audioBufferProbed", Qt::QueuedConnection, Q_ARG(QAudioBuffer, audioBuffer));
}
QT_END_NAMESPACE

View File

@@ -46,7 +46,7 @@
#include <QtCore/qmutex.h>
#include <qaudiobuffer.h>
QT_USE_NAMESPACE
QT_BEGIN_NAMESPACE
class AudioCaptureProbeControl : public QMediaAudioProbeControl
{
@@ -58,4 +58,6 @@ public:
void bufferProbed(const char *data, quint32 size, const QAudioFormat& format);
};
QT_END_NAMESPACE
#endif

View File

@@ -47,6 +47,8 @@
#include "audiomediarecordercontrol.h"
#include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
AudioCaptureService::AudioCaptureService(QObject *parent):
QMediaService(parent)
{
@@ -94,4 +96,4 @@ void AudioCaptureService::releaseControl(QMediaControl *control)
Q_UNUSED(control)
}
QT_END_NAMESPACE

View File

@@ -46,14 +46,14 @@
#include "qmediaservice.h"
QT_BEGIN_NAMESPACE
class AudioCaptureSession;
class AudioEncoderControl;
class AudioContainerControl;
class AudioMediaRecorderControl;
class AudioInputSelector;
QT_USE_NAMESPACE
class AudioCaptureService : public QMediaService
{
Q_OBJECT
@@ -71,4 +71,6 @@ private:
AudioMediaRecorderControl *m_mediaControl;
};
QT_END_NAMESPACE
#endif

View File

@@ -46,6 +46,7 @@
#include "qmediaserviceproviderplugin.h"
QT_BEGIN_NAMESPACE
QMediaService* AudioCaptureServicePlugin::create(QString const& key)
{
@@ -60,3 +61,4 @@ void AudioCaptureServicePlugin::release(QMediaService *service)
delete service;
}
QT_END_NAMESPACE

View File

@@ -45,7 +45,7 @@
#include "qmediaserviceproviderplugin.h"
QT_USE_NAMESPACE
QT_BEGIN_NAMESPACE
class AudioCaptureServicePlugin : public QMediaServiceProviderPlugin
{
@@ -58,4 +58,6 @@ public:
void release(QMediaService *service);
};
QT_END_NAMESPACE
#endif // AUDIOCAPTURESERVICEPLUGIN_H

View File

@@ -49,6 +49,8 @@
#include "audiocapturesession.h"
#include "audiocaptureprobecontrol.h"
QT_BEGIN_NAMESPACE
void FileProbeProxy::startProbes(const QAudioFormat &format)
{
m_format = format;
@@ -87,33 +89,20 @@ qint64 FileProbeProxy::writeData(const char *data, qint64 len)
return QFile::writeData(data, len);
}
AudioCaptureSession::AudioCaptureSession(QObject *parent):
QObject(parent)
AudioCaptureSession::AudioCaptureSession(QObject *parent)
: QObject(parent)
, m_state(QMediaRecorder::StoppedState)
, m_status(QMediaRecorder::UnloadedStatus)
, m_audioInput(0)
, m_deviceInfo(QAudioDeviceInfo::defaultInputDevice())
, m_wavFile(true)
{
m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice());
m_audioInput = 0;
m_position = 0;
m_state = QMediaRecorder::StoppedState;
m_format.setSampleRate(8000);
m_format.setChannelCount(1);
m_format.setSampleSize(8);
m_format.setSampleType(QAudioFormat::UnSignedInt);
m_format.setCodec("audio/pcm");
wavFile = true;
m_format = m_deviceInfo.preferredFormat();
}
AudioCaptureSession::~AudioCaptureSession()
{
stop();
if(m_audioInput)
delete m_audioInput;
}
QAudioDeviceInfo* AudioCaptureSession::deviceInfo() const
{
return m_deviceInfo;
setState(QMediaRecorder::StoppedState);
}
QAudioFormat AudioCaptureSession::format() const
@@ -121,118 +110,96 @@ QAudioFormat AudioCaptureSession::format() const
return m_format;
}
bool AudioCaptureSession::isFormatSupported(const QAudioFormat &format) const
void AudioCaptureSession::setFormat(const QAudioFormat &format)
{
if(m_deviceInfo) {
if(format.codec().contains(QLatin1String("audio/x-wav"))) {
QAudioFormat fmt = format;
fmt.setCodec("audio/pcm");
return m_deviceInfo->isFormatSupported(fmt);
} else
return m_deviceInfo->isFormatSupported(format);
}
return false;
}
bool AudioCaptureSession::setFormat(const QAudioFormat &format)
{
if(m_deviceInfo) {
QAudioFormat fmt = format;
if(m_deviceInfo->isFormatSupported(fmt)) {
m_format = fmt;
if(m_audioInput) delete m_audioInput;
m_audioInput = 0;
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for(int i=0;i<devices.size();i++) {
if(qstrcmp(m_deviceInfo->deviceName().toLocal8Bit().constData(),
devices.at(i).deviceName().toLocal8Bit().constData()) == 0) {
m_audioInput = new QAudioInput(devices.at(i),m_format);
connect(m_audioInput,SIGNAL(stateChanged(QAudio::State)),this,SLOT(stateChanged(QAudio::State)));
connect(m_audioInput,SIGNAL(notify()),this,SLOT(notify()));
break;
}
}
} else {
m_format = m_deviceInfo->preferredFormat();
qWarning()<<"failed to setFormat using preferred...";
}
}
return false;
}
QStringList AudioCaptureSession::supportedContainers() const
{
QStringList list;
if(m_deviceInfo) {
if (m_deviceInfo->supportedCodecs().size() > 0) {
list << "audio/x-wav";
list << "audio/pcm";
}
}
return list;
}
QString AudioCaptureSession::containerDescription(const QString &formatMimeType) const
{
if(m_deviceInfo) {
if (formatMimeType.contains(QLatin1String("audio/pcm")))
return tr("RAW file format");
if (formatMimeType.contains(QLatin1String("audio/x-wav")))
return tr("WAV file format");
}
return QString();
m_format = format;
}
void AudioCaptureSession::setContainerFormat(const QString &formatMimeType)
{
if (!formatMimeType.contains(QLatin1String("audio/x-wav")) &&
!formatMimeType.contains(QLatin1String("audio/pcm")) &&
!formatMimeType.isEmpty())
return;
if(m_deviceInfo) {
if (!m_deviceInfo->supportedCodecs().contains(QLatin1String("audio/pcm")))
return;
if (formatMimeType.isEmpty() || formatMimeType.contains(QLatin1String("audio/x-wav"))) {
wavFile = true;
m_format.setCodec("audio/pcm");
} else {
wavFile = false;
m_format.setCodec(formatMimeType);
}
}
m_wavFile = (formatMimeType.isEmpty()
|| QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0);
}
QString AudioCaptureSession::containerFormat() const
{
if(wavFile)
return QString("audio/x-wav");
if (m_wavFile)
return QStringLiteral("audio/x-wav");
return QString("audio/pcm");
return QStringLiteral("audio/x-raw");
}
QUrl AudioCaptureSession::outputLocation() const
{
return m_actualSink;
return m_actualOutputLocation;
}
bool AudioCaptureSession::setOutputLocation(const QUrl& sink)
bool AudioCaptureSession::setOutputLocation(const QUrl& location)
{
m_sink = m_actualSink = sink;
return true;
if (m_requestedOutputLocation == location)
return false;
m_actualOutputLocation = QUrl();
m_requestedOutputLocation = location;
if (m_requestedOutputLocation.isEmpty())
return true;
if (m_requestedOutputLocation.isValid() && (m_requestedOutputLocation.isLocalFile()
|| m_requestedOutputLocation.isRelative())) {
emit actualLocationChanged(m_requestedOutputLocation);
return true;
}
m_requestedOutputLocation = QUrl();
return false;
}
qint64 AudioCaptureSession::position() const
{
return m_position;
if (m_audioInput)
return m_audioInput->processedUSecs() / 1000;
return 0;
}
int AudioCaptureSession::state() const
void AudioCaptureSession::setState(QMediaRecorder::State state)
{
return int(m_state);
if (m_state == state)
return;
m_state = state;
emit stateChanged(m_state);
switch (m_state) {
case QMediaRecorder::StoppedState:
stop();
break;
case QMediaRecorder::PausedState:
pause();
break;
case QMediaRecorder::RecordingState:
record();
break;
}
}
QMediaRecorder::State AudioCaptureSession::state() const
{
return m_state;
}
void AudioCaptureSession::setStatus(QMediaRecorder::Status status)
{
if (m_status == status)
return;
m_status = status;
emit statusChanged(m_status);
}
QMediaRecorder::Status AudioCaptureSession::status() const
{
return m_status;
}
QDir AudioCaptureSession::defaultDir() const
@@ -258,9 +225,29 @@ QDir AudioCaptureSession::defaultDir() const
return QDir();
}
QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ext) const
QString AudioCaptureSession::generateFileName(const QString &requestedName,
const QString &extension) const
{
if (requestedName.isEmpty())
return generateFileName(defaultDir(), extension);
QString path = requestedName;
if (QFileInfo(path).isRelative())
path = defaultDir().absoluteFilePath(path);
if (QFileInfo(path).isDir())
return generateFileName(QDir(path), extension);
if (!path.endsWith(extension))
path.append(QString(".%1").arg(extension));
return path;
}
QString AudioCaptureSession::generateFileName(const QDir &dir,
const QString &ext) const
{
int lastClip = 0;
foreach(QString fileName, dir.entryList(QStringList() << QString("clip_*.%1").arg(ext))) {
int imgNumber = fileName.mid(5, fileName.size()-6-ext.length()).toInt();
@@ -277,25 +264,45 @@ QString AudioCaptureSession::generateFileName(const QDir &dir, const QString &ex
void AudioCaptureSession::record()
{
if(!m_audioInput) {
setFormat(m_format);
}
if (m_status == QMediaRecorder::PausedStatus) {
m_audioInput->resume();
} else {
if (m_deviceInfo.isNull()) {
emit error(QMediaRecorder::ResourceError,
QStringLiteral("No input device available."));
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
setStatus(QMediaRecorder::UnavailableStatus);
return;
}
m_actualSink = m_sink;
setStatus(QMediaRecorder::LoadingStatus);
if (m_actualSink.isEmpty()) {
QString ext = wavFile ? QLatin1String("wav") : QLatin1String("raw");
m_actualSink = generateFileName(defaultDir(), ext);
}
m_format = m_deviceInfo.nearestFormat(m_format);
m_audioInput = new QAudioInput(m_deviceInfo, m_format);
connect(m_audioInput, SIGNAL(stateChanged(QAudio::State)),
this, SLOT(audioInputStateChanged(QAudio::State)));
connect(m_audioInput, SIGNAL(notify()),
this, SLOT(notify()));
if(m_actualSink.toLocalFile().length() > 0)
file.setFileName(m_actualSink.toLocalFile());
else
file.setFileName(m_actualSink.toString());
if(m_audioInput) {
if(m_state == QMediaRecorder::StoppedState) {
if(file.open(QIODevice::WriteOnly)) {
QString filePath = generateFileName(
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
: m_requestedOutputLocation.toString(),
m_wavFile ? QLatin1String("wav")
: QLatin1String("raw"));
m_actualOutputLocation = QUrl::fromLocalFile(filePath);
if (m_actualOutputLocation != m_requestedOutputLocation)
emit actualLocationChanged(m_actualOutputLocation);
file.setFileName(filePath);
setStatus(QMediaRecorder::LoadedStatus);
setStatus(QMediaRecorder::StartingStatus);
if (file.open(QIODevice::WriteOnly)) {
if (m_wavFile) {
memset(&header,0,sizeof(CombinedHeader));
memcpy(header.riff.descriptor.id,"RIFF",4);
header.riff.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(), filesize-8
@@ -310,28 +317,26 @@ void AudioCaptureSession::record()
header.wave.bitsPerSample = m_format.sampleSize();
memcpy(header.data.descriptor.id,"data",4);
header.data.descriptor.size = 0xFFFFFFFF; // This should be updated on stop(),samples*channels*sampleSize/8
if (wavFile)
file.write((char*)&header,sizeof(CombinedHeader));
file.startProbes(m_format);
m_audioInput->start(qobject_cast<QIODevice*>(&file));
} else {
emit error(1,QString("can't open source, failed"));
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
file.write((char*)&header,sizeof(CombinedHeader));
}
file.startProbes(m_format);
m_audioInput->start(qobject_cast<QIODevice*>(&file));
} else {
delete m_audioInput;
m_audioInput = 0;
emit error(QMediaRecorder::ResourceError,
QStringLiteral("Can't open output location"));
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
setStatus(QMediaRecorder::UnloadedStatus);
}
}
m_state = QMediaRecorder::RecordingState;
}
void AudioCaptureSession::pause()
{
if(m_audioInput)
m_audioInput->stop();
m_state = QMediaRecorder::PausedState;
m_audioInput->suspend();
}
void AudioCaptureSession::stop()
@@ -340,7 +345,7 @@ void AudioCaptureSession::stop()
m_audioInput->stop();
file.stopProbes();
file.close();
if (wavFile) {
if (m_wavFile) {
qint32 fileSize = file.size()-8;
file.open(QIODevice::ReadWrite | QIODevice::Unbuffered);
file.read((char*)&header,sizeof(CombinedHeader));
@@ -350,9 +355,10 @@ void AudioCaptureSession::stop()
file.write((char*)&header,sizeof(CombinedHeader));
file.close();
}
m_position = 0;
delete m_audioInput;
m_audioInput = 0;
setStatus(QMediaRecorder::UnloadedStatus);
}
m_state = QMediaRecorder::StoppedState;
}
void AudioCaptureSession::addProbe(AudioCaptureProbeControl *probe)
@@ -365,45 +371,41 @@ void AudioCaptureSession::removeProbe(AudioCaptureProbeControl *probe)
file.removeProbe(probe);
}
void AudioCaptureSession::stateChanged(QAudio::State state)
void AudioCaptureSession::audioInputStateChanged(QAudio::State state)
{
switch(state) {
case QAudio::ActiveState:
emit stateChanged(QMediaRecorder::RecordingState);
break;
default:
if(!((m_state == QMediaRecorder::PausedState)||(m_state == QMediaRecorder::StoppedState)))
m_state = QMediaRecorder::StoppedState;
emit stateChanged(m_state);
break;
case QAudio::ActiveState:
setStatus(QMediaRecorder::RecordingStatus);
break;
case QAudio::SuspendedState:
setStatus(QMediaRecorder::PausedStatus);
break;
case QAudio::StoppedState:
setStatus(QMediaRecorder::FinalizingStatus);
break;
default:
break;
}
}
void AudioCaptureSession::notify()
{
m_position += m_audioInput->notifyInterval();
emit positionChanged(m_position);
emit positionChanged(position());
}
void AudioCaptureSession::setCaptureDevice(const QString &deviceName)
{
m_captureDevice = deviceName;
if(m_deviceInfo)
delete m_deviceInfo;
m_deviceInfo = 0;
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for(int i = 0; i < devices.size(); i++) {
if(qstrcmp(m_captureDevice.toLocal8Bit().constData(),
devices.at(i).deviceName().toLocal8Bit().constData())==0){
m_deviceInfo = new QAudioDeviceInfo(devices.at(i));
for (int i = 0; i < devices.size(); ++i) {
QAudioDeviceInfo info = devices.at(i);
if (m_captureDevice == info.deviceName()){
m_deviceInfo = info;
return;
}
}
m_deviceInfo = new QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice());
m_deviceInfo = QAudioDeviceInfo::defaultInputDevice();
}
QT_END_NAMESPACE

View File

@@ -55,7 +55,7 @@
#include <qaudioinput.h>
#include <qaudiodeviceinfo.h>
QT_USE_NAMESPACE
QT_BEGIN_NAMESPACE
class AudioCaptureProbeControl;
@@ -85,50 +85,58 @@ public:
~AudioCaptureSession();
QAudioFormat format() const;
QAudioDeviceInfo* deviceInfo() const;
bool isFormatSupported(const QAudioFormat &format) const;
bool setFormat(const QAudioFormat &format);
QStringList supportedContainers() const;
void setFormat(const QAudioFormat &format);
QString containerFormat() const;
void setContainerFormat(const QString &formatMimeType);
QString containerDescription(const QString &formatMimeType) const;
QUrl outputLocation() const;
bool setOutputLocation(const QUrl& sink);
bool setOutputLocation(const QUrl& location);
qint64 position() const;
int state() const;
void record();
void pause();
void stop();
void setState(QMediaRecorder::State state);
QMediaRecorder::State state() const;
QMediaRecorder::Status status() const;
void addProbe(AudioCaptureProbeControl *probe);
void removeProbe(AudioCaptureProbeControl *probe);
public slots:
void setCaptureDevice(const QString &deviceName);
signals:
void stateChanged(QMediaRecorder::State state);
void statusChanged(QMediaRecorder::Status status);
void positionChanged(qint64 position);
void actualLocationChanged(const QUrl &location);
void error(int error, const QString &errorString);
private slots:
void stateChanged(QAudio::State state);
void audioInputStateChanged(QAudio::State state);
void notify();
private:
void record();
void pause();
void stop();
void setStatus(QMediaRecorder::Status status);
QDir defaultDir() const;
QString generateFileName(const QDir &dir, const QString &ext) const;
QString generateFileName(const QString &requestedName,
const QString &extension) const;
QString generateFileName(const QDir &dir, const QString &extension) const;
FileProbeProxy file;
QString m_captureDevice;
QUrl m_sink;
QUrl m_actualSink;
QUrl m_requestedOutputLocation;
QUrl m_actualOutputLocation;
QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
QAudioInput *m_audioInput;
QAudioDeviceInfo *m_deviceInfo;
QAudioDeviceInfo m_deviceInfo;
QAudioFormat m_format;
qint64 m_position;
bool wavFile;
bool m_wavFile;
// WAV header stuff
@@ -171,4 +179,6 @@ private:
CombinedHeader header;
};
QT_END_NAMESPACE
#endif

View File

@@ -42,6 +42,8 @@
#include "audiocontainercontrol.h"
#include "audiocapturesession.h"
QT_BEGIN_NAMESPACE
AudioContainerControl::AudioContainerControl(QObject *parent)
:QMediaContainerControl(parent)
{
@@ -54,7 +56,8 @@ AudioContainerControl::~AudioContainerControl()
QStringList AudioContainerControl::supportedContainers() const
{
return m_session->supportedContainers();
return QStringList() << QStringLiteral("audio/x-wav")
<< QStringLiteral("audio/x-raw");
}
QString AudioContainerControl::containerFormat() const
@@ -64,11 +67,18 @@ QString AudioContainerControl::containerFormat() const
void AudioContainerControl::setContainerFormat(const QString &formatMimeType)
{
m_session->setContainerFormat(formatMimeType);
if (formatMimeType.isEmpty() || supportedContainers().contains(formatMimeType))
m_session->setContainerFormat(formatMimeType);
}
QString AudioContainerControl::containerDescription(const QString &formatMimeType) const
{
return m_session->containerDescription(formatMimeType);
if (QString::compare(formatMimeType, QLatin1String("audio/x-raw")) == 0)
return tr("RAW (headerless) file format");
if (QString::compare(formatMimeType, QLatin1String("audio/x-wav")) == 0)
return tr("WAV file format");
return QString();
}
QT_END_NAMESPACE

View File

@@ -47,9 +47,9 @@
#include <QtCore/qstringlist.h>
#include <QtCore/qmap.h>
class AudioCaptureSession;
QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE
class AudioCaptureSession;
class AudioContainerControl : public QMediaContainerControl
{
@@ -67,4 +67,6 @@ private:
AudioCaptureSession* m_session;
};
QT_END_NAMESPACE
#endif

View File

@@ -46,26 +46,43 @@
#include <QtCore/qdebug.h>
QT_BEGIN_NAMESPACE
static QAudioFormat audioSettingsToAudioFormat(const QAudioEncoderSettings &settings)
{
QAudioFormat fmt;
fmt.setCodec(settings.codec());
fmt.setChannelCount(settings.channelCount());
fmt.setSampleRate(settings.sampleRate());
if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) {
fmt.setSampleType(QAudioFormat::UnSignedInt);
fmt.setSampleSize(8);
} else {
fmt.setSampleSize(16);
fmt.setSampleType(QAudioFormat::SignedInt);
}
fmt.setByteOrder(QAudioDeviceInfo::defaultInputDevice().preferredFormat().byteOrder());
return fmt;
}
static QAudioEncoderSettings audioFormatToAudioSettings(const QAudioFormat &format)
{
QAudioEncoderSettings settings;
settings.setCodec(format.codec());
settings.setChannelCount(format.channelCount());
settings.setSampleRate(format.sampleRate());
settings.setEncodingMode(QMultimedia::ConstantBitRateEncoding);
settings.setBitRate(format.channelCount()
* format.sampleSize()
* format.sampleRate());
return settings;
}
AudioEncoderControl::AudioEncoderControl(QObject *parent)
:QAudioEncoderSettingsControl(parent)
{
m_session = qobject_cast<AudioCaptureSession*>(parent);
QT_PREPEND_NAMESPACE(QAudioFormat) fmt;
fmt.setSampleSize(8);
fmt.setChannelCount(1);
fmt.setSampleRate(8000);
fmt.setSampleType(QT_PREPEND_NAMESPACE(QAudioFormat)::SignedInt);
fmt.setCodec("audio/pcm");
fmt.setByteOrder(QAudioFormat::LittleEndian);
m_session->setFormat(fmt);
m_settings.setEncodingMode(QMultimedia::ConstantQualityEncoding);
m_settings.setCodec("audio/pcm");
m_settings.setBitRate(8000);
m_settings.setChannelCount(1);
m_settings.setSampleRate(8000);
m_settings.setQuality(QMultimedia::LowQuality);
update();
}
AudioEncoderControl::~AudioEncoderControl()
@@ -74,71 +91,85 @@ AudioEncoderControl::~AudioEncoderControl()
QStringList AudioEncoderControl::supportedAudioCodecs() const
{
QStringList list;
if (m_session->supportedContainers().size() > 0)
list.append("audio/pcm");
return list;
return QStringList() << QStringLiteral("audio/pcm");
}
QString AudioEncoderControl::codecDescription(const QString &codecName) const
{
if (codecName.contains(QLatin1String("audio/pcm")))
return tr("PCM audio data");
if (QString::compare(codecName, QLatin1String("audio/pcm")) == 0)
return tr("Linear PCM audio data");
return QString();
}
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &, bool *continuous) const
QList<int> AudioEncoderControl::supportedSampleRates(const QAudioEncoderSettings &settings, bool *continuous) const
{
if (continuous)
*continuous = false;
return m_session->deviceInfo()->supportedSampleRates();
if (settings.codec().isEmpty() || settings.codec() == QLatin1String("audio/pcm"))
return m_sampleRates;
return QList<int>();
}
QAudioEncoderSettings AudioEncoderControl::audioSettings() const
{
return m_settings;
return audioFormatToAudioSettings(m_session->format());
}
void AudioEncoderControl::setAudioSettings(const QAudioEncoderSettings &settings)
{
QAudioFormat fmt = m_session->format();
QAudioFormat fmt = audioSettingsToAudioFormat(settings);
if (settings.encodingMode() == QMultimedia::ConstantQualityEncoding) {
if (settings.quality() == QMultimedia::LowQuality) {
fmt.setCodec("audio/pcm");
switch (settings.quality()) {
case QMultimedia::VeryLowQuality:
fmt.setSampleSize(8);
fmt.setChannelCount(1);
fmt.setSampleRate(8000);
fmt.setSampleType(QAudioFormat::UnSignedInt);
} else if (settings.quality() == QMultimedia::NormalQuality) {
fmt.setSampleSize(16);
fmt.setChannelCount(1);
break;
case QMultimedia::LowQuality:
fmt.setSampleSize(8);
fmt.setSampleRate(22050);
fmt.setSampleType(QAudioFormat::SignedInt);
} else {
fmt.setSampleType(QAudioFormat::UnSignedInt);
break;
case QMultimedia::HighQuality:
fmt.setSampleSize(16);
fmt.setSampleRate(48000);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
case QMultimedia::VeryHighQuality:
fmt.setSampleSize(16);
fmt.setSampleRate(96000);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
case QMultimedia::NormalQuality:
default:
fmt.setSampleSize(16);
fmt.setChannelCount(1);
fmt.setSampleRate(44100);
fmt.setSampleType(QAudioFormat::SignedInt);
}
} else {
fmt.setChannelCount(settings.channelCount());
fmt.setSampleRate(settings.sampleRate());
if (settings.sampleRate() == 8000 && settings.bitRate() == 8000) {
fmt.setSampleType(QAudioFormat::UnSignedInt);
fmt.setSampleSize(8);
} else {
fmt.setSampleSize(16);
fmt.setSampleType(QAudioFormat::SignedInt);
break;
}
}
fmt.setCodec("audio/pcm");
m_session->setFormat(fmt);
m_settings = settings;
}
void AudioEncoderControl::update()
{
m_sampleRates.clear();
QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for (int i = 0; i < devices.size(); ++i) {
QList<int> rates = devices.at(i).supportedSampleRates();
for (int j = 0; j < rates.size(); ++j) {
int rate = rates.at(j);
if (!m_sampleRates.contains(rate))
m_sampleRates.append(rate);
}
}
qSort(m_sampleRates);
}
QT_END_NAMESPACE

View File

@@ -49,9 +49,9 @@
#include <qaudioformat.h>
class AudioCaptureSession;
QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE
class AudioCaptureSession;
class AudioEncoderControl : public QAudioEncoderSettingsControl
{
@@ -68,8 +68,12 @@ public:
void setAudioSettings(const QAudioEncoderSettings&);
private:
void update();
AudioCaptureSession* m_session;
QAudioEncoderSettings m_settings;
QList<int> m_sampleRates;
};
QT_END_NAMESPACE
#endif

View File

@@ -44,6 +44,7 @@
#include <qaudiodeviceinfo.h>
QT_BEGIN_NAMESPACE
AudioInputSelector::AudioInputSelector(QObject *parent)
:QAudioInputSelectorControl(parent)
@@ -79,7 +80,7 @@ QString AudioInputSelector::inputDescription(const QString& name) const
QString AudioInputSelector::defaultInput() const
{
return QAudioDeviceInfo(QAudioDeviceInfo::defaultInputDevice()).deviceName();
return QAudioDeviceInfo::defaultInputDevice().deviceName();
}
QString AudioInputSelector::activeInput() const
@@ -108,3 +109,5 @@ void AudioInputSelector::update()
m_descriptions.append(devices.at(i).deviceName());
}
}
QT_END_NAMESPACE

View File

@@ -46,9 +46,9 @@
#include "qaudioinputselectorcontrol.h"
class AudioCaptureSession;
QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE
class AudioCaptureSession;
class AudioInputSelector : public QAudioInputSelectorControl
{
@@ -74,4 +74,6 @@ private:
AudioCaptureSession* m_session;
};
QT_END_NAMESPACE
#endif // AUDIOINPUTSELECTOR_H

View File

@@ -44,15 +44,22 @@
#include <QtCore/qdebug.h>
QT_BEGIN_NAMESPACE
AudioMediaRecorderControl::AudioMediaRecorderControl(QObject *parent)
:QMediaRecorderControl(parent)
, m_state(QMediaRecorder::StoppedState)
, m_prevStatus(QMediaRecorder::UnloadedStatus)
: QMediaRecorderControl(parent)
{
m_session = qobject_cast<AudioCaptureSession*>(parent);
connect(m_session,SIGNAL(positionChanged(qint64)),this,SIGNAL(durationChanged(qint64)));
connect(m_session,SIGNAL(stateChanged(QMediaRecorder::State)), this,SLOT(updateStatus()));
connect(m_session,SIGNAL(error(int,QString)),this,SLOT(handleSessionError(int,QString)));
connect(m_session, SIGNAL(positionChanged(qint64)),
this, SIGNAL(durationChanged(qint64)));
connect(m_session, SIGNAL(stateChanged(QMediaRecorder::State)),
this, SIGNAL(stateChanged(QMediaRecorder::State)));
connect(m_session, SIGNAL(statusChanged(QMediaRecorder::Status)),
this, SIGNAL(statusChanged(QMediaRecorder::Status)));
connect(m_session, SIGNAL(actualLocationChanged(QUrl)),
this, SIGNAL(actualLocationChanged(QUrl)));
connect(m_session, SIGNAL(error(int,QString)),
this, SIGNAL(error(int,QString)));
}
AudioMediaRecorderControl::~AudioMediaRecorderControl()
@@ -71,21 +78,12 @@ bool AudioMediaRecorderControl::setOutputLocation(const QUrl& sink)
QMediaRecorder::State AudioMediaRecorderControl::state() const
{
return (QMediaRecorder::State)m_session->state();
return m_session->state();
}
QMediaRecorder::Status AudioMediaRecorderControl::status() const
{
static QMediaRecorder::Status statusTable[3][3] = {
//Stopped recorder state:
{ QMediaRecorder::LoadedStatus, QMediaRecorder::FinalizingStatus, QMediaRecorder::FinalizingStatus },
//Recording recorder state:
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus },
//Paused recorder state:
{ QMediaRecorder::StartingStatus, QMediaRecorder::RecordingStatus, QMediaRecorder::PausedStatus }
};
return statusTable[m_state][m_session->state()];
return m_session->status();
}
qint64 AudioMediaRecorderControl::duration() const
@@ -106,47 +104,19 @@ qreal AudioMediaRecorderControl::volume() const
void AudioMediaRecorderControl::setState(QMediaRecorder::State state)
{
if (m_state == state)
return;
m_state = state;
switch (state) {
case QMediaRecorder::StoppedState:
m_session->stop();
break;
case QMediaRecorder::PausedState:
m_session->pause();
break;
case QMediaRecorder::RecordingState:
m_session->record();
break;
}
updateStatus();
m_session->setState(state);
}
void AudioMediaRecorderControl::setMuted(bool)
void AudioMediaRecorderControl::setMuted(bool muted)
{
if (muted)
qWarning("Muting the audio recording is not supported.");
}
void AudioMediaRecorderControl::setVolume(qreal volume)
{
if (!qFuzzyCompare(volume, qreal(1.0)))
qWarning() << "Media service doesn't support recorder audio gain.";
qWarning("Changing the audio recording volume is not supported.");
}
void AudioMediaRecorderControl::updateStatus()
{
QMediaRecorder::Status newStatus = status();
if (m_prevStatus != newStatus) {
m_prevStatus = newStatus;
emit statusChanged(m_prevStatus);
}
}
void AudioMediaRecorderControl::handleSessionError(int code, const QString &description)
{
emit error(code, description);
setState(QMediaRecorder::StoppedState);
}
QT_END_NAMESPACE

View File

@@ -47,9 +47,9 @@
#include "qmediarecorder.h"
#include "qmediarecordercontrol.h"
class AudioCaptureSession;
QT_BEGIN_NAMESPACE
QT_USE_NAMESPACE
class AudioCaptureSession;
class AudioMediaRecorderControl : public QMediaRecorderControl
{
@@ -59,7 +59,7 @@ public:
~AudioMediaRecorderControl();
QUrl outputLocation() const;
bool setOutputLocation(const QUrl &sink);
bool setOutputLocation(const QUrl &location);
QMediaRecorder::State state() const;
QMediaRecorder::Status status() const;
@@ -71,19 +71,14 @@ public:
void applySettings() {}
public slots:
void setState(QMediaRecorder::State state);
void setMuted(bool);
void setVolume(qreal volume);
private slots:
void updateStatus();
void handleSessionError(int code, const QString &description);
private:
AudioCaptureSession* m_session;
QMediaRecorder::State m_state;
QMediaRecorder::Status m_prevStatus;
};
QT_END_NAMESPACE
#endif

View File

@@ -40,10 +40,8 @@
****************************************************************************/
#include "bbserviceplugin.h"
#ifndef Q_OS_BLACKBERRY_TABLET
#include "bbcameraservice.h"
#include "bbvideodeviceselectorcontrol.h"
#endif
#include "bbmediaplayerservice.h"
#include <QDebug>
@@ -56,10 +54,8 @@ BbServicePlugin::BbServicePlugin()
QMediaService *BbServicePlugin::create(const QString &key)
{
#ifndef Q_OS_BLACKBERRY_TABLET
if (key == QLatin1String(Q_MEDIASERVICE_CAMERA))
return new BbCameraService();
#endif
if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER))
return new BbMediaPlayerService();
@@ -106,9 +102,7 @@ QString BbServicePlugin::deviceDescription(const QByteArray &service, const QByt
void BbServicePlugin::updateDevices() const
{
#ifndef Q_OS_BLACKBERRY_TABLET
BbVideoDeviceSelectorControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
#endif
if (m_cameraDevices.isEmpty()) {
qWarning() << "No camera devices found";

View File

@@ -12,9 +12,7 @@ SOURCES += bbserviceplugin.cpp
include(common/common.pri)
!blackberry-playbook {
include(camera/camera.pri)
}
include(camera/camera.pri)
include(mediaplayer/mediaplayer.pri)

View File

@@ -139,6 +139,7 @@ QVariant BbCameraExposureControl::requestedValue(ExposureParameter parameter) co
QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
{
#ifndef Q_OS_BLACKBERRY_TABLET
if (parameter != QCameraExposureControl::ExposureMode) // no other parameter supported by BB10 API at the moment
return QVariantList();
@@ -170,6 +171,9 @@ QVariant BbCameraExposureControl::actualValue(ExposureParameter parameter) const
default:
return QVariant();
}
#else
return QVariant();
#endif
}
bool BbCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)

View File

@@ -45,11 +45,14 @@
#include <QDebug>
#include <QUrl>
#ifndef Q_OS_BLACKBERRY_TABLET
#include <audio/audio_manager_device.h>
#include <audio/audio_manager_volume.h>
#endif
QT_BEGIN_NAMESPACE
#ifndef Q_OS_BLACKBERRY_TABLET
static audio_manager_device_t currentAudioInputDevice()
{
audio_manager_device_t device = AUDIO_DEVICE_HEADSET;
@@ -62,6 +65,7 @@ static audio_manager_device_t currentAudioInputDevice()
return device;
}
#endif
BbCameraMediaRecorderControl::BbCameraMediaRecorderControl(BbCameraSession *session, QObject *parent)
: QMediaRecorderControl(parent)
@@ -103,12 +107,13 @@ bool BbCameraMediaRecorderControl::isMuted() const
{
bool muted = false;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_mute(currentAudioInputDevice(), &muted);
if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve mute status"));
return false;
}
#endif
return muted;
}
@@ -116,11 +121,13 @@ qreal BbCameraMediaRecorderControl::volume() const
{
double level = 0.0;
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_get_input_level(currentAudioInputDevice(), &level);
if (result != EOK) {
emit const_cast<BbCameraMediaRecorderControl*>(this)->error(QMediaRecorder::ResourceError, tr("Unable to retrieve audio input volume"));
return 0.0;
}
#endif
return (level / 100);
}
@@ -137,22 +144,26 @@ void BbCameraMediaRecorderControl::setState(QMediaRecorder::State state)
void BbCameraMediaRecorderControl::setMuted(bool muted)
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_mute(currentAudioInputDevice(), muted);
if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set mute status"));
} else {
emit mutedChanged(muted);
}
#endif
}
void BbCameraMediaRecorderControl::setVolume(qreal volume)
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = audio_manager_set_input_level(currentAudioInputDevice(), (volume * 100));
if (result != EOK) {
emit error(QMediaRecorder::ResourceError, tr("Unable to set audio input volume"));
} else {
emit volumeChanged(volume);
}
#endif
}
QT_END_NAMESPACE

View File

@@ -70,9 +70,11 @@ BbCameraOrientationHandler::BbCameraOrientationHandler(QObject *parent)
BbCameraOrientationHandler::~BbCameraOrientationHandler()
{
#ifndef Q_OS_BLACKBERRY_TABLET
const int result = orientation_stop_events(0);
if (result == BPS_FAILURE)
qWarning() << "Unable to unregister for orientation change events";
#endif
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
}

View File

@@ -75,8 +75,6 @@ static QString errorToString(camera_error_t error)
return QLatin1String("No permission");
case CAMERA_EBADR:
return QLatin1String("Invalid file descriptor");
case CAMERA_ENODATA:
return QLatin1String("Data does not exist");
case CAMERA_ENOENT:
return QLatin1String("File or directory does not exists");
case CAMERA_ENOMEM:
@@ -87,24 +85,28 @@ static QString errorToString(camera_error_t error)
return QLatin1String("Communication timeout");
case CAMERA_EALREADY:
return QLatin1String("Operation already in progress");
case CAMERA_EBUSY:
return QLatin1String("Camera busy");
case CAMERA_ENOSPC:
return QLatin1String("Disk is full");
case CAMERA_EUNINIT:
return QLatin1String("Camera library not initialized");
case CAMERA_EREGFAULT:
return QLatin1String("Callback registration failed");
case CAMERA_EMICINUSE:
return QLatin1String("Microphone in use already");
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_ENODATA:
return QLatin1String("Data does not exist");
case CAMERA_EBUSY:
return QLatin1String("Camera busy");
case CAMERA_EDESKTOPCAMERAINUSE:
return QLatin1String("Desktop camera in use already");
case CAMERA_ENOSPC:
return QLatin1String("Disk is full");
case CAMERA_EPOWERDOWN:
return QLatin1String("Camera in power down state");
case CAMERA_3ALOCKED:
return QLatin1String("3A have been locked");
case CAMERA_EVIEWFINDERFROZEN:
return QLatin1String("Freeze flag set");
#endif
default:
return QLatin1String("Unknown error");
}
@@ -658,6 +660,9 @@ void BbCameraSession::applyVideoSettings()
return;
}
const QSize resolution = m_videoEncoderSettings.resolution();
#ifndef Q_OS_BLACKBERRY_TABLET
QString videoCodec = m_videoEncoderSettings.codec();
if (videoCodec.isEmpty())
videoCodec = QLatin1String("h264");
@@ -670,8 +675,6 @@ void BbCameraSession::applyVideoSettings()
else if (videoCodec == QLatin1String("h264"))
cameraVideoCodec = CAMERA_VIDEOCODEC_H264;
const QSize resolution = m_videoEncoderSettings.resolution();
qreal frameRate = m_videoEncoderSettings.frameRate();
if (frameRate == 0) {
const QList<qreal> frameRates = supportedFrameRates(QVideoEncoderSettings(), 0);
@@ -690,12 +693,16 @@ void BbCameraSession::applyVideoSettings()
cameraAudioCodec = CAMERA_AUDIOCODEC_AAC;
else if (audioCodec == QLatin1String("raw"))
cameraAudioCodec = CAMERA_AUDIOCODEC_RAW;
result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height(),
CAMERA_IMGPROP_VIDEOCODEC, cameraVideoCodec,
CAMERA_IMGPROP_AUDIOCODEC, cameraAudioCodec);
#else
result = camera_set_video_property(m_handle,
CAMERA_IMGPROP_WIDTH, resolution.width(),
CAMERA_IMGPROP_HEIGHT, resolution.height());
#endif
if (result != CAMERA_EOK) {
qWarning() << "Unable to apply video settings:" << result;
@@ -979,10 +986,14 @@ static void viewFinderStatusCallback(camera_handle_t handle, camera_devstatus_t
if (status == CAMERA_STATUS_FOCUS_CHANGE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleFocusStatusChanged", Qt::QueuedConnection, Q_ARG(int, value));
} else if (status == CAMERA_STATUS_POWERUP) {
return;
}
#ifndef Q_OS_BLACKBERRY_TABLET
else if (status == CAMERA_STATUS_POWERUP) {
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleCameraPowerUp", Qt::QueuedConnection);
}
#endif
}
bool BbCameraSession::startViewFinder()
@@ -1159,6 +1170,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
Q_UNUSED(handle)
Q_UNUSED(value)
#ifndef Q_OS_BLACKBERRY_TABLET
if (status == CAMERA_STATUS_VIDEO_PAUSE) {
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingPaused", Qt::QueuedConnection);
@@ -1166,6 +1178,7 @@ static void videoRecordingStatusCallback(camera_handle_t handle, camera_devstatu
BbCameraSession *session = static_cast<BbCameraSession*>(context);
QMetaObject::invokeMethod(session, "handleVideoRecordingResumed", Qt::QueuedConnection);
}
#endif
}
bool BbCameraSession::startVideoRecording()

View File

@@ -156,10 +156,12 @@ QVariant BbCameraViewfinderSettingsControl::viewfinderParameter(ViewfinderParame
return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_CBYCRY:
return QVideoFrame::Format_Invalid;
#ifndef Q_OS_BLACKBERRY_TABLET
case CAMERA_FRAMETYPE_COMPRESSEDVIDEO:
return QVideoFrame::Format_Invalid;
case CAMERA_FRAMETYPE_COMPRESSEDAUDIO:
return QVideoFrame::Format_Invalid;
#endif
default:
return QVideoFrame::Format_Invalid;
}

View File

@@ -46,4 +46,8 @@ SOURCES += \
$$PWD/bbvideodeviceselectorcontrol.cpp \
$$PWD/bbvideorenderercontrol.cpp
LIBS += -lcamapi -laudio_manager
LIBS += -lcamapi
!blackberry-playbook {
LIBS += -laudio_manager
}

View File

@@ -39,6 +39,12 @@
**
****************************************************************************/
#include <QtMultimedia/qmediametadata.h>
#include <QtCore/qcoreapplication.h>
#include <QSize>
#include <qdatetime.h>
#include <qimage.h>
#include <dshow.h>
#include <initguid.h>
#include <qnetwork.h>
@@ -46,8 +52,56 @@
#include "directshowmetadatacontrol.h"
#include "directshowplayerservice.h"
#include <QtMultimedia/qmediametadata.h>
#include <QtCore/qcoreapplication.h>
#ifndef QT_NO_WMSDK
#include <wmsdk.h>
#endif
#ifndef QT_NO_SHELLITEM
#include <ShlObj.h>
#include <propkeydef.h>
#include <private/qsystemlibrary_p.h>
DEFINE_PROPERTYKEY(PKEY_Author, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 4);
DEFINE_PROPERTYKEY(PKEY_Title, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 2);
DEFINE_PROPERTYKEY(PKEY_Media_SubTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 38);
DEFINE_PROPERTYKEY(PKEY_ParentalRating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 21);
DEFINE_PROPERTYKEY(PKEY_Comment, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 6);
DEFINE_PROPERTYKEY(PKEY_Copyright, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 11);
DEFINE_PROPERTYKEY(PKEY_Media_ProviderStyle, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 40);
DEFINE_PROPERTYKEY(PKEY_Media_Year, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 5);
DEFINE_PROPERTYKEY(PKEY_Media_DateEncoded, 0x2E4B640D, 0x5019, 0x46D8, 0x88, 0x81, 0x55, 0x41, 0x4C, 0xC5, 0xCA, 0xA0, 100);
DEFINE_PROPERTYKEY(PKEY_Rating, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 9);
DEFINE_PROPERTYKEY(PKEY_Keywords, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 5);
DEFINE_PROPERTYKEY(PKEY_Language, 0xD5CDD502, 0x2E9C, 0x101B, 0x93, 0x97, 0x08, 0x00, 0x2B, 0x2C, 0xF9, 0xAE, 28);
DEFINE_PROPERTYKEY(PKEY_Media_Publisher, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 30);
DEFINE_PROPERTYKEY(PKEY_Media_Duration, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
DEFINE_PROPERTYKEY(PKEY_Audio_EncodingBitrate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
DEFINE_PROPERTYKEY(PKEY_Media_AverageLevel, 0x09EDD5B6, 0xB301, 0x43C5, 0x99, 0x90, 0xD0, 0x03, 0x02, 0xEF, 0xFD, 0x46, 100);
DEFINE_PROPERTYKEY(PKEY_Audio_ChannelCount, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 7);
DEFINE_PROPERTYKEY(PKEY_Audio_PeakValue, 0x2579E5D0, 0x1116, 0x4084, 0xBD, 0x9A, 0x9B, 0x4F, 0x7C, 0xB4, 0xDF, 0x5E, 100);
DEFINE_PROPERTYKEY(PKEY_Audio_SampleRate, 0x64440490, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 5);
DEFINE_PROPERTYKEY(PKEY_Music_AlbumTitle, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 4);
DEFINE_PROPERTYKEY(PKEY_Music_AlbumArtist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 13);
DEFINE_PROPERTYKEY(PKEY_Music_Artist, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 2);
DEFINE_PROPERTYKEY(PKEY_Music_Composer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 19);
DEFINE_PROPERTYKEY(PKEY_Music_Conductor, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 36);
DEFINE_PROPERTYKEY(PKEY_Music_Lyrics, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 12);
DEFINE_PROPERTYKEY(PKEY_Music_Mood, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 39);
DEFINE_PROPERTYKEY(PKEY_Music_TrackNumber, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 7);
DEFINE_PROPERTYKEY(PKEY_Music_Genre, 0x56A3372E, 0xCE9C, 0x11D2, 0x9F, 0x0E, 0x00, 0x60, 0x97, 0xC6, 0x86, 0xF6, 11);
DEFINE_PROPERTYKEY(PKEY_ThumbnailStream, 0xF29F85E0, 0x4FF9, 0x1068, 0xAB, 0x91, 0x08, 0x00, 0x2B, 0x27, 0xB3, 0xD9, 27);
DEFINE_PROPERTYKEY(PKEY_Video_FrameHeight, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 4);
DEFINE_PROPERTYKEY(PKEY_Video_FrameWidth, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 3);
DEFINE_PROPERTYKEY(PKEY_Video_HorizontalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 42);
DEFINE_PROPERTYKEY(PKEY_Video_VerticalAspectRatio, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 45);
DEFINE_PROPERTYKEY(PKEY_Video_FrameRate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 6);
DEFINE_PROPERTYKEY(PKEY_Video_EncodingBitrate, 0x64440491, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 8);
DEFINE_PROPERTYKEY(PKEY_Video_Director, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 20);
DEFINE_PROPERTYKEY(PKEY_Media_Writer, 0x64440492, 0x4C8B, 0x11D1, 0x8B, 0x70, 0x08, 0x00, 0x36, 0xB1, 0x1A, 0x03, 23);
typedef HRESULT (WINAPI *q_SHCreateItemFromParsingName)(PCWSTR, IBindCtx *, const GUID&, void **);
static q_SHCreateItemFromParsingName sHCreateItemFromParsingName = 0;
#endif
#ifndef QT_NO_WMSDK
namespace
@@ -70,12 +124,12 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
{ QMediaMetaData::Genre, L"WM/Genre" },
//{ QMediaMetaData::Date, 0 },
{ QMediaMetaData::Year, L"WM/Year" },
{ QMediaMetaData::UserRating, L"UserRating" },
{ QMediaMetaData::UserRating, L"Rating" },
//{ QMediaMetaData::MetaDatawords, 0 },
{ QMediaMetaData::Language, L"Language" },
{ QMediaMetaData::Language, L"WM/Language" },
{ QMediaMetaData::Publisher, L"WM/Publisher" },
{ QMediaMetaData::Copyright, L"Copyright" },
{ QMediaMetaData::ParentalRating, L"ParentalRating" },
{ QMediaMetaData::ParentalRating, L"WM/ParentalRating" },
//{ QMediaMetaData::RatingOrganisation, L"RatingOrganisation" },
// Media
@@ -103,11 +157,11 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
//{ QMediaMetaData::CoverArtUriLarge, 0 },
// Image/Video
//{ QMediaMetaData::Resolution, 0 },
//{ QMediaMetaData::PixelAspectRatio, 0 },
{ QMediaMetaData::Resolution, L"WM/VideoHeight" },
{ QMediaMetaData::PixelAspectRatio, L"AspectRatioX" },
// Video
//{ QMediaMetaData::FrameRate, 0 },
{ QMediaMetaData::VideoFrameRate, L"WM/VideoFrameRate" },
{ QMediaMetaData::VideoBitRate, L"VideoBitRate" },
{ QMediaMetaData::VideoCodec, L"VideoCodec" },
@@ -118,12 +172,6 @@ static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
{ QMediaMetaData::Director, L"WM/Director" },
{ QMediaMetaData::LeadPerformer, L"LeadPerformer" },
{ QMediaMetaData::Writer, L"WM/Writer" },
// Photos
{ QMediaMetaData::CameraManufacturer, L"CameraManufacturer" },
{ QMediaMetaData::CameraModel, L"CameraModel" },
{ QMediaMetaData::Event, L"Event" },
{ QMediaMetaData::Subject, L"Subject" }
};
static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
@@ -150,7 +198,7 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
case WMT_TYPE_STRING:
{
QString string;
string.resize(size / 2 - 1);
string.resize(size / 2); // size is in bytes, string is in UTF16
if (header->GetAttributeByName(
&streamNumber,
@@ -227,12 +275,58 @@ static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
}
#endif
#ifndef QT_NO_SHELLITEM
static QVariant convertValue(const PROPVARIANT& var)
{
QVariant value;
switch (var.vt) {
case VT_LPWSTR:
value = QString::fromUtf16(reinterpret_cast<const ushort*>(var.pwszVal));
break;
case VT_UI4:
value = uint(var.ulVal);
break;
case VT_UI8:
value = qulonglong(var.uhVal.QuadPart);
break;
case VT_BOOL:
value = bool(var.boolVal);
break;
case VT_FILETIME:
SYSTEMTIME sysDate;
if (!FileTimeToSystemTime(&var.filetime, &sysDate))
break;
value = QDate(sysDate.wYear, sysDate.wMonth, sysDate.wDay);
break;
case VT_STREAM:
{
STATSTG stat;
if (FAILED(var.pStream->Stat(&stat, STATFLAG_NONAME)))
break;
void *data = malloc(stat.cbSize.QuadPart);
ULONG read = 0;
if (FAILED(var.pStream->Read(data, stat.cbSize.QuadPart, &read))) {
free(data);
break;
}
value = QImage::fromData(reinterpret_cast<const uchar*>(data), read);
free(data);
}
break;
case VT_VECTOR | VT_LPWSTR:
QStringList vList;
for (ULONG i = 0; i < var.calpwstr.cElems; ++i)
vList.append(QString::fromUtf16(reinterpret_cast<const ushort*>(var.calpwstr.pElems[i])));
value = vList;
break;
}
return value;
}
#endif
DirectShowMetaDataControl::DirectShowMetaDataControl(QObject *parent)
: QMetaDataReaderControl(parent)
, m_content(0)
#ifndef QT_NO_WMSDK
, m_headerInfo(0)
#endif
, m_available(false)
{
}
@@ -242,75 +336,229 @@ DirectShowMetaDataControl::~DirectShowMetaDataControl()
bool DirectShowMetaDataControl::isMetaDataAvailable() const
{
#ifndef QT_NO_WMSDK
return m_content || m_headerInfo;
#else
return m_content;
#endif
return m_available;
}
QVariant DirectShowMetaDataControl::metaData(const QString &key) const
{
QVariant value;
#ifndef QT_NO_WMSDK
if (m_headerInfo) {
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
if (qt_wmMetaDataKeys[i].key == key) {
value = getValue(m_headerInfo, qt_wmMetaDataKeys[i].token);
break;
}
}
} else if (m_content) {
#else
if (m_content) {
#endif
BSTR string = 0;
if (key == QMediaMetaData::Author)
m_content->get_AuthorName(&string);
else if (key == QMediaMetaData::Title)
m_content->get_Title(&string);
else if (key == QMediaMetaData::ParentalRating)
m_content->get_Rating(&string);
else if (key == QMediaMetaData::Description)
m_content->get_Description(&string);
else if (key == QMediaMetaData::Copyright)
m_content->get_Copyright(&string);
if (string) {
value = QString::fromUtf16(reinterpret_cast<ushort *>(string), ::SysStringLen(string));
::SysFreeString(string);
}
}
return value;
return m_metadata.value(key);
}
QStringList DirectShowMetaDataControl::availableMetaData() const
{
return QStringList();
return m_metadata.keys();
}
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source)
static QString convertBSTR(BSTR *string)
{
if (m_content)
m_content->Release();
QString value = QString::fromUtf16(reinterpret_cast<ushort *>(*string),
::SysStringLen(*string));
if (!graph || graph->QueryInterface(
IID_IAMMediaContent, reinterpret_cast<void **>(&m_content)) != S_OK) {
m_content = 0;
::SysFreeString(*string);
string = 0;
return value;
}
void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source, const QString &fileSrc)
{
m_metadata.clear();
#ifndef QT_NO_SHELLITEM
if (!sHCreateItemFromParsingName) {
QSystemLibrary lib(QStringLiteral("shell32"));
sHCreateItemFromParsingName = (q_SHCreateItemFromParsingName)(lib.resolve("SHCreateItemFromParsingName"));
}
#ifdef QT_NO_WMSDK
Q_UNUSED(source);
#else
if (m_headerInfo)
m_headerInfo->Release();
if (!fileSrc.isEmpty() && sHCreateItemFromParsingName) {
IShellItem2* shellItem = 0;
if (sHCreateItemFromParsingName(reinterpret_cast<const WCHAR*>(fileSrc.utf16()),
0, IID_PPV_ARGS(&shellItem)) == S_OK) {
m_headerInfo = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
IPropertyStore *pStore = 0;
if (shellItem->GetPropertyStore(GPS_DEFAULT, IID_PPV_ARGS(&pStore)) == S_OK) {
DWORD cProps;
if (SUCCEEDED(pStore->GetCount(&cProps))) {
for (DWORD i = 0; i < cProps; ++i)
{
PROPERTYKEY key;
PROPVARIANT var;
PropVariantInit(&var);
if (FAILED(pStore->GetAt(i, &key)))
continue;
if (FAILED(pStore->GetValue(key, &var)))
continue;
if (key == PKEY_Author) {
m_metadata.insert(QMediaMetaData::Author, convertValue(var));
} else if (key == PKEY_Title) {
m_metadata.insert(QMediaMetaData::Title, convertValue(var));
} else if (key == PKEY_Media_SubTitle) {
m_metadata.insert(QMediaMetaData::SubTitle, convertValue(var));
} else if (key == PKEY_ParentalRating) {
m_metadata.insert(QMediaMetaData::ParentalRating, convertValue(var));
} else if (key == PKEY_Comment) {
m_metadata.insert(QMediaMetaData::Description, convertValue(var));
} else if (key == PKEY_Copyright) {
m_metadata.insert(QMediaMetaData::Copyright, convertValue(var));
} else if (key == PKEY_Media_ProviderStyle) {
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
} else if (key == PKEY_Media_Year) {
m_metadata.insert(QMediaMetaData::Year, convertValue(var));
} else if (key == PKEY_Media_DateEncoded) {
m_metadata.insert(QMediaMetaData::Date, convertValue(var));
} else if (key == PKEY_Rating) {
m_metadata.insert(QMediaMetaData::UserRating,
int((convertValue(var).toUInt() - 1) / qreal(98) * 100));
} else if (key == PKEY_Keywords) {
m_metadata.insert(QMediaMetaData::Keywords, convertValue(var));
} else if (key == PKEY_Language) {
m_metadata.insert(QMediaMetaData::Language, convertValue(var));
} else if (key == PKEY_Media_Publisher) {
m_metadata.insert(QMediaMetaData::Publisher, convertValue(var));
} else if (key == PKEY_Media_Duration) {
m_metadata.insert(QMediaMetaData::Duration,
(convertValue(var).toLongLong() + 10000) / 10000);
} else if (key == PKEY_Audio_EncodingBitrate) {
m_metadata.insert(QMediaMetaData::AudioBitRate, convertValue(var));
} else if (key == PKEY_Media_AverageLevel) {
m_metadata.insert(QMediaMetaData::AverageLevel, convertValue(var));
} else if (key == PKEY_Audio_ChannelCount) {
m_metadata.insert(QMediaMetaData::ChannelCount, convertValue(var));
} else if (key == PKEY_Audio_PeakValue) {
m_metadata.insert(QMediaMetaData::PeakValue, convertValue(var));
} else if (key == PKEY_Audio_SampleRate) {
m_metadata.insert(QMediaMetaData::SampleRate, convertValue(var));
} else if (key == PKEY_Music_AlbumTitle) {
m_metadata.insert(QMediaMetaData::AlbumTitle, convertValue(var));
} else if (key == PKEY_Music_AlbumArtist) {
m_metadata.insert(QMediaMetaData::AlbumArtist, convertValue(var));
} else if (key == PKEY_Music_Artist) {
m_metadata.insert(QMediaMetaData::ContributingArtist, convertValue(var));
} else if (key == PKEY_Music_Composer) {
m_metadata.insert(QMediaMetaData::Composer, convertValue(var));
} else if (key == PKEY_Music_Conductor) {
m_metadata.insert(QMediaMetaData::Conductor, convertValue(var));
} else if (key == PKEY_Music_Lyrics) {
m_metadata.insert(QMediaMetaData::Lyrics, convertValue(var));
} else if (key == PKEY_Music_Mood) {
m_metadata.insert(QMediaMetaData::Mood, convertValue(var));
} else if (key == PKEY_Music_TrackNumber) {
m_metadata.insert(QMediaMetaData::TrackNumber, convertValue(var));
} else if (key == PKEY_Music_Genre) {
m_metadata.insert(QMediaMetaData::Genre, convertValue(var));
} else if (key == PKEY_ThumbnailStream) {
m_metadata.insert(QMediaMetaData::ThumbnailImage, convertValue(var));
} else if (key == PKEY_Video_FrameHeight) {
QSize res;
res.setHeight(convertValue(var).toUInt());
if (SUCCEEDED(pStore->GetValue(PKEY_Video_FrameWidth, &var)))
res.setWidth(convertValue(var).toUInt());
m_metadata.insert(QMediaMetaData::Resolution, res);
} else if (key == PKEY_Video_HorizontalAspectRatio) {
QSize aspectRatio;
aspectRatio.setWidth(convertValue(var).toUInt());
if (SUCCEEDED(pStore->GetValue(PKEY_Video_VerticalAspectRatio, &var)))
aspectRatio.setHeight(convertValue(var).toUInt());
m_metadata.insert(QMediaMetaData::PixelAspectRatio, aspectRatio);
} else if (key == PKEY_Video_FrameRate) {
m_metadata.insert(QMediaMetaData::VideoFrameRate,
convertValue(var).toReal() / 1000);
} else if (key == PKEY_Video_EncodingBitrate) {
m_metadata.insert(QMediaMetaData::VideoBitRate, convertValue(var));
} else if (key == PKEY_Video_Director) {
m_metadata.insert(QMediaMetaData::Director, convertValue(var));
} else if (key == PKEY_Media_Writer) {
m_metadata.insert(QMediaMetaData::Writer, convertValue(var));
}
PropVariantClear(&var);
}
}
pStore->Release();
}
shellItem->Release();
}
}
if (!m_metadata.isEmpty())
goto send_event;
#endif
#ifndef QT_NO_WMSDK
IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
if (info) {
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
QVariant var = getValue(info, qt_wmMetaDataKeys[i].token);
if (var.isValid()) {
QString key = qt_wmMetaDataKeys[i].key;
if (key == QMediaMetaData::Duration) {
// duration is provided in 100-nanosecond units, convert to milliseconds
var = (var.toLongLong() + 10000) / 10000;
} else if (key == QMediaMetaData::Resolution) {
QSize res;
res.setHeight(var.toUInt());
res.setWidth(getValue(info, L"WM/VideoWidth").toUInt());
var = res;
} else if (key == QMediaMetaData::VideoFrameRate) {
var = var.toReal() / 1000.f;
} else if (key == QMediaMetaData::PixelAspectRatio) {
QSize aspectRatio;
aspectRatio.setWidth(var.toUInt());
aspectRatio.setHeight(getValue(info, L"AspectRatioY").toUInt());
var = aspectRatio;
} else if (key == QMediaMetaData::UserRating) {
var = (var.toUInt() - 1) / qreal(98) * 100;
}
m_metadata.insert(key, var);
}
}
info->Release();
}
if (!m_metadata.isEmpty())
goto send_event;
#endif
{
IAMMediaContent *content = 0;
if ((!graph || graph->QueryInterface(
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)
&& (!source || source->QueryInterface(
IID_IAMMediaContent, reinterpret_cast<void **>(&content)) != S_OK)) {
content = 0;
}
if (content) {
BSTR string = 0;
if (content->get_AuthorName(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Author, convertBSTR(&string));
if (content->get_Title(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Title, convertBSTR(&string));
if (content->get_Description(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Description, convertBSTR(&string));
if (content->get_Rating(&string) == S_OK)
m_metadata.insert(QMediaMetaData::UserRating, convertBSTR(&string));
if (content->get_Copyright(&string) == S_OK)
m_metadata.insert(QMediaMetaData::Copyright, convertBSTR(&string));
content->Release();
}
}
send_event:
// DirectShowMediaPlayerService holds a lock at this point so defer emitting signals to a later
// time.
QCoreApplication::postEvent(this, new QEvent(QEvent::Type(MetaDataChanged)));
@@ -321,12 +569,12 @@ void DirectShowMetaDataControl::customEvent(QEvent *event)
if (event->type() == QEvent::Type(MetaDataChanged)) {
event->accept();
bool oldAvailable = m_available;
m_available = !m_metadata.isEmpty();
if (m_available != oldAvailable)
emit metaDataAvailableChanged(m_available);
emit metaDataChanged();
#ifndef QT_NO_WMSDK
emit metaDataAvailableChanged(m_content || m_headerInfo);
#else
emit metaDataAvailableChanged(m_content);
#endif
} else {
QMetaDataReaderControl::customEvent(event);
}

View File

@@ -46,12 +46,6 @@
#include "directshowglobal.h"
#include <qnetwork.h>
#ifndef QT_NO_WMSDK
#include <wmsdk.h>
#endif
#include <QtCore/qcoreevent.h>
class DirectShowPlayerService;
@@ -70,7 +64,8 @@ public:
QVariant metaData(const QString &key) const;
QStringList availableMetaData() const;
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source);
void updateGraph(IFilterGraph2 *graph, IBaseFilter *source,
const QString &fileSrc = QString());
protected:
void customEvent(QEvent *event);
@@ -81,10 +76,8 @@ private:
MetaDataChanged = QEvent::User
};
IAMMediaContent *m_content;
#ifndef QT_NO_WMSDK
IWMHeaderInfo *m_headerInfo;
#endif
QVariantMap m_metadata;
bool m_available;
};
#endif

View File

@@ -50,6 +50,10 @@
#include "vmr9videowindowcontrol.h"
#endif
#ifndef QT_NO_WMSDK
#include <wmsdk.h>
#endif
#include "qmediacontent.h"
#include <QtCore/qcoreapplication.h>
@@ -268,11 +272,10 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
IBaseFilter *source = 0;
QMediaResource resource = m_resources.takeFirst();
QUrl url = resource.url();
m_url = resource.url();
HRESULT hr = E_FAIL;
if (url.scheme() == QLatin1String("http") || url.scheme() == QLatin1String("https")) {
if (m_url.scheme() == QLatin1String("http") || m_url.scheme() == QLatin1String("https")) {
static const GUID clsid_WMAsfReader = {
0x187463a0, 0x5bb7, 0x11d3, {0xac, 0xbe, 0x00, 0x80, 0xc7, 0x5e, 0x24, 0x6e} };
@@ -283,7 +286,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
if (IFileSourceFilter *fileSource = com_new<IFileSourceFilter>(
clsid_WMAsfReader, iid_IFileSourceFilter)) {
locker->unlock();
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(url.toString().utf16()), 0);
hr = fileSource->Load(reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), 0);
if (SUCCEEDED(hr)) {
source = com_cast<IBaseFilter>(fileSource, IID_IBaseFilter);
@@ -296,11 +299,11 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
fileSource->Release();
locker->relock();
}
} else if (url.scheme() == QLatin1String("qrc")) {
} else if (m_url.scheme() == QLatin1String("qrc")) {
DirectShowRcSource *rcSource = new DirectShowRcSource(m_loop);
locker->unlock();
if (rcSource->open(url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
if (rcSource->open(m_url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
source = rcSource;
else
rcSource->Release();
@@ -310,7 +313,7 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
if (!SUCCEEDED(hr)) {
locker->unlock();
hr = m_graph->AddSourceFilter(
reinterpret_cast<const OLECHAR *>(url.toString().utf16()), L"Source", &source);
reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), L"Source", &source);
locker->relock();
}
@@ -1128,7 +1131,7 @@ void DirectShowPlayerService::customEvent(QEvent *event)
QMutexLocker locker(&m_mutex);
m_playerControl->updateMediaInfo(m_duration, m_streamTypes, m_seekable);
m_metaDataControl->updateGraph(m_graph, m_source);
m_metaDataControl->updateGraph(m_graph, m_source, m_url.toString());
updateStatus();
} else if (event->type() == QEvent::Type(Error)) {

View File

@@ -43,5 +43,11 @@ qtHaveModule(widgets):!simulator {
$$PWD/vmr9videowindowcontrol.cpp
}
config_wshellitem {
QT += core-private
} else {
DEFINES += QT_NO_SHELLITEM
}
LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32 -lgdi32

View File

@@ -17,7 +17,7 @@ blackberry {
}
qnx {
SUBDIRS += qnx
SUBDIRS += audiocapture qnx
}
win32 {