Merge remote-tracking branch 'origin/5.3' into dev
Conflicts:
    .qmake.conf

Change-Id: Iecd8d7b94e52a8981526b12cffa40e99870ba62f
@@ -1,7 +1,7 @@
TEMPLATE = subdirs

SUBDIRS += src \
jar
SUBDIRS += src
android:!android-no-sdk: SUBDIRS += jar

qtHaveModule(quick) {
SUBDIRS += videonode

@@ -6,7 +6,7 @@ API_VERSION = android-11
JAVACLASSPATH += $$PWD/src

JAVASOURCES += $$PWD/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtCamera.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtCameraListener.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureListener.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtMultimediaUtils.java \

@@ -43,6 +43,7 @@ package org.qtproject.qt5.android.multimedia;

import java.io.IOException;
import java.lang.String;
import java.io.FileInputStream;

// API is level is < 9 unless marked otherwise.
import android.app.Activity;
@@ -247,6 +248,8 @@ public class QtAndroidMediaPlayer
if (mMediaPlayer == null) {
mMediaPlayer = new MediaPlayer();
setState(State.Idle);
// Make sure the new media player has the volume that was set on the QMediaPlayer
setVolumeHelper(mMuted ? 0 : mVolume);
}
}

@@ -376,6 +379,7 @@ public class QtAndroidMediaPlayer
mMediaPlayer.setDisplay(mSurfaceHolder);

AssetFileDescriptor afd = null;
FileInputStream fis = null;
try {
mUri = Uri.parse(path);
final boolean inAssets = (mUri.getScheme().compareTo("assets") == 0);
@@ -387,6 +391,10 @@ public class QtAndroidMediaPlayer
final long length = afd.getLength();
FileDescriptor fd = afd.getFileDescriptor();
mMediaPlayer.setDataSource(fd, offset, length);
} else if (mUri.getScheme().compareTo("file") == 0) {
fis = new FileInputStream(mUri.getPath());
FileDescriptor fd = fis.getFD();
mMediaPlayer.setDataSource(fd);
} else {
mMediaPlayer.setDataSource(mActivity, mUri);
}
@@ -402,9 +410,13 @@ public class QtAndroidMediaPlayer
} catch (final NullPointerException e) {
Log.d(TAG, "" + e.getMessage());
} finally {
if (afd !=null) {
try { afd.close(); } catch (final IOException ioe) { /* Ignore... */ }
}
try {
if (afd != null)
afd.close();
if (fis != null)
fis.close();
} catch (final IOException ioe) { /* Ignore... */ }

if ((mState & State.Initialized) == 0) {
setState(State.Error);
onErrorNative(MediaPlayer.MEDIA_ERROR_UNKNOWN,
@@ -471,6 +483,20 @@ public class QtAndroidMediaPlayer
}

public void setVolume(int volume)
{
if (volume < 0)
volume = 0;

if (volume > 100)
volume = 100;

mVolume = volume;

if (!mMuted)
setVolumeHelper(mVolume);
}

private void setVolumeHelper(int volume)
{
if ((mState & (State.Idle
| State.Initialized
@@ -482,18 +508,9 @@ public class QtAndroidMediaPlayer
return;
}

if (volume < 0)
volume = 0;

if (volume > 100)
volume = 100;

float newVolume = adjustVolume(volume);

try {
float newVolume = adjustVolume(volume);
mMediaPlayer.setVolume(newVolume, newVolume);
if (!mMuted)
mVolume = volume;
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
}
@@ -523,7 +540,7 @@ public class QtAndroidMediaPlayer
public void mute(final boolean mute)
{
mMuted = mute;
setVolume(mute ? 0 : mVolume);
setVolumeHelper(mute ? 0 : mVolume);
}

public boolean isMuted()

@@ -48,98 +48,33 @@ import android.util.Log;
import java.lang.Math;
import java.util.concurrent.locks.ReentrantLock;

public class QtCamera implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
public class QtCameraListener implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
{
private int m_cameraId = -1;
private Camera m_camera = null;
private byte[] m_cameraPreviewFirstBuffer = null;
private byte[] m_cameraPreviewSecondBuffer = null;
private int m_actualPreviewBuffer = 0;
private byte[][] m_cameraPreviewBuffer = null;
private volatile int m_actualPreviewBuffer = 0;
private final ReentrantLock m_buffersLock = new ReentrantLock();
private boolean m_isReleased = false;
private boolean m_fetchEachFrame = false;

private static final String TAG = "Qt Camera";

private QtCamera(int id, Camera cam)
private QtCameraListener(int id)
{
m_cameraId = id;
m_camera = cam;
}

public static QtCamera open(int cameraId)
public void preparePreviewBuffer(Camera camera)
{
try {
Camera cam = Camera.open(cameraId);
return new QtCamera(cameraId, cam);
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
return null;
}

public Camera.Parameters getParameters()
{
return m_camera.getParameters();
}

public void lock()
{
try {
m_camera.lock();
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
}

public void unlock()
{
try {
m_camera.unlock();
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
}

public void release()
{
m_isReleased = true;
m_camera.release();
}

public void reconnect()
{
try {
m_camera.reconnect();
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
}

public void setDisplayOrientation(int degrees)
{
m_camera.setDisplayOrientation(degrees);
}

public void setParameters(Camera.Parameters params)
{
try {
m_camera.setParameters(params);
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
}

public void setPreviewTexture(SurfaceTexture surfaceTexture)
{
try {
m_camera.setPreviewTexture(surfaceTexture);
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
Camera.Size previewSize = camera.getParameters().getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int)Math.ceil(bytesPerPixel*previewSize.width*previewSize.height);
m_buffersLock.lock();
if (m_cameraPreviewBuffer == null || m_cameraPreviewBuffer[0].length < bufferSizeNeeded)
m_cameraPreviewBuffer = new byte[2][bufferSizeNeeded];
m_buffersLock.unlock();
}

public void fetchEachFrame(boolean fetch)
@@ -147,51 +82,6 @@ public class QtCamera implements Camera.ShutterCallback,
m_fetchEachFrame = fetch;
}

public void startPreview()
{
Camera.Size previewSize = m_camera.getParameters().getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(m_camera.getParameters().getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int)Math.ceil(bytesPerPixel*previewSize.width*previewSize.height);

//We need to clear preview buffers queue here, but there is no method to do it
//Though just resetting preview callback do the trick
m_camera.setPreviewCallback(null);
m_buffersLock.lock();
if (m_cameraPreviewFirstBuffer == null || m_cameraPreviewFirstBuffer.length < bufferSizeNeeded)
m_cameraPreviewFirstBuffer = new byte[bufferSizeNeeded];
if (m_cameraPreviewSecondBuffer == null || m_cameraPreviewSecondBuffer.length < bufferSizeNeeded)
m_cameraPreviewSecondBuffer = new byte[bufferSizeNeeded];
addCallbackBuffer();
m_buffersLock.unlock();
m_camera.setPreviewCallbackWithBuffer(this);

m_camera.startPreview();
}

public void stopPreview()
{
m_camera.stopPreview();
}

public void autoFocus()
{
m_camera.autoFocus(this);
}

public void cancelAutoFocus()
{
m_camera.cancelAutoFocus();
}

public void takePicture()
{
try {
m_camera.takePicture(this, null, this);
} catch(Exception e) {
Log.d(TAG, e.getMessage());
}
}

public byte[] lockAndFetchPreviewBuffer()
{
//This method should always be followed by unlockPreviewBuffer()
@@ -199,10 +89,7 @@ public class QtCamera implements Camera.ShutterCallback,
//We should reset actualBuffer flag here to make sure we will not use old preview with future captures
byte[] result = null;
m_buffersLock.lock();
if (m_actualPreviewBuffer == 1)
result = m_cameraPreviewFirstBuffer;
else if (m_actualPreviewBuffer == 2)
result = m_cameraPreviewSecondBuffer;
result = m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 0 : 1];
m_actualPreviewBuffer = 0;
return result;
}
@@ -213,14 +100,9 @@ public class QtCamera implements Camera.ShutterCallback,
m_buffersLock.unlock();
}

private void addCallbackBuffer()
public byte[] callbackBuffer()
{
if (m_isReleased)
return;

m_camera.addCallbackBuffer((m_actualPreviewBuffer == 1)
? m_cameraPreviewSecondBuffer
: m_cameraPreviewFirstBuffer);
return m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0];
}

@Override
@@ -243,13 +125,13 @@ public class QtCamera implements Camera.ShutterCallback,
if (data != null && m_fetchEachFrame)
notifyFrameFetched(m_cameraId, data);

if (data == m_cameraPreviewFirstBuffer)
if (data == m_cameraPreviewBuffer[0])
m_actualPreviewBuffer = 1;
else if (data == m_cameraPreviewSecondBuffer)
else if (data == m_cameraPreviewBuffer[1])
m_actualPreviewBuffer = 2;
else
m_actualPreviewBuffer = 0;
addCallbackBuffer();
camera.addCallbackBuffer(m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0]);
m_buffersLock.unlock();
}

@@ -44,17 +44,17 @@

#include <qglobal.h>
#include <qsize.h>
#include <jni.h>

QT_BEGIN_NAMESPACE

class AndroidSurfaceTexture;

class QAndroidVideoOutput
{
public:
virtual ~QAndroidVideoOutput() { }

virtual jobject surfaceHolder() = 0;
virtual jobject surfaceTexture() { return 0; }
virtual AndroidSurfaceTexture *surfaceTexture() { return 0; }

virtual bool isReady() { return true; }

@@ -40,8 +40,8 @@
****************************************************************************/

#include "qandroidvideorendercontrol.h"
#include "androidsurfacetexture.h"

#include <QtCore/private/qjni_p.h>
#include <QAbstractVideoSurface>
#include <QVideoSurfaceFormat>
#include <qevent.h>
@@ -50,7 +50,6 @@
#include <qopenglfunctions.h>
#include <qopenglshaderprogram.h>
#include <qopenglframebufferobject.h>
#include <QtCore/private/qjnihelpers_p.h>

QT_BEGIN_NAMESPACE

@@ -110,9 +109,7 @@ private:
QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(0)
, m_androidSurface(0)
, m_surfaceTexture(0)
, m_surfaceHolder(0)
, m_externalTex(0)
, m_fbo(0)
, m_program(0)
@@ -175,9 +172,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
return false;
}

m_surfaceTexture = new JSurfaceTexture(m_externalTex);
m_surfaceTexture = new AndroidSurfaceTexture(m_externalTex);

if (m_surfaceTexture->object()) {
if (m_surfaceTexture->surfaceTexture() != 0) {
connect(m_surfaceTexture, SIGNAL(frameAvailable()), this, SLOT(onFrameAvailable()));
} else {
delete m_surfaceTexture;
@@ -196,42 +193,14 @@ void QAndroidVideoRendererControl::clearSurfaceTexture()
delete m_surfaceTexture;
m_surfaceTexture = 0;
}
if (m_androidSurface) {
if (QtAndroidPrivate::androidSdkVersion() > 13)
m_androidSurface->callMethod<void>("release");
delete m_androidSurface;
m_androidSurface = 0;
}
if (m_surfaceHolder) {
delete m_surfaceHolder;
m_surfaceHolder = 0;
}
}

jobject QAndroidVideoRendererControl::surfaceHolder()
AndroidSurfaceTexture *QAndroidVideoRendererControl::surfaceTexture()
{
if (!initSurfaceTexture())
return 0;

if (!m_surfaceHolder) {
m_androidSurface = new QJNIObjectPrivate("android/view/Surface",
"(Landroid/graphics/SurfaceTexture;)V",
m_surfaceTexture->object());

m_surfaceHolder = new QJNIObjectPrivate("org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder",
"(Landroid/view/Surface;)V",
m_androidSurface->object());
}

return m_surfaceHolder->object();
}

jobject QAndroidVideoRendererControl::surfaceTexture()
{
if (!initSurfaceTexture())
return 0;

return m_surfaceTexture->object();
return m_surfaceTexture;
}

void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
@@ -267,7 +236,7 @@ void QAndroidVideoRendererControl::onFrameAvailable()
QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);

if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->nativeResolution() != frame.size())) {
|| m_surface->surfaceFormat().frameSize() != frame.size())) {
m_surface->stop();
}

@@ -45,13 +45,13 @@
#include <qvideorenderercontrol.h>
#include <qmutex.h>
#include "qandroidvideooutput.h"
#include "jsurfacetexture.h"

QT_BEGIN_NAMESPACE

class QOpenGLTexture;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class AndroidSurfaceTexture;

class OpenGLResourcesDeleter : public QObject
{
@@ -86,8 +86,7 @@ public:
QAbstractVideoSurface *surface() const Q_DECL_OVERRIDE;
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;

jobject surfaceHolder() Q_DECL_OVERRIDE;
jobject surfaceTexture() Q_DECL_OVERRIDE;
AndroidSurfaceTexture *surfaceTexture() Q_DECL_OVERRIDE;
bool isReady() Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE;
@@ -112,9 +111,7 @@ private:
QAbstractVideoSurface *m_surface;
QSize m_nativeSize;

QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture;
QJNIObjectPrivate *m_surfaceHolder;
AndroidSurfaceTexture *m_surfaceTexture;

quint32 m_externalTex;
QOpenGLFramebufferObject *m_fbo;

@@ -12,7 +12,6 @@ SOURCES += \
$$PWD/qandroidcameraimagecapturecontrol.cpp \
$$PWD/qandroidcameracapturedestinationcontrol.cpp \
$$PWD/qandroidcameracapturebufferformatcontrol.cpp \
$$PWD/qandroidmediastoragelocation.cpp \
$$PWD/qandroidcameraflashcontrol.cpp \
$$PWD/qandroidcamerafocuscontrol.cpp \
$$PWD/qandroidcameralockscontrol.cpp \
@@ -37,7 +36,6 @@ HEADERS += \
$$PWD/qandroidcameraimagecapturecontrol.h \
$$PWD/qandroidcameracapturedestinationcontrol.h \
$$PWD/qandroidcameracapturebufferformatcontrol.h \
$$PWD/qandroidmediastoragelocation.h \
$$PWD/qandroidcameraflashcontrol.h \
$$PWD/qandroidcamerafocuscontrol.h \
$$PWD/qandroidcameralockscontrol.h \

@@ -42,7 +42,7 @@
#include "qandroidcameraexposurecontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -42,7 +42,7 @@
#include "qandroidcameraflashcontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -42,7 +42,7 @@
#include "qandroidcamerafocuscontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -42,7 +42,7 @@
#include "qandroidcameraimageprocessingcontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -42,7 +42,7 @@
#include "qandroidcameralockscontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
#include <qtimer.h>

QT_BEGIN_NAMESPACE

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -41,8 +41,8 @@

#include "qandroidcamerasession.h"

#include "jcamera.h"
#include "jmultimediautils.h"
#include "androidcamera.h"
#include "androidmultimediautils.h"
#include "qandroidvideooutput.h"
#include "qandroidmediavideoprobecontrol.h"
#include "qandroidmultimediautils.h"
@@ -111,6 +111,10 @@ QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
, m_captureCanceled(false)
, m_currentImageCaptureId(-1)
{
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Pictures,
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM));

if (qApp) {
connect(qApp, SIGNAL(applicationStateChanged(Qt::ApplicationState)),
this, SLOT(onApplicationStateChanged(Qt::ApplicationState)));
@@ -164,10 +168,12 @@ void QAndroidCameraSession::setState(QCamera::State state)
emit error(QCamera::CameraError, QStringLiteral("Failed to open camera"));
return;
}
if (state == QCamera::ActiveState)
startPreview();
else if (state == QCamera::LoadedState)
if (state == QCamera::ActiveState) {
if (!startPreview())
return;
} else if (state == QCamera::LoadedState) {
stopPreview();
}
break;
}

@@ -179,36 +185,10 @@ void QAndroidCameraSession::updateAvailableCameras()
{
g_availableCameras->clear();

const QJNIObjectPrivate cameraInfo("android/hardware/Camera$CameraInfo");
const int numCameras = QJNIObjectPrivate::callStaticMethod<jint>("android/hardware/Camera",
"getNumberOfCameras");

const int numCameras = AndroidCamera::getNumberOfCameras();
for (int i = 0; i < numCameras; ++i) {
AndroidCameraInfo info;

QJNIObjectPrivate::callStaticMethod<void>("android/hardware/Camera",
"getCameraInfo",
"(ILandroid/hardware/Camera$CameraInfo;)V",
i, cameraInfo.object());

JCamera::CameraFacing facing = JCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
// The orientation provided by Android is counter-clockwise, we need it clockwise
info.orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;

switch (facing) {
case JCamera::CameraFacingBack:
info.name = QByteArray("back");
info.description = QStringLiteral("Rear-facing camera");
info.position = QCamera::BackFace;
break;
case JCamera::CameraFacingFront:
info.name = QByteArray("front");
info.description = QStringLiteral("Front-facing camera");
info.position = QCamera::FrontFace;
break;
default:
break;
}
AndroidCamera::getCameraInfo(i, &info);

if (!info.name.isNull())
g_availableCameras->append(info);
@@ -230,7 +210,7 @@ bool QAndroidCameraSession::open()
m_status = QCamera::LoadingStatus;
emit statusChanged(m_status);

m_camera = JCamera::open(m_selectedCamera);
m_camera = AndroidCamera::open(m_selectedCamera);

if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
@@ -246,8 +226,8 @@ bool QAndroidCameraSession::open()

m_status = QCamera::LoadedStatus;

if (m_camera->getPreviewFormat() != JCamera::NV21)
m_camera->setPreviewFormat(JCamera::NV21);
if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
m_camera->setPreviewFormat(AndroidCamera::NV21);

m_camera->fetchEachFrame(m_videoProbes.count());

@@ -286,8 +266,10 @@ void QAndroidCameraSession::close()

void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
{
if (m_videoOutput)
if (m_videoOutput) {
m_videoOutput->stop();
m_videoOutput->reset();
}

if (videoOutput) {
connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
@@ -336,10 +318,23 @@ void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool
}
}

void QAndroidCameraSession::startPreview()
bool QAndroidCameraSession::startPreview()
{
if (!m_camera || m_previewStarted)
return;
if (!m_camera)
return false;

if (!m_videoOutput) {
Q_EMIT error(QCamera::InvalidRequestError, tr("Camera cannot be started without a viewfinder."));
return false;
}

if (m_previewStarted)
return true;

if (m_videoOutput->isReady())
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
else
return true; // delay starting until the video output is ready

m_status = QCamera::StartingStatus;
emit statusChanged(m_status);
@@ -347,13 +342,12 @@ void QAndroidCameraSession::startPreview()
applyImageSettings();
adjustViewfinderSize(m_imageSettings.resolution());

if (m_videoOutput && m_videoOutput->isReady())
onVideoOutputReady(true);

JMultimediaUtils::enableOrientationListener(true);
AndroidMultimediaUtils::enableOrientationListener(true);

m_camera->startPreview();
m_previewStarted = true;

return true;
}

void QAndroidCameraSession::stopPreview()
@@ -364,12 +358,16 @@ void QAndroidCameraSession::stopPreview()
m_status = QCamera::StoppingStatus;
emit statusChanged(m_status);

JMultimediaUtils::enableOrientationListener(false);
AndroidMultimediaUtils::enableOrientationListener(false);

m_camera->stopPreview();
m_camera->setPreviewSize(QSize());
if (m_videoOutput)
m_camera->setPreviewTexture(0);

if (m_videoOutput) {
m_videoOutput->stop();
m_videoOutput->reset();
}
m_previewStarted = false;
}

@@ -397,8 +395,8 @@ int QAndroidCameraSession::currentCameraRotation() const

// subtract natural camera orientation and physical device orientation
int rotation = 0;
int deviceOrientation = (JMultimediaUtils::getDeviceOrientation() + 45) / 90 * 90;
if (m_camera->getFacing() == JCamera::CameraFacingFront)
int deviceOrientation = (AndroidMultimediaUtils::getDeviceOrientation() + 45) / 90 * 90;
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
rotation = (m_nativeOrientation - deviceOrientation + 360) % 360;
else // back-facing camera
rotation = (m_nativeOrientation + deviceOrientation) % 360;
@@ -638,7 +636,7 @@ void QAndroidCameraSession::processCapturedImage(int id,

if (dest & QCameraImageCapture::CaptureToFile) {
const QString actualFileName = m_mediaStorageLocation.generateFileName(fileName,
QAndroidMediaStorageLocation::Camera,
QMediaStorageLocation::Pictures,
QLatin1String("IMG_"),
QLatin1String("jpg"));

@@ -648,9 +646,9 @@ void QAndroidCameraSession::processCapturedImage(int id,
// if the picture is saved into the standard picture location, register it
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
QString standardLoc = JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM);
QString standardLoc = AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM);
if (actualFileName.startsWith(standardLoc))
JMultimediaUtils::registerMediaFile(actualFileName);
AndroidMultimediaUtils::registerMediaFile(actualFileName);

emit imageSaved(id, actualFileName);
} else {
@@ -687,7 +685,7 @@ QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == JCamera::CameraFacingFront)
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);

transform.rotate(rotation);
@@ -699,8 +697,8 @@ QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data

void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
if (m_camera && m_videoOutput && ready)
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
if (ready && m_state == QCamera::ActiveState)
startPreview();
}

void QAndroidCameraSession::onApplicationStateChanged(Qt::ApplicationState state)

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -47,22 +47,14 @@
#include <QCameraImageCapture>
#include <QSet>
#include <QMutex>
#include "qandroidmediastoragelocation.h"
#include <private/qmediastoragelocation_p.h>
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

class JCamera;
class QAndroidVideoOutput;
class QAndroidMediaVideoProbeControl;

struct AndroidCameraInfo
{
QByteArray name;
QString description;
QCamera::Position position;
int orientation;
};

class QAndroidCameraSession : public QObject
{
Q_OBJECT
@@ -73,7 +65,7 @@ public:
static const QList<AndroidCameraInfo> &availableCameras();

void setSelectedCamera(int cameraId) { m_selectedCamera = cameraId; }
JCamera *camera() const { return m_camera; }
AndroidCamera *camera() const { return m_camera; }

QCamera::State state() const { return m_state; }
void setState(QCamera::State state);
@@ -141,7 +133,7 @@ private:
bool open();
void close();

void startPreview();
bool startPreview();
void stopPreview();

void applyImageSettings();
@@ -154,7 +146,7 @@ private:
const QString &fileName);

int m_selectedCamera;
JCamera *m_camera;
AndroidCamera *m_camera;
int m_nativeOrientation;
QAndroidVideoOutput *m_videoOutput;

@@ -174,7 +166,7 @@ private:
int m_currentImageCaptureId;
QString m_currentImageCaptureFileName;

QAndroidMediaStorageLocation m_mediaStorageLocation;
QMediaStorageLocation m_mediaStorageLocation;

QSet<QAndroidMediaVideoProbeControl *> m_videoProbes;
QMutex m_videoProbesMutex;

@@ -42,7 +42,7 @@
#include "qandroidcamerazoomcontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
#include "qandroidmultimediautils.h"
#include <qmath.h>

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -41,11 +41,10 @@

#include "qandroidcapturesession.h"

#include "jcamera.h"
#include "androidcamera.h"
#include "qandroidcamerasession.h"
#include "jmultimediautils.h"
#include "androidmultimediautils.h"
#include "qandroidmultimediautils.h"
#include <QtCore/private/qjni_p.h>

QT_BEGIN_NAMESPACE

@@ -53,7 +52,7 @@ QAndroidCaptureSession::QAndroidCaptureSession(QAndroidCameraSession *cameraSess
: QObject()
, m_mediaRecorder(0)
, m_cameraSession(cameraSession)
, m_audioSource(JMediaRecorder::DefaultAudioSource)
, m_audioSource(AndroidMediaRecorder::DefaultAudioSource)
, m_duration(0)
, m_state(QMediaRecorder::StoppedState)
, m_status(QMediaRecorder::UnloadedStatus)
@@ -61,10 +60,18 @@ QAndroidCaptureSession::QAndroidCaptureSession(QAndroidCameraSession *cameraSess
, m_containerFormatDirty(true)
, m_videoSettingsDirty(true)
, m_audioSettingsDirty(true)
, m_outputFormat(JMediaRecorder::DefaultOutputFormat)
, m_audioEncoder(JMediaRecorder::DefaultAudioEncoder)
, m_videoEncoder(JMediaRecorder::DefaultVideoEncoder)
, m_outputFormat(AndroidMediaRecorder::DefaultOutputFormat)
, m_audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
, m_videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
{
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Movies,
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM));

m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Sounds,
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::Sounds));

connect(this, SIGNAL(stateChanged(QMediaRecorder::State)), this, SLOT(updateStatus()));

if (cameraSession) {
@@ -95,19 +102,19 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
m_audioInput = input;

if (m_audioInput == QLatin1String("default"))
m_audioSource = JMediaRecorder::DefaultAudioSource;
m_audioSource = AndroidMediaRecorder::DefaultAudioSource;
else if (m_audioInput == QLatin1String("mic"))
m_audioSource = JMediaRecorder::Mic;
m_audioSource = AndroidMediaRecorder::Mic;
else if (m_audioInput == QLatin1String("voice_uplink"))
m_audioSource = JMediaRecorder::VoiceUplink;
m_audioSource = AndroidMediaRecorder::VoiceUplink;
else if (m_audioInput == QLatin1String("voice_downlink"))
m_audioSource = JMediaRecorder::VoiceDownlink;
m_audioSource = AndroidMediaRecorder::VoiceDownlink;
else if (m_audioInput == QLatin1String("voice_call"))
m_audioSource = JMediaRecorder::VoiceCall;
m_audioSource = AndroidMediaRecorder::VoiceCall;
else if (m_audioInput == QLatin1String("voice_recognition"))
m_audioSource = JMediaRecorder::VoiceRecognition;
m_audioSource = AndroidMediaRecorder::VoiceRecognition;
else
m_audioSource = JMediaRecorder::DefaultAudioSource;
m_audioSource = AndroidMediaRecorder::DefaultAudioSource;

emit audioInputChanged(m_audioInput);
}
@@ -176,7 +183,7 @@ bool QAndroidCaptureSession::start()
m_mediaRecorder->release();
delete m_mediaRecorder;
}
m_mediaRecorder = new JMediaRecorder;
m_mediaRecorder = new AndroidMediaRecorder;
connect(m_mediaRecorder, SIGNAL(error(int,int)), this, SLOT(onError(int,int)));
connect(m_mediaRecorder, SIGNAL(info(int,int)), this, SLOT(onInfo(int,int)));

@@ -185,8 +192,8 @@ bool QAndroidCaptureSession::start()
updateViewfinder();
m_cameraSession->camera()->unlock();
m_mediaRecorder->setCamera(m_cameraSession->camera());
m_mediaRecorder->setAudioSource(JMediaRecorder::Camcorder);
m_mediaRecorder->setVideoSource(JMediaRecorder::Camera);
m_mediaRecorder->setAudioSource(AndroidMediaRecorder::Camcorder);
m_mediaRecorder->setVideoSource(AndroidMediaRecorder::Camera);
} else {
m_mediaRecorder->setAudioSource(m_audioSource);
}
@@ -214,8 +221,8 @@ bool QAndroidCaptureSession::start()
QString filePath = m_mediaStorageLocation.generateFileName(
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
: m_requestedOutputLocation.toString(),
m_cameraSession ? QAndroidMediaStorageLocation::Camera
: QAndroidMediaStorageLocation::Audio,
m_cameraSession ? QMediaStorageLocation::Movies
: QMediaStorageLocation::Sounds,
m_cameraSession ? QLatin1String("VID_")
: QLatin1String("REC_"),
m_containerFormat);
@@ -272,10 +279,10 @@ void QAndroidCaptureSession::stop(bool error)
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
QString mediaPath = m_actualOutputLocation.toLocalFile();
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
QString standardLoc = m_cameraSession ? AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM)
: AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc))
JMultimediaUtils::registerMediaFile(mediaPath);
AndroidMultimediaUtils::registerMediaFile(mediaPath);

m_actualOutputLocation = m_usedOutputLocation;
emit actualLocationChanged(m_actualOutputLocation);
@@ -339,14 +346,14 @@ void QAndroidCaptureSession::applySettings()
m_containerFormat = m_defaultSettings.outputFileExtension;
m_outputFormat = m_defaultSettings.outputFormat;
} else if (m_containerFormat == QLatin1String("3gp")) {
m_outputFormat = JMediaRecorder::THREE_GPP;
m_outputFormat = AndroidMediaRecorder::THREE_GPP;
} else if (!m_cameraSession && m_containerFormat == QLatin1String("amr")) {
m_outputFormat = JMediaRecorder::AMR_NB_Format;
m_outputFormat = AndroidMediaRecorder::AMR_NB_Format;
} else if (!m_cameraSession && m_containerFormat == QLatin1String("awb")) {
m_outputFormat = JMediaRecorder::AMR_WB_Format;
m_outputFormat = AndroidMediaRecorder::AMR_WB_Format;
} else {
m_containerFormat = QStringLiteral("mp4");
m_outputFormat = JMediaRecorder::MPEG_4;
m_outputFormat = AndroidMediaRecorder::MPEG_4;
}

m_containerFormatDirty = false;
@@ -364,11 +371,11 @@ void QAndroidCaptureSession::applySettings()
if (m_audioSettings.codec().isEmpty())
m_audioEncoder = m_defaultSettings.audioEncoder;
else if (m_audioSettings.codec() == QLatin1String("aac"))
m_audioEncoder = JMediaRecorder::AAC;
m_audioEncoder = AndroidMediaRecorder::AAC;
else if (m_audioSettings.codec() == QLatin1String("amr-nb"))
m_audioEncoder = JMediaRecorder::AMR_NB_Encoder;
m_audioEncoder = AndroidMediaRecorder::AMR_NB_Encoder;
else if (m_audioSettings.codec() == QLatin1String("amr-wb"))
m_audioEncoder = JMediaRecorder::AMR_WB_Encoder;
m_audioEncoder = AndroidMediaRecorder::AMR_WB_Encoder;
else
m_audioEncoder = m_defaultSettings.audioEncoder;

@@ -402,11 +409,11 @@ void QAndroidCaptureSession::applySettings()
if (m_videoSettings.codec().isEmpty())
m_videoEncoder = m_defaultSettings.videoEncoder;
else if (m_videoSettings.codec() == QLatin1String("h263"))
m_videoEncoder = JMediaRecorder::H263;
m_videoEncoder = AndroidMediaRecorder::H263;
else if (m_videoSettings.codec() == QLatin1String("h264"))
m_videoEncoder = JMediaRecorder::H264;
m_videoEncoder = AndroidMediaRecorder::H264;
else if (m_videoSettings.codec() == QLatin1String("mpeg4_sp"))
m_videoEncoder = JMediaRecorder::MPEG_4_SP;
m_videoEncoder = AndroidMediaRecorder::MPEG_4_SP;
else
m_videoEncoder = m_defaultSettings.videoEncoder;

@@ -448,7 +455,7 @@ void QAndroidCaptureSession::onCameraOpened()
for (int i = 0; i < 8; ++i) {
CaptureProfile profile = getProfile(i);
if (!profile.isNull) {
if (i == 1) // QUALITY_HIGH
if (i == AndroidCamcorderProfile::QUALITY_HIGH)
m_defaultSettings = profile;

if (!m_supportedResolutions.contains(profile.videoResolution))
@@ -467,38 +474,31 @@ void QAndroidCaptureSession::onCameraOpened()
QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id)
{
CaptureProfile profile;
bool hasProfile = QJNIObjectPrivate::callStaticMethod<jboolean>("android/media/CamcorderProfile",
"hasProfile",
"(II)Z",
m_cameraSession->camera()->cameraId(),
id);
const bool hasProfile = AndroidCamcorderProfile::hasProfile(m_cameraSession->camera()->cameraId(),
AndroidCamcorderProfile::Quality(id));

if (hasProfile) {
QJNIObjectPrivate obj = QJNIObjectPrivate::callStaticObjectMethod("android/media/CamcorderProfile",
"get",
"(II)Landroid/media/CamcorderProfile;",
m_cameraSession->camera()->cameraId(),
id);
AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(m_cameraSession->camera()->cameraId(),
AndroidCamcorderProfile::Quality(id));

profile.outputFormat = AndroidMediaRecorder::OutputFormat(camProfile.getValue(AndroidCamcorderProfile::fileFormat));
profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(camProfile.getValue(AndroidCamcorderProfile::audioCodec));
profile.audioBitRate = camProfile.getValue(AndroidCamcorderProfile::audioBitRate);
profile.audioChannels = camProfile.getValue(AndroidCamcorderProfile::audioChannels);
profile.audioSampleRate = camProfile.getValue(AndroidCamcorderProfile::audioSampleRate);
profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(camProfile.getValue(AndroidCamcorderProfile::videoCodec));
profile.videoBitRate = camProfile.getValue(AndroidCamcorderProfile::videoBitRate);
profile.videoFrameRate = camProfile.getValue(AndroidCamcorderProfile::videoFrameRate);
profile.videoResolution = QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));

profile.outputFormat = JMediaRecorder::OutputFormat(obj.getField<jint>("fileFormat"));
profile.audioEncoder = JMediaRecorder::AudioEncoder(obj.getField<jint>("audioCodec"));
profile.audioBitRate = obj.getField<jint>("audioBitRate");
profile.audioChannels = obj.getField<jint>("audioChannels");
profile.audioSampleRate = obj.getField<jint>("audioSampleRate");
profile.videoEncoder = JMediaRecorder::VideoEncoder(obj.getField<jint>("videoCodec"));
profile.videoBitRate = obj.getField<jint>("videoBitRate");
profile.videoFrameRate = obj.getField<jint>("videoFrameRate");
profile.videoResolution = QSize(obj.getField<jint>("videoFrameWidth"),
obj.getField<jint>("videoFrameHeight"));

if (profile.outputFormat == JMediaRecorder::MPEG_4)
if (profile.outputFormat == AndroidMediaRecorder::MPEG_4)
profile.outputFileExtension = QStringLiteral("mp4");
else if (profile.outputFormat == JMediaRecorder::THREE_GPP)
else if (profile.outputFormat == AndroidMediaRecorder::THREE_GPP)
profile.outputFileExtension = QStringLiteral("3gp");
else if (profile.outputFormat == JMediaRecorder::AMR_NB_Format)
else if (profile.outputFormat == AndroidMediaRecorder::AMR_NB_Format)
profile.outputFileExtension = QStringLiteral("amr");
else if (profile.outputFormat == JMediaRecorder::AMR_WB_Format)
else if (profile.outputFormat == AndroidMediaRecorder::AMR_WB_Format)
profile.outputFileExtension = QStringLiteral("awb");

profile.isNull = false;

@@ -1,6 +1,6 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -47,8 +47,8 @@
#include <qurl.h>
#include <qelapsedtimer.h>
#include <qtimer.h>
#include "qandroidmediastoragelocation.h"
#include "jmediarecorder.h"
#include <private/qmediastoragelocation_p.h>
#include "androidmediarecorder.h"

QT_BEGIN_NAMESPACE

@@ -106,15 +106,15 @@ private Q_SLOTS:

private:
struct CaptureProfile {
JMediaRecorder::OutputFormat outputFormat;
AndroidMediaRecorder::OutputFormat outputFormat;
QString outputFileExtension;

JMediaRecorder::AudioEncoder audioEncoder;
AndroidMediaRecorder::AudioEncoder audioEncoder;
int audioBitRate;
int audioChannels;
int audioSampleRate;

JMediaRecorder::VideoEncoder videoEncoder;
AndroidMediaRecorder::VideoEncoder videoEncoder;
int videoBitRate;
int videoFrameRate;
QSize videoResolution;
@@ -122,13 +122,13 @@ private:
bool isNull;

CaptureProfile()
: outputFormat(JMediaRecorder::MPEG_4)
: outputFormat(AndroidMediaRecorder::MPEG_4)
, outputFileExtension(QLatin1String("mp4"))
, audioEncoder(JMediaRecorder::DefaultAudioEncoder)
, audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
, audioBitRate(128000)
, audioChannels(2)
, audioSampleRate(44100)
, videoEncoder(JMediaRecorder::DefaultVideoEncoder)
, videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
, videoBitRate(1)
, videoFrameRate(-1)
, videoResolution(320, 240)
@@ -146,13 +146,13 @@ private:
void updateViewfinder();
void restartViewfinder();

JMediaRecorder *m_mediaRecorder;
AndroidMediaRecorder *m_mediaRecorder;
QAndroidCameraSession *m_cameraSession;

QString m_audioInput;
JMediaRecorder::AudioSource m_audioSource;
AndroidMediaRecorder::AudioSource m_audioSource;

QAndroidMediaStorageLocation m_mediaStorageLocation;
QMediaStorageLocation m_mediaStorageLocation;

QElapsedTimer m_elapsedTime;
QTimer m_notifyTimer;
@@ -173,9 +173,9 @@ private:
bool m_containerFormatDirty;
bool m_videoSettingsDirty;
bool m_audioSettingsDirty;
JMediaRecorder::OutputFormat m_outputFormat;
JMediaRecorder::AudioEncoder m_audioEncoder;
JMediaRecorder::VideoEncoder m_videoEncoder;
AndroidMediaRecorder::OutputFormat m_outputFormat;
AndroidMediaRecorder::AudioEncoder m_audioEncoder;
AndroidMediaRecorder::VideoEncoder m_videoEncoder;

QList<QSize> m_supportedResolutions;
QList<qreal> m_supportedFramerates;

@@ -42,7 +42,7 @@
#include "qandroidimageencodercontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -69,4 +69,6 @@ private:
QList<QSize> m_supportedResolutions;
};

QT_END_NAMESPACE

#endif // QANDROIDIMAGEENCODERCONTROL_H

@@ -1,130 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "qandroidmediastoragelocation.h"

#include "jmultimediautils.h"

QT_BEGIN_NAMESPACE

QAndroidMediaStorageLocation::QAndroidMediaStorageLocation()
{
}

QDir QAndroidMediaStorageLocation::defaultDir(CaptureSource source) const
{
QStringList dirCandidates;

if (source == Camera)
dirCandidates << JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM);
else
dirCandidates << JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
dirCandidates << QDir::homePath();
dirCandidates << QDir::currentPath();
dirCandidates << QDir::tempPath();

Q_FOREACH (const QString &path, dirCandidates) {
if (QFileInfo(path).isWritable())
return QDir(path);
}

return QDir();
}

QString QAndroidMediaStorageLocation::generateFileName(const QString &requestedName,
CaptureSource source,
const QString &prefix,
const QString &extension) const
{
if (requestedName.isEmpty())
return generateFileName(prefix, defaultDir(source), extension);

QString path = requestedName;

if (QFileInfo(path).isRelative())
path = defaultDir(source).absoluteFilePath(path);

if (QFileInfo(path).isDir())
return generateFileName(prefix, QDir(path), extension);

if (!path.endsWith(extension))
path.append(QString(".%1").arg(extension));

return path;
}

QString QAndroidMediaStorageLocation::generateFileName(const QString &prefix,
const QDir &dir,
const QString &extension) const
{
QMutexLocker lock(&m_mutex);

const QString lastMediaKey = dir.absolutePath() + QLatin1Char(' ') + prefix + QLatin1Char(' ') + extension;
qint64 lastMediaIndex = m_lastUsedIndex.value(lastMediaKey, 0);

if (lastMediaIndex == 0) {
// first run, find the maximum media number during the fist capture
Q_FOREACH (const QString &fileName, dir.entryList(QStringList() << QString("%1*.%2").arg(prefix).arg(extension))) {
const qint64 mediaIndex = fileName.midRef(prefix.length(), fileName.size() - prefix.length() - extension.length() - 1).toInt();
lastMediaIndex = qMax(lastMediaIndex, mediaIndex);
}
}

// don't just rely on cached lastMediaIndex value,
// someone else may create a file after camera started
while (true) {
const QString name = QString("%1%2.%3").arg(prefix)
.arg(lastMediaIndex + 1, 8, 10, QLatin1Char('0'))
.arg(extension);

const QString path = dir.absoluteFilePath(name);
if (!QFileInfo(path).exists()) {
m_lastUsedIndex[lastMediaKey] = lastMediaIndex + 1;
return path;
}

lastMediaIndex++;
}

return QString();
}

QT_END_NAMESPACE
@@ -1,75 +0,0 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#ifndef QANDROIDMEDIASTORAGELOCATION_H
#define QANDROIDMEDIASTORAGELOCATION_H

#include <QCamera>
#include <QDir>
#include <QHash>
#include <QMutex>

QT_BEGIN_NAMESPACE

class QAndroidMediaStorageLocation
{
public:
enum CaptureSource {
Camera,
Audio
};

QAndroidMediaStorageLocation();

QDir defaultDir(CaptureSource source) const;

QString generateFileName(const QString &requestedName, CaptureSource source, const QString &prefix, const QString &extension) const;
QString generateFileName(const QString &prefix, const QDir &dir, const QString &extension) const;

private:
mutable QHash<QString, qint64> m_lastUsedIndex;

mutable QMutex m_mutex;
};

QT_END_NAMESPACE

#endif // QANDROIDMEDIASTORAGELOCATION_H
@@ -43,6 +43,8 @@
#include "qandroidmediavideoprobecontrol.h"
#include <qvideoframe.h>

QT_BEGIN_NAMESPACE

QAndroidMediaVideoProbeControl::QAndroidMediaVideoProbeControl(QObject *parent) :
QMediaVideoProbeControl(parent)
{
@@ -57,3 +59,5 @@ void QAndroidMediaVideoProbeControl::newFrameProbed(const QVideoFrame &frame)
{
emit videoFrameProbed(frame);
}

QT_END_NAMESPACE

@@ -45,6 +45,8 @@

#include <qmediavideoprobecontrol.h>

QT_BEGIN_NAMESPACE

class QAndroidMediaVideoProbeControl : public QMediaVideoProbeControl
{
Q_OBJECT
@@ -56,4 +58,6 @@ public:

};

QT_END_NAMESPACE

#endif // QANDROIDMEDIAVIDEOPROBECONTROL_H

@@ -42,7 +42,7 @@
#include "qandroidvideodeviceselectorcontrol.h"

#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"

QT_BEGIN_NAMESPACE

@@ -40,24 +40,25 @@
|
||||
****************************************************************************/
|
||||
|
||||
#include "qandroidmediaplayercontrol.h"
|
||||
#include "jmediaplayer.h"
|
||||
#include "androidmediaplayer.h"
|
||||
#include "qandroidvideooutput.h"
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
|
||||
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
|
||||
: QMediaPlayerControl(parent),
|
||||
mMediaPlayer(new JMediaPlayer),
|
||||
mMediaPlayer(new AndroidMediaPlayer),
|
||||
mCurrentState(QMediaPlayer::StoppedState),
|
||||
mCurrentMediaStatus(QMediaPlayer::NoMedia),
|
||||
mMediaStream(0),
|
||||
mVideoOutput(0),
|
||||
mSeekable(true),
|
||||
mBufferPercent(-1),
|
||||
mBufferFilled(false),
|
||||
mAudioAvailable(false),
|
||||
mVideoAvailable(false),
|
||||
mBuffering(false),
|
||||
mState(JMediaPlayer::Uninitialized),
|
||||
mState(AndroidMediaPlayer::Uninitialized),
|
||||
mPendingState(-1),
|
||||
mPendingPosition(-1),
|
||||
mPendingSetMedia(false),
|
||||
@@ -98,11 +99,11 @@ QMediaPlayer::MediaStatus QAndroidMediaPlayerControl::mediaStatus() const

qint64 QAndroidMediaPlayerControl::duration() const
{
    if ((mState & (JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::Stopped
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::Stopped
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        return 0;
    }

@@ -114,13 +115,13 @@ qint64 QAndroidMediaPlayerControl::position() const
    if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
        return duration();

    if ((mState & (JMediaPlayer::Idle
                   | JMediaPlayer::Initialized
                   | JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::Stopped
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Idle
                   | AndroidMediaPlayer::Initialized
                   | AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::Stopped
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        return (mPendingPosition == -1) ? 0 : mPendingPosition;
    }

@@ -134,10 +135,10 @@ void QAndroidMediaPlayerControl::setPosition(qint64 position)

    const int seekPosition = (position > INT_MAX) ? INT_MAX : position;

    if ((mState & (JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        if (mPendingPosition != seekPosition) {
            mPendingPosition = seekPosition;
            Q_EMIT positionChanged(seekPosition);
@@ -164,13 +165,13 @@ int QAndroidMediaPlayerControl::volume() const

void QAndroidMediaPlayerControl::setVolume(int volume)
{
    if ((mState & (JMediaPlayer::Idle
                   | JMediaPlayer::Initialized
                   | JMediaPlayer::Stopped
                   | JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Idle
                   | AndroidMediaPlayer::Initialized
                   | AndroidMediaPlayer::Stopped
                   | AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        if (mPendingVolume != volume) {
            mPendingVolume = volume;
            Q_EMIT volumeChanged(volume);
@@ -195,13 +196,13 @@ bool QAndroidMediaPlayerControl::isMuted() const

void QAndroidMediaPlayerControl::setMuted(bool muted)
{
    if ((mState & (JMediaPlayer::Idle
                   | JMediaPlayer::Initialized
                   | JMediaPlayer::Stopped
                   | JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Idle
                   | AndroidMediaPlayer::Initialized
                   | AndroidMediaPlayer::Stopped
                   | AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        if (mPendingMute != muted) {
            mPendingMute = muted;
            Q_EMIT mutedChanged(muted);
@@ -221,7 +222,7 @@ void QAndroidMediaPlayerControl::setMuted(bool muted)

int QAndroidMediaPlayerControl::bufferStatus() const
{
    return mBufferPercent;
    return mBufferFilled ? 100 : 0;
}

bool QAndroidMediaPlayerControl::isAudioAvailable() const
@@ -290,7 +291,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
    }

    // Release the mediaplayer if it's not in in Idle or Uninitialized state
    if ((mState & (JMediaPlayer::Idle | JMediaPlayer::Uninitialized)) == 0)
    if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized)) == 0)
        mMediaPlayer->release();

    if (mediaContent.isNull()) {
@@ -311,7 +312,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
            const QString path = url.toString().mid(3);
            mTempFile.reset(QTemporaryFile::createNativeFile(path));
            if (!mTempFile.isNull())
                mediaPath = QLatin1String("file://") + mTempFile->fileName();
                mediaPath = QStringLiteral("file://") + mTempFile->fileName();
        } else {
            mediaPath = url.toString();
        }
@@ -319,8 +320,8 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
    if (mVideoSize.isValid() && mVideoOutput)
        mVideoOutput->setVideoSize(mVideoSize);

    if (!mMediaPlayer->display() && mVideoOutput)
        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
    if ((mMediaPlayer->display() == 0) && mVideoOutput)
        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
    mMediaPlayer->setDataSource(mediaPath);
    mMediaPlayer->prepareAsync();

@@ -344,7 +345,7 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
        return;

    if (mVideoOutput->isReady())
        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());

    connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
}
@@ -352,16 +353,16 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
void QAndroidMediaPlayerControl::play()
{
    // We need to prepare the mediaplayer again.
    if ((mState & JMediaPlayer::Stopped) && !mMediaContent.isNull()) {
    if ((mState & AndroidMediaPlayer::Stopped) && !mMediaContent.isNull()) {
        setMedia(mMediaContent, mMediaStream);
    }

    setState(QMediaPlayer::PlayingState);

    if ((mState & (JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        mPendingState = QMediaPlayer::PlayingState;
        return;
    }
@@ -373,9 +374,9 @@ void QAndroidMediaPlayerControl::pause()
{
    setState(QMediaPlayer::PausedState);

    if ((mState & (JMediaPlayer::Started
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
    if ((mState & (AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        mPendingState = QMediaPlayer::PausedState;
        return;
    }
@@ -387,12 +388,12 @@ void QAndroidMediaPlayerControl::stop()
{
    setState(QMediaPlayer::StoppedState);

    if ((mState & (JMediaPlayer::Prepared
                   | JMediaPlayer::Started
                   | JMediaPlayer::Stopped
                   | JMediaPlayer::Paused
                   | JMediaPlayer::PlaybackCompleted)) == 0) {
        if ((mState & (JMediaPlayer::Idle | JMediaPlayer::Uninitialized | JMediaPlayer::Error)) == 0)
    if ((mState & (AndroidMediaPlayer::Prepared
                   | AndroidMediaPlayer::Started
                   | AndroidMediaPlayer::Stopped
                   | AndroidMediaPlayer::Paused
                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
        if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized | AndroidMediaPlayer::Error)) == 0)
            mPendingState = QMediaPlayer::StoppedState;
        return;
    }
@@ -404,28 +405,28 @@ void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
{
    Q_UNUSED(extra);
    switch (what) {
    case JMediaPlayer::MEDIA_INFO_UNKNOWN:
    case AndroidMediaPlayer::MEDIA_INFO_UNKNOWN:
        break;
    case JMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
    case AndroidMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
        // IGNORE
        break;
    case JMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
    case AndroidMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
        break;
    case JMediaPlayer::MEDIA_INFO_BUFFERING_START:
    case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_START:
        mPendingState = mCurrentState;
        setState(QMediaPlayer::PausedState);
        setMediaStatus(QMediaPlayer::StalledMedia);
        break;
    case JMediaPlayer::MEDIA_INFO_BUFFERING_END:
    case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_END:
        if (mCurrentState != QMediaPlayer::StoppedState)
            flushPendingStates();
        break;
    case JMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
    case AndroidMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
        break;
    case JMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
    case AndroidMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
        setSeekable(false);
        break;
    case JMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
    case AndroidMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
        Q_EMIT metaDataUpdated();
        break;
    }
@@ -437,44 +438,44 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
    QMediaPlayer::Error error = QMediaPlayer::ResourceError;

    switch (what) {
    case JMediaPlayer::MEDIA_ERROR_UNKNOWN:
    case AndroidMediaPlayer::MEDIA_ERROR_UNKNOWN:
        errorString = QLatin1String("Error:");
        break;
    case JMediaPlayer::MEDIA_ERROR_SERVER_DIED:
    case AndroidMediaPlayer::MEDIA_ERROR_SERVER_DIED:
        errorString = QLatin1String("Error: Server died");
        error = QMediaPlayer::ServiceMissingError;
        break;
    case JMediaPlayer::MEDIA_ERROR_INVALID_STATE:
    case AndroidMediaPlayer::MEDIA_ERROR_INVALID_STATE:
        errorString = QLatin1String("Error: Invalid state");
        error = QMediaPlayer::ServiceMissingError;
        break;
    }

    switch (extra) {
    case JMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
    case AndroidMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
        errorString += QLatin1String(" (I/O operation failed)");
        error = QMediaPlayer::NetworkError;
        setMediaStatus(QMediaPlayer::InvalidMedia);
        break;
    case JMediaPlayer::MEDIA_ERROR_MALFORMED:
    case AndroidMediaPlayer::MEDIA_ERROR_MALFORMED:
        errorString += QLatin1String(" (Malformed bitstream)");
        error = QMediaPlayer::FormatError;
        setMediaStatus(QMediaPlayer::InvalidMedia);
        break;
    case JMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
    case AndroidMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
        errorString += QLatin1String(" (Unsupported media)");
        error = QMediaPlayer::FormatError;
        setMediaStatus(QMediaPlayer::InvalidMedia);
        break;
    case JMediaPlayer::MEDIA_ERROR_TIMED_OUT:
    case AndroidMediaPlayer::MEDIA_ERROR_TIMED_OUT:
        errorString += QLatin1String(" (Timed out)");
        break;
    case JMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
    case AndroidMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
        errorString += QLatin1String(" (Unable to start progressive playback')");
        error = QMediaPlayer::FormatError;
        setMediaStatus(QMediaPlayer::InvalidMedia);
        break;
    case JMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
    case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
        errorString += QLatin1String(" (Unknown error/Insufficient resources)");
        error = QMediaPlayer::ServiceMissingError;
        break;
@@ -487,7 +488,6 @@ void QAndroidMediaPlayerControl::onBufferingChanged(qint32 percent)
{
    mBuffering = percent != 100;
    mBufferPercent = percent;
    Q_EMIT bufferStatusChanged(mBufferPercent);

    updateAvailablePlaybackRanges();

@@ -512,21 +512,21 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
{
    // If reloading, don't report state changes unless the new state is Prepared or Error.
    if ((mState & JMediaPlayer::Stopped)
        && (state & (JMediaPlayer::Prepared | JMediaPlayer::Error | JMediaPlayer::Uninitialized)) == 0) {
    if ((mState & AndroidMediaPlayer::Stopped)
        && (state & (AndroidMediaPlayer::Prepared | AndroidMediaPlayer::Error | AndroidMediaPlayer::Uninitialized)) == 0) {
        return;
    }

    mState = state;
    switch (mState) {
    case JMediaPlayer::Idle:
    case AndroidMediaPlayer::Idle:
        break;
    case JMediaPlayer::Initialized:
    case AndroidMediaPlayer::Initialized:
        break;
    case JMediaPlayer::Preparing:
    case AndroidMediaPlayer::Preparing:
        setMediaStatus(QMediaPlayer::LoadingMedia);
        break;
    case JMediaPlayer::Prepared:
    case AndroidMediaPlayer::Prepared:
        setMediaStatus(QMediaPlayer::LoadedMedia);
        if (mBuffering) {
            setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
@@ -537,7 +537,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
        setAudioAvailable(true);
        flushPendingStates();
        break;
    case JMediaPlayer::Started:
    case AndroidMediaPlayer::Started:
        setState(QMediaPlayer::PlayingState);
        if (mBuffering) {
            setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
@@ -546,25 +546,25 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
            setMediaStatus(QMediaPlayer::BufferedMedia);
        }
        break;
    case JMediaPlayer::Paused:
    case AndroidMediaPlayer::Paused:
        setState(QMediaPlayer::PausedState);
        break;
    case JMediaPlayer::Error:
    case AndroidMediaPlayer::Error:
        setState(QMediaPlayer::StoppedState);
        setMediaStatus(QMediaPlayer::UnknownMediaStatus);
        mMediaPlayer->release();
        break;
    case JMediaPlayer::Stopped:
    case AndroidMediaPlayer::Stopped:
        setState(QMediaPlayer::StoppedState);
        setMediaStatus(QMediaPlayer::LoadedMedia);
        setPosition(0);
        break;
    case JMediaPlayer::PlaybackCompleted:
    case AndroidMediaPlayer::PlaybackCompleted:
        setState(QMediaPlayer::StoppedState);
        setPosition(0);
        setMediaStatus(QMediaPlayer::EndOfMedia);
        break;
    case JMediaPlayer::Uninitialized:
    case AndroidMediaPlayer::Uninitialized:
        // reset some properties
        resetBufferingProgress();
        mPendingPosition = -1;
@@ -579,7 +579,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
        break;
    }

    if ((mState & (JMediaPlayer::Stopped | JMediaPlayer::Uninitialized)) != 0) {
    if ((mState & (AndroidMediaPlayer::Stopped | AndroidMediaPlayer::Uninitialized)) != 0) {
        mMediaPlayer->setDisplay(0);
        if (mVideoOutput) {
            mVideoOutput->stop();
@@ -590,8 +590,8 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)

void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
{
    if (!mMediaPlayer->display() && mVideoOutput && ready)
        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
    if ((mMediaPlayer->display() == 0) && mVideoOutput && ready)
        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());

    flushPendingStates();
}
@@ -621,6 +621,8 @@ void QAndroidMediaPlayerControl::setMediaStatus(QMediaPlayer::MediaStatus status

    mCurrentMediaStatus = status;
    Q_EMIT mediaStatusChanged(mCurrentMediaStatus);

    updateBufferStatus();
}

void QAndroidMediaPlayerControl::setSeekable(bool seekable)
@@ -658,7 +660,6 @@ void QAndroidMediaPlayerControl::resetBufferingProgress()
    mBuffering = false;
    mBufferPercent = 0;
    mAvailablePlaybackRange = QMediaTimeRange();
    Q_EMIT bufferStatusChanged(mBufferPercent);
}

void QAndroidMediaPlayerControl::flushPendingStates()
@@ -694,4 +695,15 @@ void QAndroidMediaPlayerControl::flushPendingStates()
    }
}

void QAndroidMediaPlayerControl::updateBufferStatus()
{
    bool bufferFilled = (mCurrentMediaStatus == QMediaPlayer::BufferedMedia
                         || mCurrentMediaStatus == QMediaPlayer::BufferingMedia);

    if (mBufferFilled != bufferFilled) {
        mBufferFilled = bufferFilled;
        Q_EMIT bufferStatusChanged(bufferStatus());
    }
}

QT_END_NAMESPACE

@@ -49,7 +49,7 @@

QT_BEGIN_NAMESPACE

class JMediaPlayer;
class AndroidMediaPlayer;
class QAndroidVideoOutput;

class QAndroidMediaPlayerControl : public QMediaPlayerControl
@@ -98,7 +98,7 @@ private Q_SLOTS:
    void onStateChanged(qint32 state);

private:
    JMediaPlayer *mMediaPlayer;
    AndroidMediaPlayer *mMediaPlayer;
    QMediaPlayer::State mCurrentState;
    QMediaPlayer::MediaStatus mCurrentMediaStatus;
    QMediaContent mMediaContent;
@@ -106,6 +106,7 @@ private:
    QAndroidVideoOutput *mVideoOutput;
    bool mSeekable;
    int mBufferPercent;
    bool mBufferFilled;
    bool mAudioAvailable;
    bool mVideoAvailable;
    QSize mVideoSize;
@@ -127,6 +128,7 @@ private:
    void updateAvailablePlaybackRanges();
    void resetBufferingProgress();
    void flushPendingStates();
    void updateBufferStatus();
};

QT_END_NAMESPACE

@@ -41,7 +41,7 @@

#include "qandroidmetadatareadercontrol.h"

#include "jmediametadataretriever.h"
#include "androidmediametadataretriever.h"
#include <QtMultimedia/qmediametadata.h>
#include <qsize.h>
#include <QDate>
@@ -74,7 +74,7 @@ static const char* qt_ID3GenreNames[] =
QAndroidMetaDataReaderControl::QAndroidMetaDataReaderControl(QObject *parent)
    : QMetaDataReaderControl(parent)
    , m_available(false)
    , m_retriever(new JMediaMetadataRetriever)
    , m_retriever(new AndroidMediaMetadataRetriever)
{
}

@@ -124,56 +124,56 @@ void QAndroidMetaDataReaderControl::updateData()

    if (!m_mediaContent.isNull()) {
        if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) {
            QString mimeType = m_retriever->extractMetadata(JMediaMetadataRetriever::MimeType);
            QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType);
            if (!mimeType.isNull())
                m_metadata.insert(QMediaMetaData::MediaType, mimeType);

            bool isVideo = !m_retriever->extractMetadata(JMediaMetadataRetriever::HasVideo).isNull()
            bool isVideo = !m_retriever->extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull()
                           || mimeType.startsWith(QStringLiteral("video"));

            QString string = m_retriever->extractMetadata(JMediaMetadataRetriever::Album);
            QString string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Album);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::AlbumTitle, string);

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::AlbumArtist);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::AlbumArtist, string);

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Artist);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Artist);
            if (!string.isNull()) {
                m_metadata.insert(isVideo ? QMediaMetaData::LeadPerformer
                                          : QMediaMetaData::ContributingArtist,
                                  string.split('/', QString::SkipEmptyParts));
            }

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Author);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Author);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Author, string.split('/', QString::SkipEmptyParts));

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Bitrate);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Bitrate);
            if (!string.isNull()) {
                m_metadata.insert(isVideo ? QMediaMetaData::VideoBitRate
                                          : QMediaMetaData::AudioBitRate,
                                  string.toInt());
            }

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::CDTrackNumber);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::TrackNumber, string.toInt());

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Composer);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Composer);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Composer, string.split('/', QString::SkipEmptyParts));

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Date);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Date);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date());

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Duration);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Duration);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Duration, string.toLongLong());

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Genre);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Genre);
            if (!string.isNull()) {
                // The genre can be returned as an ID3v2 id, get the name for it in that case
                if (string.startsWith('(') && string.endsWith(')')) {
@@ -185,22 +185,22 @@ void QAndroidMetaDataReaderControl::updateData()
                m_metadata.insert(QMediaMetaData::Genre, string);
            }

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Title);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Title);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Title, string);

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::VideoHeight);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoHeight);
            if (!string.isNull()) {
                int height = string.toInt();
                int width = m_retriever->extractMetadata(JMediaMetadataRetriever::VideoWidth).toInt();
                int width = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt();
                m_metadata.insert(QMediaMetaData::Resolution, QSize(width, height));
            }

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Writer);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Writer);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Writer, string.split('/', QString::SkipEmptyParts));

            string = m_retriever->extractMetadata(JMediaMetadataRetriever::Year);
            string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Year);
            if (!string.isNull())
                m_metadata.insert(QMediaMetaData::Year, string.toInt());
    }

@@ -47,7 +47,7 @@

QT_BEGIN_NAMESPACE

class JMediaMetadataRetriever;
class AndroidMediaMetadataRetriever;

class QAndroidMetaDataReaderControl : public QMetaDataReaderControl
{
@@ -72,7 +72,7 @@ private:
    bool m_available;
    QVariantMap m_metadata;

    JMediaMetadataRetriever *m_retriever;
    AndroidMediaMetadataRetriever *m_retriever;
};

QT_END_NAMESPACE

@@ -46,11 +46,11 @@
#include "qandroidaudioinputselectorcontrol.h"
#include "qandroidcamerainfocontrol.h"
#include "qandroidcamerasession.h"
#include "jmediaplayer.h"
#include "jsurfacetexture.h"
#include "jcamera.h"
#include "jmultimediautils.h"
#include "jmediarecorder.h"
#include "androidmediaplayer.h"
#include "androidsurfacetexture.h"
#include "androidcamera.h"
#include "androidmultimediautils.h"
#include "androidmediarecorder.h"
#include <qdebug.h>

QT_BEGIN_NAMESPACE
@@ -147,9 +147,12 @@ int QAndroidMediaServicePlugin::cameraOrientation(const QByteArray &device) cons
    return QAndroidCameraInfoControl::orientation(device);
}

QT_END_NAMESPACE

#ifndef Q_OS_ANDROID_NO_SDK
Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
{
    QT_USE_NAMESPACE
    typedef union {
        JNIEnv *nativeEnvironment;
        void *venv;
@@ -163,15 +166,14 @@ Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)

    JNIEnv *jniEnv = uenv.nativeEnvironment;

    if (!JMediaPlayer::initJNI(jniEnv) ||
        !JCamera::initJNI(jniEnv) ||
        !JMediaRecorder::initJNI(jniEnv)) {
    if (!AndroidMediaPlayer::initJNI(jniEnv) ||
        !AndroidCamera::initJNI(jniEnv) ||
        !AndroidMediaRecorder::initJNI(jniEnv)) {
        return JNI_ERR;
    }

    JSurfaceTexture::initJNI(jniEnv);
    AndroidSurfaceTexture::initJNI(jniEnv);

    return JNI_VERSION_1_4;
}

QT_END_NAMESPACE
#endif // Q_OS_ANDROID_NO_SDK

@@ -11,7 +11,7 @@ HEADERS += \
SOURCES += \
    qandroidmediaserviceplugin.cpp

include (wrappers/wrappers.pri)
include (wrappers/jni/jni.pri)
include (common/common.pri)
include (mediaplayer/mediaplayer.pri)
include (mediacapture/mediacapture.pri)

File diff suppressed because it is too large
@@ -39,21 +39,31 @@
**
****************************************************************************/

#ifndef JCAMERA_H
#define JCAMERA_H
#ifndef ANDROIDCAMERA_H
#define ANDROIDCAMERA_H

#include <qobject.h>
#include <QtCore/private/qjni_p.h>
#include <qsize.h>
#include <qrect.h>
#include <QtMultimedia/qcamera.h>

QT_BEGIN_NAMESPACE

class QThread;

class JCameraWorker;
class AndroidCameraPrivate;
class AndroidSurfaceTexture;

class JCamera : public QObject
struct AndroidCameraInfo
{
    QByteArray name;
    QString description;
    QCamera::Position position;
    int orientation;
};

class AndroidCamera : public QObject
{
    Q_OBJECT
    Q_ENUMS(CameraFacing)
@@ -74,9 +84,9 @@ public:
        YV12 = 842094169
    };

    ~JCamera();
    ~AndroidCamera();

    static JCamera *open(int cameraId);
    static AndroidCamera *open(int cameraId);

    int cameraId() const;

@@ -96,7 +106,7 @@ public:

    QSize previewSize() const;
    void setPreviewSize(const QSize &size);
    void setPreviewTexture(jobject surfaceTexture);
    void setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);

    bool isZoomSupported();
    int getMaxZoom();
@@ -157,6 +167,9 @@ public:
    void fetchLastPreviewFrame();
    QJNIObjectPrivate getCameraObject();

    static int getNumberOfCameras();
    static void getCameraInfo(int id, AndroidCameraInfo *info);

    static bool initJNI(JNIEnv *env);

Q_SIGNALS:
@@ -175,11 +188,13 @@ Q_SIGNALS:
    void frameFetched(const QByteArray &frame);

private:
    JCamera(int cameraId, jobject cam, QThread *workerThread);
    AndroidCamera(AndroidCameraPrivate *d, QThread *worker);

    JCameraWorker *d;
    Q_DECLARE_PRIVATE(AndroidCamera)
    AndroidCameraPrivate *d_ptr;
    QScopedPointer<QThread> m_worker;
};

QT_END_NAMESPACE

#endif // JCAMERA_H
#endif // ANDROIDCAMERA_H
@@ -39,23 +39,23 @@
**
****************************************************************************/

#include "jmediametadataretriever.h"
#include "androidmediametadataretriever.h"

#include <QtCore/private/qjnihelpers_p.h>
#include <QtCore/private/qjni_p.h>

QT_BEGIN_NAMESPACE

JMediaMetadataRetriever::JMediaMetadataRetriever()
AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
{
    m_metadataRetriever = QJNIObjectPrivate("android/media/MediaMetadataRetriever");
}

JMediaMetadataRetriever::~JMediaMetadataRetriever()
AndroidMediaMetadataRetriever::~AndroidMediaMetadataRetriever()
{
}

QString JMediaMetadataRetriever::extractMetadata(MetadataKey key)
QString AndroidMediaMetadataRetriever::extractMetadata(MetadataKey key)
{
    QString value;

@@ -68,7 +68,7 @@ QString JMediaMetadataRetriever::extractMetadata(MetadataKey key)
    return value;
}

void JMediaMetadataRetriever::release()
void AndroidMediaMetadataRetriever::release()
{
    if (!m_metadataRetriever.isValid())
        return;
@@ -76,7 +76,7 @@ void JMediaMetadataRetriever::release()
    m_metadataRetriever.callMethod<void>("release");
}

bool JMediaMetadataRetriever::setDataSource(const QUrl &url)
bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
{
    if (!m_metadataRetriever.isValid())
        return false;
@@ -107,7 +107,7 @@ bool JMediaMetadataRetriever::setDataSource(const QUrl &url)
    return loaded;
}

bool JMediaMetadataRetriever::setDataSource(const QString &path)
bool AndroidMediaMetadataRetriever::setDataSource(const QString &path)
{
    if (!m_metadataRetriever.isValid())
        return false;
@@ -39,15 +39,15 @@
**
****************************************************************************/

#ifndef JMEDIAMETADATARETRIEVER_H
#define JMEDIAMETADATARETRIEVER_H
#ifndef ANDROIDMEDIAMETADATARETRIEVER_H
#define ANDROIDMEDIAMETADATARETRIEVER_H

#include <QtCore/private/qjni_p.h>
#include <qurl.h>

QT_BEGIN_NAMESPACE

class JMediaMetadataRetriever
class AndroidMediaMetadataRetriever
{
public:
    enum MetadataKey {
@@ -76,8 +76,8 @@ public:
        Year = 8
    };

    JMediaMetadataRetriever();
    ~JMediaMetadataRetriever();
    AndroidMediaMetadataRetriever();
    ~AndroidMediaMetadataRetriever();

    QString extractMetadata(MetadataKey key);
    void release();
@@ -90,4 +90,4 @@ private:

QT_END_NAMESPACE

#endif // JMEDIAMETADATARETRIEVER_H
#endif // ANDROIDMEDIAMETADATARETRIEVER_H
@@ -39,20 +39,21 @@
**
****************************************************************************/

#include "jmediaplayer.h"
#include "androidmediaplayer.h"

#include <QString>
#include <QtCore/private/qjni_p.h>
#include <QtCore/private/qjnihelpers_p.h>
#include "androidsurfacetexture.h"
#include <QMap>

static jclass mediaPlayerClass = Q_NULLPTR;
typedef QMap<jlong, JMediaPlayer *> MediaPlayerMap;
typedef QMap<jlong, AndroidMediaPlayer *> MediaPlayerMap;
Q_GLOBAL_STATIC(MediaPlayerMap, mediaPlayers)

QT_BEGIN_NAMESPACE

JMediaPlayer::JMediaPlayer()
AndroidMediaPlayer::AndroidMediaPlayer()
    : QObject()
{

@@ -64,104 +65,104 @@ JMediaPlayer::JMediaPlayer()
    (*mediaPlayers)[id] = this;
}

JMediaPlayer::~JMediaPlayer()
AndroidMediaPlayer::~AndroidMediaPlayer()
{
    mediaPlayers->remove(reinterpret_cast<jlong>(this));
}

void JMediaPlayer::release()
void AndroidMediaPlayer::release()
{
    mMediaPlayer.callMethod<void>("release");
}

void JMediaPlayer::reset()
void AndroidMediaPlayer::reset()
{
    mMediaPlayer.callMethod<void>("reset");
}

int JMediaPlayer::getCurrentPosition()
int AndroidMediaPlayer::getCurrentPosition()
{
    return mMediaPlayer.callMethod<jint>("getCurrentPosition");
}

int JMediaPlayer::getDuration()
int AndroidMediaPlayer::getDuration()
{
    return mMediaPlayer.callMethod<jint>("getDuration");
}

bool JMediaPlayer::isPlaying()
bool AndroidMediaPlayer::isPlaying()
{
    return mMediaPlayer.callMethod<jboolean>("isPlaying");
}

int JMediaPlayer::volume()
int AndroidMediaPlayer::volume()
{
    return mMediaPlayer.callMethod<jint>("getVolume");
}

bool JMediaPlayer::isMuted()
bool AndroidMediaPlayer::isMuted()
{
    return mMediaPlayer.callMethod<jboolean>("isMuted");
}

jobject JMediaPlayer::display()
jobject AndroidMediaPlayer::display()
{
    return mMediaPlayer.callObjectMethod("display", "()Landroid/view/SurfaceHolder;").object();
}

void JMediaPlayer::play()
void AndroidMediaPlayer::play()
{
    mMediaPlayer.callMethod<void>("start");
}

void JMediaPlayer::pause()
void AndroidMediaPlayer::pause()
{
    mMediaPlayer.callMethod<void>("pause");
}

void JMediaPlayer::stop()
void AndroidMediaPlayer::stop()
{
    mMediaPlayer.callMethod<void>("stop");
}

void JMediaPlayer::seekTo(qint32 msec)
void AndroidMediaPlayer::seekTo(qint32 msec)
{
    mMediaPlayer.callMethod<void>("seekTo", "(I)V", jint(msec));
}

void JMediaPlayer::setMuted(bool mute)
void AndroidMediaPlayer::setMuted(bool mute)
{
    mMediaPlayer.callMethod<void>("mute", "(Z)V", jboolean(mute));
}

void JMediaPlayer::setDataSource(const QString &path)
void AndroidMediaPlayer::setDataSource(const QString &path)
{
    QJNIObjectPrivate string = QJNIObjectPrivate::fromString(path);
    mMediaPlayer.callMethod<void>("setDataSource", "(Ljava/lang/String;)V", string.object());
}

void JMediaPlayer::prepareAsync()
void AndroidMediaPlayer::prepareAsync()
{
    mMediaPlayer.callMethod<void>("prepareAsync");
}

void JMediaPlayer::setVolume(int volume)
void AndroidMediaPlayer::setVolume(int volume)
{
    mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
}

void JMediaPlayer::setDisplay(jobject surfaceHolder)
void AndroidMediaPlayer::setDisplay(AndroidSurfaceTexture *surfaceTexture)
{
    mMediaPlayer.callMethod<void>("setDisplay", "(Landroid/view/SurfaceHolder;)V", surfaceHolder);
    mMediaPlayer.callMethod<void>("setDisplay",
                                  "(Landroid/view/SurfaceHolder;)V",
                                  surfaceTexture ? surfaceTexture->surfaceHolder() : 0);
}

QT_END_NAMESPACE

static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -172,7 +173,7 @@ static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlo
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -183,7 +184,7 @@ static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlo
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -194,7 +195,7 @@ static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jl
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -205,7 +206,7 @@ static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -216,7 +217,7 @@ static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

@@ -231,16 +232,14 @@ static void onVideoSizeChangedNative(JNIEnv *env,
{
    Q_UNUSED(env);
    Q_UNUSED(thiz);
    JMediaPlayer *const mp = (*mediaPlayers)[id];
    AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
    if (!mp)
        return;

    Q_EMIT mp->videoSizeChanged(width, height);
}

QT_BEGIN_NAMESPACE

bool JMediaPlayer::initJNI(JNIEnv *env)
bool AndroidMediaPlayer::initJNI(JNIEnv *env)
{
    jclass jClass = env->FindClass("org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer");

@@ -39,20 +39,22 @@
**
****************************************************************************/

#ifndef QANDROIDMEDIAPLAYER_H
#define QANDROIDMEDIAPLAYER_H
#ifndef ANDROIDMEDIAPLAYER_H
#define ANDROIDMEDIAPLAYER_H

#include <QObject>
#include <QtCore/private/qjni_p.h>

QT_BEGIN_NAMESPACE

class JMediaPlayer : public QObject
class AndroidSurfaceTexture;

class AndroidMediaPlayer : public QObject
{
    Q_OBJECT
public:
    JMediaPlayer();
    ~JMediaPlayer();
    AndroidMediaPlayer();
    ~AndroidMediaPlayer();

    enum MediaError
    {
@@ -113,7 +115,7 @@ public:
    void setDataSource(const QString &path);
    void prepareAsync();
    void setVolume(int volume);
    void setDisplay(jobject surfaceHolder);
    void setDisplay(AndroidSurfaceTexture *surfaceTexture);

    static bool initJNI(JNIEnv *env);

@@ -132,4 +134,4 @@ private:

QT_END_NAMESPACE

#endif // QANDROIDMEDIAPLAYER_H
#endif // ANDROIDMEDIAPLAYER_H
@@ -39,33 +39,107 @@
**
****************************************************************************/

#include "jmediarecorder.h"
#include "androidmediarecorder.h"

#include "jcamera.h"
#include "androidcamera.h"
#include <QtCore/private/qjni_p.h>
#include <qmap.h>

QT_BEGIN_NAMESPACE

typedef QMap<QString, QJNIObjectPrivate> CamcorderProfiles;
Q_GLOBAL_STATIC(CamcorderProfiles, g_camcorderProfiles)

static QString profileKey()
{
    return QStringLiteral("%1-%2");
}

bool AndroidCamcorderProfile::hasProfile(jint cameraId, Quality quality)
{
    if (g_camcorderProfiles->contains(profileKey().arg(cameraId).arg(quality)))
        return true;

    return QJNIObjectPrivate::callStaticMethod<jboolean>("android/media/CamcorderProfile",
                                                         "hasProfile",
                                                         "(II)Z",
                                                         cameraId,
                                                         quality);
}

AndroidCamcorderProfile AndroidCamcorderProfile::get(jint cameraId, Quality quality)
{
    const QString key = profileKey().arg(cameraId).arg(quality);
    QMap<QString, QJNIObjectPrivate>::const_iterator it = g_camcorderProfiles->constFind(key);

    if (it != g_camcorderProfiles->constEnd())
        return AndroidCamcorderProfile(*it);

    QJNIObjectPrivate camProfile = QJNIObjectPrivate::callStaticObjectMethod("android/media/CamcorderProfile",
                                                                             "get",
                                                                             "(II)Landroid/media/CamcorderProfile;",
                                                                             cameraId,
                                                                             quality);

    return AndroidCamcorderProfile((*g_camcorderProfiles)[key] = camProfile);
}

int AndroidCamcorderProfile::getValue(AndroidCamcorderProfile::Field field) const
{
    switch (field) {
    case audioBitRate:
        return m_camcorderProfile.getField<jint>("audioBitRate");
    case audioChannels:
        return m_camcorderProfile.getField<jint>("audioChannels");
    case audioCodec:
        return m_camcorderProfile.getField<jint>("audioCodec");
    case audioSampleRate:
        return m_camcorderProfile.getField<jint>("audioSampleRate");
    case duration:
        return m_camcorderProfile.getField<jint>("duration");
    case fileFormat:
        return m_camcorderProfile.getField<jint>("fileFormat");
    case quality:
        return m_camcorderProfile.getField<jint>("quality");
    case videoBitRate:
        return m_camcorderProfile.getField<jint>("videoBitRate");
    case videoCodec:
        return m_camcorderProfile.getField<jint>("videoCodec");
    case videoFrameHeight:
        return m_camcorderProfile.getField<jint>("videoFrameHeight");
    case videoFrameRate:
        return m_camcorderProfile.getField<jint>("videoFrameRate");
    case videoFrameWidth:
        return m_camcorderProfile.getField<jint>("videoFrameWidth");
    }

    return 0;
}

AndroidCamcorderProfile::AndroidCamcorderProfile(const QJNIObjectPrivate &camcorderProfile)
{
    m_camcorderProfile = camcorderProfile;
}

static jclass g_qtMediaRecorderListenerClass = 0;
typedef QMap<jlong, JMediaRecorder*> MediaRecorderMap;
typedef QMap<jlong, AndroidMediaRecorder*> MediaRecorderMap;
Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)

static void notifyError(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
    JMediaRecorder *obj = mediaRecorders->value(id, 0);
    AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
    if (obj)
        emit obj->error(what, extra);
}

static void notifyInfo(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
    JMediaRecorder *obj = mediaRecorders->value(id, 0);
    AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
    if (obj)
        emit obj->info(what, extra);
}

JMediaRecorder::JMediaRecorder()
AndroidMediaRecorder::AndroidMediaRecorder()
    : QObject()
    , m_id(reinterpret_cast<jlong>(this))
{
@@ -82,17 +156,17 @@ JMediaRecorder::JMediaRecorder()
    }
}

JMediaRecorder::~JMediaRecorder()
AndroidMediaRecorder::~AndroidMediaRecorder()
{
    mediaRecorders->remove(m_id);
}

void JMediaRecorder::release()
void AndroidMediaRecorder::release()
{
    m_mediaRecorder.callMethod<void>("release");
}

bool JMediaRecorder::prepare()
bool AndroidMediaRecorder::prepare()
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("prepare");
@@ -106,12 +180,12 @@ bool JMediaRecorder::prepare()
    return true;
}

void JMediaRecorder::reset()
void AndroidMediaRecorder::reset()
{
    m_mediaRecorder.callMethod<void>("reset");
}

bool JMediaRecorder::start()
bool AndroidMediaRecorder::start()
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("start");
@@ -125,7 +199,7 @@ bool JMediaRecorder::start()
    return true;
}

void JMediaRecorder::stop()
void AndroidMediaRecorder::stop()
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("stop");
@@ -137,12 +211,12 @@ void JMediaRecorder::stop()
    }
}

void JMediaRecorder::setAudioChannels(int numChannels)
void AndroidMediaRecorder::setAudioChannels(int numChannels)
{
    m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
}

void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
void AndroidMediaRecorder::setAudioEncoder(AudioEncoder encoder)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
@@ -154,17 +228,17 @@ void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
    }
}

void JMediaRecorder::setAudioEncodingBitRate(int bitRate)
void AndroidMediaRecorder::setAudioEncodingBitRate(int bitRate)
{
    m_mediaRecorder.callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
}

void JMediaRecorder::setAudioSamplingRate(int samplingRate)
void AndroidMediaRecorder::setAudioSamplingRate(int samplingRate)
{
    m_mediaRecorder.callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
}

void JMediaRecorder::setAudioSource(AudioSource source)
void AndroidMediaRecorder::setAudioSource(AudioSource source)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setAudioSource", "(I)V", int(source));
@@ -176,13 +250,13 @@ void JMediaRecorder::setAudioSource(AudioSource source)
    }
}

void JMediaRecorder::setCamera(JCamera *camera)
void AndroidMediaRecorder::setCamera(AndroidCamera *camera)
{
    QJNIObjectPrivate cam = camera->getCameraObject();
    m_mediaRecorder.callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
}

void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
void AndroidMediaRecorder::setVideoEncoder(VideoEncoder encoder)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
@@ -194,12 +268,12 @@ void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
    }
}

void JMediaRecorder::setVideoEncodingBitRate(int bitRate)
void AndroidMediaRecorder::setVideoEncodingBitRate(int bitRate)
{
    m_mediaRecorder.callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
}

void JMediaRecorder::setVideoFrameRate(int rate)
void AndroidMediaRecorder::setVideoFrameRate(int rate)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setVideoFrameRate", "(I)V", rate);
@@ -211,7 +285,7 @@ void JMediaRecorder::setVideoFrameRate(int rate)
    }
}

void JMediaRecorder::setVideoSize(const QSize &size)
void AndroidMediaRecorder::setVideoSize(const QSize &size)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
@@ -223,7 +297,7 @@ void JMediaRecorder::setVideoSize(const QSize &size)
    }
}

void JMediaRecorder::setVideoSource(VideoSource source)
void AndroidMediaRecorder::setVideoSource(VideoSource source)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setVideoSource", "(I)V", int(source));
@@ -235,7 +309,7 @@ void JMediaRecorder::setVideoSource(VideoSource source)
    }
}

void JMediaRecorder::setOrientationHint(int degrees)
void AndroidMediaRecorder::setOrientationHint(int degrees)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setOrientationHint", "(I)V", degrees);
@@ -247,7 +321,7 @@ void JMediaRecorder::setOrientationHint(int degrees)
    }
}

void JMediaRecorder::setOutputFormat(OutputFormat format)
void AndroidMediaRecorder::setOutputFormat(OutputFormat format)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setOutputFormat", "(I)V", int(format));
@@ -259,7 +333,7 @@ void JMediaRecorder::setOutputFormat(OutputFormat format)
    }
}

void JMediaRecorder::setOutputFile(const QString &path)
void AndroidMediaRecorder::setOutputFile(const QString &path)
{
    QJNIEnvironmentPrivate env;
    m_mediaRecorder.callMethod<void>("setOutputFile",
@@ -278,7 +352,7 @@ static JNINativeMethod methods[] = {
    {"notifyInfo", "(JII)V", (void *)notifyInfo}
};

bool JMediaRecorder::initJNI(JNIEnv *env)
bool AndroidMediaRecorder::initJNI(JNIEnv *env)
{
    jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtMediaRecorderListener");
    if (env->ExceptionCheck())
@@ -39,8 +39,8 @@
**
****************************************************************************/

#ifndef JMEDIARECORDER_H
#define JMEDIARECORDER_H
#ifndef ANDROIDMEDIARECORDER_H
#define ANDROIDMEDIARECORDER_H

#include <qobject.h>
#include <QtCore/private/qjni_p.h>
@@ -48,9 +48,47 @@

QT_BEGIN_NAMESPACE

class JCamera;
class AndroidCamera;

class JMediaRecorder : public QObject
class AndroidCamcorderProfile
{
public:
    enum Quality { // Needs to match CamcorderProfile
        QUALITY_LOW,
        QUALITY_HIGH,
        QUALITY_QCIF,
        QUALITY_CIF,
        QUALITY_480P,
        QUALITY_720P,
        QUALITY_1080P,
        QUALITY_QVGA
    };

    enum Field {
        audioBitRate,
        audioChannels,
        audioCodec,
        audioSampleRate,
        duration,
        fileFormat,
        quality,
        videoBitRate,
        videoCodec,
        videoFrameHeight,
        videoFrameRate,
        videoFrameWidth
    };

    static bool hasProfile(jint cameraId, Quality quality);
    static AndroidCamcorderProfile get(jint cameraId, Quality quality);
    int getValue(Field field) const;

private:
    AndroidCamcorderProfile(const QJNIObjectPrivate &camcorderProfile);
    QJNIObjectPrivate m_camcorderProfile;
};

class AndroidMediaRecorder : public QObject
{
    Q_OBJECT
public:
@@ -91,8 +129,8 @@ public:
        AMR_WB_Format = 4
    };

    JMediaRecorder();
    ~JMediaRecorder();
    AndroidMediaRecorder();
    ~AndroidMediaRecorder();

    void release();
    bool prepare();
@@ -107,7 +145,7 @@ public:
    void setAudioSamplingRate(int samplingRate);
    void setAudioSource(AudioSource source);

    void setCamera(JCamera *camera);
    void setCamera(AndroidCamera *camera);
    void setVideoEncoder(VideoEncoder encoder);
    void setVideoEncodingBitRate(int bitRate);
    void setVideoFrameRate(int rate);
@@ -132,4 +170,4 @@ private:

QT_END_NAMESPACE

#endif // JMEDIARECORDER_H
#endif // ANDROIDMEDIARECORDER_H
@@ -39,14 +39,14 @@
**
****************************************************************************/

#include "jmultimediautils.h"
#include "androidmultimediautils.h"

#include <QtCore/private/qjni_p.h>

QT_BEGIN_NAMESPACE

void JMultimediaUtils::enableOrientationListener(bool enable)
void AndroidMultimediaUtils::enableOrientationListener(bool enable)
{
    QJNIObjectPrivate::callStaticMethod<void>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
                                              "enableOrientationListener",
@@ -54,13 +54,13 @@ void JMultimediaUtils::enableOrientationListener(bool enable)
                                              enable);
}

int JMultimediaUtils::getDeviceOrientation()
int AndroidMultimediaUtils::getDeviceOrientation()
{
    return QJNIObjectPrivate::callStaticMethod<jint>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
                                                     "getDeviceOrientation");
}

QString JMultimediaUtils::getDefaultMediaDirectory(MediaType type)
QString AndroidMultimediaUtils::getDefaultMediaDirectory(MediaType type)
{
    QJNIObjectPrivate path = QJNIObjectPrivate::callStaticObjectMethod("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
                                                                       "getDefaultMediaDirectory",
@@ -69,7 +69,7 @@ QString JMultimediaUtils::getDefaultMediaDirectory(MediaType type)
    return path.toString();
}

void JMultimediaUtils::registerMediaFile(const QString &file)
void AndroidMultimediaUtils::registerMediaFile(const QString &file)
{
    QJNIObjectPrivate::callStaticMethod<void>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
                                              "registerMediaFile",
@@ -39,15 +39,15 @@
**
****************************************************************************/

#ifndef JMULTIMEDIAUTILS_H
#define JMULTIMEDIAUTILS_H
#ifndef ANDROIDMULTIMEDIAUTILS_H
#define ANDROIDMULTIMEDIAUTILS_H

#include <qobject.h>
#include <QtCore/private/qjni_p.h>

QT_BEGIN_NAMESPACE

class JMultimediaUtils
class AndroidMultimediaUtils
{
public:
    enum MediaType {
@@ -65,4 +65,4 @@ public:

QT_END_NAMESPACE

#endif // JMULTIMEDIAUTILS_H
#endif // ANDROIDMULTIMEDIAUTILS_H
@@ -39,24 +39,24 @@
**
****************************************************************************/

#include "jsurfacetexture.h"
#include "androidsurfacetexture.h"
#include <QtCore/private/qjni_p.h>
#include <QtCore/private/qjnihelpers_p.h>

QT_BEGIN_NAMESPACE

static jclass g_qtSurfaceTextureListenerClass = 0;
static QMap<int, JSurfaceTexture*> g_objectMap;
static QMap<int, AndroidSurfaceTexture*> g_objectMap;

// native method for QtSurfaceTexture.java
static void notifyFrameAvailable(JNIEnv* , jobject, int id)
{
    JSurfaceTexture *obj = g_objectMap.value(id, 0);
    AndroidSurfaceTexture *obj = g_objectMap.value(id, 0);
    if (obj)
        Q_EMIT obj->frameAvailable();
}

JSurfaceTexture::JSurfaceTexture(unsigned int texName)
AndroidSurfaceTexture::AndroidSurfaceTexture(unsigned int texName)
    : QObject()
    , m_texID(int(texName))
{
@@ -84,15 +84,18 @@ JSurfaceTexture::JSurfaceTexture(unsigned int texName)
                                 listener.object());
}

JSurfaceTexture::~JSurfaceTexture()
AndroidSurfaceTexture::~AndroidSurfaceTexture()
{
    if (QtAndroidPrivate::androidSdkVersion() > 13 && m_surfaceView.isValid())
        m_surfaceView.callMethod<void>("release");

    if (m_surfaceTexture.isValid()) {
        release();
        g_objectMap.remove(m_texID);
    }
}

QMatrix4x4 JSurfaceTexture::getTransformMatrix()
QMatrix4x4 AndroidSurfaceTexture::getTransformMatrix()
{
    QMatrix4x4 matrix;
    if (!m_surfaceTexture.isValid())
@@ -108,7 +111,7 @@ QMatrix4x4 JSurfaceTexture::getTransformMatrix()
    return matrix;
}

void JSurfaceTexture::release()
void AndroidSurfaceTexture::release()
{
    if (QtAndroidPrivate::androidSdkVersion() < 14)
        return;
@@ -116,7 +119,7 @@ void JSurfaceTexture::release()
    m_surfaceTexture.callMethod<void>("release");
}

void JSurfaceTexture::updateTexImage()
void AndroidSurfaceTexture::updateTexImage()
{
    if (!m_surfaceTexture.isValid())
        return;
@@ -124,16 +127,36 @@ void JSurfaceTexture::updateTexImage()
    m_surfaceTexture.callMethod<void>("updateTexImage");
}

jobject JSurfaceTexture::object()
jobject AndroidSurfaceTexture::surfaceTexture()
{
    return m_surfaceTexture.object();
}

jobject AndroidSurfaceTexture::surfaceView()
{
    return m_surfaceView.object();
}

jobject AndroidSurfaceTexture::surfaceHolder()
{
    if (!m_surfaceHolder.isValid()) {
        m_surfaceView = QJNIObjectPrivate("android/view/Surface",
                                          "(Landroid/graphics/SurfaceTexture;)V",
                                          m_surfaceTexture.object());

        m_surfaceHolder = QJNIObjectPrivate("org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder",
                                            "(Landroid/view/Surface;)V",
                                            m_surfaceView.object());
    }

    return m_surfaceHolder.object();
}

static JNINativeMethod methods[] = {
    {"notifyFrameAvailable", "(I)V", (void *)notifyFrameAvailable}
};

bool JSurfaceTexture::initJNI(JNIEnv *env)
bool AndroidSurfaceTexture::initJNI(JNIEnv *env)
{
    // SurfaceTexture is available since API 11.
    if (QtAndroidPrivate::androidSdkVersion() < 11)
@@ -39,8 +39,8 @@
**
****************************************************************************/

#ifndef JSURFACETEXTURE_H
#define JSURFACETEXTURE_H
#ifndef ANDROIDSURFACETEXTURE_H
#define ANDROIDSURFACETEXTURE_H

#include <qobject.h>
#include <QtCore/private/qjni_p.h>
@@ -49,15 +49,18 @@

QT_BEGIN_NAMESPACE

class JSurfaceTexture : public QObject
class AndroidSurfaceTexture : public QObject
{
Q_OBJECT
public:
explicit JSurfaceTexture(unsigned int texName);
~JSurfaceTexture();
explicit AndroidSurfaceTexture(unsigned int texName);
~AndroidSurfaceTexture();

int textureID() const { return m_texID; }
jobject object();
jobject surfaceTexture();
jobject surfaceView();
jobject surfaceHolder();
inline bool isValid() const { return m_surfaceTexture.isValid(); }

QMatrix4x4 getTransformMatrix();
void release(); // API level 14
@@ -71,8 +74,10 @@ Q_SIGNALS:
private:
int m_texID;
QJNIObjectPrivate m_surfaceTexture;
QJNIObjectPrivate m_surfaceView;
QJNIObjectPrivate m_surfaceHolder;
};

QT_END_NAMESPACE

#endif // JSURFACETEXTURE_H
#endif // ANDROIDSURFACETEXTURE_H
src/plugins/android/src/wrappers/jni/jni.pri (Normal file, 19 lines)
@@ -0,0 +1,19 @@
QT += platformsupport-private

INCLUDEPATH += $$PWD

HEADERS += \
$$PWD/androidmediaplayer.h \
$$PWD/androidsurfacetexture.h \
$$PWD/androidmediametadataretriever.h \
$$PWD/androidcamera.h \
$$PWD/androidmultimediautils.h \
$$PWD/androidmediarecorder.h

SOURCES += \
$$PWD/androidmediaplayer.cpp \
$$PWD/androidsurfacetexture.cpp \
$$PWD/androidmediametadataretriever.cpp \
$$PWD/androidcamera.cpp \
$$PWD/androidmultimediautils.cpp \
$$PWD/androidmediarecorder.cpp
@@ -1,19 +0,0 @@
QT += platformsupport-private

INCLUDEPATH += $$PWD

HEADERS += \
$$PWD/jmediaplayer.h \
$$PWD/jsurfacetexture.h \
$$PWD/jmediametadataretriever.h \
$$PWD/jcamera.h \
$$PWD/jmultimediautils.h \
$$PWD/jmediarecorder.h

SOURCES += \
$$PWD/jmediaplayer.cpp \
$$PWD/jsurfacetexture.cpp \
$$PWD/jmediametadataretriever.cpp \
$$PWD/jcamera.cpp \
$$PWD/jmultimediautils.cpp \
$$PWD/jmediarecorder.cpp
@@ -622,17 +622,10 @@ DirectShowRcSource::DirectShowRcSource(DirectShowEventLoop *loop)
bool DirectShowRcSource::open(const QUrl &url)
{
m_file.moveToThread(QCoreApplication::instance()->thread());

m_file.setFileName(QLatin1Char(':') + url.path());

qDebug("qrc file %s", qPrintable(m_file.fileName()));

if (m_file.open(QIODevice::ReadOnly)) {
qDebug("Size %d", int(m_file.size()));
qDebug("Sequential %d", int(m_file.isSequential()));

setDevice(&m_file);

return true;
} else {
return false;
@@ -104,75 +104,85 @@ static q_SHCreateItemFromParsingName sHCreateItemFromParsingName = 0;
#endif

#ifndef QT_NO_WMSDK

namespace
{
struct QWMMetaDataKeyLookup
struct QWMMetaDataKey
{
QString key;
const wchar_t *token;
QString qtName;
const wchar_t *wmName;

QWMMetaDataKey(const QString &qtn, const wchar_t *wmn) : qtName(qtn), wmName(wmn) { }
};
}

static const QWMMetaDataKeyLookup qt_wmMetaDataKeys[] =
typedef QList<QWMMetaDataKey> QWMMetaDataKeys;
Q_GLOBAL_STATIC(QWMMetaDataKeys, metadataKeys)

static const QWMMetaDataKeys *qt_wmMetaDataKeys()
{
{ QMediaMetaData::Title, L"Title" },
{ QMediaMetaData::SubTitle, L"WM/SubTitle" },
{ QMediaMetaData::Author, L"Author" },
{ QMediaMetaData::Comment, L"Comment" },
{ QMediaMetaData::Description, L"Description" },
{ QMediaMetaData::Category, L"WM/Category" },
{ QMediaMetaData::Genre, L"WM/Genre" },
//{ QMediaMetaData::Date, 0 },
{ QMediaMetaData::Year, L"WM/Year" },
{ QMediaMetaData::UserRating, L"Rating" },
//{ QMediaMetaData::MetaDatawords, 0 },
{ QMediaMetaData::Language, L"WM/Language" },
{ QMediaMetaData::Publisher, L"WM/Publisher" },
{ QMediaMetaData::Copyright, L"Copyright" },
{ QMediaMetaData::ParentalRating, L"WM/ParentalRating" },
//{ QMediaMetaData::RatingOrganisation, L"RatingOrganisation" },
if (metadataKeys->isEmpty()) {
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Title, L"Title"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::SubTitle, L"WM/SubTitle"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Author, L"Author"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Comment, L"Comment"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Description, L"Description"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Category, L"WM/Category"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Genre, L"WM/Genre"));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Date, 0));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Year, L"WM/Year"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::UserRating, L"Rating"));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::MetaDatawords, 0));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Language, L"WM/Language"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Publisher, L"WM/Publisher"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Copyright, L"Copyright"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::ParentalRating, L"WM/ParentalRating"));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::RatingOrganisation, L"RatingOrganisation"));

// Media
{ QMediaMetaData::Size, L"FileSize" },
{ QMediaMetaData::MediaType, L"MediaType" },
{ QMediaMetaData::Duration, L"Duration" },
// Media
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Size, L"FileSize"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::MediaType, L"MediaType"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Duration, L"Duration"));

// Audio
{ QMediaMetaData::AudioBitRate, L"AudioBitRate" },
{ QMediaMetaData::AudioCodec, L"AudioCodec" },
{ QMediaMetaData::ChannelCount, L"ChannelCount" },
{ QMediaMetaData::SampleRate, L"Frequency" },
// Audio
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::AudioBitRate, L"AudioBitRate"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::AudioCodec, L"AudioCodec"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::ChannelCount, L"ChannelCount"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::SampleRate, L"Frequency"));

// Music
{ QMediaMetaData::AlbumTitle, L"WM/AlbumTitle" },
{ QMediaMetaData::AlbumArtist, L"WM/AlbumArtist" },
{ QMediaMetaData::ContributingArtist, L"Author" },
{ QMediaMetaData::Composer, L"WM/Composer" },
{ QMediaMetaData::Conductor, L"WM/Conductor" },
{ QMediaMetaData::Lyrics, L"WM/Lyrics" },
{ QMediaMetaData::Mood, L"WM/Mood" },
{ QMediaMetaData::TrackNumber, L"WM/TrackNumber" },
//{ QMediaMetaData::TrackCount, 0 },
//{ QMediaMetaData::CoverArtUriSmall, 0 },
//{ QMediaMetaData::CoverArtUriLarge, 0 },
// Music
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::AlbumTitle, L"WM/AlbumTitle"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::AlbumArtist, L"WM/AlbumArtist"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::ContributingArtist, L"Author"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Composer, L"WM/Composer"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Conductor, L"WM/Conductor"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Lyrics, L"WM/Lyrics"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Mood, L"WM/Mood"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::TrackNumber, L"WM/TrackNumber"));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::TrackCount, 0));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::CoverArtUriSmall, 0));
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::CoverArtUriLarge, 0));

// Image/Video
{ QMediaMetaData::Resolution, L"WM/VideoHeight" },
{ QMediaMetaData::PixelAspectRatio, L"AspectRatioX" },
// Image/Video
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Resolution, L"WM/VideoHeight"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::PixelAspectRatio, L"AspectRatioX"));

// Video
{ QMediaMetaData::VideoFrameRate, L"WM/VideoFrameRate" },
{ QMediaMetaData::VideoBitRate, L"VideoBitRate" },
{ QMediaMetaData::VideoCodec, L"VideoCodec" },
// Video
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::VideoFrameRate, L"WM/VideoFrameRate"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::VideoBitRate, L"VideoBitRate"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::VideoCodec, L"VideoCodec"));

//{ QMediaMetaData::PosterUri, 0 },
//metadataKeys->append(QWMMetaDataKey(QMediaMetaData::PosterUri, 0));

// Movie
{ QMediaMetaData::ChapterNumber, L"ChapterNumber" },
{ QMediaMetaData::Director, L"WM/Director" },
{ QMediaMetaData::LeadPerformer, L"LeadPerformer" },
{ QMediaMetaData::Writer, L"WM/Writer" },
};
// Movie
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::ChapterNumber, L"ChapterNumber"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Director, L"WM/Director"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::LeadPerformer, L"LeadPerformer"));
metadataKeys->append(QWMMetaDataKey(QMediaMetaData::Writer, L"WM/Writer"));
}

return metadataKeys;
}

static QVariant getValue(IWMHeaderInfo *header, const wchar_t *key)
{
@@ -491,32 +501,29 @@ void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *s
IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);

if (info) {
static const int count = sizeof(qt_wmMetaDataKeys) / sizeof(QWMMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
QVariant var = getValue(info, qt_wmMetaDataKeys[i].token);
Q_FOREACH (const QWMMetaDataKey &key, *qt_wmMetaDataKeys()) {
QVariant var = getValue(info, key.wmName);
if (var.isValid()) {
QString key = qt_wmMetaDataKeys[i].key;

if (key == QMediaMetaData::Duration) {
if (key.qtName == QMediaMetaData::Duration) {
// duration is provided in 100-nanosecond units, convert to milliseconds
var = (var.toLongLong() + 10000) / 10000;
} else if (key == QMediaMetaData::Resolution) {
} else if (key.qtName == QMediaMetaData::Resolution) {
QSize res;
res.setHeight(var.toUInt());
res.setWidth(getValue(info, L"WM/VideoWidth").toUInt());
var = res;
} else if (key == QMediaMetaData::VideoFrameRate) {
} else if (key.qtName == QMediaMetaData::VideoFrameRate) {
var = var.toReal() / 1000.f;
} else if (key == QMediaMetaData::PixelAspectRatio) {
} else if (key.qtName == QMediaMetaData::PixelAspectRatio) {
QSize aspectRatio;
aspectRatio.setWidth(var.toUInt());
aspectRatio.setHeight(getValue(info, L"AspectRatioY").toUInt());
var = aspectRatio;
} else if (key == QMediaMetaData::UserRating) {
} else if (key.qtName == QMediaMetaData::UserRating) {
var = (var.toUInt() - 1) / qreal(98) * 100;
}

m_metadata.insert(key, var);
m_metadata.insert(key.qtName, var);
}
}

@@ -50,13 +50,6 @@

QT_BEGIN_NAMESPACE

struct QGstreamerMetaDataKeyLookup
{
QString key;
const char *token;
QVariant::Type type;
};

static QVariant fromGStreamerOrientation(const QVariant &value)
{
// Note gstreamer tokens either describe the counter clockwise rotation of the
@@ -87,87 +80,109 @@ static QVariant toGStreamerOrientation(const QVariant &value)
}
}

static const QGstreamerMetaDataKeyLookup qt_gstreamerMetaDataKeys[] =
namespace {
struct QGStreamerMetaDataKey
{
QString qtName;
const char *gstName;
QVariant::Type type;

QGStreamerMetaDataKey(const QString &qtn, const char *gstn, QVariant::Type t)
: qtName(qtn)
, gstName(gstn)
, type(t)
{ }
};
}

typedef QList<QGStreamerMetaDataKey> QGStreamerMetaDataKeys;
Q_GLOBAL_STATIC(QGStreamerMetaDataKeys, metadataKeys)

static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
{
{ QMediaMetaData::Title, GST_TAG_TITLE, QVariant::String },
//{ QMediaMetaData::SubTitle, 0, QVariant::String },
//{ QMediaMetaData::Author, 0, QVariant::String },
{ QMediaMetaData::Comment, GST_TAG_COMMENT, QVariant::String },
{ QMediaMetaData::Date, GST_TAG_DATE_TIME, QVariant::DateTime },
{ QMediaMetaData::Description, GST_TAG_DESCRIPTION, QVariant::String },
//{ QMediaMetaData::Category, 0, QVariant::String },
{ QMediaMetaData::Genre, GST_TAG_GENRE, QVariant::String },
//{ QMediaMetaData::Year, 0, QVariant::Int },
//{ QMediaMetaData::UserRating, , QVariant::Int },
if (metadataKeys->isEmpty()) {
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Title, GST_TAG_TITLE, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::SubTitle, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Author, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Comment, GST_TAG_COMMENT, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Date, GST_TAG_DATE_TIME, QVariant::DateTime));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Description, GST_TAG_DESCRIPTION, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Category, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Genre, GST_TAG_GENRE, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Year, 0, QVariant::Int));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::UserRating, , QVariant::Int));

{ QMediaMetaData::Language, GST_TAG_LANGUAGE_CODE, QVariant::String },
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Language, GST_TAG_LANGUAGE_CODE, QVariant::String));

{ QMediaMetaData::Publisher, GST_TAG_ORGANIZATION, QVariant::String },
{ QMediaMetaData::Copyright, GST_TAG_COPYRIGHT, QVariant::String },
//{ QMediaMetaData::ParentalRating, 0, QVariant::String },
//{ QMediaMetaData::RatingOrganisation, 0, QVariant::String },
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Publisher, GST_TAG_ORGANIZATION, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Copyright, GST_TAG_COPYRIGHT, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ParentalRating, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::RatingOrganisation, 0, QVariant::String));

// Media
//{ QMediaMetaData::Size, 0, QVariant::Int },
//{ QMediaMetaData::MediaType, 0, QVariant::String },
{ QMediaMetaData::Duration, GST_TAG_DURATION, QVariant::Int },
// Media
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Size, 0, QVariant::Int));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::MediaType, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Duration, GST_TAG_DURATION, QVariant::Int));

// Audio
{ QMediaMetaData::AudioBitRate, GST_TAG_BITRATE, QVariant::Int },
{ QMediaMetaData::AudioCodec, GST_TAG_AUDIO_CODEC, QVariant::String },
//{ QMediaMetaData::ChannelCount, 0, QVariant::Int },
//{ QMediaMetaData::SampleRate, 0, QVariant::Int },
// Audio
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AudioBitRate, GST_TAG_BITRATE, QVariant::Int));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AudioCodec, GST_TAG_AUDIO_CODEC, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ChannelCount, 0, QVariant::Int));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::SampleRate, 0, QVariant::Int));

// Music
{ QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String },
{ QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String},
{ QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String },
// Music
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String));
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
{ QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String },
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String));
#endif
//{ QMediaMetaData::Conductor, 0, QVariant::String },
//{ QMediaMetaData::Lyrics, 0, QVariant::String },
//{ QMediaMetaData::Mood, 0, QVariant::String },
{ QMediaMetaData::TrackNumber, GST_TAG_TRACK_NUMBER, QVariant::Int },
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Conductor, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Lyrics, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Mood, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::TrackNumber, GST_TAG_TRACK_NUMBER, QVariant::Int));

//{ QMediaMetaData::CoverArtUrlSmall, 0, QVariant::String },
//{ QMediaMetaData::CoverArtUrlLarge, 0, QVariant::String },
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CoverArtUrlSmall, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CoverArtUrlLarge, 0, QVariant::String));

// Image/Video
//{ QMediaMetaData::Resolution, 0, QVariant::Size },
//{ QMediaMetaData::PixelAspectRatio, 0, QVariant::Size },
// Image/Video
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Resolution, 0, QVariant::Size));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::PixelAspectRatio, 0, QVariant::Size));

// Video
//{ QMediaMetaData::VideoFrameRate, 0, QVariant::String },
//{ QMediaMetaData::VideoBitRate, 0, QVariant::Double },
{ QMediaMetaData::VideoCodec, GST_TAG_VIDEO_CODEC, QVariant::String },
// Video
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::VideoFrameRate, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::VideoBitRate, 0, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::VideoCodec, GST_TAG_VIDEO_CODEC, QVariant::String));

//{ QMediaMetaData::PosterUrl, 0, QVariant::String },
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::PosterUrl, 0, QVariant::String));

// Movie
//{ QMediaMetaData::ChapterNumber, 0, QVariant::Int },
//{ QMediaMetaData::Director, 0, QVariant::String },
{ QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String },
//{ QMediaMetaData::Writer, 0, QVariant::String },
// Movie
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ChapterNumber, 0, QVariant::Int));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Director, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Writer, 0, QVariant::String));

#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 30)
// Photos
{ QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String },
{ QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String },
//{ QMediaMetaData::Event, 0, QVariant::String },
//{ QMediaMetaData::Subject, 0, QVariant::String },
// Photos
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Event, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Subject, 0, QVariant::String));

{ QMediaMetaData::Orientation, GST_TAG_IMAGE_ORIENTATION, QVariant::String },
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Orientation, GST_TAG_IMAGE_ORIENTATION, QVariant::String));

// GPS
{ QMediaMetaData::GPSLatitude, GST_TAG_GEO_LOCATION_LATITUDE, QVariant::Double },
{ QMediaMetaData::GPSLongitude, GST_TAG_GEO_LOCATION_LONGITUDE, QVariant::Double },
{ QMediaMetaData::GPSAltitude, GST_TAG_GEO_LOCATION_ELEVATION, QVariant::Double },
{ QMediaMetaData::GPSTrack, GST_TAG_GEO_LOCATION_MOVEMENT_DIRECTION, QVariant::Double },
{ QMediaMetaData::GPSSpeed, GST_TAG_GEO_LOCATION_MOVEMENT_SPEED, QVariant::Double },
{ QMediaMetaData::GPSImgDirection, GST_TAG_GEO_LOCATION_CAPTURE_DIRECTION, QVariant::Double }
// GPS
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSLatitude, GST_TAG_GEO_LOCATION_LATITUDE, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSLongitude, GST_TAG_GEO_LOCATION_LONGITUDE, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSAltitude, GST_TAG_GEO_LOCATION_ELEVATION, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSTrack, GST_TAG_GEO_LOCATION_MOVEMENT_DIRECTION, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSSpeed, GST_TAG_GEO_LOCATION_MOVEMENT_SPEED, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSImgDirection, GST_TAG_GEO_LOCATION_CAPTURE_DIRECTION, QVariant::Double));
#endif
};
}

return metadataKeys;
}

CameraBinMetaData::CameraBinMetaData(QObject *parent)
:QMetaDataWriterControl(parent)
@@ -183,14 +198,9 @@ QVariant CameraBinMetaData::metaData(const QString &key) const
return (metersPerSec * 3600) / 1000;
}

static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);

for (int i = 0; i < count; ++i) {
if (qt_gstreamerMetaDataKeys[i].key == key) {
const char *name = qt_gstreamerMetaDataKeys[i].token;

return m_values.value(QByteArray::fromRawData(name, qstrlen(name)));
}
Q_FOREACH (const QGStreamerMetaDataKey &metadataKey, *qt_gstreamerMetaDataKeys()) {
if (metadataKey.qtName == key)
return m_values.value(QByteArray::fromRawData(metadataKey.gstName, qstrlen(metadataKey.gstName)));
}
return QVariant();
}
@@ -207,14 +217,12 @@ void CameraBinMetaData::setMetaData(const QString &key, const QVariant &value)
}
}

static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);

for (int i = 0; i < count; ++i) {
if (qt_gstreamerMetaDataKeys[i].key == key) {
const char *name = qt_gstreamerMetaDataKeys[i].token;
Q_FOREACH (const QGStreamerMetaDataKey &metadataKey, *qt_gstreamerMetaDataKeys()) {
if (metadataKey.qtName == key) {
const char *name = metadataKey.gstName;

if (correctedValue.isValid()) {
correctedValue.convert(qt_gstreamerMetaDataKeys[i].type);
correctedValue.convert(metadataKey.type);
m_values.insert(QByteArray::fromRawData(name, qstrlen(name)), correctedValue);
} else {
m_values.remove(QByteArray::fromRawData(name, qstrlen(name)));
@@ -232,14 +240,12 @@ QStringList CameraBinMetaData::availableMetaData() const
{
static QMap<QByteArray, QString> keysMap;
if (keysMap.isEmpty()) {
const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
keysMap[QByteArray(qt_gstreamerMetaDataKeys[i].token)] = qt_gstreamerMetaDataKeys[i].key;
}
Q_FOREACH (const QGStreamerMetaDataKey &metadataKey, *qt_gstreamerMetaDataKeys())
keysMap[QByteArray(metadataKey.gstName)] = metadataKey.qtName;
}

QStringList res;
foreach (const QByteArray &key, m_values.keys()) {
Q_FOREACH (const QByteArray &key, m_values.keys()) {
QString tag = keysMap.value(key);
if (!tag.isEmpty())
res.append(tag);

@@ -46,80 +46,82 @@
#include <gst/gst.h>
#include <gst/gstversion.h>

struct QGstreamerMetaDataKeyLookup

typedef QMap<QString, QByteArray> QGstreamerMetaDataKeyLookup;
Q_GLOBAL_STATIC(QGstreamerMetaDataKeyLookup, metadataKeys)

static const QGstreamerMetaDataKeyLookup *qt_gstreamerMetaDataKeys()
{
QString key;
const char *token;
};
if (metadataKeys->isEmpty()) {
metadataKeys->insert(QMediaMetaData::Title, GST_TAG_TITLE);
metadataKeys->insert(QMediaMetaData::SubTitle, 0);
//metadataKeys->insert(QMediaMetaData::Author, 0);
metadataKeys->insert(QMediaMetaData::Comment, GST_TAG_COMMENT);
metadataKeys->insert(QMediaMetaData::Description, GST_TAG_DESCRIPTION);
//metadataKeys->insert(QMediaMetaData::Category, 0);
metadataKeys->insert(QMediaMetaData::Genre, GST_TAG_GENRE);
//metadataKeys->insert(QMediaMetaData::Year, 0);
//metadataKeys->insert(QMediaMetaData::UserRating, 0);

static const QGstreamerMetaDataKeyLookup qt_gstreamerMetaDataKeys[] =
{
{ QMediaMetaData::Title, GST_TAG_TITLE },
//{ QMediaMetaData::SubTitle, 0 },
//{ QMediaMetaData::Author, 0 },
{ QMediaMetaData::Comment, GST_TAG_COMMENT },
{ QMediaMetaData::Description, GST_TAG_DESCRIPTION },
//{ QMediaMetaData::Category, 0 },
{ QMediaMetaData::Genre, GST_TAG_GENRE },
//{ QMediaMetaData::Year, 0 },
//{ QMediaMetaData::UserRating, 0 },
metadataKeys->insert(QMediaMetaData::Language, GST_TAG_LANGUAGE_CODE);

{ QMediaMetaData::Language, GST_TAG_LANGUAGE_CODE },
metadataKeys->insert(QMediaMetaData::Publisher, GST_TAG_ORGANIZATION);
metadataKeys->insert(QMediaMetaData::Copyright, GST_TAG_COPYRIGHT);
//metadataKeys->insert(QMediaMetaData::ParentalRating, 0);
//metadataKeys->insert(QMediaMetaData::RatingOrganisation, 0);

{ QMediaMetaData::Publisher, GST_TAG_ORGANIZATION },
{ QMediaMetaData::Copyright, GST_TAG_COPYRIGHT },
//{ QMediaMetaData::ParentalRating, 0 },
//{ QMediaMetaData::RatingOrganisation, 0 },
// Media
//metadataKeys->insert(QMediaMetaData::Size, 0);
//metadataKeys->insert(QMediaMetaData::MediaType, 0);
metadataKeys->insert(QMediaMetaData::Duration, GST_TAG_DURATION);

// Media
//{ QMediaMetaData::Size, 0 },
//{ QMediaMetaData::MediaType, 0 },
{ QMediaMetaData::Duration, GST_TAG_DURATION },
// Audio
metadataKeys->insert(QMediaMetaData::AudioBitRate, GST_TAG_BITRATE);
metadataKeys->insert(QMediaMetaData::AudioCodec, GST_TAG_AUDIO_CODEC);
//metadataKeys->insert(QMediaMetaData::ChannelCount, 0);
//metadataKeys->insert(QMediaMetaData::SampleRate, 0);

// Audio
{ QMediaMetaData::AudioBitRate, GST_TAG_BITRATE },
{ QMediaMetaData::AudioCodec, GST_TAG_AUDIO_CODEC },
//{ QMediaMetaData::ChannelCount, 0 },
//{ QMediaMetaData::SampleRate, 0 },

// Music
{ QMediaMetaData::AlbumTitle, GST_TAG_ALBUM },
{ QMediaMetaData::AlbumArtist, GST_TAG_ARTIST},
{ QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER },
// Music
metadataKeys->insert(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM);
metadataKeys->insert(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST);
metadataKeys->insert(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER);
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
{ QMediaMetaData::Composer, GST_TAG_COMPOSER },
metadataKeys->insert(QMediaMetaData::Composer, GST_TAG_COMPOSER);
#endif
//{ QMediaMetaData::Conductor, 0 },
//{ QMediaMetaData::Lyrics, 0 },
//{ QMediaMetaData::Mood, 0 },
{ QMediaMetaData::TrackNumber, GST_TAG_TRACK_NUMBER },
//metadataKeys->insert(QMediaMetaData::Conductor, 0);
//metadataKeys->insert(QMediaMetaData::Lyrics, 0);
//metadataKeys->insert(QMediaMetaData::Mood, 0);
metadataKeys->insert(QMediaMetaData::TrackNumber, GST_TAG_TRACK_NUMBER);

//{ QMediaMetaData::CoverArtUrlSmall, 0 },
//{ QMediaMetaData::CoverArtUrlLarge, 0 },
//metadataKeys->insert(QMediaMetaData::CoverArtUrlSmall, 0);
//metadataKeys->insert(QMediaMetaData::CoverArtUrlLarge, 0);

// Image/Video
//{ QMediaMetaData::Resolution, 0 },
//{ QMediaMetaData::PixelAspectRatio, 0 },
// Image/Video
//metadataKeys->insert(QMediaMetaData::Resolution, 0);
//metadataKeys->insert(QMediaMetaData::PixelAspectRatio, 0);

// Video
//{ QMediaMetaData::VideoFrameRate, 0 },
//{ QMediaMetaData::VideoBitRate, 0 },
{ QMediaMetaData::VideoCodec, GST_TAG_VIDEO_CODEC },
// Video
//metadataKeys->insert(QMediaMetaData::VideoFrameRate, 0);
//metadataKeys->insert(QMediaMetaData::VideoBitRate, 0);
metadataKeys->insert(QMediaMetaData::VideoCodec, GST_TAG_VIDEO_CODEC);

//{ QMediaMetaData::PosterUrl, 0 },
//metadataKeys->insert(QMediaMetaData::PosterUrl, 0);

// Movie
//{ QMediaMetaData::ChapterNumber, 0 },
//{ QMediaMetaData::Director, 0 },
{ QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER },
//{ QMediaMetaData::Writer, 0 },
// Movie
//metadataKeys->insert(QMediaMetaData::ChapterNumber, 0);
//metadataKeys->insert(QMediaMetaData::Director, 0);
metadataKeys->insert(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER);
//metadataKeys->insert(QMediaMetaData::Writer, 0);

// Photos
//{ QMediaMetaData::CameraManufacturer, 0 },
//{ QMediaMetaData::CameraModel, 0 },
//{ QMediaMetaData::Event, 0 },
//{ QMediaMetaData::Subject, 0 }
};
// Photos
//metadataKeys->insert(QMediaMetaData::CameraManufacturer, 0);
//metadataKeys->insert(QMediaMetaData::CameraModel, 0);
//metadataKeys->insert(QMediaMetaData::Event, 0);
//metadataKeys->insert(QMediaMetaData::Subject, 0 }
}

return metadataKeys;
}

QGstreamerCaptureMetaDataControl::QGstreamerCaptureMetaDataControl(QObject *parent)
:QMetaDataWriterControl(parent)
@@ -128,50 +130,30 @@ QGstreamerCaptureMetaDataControl::QGstreamerCaptureMetaDataControl(QObject *pare

QVariant QGstreamerCaptureMetaDataControl::metaData(const QString &key) const
{
static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);
QGstreamerMetaDataKeyLookup::const_iterator it = qt_gstreamerMetaDataKeys()->find(key);
if (it != qt_gstreamerMetaDataKeys()->constEnd())
return m_values.value(it.value());

for (int i = 0; i < count; ++i) {
if (qt_gstreamerMetaDataKeys[i].key == key) {
const char *name = qt_gstreamerMetaDataKeys[i].token;

return m_values.value(QByteArray::fromRawData(name, qstrlen(name)));
}
}
return QVariant();
}

void QGstreamerCaptureMetaDataControl::setMetaData(const QString &key, const QVariant &value)
{
static const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);
QGstreamerMetaDataKeyLookup::const_iterator it = qt_gstreamerMetaDataKeys()->find(key);
if (it != qt_gstreamerMetaDataKeys()->constEnd()) {
m_values.insert(it.value(), value);

for (int i = 0; i < count; ++i) {
if (qt_gstreamerMetaDataKeys[i].key == key) {
const char *name = qt_gstreamerMetaDataKeys[i].token;

m_values.insert(QByteArray::fromRawData(name, qstrlen(name)), value);

emit QMetaDataWriterControl::metaDataChanged();
emit QMetaDataWriterControl::metaDataChanged(key, value);
emit metaDataChanged(m_values);

return;
}
emit QMetaDataWriterControl::metaDataChanged();
emit QMetaDataWriterControl::metaDataChanged(key, value);
emit metaDataChanged(m_values);
}
}

QStringList QGstreamerCaptureMetaDataControl::availableMetaData() const
{
static QMap<QByteArray, QString> keysMap;
if (keysMap.isEmpty()) {
const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
keysMap[QByteArray(qt_gstreamerMetaDataKeys[i].token)] = qt_gstreamerMetaDataKeys[i].key;
}
}

QStringList res;
foreach (const QByteArray &key, m_values.keys()) {
QString tag = keysMap.value(key);
QString tag = qt_gstreamerMetaDataKeys()->key(key);
if (!tag.isEmpty())
res.append(tag);
}

@@ -48,90 +48,86 @@

QT_BEGIN_NAMESPACE

struct QGstreamerMetaDataKeyLookup
typedef QMap<QByteArray, QString> QGstreamerMetaDataKeyLookup;
Q_GLOBAL_STATIC(QGstreamerMetaDataKeyLookup, metadataKeys)

static const QGstreamerMetaDataKeyLookup *qt_gstreamerMetaDataKeys()
{
QString key;
const char *token;
};
if (metadataKeys->isEmpty()) {
metadataKeys->insert(GST_TAG_TITLE, QMediaMetaData::Title);
//metadataKeys->insert(0, QMediaMetaData::SubTitle);
//metadataKeys->insert(0, QMediaMetaData::Author);
metadataKeys->insert(GST_TAG_COMMENT, QMediaMetaData::Comment);
metadataKeys->insert(GST_TAG_DESCRIPTION, QMediaMetaData::Description);
//metadataKeys->insert(0, QMediaMetaData::Category);
metadataKeys->insert(GST_TAG_GENRE, QMediaMetaData::Genre);
metadataKeys->insert("year", QMediaMetaData::Year);
//metadataKeys->insert(0, QMediaMetaData::UserRating);

static const QGstreamerMetaDataKeyLookup qt_gstreamerMetaDataKeys[] =
{
{ QMediaMetaData::Title, GST_TAG_TITLE },
//{ QMediaMetaData::SubTitle, 0 },
//{ QMediaMetaData::Author, 0 },
{ QMediaMetaData::Comment, GST_TAG_COMMENT },
{ QMediaMetaData::Description, GST_TAG_DESCRIPTION },
//{ QMediaMetaData::Category, 0 },
{ QMediaMetaData::Genre, GST_TAG_GENRE },
{ QMediaMetaData::Year, "year" },
//{ QMediaMetaData::UserRating, 0 },
metadataKeys->insert(GST_TAG_LANGUAGE_CODE, QMediaMetaData::Language);

{ QMediaMetaData::Language, GST_TAG_LANGUAGE_CODE },
metadataKeys->insert(GST_TAG_ORGANIZATION, QMediaMetaData::Publisher);
metadataKeys->insert(GST_TAG_COPYRIGHT, QMediaMetaData::Copyright);
//metadataKeys->insert(0, QMediaMetaData::ParentalRating);
//metadataKeys->insert(0, QMediaMetaData::RatingOrganisation);

{ QMediaMetaData::Publisher, GST_TAG_ORGANIZATION },
{ QMediaMetaData::Copyright, GST_TAG_COPYRIGHT },
//{ QMediaMetaData::ParentalRating, 0 },
//{ QMediaMetaData::RatingOrganisation, 0 },
// Media
//metadataKeys->insert(0, QMediaMetaData::Size);
//metadataKeys->insert(0,QMediaMetaData::MediaType );
metadataKeys->insert(GST_TAG_DURATION, QMediaMetaData::Duration);

// Media
//{ QMediaMetaData::Size, 0 },
//{ QMediaMetaData::MediaType, 0 },
{ QMediaMetaData::Duration, GST_TAG_DURATION },
// Audio
metadataKeys->insert(GST_TAG_BITRATE, QMediaMetaData::AudioBitRate);
metadataKeys->insert(GST_TAG_AUDIO_CODEC, QMediaMetaData::AudioCodec);
//metadataKeys->insert(0, QMediaMetaData::ChannelCount);
//metadataKeys->insert(0, QMediaMetaData::SampleRate);

// Audio
{ QMediaMetaData::AudioBitRate, GST_TAG_BITRATE },
{ QMediaMetaData::AudioCodec, GST_TAG_AUDIO_CODEC },
//{ QMediaMetaData::ChannelCount, 0 },
//{ QMediaMetaData::SampleRate, 0 },

// Music
{ QMediaMetaData::AlbumTitle, GST_TAG_ALBUM },
{ QMediaMetaData::AlbumArtist, GST_TAG_ARTIST},
{ QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER },
// Music
metadataKeys->insert(GST_TAG_ALBUM, QMediaMetaData::AlbumTitle);
metadataKeys->insert(GST_TAG_ARTIST, QMediaMetaData::AlbumArtist);
metadataKeys->insert(GST_TAG_PERFORMER, QMediaMetaData::ContributingArtist);
#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
{ QMediaMetaData::Composer, GST_TAG_COMPOSER },
metadataKeys->insert(GST_TAG_COMPOSER, QMediaMetaData::Composer);
#endif
//{ QMediaMetaData::Conductor, 0 },
//{ QMediaMetaData::Lyrics, 0 },
//{ QMediaMetaData::Mood, 0 },
{ QMediaMetaData::TrackNumber, GST_TAG_TRACK_NUMBER },
//metadataKeys->insert(0, QMediaMetaData::Conductor);
//metadataKeys->insert(0, QMediaMetaData::Lyrics);
//metadataKeys->insert(0, QMediaMetaData::Mood);
metadataKeys->insert(GST_TAG_TRACK_NUMBER, QMediaMetaData::TrackNumber);

//{ QMediaMetaData::CoverArtUrlSmall, 0 },
//{ QMediaMetaData::CoverArtUrlLarge, 0 },
//metadataKeys->insert(0, QMediaMetaData::CoverArtUrlSmall);
//metadataKeys->insert(0, QMediaMetaData::CoverArtUrlLarge);

// Image/Video
{ QMediaMetaData::Resolution, "resolution" },
{ QMediaMetaData::PixelAspectRatio, "pixel-aspect-ratio" },
// Image/Video
metadataKeys->insert("resolution", QMediaMetaData::Resolution);
metadataKeys->insert("pixel-aspect-ratio", QMediaMetaData::PixelAspectRatio);

// Video
//{ QMediaMetaData::VideoFrameRate, 0 },
//{ QMediaMetaData::VideoBitRate, 0 },
{ QMediaMetaData::VideoCodec, GST_TAG_VIDEO_CODEC },
// Video
//metadataKeys->insert(0, QMediaMetaData::VideoFrameRate);
//metadataKeys->insert(0, QMediaMetaData::VideoBitRate);
metadataKeys->insert(GST_TAG_VIDEO_CODEC, QMediaMetaData::VideoCodec);

//{ QMediaMetaData::PosterUrl, 0 },
//metadataKeys->insert(0, QMediaMetaData::PosterUrl);

// Movie
//{ QMediaMetaData::ChapterNumber, 0 },
//{ QMediaMetaData::Director, 0 },
{ QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER },
//{ QMediaMetaData::Writer, 0 },
// Movie
//metadataKeys->insert(0, QMediaMetaData::ChapterNumber);
//metadataKeys->insert(0, QMediaMetaData::Director);
metadataKeys->insert(GST_TAG_PERFORMER, QMediaMetaData::LeadPerformer);
//metadataKeys->insert(0, QMediaMetaData::Writer);

// Photos
//{ QMediaMetaData::CameraManufacturer, 0 },
//{ QMediaMetaData::CameraModel, 0 },
//{ QMediaMetaData::Event, 0 },
//{ QMediaMetaData::Subject, 0 }
};
// Photos
//metadataKeys->insert(0, QMediaMetaData::CameraManufacturer);
//metadataKeys->insert(0, QMediaMetaData::CameraModel);
//metadataKeys->insert(0, QMediaMetaData::Event);
//metadataKeys->insert(0, QMediaMetaData::Subject);
}

return metadataKeys;
}

QGstreamerMetaDataProvider::QGstreamerMetaDataProvider(QGstreamerPlayerSession *session, QObject *parent)
:QMetaDataReaderControl(parent), m_session(session)
{
connect(m_session, SIGNAL(tagsChanged()), SLOT(updateTags()));

const int count = sizeof(qt_gstreamerMetaDataKeys) / sizeof(QGstreamerMetaDataKeyLookup);
for (int i = 0; i < count; ++i) {
m_keysMap[QByteArray(qt_gstreamerMetaDataKeys[i].token)] = qt_gstreamerMetaDataKeys[i].key;
}
}

QGstreamerMetaDataProvider::~QGstreamerMetaDataProvider()
@@ -167,8 +163,8 @@ void QGstreamerMetaDataProvider::updateTags()
QMapIterator<QByteArray ,QVariant> i(m_session->tags());
while (i.hasNext()) {
i.next();
//use gstreamer native keys for elements not in m_keysMap
QString key = m_keysMap.value(i.key(), i.key());
//use gstreamer native keys for elements not in our key map
QString key = qt_gstreamerMetaDataKeys()->value(i.key(), i.key());
m_tags.insert(key, i.value());
if (i.value() != oldTags.value(key)) {
changed = true;

@@ -67,7 +67,6 @@ private slots:
private:
QGstreamerPlayerSession *m_session;
QVariantMap m_tags;
QMap<QByteArray, QString> m_keysMap;
};

QT_END_NAMESPACE

@@ -1535,6 +1535,7 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
qDebug() << "Current source is a non-live source";
#endif

g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, NULL);

gst_object_unref(source);
}

@@ -568,7 +568,7 @@ void QOpenSLESAudioOutput::destroyPlayer()

qint64 QOpenSLESAudioOutput::writeData(const char *data, qint64 len)
{
if (!len)
if (!len || !m_availableBuffers.load())
return 0;

if (len > m_bufferSize)

@@ -208,7 +208,8 @@ void MmRendererMediaPlayerControl::attach()
if (m_videoWindowControl)
m_videoWindowControl->attachDisplay(m_context);

m_audioId = mmr_output_attach(m_context, "audio:default", "audio");
const QByteArray defaultAudioDevice = qgetenv("QQNX_RENDERER_DEFAULT_AUDIO_SINK");
m_audioId = mmr_output_attach(m_context, defaultAudioDevice.isEmpty() ? "audio:default" : defaultAudioDevice.constData(), "audio");
if (m_audioId == -1) {
emitMmError("mmr_output_attach() for audio failed");
return;

@@ -70,7 +70,7 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :

QSGVivanteVideoMaterial::~QSGVivanteVideoMaterial()
{
for (GLuint id : mBitsToTextureMap.values()) {
Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << "delete texture: " << id;
#endif
@@ -154,7 +154,7 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
mWidth = vF.width();
mHeight = vF.height();
mFormat = vF.pixelFormat();
for (GLuint id : mBitsToTextureMap.values()) {
Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << "delete texture: " << id;
#endif

@@ -572,6 +572,18 @@ namespace
QVideoSurfaceFormat format(QSize(width, height), m_pixelFormats[index]);
m_surfaceFormat = format;

MFVideoArea viewport;
if (SUCCEEDED(pMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE,
reinterpret_cast<UINT8*>(&viewport),
sizeof(MFVideoArea),
NULL))) {

m_surfaceFormat.setViewport(QRect(viewport.OffsetX.value,
viewport.OffsetY.value,
viewport.Area.cx,
viewport.Area.cy));
}

if (FAILED(pMediaType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&m_bytesPerLine))) {
m_bytesPerLine = getBytesPerLine(format);
}