Android: Don't expose java calls in the control classes.
Makes the abstraction more clear.

Change-Id: Ia9a7b0b157e1182158cd26b62775d13f6c5e1727
Reviewed-by: Yoann Lopes <yoann.lopes@digia.com>
commit b9b06defc7
parent 1ef3ef6a30
committed by The Qt Project
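In short, the control and output classes stop handing out raw jobject handles and instead exchange the AndroidSurfaceTexture wrapper, so the JNI plumbing stays inside the wrapper layer. A minimal sketch of the before/after shape of the interface (simplified stand-in names, not taken verbatim from the tree):

    // Sketch only: simplified stand-ins, not the real Qt classes.
    class AndroidSurfaceTexture;                 // hides the SurfaceTexture/Surface JNI calls

    class QAndroidVideoOutputSketch
    {
    public:
        virtual ~QAndroidVideoOutputSketch() { }
        // New shape: backends exchange the C++ wrapper ...
        virtual AndroidSurfaceTexture *surfaceTexture() { return 0; }
        // ... instead of the old raw JNI handles:
        //     virtual jobject surfaceHolder() = 0;
        //     virtual jobject surfaceTexture() { return 0; }
    };

    // A consumer (e.g. the media player backend) simply forwards the wrapper;
    // converting it to a Surface/SurfaceHolder now happens inside AndroidSurfaceTexture:
    //     mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());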
@@ -44,17 +44,17 @@

 #include <qglobal.h>
 #include <qsize.h>
-#include <jni.h>

 QT_BEGIN_NAMESPACE

+class AndroidSurfaceTexture;
+
 class QAndroidVideoOutput
 {
 public:
     virtual ~QAndroidVideoOutput() { }

-    virtual jobject surfaceHolder() = 0;
-    virtual jobject surfaceTexture() { return 0; }
+    virtual AndroidSurfaceTexture *surfaceTexture() { return 0; }

     virtual bool isReady() { return true; }

@@ -109,9 +109,7 @@ private:
 QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
     : QVideoRendererControl(parent)
     , m_surface(0)
-    , m_androidSurface(0)
     , m_surfaceTexture(0)
-    , m_surfaceHolder(0)
     , m_externalTex(0)
     , m_fbo(0)
     , m_program(0)
@@ -176,7 +174,7 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()

     m_surfaceTexture = new AndroidSurfaceTexture(m_externalTex);

-    if (m_surfaceTexture->object()) {
+    if (m_surfaceTexture->surfaceTexture() != 0) {
         connect(m_surfaceTexture, SIGNAL(frameAvailable()), this, SLOT(onFrameAvailable()));
     } else {
         delete m_surfaceTexture;
@@ -195,42 +193,14 @@ void QAndroidVideoRendererControl::clearSurfaceTexture()
         delete m_surfaceTexture;
         m_surfaceTexture = 0;
     }
-    if (m_androidSurface) {
-        if (QtAndroidPrivate::androidSdkVersion() > 13)
-            m_androidSurface->callMethod<void>("release");
-        delete m_androidSurface;
-        m_androidSurface = 0;
-    }
-    if (m_surfaceHolder) {
-        delete m_surfaceHolder;
-        m_surfaceHolder = 0;
-    }
 }

-jobject QAndroidVideoRendererControl::surfaceHolder()
+AndroidSurfaceTexture *QAndroidVideoRendererControl::surfaceTexture()
 {
     if (!initSurfaceTexture())
         return 0;

-    if (!m_surfaceHolder) {
-        m_androidSurface = new QJNIObjectPrivate("android/view/Surface",
-                                                 "(Landroid/graphics/SurfaceTexture;)V",
-                                                 m_surfaceTexture->object());
-
-        m_surfaceHolder = new QJNIObjectPrivate("org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder",
-                                                "(Landroid/view/Surface;)V",
-                                                m_androidSurface->object());
-    }
-
-    return m_surfaceHolder->object();
-}
-
-jobject QAndroidVideoRendererControl::surfaceTexture()
-{
-    if (!initSurfaceTexture())
-        return 0;
-
-    return m_surfaceTexture->object();
+    return m_surfaceTexture;
 }

 void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
@@ -86,8 +86,7 @@ public:
     QAbstractVideoSurface *surface() const Q_DECL_OVERRIDE;
     void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;

-    jobject surfaceHolder() Q_DECL_OVERRIDE;
-    jobject surfaceTexture() Q_DECL_OVERRIDE;
+    AndroidSurfaceTexture *surfaceTexture() Q_DECL_OVERRIDE;
     bool isReady() Q_DECL_OVERRIDE;
     void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
     void stop() Q_DECL_OVERRIDE;
@@ -112,9 +111,7 @@ private:
     QAbstractVideoSurface *m_surface;
     QSize m_nativeSize;

-    QJNIObjectPrivate *m_androidSurface;
     AndroidSurfaceTexture *m_surfaceTexture;
-    QJNIObjectPrivate *m_surfaceHolder;

     quint32 m_externalTex;
     QOpenGLFramebufferObject *m_fbo;
@@ -183,36 +183,10 @@ void QAndroidCameraSession::updateAvailableCameras()
 {
     g_availableCameras->clear();

-    const QJNIObjectPrivate cameraInfo("android/hardware/Camera$CameraInfo");
-    const int numCameras = QJNIObjectPrivate::callStaticMethod<jint>("android/hardware/Camera",
-                                                                     "getNumberOfCameras");
-
+    const int numCameras = AndroidCamera::getNumberOfCameras();
     for (int i = 0; i < numCameras; ++i) {
         AndroidCameraInfo info;
-
-        QJNIObjectPrivate::callStaticMethod<void>("android/hardware/Camera",
-                                                  "getCameraInfo",
-                                                  "(ILandroid/hardware/Camera$CameraInfo;)V",
-                                                  i, cameraInfo.object());
-
-        AndroidCamera::CameraFacing facing = AndroidCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
-        // The orientation provided by Android is counter-clockwise, we need it clockwise
-        info.orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;
-
-        switch (facing) {
-        case AndroidCamera::CameraFacingBack:
-            info.name = QByteArray("back");
-            info.description = QStringLiteral("Rear-facing camera");
-            info.position = QCamera::BackFace;
-            break;
-        case AndroidCamera::CameraFacingFront:
-            info.name = QByteArray("front");
-            info.description = QStringLiteral("Front-facing camera");
-            info.position = QCamera::FrontFace;
-            break;
-        default:
-            break;
-        }
+        AndroidCamera::getCameraInfo(i, &info);

         if (!info.name.isNull())
             g_availableCameras->append(info);
@@ -48,21 +48,13 @@
 #include <QSet>
 #include <QMutex>
 #include <private/qmediastoragelocation_p.h>
+#include "androidcamera.h"

 QT_BEGIN_NAMESPACE

-class AndroidCamera;
 class QAndroidVideoOutput;
 class QAndroidMediaVideoProbeControl;

-struct AndroidCameraInfo
-{
-    QByteArray name;
-    QString description;
-    QCamera::Position position;
-    int orientation;
-};
-
 class QAndroidCameraSession : public QObject
 {
     Q_OBJECT
@@ -455,7 +455,7 @@ void QAndroidCaptureSession::onCameraOpened()
     for (int i = 0; i < 8; ++i) {
         CaptureProfile profile = getProfile(i);
         if (!profile.isNull) {
-            if (i == 1) // QUALITY_HIGH
+            if (i == AndroidCamcorderProfile::QUALITY_HIGH)
                 m_defaultSettings = profile;

             if (!m_supportedResolutions.contains(profile.videoResolution))
@@ -474,30 +474,23 @@ void QAndroidCaptureSession::onCameraOpened()
 QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id)
 {
     CaptureProfile profile;
-    bool hasProfile = QJNIObjectPrivate::callStaticMethod<jboolean>("android/media/CamcorderProfile",
-                                                                    "hasProfile",
-                                                                    "(II)Z",
-                                                                    m_cameraSession->camera()->cameraId(),
-                                                                    id);
+    const bool hasProfile = AndroidCamcorderProfile::hasProfile(m_cameraSession->camera()->cameraId(),
+                                                                AndroidCamcorderProfile::Quality(id));

     if (hasProfile) {
-        QJNIObjectPrivate obj = QJNIObjectPrivate::callStaticObjectMethod("android/media/CamcorderProfile",
-                                                                          "get",
-                                                                          "(II)Landroid/media/CamcorderProfile;",
-                                                                          m_cameraSession->camera()->cameraId(),
-                                                                          id);
+        AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(m_cameraSession->camera()->cameraId(),
+                                                                          AndroidCamcorderProfile::Quality(id));

-
-        profile.outputFormat = AndroidMediaRecorder::OutputFormat(obj.getField<jint>("fileFormat"));
-        profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(obj.getField<jint>("audioCodec"));
-        profile.audioBitRate = obj.getField<jint>("audioBitRate");
-        profile.audioChannels = obj.getField<jint>("audioChannels");
-        profile.audioSampleRate = obj.getField<jint>("audioSampleRate");
-        profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(obj.getField<jint>("videoCodec"));
-        profile.videoBitRate = obj.getField<jint>("videoBitRate");
-        profile.videoFrameRate = obj.getField<jint>("videoFrameRate");
-        profile.videoResolution = QSize(obj.getField<jint>("videoFrameWidth"),
-                                        obj.getField<jint>("videoFrameHeight"));
+        profile.outputFormat = AndroidMediaRecorder::OutputFormat(camProfile.getValue(AndroidCamcorderProfile::fileFormat));
+        profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(camProfile.getValue(AndroidCamcorderProfile::audioCodec));
+        profile.audioBitRate = camProfile.getValue(AndroidCamcorderProfile::audioBitRate);
+        profile.audioChannels = camProfile.getValue(AndroidCamcorderProfile::audioChannels);
+        profile.audioSampleRate = camProfile.getValue(AndroidCamcorderProfile::audioSampleRate);
+        profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(camProfile.getValue(AndroidCamcorderProfile::videoCodec));
+        profile.videoBitRate = camProfile.getValue(AndroidCamcorderProfile::videoBitRate);
+        profile.videoFrameRate = camProfile.getValue(AndroidCamcorderProfile::videoFrameRate);
+        profile.videoResolution = QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
+                                        camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));

         if (profile.outputFormat == AndroidMediaRecorder::MPEG_4)
             profile.outputFileExtension = QStringLiteral("mp4");
@@ -320,8 +320,8 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
     if (mVideoSize.isValid() && mVideoOutput)
         mVideoOutput->setVideoSize(mVideoSize);

-    if (!mMediaPlayer->display() && mVideoOutput)
-        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+    if ((mMediaPlayer->display() == 0) && mVideoOutput)
+        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
     mMediaPlayer->setDataSource(mediaPath);
     mMediaPlayer->prepareAsync();

@@ -345,7 +345,7 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
         return;

     if (mVideoOutput->isReady())
-        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());

     connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
 }
@@ -590,8 +590,8 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)

 void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
 {
-    if (!mMediaPlayer->display() && mVideoOutput && ready)
-        mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+    if ((mMediaPlayer->display() == 0) && mVideoOutput && ready)
+        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());

     flushPendingStates();
 }
@@ -40,10 +40,11 @@
 ****************************************************************************/

 #include "androidcamera.h"
+#include "androidsurfacetexture.h"
+#include "qandroidmultimediautils.h"

 #include <qstringlist.h>
 #include <qdebug.h>
-#include "qandroidmultimediautils.h"
 #include <qmutex.h>
 #include <QtCore/private/qjnihelpers_p.h>
 #include <QtCore/qthread.h>
@@ -367,10 +368,13 @@ void AndroidCamera::setPreviewSize(const QSize &size)
     QMetaObject::invokeMethod(d, "updatePreviewSize");
 }

-void AndroidCamera::setPreviewTexture(jobject surfaceTexture)
+void AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
 {
     Q_D(AndroidCamera);
-    QMetaObject::invokeMethod(d, "setPreviewTexture", Qt::BlockingQueuedConnection, Q_ARG(void *, surfaceTexture));
+    QMetaObject::invokeMethod(d,
+                              "setPreviewTexture",
+                              Qt::BlockingQueuedConnection,
+                              Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : 0));
 }

 bool AndroidCamera::isZoomSupported()
@@ -633,6 +637,42 @@ QJNIObjectPrivate AndroidCamera::getCameraObject()
     return d->m_camera;
 }

+int AndroidCamera::getNumberOfCameras()
+{
+    return QJNIObjectPrivate::callStaticMethod<jint>("android/hardware/Camera",
+                                                     "getNumberOfCameras");
+}
+
+void AndroidCamera::getCameraInfo(int id, AndroidCameraInfo *info)
+{
+    Q_ASSERT(info);
+
+    QJNIObjectPrivate cameraInfo("android/hardware/Camera$CameraInfo");
+    QJNIObjectPrivate::callStaticMethod<void>("android/hardware/Camera",
+                                              "getCameraInfo",
+                                              "(ILandroid/hardware/Camera$CameraInfo;)V",
+                                              id, cameraInfo.object());
+
+    AndroidCamera::CameraFacing facing = AndroidCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
+    // The orientation provided by Android is counter-clockwise, we need it clockwise
+    info->orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;
+
+    switch (facing) {
+    case AndroidCamera::CameraFacingBack:
+        info->name = QByteArray("back");
+        info->description = QStringLiteral("Rear-facing camera");
+        info->position = QCamera::BackFace;
+        break;
+    case AndroidCamera::CameraFacingFront:
+        info->name = QByteArray("front");
+        info->description = QStringLiteral("Front-facing camera");
+        info->position = QCamera::FrontFace;
+        break;
+    default:
+        break;
+    }
+}
+
 void AndroidCamera::startPreview()
 {
     Q_D(AndroidCamera);
@@ -46,12 +46,22 @@
 #include <QtCore/private/qjni_p.h>
 #include <qsize.h>
 #include <qrect.h>
+#include <QtMultimedia/qcamera.h>

 QT_BEGIN_NAMESPACE

 class QThread;

 class AndroidCameraPrivate;
+class AndroidSurfaceTexture;
+
+struct AndroidCameraInfo
+{
+    QByteArray name;
+    QString description;
+    QCamera::Position position;
+    int orientation;
+};

 class AndroidCamera : public QObject
 {
@@ -96,7 +106,7 @@ public:

     QSize previewSize() const;
     void setPreviewSize(const QSize &size);
-    void setPreviewTexture(jobject surfaceTexture);
+    void setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);

     bool isZoomSupported();
     int getMaxZoom();
@@ -157,6 +167,9 @@ public:
     void fetchLastPreviewFrame();
     QJNIObjectPrivate getCameraObject();

+    static int getNumberOfCameras();
+    static void getCameraInfo(int id, AndroidCameraInfo *info);
+
     static bool initJNI(JNIEnv *env);

 Q_SIGNALS:
@@ -44,6 +44,7 @@
 #include <QString>
 #include <QtCore/private/qjni_p.h>
 #include <QtCore/private/qjnihelpers_p.h>
+#include "androidsurfacetexture.h"
 #include <QMap>

 static jclass mediaPlayerClass = Q_NULLPTR;
@@ -150,9 +151,11 @@ void AndroidMediaPlayer::setVolume(int volume)
     mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
 }

-void AndroidMediaPlayer::setDisplay(jobject surfaceHolder)
+void AndroidMediaPlayer::setDisplay(AndroidSurfaceTexture *surfaceTexture)
 {
-    mMediaPlayer.callMethod<void>("setDisplay", "(Landroid/view/SurfaceHolder;)V", surfaceHolder);
+    mMediaPlayer.callMethod<void>("setDisplay",
+                                  "(Landroid/view/SurfaceHolder;)V",
+                                  surfaceTexture ? surfaceTexture->surfaceHolder() : 0);
 }

 static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
@@ -47,6 +47,8 @@

 QT_BEGIN_NAMESPACE

+class AndroidSurfaceTexture;
+
 class AndroidMediaPlayer : public QObject
 {
     Q_OBJECT
@@ -113,7 +115,7 @@ public:
     void setDataSource(const QString &path);
     void prepareAsync();
     void setVolume(int volume);
-    void setDisplay(jobject surfaceHolder);
+    void setDisplay(AndroidSurfaceTexture *surfaceTexture);

     static bool initJNI(JNIEnv *env);
@@ -47,6 +47,80 @@

 QT_BEGIN_NAMESPACE

+typedef QMap<QString, QJNIObjectPrivate> CamcorderProfiles;
+Q_GLOBAL_STATIC(CamcorderProfiles, g_camcorderProfiles)
+
+static QString profileKey()
+{
+    return QStringLiteral("%1-%2");
+}
+
+bool AndroidCamcorderProfile::hasProfile(jint cameraId, Quality quality)
+{
+    if (g_camcorderProfiles->contains(profileKey().arg(cameraId).arg(quality)))
+        return true;
+
+    return QJNIObjectPrivate::callStaticMethod<jboolean>("android/media/CamcorderProfile",
+                                                         "hasProfile",
+                                                         "(II)Z",
+                                                         cameraId,
+                                                         quality);
+}
+
+AndroidCamcorderProfile AndroidCamcorderProfile::get(jint cameraId, Quality quality)
+{
+    const QString key = profileKey().arg(cameraId).arg(quality);
+    QMap<QString, QJNIObjectPrivate>::const_iterator it = g_camcorderProfiles->constFind(key);
+
+    if (it != g_camcorderProfiles->constEnd())
+        return AndroidCamcorderProfile(*it);
+
+    QJNIObjectPrivate camProfile = QJNIObjectPrivate::callStaticObjectMethod("android/media/CamcorderProfile",
+                                                                             "get",
+                                                                             "(II)Landroid/media/CamcorderProfile;",
+                                                                             cameraId,
+                                                                             quality);
+
+    return AndroidCamcorderProfile((*g_camcorderProfiles)[key] = camProfile);
+}
+
+int AndroidCamcorderProfile::getValue(AndroidCamcorderProfile::Field field) const
+{
+    switch (field) {
+    case audioBitRate:
+        return m_camcorderProfile.getField<jint>("audioBitRate");
+    case audioChannels:
+        return m_camcorderProfile.getField<jint>("audioChannels");
+    case audioCodec:
+        return m_camcorderProfile.getField<jint>("audioCodec");
+    case audioSampleRate:
+        return m_camcorderProfile.getField<jint>("audioSampleRate");
+    case duration:
+        return m_camcorderProfile.getField<jint>("duration");
+    case fileFormat:
+        return m_camcorderProfile.getField<jint>("fileFormat");
+    case quality:
+        return m_camcorderProfile.getField<jint>("quality");
+    case videoBitRate:
+        return m_camcorderProfile.getField<jint>("videoBitRate");
+    case videoCodec:
+        return m_camcorderProfile.getField<jint>("videoCodec");
+    case videoFrameHeight:
+        return m_camcorderProfile.getField<jint>("videoFrameHeight");
+    case videoFrameRate:
+        return m_camcorderProfile.getField<jint>("videoFrameRate");
+    case videoFrameWidth:
+        return m_camcorderProfile.getField<jint>("videoFrameWidth");
+    }
+
+    return 0;
+}
+
+AndroidCamcorderProfile::AndroidCamcorderProfile(const QJNIObjectPrivate &camcorderProfile)
+{
+    m_camcorderProfile = camcorderProfile;
+}
+
 static jclass g_qtMediaRecorderListenerClass = 0;
 typedef QMap<jlong, AndroidMediaRecorder*> MediaRecorderMap;
 Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)
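The wrapper above also caches the Java CamcorderProfile objects in g_camcorderProfiles, keyed by a "cameraId-quality" string, so repeated lookups skip the static JNI calls. A hedged usage sketch of the new API (the cameraId value and the chosen quality level are illustrative, not from the patch):

    // Illustration only: reading recorder settings through the wrapper instead of
    // querying android/media/CamcorderProfile fields directly over JNI.
    const int cameraId = 0; // assumed camera id for the sketch
    if (AndroidCamcorderProfile::hasProfile(cameraId, AndroidCamcorderProfile::QUALITY_720P)) {
        AndroidCamcorderProfile profile =
                AndroidCamcorderProfile::get(cameraId, AndroidCamcorderProfile::QUALITY_720P);
        const int width  = profile.getValue(AndroidCamcorderProfile::videoFrameWidth);
        const int height = profile.getValue(AndroidCamcorderProfile::videoFrameHeight);
        const int fps    = profile.getValue(AndroidCamcorderProfile::videoFrameRate);
        // feed width/height/fps into the recorder or capture settings as needed
    }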
@@ -50,6 +50,44 @@ QT_BEGIN_NAMESPACE

 class AndroidCamera;

+class AndroidCamcorderProfile
+{
+public:
+    enum Quality { // Needs to match CamcorderProfile
+        QUALITY_LOW,
+        QUALITY_HIGH,
+        QUALITY_QCIF,
+        QUALITY_CIF,
+        QUALITY_480P,
+        QUALITY_720P,
+        QUALITY_1080P,
+        QUALITY_QVGA
+    };
+
+    enum Field {
+        audioBitRate,
+        audioChannels,
+        audioCodec,
+        audioSampleRate,
+        duration,
+        fileFormat,
+        quality,
+        videoBitRate,
+        videoCodec,
+        videoFrameHeight,
+        videoFrameRate,
+        videoFrameWidth
+    };
+
+    static bool hasProfile(jint cameraId, Quality quality);
+    static AndroidCamcorderProfile get(jint cameraId, Quality quality);
+    int getValue(Field field) const;
+
+private:
+    AndroidCamcorderProfile(const QJNIObjectPrivate &camcorderProfile);
+    QJNIObjectPrivate m_camcorderProfile;
+};
+
 class AndroidMediaRecorder : public QObject
 {
     Q_OBJECT
@@ -86,6 +86,9 @@ AndroidSurfaceTexture::AndroidSurfaceTexture(unsigned int texName)

 AndroidSurfaceTexture::~AndroidSurfaceTexture()
 {
+    if (QtAndroidPrivate::androidSdkVersion() > 13 && m_surfaceView.isValid())
+        m_surfaceView.callMethod<void>("release");
+
     if (m_surfaceTexture.isValid()) {
         release();
         g_objectMap.remove(m_texID);
@@ -124,11 +127,31 @@ void AndroidSurfaceTexture::updateTexImage()
     m_surfaceTexture.callMethod<void>("updateTexImage");
 }

-jobject AndroidSurfaceTexture::object()
+jobject AndroidSurfaceTexture::surfaceTexture()
 {
     return m_surfaceTexture.object();
 }

+jobject AndroidSurfaceTexture::surfaceView()
+{
+    return m_surfaceView.object();
+}
+
+jobject AndroidSurfaceTexture::surfaceHolder()
+{
+    if (!m_surfaceHolder.isValid()) {
+        m_surfaceView = QJNIObjectPrivate("android/view/Surface",
+                                          "(Landroid/graphics/SurfaceTexture;)V",
+                                          m_surfaceTexture.object());
+
+        m_surfaceHolder = QJNIObjectPrivate("org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder",
+                                            "(Landroid/view/Surface;)V",
+                                            m_surfaceView.object());
+    }
+
+    return m_surfaceHolder.object();
+}
+
 static JNINativeMethod methods[] = {
     {"notifyFrameAvailable", "(I)V", (void *)notifyFrameAvailable}
 };
@@ -57,7 +57,10 @@ public:
     ~AndroidSurfaceTexture();

     int textureID() const { return m_texID; }
-    jobject object();
+    jobject surfaceTexture();
+    jobject surfaceView();
+    jobject surfaceHolder();
     inline bool isValid() const { return m_surfaceTexture.isValid(); }

     QMatrix4x4 getTransformMatrix();
     void release(); // API level 14
@@ -71,6 +74,8 @@ Q_SIGNALS:
 private:
     int m_texID;
     QJNIObjectPrivate m_surfaceTexture;
+    QJNIObjectPrivate m_surfaceView;
+    QJNIObjectPrivate m_surfaceHolder;
 };

 QT_END_NAMESPACE