Android: Rename the wrapper classes

Change-Id: I2ce15c8475da3186f128ba59b7c58f9b5b0a67e1
Reviewed-by: Yoann Lopes <yoann.lopes@digia.com>
Author: Christian Strømme
Date: 2014-05-09 17:32:28 +02:00
Committed by: The Qt Project
Parent: efa4628e74
Commit: 9214742012
34 changed files with 533 additions and 533 deletions
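
The rename is mechanical: each Java-wrapper class drops its "J" prefix in favour of an "Android" prefix (JCamera -> AndroidCamera, JMediaPlayer -> AndroidMediaPlayer, JMediaRecorder -> AndroidMediaRecorder, JMediaMetadataRetriever -> AndroidMediaMetadataRetriever, JMultimediaUtils -> AndroidMultimediaUtils, JSurfaceTexture -> AndroidSurfaceTexture), and the headers move from j*.h to android*.h. For orientation, here is a caller-side sketch; it is not part of the commit, the helper function is hypothetical, and it uses only wrapper methods that appear in the diff below:

#include "androidcamera.h"          // was: #include "jcamera.h"
#include "androidmediarecorder.h"   // was: #include "jmediarecorder.h"

// Hypothetical helper, for illustration only.
static void recordVideoSketch(int cameraId, const QString &outputFile)
{
    AndroidCamera *camera = AndroidCamera::open(cameraId);    // was JCamera::open()
    if (!camera)
        return;

    AndroidMediaRecorder recorder;                             // was JMediaRecorder
    camera->unlock();                                          // hand the camera over to the recorder
    recorder.setCamera(camera);
    recorder.setAudioSource(AndroidMediaRecorder::Camcorder);  // was JMediaRecorder::Camcorder
    recorder.setVideoSource(AndroidMediaRecorder::Camera);     // was JMediaRecorder::Camera
    recorder.setOutputFormat(AndroidMediaRecorder::MPEG_4);
    recorder.setAudioEncoder(AndroidMediaRecorder::DefaultAudioEncoder);
    recorder.setVideoEncoder(AndroidMediaRecorder::DefaultVideoEncoder);
    recorder.setOutputFile(outputFile);
    if (recorder.prepare())
        recorder.start();
}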


@@ -40,8 +40,8 @@
****************************************************************************/
#include "qandroidvideorendercontrol.h"
#include "androidsurfacetexture.h"
#include <QtCore/private/qjni_p.h>
#include <QAbstractVideoSurface>
#include <QVideoSurfaceFormat>
#include <qevent.h>
@@ -50,7 +50,6 @@
#include <qopenglfunctions.h>
#include <qopenglshaderprogram.h>
#include <qopenglframebufferobject.h>
#include <QtCore/private/qjnihelpers_p.h>
QT_BEGIN_NAMESPACE
@@ -175,7 +174,7 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
return false;
}
m_surfaceTexture = new JSurfaceTexture(m_externalTex);
m_surfaceTexture = new AndroidSurfaceTexture(m_externalTex);
if (m_surfaceTexture->object()) {
connect(m_surfaceTexture, SIGNAL(frameAvailable()), this, SLOT(onFrameAvailable()));


@@ -45,13 +45,13 @@
#include <qvideorenderercontrol.h>
#include <qmutex.h>
#include "qandroidvideooutput.h"
#include "jsurfacetexture.h"
QT_BEGIN_NAMESPACE
class QOpenGLTexture;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class AndroidSurfaceTexture;
class OpenGLResourcesDeleter : public QObject
{
@@ -113,7 +113,7 @@ private:
QSize m_nativeSize;
QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture;
AndroidSurfaceTexture *m_surfaceTexture;
QJNIObjectPrivate *m_surfaceHolder;
quint32 m_externalTex;


@@ -42,7 +42,7 @@
#include "qandroidcameraexposurecontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -42,7 +42,7 @@
#include "qandroidcameraflashcontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -42,7 +42,7 @@
#include "qandroidcamerafocuscontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -42,7 +42,7 @@
#include "qandroidcameraimageprocessingcontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -42,7 +42,7 @@
#include "qandroidcameralockscontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
#include <qtimer.h>
QT_BEGIN_NAMESPACE


@@ -41,8 +41,8 @@
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "jmultimediautils.h"
#include "androidcamera.h"
#include "androidmultimediautils.h"
#include "qandroidvideooutput.h"
#include "qandroidmediavideoprobecontrol.h"
#include "qandroidmultimediautils.h"
@@ -113,7 +113,7 @@ QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
{
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Pictures,
JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM));
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM));
if (qApp) {
connect(qApp, SIGNAL(applicationStateChanged(Qt::ApplicationState)),
@@ -195,17 +195,17 @@ void QAndroidCameraSession::updateAvailableCameras()
"(ILandroid/hardware/Camera$CameraInfo;)V",
i, cameraInfo.object());
JCamera::CameraFacing facing = JCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
AndroidCamera::CameraFacing facing = AndroidCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
// The orientation provided by Android is counter-clockwise, we need it clockwise
info.orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;
switch (facing) {
case JCamera::CameraFacingBack:
case AndroidCamera::CameraFacingBack:
info.name = QByteArray("back");
info.description = QStringLiteral("Rear-facing camera");
info.position = QCamera::BackFace;
break;
case JCamera::CameraFacingFront:
case AndroidCamera::CameraFacingFront:
info.name = QByteArray("front");
info.description = QStringLiteral("Front-facing camera");
info.position = QCamera::FrontFace;
@@ -234,7 +234,7 @@ bool QAndroidCameraSession::open()
m_status = QCamera::LoadingStatus;
emit statusChanged(m_status);
m_camera = JCamera::open(m_selectedCamera);
m_camera = AndroidCamera::open(m_selectedCamera);
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
@@ -250,8 +250,8 @@ bool QAndroidCameraSession::open()
m_status = QCamera::LoadedStatus;
if (m_camera->getPreviewFormat() != JCamera::NV21)
m_camera->setPreviewFormat(JCamera::NV21);
if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
m_camera->setPreviewFormat(AndroidCamera::NV21);
m_camera->fetchEachFrame(m_videoProbes.count());
@@ -356,7 +356,7 @@ void QAndroidCameraSession::startPreview()
if (m_videoOutput && m_videoOutput->isReady())
onVideoOutputReady(true);
JMultimediaUtils::enableOrientationListener(true);
AndroidMultimediaUtils::enableOrientationListener(true);
m_camera->startPreview();
m_previewStarted = true;
@@ -370,7 +370,7 @@ void QAndroidCameraSession::stopPreview()
m_status = QCamera::StoppingStatus;
emit statusChanged(m_status);
JMultimediaUtils::enableOrientationListener(false);
AndroidMultimediaUtils::enableOrientationListener(false);
m_camera->stopPreview();
m_camera->setPreviewSize(QSize());
@@ -407,8 +407,8 @@ int QAndroidCameraSession::currentCameraRotation() const
// subtract natural camera orientation and physical device orientation
int rotation = 0;
int deviceOrientation = (JMultimediaUtils::getDeviceOrientation() + 45) / 90 * 90;
if (m_camera->getFacing() == JCamera::CameraFacingFront)
int deviceOrientation = (AndroidMultimediaUtils::getDeviceOrientation() + 45) / 90 * 90;
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
rotation = (m_nativeOrientation - deviceOrientation + 360) % 360;
else // back-facing camera
rotation = (m_nativeOrientation + deviceOrientation) % 360;
@@ -658,9 +658,9 @@ void QAndroidCameraSession::processCapturedImage(int id,
// if the picture is saved into the standard picture location, register it
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
QString standardLoc = JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM);
QString standardLoc = AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM);
if (actualFileName.startsWith(standardLoc))
JMultimediaUtils::registerMediaFile(actualFileName);
AndroidMultimediaUtils::registerMediaFile(actualFileName);
emit imageSaved(id, actualFileName);
} else {
@@ -697,7 +697,7 @@ QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == JCamera::CameraFacingFront)
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
transform.rotate(rotation);


@@ -51,7 +51,7 @@
QT_BEGIN_NAMESPACE
class JCamera;
class AndroidCamera;
class QAndroidVideoOutput;
class QAndroidMediaVideoProbeControl;
@@ -73,7 +73,7 @@ public:
static const QList<AndroidCameraInfo> &availableCameras();
void setSelectedCamera(int cameraId) { m_selectedCamera = cameraId; }
JCamera *camera() const { return m_camera; }
AndroidCamera *camera() const { return m_camera; }
QCamera::State state() const { return m_state; }
void setState(QCamera::State state);
@@ -154,7 +154,7 @@ private:
const QString &fileName);
int m_selectedCamera;
JCamera *m_camera;
AndroidCamera *m_camera;
int m_nativeOrientation;
QAndroidVideoOutput *m_videoOutput;


@@ -42,7 +42,7 @@
#include "qandroidcamerazoomcontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
#include "qandroidmultimediautils.h"
#include <qmath.h>


@@ -41,11 +41,10 @@
#include "qandroidcapturesession.h"
#include "jcamera.h"
#include "androidcamera.h"
#include "qandroidcamerasession.h"
#include "jmultimediautils.h"
#include "androidmultimediautils.h"
#include "qandroidmultimediautils.h"
#include <QtCore/private/qjni_p.h>
QT_BEGIN_NAMESPACE
@@ -53,7 +52,7 @@ QAndroidCaptureSession::QAndroidCaptureSession(QAndroidCameraSession *cameraSess
: QObject()
, m_mediaRecorder(0)
, m_cameraSession(cameraSession)
, m_audioSource(JMediaRecorder::DefaultAudioSource)
, m_audioSource(AndroidMediaRecorder::DefaultAudioSource)
, m_duration(0)
, m_state(QMediaRecorder::StoppedState)
, m_status(QMediaRecorder::UnloadedStatus)
@@ -61,17 +60,17 @@ QAndroidCaptureSession::QAndroidCaptureSession(QAndroidCameraSession *cameraSess
, m_containerFormatDirty(true)
, m_videoSettingsDirty(true)
, m_audioSettingsDirty(true)
, m_outputFormat(JMediaRecorder::DefaultOutputFormat)
, m_audioEncoder(JMediaRecorder::DefaultAudioEncoder)
, m_videoEncoder(JMediaRecorder::DefaultVideoEncoder)
, m_outputFormat(AndroidMediaRecorder::DefaultOutputFormat)
, m_audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
, m_videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
{
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Movies,
JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM));
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM));
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Sounds,
JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds));
AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::Sounds));
connect(this, SIGNAL(stateChanged(QMediaRecorder::State)), this, SLOT(updateStatus()));
@@ -103,19 +102,19 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
m_audioInput = input;
if (m_audioInput == QLatin1String("default"))
m_audioSource = JMediaRecorder::DefaultAudioSource;
m_audioSource = AndroidMediaRecorder::DefaultAudioSource;
else if (m_audioInput == QLatin1String("mic"))
m_audioSource = JMediaRecorder::Mic;
m_audioSource = AndroidMediaRecorder::Mic;
else if (m_audioInput == QLatin1String("voice_uplink"))
m_audioSource = JMediaRecorder::VoiceUplink;
m_audioSource = AndroidMediaRecorder::VoiceUplink;
else if (m_audioInput == QLatin1String("voice_downlink"))
m_audioSource = JMediaRecorder::VoiceDownlink;
m_audioSource = AndroidMediaRecorder::VoiceDownlink;
else if (m_audioInput == QLatin1String("voice_call"))
m_audioSource = JMediaRecorder::VoiceCall;
m_audioSource = AndroidMediaRecorder::VoiceCall;
else if (m_audioInput == QLatin1String("voice_recognition"))
m_audioSource = JMediaRecorder::VoiceRecognition;
m_audioSource = AndroidMediaRecorder::VoiceRecognition;
else
m_audioSource = JMediaRecorder::DefaultAudioSource;
m_audioSource = AndroidMediaRecorder::DefaultAudioSource;
emit audioInputChanged(m_audioInput);
}
@@ -184,7 +183,7 @@ bool QAndroidCaptureSession::start()
m_mediaRecorder->release();
delete m_mediaRecorder;
}
m_mediaRecorder = new JMediaRecorder;
m_mediaRecorder = new AndroidMediaRecorder;
connect(m_mediaRecorder, SIGNAL(error(int,int)), this, SLOT(onError(int,int)));
connect(m_mediaRecorder, SIGNAL(info(int,int)), this, SLOT(onInfo(int,int)));
@@ -193,8 +192,8 @@ bool QAndroidCaptureSession::start()
updateViewfinder();
m_cameraSession->camera()->unlock();
m_mediaRecorder->setCamera(m_cameraSession->camera());
m_mediaRecorder->setAudioSource(JMediaRecorder::Camcorder);
m_mediaRecorder->setVideoSource(JMediaRecorder::Camera);
m_mediaRecorder->setAudioSource(AndroidMediaRecorder::Camcorder);
m_mediaRecorder->setVideoSource(AndroidMediaRecorder::Camera);
} else {
m_mediaRecorder->setAudioSource(m_audioSource);
}
@@ -280,10 +279,10 @@ void QAndroidCaptureSession::stop(bool error)
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
QString mediaPath = m_actualOutputLocation.toLocalFile();
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
QString standardLoc = m_cameraSession ? AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM)
: AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc))
JMultimediaUtils::registerMediaFile(mediaPath);
AndroidMultimediaUtils::registerMediaFile(mediaPath);
m_actualOutputLocation = m_usedOutputLocation;
emit actualLocationChanged(m_actualOutputLocation);
@@ -347,14 +346,14 @@ void QAndroidCaptureSession::applySettings()
m_containerFormat = m_defaultSettings.outputFileExtension;
m_outputFormat = m_defaultSettings.outputFormat;
} else if (m_containerFormat == QLatin1String("3gp")) {
m_outputFormat = JMediaRecorder::THREE_GPP;
m_outputFormat = AndroidMediaRecorder::THREE_GPP;
} else if (!m_cameraSession && m_containerFormat == QLatin1String("amr")) {
m_outputFormat = JMediaRecorder::AMR_NB_Format;
m_outputFormat = AndroidMediaRecorder::AMR_NB_Format;
} else if (!m_cameraSession && m_containerFormat == QLatin1String("awb")) {
m_outputFormat = JMediaRecorder::AMR_WB_Format;
m_outputFormat = AndroidMediaRecorder::AMR_WB_Format;
} else {
m_containerFormat = QStringLiteral("mp4");
m_outputFormat = JMediaRecorder::MPEG_4;
m_outputFormat = AndroidMediaRecorder::MPEG_4;
}
m_containerFormatDirty = false;
@@ -372,11 +371,11 @@ void QAndroidCaptureSession::applySettings()
if (m_audioSettings.codec().isEmpty())
m_audioEncoder = m_defaultSettings.audioEncoder;
else if (m_audioSettings.codec() == QLatin1String("aac"))
m_audioEncoder = JMediaRecorder::AAC;
m_audioEncoder = AndroidMediaRecorder::AAC;
else if (m_audioSettings.codec() == QLatin1String("amr-nb"))
m_audioEncoder = JMediaRecorder::AMR_NB_Encoder;
m_audioEncoder = AndroidMediaRecorder::AMR_NB_Encoder;
else if (m_audioSettings.codec() == QLatin1String("amr-wb"))
m_audioEncoder = JMediaRecorder::AMR_WB_Encoder;
m_audioEncoder = AndroidMediaRecorder::AMR_WB_Encoder;
else
m_audioEncoder = m_defaultSettings.audioEncoder;
@@ -410,11 +409,11 @@ void QAndroidCaptureSession::applySettings()
if (m_videoSettings.codec().isEmpty())
m_videoEncoder = m_defaultSettings.videoEncoder;
else if (m_videoSettings.codec() == QLatin1String("h263"))
m_videoEncoder = JMediaRecorder::H263;
m_videoEncoder = AndroidMediaRecorder::H263;
else if (m_videoSettings.codec() == QLatin1String("h264"))
m_videoEncoder = JMediaRecorder::H264;
m_videoEncoder = AndroidMediaRecorder::H264;
else if (m_videoSettings.codec() == QLatin1String("mpeg4_sp"))
m_videoEncoder = JMediaRecorder::MPEG_4_SP;
m_videoEncoder = AndroidMediaRecorder::MPEG_4_SP;
else
m_videoEncoder = m_defaultSettings.videoEncoder;
@@ -489,24 +488,24 @@ QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id
id);
profile.outputFormat = JMediaRecorder::OutputFormat(obj.getField<jint>("fileFormat"));
profile.audioEncoder = JMediaRecorder::AudioEncoder(obj.getField<jint>("audioCodec"));
profile.outputFormat = AndroidMediaRecorder::OutputFormat(obj.getField<jint>("fileFormat"));
profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(obj.getField<jint>("audioCodec"));
profile.audioBitRate = obj.getField<jint>("audioBitRate");
profile.audioChannels = obj.getField<jint>("audioChannels");
profile.audioSampleRate = obj.getField<jint>("audioSampleRate");
profile.videoEncoder = JMediaRecorder::VideoEncoder(obj.getField<jint>("videoCodec"));
profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(obj.getField<jint>("videoCodec"));
profile.videoBitRate = obj.getField<jint>("videoBitRate");
profile.videoFrameRate = obj.getField<jint>("videoFrameRate");
profile.videoResolution = QSize(obj.getField<jint>("videoFrameWidth"),
obj.getField<jint>("videoFrameHeight"));
if (profile.outputFormat == JMediaRecorder::MPEG_4)
if (profile.outputFormat == AndroidMediaRecorder::MPEG_4)
profile.outputFileExtension = QStringLiteral("mp4");
else if (profile.outputFormat == JMediaRecorder::THREE_GPP)
else if (profile.outputFormat == AndroidMediaRecorder::THREE_GPP)
profile.outputFileExtension = QStringLiteral("3gp");
else if (profile.outputFormat == JMediaRecorder::AMR_NB_Format)
else if (profile.outputFormat == AndroidMediaRecorder::AMR_NB_Format)
profile.outputFileExtension = QStringLiteral("amr");
else if (profile.outputFormat == JMediaRecorder::AMR_WB_Format)
else if (profile.outputFormat == AndroidMediaRecorder::AMR_WB_Format)
profile.outputFileExtension = QStringLiteral("awb");
profile.isNull = false;


@@ -48,7 +48,7 @@
#include <qelapsedtimer.h>
#include <qtimer.h>
#include <private/qmediastoragelocation_p.h>
#include "jmediarecorder.h"
#include "androidmediarecorder.h"
QT_BEGIN_NAMESPACE
@@ -106,15 +106,15 @@ private Q_SLOTS:
private:
struct CaptureProfile {
JMediaRecorder::OutputFormat outputFormat;
AndroidMediaRecorder::OutputFormat outputFormat;
QString outputFileExtension;
JMediaRecorder::AudioEncoder audioEncoder;
AndroidMediaRecorder::AudioEncoder audioEncoder;
int audioBitRate;
int audioChannels;
int audioSampleRate;
JMediaRecorder::VideoEncoder videoEncoder;
AndroidMediaRecorder::VideoEncoder videoEncoder;
int videoBitRate;
int videoFrameRate;
QSize videoResolution;
@@ -122,13 +122,13 @@ private:
bool isNull;
CaptureProfile()
: outputFormat(JMediaRecorder::MPEG_4)
: outputFormat(AndroidMediaRecorder::MPEG_4)
, outputFileExtension(QLatin1String("mp4"))
, audioEncoder(JMediaRecorder::DefaultAudioEncoder)
, audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
, audioBitRate(128000)
, audioChannels(2)
, audioSampleRate(44100)
, videoEncoder(JMediaRecorder::DefaultVideoEncoder)
, videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
, videoBitRate(1)
, videoFrameRate(-1)
, videoResolution(320, 240)
@@ -146,11 +146,11 @@ private:
void updateViewfinder();
void restartViewfinder();
JMediaRecorder *m_mediaRecorder;
AndroidMediaRecorder *m_mediaRecorder;
QAndroidCameraSession *m_cameraSession;
QString m_audioInput;
JMediaRecorder::AudioSource m_audioSource;
AndroidMediaRecorder::AudioSource m_audioSource;
QMediaStorageLocation m_mediaStorageLocation;
@@ -173,9 +173,9 @@ private:
bool m_containerFormatDirty;
bool m_videoSettingsDirty;
bool m_audioSettingsDirty;
JMediaRecorder::OutputFormat m_outputFormat;
JMediaRecorder::AudioEncoder m_audioEncoder;
JMediaRecorder::VideoEncoder m_videoEncoder;
AndroidMediaRecorder::OutputFormat m_outputFormat;
AndroidMediaRecorder::AudioEncoder m_audioEncoder;
AndroidMediaRecorder::VideoEncoder m_videoEncoder;
QList<QSize> m_supportedResolutions;
QList<qreal> m_supportedFramerates;


@@ -42,7 +42,7 @@
#include "qandroidimageencodercontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -42,7 +42,7 @@
#include "qandroidvideodeviceselectorcontrol.h"
#include "qandroidcamerasession.h"
#include "jcamera.h"
#include "androidcamera.h"
QT_BEGIN_NAMESPACE


@@ -40,14 +40,14 @@
****************************************************************************/
#include "qandroidmediaplayercontrol.h"
#include "jmediaplayer.h"
#include "androidmediaplayer.h"
#include "qandroidvideooutput.h"
QT_BEGIN_NAMESPACE
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
: QMediaPlayerControl(parent),
mMediaPlayer(new JMediaPlayer),
mMediaPlayer(new AndroidMediaPlayer),
mCurrentState(QMediaPlayer::StoppedState),
mCurrentMediaStatus(QMediaPlayer::NoMedia),
mMediaStream(0),
@@ -58,7 +58,7 @@ QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
mAudioAvailable(false),
mVideoAvailable(false),
mBuffering(false),
mState(JMediaPlayer::Uninitialized),
mState(AndroidMediaPlayer::Uninitialized),
mPendingState(-1),
mPendingPosition(-1),
mPendingSetMedia(false),
@@ -99,11 +99,11 @@ QMediaPlayer::MediaStatus QAndroidMediaPlayerControl::mediaStatus() const
qint64 QAndroidMediaPlayerControl::duration() const
{
if ((mState & (JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::Stopped
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::Stopped
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
return 0;
}
@@ -115,13 +115,13 @@ qint64 QAndroidMediaPlayerControl::position() const
if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
return duration();
if ((mState & (JMediaPlayer::Idle
| JMediaPlayer::Initialized
| JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::Stopped
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Idle
| AndroidMediaPlayer::Initialized
| AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::Stopped
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
return (mPendingPosition == -1) ? 0 : mPendingPosition;
}
@@ -135,10 +135,10 @@ void QAndroidMediaPlayerControl::setPosition(qint64 position)
const int seekPosition = (position > INT_MAX) ? INT_MAX : position;
if ((mState & (JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
if (mPendingPosition != seekPosition) {
mPendingPosition = seekPosition;
Q_EMIT positionChanged(seekPosition);
@@ -165,13 +165,13 @@ int QAndroidMediaPlayerControl::volume() const
void QAndroidMediaPlayerControl::setVolume(int volume)
{
if ((mState & (JMediaPlayer::Idle
| JMediaPlayer::Initialized
| JMediaPlayer::Stopped
| JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Idle
| AndroidMediaPlayer::Initialized
| AndroidMediaPlayer::Stopped
| AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
if (mPendingVolume != volume) {
mPendingVolume = volume;
Q_EMIT volumeChanged(volume);
@@ -196,13 +196,13 @@ bool QAndroidMediaPlayerControl::isMuted() const
void QAndroidMediaPlayerControl::setMuted(bool muted)
{
if ((mState & (JMediaPlayer::Idle
| JMediaPlayer::Initialized
| JMediaPlayer::Stopped
| JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Idle
| AndroidMediaPlayer::Initialized
| AndroidMediaPlayer::Stopped
| AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
if (mPendingMute != muted) {
mPendingMute = muted;
Q_EMIT mutedChanged(muted);
@@ -291,7 +291,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
}
// Release the mediaplayer if it's not in in Idle or Uninitialized state
if ((mState & (JMediaPlayer::Idle | JMediaPlayer::Uninitialized)) == 0)
if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized)) == 0)
mMediaPlayer->release();
if (mediaContent.isNull()) {
@@ -353,16 +353,16 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
void QAndroidMediaPlayerControl::play()
{
// We need to prepare the mediaplayer again.
if ((mState & JMediaPlayer::Stopped) && !mMediaContent.isNull()) {
if ((mState & AndroidMediaPlayer::Stopped) && !mMediaContent.isNull()) {
setMedia(mMediaContent, mMediaStream);
}
setState(QMediaPlayer::PlayingState);
if ((mState & (JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
mPendingState = QMediaPlayer::PlayingState;
return;
}
@@ -374,9 +374,9 @@ void QAndroidMediaPlayerControl::pause()
{
setState(QMediaPlayer::PausedState);
if ((mState & (JMediaPlayer::Started
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Started
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
mPendingState = QMediaPlayer::PausedState;
return;
}
@@ -388,12 +388,12 @@ void QAndroidMediaPlayerControl::stop()
{
setState(QMediaPlayer::StoppedState);
if ((mState & (JMediaPlayer::Prepared
| JMediaPlayer::Started
| JMediaPlayer::Stopped
| JMediaPlayer::Paused
| JMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (JMediaPlayer::Idle | JMediaPlayer::Uninitialized | JMediaPlayer::Error)) == 0)
if ((mState & (AndroidMediaPlayer::Prepared
| AndroidMediaPlayer::Started
| AndroidMediaPlayer::Stopped
| AndroidMediaPlayer::Paused
| AndroidMediaPlayer::PlaybackCompleted)) == 0) {
if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized | AndroidMediaPlayer::Error)) == 0)
mPendingState = QMediaPlayer::StoppedState;
return;
}
@@ -405,28 +405,28 @@ void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
{
Q_UNUSED(extra);
switch (what) {
case JMediaPlayer::MEDIA_INFO_UNKNOWN:
case AndroidMediaPlayer::MEDIA_INFO_UNKNOWN:
break;
case JMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
case AndroidMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
// IGNORE
break;
case JMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
case AndroidMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
break;
case JMediaPlayer::MEDIA_INFO_BUFFERING_START:
case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_START:
mPendingState = mCurrentState;
setState(QMediaPlayer::PausedState);
setMediaStatus(QMediaPlayer::StalledMedia);
break;
case JMediaPlayer::MEDIA_INFO_BUFFERING_END:
case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_END:
if (mCurrentState != QMediaPlayer::StoppedState)
flushPendingStates();
break;
case JMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
case AndroidMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
break;
case JMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
case AndroidMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
setSeekable(false);
break;
case JMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
case AndroidMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
Q_EMIT metaDataUpdated();
break;
}
@@ -438,44 +438,44 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
QMediaPlayer::Error error = QMediaPlayer::ResourceError;
switch (what) {
case JMediaPlayer::MEDIA_ERROR_UNKNOWN:
case AndroidMediaPlayer::MEDIA_ERROR_UNKNOWN:
errorString = QLatin1String("Error:");
break;
case JMediaPlayer::MEDIA_ERROR_SERVER_DIED:
case AndroidMediaPlayer::MEDIA_ERROR_SERVER_DIED:
errorString = QLatin1String("Error: Server died");
error = QMediaPlayer::ServiceMissingError;
break;
case JMediaPlayer::MEDIA_ERROR_INVALID_STATE:
case AndroidMediaPlayer::MEDIA_ERROR_INVALID_STATE:
errorString = QLatin1String("Error: Invalid state");
error = QMediaPlayer::ServiceMissingError;
break;
}
switch (extra) {
case JMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
case AndroidMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
errorString += QLatin1String(" (I/O operation failed)");
error = QMediaPlayer::NetworkError;
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case JMediaPlayer::MEDIA_ERROR_MALFORMED:
case AndroidMediaPlayer::MEDIA_ERROR_MALFORMED:
errorString += QLatin1String(" (Malformed bitstream)");
error = QMediaPlayer::FormatError;
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case JMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
case AndroidMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
errorString += QLatin1String(" (Unsupported media)");
error = QMediaPlayer::FormatError;
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case JMediaPlayer::MEDIA_ERROR_TIMED_OUT:
case AndroidMediaPlayer::MEDIA_ERROR_TIMED_OUT:
errorString += QLatin1String(" (Timed out)");
break;
case JMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
case AndroidMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
errorString += QLatin1String(" (Unable to start progressive playback')");
error = QMediaPlayer::FormatError;
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case JMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
errorString += QLatin1String(" (Unknown error/Insufficient resources)");
error = QMediaPlayer::ServiceMissingError;
break;
@@ -512,21 +512,21 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
{
// If reloading, don't report state changes unless the new state is Prepared or Error.
if ((mState & JMediaPlayer::Stopped)
&& (state & (JMediaPlayer::Prepared | JMediaPlayer::Error | JMediaPlayer::Uninitialized)) == 0) {
if ((mState & AndroidMediaPlayer::Stopped)
&& (state & (AndroidMediaPlayer::Prepared | AndroidMediaPlayer::Error | AndroidMediaPlayer::Uninitialized)) == 0) {
return;
}
mState = state;
switch (mState) {
case JMediaPlayer::Idle:
case AndroidMediaPlayer::Idle:
break;
case JMediaPlayer::Initialized:
case AndroidMediaPlayer::Initialized:
break;
case JMediaPlayer::Preparing:
case AndroidMediaPlayer::Preparing:
setMediaStatus(QMediaPlayer::LoadingMedia);
break;
case JMediaPlayer::Prepared:
case AndroidMediaPlayer::Prepared:
setMediaStatus(QMediaPlayer::LoadedMedia);
if (mBuffering) {
setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
@@ -537,7 +537,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
setAudioAvailable(true);
flushPendingStates();
break;
case JMediaPlayer::Started:
case AndroidMediaPlayer::Started:
setState(QMediaPlayer::PlayingState);
if (mBuffering) {
setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
@@ -546,25 +546,25 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
setMediaStatus(QMediaPlayer::BufferedMedia);
}
break;
case JMediaPlayer::Paused:
case AndroidMediaPlayer::Paused:
setState(QMediaPlayer::PausedState);
break;
case JMediaPlayer::Error:
case AndroidMediaPlayer::Error:
setState(QMediaPlayer::StoppedState);
setMediaStatus(QMediaPlayer::UnknownMediaStatus);
mMediaPlayer->release();
break;
case JMediaPlayer::Stopped:
case AndroidMediaPlayer::Stopped:
setState(QMediaPlayer::StoppedState);
setMediaStatus(QMediaPlayer::LoadedMedia);
setPosition(0);
break;
case JMediaPlayer::PlaybackCompleted:
case AndroidMediaPlayer::PlaybackCompleted:
setState(QMediaPlayer::StoppedState);
setPosition(0);
setMediaStatus(QMediaPlayer::EndOfMedia);
break;
case JMediaPlayer::Uninitialized:
case AndroidMediaPlayer::Uninitialized:
// reset some properties
resetBufferingProgress();
mPendingPosition = -1;
@@ -579,7 +579,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
break;
}
if ((mState & (JMediaPlayer::Stopped | JMediaPlayer::Uninitialized)) != 0) {
if ((mState & (AndroidMediaPlayer::Stopped | AndroidMediaPlayer::Uninitialized)) != 0) {
mMediaPlayer->setDisplay(0);
if (mVideoOutput) {
mVideoOutput->stop();


@@ -49,7 +49,7 @@
QT_BEGIN_NAMESPACE
class JMediaPlayer;
class AndroidMediaPlayer;
class QAndroidVideoOutput;
class QAndroidMediaPlayerControl : public QMediaPlayerControl
@@ -98,7 +98,7 @@ private Q_SLOTS:
void onStateChanged(qint32 state);
private:
JMediaPlayer *mMediaPlayer;
AndroidMediaPlayer *mMediaPlayer;
QMediaPlayer::State mCurrentState;
QMediaPlayer::MediaStatus mCurrentMediaStatus;
QMediaContent mMediaContent;


@@ -41,7 +41,7 @@
#include "qandroidmetadatareadercontrol.h"
#include "jmediametadataretriever.h"
#include "androidmediametadataretriever.h"
#include <QtMultimedia/qmediametadata.h>
#include <qsize.h>
#include <QDate>
@@ -74,7 +74,7 @@ static const char* qt_ID3GenreNames[] =
QAndroidMetaDataReaderControl::QAndroidMetaDataReaderControl(QObject *parent)
: QMetaDataReaderControl(parent)
, m_available(false)
, m_retriever(new JMediaMetadataRetriever)
, m_retriever(new AndroidMediaMetadataRetriever)
{
}
@@ -124,56 +124,56 @@ void QAndroidMetaDataReaderControl::updateData()
if (!m_mediaContent.isNull()) {
if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) {
QString mimeType = m_retriever->extractMetadata(JMediaMetadataRetriever::MimeType);
QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType);
if (!mimeType.isNull())
m_metadata.insert(QMediaMetaData::MediaType, mimeType);
bool isVideo = !m_retriever->extractMetadata(JMediaMetadataRetriever::HasVideo).isNull()
bool isVideo = !m_retriever->extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull()
|| mimeType.startsWith(QStringLiteral("video"));
QString string = m_retriever->extractMetadata(JMediaMetadataRetriever::Album);
QString string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Album);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::AlbumTitle, string);
string = m_retriever->extractMetadata(JMediaMetadataRetriever::AlbumArtist);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::AlbumArtist, string);
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Artist);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Artist);
if (!string.isNull()) {
m_metadata.insert(isVideo ? QMediaMetaData::LeadPerformer
: QMediaMetaData::ContributingArtist,
string.split('/', QString::SkipEmptyParts));
}
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Author);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Author);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Author, string.split('/', QString::SkipEmptyParts));
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Bitrate);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Bitrate);
if (!string.isNull()) {
m_metadata.insert(isVideo ? QMediaMetaData::VideoBitRate
: QMediaMetaData::AudioBitRate,
string.toInt());
}
string = m_retriever->extractMetadata(JMediaMetadataRetriever::CDTrackNumber);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::TrackNumber, string.toInt());
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Composer);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Composer);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Composer, string.split('/', QString::SkipEmptyParts));
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Date);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Date);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date());
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Duration);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Duration);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Duration, string.toLongLong());
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Genre);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Genre);
if (!string.isNull()) {
// The genre can be returned as an ID3v2 id, get the name for it in that case
if (string.startsWith('(') && string.endsWith(')')) {
@@ -185,22 +185,22 @@ void QAndroidMetaDataReaderControl::updateData()
m_metadata.insert(QMediaMetaData::Genre, string);
}
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Title);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Title);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Title, string);
string = m_retriever->extractMetadata(JMediaMetadataRetriever::VideoHeight);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoHeight);
if (!string.isNull()) {
int height = string.toInt();
int width = m_retriever->extractMetadata(JMediaMetadataRetriever::VideoWidth).toInt();
int width = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt();
m_metadata.insert(QMediaMetaData::Resolution, QSize(width, height));
}
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Writer);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Writer);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Writer, string.split('/', QString::SkipEmptyParts));
string = m_retriever->extractMetadata(JMediaMetadataRetriever::Year);
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Year);
if (!string.isNull())
m_metadata.insert(QMediaMetaData::Year, string.toInt());
}


@@ -47,7 +47,7 @@
QT_BEGIN_NAMESPACE
class JMediaMetadataRetriever;
class AndroidMediaMetadataRetriever;
class QAndroidMetaDataReaderControl : public QMetaDataReaderControl
{
@@ -72,7 +72,7 @@ private:
bool m_available;
QVariantMap m_metadata;
JMediaMetadataRetriever *m_retriever;
AndroidMediaMetadataRetriever *m_retriever;
};
QT_END_NAMESPACE


@@ -46,11 +46,11 @@
#include "qandroidaudioinputselectorcontrol.h"
#include "qandroidcamerainfocontrol.h"
#include "qandroidcamerasession.h"
#include "jmediaplayer.h"
#include "jsurfacetexture.h"
#include "jcamera.h"
#include "jmultimediautils.h"
#include "jmediarecorder.h"
#include "androidmediaplayer.h"
#include "androidsurfacetexture.h"
#include "androidcamera.h"
#include "androidmultimediautils.h"
#include "androidmediarecorder.h"
#include <qdebug.h>
QT_BEGIN_NAMESPACE
@@ -149,6 +149,7 @@ int QAndroidMediaServicePlugin::cameraOrientation(const QByteArray &device) cons
QT_END_NAMESPACE
#ifndef Q_OS_ANDROID_NO_SDK
Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
{
QT_USE_NAMESPACE
@@ -165,13 +166,14 @@ Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
JNIEnv *jniEnv = uenv.nativeEnvironment;
if (!JMediaPlayer::initJNI(jniEnv) ||
!JCamera::initJNI(jniEnv) ||
!JMediaRecorder::initJNI(jniEnv)) {
if (!AndroidMediaPlayer::initJNI(jniEnv) ||
!AndroidCamera::initJNI(jniEnv) ||
!AndroidMediaRecorder::initJNI(jniEnv)) {
return JNI_ERR;
}
JSurfaceTexture::initJNI(jniEnv);
AndroidSurfaceTexture::initJNI(jniEnv);
return JNI_VERSION_1_4;
}
#endif // Q_OS_ANDROID_NO_SDK
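
As background for the initJNI() calls above: each wrapper registers its native callbacks against a Java listener class, a pattern that appears verbatim further down in this diff (for example in the media-recorder wrapper). The following is a minimal, self-contained sketch of that registration step; the free functions and the helper name are placeholders, while the listener class name and method signatures are taken from the diff:

#include <jni.h>

// Stand-ins for the wrapper callbacks (the real ones emit Qt signals).
static void notifyError(JNIEnv *, jobject, jlong, jint, jint) { }
static void notifyInfo(JNIEnv *, jobject, jlong, jint, jint) { }

static JNINativeMethod gMethods[] = {
    {"notifyError", "(JII)V", (void *)notifyError},
    {"notifyInfo",  "(JII)V", (void *)notifyInfo}
};

// Same shape as AndroidMediaRecorder::initJNI(JNIEnv *) below: find the Java
// listener class and register the callbacks; JNI_OnLoad returns JNI_ERR when
// any of the wrapper initJNI() helpers report failure.
static bool initListenerJni(JNIEnv *env)
{
    jclass clazz = env->FindClass(
            "org/qtproject/qt5/android/multimedia/QtMediaRecorderListener");
    if (!clazz || env->ExceptionCheck())
        return false;
    return env->RegisterNatives(clazz, gMethods,
                                sizeof(gMethods) / sizeof(gMethods[0])) == 0;
}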


@@ -11,7 +11,7 @@ HEADERS += \
SOURCES += \
qandroidmediaserviceplugin.cpp
include (wrappers/wrappers.pri)
include (wrappers/jni/jni.pri)
include (common/common.pri)
include (mediaplayer/mediaplayer.pri)
include (mediacapture/mediacapture.pri)


@@ -39,8 +39,8 @@
**
****************************************************************************/
#ifndef JCAMERA_H
#define JCAMERA_H
#ifndef ANDROIDCAMERA_H
#define ANDROIDCAMERA_H
#include <qobject.h>
#include <QtCore/private/qjni_p.h>
@@ -51,9 +51,9 @@ QT_BEGIN_NAMESPACE
class QThread;
class JCameraPrivate;
class AndroidCameraPrivate;
class JCamera : public QObject
class AndroidCamera : public QObject
{
Q_OBJECT
Q_ENUMS(CameraFacing)
@@ -74,9 +74,9 @@ public:
YV12 = 842094169
};
~JCamera();
~AndroidCamera();
static JCamera *open(int cameraId);
static AndroidCamera *open(int cameraId);
int cameraId() const;
@@ -175,13 +175,13 @@ Q_SIGNALS:
void frameFetched(const QByteArray &frame);
private:
JCamera(JCameraPrivate *d, QThread *worker);
AndroidCamera(AndroidCameraPrivate *d, QThread *worker);
Q_DECLARE_PRIVATE(JCamera)
JCameraPrivate *d_ptr;
Q_DECLARE_PRIVATE(AndroidCamera)
AndroidCameraPrivate *d_ptr;
QScopedPointer<QThread> m_worker;
};
QT_END_NAMESPACE
#endif // JCAMERA_H
#endif // ANDROIDCAMERA_H


@@ -39,23 +39,23 @@
**
****************************************************************************/
#include "jmediametadataretriever.h"
#include "androidmediametadataretriever.h"
#include <QtCore/private/qjnihelpers_p.h>
#include <QtCore/private/qjni_p.h>
QT_BEGIN_NAMESPACE
JMediaMetadataRetriever::JMediaMetadataRetriever()
AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
{
m_metadataRetriever = QJNIObjectPrivate("android/media/MediaMetadataRetriever");
}
JMediaMetadataRetriever::~JMediaMetadataRetriever()
AndroidMediaMetadataRetriever::~AndroidMediaMetadataRetriever()
{
}
QString JMediaMetadataRetriever::extractMetadata(MetadataKey key)
QString AndroidMediaMetadataRetriever::extractMetadata(MetadataKey key)
{
QString value;
@@ -68,7 +68,7 @@ QString JMediaMetadataRetriever::extractMetadata(MetadataKey key)
return value;
}
void JMediaMetadataRetriever::release()
void AndroidMediaMetadataRetriever::release()
{
if (!m_metadataRetriever.isValid())
return;
@@ -76,7 +76,7 @@ void JMediaMetadataRetriever::release()
m_metadataRetriever.callMethod<void>("release");
}
bool JMediaMetadataRetriever::setDataSource(const QUrl &url)
bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
{
if (!m_metadataRetriever.isValid())
return false;
@@ -107,7 +107,7 @@ bool JMediaMetadataRetriever::setDataSource(const QUrl &url)
return loaded;
}
bool JMediaMetadataRetriever::setDataSource(const QString &path)
bool AndroidMediaMetadataRetriever::setDataSource(const QString &path)
{
if (!m_metadataRetriever.isValid())
return false;


@@ -39,15 +39,15 @@
**
****************************************************************************/
#ifndef JMEDIAMETADATARETRIEVER_H
#define JMEDIAMETADATARETRIEVER_H
#ifndef ANDROIDMEDIAMETADATARETRIEVER_H
#define ANDROIDMEDIAMETADATARETRIEVER_H
#include <QtCore/private/qjni_p.h>
#include <qurl.h>
QT_BEGIN_NAMESPACE
class JMediaMetadataRetriever
class AndroidMediaMetadataRetriever
{
public:
enum MetadataKey {
@@ -76,8 +76,8 @@ public:
Year = 8
};
JMediaMetadataRetriever();
~JMediaMetadataRetriever();
AndroidMediaMetadataRetriever();
~AndroidMediaMetadataRetriever();
QString extractMetadata(MetadataKey key);
void release();
@@ -90,4 +90,4 @@ private:
QT_END_NAMESPACE
#endif // JMEDIAMETADATARETRIEVER_H
#endif // ANDROIDMEDIAMETADATARETRIEVER_H


@@ -39,7 +39,7 @@
**
****************************************************************************/
#include "jmediaplayer.h"
#include "androidmediaplayer.h"
#include <QString>
#include <QtCore/private/qjni_p.h>
@@ -47,12 +47,12 @@
#include <QMap>
static jclass mediaPlayerClass = Q_NULLPTR;
typedef QMap<jlong, JMediaPlayer *> MediaPlayerMap;
typedef QMap<jlong, AndroidMediaPlayer *> MediaPlayerMap;
Q_GLOBAL_STATIC(MediaPlayerMap, mediaPlayers)
QT_BEGIN_NAMESPACE
JMediaPlayer::JMediaPlayer()
AndroidMediaPlayer::AndroidMediaPlayer()
: QObject()
{
@@ -64,93 +64,93 @@ JMediaPlayer::JMediaPlayer()
(*mediaPlayers)[id] = this;
}
JMediaPlayer::~JMediaPlayer()
AndroidMediaPlayer::~AndroidMediaPlayer()
{
mediaPlayers->remove(reinterpret_cast<jlong>(this));
}
void JMediaPlayer::release()
void AndroidMediaPlayer::release()
{
mMediaPlayer.callMethod<void>("release");
}
void JMediaPlayer::reset()
void AndroidMediaPlayer::reset()
{
mMediaPlayer.callMethod<void>("reset");
}
int JMediaPlayer::getCurrentPosition()
int AndroidMediaPlayer::getCurrentPosition()
{
return mMediaPlayer.callMethod<jint>("getCurrentPosition");
}
int JMediaPlayer::getDuration()
int AndroidMediaPlayer::getDuration()
{
return mMediaPlayer.callMethod<jint>("getDuration");
}
bool JMediaPlayer::isPlaying()
bool AndroidMediaPlayer::isPlaying()
{
return mMediaPlayer.callMethod<jboolean>("isPlaying");
}
int JMediaPlayer::volume()
int AndroidMediaPlayer::volume()
{
return mMediaPlayer.callMethod<jint>("getVolume");
}
bool JMediaPlayer::isMuted()
bool AndroidMediaPlayer::isMuted()
{
return mMediaPlayer.callMethod<jboolean>("isMuted");
}
jobject JMediaPlayer::display()
jobject AndroidMediaPlayer::display()
{
return mMediaPlayer.callObjectMethod("display", "()Landroid/view/SurfaceHolder;").object();
}
void JMediaPlayer::play()
void AndroidMediaPlayer::play()
{
mMediaPlayer.callMethod<void>("start");
}
void JMediaPlayer::pause()
void AndroidMediaPlayer::pause()
{
mMediaPlayer.callMethod<void>("pause");
}
void JMediaPlayer::stop()
void AndroidMediaPlayer::stop()
{
mMediaPlayer.callMethod<void>("stop");
}
void JMediaPlayer::seekTo(qint32 msec)
void AndroidMediaPlayer::seekTo(qint32 msec)
{
mMediaPlayer.callMethod<void>("seekTo", "(I)V", jint(msec));
}
void JMediaPlayer::setMuted(bool mute)
void AndroidMediaPlayer::setMuted(bool mute)
{
mMediaPlayer.callMethod<void>("mute", "(Z)V", jboolean(mute));
}
void JMediaPlayer::setDataSource(const QString &path)
void AndroidMediaPlayer::setDataSource(const QString &path)
{
QJNIObjectPrivate string = QJNIObjectPrivate::fromString(path);
mMediaPlayer.callMethod<void>("setDataSource", "(Ljava/lang/String;)V", string.object());
}
void JMediaPlayer::prepareAsync()
void AndroidMediaPlayer::prepareAsync()
{
mMediaPlayer.callMethod<void>("prepareAsync");
}
void JMediaPlayer::setVolume(int volume)
void AndroidMediaPlayer::setVolume(int volume)
{
mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
}
void JMediaPlayer::setDisplay(jobject surfaceHolder)
void AndroidMediaPlayer::setDisplay(jobject surfaceHolder)
{
mMediaPlayer.callMethod<void>("setDisplay", "(Landroid/view/SurfaceHolder;)V", surfaceHolder);
}
@@ -159,7 +159,7 @@ static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlon
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -170,7 +170,7 @@ static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlo
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -181,7 +181,7 @@ static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlo
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -192,7 +192,7 @@ static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jl
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -203,7 +203,7 @@ static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -214,7 +214,7 @@ static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
@@ -229,14 +229,14 @@ static void onVideoSizeChangedNative(JNIEnv *env,
{
Q_UNUSED(env);
Q_UNUSED(thiz);
JMediaPlayer *const mp = (*mediaPlayers)[id];
AndroidMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
Q_EMIT mp->videoSizeChanged(width, height);
}
bool JMediaPlayer::initJNI(JNIEnv *env)
bool AndroidMediaPlayer::initJNI(JNIEnv *env)
{
jclass jClass = env->FindClass("org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer");


@@ -39,20 +39,20 @@
**
****************************************************************************/
#ifndef QANDROIDMEDIAPLAYER_H
#define QANDROIDMEDIAPLAYER_H
#ifndef ANDROIDMEDIAPLAYER_H
#define ANDROIDMEDIAPLAYER_H
#include <QObject>
#include <QtCore/private/qjni_p.h>
QT_BEGIN_NAMESPACE
class JMediaPlayer : public QObject
class AndroidMediaPlayer : public QObject
{
Q_OBJECT
public:
JMediaPlayer();
~JMediaPlayer();
AndroidMediaPlayer();
~AndroidMediaPlayer();
enum MediaError
{
@@ -132,4 +132,4 @@ private:
QT_END_NAMESPACE
#endif // QANDROIDMEDIAPLAYER_H
#endif // ANDROIDMEDIAPLAYER_H


@@ -39,33 +39,33 @@
**
****************************************************************************/
#include "jmediarecorder.h"
#include "androidmediarecorder.h"
#include "jcamera.h"
#include "androidcamera.h"
#include <QtCore/private/qjni_p.h>
#include <qmap.h>
QT_BEGIN_NAMESPACE
static jclass g_qtMediaRecorderListenerClass = 0;
typedef QMap<jlong, JMediaRecorder*> MediaRecorderMap;
typedef QMap<jlong, AndroidMediaRecorder*> MediaRecorderMap;
Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)
static void notifyError(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
JMediaRecorder *obj = mediaRecorders->value(id, 0);
AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
if (obj)
emit obj->error(what, extra);
}
static void notifyInfo(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
JMediaRecorder *obj = mediaRecorders->value(id, 0);
AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
if (obj)
emit obj->info(what, extra);
}
JMediaRecorder::JMediaRecorder()
AndroidMediaRecorder::AndroidMediaRecorder()
: QObject()
, m_id(reinterpret_cast<jlong>(this))
{
@@ -82,17 +82,17 @@ JMediaRecorder::JMediaRecorder()
}
}
JMediaRecorder::~JMediaRecorder()
AndroidMediaRecorder::~AndroidMediaRecorder()
{
mediaRecorders->remove(m_id);
}
void JMediaRecorder::release()
void AndroidMediaRecorder::release()
{
m_mediaRecorder.callMethod<void>("release");
}
bool JMediaRecorder::prepare()
bool AndroidMediaRecorder::prepare()
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("prepare");
@@ -106,12 +106,12 @@ bool JMediaRecorder::prepare()
return true;
}
void JMediaRecorder::reset()
void AndroidMediaRecorder::reset()
{
m_mediaRecorder.callMethod<void>("reset");
}
bool JMediaRecorder::start()
bool AndroidMediaRecorder::start()
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("start");
@@ -125,7 +125,7 @@ bool JMediaRecorder::start()
return true;
}
void JMediaRecorder::stop()
void AndroidMediaRecorder::stop()
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("stop");
@@ -137,12 +137,12 @@ void JMediaRecorder::stop()
}
}
void JMediaRecorder::setAudioChannels(int numChannels)
void AndroidMediaRecorder::setAudioChannels(int numChannels)
{
m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
}
void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
void AndroidMediaRecorder::setAudioEncoder(AudioEncoder encoder)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
@@ -154,17 +154,17 @@ void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
}
}
void JMediaRecorder::setAudioEncodingBitRate(int bitRate)
void AndroidMediaRecorder::setAudioEncodingBitRate(int bitRate)
{
m_mediaRecorder.callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
}
void JMediaRecorder::setAudioSamplingRate(int samplingRate)
void AndroidMediaRecorder::setAudioSamplingRate(int samplingRate)
{
m_mediaRecorder.callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
}
void JMediaRecorder::setAudioSource(AudioSource source)
void AndroidMediaRecorder::setAudioSource(AudioSource source)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setAudioSource", "(I)V", int(source));
@@ -176,13 +176,13 @@ void JMediaRecorder::setAudioSource(AudioSource source)
}
}
void JMediaRecorder::setCamera(JCamera *camera)
void AndroidMediaRecorder::setCamera(AndroidCamera *camera)
{
QJNIObjectPrivate cam = camera->getCameraObject();
m_mediaRecorder.callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
}
void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
void AndroidMediaRecorder::setVideoEncoder(VideoEncoder encoder)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
@@ -194,12 +194,12 @@ void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
}
}
void JMediaRecorder::setVideoEncodingBitRate(int bitRate)
void AndroidMediaRecorder::setVideoEncodingBitRate(int bitRate)
{
m_mediaRecorder.callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
}
void JMediaRecorder::setVideoFrameRate(int rate)
void AndroidMediaRecorder::setVideoFrameRate(int rate)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setVideoFrameRate", "(I)V", rate);
@@ -211,7 +211,7 @@ void JMediaRecorder::setVideoFrameRate(int rate)
}
}
void JMediaRecorder::setVideoSize(const QSize &size)
void AndroidMediaRecorder::setVideoSize(const QSize &size)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
@@ -223,7 +223,7 @@ void JMediaRecorder::setVideoSize(const QSize &size)
}
}
void JMediaRecorder::setVideoSource(VideoSource source)
void AndroidMediaRecorder::setVideoSource(VideoSource source)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setVideoSource", "(I)V", int(source));
@@ -235,7 +235,7 @@ void JMediaRecorder::setVideoSource(VideoSource source)
}
}
void JMediaRecorder::setOrientationHint(int degrees)
void AndroidMediaRecorder::setOrientationHint(int degrees)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setOrientationHint", "(I)V", degrees);
@@ -247,7 +247,7 @@ void JMediaRecorder::setOrientationHint(int degrees)
}
}
void JMediaRecorder::setOutputFormat(OutputFormat format)
void AndroidMediaRecorder::setOutputFormat(OutputFormat format)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setOutputFormat", "(I)V", int(format));
@@ -259,7 +259,7 @@ void JMediaRecorder::setOutputFormat(OutputFormat format)
}
}
void JMediaRecorder::setOutputFile(const QString &path)
void AndroidMediaRecorder::setOutputFile(const QString &path)
{
QJNIEnvironmentPrivate env;
m_mediaRecorder.callMethod<void>("setOutputFile",
@@ -278,7 +278,7 @@ static JNINativeMethod methods[] = {
{"notifyInfo", "(JII)V", (void *)notifyInfo}
};
bool JMediaRecorder::initJNI(JNIEnv *env)
bool AndroidMediaRecorder::initJNI(JNIEnv *env)
{
jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtMediaRecorderListener");
if (env->ExceptionCheck())


@@ -39,8 +39,8 @@
**
****************************************************************************/
#ifndef JMEDIARECORDER_H
#define JMEDIARECORDER_H
#ifndef ANDROIDMEDIARECORDER_H
#define ANDROIDMEDIARECORDER_H
#include <qobject.h>
#include <QtCore/private/qjni_p.h>
@@ -48,9 +48,9 @@
QT_BEGIN_NAMESPACE
class JCamera;
class AndroidCamera;
class JMediaRecorder : public QObject
class AndroidMediaRecorder : public QObject
{
Q_OBJECT
public:
@@ -91,8 +91,8 @@ public:
AMR_WB_Format = 4
};
JMediaRecorder();
~JMediaRecorder();
AndroidMediaRecorder();
~AndroidMediaRecorder();
void release();
bool prepare();
@@ -107,7 +107,7 @@ public:
void setAudioSamplingRate(int samplingRate);
void setAudioSource(AudioSource source);
void setCamera(JCamera *camera);
void setCamera(AndroidCamera *camera);
void setVideoEncoder(VideoEncoder encoder);
void setVideoEncodingBitRate(int bitRate);
void setVideoFrameRate(int rate);
@@ -132,4 +132,4 @@ private:
QT_END_NAMESPACE
#endif // JMEDIARECORDER_H
#endif // ANDROIDMEDIARECORDER_H


@@ -39,14 +39,14 @@
**
****************************************************************************/
#include "jmultimediautils.h"
#include "androidmultimediautils.h"
#include <QtCore/private/qjni_p.h>
QT_BEGIN_NAMESPACE
void JMultimediaUtils::enableOrientationListener(bool enable)
void AndroidMultimediaUtils::enableOrientationListener(bool enable)
{
QJNIObjectPrivate::callStaticMethod<void>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
"enableOrientationListener",
@@ -54,13 +54,13 @@ void JMultimediaUtils::enableOrientationListener(bool enable)
enable);
}
int JMultimediaUtils::getDeviceOrientation()
int AndroidMultimediaUtils::getDeviceOrientation()
{
return QJNIObjectPrivate::callStaticMethod<jint>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
"getDeviceOrientation");
}
QString JMultimediaUtils::getDefaultMediaDirectory(MediaType type)
QString AndroidMultimediaUtils::getDefaultMediaDirectory(MediaType type)
{
QJNIObjectPrivate path = QJNIObjectPrivate::callStaticObjectMethod("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
"getDefaultMediaDirectory",
@@ -69,7 +69,7 @@ QString JMultimediaUtils::getDefaultMediaDirectory(MediaType type)
return path.toString();
}
void JMultimediaUtils::registerMediaFile(const QString &file)
void AndroidMultimediaUtils::registerMediaFile(const QString &file)
{
QJNIObjectPrivate::callStaticMethod<void>("org/qtproject/qt5/android/multimedia/QtMultimediaUtils",
"registerMediaFile",


@@ -39,15 +39,15 @@
**
****************************************************************************/
#ifndef JMULTIMEDIAUTILS_H
#define JMULTIMEDIAUTILS_H
#ifndef ANDROIDMULTIMEDIAUTILS_H
#define ANDROIDMULTIMEDIAUTILS_H
#include <qobject.h>
#include <QtCore/private/qjni_p.h>
QT_BEGIN_NAMESPACE
class JMultimediaUtils
class AndroidMultimediaUtils
{
public:
enum MediaType {
@@ -65,4 +65,4 @@ public:
QT_END_NAMESPACE
#endif // JMULTIMEDIAUTILS_H
#endif // ANDROIDMULTIMEDIAUTILS_H


@@ -39,24 +39,24 @@
**
****************************************************************************/
#include "jsurfacetexture.h"
#include "androidsurfacetexture.h"
#include <QtCore/private/qjni_p.h>
#include <QtCore/private/qjnihelpers_p.h>
QT_BEGIN_NAMESPACE
static jclass g_qtSurfaceTextureListenerClass = 0;
static QMap<int, JSurfaceTexture*> g_objectMap;
static QMap<int, AndroidSurfaceTexture*> g_objectMap;
// native method for QtSurfaceTexture.java
static void notifyFrameAvailable(JNIEnv* , jobject, int id)
{
JSurfaceTexture *obj = g_objectMap.value(id, 0);
AndroidSurfaceTexture *obj = g_objectMap.value(id, 0);
if (obj)
Q_EMIT obj->frameAvailable();
}
JSurfaceTexture::JSurfaceTexture(unsigned int texName)
AndroidSurfaceTexture::AndroidSurfaceTexture(unsigned int texName)
: QObject()
, m_texID(int(texName))
{
@@ -84,7 +84,7 @@ JSurfaceTexture::JSurfaceTexture(unsigned int texName)
listener.object());
}
JSurfaceTexture::~JSurfaceTexture()
AndroidSurfaceTexture::~AndroidSurfaceTexture()
{
if (m_surfaceTexture.isValid()) {
release();
@@ -92,7 +92,7 @@ JSurfaceTexture::~JSurfaceTexture()
}
}
QMatrix4x4 JSurfaceTexture::getTransformMatrix()
QMatrix4x4 AndroidSurfaceTexture::getTransformMatrix()
{
QMatrix4x4 matrix;
if (!m_surfaceTexture.isValid())
@@ -108,7 +108,7 @@ QMatrix4x4 JSurfaceTexture::getTransformMatrix()
return matrix;
}
void JSurfaceTexture::release()
void AndroidSurfaceTexture::release()
{
if (QtAndroidPrivate::androidSdkVersion() < 14)
return;
@@ -116,7 +116,7 @@ void JSurfaceTexture::release()
m_surfaceTexture.callMethod<void>("release");
}
void JSurfaceTexture::updateTexImage()
void AndroidSurfaceTexture::updateTexImage()
{
if (!m_surfaceTexture.isValid())
return;
@@ -124,7 +124,7 @@ void JSurfaceTexture::updateTexImage()
m_surfaceTexture.callMethod<void>("updateTexImage");
}
jobject JSurfaceTexture::object()
jobject AndroidSurfaceTexture::object()
{
return m_surfaceTexture.object();
}
@@ -133,7 +133,7 @@ static JNINativeMethod methods[] = {
{"notifyFrameAvailable", "(I)V", (void *)notifyFrameAvailable}
};
bool JSurfaceTexture::initJNI(JNIEnv *env)
bool AndroidSurfaceTexture::initJNI(JNIEnv *env)
{
// SurfaceTexture is available since API 11.
if (QtAndroidPrivate::androidSdkVersion() < 11)


@@ -39,8 +39,8 @@
**
****************************************************************************/
#ifndef JSURFACETEXTURE_H
#define JSURFACETEXTURE_H
#ifndef ANDROIDSURFACETEXTURE_H
#define ANDROIDSURFACETEXTURE_H
#include <qobject.h>
#include <QtCore/private/qjni_p.h>
@@ -49,12 +49,12 @@
QT_BEGIN_NAMESPACE
class JSurfaceTexture : public QObject
class AndroidSurfaceTexture : public QObject
{
Q_OBJECT
public:
explicit JSurfaceTexture(unsigned int texName);
~JSurfaceTexture();
explicit AndroidSurfaceTexture(unsigned int texName);
~AndroidSurfaceTexture();
int textureID() const { return m_texID; }
jobject object();
@@ -75,4 +75,4 @@ private:
QT_END_NAMESPACE
#endif // JSURFACETEXTURE_H
#endif // ANDROIDSURFACETEXTURE_H


@@ -0,0 +1,19 @@
QT += platformsupport-private
INCLUDEPATH += $$PWD
HEADERS += \
$$PWD/androidmediaplayer.h \
$$PWD/androidsurfacetexture.h \
$$PWD/androidmediametadataretriever.h \
$$PWD/androidcamera.h \
$$PWD/androidmultimediautils.h \
$$PWD/androidmediarecorder.h
SOURCES += \
$$PWD/androidmediaplayer.cpp \
$$PWD/androidsurfacetexture.cpp \
$$PWD/androidmediametadataretriever.cpp \
$$PWD/androidcamera.cpp \
$$PWD/androidmultimediautils.cpp \
$$PWD/androidmediarecorder.cpp


@@ -1,19 +0,0 @@
QT += platformsupport-private
INCLUDEPATH += $$PWD
HEADERS += \
$$PWD/jmediaplayer.h \
$$PWD/jsurfacetexture.h \
$$PWD/jmediametadataretriever.h \
$$PWD/jcamera.h \
$$PWD/jmultimediautils.h \
$$PWD/jmediarecorder.h
SOURCES += \
$$PWD/jmediaplayer.cpp \
$$PWD/jsurfacetexture.cpp \
$$PWD/jmediametadataretriever.cpp \
$$PWD/jcamera.cpp \
$$PWD/jmultimediautils.cpp \
$$PWD/jmediarecorder.cpp