Merge remote-tracking branch 'origin/stable' into dev

Change-Id: Ia2cf3c55e57d7ac17f02965915fa6933ff2c8cce
Author: Frederik Gladhorn
Date:   2013-07-11 23:26:39 +02:00
10 changed files with 158 additions and 30 deletions

@@ -52,9 +52,9 @@ include(recording/recording.pri)
include(video/video.pri)
ANDROID_BUNDLED_JAR_DEPENDENCIES = \
jar/QtMultimedia-bundled.jar:org.qtproject.qt5.android.multimedia.QtAndroidMediaPlayer
jar/QtMultimedia-bundled.jar
ANDROID_JAR_DEPENDENCIES = \
jar/QtMultimedia.jar:org.qtproject.qt5.android.multimedia.QtAndroidMediaPlayer
jar/QtMultimedia.jar
ANDROID_LIB_DEPENDENCIES = \
plugins/mediaservice/libandroidmediaplayer.so \
lib/libQt5MultimediaQuick_p.so:Qt5Quick

@@ -50,6 +50,9 @@ import android.content.Context;
import android.media.MediaPlayer;
import android.net.Uri;
import android.util.Log;
import java.io.FileDescriptor;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
public class QtAndroidMediaPlayer extends MediaPlayer
{
@@ -335,7 +338,17 @@ public class QtAndroidMediaPlayer extends MediaPlayer
mPreparing = true;
onMediaPlayerInfoNative(MEDIA_PLAYER_PREPARING, 0, mID);
mUri = Uri.parse(path);
setDataSource(mApplicationContext, mUri);
if (mUri.getScheme().compareTo("assets") == 0) {
final String asset = mUri.getPath().substring(1 /* Remove first '/' */);
final AssetManager am = mApplicationContext.getAssets();
final AssetFileDescriptor afd = am.openFd(asset);
final long offset = afd.getStartOffset();
final long length = afd.getLength();
FileDescriptor fd = afd.getFileDescriptor();
setDataSource(fd, offset, length);
} else {
setDataSource(mApplicationContext, mUri);
}
mInitialized = true;
setOnPreparedListener(new MediaPlayerPreparedListener());
prepareAsync();
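
(Note, not part of the diff: the Java change above makes the backend recognize the "assets" scheme and open the file through AssetManager, so media bundled in the APK's assets directory can play without being copied out first. A minimal C++-side usage sketch follows; QMediaPlayer::setMedia() is standard Qt 5 API, while the asset path sounds/click.wav is purely illustrative.)

#include <QMediaPlayer>
#include <QUrl>

void playBundledSound(QMediaPlayer *player)
{
    // "assets:" URLs point into the APK's assets directory; the Java backend
    // strips the leading '/' and resolves the rest with AssetManager.openFd().
    player->setMedia(QUrl("assets:/sounds/click.wav")); // hypothetical asset path
    player->play();
}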

@@ -45,6 +45,12 @@
QT_BEGIN_NAMESPACE
static void textureReadyCallback(void *context)
{
if (context)
reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
}
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
: QMediaPlayerControl(parent),
mMediaPlayer(new JMediaPlayer),
@@ -58,7 +64,8 @@ QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
mVideoAvailable(false),
mBuffering(false),
mMediaPlayerReady(false),
mPendingPosition(-1)
mPendingPosition(-1),
mPendingSetMedia(false)
{
connect(mMediaPlayer, SIGNAL(bufferingUpdate(qint32)),
this, SLOT(onBufferChanged(qint32)));
@@ -74,6 +81,7 @@ QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
QAndroidMediaPlayerControl::~QAndroidMediaPlayerControl()
{
mMediaPlayer->release();
delete mMediaPlayer;
}
@@ -207,6 +215,13 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
mMediaContent = mediaContent;
mMediaStream = stream;
if (mVideoOutput && !mMediaPlayer->display()) {
// if a video output is set but the video texture is not ready, delay loading the media
// since it can cause problems on some hardware
mPendingSetMedia = true;
return;
}
const QString uri = mediaContent.canonicalUrl().toString();
if (!uri.isEmpty())
@@ -230,6 +245,13 @@ void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput
mVideoOutput->stop();
mVideoOutput = videoOutput;
if (mVideoOutput && !mMediaPlayer->display()) {
if (mVideoOutput->isTextureReady())
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
else
mVideoOutput->setTextureReadyCallback(textureReadyCallback, this);
}
}
void QAndroidMediaPlayerControl::play()
@@ -238,7 +260,8 @@ void QAndroidMediaPlayerControl::play()
mPendingState = QMediaPlayer::PlayingState;
if (mCurrentState == QMediaPlayer::StoppedState
&& !mMediaContent.isNull()
&& mCurrentMediaStatus != QMediaPlayer::LoadingMedia) {
&& mCurrentMediaStatus != QMediaPlayer::LoadingMedia
&& !mPendingSetMedia) {
setMedia(mMediaContent, 0);
}
return;
@@ -391,16 +414,23 @@ void QAndroidMediaPlayerControl::onBufferChanged(qint32 percent)
void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
{
if (width == 0 || height == 0)
QSize newSize(width, height);
if (width == 0 || height == 0 || newSize == mVideoSize)
return;
setVideoAvailable(true);
mVideoSize = newSize;
if (mVideoOutput) {
if (!mMediaPlayer->display())
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
if (mMediaPlayer->display())
mVideoOutput->setVideoSize(QSize(width, height));
if (mVideoOutput)
mVideoOutput->setVideoSize(mVideoSize);
}
void QAndroidMediaPlayerControl::onSurfaceTextureReady()
{
if (!mMediaPlayer->display() && mVideoOutput) {
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
flushPendingStates();
}
}
@@ -464,6 +494,9 @@ void QAndroidMediaPlayerControl::setVideoAvailable(bool available)
if (mVideoAvailable == available)
return;
if (!available)
mVideoSize = QSize();
mVideoAvailable = available;
Q_EMIT videoAvailableChanged(mVideoAvailable);
}
@@ -478,6 +511,12 @@ void QAndroidMediaPlayerControl::resetBufferingProgress()
void QAndroidMediaPlayerControl::flushPendingStates()
{
if (mPendingSetMedia) {
setMedia(mMediaContent, 0);
mPendingSetMedia = false;
return;
}
switch (mPendingState) {
case QMediaPlayer::PlayingState:
if (mPendingPosition > -1)
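
(Note, not part of the diff: mPendingSetMedia implements a small deferral pattern: when the video texture is not ready yet, the setMedia() request is recorded and replayed later from flushPendingStates(). The sketch below reduces the idea to a standalone class; DeferredLoader, doLoad() and the member names are invented for illustration.)

#include <QDebug>
#include <QString>

class DeferredLoader
{
public:
    void load(const QString &uri)
    {
        if (!m_surfaceReady) {
            m_pendingUri = uri;   // remember the request...
            m_pendingLoad = true; // ...and bail out until the prerequisite exists
            return;
        }
        doLoad(uri);
    }

    // Called once the prerequisite becomes available, e.g. from a ready callback.
    void flushPending()
    {
        m_surfaceReady = true;
        if (m_pendingLoad) {
            m_pendingLoad = false;
            doLoad(m_pendingUri);
        }
    }

private:
    void doLoad(const QString &uri) { qDebug() << "loading" << uri; }

    QString m_pendingUri;
    bool m_pendingLoad = false;
    bool m_surfaceReady = false;
};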

@@ -44,6 +44,7 @@
#include <qglobal.h>
#include <QMediaPlayerControl>
#include <qsize.h>
QT_BEGIN_NAMESPACE
@@ -75,6 +76,7 @@ public:
void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;
void setVideoOutput(QAndroidVideoOutput *videoOutput);
void onSurfaceTextureReady();
Q_SIGNALS:
void metaDataUpdated();
@@ -105,11 +107,13 @@ private:
int mBufferPercent;
bool mAudioAvailable;
bool mVideoAvailable;
QSize mVideoSize;
bool mBuffering;
QMediaTimeRange mAvailablePlaybackRange;
bool mMediaPlayerReady;
QMediaPlayer::State mPendingState;
qint64 mPendingPosition;
bool mPendingSetMedia;
void setState(QMediaPlayer::State state);
void setMediaStatus(QMediaPlayer::MediaStatus status);

@@ -48,6 +48,8 @@
QT_BEGIN_NAMESPACE
typedef void (*TextureReadyCallback)(void*);
class QAndroidVideoOutput
{
public:
@@ -55,6 +57,10 @@ public:
virtual ~QAndroidVideoOutput() { }
virtual jobject surfaceHolder() = 0;
virtual bool isTextureReady() = 0;
virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
virtual void setVideoSize(const QSize &size) = 0;
virtual void stop() = 0;
};
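
(Note, not part of the diff: TextureReadyCallback is a plain function pointer paired with an opaque void* context, the usual way to route a C-style notification back into a C++ object. The sketch below shows such a callback being registered and fired once; VideoOutputStub, Controller and textureBecameReady() are invented names, not Qt API.)

#include <cstdio>

typedef void (*TextureReadyCallback)(void *context);

class VideoOutputStub
{
public:
    // Store the callback together with the context it should be invoked with.
    void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0)
    {
        m_cb = cb;
        m_ctx = context;
    }

    void textureBecameReady()
    {
        if (!m_cb)
            return;
        TextureReadyCallback cb = m_cb;
        void *ctx = m_ctx;
        m_cb = 0;  // one-shot, mirroring how the renderer control clears it
        m_ctx = 0;
        cb(ctx);
    }

private:
    TextureReadyCallback m_cb = 0;
    void *m_ctx = 0;
};

struct Controller
{
    static void onTextureReady(void *context)
    {
        static_cast<Controller *>(context)->ready = true;
    }
    bool ready = false;
};

int main()
{
    VideoOutputStub output;
    Controller controller;
    output.setTextureReadyCallback(&Controller::onTextureReady, &controller);
    output.textureBecameReady();
    std::printf("ready: %d\n", controller.ready);
    return 0;
}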

@@ -50,6 +50,7 @@
#include <QVideoSurfaceFormat>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <qevent.h>
QT_BEGIN_NAMESPACE
@@ -134,6 +135,8 @@ QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
, m_surfaceTexture(0)
, m_surfaceHolder(0)
, m_externalTex(0)
, m_textureReadyCallback(0)
, m_textureReadyContext(0)
{
}
@@ -177,42 +180,66 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
if (surface == m_surface)
return;
if (m_surface && m_surface->isActive())
if (m_surface && m_surface->isActive()) {
m_surface->stop();
m_surface->removeEventFilter(this);
}
m_surface = surface;
if (m_surface)
if (m_surface) {
m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
m_surface->installEventFilter(this);
}
}
jobject QAndroidVideoRendererControl::surfaceHolder()
bool QAndroidVideoRendererControl::isTextureReady()
{
if (m_surfaceHolder)
return m_surfaceHolder->object();
return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid());
}
void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
{
m_textureReadyCallback = cb;
m_textureReadyContext = context;
}
bool QAndroidVideoRendererControl::initSurfaceTexture()
{
if (m_surfaceTexture)
return true;
if (!m_surface)
return false;
QOpenGLContext *currContext = QOpenGLContext::currentContext();
// If we don't have a GL context in the current thread, create one and share it
// with the render thread GL context
if (!currContext && !m_glContext) {
QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
if (!shareContext)
return false;
m_offscreenSurface = new QOffscreenSurface;
QSurfaceFormat format;
format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
m_offscreenSurface->setFormat(format);
m_offscreenSurface->create();
QOpenGLContext *shareContext = 0;
if (m_surface)
shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
m_glContext = new QOpenGLContext;
m_glContext->setFormat(m_offscreenSurface->requestedFormat());
if (shareContext)
m_glContext->setShareContext(shareContext);
if (!m_glContext->create())
return 0;
if (!m_glContext->create()) {
delete m_glContext;
m_glContext = 0;
delete m_offscreenSurface;
m_offscreenSurface = 0;
return false;
}
// if sharing contexts is not supported, fallback to image rendering and send the bits
// to the video surface
@@ -228,7 +255,21 @@ jobject QAndroidVideoRendererControl::surfaceHolder()
if (m_surfaceTexture->isValid()) {
connect(m_surfaceTexture, SIGNAL(frameAvailable()), this, SLOT(onFrameAvailable()));
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
glDeleteTextures(1, &m_externalTex);
}
return m_surfaceTexture != 0;
}
jobject QAndroidVideoRendererControl::surfaceHolder()
{
if (!initSurfaceTexture())
return 0;
if (!m_surfaceHolder) {
QJNILocalRef<jobject> surfaceTex = m_surfaceTexture->surfaceTexture();
m_androidSurface = new QJNIObject("android/view/Surface",
@@ -236,16 +277,9 @@ jobject QAndroidVideoRendererControl::surfaceHolder()
surfaceTex.object());
m_surfaceHolder = new JSurfaceTextureHolder(m_androidSurface->object());
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
glDeleteTextures(1, &m_externalTex);
}
if (m_surfaceHolder)
return m_surfaceHolder->object();
return 0;
return m_surfaceHolder->object();
}
void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
@@ -373,4 +407,18 @@ void QAndroidVideoRendererControl::createGLResources()
}
}
bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
{
if (e->type() == QEvent::DynamicPropertyChange) {
QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
m_textureReadyCallback(m_textureReadyContext);
m_textureReadyCallback = 0;
m_textureReadyContext = 0;
}
}
return false;
}
QT_END_NAMESPACE
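
(Note, not part of the diff: initSurfaceTexture() above follows the standard Qt pattern for creating an extra OpenGL context that shares resources with a context owned by another thread: create a QOffscreenSurface, create a QOpenGLContext with setShareContext(), then makeCurrent() on the offscreen surface. A condensed standalone sketch with error handling trimmed; createSharedContext() and shareWith are invented names:)

#include <QOffscreenSurface>
#include <QOpenGLContext>
#include <QSurfaceFormat>

// Create a context sharing resources with 'shareWith' and make it current on a
// freshly created offscreen surface. Returns 0 on failure, cleaning up as it goes.
static QOpenGLContext *createSharedContext(QOpenGLContext *shareWith,
                                           QOffscreenSurface **outSurface)
{
    QOffscreenSurface *surface = new QOffscreenSurface;
    surface->setFormat(shareWith ? shareWith->format() : QSurfaceFormat());
    surface->create();

    QOpenGLContext *context = new QOpenGLContext;
    context->setFormat(surface->requestedFormat());
    if (shareWith)
        context->setShareContext(shareWith);

    if (!context->create() || !context->makeCurrent(surface)) {
        delete context;
        delete surface;
        return 0;
    }

    *outSurface = surface;
    return context;
}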

@@ -65,14 +65,18 @@ public:
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;
jobject surfaceHolder() Q_DECL_OVERRIDE;
bool isTextureReady() Q_DECL_OVERRIDE;
void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE;
bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE;
private Q_SLOTS:
void onFrameAvailable();
private:
void setupSurface();
bool initSurfaceTexture();
void renderFrameToFbo();
void createGLResources();
@@ -88,6 +92,9 @@ private:
JSurfaceTexture *m_surfaceTexture;
JSurfaceTextureHolder *m_surfaceHolder;
uint m_externalTex;
TextureReadyCallback m_textureReadyCallback;
void *m_textureReadyContext;
};
QT_END_NAMESPACE
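
(Note, not part of the diff: the eventFilter() override declared here relies on Qt delivering a QDynamicPropertyChangeEvent whenever a dynamic property such as "GLContext" is set on the watched object. A minimal illustration of that mechanism; PropertyWatcher and the integer property value are made up for the example:)

#include <QCoreApplication>
#include <QDebug>
#include <QEvent>
#include <QObject>

class PropertyWatcher : public QObject
{
protected:
    bool eventFilter(QObject *watched, QEvent *e) override
    {
        if (e->type() == QEvent::DynamicPropertyChange) {
            QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent *>(e);
            if (event->propertyName() == "GLContext")
                qDebug() << "GLContext set on" << watched;
        }
        return false; // never swallow the event, same as the renderer control
    }
};

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);
    QObject target;
    PropertyWatcher watcher;
    target.installEventFilter(&watcher);
    target.setProperty("GLContext", 42); // undeclared property -> DynamicPropertyChange event
    return 0;
}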

@@ -82,6 +82,11 @@ JMediaPlayer::~JMediaPlayer()
mplayers.remove(mId);
}
void JMediaPlayer::release()
{
callMethod<void>("release");
}
void JMediaPlayer::onError(qint32 what, qint32 extra)
{
Q_EMIT error(what, extra);

@@ -89,6 +89,8 @@ public:
MEDIA_PLAYER_FINISHED = 6
};
void release();
int getCurrentPosition();
int getDuration();
bool isPlaying();

@@ -578,6 +578,9 @@ void DirectShowPlayerService::doReleaseGraph(QMutexLocker *locker)
control->Release();
}
//release m_headerInfo -> decrease ref counter of m_source
m_metaDataControl->updateGraph(0, 0);
if (m_source) {
m_source->Release();
m_source = 0;
@@ -627,6 +630,7 @@ int DirectShowPlayerService::findStreamTypes(IBaseFilter *source) const
}
}
}
pins->Release();
}
filter->Release();
}
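
(Note, not part of the diff: the pins->Release() line above fixes a leaked COM reference; every interface obtained from a DirectShow call, including enumerators, has to be released exactly once. A short sketch of that discipline using the standard DirectShow API; countPins() is an invented helper:)

#include <dshow.h>

// Count the pins of a filter, releasing every interface obtained along the way.
HRESULT countPins(IBaseFilter *filter, int *count)
{
    *count = 0;
    IEnumPins *pins = NULL;
    HRESULT hr = filter->EnumPins(&pins);
    if (FAILED(hr))
        return hr;

    IPin *pin = NULL;
    while (pins->Next(1, &pin, NULL) == S_OK) {
        ++(*count);
        pin->Release();  // release each enumerated pin
    }
    pins->Release();     // release the enumerator itself (the leak fixed above)
    return S_OK;
}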