Merge remote-tracking branch 'origin/stable' into dev
Change-Id: I469f258c4838f87edaedc8620d925a3c537d1619
@@ -713,13 +713,14 @@ QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *byte
 void QVideoSurfaceGstSink::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
 {
     // GStreamer uses nanoseconds, Qt uses microseconds
     qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
     if (startTime >= 0) {
-        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000000));
+        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));

         qint64 duration = GST_BUFFER_DURATION(buffer);
         if (duration >= 0)
-            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000000));
+            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
     }
 }
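Note (illustration only, not part of the patch): GStreamer buffer timestamps are in nanoseconds while QVideoFrame start/end times are in microseconds, so the correct divisor is 1000, not 1000000. A minimal sketch of the conversion, assuming only glib's G_GINT64_CONSTANT macro:

#include <glib.h>

// Hypothetical helper: nanoseconds -> microseconds is a divide by 1000.
static gint64 nsToUs(gint64 ns)
{
    return ns / G_GINT64_CONSTANT(1000);
}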
@@ -369,7 +369,7 @@ void PrivateSoundSource::stateChanged(QAudio::State state)

 qint64 PrivateSoundSource::readData( char* data, qint64 len)
 {
-    if (m_runningCount > 0 && m_playing) {
+    if ((m_runningCount > 0 || m_runningCount == QSoundEffect::Infinite) && m_playing) {

         if (m_sample->state() != QSample::Ready)
             return 0;
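Note (illustration only): QSoundEffect::Infinite is a negative sentinel in the public Loop enum, so a plain m_runningCount > 0 test would stop an infinitely looping effect after its first pass. A sketch of the corrected predicate, with the sentinel value assumed rather than taken from the Qt headers:

// Hypothetical stand-in for the corrected loop check.
static bool shouldKeepFeeding(int runningCount, bool playing)
{
    const int kInfinite = -2; // assumed value of QSoundEffect::Infinite
    return (runningCount > 0 || runningCount == kInfinite) && playing;
}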
@@ -191,8 +191,8 @@ public class QtAndroidMediaPlayer extends MediaPlayer
     @Override
     public void onPrepared(final MediaPlayer mp)
     {
-        onMediaPlayerInfoNative(MEDIA_PLAYER_DURATION, getDuration(), mID);
         onMediaPlayerInfoNative(MEDIA_PLAYER_READY, 0, mID);
+        onMediaPlayerInfoNative(MEDIA_PLAYER_DURATION, getDuration(), mID);
         mPreparing = false;
     }
@@ -98,8 +98,9 @@ QMediaPlayer::MediaStatus QAndroidMediaPlayerControl::mediaStatus() const
 qint64 QAndroidMediaPlayerControl::duration() const
 {
     return (mCurrentMediaStatus == QMediaPlayer::InvalidMedia
-            || mCurrentMediaStatus == QMediaPlayer::NoMedia) ? 0
-                                                             : mMediaPlayer->getDuration();
+            || mCurrentMediaStatus == QMediaPlayer::NoMedia
+            || !mMediaPlayerReady) ? 0
+                                   : mMediaPlayer->getDuration();
 }

 qint64 QAndroidMediaPlayerControl::position() const
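Note (my reading of the change, not authoritative): duration() now also reports 0 until the Android MediaPlayer has signalled MEDIA_PLAYER_READY, since querying the native duration before prepare() completes yields meaningless values. A minimal sketch of the gating pattern, with hypothetical names:

// Hypothetical stand-in: gate a query on a backend "ready" flag.
struct PlayerFacade
{
    bool ready = false;
    long long nativeDurationMs = 0;

    long long duration() const
    {
        return ready ? nativeDurationMs : 0; // report 0 until prepared
    }
};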
@@ -330,14 +331,12 @@ void QAndroidMediaPlayerControl::onMediaPlayerInfo(qint32 what, qint32 extra)
         setState(QMediaPlayer::StoppedState);
         break;
     case JMediaPlayer::MEDIA_PLAYER_READY:
-        setMediaStatus(QMediaPlayer::LoadedMedia);
         if (mBuffering) {
             setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
                                                  : QMediaPlayer::BufferingMedia);
         } else {
+            setMediaStatus(QMediaPlayer::LoadedMedia);
-            mBufferPercent = 100;
-            Q_EMIT bufferStatusChanged(mBufferPercent);
-            updateAvailablePlaybackRanges();
+            onBufferChanged(100);
         }
         setAudioAvailable(true);
         mMediaPlayerReady = true;
@@ -402,7 +401,7 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)

 void QAndroidMediaPlayerControl::onBufferChanged(qint32 percent)
 {
-    mBuffering = true;
+    mBuffering = percent != 100;
     mBufferPercent = percent;
     Q_EMIT bufferStatusChanged(mBufferPercent);
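Note (illustration only): deriving mBuffering from the reported percentage, instead of latching it to true, means a 100% report clears the buffering flag immediately, so a subsequent MEDIA_PLAYER_READY maps to BufferedMedia rather than BufferingMedia. A small sketch with hypothetical names:

// Hypothetical stand-in for the corrected bookkeeping.
struct BufferState
{
    bool buffering = false;
    int percent = 0;

    void onBufferChanged(int p)
    {
        buffering = (p != 100); // previously forced to true
        percent = p;
    }
};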
@@ -60,13 +60,13 @@ JSurfaceTexture::JSurfaceTexture(unsigned int texName)
     , QJNIObject(g_qtSurfaceTextureClass, "(I)V", jint(texName))
     , m_texID(int(texName))
 {
-    if (m_jobject)
+    if (isValid())
         g_objectMap.insert(int(texName), this);
 }

 JSurfaceTexture::~JSurfaceTexture()
 {
-    if (m_jobject)
+    if (isValid())
         g_objectMap.remove(m_texID);
 }
@@ -50,6 +50,7 @@
 #include <qabstractvideosurface.h>
 #include <qthread.h>
 #include <qcoreapplication.h>
+#include <qmath.h>
 #include <QtCore/qdebug.h>
 #include <d3d9.h>
 #include <dshow.h>
@@ -325,7 +326,7 @@ HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep)
         // Adjust the sleep time for the clock rate. (The presentation clock runs
         // at m_fRate, but sleeping uses the system clock.)
         if (m_playbackRate != 0)
-            nextSleep = (LONG)(nextSleep / fabsf(m_playbackRate));
+            nextSleep = (LONG)(nextSleep / qFabs(m_playbackRate));

         // Don't present yet.
         presentNow = false;
@@ -987,7 +988,7 @@ HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *neares
     // Note: We have no minimum rate (that is, we support anything down to 0).
     maxRate = getMaxRate(thin);

-    if (fabsf(rate) > maxRate) {
+    if (qFabs(rate) > maxRate) {
         // The (absolute) requested rate exceeds the maximum rate.
         hr = MF_E_UNSUPPORTED_RATE;
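Note (illustration only): qFabs() from <QtCore/qmath.h> is an inline wrapper over std::fabs() operating on qreal, so it behaves the same whether qreal is float or double, which fabsf() does not guarantee. A sketch, assuming a Qt build environment:

#include <QtCore/qmath.h>

// Hypothetical helper mirroring the comparison used in IsRateSupported().
static bool rateWithinLimit(float rate, float maxRate)
{
    return qFabs(rate) <= maxRate;
}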
@@ -288,9 +288,21 @@ void D3DPresentEngine::presentSample(void *opaque, qint64)
     }

     if (surface && updateTexture(surface)) {
-        m_surface->present(QVideoFrame(new TextureVideoBuffer(m_glTexture),
-                                       m_surfaceFormat.frameSize(),
-                                       m_surfaceFormat.pixelFormat()));
+        QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
+                                        m_surfaceFormat.frameSize(),
+                                        m_surfaceFormat.pixelFormat());
+
+        // WMF uses 100-nanosecond units, Qt uses microseconds
+        LONGLONG startTime = -1;
+        if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
+            frame.setStartTime(startTime * 0.1);
+
+            LONGLONG duration = -1;
+            if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+                frame.setEndTime((startTime + duration) * 0.1);
+        }
+
+        m_surface->present(frame);
     }

 done:
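Note (illustration only): Media Foundation sample times are in 100-nanosecond units and QVideoFrame expects microseconds, hence the * 0.1 factor above (10 units of 100 ns per microsecond). An integer-only sketch that expresses the same intent:

// Hypothetical helper: 100-nanosecond units -> microseconds.
static long long mfTimeToUs(long long hundredNs)
{
    return hundredNs / 10;
}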
@@ -632,13 +632,14 @@ QVideoFrame MFTransform::makeVideoFrame()
         // That is why we copy data from IMFMediaBuffer here.
         frame = QVideoFrame(new QMemoryVideoBuffer(array, m_bytesPerLine), m_format.frameSize(), m_format.pixelFormat());

+        // WMF uses 100-nanosecond units, Qt uses microseconds
         LONGLONG startTime = -1;
         if (SUCCEEDED(m_sample->GetSampleTime(&startTime))) {
-            frame.setStartTime(startTime);
+            frame.setStartTime(startTime * 0.1);

             LONGLONG duration = -1;
             if (SUCCEEDED(m_sample->GetSampleDuration(&duration)))
-                frame.setEndTime(startTime + duration);
+                frame.setEndTime((startTime + duration) * 0.1);
         }
     } while (false);
@@ -1918,19 +1918,17 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
            changeStatus(QMediaPlayer::BufferedMedia);
            emit bufferStatusChanged(bufferStatus());
        break;
    case MEEndOfPresentation:
-        stop();
+        changeStatus(QMediaPlayer::EndOfMedia);
+        m_varStart.vt = VT_I8;
+        //keep reporting the final position after end of media
+        m_varStart.hVal.QuadPart = m_duration;
        break;
    case MESessionEnded:
        m_pendingState = NoPending;
        m_state.command = CmdStop;
        m_state.prevCmd = CmdNone;
        m_request.command = CmdNone;
        m_request.prevCmd = CmdNone;
-
-        changeStatus(QMediaPlayer::EndOfMedia);
-        m_varStart.vt = VT_I8;
-        //keep reporting the final position after end of media
-        m_varStart.hVal.QuadPart = m_duration;
        break;
    case MEEndOfPresentationSegment:
        break;
@@ -1993,6 +1991,8 @@ void MFPlayerSession::handleSessionEvent(IMFMediaEvent *sessionEvent)
            }
        }
        break;
+    default:
+        break;
    }

    sessionEvent->Release();
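Note (illustration only): pinning m_varStart to the media duration is what keeps the reported playback position at the end of the stream after EndOfMedia, presumably because the stored start-position variant is what position() falls back to once the session is no longer running. A sketch of the PROPVARIANT setup used above:

#include <windows.h>
#include <propidl.h>

// Hypothetical helper: store a 64-bit position (in MF's 100 ns units)
// as the session's start-position variant.
static void pinStartPosition(PROPVARIANT &varStart, LONGLONG positionHns)
{
    varStart.vt = VT_I8;                 // 64-bit signed integer variant
    varStart.hVal.QuadPart = positionHns;
}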
@@ -254,6 +254,8 @@ namespace
        , m_workQueueCB(this, &MediaStream::onDispatchWorkItem)
        , m_finalizeResult(0)
        , m_scheduledBuffer(0)
+        , m_bufferStartTime(-1)
+        , m_bufferDuration(-1)
        , m_presentationClock(0)
        , m_currentMediaType(0)
        , m_prerolling(false)
@@ -839,10 +841,13 @@ namespace
        QMutexLocker locker(&m_mutex);
        if (!m_scheduledBuffer)
            return;
-        m_surface->present(QVideoFrame(
-                new MediaSampleVideoBuffer(m_scheduledBuffer, m_bytesPerLine),
-                m_surfaceFormat.frameSize(),
-                m_surfaceFormat.pixelFormat()));
+        QVideoFrame frame = QVideoFrame(
+                new MediaSampleVideoBuffer(m_scheduledBuffer, m_bytesPerLine),
+                m_surfaceFormat.frameSize(),
+                m_surfaceFormat.pixelFormat());
+        frame.setStartTime(m_bufferStartTime * 0.1);
+        frame.setEndTime((m_bufferStartTime + m_bufferDuration) * 0.1);
+        m_surface->present(frame);
        m_scheduledBuffer->Release();
        m_scheduledBuffer = NULL;
        if (m_rate != 0)
@@ -1309,8 +1314,10 @@ namespace

    HRESULT processSampleData(IMFSample *pSample)
    {
-        LONGLONG time;
+        LONGLONG time, duration = -1;
        HRESULT hr = pSample->GetSampleTime(&time);
+        if (SUCCEEDED(hr))
+            pSample->GetSampleDuration(&duration);

        if (m_prerolling) {
            if (SUCCEEDED(hr) && time >= m_prerollTargetTime) {
@@ -1320,6 +1327,7 @@ namespace
                SampleBuffer sb;
                sb.m_buffer = pBuffer;
                sb.m_time = time;
+                sb.m_duration = duration;
                m_bufferCache.push_back(sb);
                endPreroll(S_OK);
            }
@@ -1336,6 +1344,7 @@ namespace
            SampleBuffer sb;
            sb.m_buffer = pBuffer;
            sb.m_time = time;
+            sb.m_duration = duration;
            m_bufferCache.push_back(sb);
        }
        if (m_rate == 0)
@@ -1351,6 +1360,7 @@ namespace
    public:
        IMFMediaBuffer *m_buffer;
        LONGLONG m_time;
+        LONGLONG m_duration;
    };
    QList<SampleBuffer> m_bufferCache;
    static const int BUFFER_CACHE_SIZE = 2;
@@ -1383,6 +1393,8 @@ namespace
                continue;
            }
            m_scheduledBuffer = sb.m_buffer;
+            m_bufferStartTime = sb.m_time;
+            m_bufferDuration = sb.m_duration;
            QCoreApplication::postEvent(m_rendererControl, new PresentEvent(sb.m_time));
            if (m_rate == 0)
                queueEvent(MEStreamSinkScrubSampleComplete, GUID_NULL, S_OK, NULL);
@@ -1393,6 +1405,8 @@ namespace
            queueEvent(MEStreamSinkRequestSample, GUID_NULL, S_OK, NULL);
        }
        IMFMediaBuffer *m_scheduledBuffer;
+        MFTIME m_bufferStartTime;
+        MFTIME m_bufferDuration;
        IMFPresentationClock *m_presentationClock;
        float m_rate;
    };
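Note (illustration only, hypothetical names): taken together, the MediaStream changes thread each sample's time and duration from processSampleData() through the SampleBuffer cache into the QVideoFrame that is eventually presented. A simplified, self-contained sketch of that pattern:

#include <list>
#include <utility>

// Cache a sample's timing alongside its payload, then stamp the frame
// when the sample is finally presented (times in 100 ns units).
struct CachedSample
{
    long long time = -1;
    long long duration = -1;
};

struct SamplePipeline
{
    std::list<CachedSample> cache;

    void onSample(long long time, long long duration)
    {
        cache.push_back({time, duration});
    }

    // Returns {startUs, endUs} for the next cached sample, in microseconds.
    std::pair<long long, long long> nextFrameTimes()
    {
        const CachedSample s = cache.front();
        cache.pop_front();
        return { s.time / 10, (s.time + s.duration) / 10 };
    }
};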