WMF and GStreamer: fixed incorrect frame startTime and endTime.
The QVideoFrame documentation explicitly says that the time is in microseconds; however, the GStreamer backend was setting the time in milliseconds and the WMF backend in 100-nanosecond units. With WMF, the time was missing from the QVideoFrame when presenting it to the video surface.

Task-number: QTBUG-31731
Change-Id: I0638d2abf8eed25b3a531db67c19a18703e5b630
Reviewed-by: Andy Nichols <andy.nichols@digia.com>
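For reference, the unit relationships the change depends on are: GStreamer buffer timestamps are in nanoseconds (divide by 1,000 for microseconds) and WMF sample times are in 100-nanosecond units (divide by 10 for microseconds). A standalone sketch of these conversions, with helper names invented for illustration rather than taken from the patch:

    // Illustrative only, not part of this commit: the two conversions to the
    // microseconds that QVideoFrame::setStartTime()/setEndTime() expect.
    #include <cstdint>
    #include <cassert>

    // GStreamer buffer timestamps are in nanoseconds.
    int64_t gstNanosecondsToUs(int64_t ns) { return ns / 1000; }

    // WMF sample times are in 100-nanosecond units.
    int64_t wmfHundredNsToUs(int64_t hns) { return hns / 10; }

    int main()
    {
        // One second of media time in each backend's native unit.
        assert(gstNanosecondsToUs(1000000000) == 1000000);
        assert(wmfHundredNsToUs(10000000) == 1000000);
        return 0;
    }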
committed by The Qt Project
parent ca769ba264
commit a2f078f108
@@ -713,13 +713,14 @@ QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *byte
 
 void QVideoSurfaceGstSink::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
 {
+    // GStreamer uses nanoseconds, Qt uses microseconds
     qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
     if (startTime >= 0) {
-        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000000));
+        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));
 
         qint64 duration = GST_BUFFER_DURATION(buffer);
         if (duration >= 0)
-            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000000));
+            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
     }
 }
 
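To see why the old GStreamer divisor was wrong: GST_BUFFER_TIMESTAMP() is in nanoseconds, so dividing by 1,000,000 yields milliseconds, while the QVideoFrame API expects microseconds. A standalone arithmetic check (not code from the patch), using a frame 2.5 seconds into the stream:

    #include <cstdint>
    #include <cassert>

    int main()
    {
        const int64_t ns = 2500000000LL;   // GStreamer timestamp in nanoseconds
        assert(ns / 1000000 == 2500);      // old divisor: milliseconds
        assert(ns / 1000 == 2500000);      // new divisor: microseconds, as QVideoFrame expects
        return 0;
    }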
@@ -288,9 +288,21 @@ void D3DPresentEngine::presentSample(void *opaque, qint64)
     }
 
     if (surface && updateTexture(surface)) {
-        m_surface->present(QVideoFrame(new TextureVideoBuffer(m_glTexture),
+        QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
                                        m_surfaceFormat.frameSize(),
-                                       m_surfaceFormat.pixelFormat()));
+                                       m_surfaceFormat.pixelFormat());
+
+        // WMF uses 100-nanosecond units, Qt uses microseconds
+        LONGLONG startTime = -1;
+        if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
+            frame.setStartTime(startTime * 0.1);
+
+            LONGLONG duration = -1;
+            if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+                frame.setEndTime((startTime + duration) * 0.1);
+        }
+
+        m_surface->present(frame);
     }
 
 done:
@@ -632,13 +632,14 @@ QVideoFrame MFTransform::makeVideoFrame()
         // That is why we copy data from IMFMediaBuffer here.
         frame = QVideoFrame(new QMemoryVideoBuffer(array, m_bytesPerLine), m_format.frameSize(), m_format.pixelFormat());
 
+        // WMF uses 100-nanosecond units, Qt uses microseconds
         LONGLONG startTime = -1;
         if (SUCCEEDED(m_sample->GetSampleTime(&startTime))) {
-            frame.setStartTime(startTime);
+            frame.setStartTime(startTime * 0.1);
 
             LONGLONG duration = -1;
             if (SUCCEEDED(m_sample->GetSampleDuration(&duration)))
-                frame.setEndTime(startTime + duration);
+                frame.setEndTime((startTime + duration) * 0.1);
         }
     } while (false);
 
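In both WMF hunks above, the 100-nanosecond sample time is scaled by 0.1 to obtain microseconds. Since QVideoFrame::setStartTime() and setEndTime() take a qint64, the floating-point result is truncated back to an integer at the call. A standalone check of that arithmetic (not code from the patch), for a sample 40 ms into the stream:

    #include <cstdint>
    #include <cassert>

    int main()
    {
        const int64_t hns = 400000;                           // WMF sample time in 100-ns units
        const int64_t us = static_cast<int64_t>(hns * 0.1);   // what setStartTime() ends up storing
        assert(us == 40000);                                  // 40 ms expressed in microseconds
        return 0;
    }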
@@ -254,6 +254,8 @@ namespace
         , m_workQueueCB(this, &MediaStream::onDispatchWorkItem)
         , m_finalizeResult(0)
         , m_scheduledBuffer(0)
+        , m_bufferStartTime(-1)
+        , m_bufferDuration(-1)
         , m_presentationClock(0)
         , m_currentMediaType(0)
         , m_prerolling(false)
@@ -839,10 +841,13 @@ namespace
             QMutexLocker locker(&m_mutex);
             if (!m_scheduledBuffer)
                 return;
-            m_surface->present(QVideoFrame(
+            QVideoFrame frame = QVideoFrame(
                 new MediaSampleVideoBuffer(m_scheduledBuffer, m_bytesPerLine),
                 m_surfaceFormat.frameSize(),
-                m_surfaceFormat.pixelFormat()));
+                m_surfaceFormat.pixelFormat());
+            frame.setStartTime(m_bufferStartTime * 0.1);
+            frame.setEndTime((m_bufferStartTime + m_bufferDuration) * 0.1);
+            m_surface->present(frame);
             m_scheduledBuffer->Release();
             m_scheduledBuffer = NULL;
             if (m_rate != 0)
@@ -1309,8 +1314,10 @@ namespace
 
         HRESULT processSampleData(IMFSample *pSample)
         {
-            LONGLONG time;
+            LONGLONG time, duration = -1;
            HRESULT hr = pSample->GetSampleTime(&time);
+            if (SUCCEEDED(hr))
+                pSample->GetSampleDuration(&duration);
 
             if (m_prerolling) {
                 if (SUCCEEDED(hr) && time >= m_prerollTargetTime) {
@@ -1320,6 +1327,7 @@ namespace
                     SampleBuffer sb;
                     sb.m_buffer = pBuffer;
                     sb.m_time = time;
+                    sb.m_duration = duration;
                     m_bufferCache.push_back(sb);
                     endPreroll(S_OK);
                 }
@@ -1336,6 +1344,7 @@ namespace
                 SampleBuffer sb;
                 sb.m_buffer = pBuffer;
                 sb.m_time = time;
+                sb.m_duration = duration;
                 m_bufferCache.push_back(sb);
             }
             if (m_rate == 0)
@@ -1351,6 +1360,7 @@ namespace
         public:
             IMFMediaBuffer *m_buffer;
             LONGLONG m_time;
+            LONGLONG m_duration;
         };
         QList<SampleBuffer> m_bufferCache;
         static const int BUFFER_CACHE_SIZE = 2;
@@ -1383,6 +1393,8 @@ namespace
                     continue;
                 }
                 m_scheduledBuffer = sb.m_buffer;
+                m_bufferStartTime = sb.m_time;
+                m_bufferDuration = sb.m_duration;
                 QCoreApplication::postEvent(m_rendererControl, new PresentEvent(sb.m_time));
                 if (m_rate == 0)
                     queueEvent(MEStreamSinkScrubSampleComplete, GUID_NULL, S_OK, NULL);
@@ -1393,6 +1405,8 @@ namespace
                 queueEvent(MEStreamSinkRequestSample, GUID_NULL, S_OK, NULL);
         }
         IMFMediaBuffer *m_scheduledBuffer;
+        MFTIME m_bufferStartTime;
+        MFTIME m_bufferDuration;
         IMFPresentationClock *m_presentationClock;
         float m_rate;
     };
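The remaining hunks give MediaStream the bookkeeping needed to stamp the presented frame: each cached SampleBuffer now carries its duration, the scheduled buffer's start time and duration are remembered in m_bufferStartTime/m_bufferDuration, and both are converted to microseconds when the QVideoFrame is presented. A condensed sketch of that flow with simplified stand-in types (the real code uses IMFMediaBuffer, MFTIME and QVideoFrame); illustrative only, not the plugin's code:

    #include <cstdint>

    struct Buffer { };                      // stands in for IMFMediaBuffer

    struct SampleBuffer {
        Buffer *buffer;
        int64_t time;                       // 100-ns units, from IMFSample::GetSampleTime()
        int64_t duration;                   // 100-ns units, from IMFSample::GetSampleDuration()
    };

    struct Frame {                          // stands in for QVideoFrame
        int64_t startTime = -1;             // microseconds
        int64_t endTime = -1;               // microseconds
    };

    class Stream {
    public:
        void schedule(const SampleBuffer &sb)
        {
            m_scheduledBuffer = sb.buffer;
            m_bufferStartTime = sb.time;    // remembered until presentation
            m_bufferDuration = sb.duration;
        }
        Frame present() const
        {
            Frame f;
            f.startTime = m_bufferStartTime / 10;                     // 100-ns -> microseconds
            f.endTime = (m_bufferStartTime + m_bufferDuration) / 10;
            return f;
        }
    private:
        Buffer *m_scheduledBuffer = nullptr;
        int64_t m_bufferStartTime = -1;
        int64_t m_bufferDuration = -1;
    };

    int main()
    {
        Buffer b;
        Stream s;
        s.schedule({&b, 400000, 333333});   // 40 ms start, ~33.3 ms duration
        const Frame f = s.present();
        return (f.startTime == 40000 && f.endTime == 73333) ? 0 : 1;
    }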