WMF and GStreamer: fixed incorrect frame startTime and endTime.
The QVideoFrame documentation explicitly says that the time is in microseconds; however, the GStreamer backend was setting the time in milliseconds and the WMF backend in 100-nanosecond units. With WMF, the time was missing from the QVideoFrame when presenting it to the video surface.

Task-number: QTBUG-31731
Change-Id: I0638d2abf8eed25b3a531db67c19a18703e5b630
Reviewed-by: Andy Nichols <andy.nichols@digia.com>
committed by The Qt Project
parent ca769ba264
commit a2f078f108
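For context, QVideoFrame::setStartTime() and QVideoFrame::setEndTime() expect times in microseconds, so each backend has to convert from its native clock: WMF sample times arrive in 100-nanosecond units and GStreamer buffer timestamps in nanoseconds. A minimal, purely illustrative sketch of the conversions involved (the helper names below are not part of the patch):

    #include <QtGlobal>

    // Illustrative helpers only -- not code from this commit.
    // WMF (IMFSample::GetSampleTime) reports time in 100-nanosecond units.
    inline qint64 wmfTimeToUs(qint64 hundredNanoseconds)
    {
        return hundredNanoseconds / 10;   // 100 ns units -> microseconds
    }

    // GStreamer buffer timestamps are in nanoseconds.
    inline qint64 gstTimeToUs(qint64 nanoseconds)
    {
        return nanoseconds / 1000;        // ns -> microseconds
    }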
@@ -288,9 +288,21 @@ void D3DPresentEngine::presentSample(void *opaque, qint64)
     }
 
     if (surface && updateTexture(surface)) {
-        m_surface->present(QVideoFrame(new TextureVideoBuffer(m_glTexture),
-                                       m_surfaceFormat.frameSize(),
-                                       m_surfaceFormat.pixelFormat()));
+        QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
+                                        m_surfaceFormat.frameSize(),
+                                        m_surfaceFormat.pixelFormat());
+
+        // WMF uses 100-nanosecond units, Qt uses microseconds
+        LONGLONG startTime = -1;
+        if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
+            frame.setStartTime(startTime * 0.1);
+
+            LONGLONG duration = -1;
+            if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+                frame.setEndTime((startTime + duration) * 0.1);
+        }
+
+        m_surface->present(frame);
     }
 
 done:
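The hunk above covers only the WMF backend; the GStreamer half of the fix is not shown here. As a rough sketch only (not the patched backend code), applying the same idea on the GStreamer side, where buffer timestamps are in nanoseconds, could look like this, assuming `frame` is the QVideoFrame about to be presented and `buffer` is the GstBuffer it was built from:

    #include <gst/gst.h>
    #include <QVideoFrame>

    // Sketch only -- not the code from this commit. GStreamer timestamps are
    // nanoseconds, while QVideoFrame expects microseconds, so divide by 1000
    // (the old backend was passing milliseconds instead).
    static void setFrameTimesFromGstBuffer(QVideoFrame &frame, GstBuffer *buffer)
    {
        if (GST_BUFFER_TIMESTAMP_IS_VALID(buffer)) {
            const qint64 startUs = GST_BUFFER_TIMESTAMP(buffer) / 1000;
            frame.setStartTime(startUs);
            if (GST_BUFFER_DURATION_IS_VALID(buffer))
                frame.setEndTime(startUs + GST_BUFFER_DURATION(buffer) / 1000);
        }
    }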