Merge remote-tracking branch 'origin/5.5' into 5.6

Change-Id: I1373622a6d1fd0e2d35df2da79860a373056387f
Author: Liang Qi
Date:   2015-10-02 13:48:43 +02:00
14 changed files with 489 additions and 132 deletions

View File

@@ -63,6 +63,8 @@ public:
     AVCaptureVideoDataOutput *videoDataOutput() const;
+    bool supportsTextures() const { return m_supportsTextures; }
 #ifdef Q_OS_IOS
     AVFCaptureFramesDelegate *captureDelegate() const;
     void resetCaptureDelegate() const;
@@ -81,11 +83,18 @@ private:
     AVFCameraSession *m_cameraSession;
     AVCaptureVideoDataOutput *m_videoDataOutput;
+    bool m_supportsTextures;
     bool m_needsHorizontalMirroring;
+#ifdef Q_OS_IOS
+    CVOpenGLESTextureCacheRef m_textureCache;
+#endif
     QVideoFrame m_lastViewfinderFrame;
     QMutex m_vfMutex;
     dispatch_queue_t m_delegateQueue;
+    friend class CVImageVideoBuffer;
 };

 QT_END_NAMESPACE

View File

@@ -38,6 +38,10 @@
#include "avfcameraservice.h"
#include "avfcameradebug.h"
#ifdef Q_OS_IOS
#include <QtGui/qopengl.h>
#endif
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qabstractvideobuffer.h>
@@ -45,20 +49,32 @@
 QT_USE_NAMESPACE

-class CVPixelBufferVideoBuffer : public QAbstractPlanarVideoBuffer
+class CVImageVideoBuffer : public QAbstractPlanarVideoBuffer
 {
-    friend class CVPixelBufferVideoBufferPrivate;
 public:
-    CVPixelBufferVideoBuffer(CVPixelBufferRef buffer)
+    CVImageVideoBuffer(CVImageBufferRef buffer, AVFCameraRendererControl *renderer)
+#ifndef Q_OS_IOS
         : QAbstractPlanarVideoBuffer(NoHandle)
+#else
+        : QAbstractPlanarVideoBuffer(renderer->supportsTextures()
+                                     && CVPixelBufferGetPixelFormatType(buffer) == kCVPixelFormatType_32BGRA
+                                     ? GLTextureHandle : NoHandle)
+        , m_texture(0)
+#endif
         , m_buffer(buffer)
+        , m_renderer(renderer)
         , m_mode(NotMapped)
     {
         CVPixelBufferRetain(m_buffer);
     }

-    virtual ~CVPixelBufferVideoBuffer()
+    ~CVImageVideoBuffer()
     {
+        CVImageVideoBuffer::unmap();
+#ifdef Q_OS_IOS
+        if (m_texture)
+            CFRelease(m_texture);
+#endif
         CVPixelBufferRelease(m_buffer);
     }
@@ -78,7 +94,9 @@ public:
         // For a bi-planar format we have to set the parameters correctly:
         if (mode != QAbstractVideoBuffer::NotMapped && m_mode == QAbstractVideoBuffer::NotMapped) {
-            CVPixelBufferLockBaseAddress(m_buffer, 0);
+            CVPixelBufferLockBaseAddress(m_buffer, mode == QAbstractVideoBuffer::ReadOnly
+                                                       ? kCVPixelBufferLock_ReadOnly
+                                                       : 0);

             if (numBytes)
                 *numBytes = CVPixelBufferGetDataSize(m_buffer);
@@ -103,8 +121,9 @@ public:
     uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
     {
         if (mode != NotMapped && m_mode == NotMapped) {
-            CVPixelBufferLockBaseAddress(m_buffer, 0);
+            CVPixelBufferLockBaseAddress(m_buffer, mode == QAbstractVideoBuffer::ReadOnly
+                                                       ? kCVPixelBufferLock_ReadOnly
+                                                       : 0);

             if (numBytes)
                 *numBytes = CVPixelBufferGetDataSize(m_buffer);
@@ -121,13 +140,63 @@ public:
     void unmap()
     {
         if (m_mode != NotMapped) {
+            CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QAbstractVideoBuffer::ReadOnly
+                                                         ? kCVPixelBufferLock_ReadOnly
+                                                         : 0);
             m_mode = NotMapped;
-            CVPixelBufferUnlockBaseAddress(m_buffer, 0);
         }
     }

+    QVariant handle() const
+    {
+#ifdef Q_OS_IOS
+        // Called from the render thread, so there is a current OpenGL context
+        if (!m_renderer->m_textureCache) {
+            CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
+                                                        NULL,
+                                                        [EAGLContext currentContext],
+                                                        NULL,
+                                                        &m_renderer->m_textureCache);
+            if (err != kCVReturnSuccess)
+                qWarning("Error creating texture cache");
+        }
+
+        if (m_renderer->m_textureCache && !m_texture) {
+            CVOpenGLESTextureCacheFlush(m_renderer->m_textureCache, 0);
+
+            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                                        m_renderer->m_textureCache,
+                                                                        m_buffer,
+                                                                        NULL,
+                                                                        GL_TEXTURE_2D,
+                                                                        GL_RGBA,
+                                                                        CVPixelBufferGetWidth(m_buffer),
+                                                                        CVPixelBufferGetHeight(m_buffer),
+                                                                        GL_BGRA,
+                                                                        GL_UNSIGNED_BYTE,
+                                                                        0,
+                                                                        &m_texture);
+            if (err != kCVReturnSuccess)
+                qWarning("Error creating texture from buffer");
+        }
+
+        if (m_texture)
+            return CVOpenGLESTextureGetName(m_texture);
+        else
+            return 0;
+#else
+        return QVariant();
+#endif
+    }
+
 private:
-    CVPixelBufferRef m_buffer;
+#ifdef Q_OS_IOS
+    mutable CVOpenGLESTextureRef m_texture;
+#endif
+    CVImageBufferRef m_buffer;
+    AVFCameraRendererControl *m_renderer;
     MapMode m_mode;
 };
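
On the consumer side, a frame wrapping this buffer either carries a GL texture name or must be mapped. A minimal sketch of how a surface might branch on that (the QVideoFrame calls are Qt 5 API; drawTexture() and uploadPixels() are hypothetical helpers):

    #include <QVideoFrame>
    #include <QAbstractVideoBuffer>

    void drawTexture(uint textureId);                 // hypothetical GL helper
    void uploadPixels(const uchar *bits, int stride); // hypothetical helper

    void presentFrame(QVideoFrame &frame)
    {
        if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
            // Zero-copy path: handle() returns the name of the texture
            // created by CVOpenGLESTextureCacheCreateTextureFromImage() above.
            drawTexture(frame.handle().toUInt());
        } else if (frame.map(QAbstractVideoBuffer::ReadOnly)) {
            // Software path: takes the kCVPixelBufferLock_ReadOnly lock added above.
            uploadPixels(frame.bits(), frame.bytesPerLine());
            frame.unmap();
        }
    }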
@@ -171,13 +240,25 @@ private:
     int width = CVPixelBufferGetWidth(imageBuffer);
     int height = CVPixelBufferGetHeight(imageBuffer);
-    QVideoFrame::PixelFormat format =
-            AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
+    QVideoFrame::PixelFormat format;
+#ifdef Q_OS_IOS
+    bool useTexture = m_renderer->supportsTextures()
+            && CVPixelBufferGetPixelFormatType(imageBuffer) == kCVPixelFormatType_32BGRA;
+    if (useTexture)
+        format = QVideoFrame::Format_BGRA32;
+    else
+#endif
+        format = AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));

     if (format == QVideoFrame::Format_Invalid)
         return;

-    QVideoFrame frame(new CVPixelBufferVideoBuffer(imageBuffer), QSize(width, height), format);
+    QVideoFrame frame(new CVImageVideoBuffer(imageBuffer, m_renderer),
+                      QSize(width, height),
+                      format);
     m_renderer->syncHandleViewfinderFrame(frame);
 }
@@ -187,7 +268,11 @@ private:
 AVFCameraRendererControl::AVFCameraRendererControl(QObject *parent)
    : QVideoRendererControl(parent)
    , m_surface(0)
+   , m_supportsTextures(false)
    , m_needsHorizontalMirroring(false)
+#ifdef Q_OS_IOS
+   , m_textureCache(0)
+#endif
 {
     m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
 }
@@ -198,6 +283,10 @@ AVFCameraRendererControl::~AVFCameraRendererControl()
     [m_viewfinderFramesDelegate release];
     if (m_delegateQueue)
         dispatch_release(m_delegateQueue);
+#ifdef Q_OS_IOS
+    if (m_textureCache)
+        CFRelease(m_textureCache);
+#endif
 }

 QAbstractVideoSurface *AVFCameraRendererControl::surface() const
@@ -209,6 +298,11 @@ void AVFCameraRendererControl::setSurface(QAbstractVideoSurface *surface)
 {
     if (m_surface != surface) {
         m_surface = surface;
+#ifdef Q_OS_IOS
+        m_supportsTextures = m_surface
+                ? m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGRA32)
+                : false;
+#endif
         Q_EMIT surfaceChanged(surface);
     }
 }
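
The m_supportsTextures probe above only turns true when the surface reports Format_BGRA32 for GL texture handles. A surface opting into the zero-copy path would reimplement supportedPixelFormats() roughly like this (a sketch; GLViewfinderSurface is hypothetical):

    QList<QVideoFrame::PixelFormat>
    GLViewfinderSurface::supportedPixelFormats(QAbstractVideoBuffer::HandleType type) const
    {
        // Advertising BGRA32 for GLTextureHandle is exactly what setSurface() checks.
        if (type == QAbstractVideoBuffer::GLTextureHandle)
            return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_BGRA32;
        return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_ARGB32;
    }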
@@ -261,21 +355,6 @@ void AVFCameraRendererControl::syncHandleViewfinderFrame(const QVideoFrame &fram
     m_lastViewfinderFrame = frame;
-    if (m_needsHorizontalMirroring) {
-        m_lastViewfinderFrame.map(QAbstractVideoBuffer::ReadOnly);
-
-        // no deep copy
-        QImage image(m_lastViewfinderFrame.bits(),
-                     m_lastViewfinderFrame.size().width(),
-                     m_lastViewfinderFrame.size().height(),
-                     m_lastViewfinderFrame.bytesPerLine(),
-                     QImage::Format_RGB32);
-
-        QImage mirrored = image.mirrored(true, false);
-
-        m_lastViewfinderFrame.unmap();
-        m_lastViewfinderFrame = QVideoFrame(mirrored);
-    }

     if (m_cameraSession && m_lastViewfinderFrame.isValid())
         m_cameraSession->onCameraFrameFetched(m_lastViewfinderFrame);
 }
@@ -315,7 +394,9 @@ void AVFCameraRendererControl::handleViewfinderFrame()
     }

     if (!m_surface->isActive()) {
-        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat());
+        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), frame.handleType());
+        if (m_needsHorizontalMirroring)
+            format.setProperty("mirrored", true);

         if (!m_surface->start(format)) {
             qWarning() << "Failed to start viewfinder m_surface, format:" << format;
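
With the QImage-based flip removed from syncHandleViewfinderFrame() above, mirroring is now only a hint on the surface format. A consumer would read it back when the surface starts, roughly like this (a sketch; the surface class and m_mirrored member are hypothetical, QVideoSurfaceFormat::property() is Qt 5 API):

    bool GLViewfinderSurface::start(const QVideoSurfaceFormat &format)
    {
        // "mirrored" is the dynamic property set in handleViewfinderFrame() above.
        m_mirrored = format.property("mirrored").toBool();
        return QAbstractVideoSurface::start(format);
    }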

View File

@@ -573,22 +573,29 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
     if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
         // If the the pixel format is not specified or invalid, pick the preferred video surface
         // format, or if no surface is set, the preferred capture device format

         const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
-        QList<QVideoFrame::PixelFormat> surfaceFormats;
-        if (m_service->videoOutput() && m_service->videoOutput()->surface())
-            surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
+        QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();

-        QVideoFrame::PixelFormat format = deviceFormats.first();
+        QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
+                                                                  : 0;
+        if (surface) {
+            if (m_service->videoOutput()->supportsTextures()) {
+                pickedFormat = QVideoFrame::Format_ARGB32;
+            } else {
+                QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();

-        for (int i = 0; i < surfaceFormats.count(); ++i) {
-            const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
-            if (deviceFormats.contains(surfaceFormat)) {
-                format = surfaceFormat;
-                break;
-            }
-        }
+                for (int i = 0; i < surfaceFormats.count(); ++i) {
+                    const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
+                    if (deviceFormats.contains(surfaceFormat)) {
+                        pickedFormat = surfaceFormat;
+                        break;
+                    }
+                }
+            }
+        }

-        CVPixelFormatFromQtFormat(format, avfPixelFormat);
+        CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
     }

     if (avfPixelFormat != 0) {

View File

@@ -63,6 +63,10 @@ void AVFMediaPlayerControl::setSession(AVFMediaPlayerSession *session)
     connect(m_session, SIGNAL(audioAvailableChanged(bool)), this, SIGNAL(audioAvailableChanged(bool)));
     connect(m_session, SIGNAL(videoAvailableChanged(bool)), this, SIGNAL(videoAvailableChanged(bool)));
     connect(m_session, SIGNAL(error(int,QString)), this, SIGNAL(error(int,QString)));
+    connect(m_session, &AVFMediaPlayerSession::playbackRateChanged,
+            this, &AVFMediaPlayerControl::playbackRateChanged);
+    connect(m_session, &AVFMediaPlayerSession::seekableChanged,
+            this, &AVFMediaPlayerControl::seekableChanged);
 }

 QMediaPlayer::State AVFMediaPlayerControl::state() const
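
The two added connections use the Qt 5 pointer-to-member overload of connect(), which is checked at compile time; the SIGNAL()/SIGNAL() string form above it is only resolved at runtime. The same connection in both styles, for contrast (the string variant is shown here purely as illustration):

    // String-based: a signature typo is reported at runtime, if at all.
    connect(m_session, SIGNAL(playbackRateChanged(qreal)),
            this, SIGNAL(playbackRateChanged(qreal)));

    // Pointer-to-member, as used above: a typo or mismatch fails to compile.
    connect(m_session, &AVFMediaPlayerSession::playbackRateChanged,
            this, &AVFMediaPlayerControl::playbackRateChanged);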

View File

@@ -80,6 +80,8 @@ public:
     qreal playbackRate() const;

+    inline bool isVolumeSupported() const { return m_volumeSupported; }

 public Q_SLOTS:
     void setPlaybackRate(qreal rate);
@@ -106,6 +108,8 @@ Q_SIGNALS:
     void mutedChanged(bool muted);
     void audioAvailableChanged(bool audioAvailable);
     void videoAvailableChanged(bool videoAvailable);
+    void playbackRateChanged(qreal rate);
+    void seekableChanged(bool seekable);
     void error(int error, const QString &errorString);

 private:
@@ -148,6 +152,7 @@ private:
     void setAudioAvailable(bool available);
     void setVideoAvailable(bool available);
+    void setSeekable(bool seekable);

     AVFMediaPlayerService *m_service;
     AVFVideoOutput *m_videoOutput;
@@ -158,14 +163,17 @@ private:
     QMediaContent m_resources;
     ResourceHandler m_resourceHandler;

+    const bool m_volumeSupported;
     bool m_muted;
     bool m_tryingAsync;
     int m_volume;
     qreal m_rate;
+    qint64 m_requestedPosition;
     qint64 m_duration;
     bool m_videoAvailable;
     bool m_audioAvailable;
+    bool m_seekable;

     void *m_observer;
 };

View File

@@ -121,19 +121,28 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 - (void) unloadMedia
 {
-    if (m_player)
-        [m_player setRate:0.0];
     if (m_playerItem) {
         [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];

         [[NSNotificationCenter defaultCenter] removeObserver:self
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
-                                              object:m_playerItem];
+                                                      object:m_playerItem];
         [[NSNotificationCenter defaultCenter] removeObserver:self
                                                         name:AVPlayerItemTimeJumpedNotification
                                                       object:m_playerItem];
         m_playerItem = 0;
     }
+    if (m_player) {
+        [m_player setRate:0.0];
+        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
+        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
+        [m_player release];
+        m_player = 0;
+    }
+    if (m_playerLayer) {
+        [m_playerLayer release];
+        m_playerLayer = 0;
+    }
 }

 - (void) prepareToPlayAsset:(AVURLAsset *)asset
@@ -203,30 +212,15 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
                                                         name:AVPlayerItemTimeJumpedNotification
                                                       object:m_playerItem];

-    //Clean up old player if we have one
-    if (m_player) {
-        [m_player setRate:0.0];
-        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
-        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
-        [m_player release];
-        m_player = 0;
-        if (m_playerLayer) {
-            [m_playerLayer release];
-            m_playerLayer = 0; //Will have been released
-        }
-    }

     //Get a new AVPlayer initialized to play the specified player item.
     m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
     [m_player retain];

-#if defined(Q_OS_OSX)
     //Set the initial volume on new player object
-    if (self.session)
-        m_player.volume = m_session->volume() / 100.0f;
-#endif
+    if (self.session && self.session->isVolumeSupported()) {
+        [m_player setVolume:m_session->volume() / 100.0f];
+        [m_player setMuted:m_session->isMuted()];
+    }

     //Create a new player layer if we don't have one already
     if (!m_playerLayer)
@@ -354,18 +348,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 #endif
     [self unloadMedia];

-    if (m_player) {
-        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
-        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
-        [m_player release];
-        m_player = 0;
-    }
-    if (m_playerLayer) {
-        [m_playerLayer release];
-        m_playerLayer = 0;
-    }
     if (m_URL) {
         [m_URL release];
     }
@@ -382,13 +364,20 @@ AVFMediaPlayerSession::AVFMediaPlayerSession(AVFMediaPlayerService *service, QOb
     , m_state(QMediaPlayer::StoppedState)
     , m_mediaStatus(QMediaPlayer::NoMedia)
     , m_mediaStream(0)
+#ifdef Q_OS_IOS
+    , m_volumeSupported(QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_7_0)
+#else
+    , m_volumeSupported(true)
+#endif
     , m_muted(false)
     , m_tryingAsync(false)
     , m_volume(100)
     , m_rate(1.0)
+    , m_requestedPosition(-1)
     , m_duration(0)
     , m_videoAvailable(false)
     , m_audioAvailable(false)
+    , m_seekable(false)
 {
     m_observer = [[AVFMediaPlayerSessionObserver alloc] initWithMediaPlayerSession:this];
 }
@@ -458,23 +447,26 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
     qDebug() << Q_FUNC_INFO << content.canonicalUrl();
 #endif

+    [(AVFMediaPlayerSessionObserver*)m_observer unloadMedia];

     m_resources = content;
     m_mediaStream = stream;

+    setAudioAvailable(false);
+    setVideoAvailable(false);
+    setSeekable(false);
+    m_requestedPosition = -1;
+    Q_EMIT positionChanged(position());

     QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;

     if (content.isNull() || content.canonicalUrl().isEmpty()) {
-        [(AVFMediaPlayerSessionObserver*)m_observer unloadMedia];
         m_mediaStatus = QMediaPlayer::NoMedia;

         if (m_state != QMediaPlayer::StoppedState)
             Q_EMIT stateChanged(m_state = QMediaPlayer::StoppedState);
         if (m_mediaStatus != oldMediaStatus)
             Q_EMIT mediaStatusChanged(m_mediaStatus);

-        Q_EMIT positionChanged(position());
         return;
     } else {
@@ -482,6 +474,7 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
         if (m_mediaStatus != oldMediaStatus)
             Q_EMIT mediaStatusChanged(m_mediaStatus);
     }

     //Load AVURLAsset
     //initialize asset using content's URL
     NSString *urlString = [NSString stringWithUTF8String:content.canonicalUrl().toEncoded().constData()];
@@ -494,7 +487,7 @@ qint64 AVFMediaPlayerSession::position() const
     AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
     if (!playerItem)
-        return 0;
+        return m_requestedPosition != -1 ? m_requestedPosition : 0;

     CMTime time = [playerItem currentTime];
     return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
@@ -563,7 +556,16 @@ bool AVFMediaPlayerSession::isVideoAvailable() const
 bool AVFMediaPlayerSession::isSeekable() const
 {
-    return true;
+    return m_seekable;
 }

+void AVFMediaPlayerSession::setSeekable(bool seekable)
+{
+    if (m_seekable == seekable)
+        return;
+
+    m_seekable = seekable;
+    Q_EMIT seekableChanged(seekable);
+}
+
 QMediaTimeRange AVFMediaPlayerSession::availablePlaybackRanges() const
@@ -602,10 +604,10 @@ void AVFMediaPlayerSession::setPlaybackRate(qreal rate)
     m_rate = rate;

     AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
-    if (player != 0 && m_state == QMediaPlayer::PlayingState) {
+    if (player && m_state == QMediaPlayer::PlayingState)
         [player setRate:m_rate];
-    }
+
+    Q_EMIT playbackRateChanged(m_rate);
 }

 void AVFMediaPlayerSession::setPosition(qint64 pos)
@@ -614,14 +616,23 @@ void AVFMediaPlayerSession::setPosition(qint64 pos)
     qDebug() << Q_FUNC_INFO << pos;
 #endif

-    if ( !isSeekable() || pos == position())
+    if (pos == position())
         return;

     AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
-    if (!playerItem)
+    if (!playerItem) {
+        m_requestedPosition = pos;
+        Q_EMIT positionChanged(m_requestedPosition);
         return;
+    } else if (!isSeekable()) {
+        if (m_requestedPosition != -1) {
+            m_requestedPosition = -1;
+            Q_EMIT positionChanged(position());
+        }
+        return;
+    }

     pos = qMax(qint64(0), pos);
     if (duration() > 0)
         pos = qMin(pos, duration());
@@ -655,8 +666,10 @@ void AVFMediaPlayerSession::play()
         processLoadStateChange();
     }

-    if (m_mediaStatus == QMediaPlayer::LoadedMedia || m_mediaStatus == QMediaPlayer::BufferedMedia)
-        [[(AVFMediaPlayerSessionObserver*)m_observer player] play];
+    if (m_mediaStatus == QMediaPlayer::LoadedMedia || m_mediaStatus == QMediaPlayer::BufferedMedia) {
+        // Setting the rate starts playback
+        [[(AVFMediaPlayerSessionObserver*)m_observer player] setRate:m_rate];
+    }

     //processLoadStateChange();
     Q_EMIT stateChanged(m_state);
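
The "setting the rate starts playback" comment leans on documented AVPlayer semantics: play is equivalent to setting the rate to 1.0, so applying the stored m_rate starts playback while honoring a non-default speed in a single call. A hedged sketch (url stands for some NSURL):

    AVPlayer *player = [AVPlayer playerWithURL:url];
    [player play];        // equivalent to [player setRate:1.0]
    [player setRate:2.0]; // starts (or continues) playback at 2x
    [player setRate:0.0]; // pauses, which is what the old stop() path did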
@@ -697,8 +710,8 @@ void AVFMediaPlayerSession::stop()
         return;

     m_state = QMediaPlayer::StoppedState;
-    m_rate = 0.0f;
-    [[(AVFMediaPlayerSessionObserver*)m_observer player] setRate:m_rate];
+    // AVPlayer doesn't have stop(), only pause() and play().
+    [[(AVFMediaPlayerSessionObserver*)m_observer player] pause];
     setPosition(0);

     if (m_videoOutput) {
@@ -716,21 +729,20 @@ void AVFMediaPlayerSession::setVolume(int volume)
     qDebug() << Q_FUNC_INFO << volume;
 #endif

-    if (m_volume == volume)
-        return;
-
-    AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
-    if (!player)
-        return;
-
-    if (![player respondsToSelector:@selector(setVolume:)]) {
+    if (!m_volumeSupported) {
         qWarning("%s not implemented, requires iOS 7 or later", Q_FUNC_INFO);
         return;
     }

-    [player setVolume:volume / 100.0f];
+    if (m_volume == volume)
+        return;
+
     m_volume = volume;
+
+    AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
+    if (player)
+        [player setVolume:volume / 100.0f];
+
     Q_EMIT volumeChanged(m_volume);
 }
@@ -739,22 +751,21 @@ void AVFMediaPlayerSession::setMuted(bool muted)
 #ifdef QT_DEBUG_AVF
     qDebug() << Q_FUNC_INFO << muted;
 #endif

-    if (m_muted == muted)
-        return;
-
-    AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
-    if (!player)
-        return;
-
-    // iOS: setMuted exists since iOS 7.0, thus check if it exists
-    if (![player respondsToSelector:@selector(setMuted:)]) {
+    if (!m_volumeSupported) {
         qWarning("%s not implemented, requires iOS 7 or later", Q_FUNC_INFO);
         return;
     }

-    [player setMuted:muted];
+    if (m_muted == muted)
+        return;
+
     m_muted = muted;
+
+    AVPlayer *player = [(AVFMediaPlayerSessionObserver*)m_observer player];
+    if (player)
+        [player setMuted:muted];
+
     Q_EMIT mutedChanged(muted);
 }
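
Both setVolume() and setMuted() now share a cache-and-apply shape: gate on the m_volumeSupported flag computed once at construction, store the value on the session even when no AVPlayer exists yet, and let prepareToPlayAsset: (the isVolumeSupported() block earlier in this diff) push the cached state onto a newly created player. A condensed sketch with hypothetical names:

    class MiniSession // hypothetical, shows only the setter shape
    {
    public:
        void setMuted(bool muted)
        {
            if (!m_volumeSupported)   // capability gate, decided once
                return;
            if (m_muted == muted)     // no-op guard
                return;
            m_muted = muted;                  // 1. always cache on the session
            if (m_backendReady)
                applyMutedToBackend(m_muted); // 2. apply only if a player exists now
            notifyMutedChanged(m_muted);      // 3. notify regardless
        }
    private:
        void applyMutedToBackend(bool) {}
        void notifyMutedChanged(bool) {}
        bool m_volumeSupported = true;
        bool m_muted = false;
        bool m_backendReady = false;
    };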
@@ -767,6 +778,11 @@ void AVFMediaPlayerSession::processEOS()
     Q_EMIT positionChanged(position());
     m_mediaStatus = QMediaPlayer::EndOfMedia;

+    // At this point, frames should not be rendered anymore.
+    // Clear the output layer to make sure of that.
+    if (m_videoOutput)
+        m_videoOutput->setLayer(0);
+
     Q_EMIT stateChanged(m_state = QMediaPlayer::StoppedState);
     Q_EMIT mediaStatusChanged(m_mediaStatus);
 }
@@ -801,6 +817,8 @@ void AVFMediaPlayerSession::processLoadStateChange()
         }
     }

+    setSeekable([[playerItem seekableTimeRanges] count] > 0);
+
     // Get the native size of the video, and reset the bounds of the player layer
     AVPlayerLayer *playerLayer = [(AVFMediaPlayerSessionObserver*)m_observer playerLayer];
     if (videoTrack && playerLayer) {
@@ -818,11 +836,16 @@ void AVFMediaPlayerSession::processLoadStateChange()
     if (m_duration != currentDuration)
         Q_EMIT durationChanged(m_duration = currentDuration);

+    if (m_requestedPosition != -1) {
+        setPosition(m_requestedPosition);
+        m_requestedPosition = -1;
+    }
+
     newStatus = isPlaying ? QMediaPlayer::BufferedMedia : QMediaPlayer::LoadedMedia;

     if (m_state == QMediaPlayer::PlayingState && [(AVFMediaPlayerSessionObserver*)m_observer player]) {
+        // Setting the rate is enough to start playback, no need to call play()
+        [[(AVFMediaPlayerSessionObserver*)m_observer player] setRate:m_rate];
-        [[(AVFMediaPlayerSessionObserver*)m_observer player] play];
     }
 }
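
The m_requestedPosition handling above completes a deferred-seek pattern: setPosition() records the target while no AVPlayerItem exists, position() reports the stored value, and the load handler replays it once the item is ready (processMediaLoadError() below discards it). A condensed sketch with hypothetical names:

    #include <QtGlobal>

    struct PendingSeek // hypothetical
    {
        qint64 requested = -1; // -1 means no seek is pending

        void request(qint64 pos, bool itemLoaded)
        {
            if (!itemLoaded) {
                requested = pos; // remember the target for later
                return;
            }
            seekNow(pos);
        }
        void onItemLoaded()
        {
            if (requested != -1) {
                seekNow(requested); // replay the pending seek
                requested = -1;
            }
        }
        void seekNow(qint64) {} // stand-in for the real CMTime-based seek
    };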
@@ -837,6 +860,10 @@ void AVFMediaPlayerSession::processPositionChange()
 void AVFMediaPlayerSession::processMediaLoadError()
 {
+    if (m_requestedPosition != -1) {
+        m_requestedPosition = -1;
+        Q_EMIT positionChanged(position());
+    }
+
     Q_EMIT error(QMediaPlayer::FormatError, tr("Failed to load media"));
     Q_EMIT mediaStatusChanged(m_mediaStatus = QMediaPlayer::InvalidMedia);
     Q_EMIT stateChanged(m_state = QMediaPlayer::StoppedState);