AVFoundation: fix retrieving track information from live sources.

For live sources, track information is available only after the
AVPlayer has changed its status to AVPlayerStatusReadyToPlay. It also
seems to be available only from AVPlayerItem.tracks rather than from
AVAsset.tracks.
The audioAvailableChanged() and videoAvailableChanged()
signals are now correctly emitted and the video layer is
correctly positioned for live sources.

Task-number: QTBUG-38666
Change-Id: I8ee015a6ce81694c1fc1e44c679887cf7ccb0fd6
Reviewed-by: Andy Nichols <andy.nichols@digia.com>
Yoann Lopes
2014-05-08 15:22:11 +02:00
parent 341b86c63f
commit bee6244e24
2 changed files with 55 additions and 51 deletions
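For context, here is a minimal standalone sketch (not part of this change) of the approach the fix relies on: observe the AVPlayer's "status" key path and, once it reports AVPlayerStatusReadyToPlay, read the tracks from AVPlayerItem rather than from AVAsset. The TrackProbe class and its observePlayer: method are hypothetical names used only for illustration; the patch itself does the equivalent work in AVFMediaPlayerSession::processLoadStateChange().

// Minimal sketch, assuming ARC and a plain AVFoundation setup; TrackProbe and
// observePlayer: are illustrative names, not part of the Qt plugin.
#import <AVFoundation/AVFoundation.h>

static void *StatusObservationContext = &StatusObservationContext;

@interface TrackProbe : NSObject
- (void)observePlayer:(AVPlayer *)player;
@end

@implementation TrackProbe

- (void)observePlayer:(AVPlayer *)player
{
    // Track information for live streams only becomes available once the
    // player reaches AVPlayerStatusReadyToPlay, so watch the "status" key path.
    // (Observer removal is omitted for brevity.)
    [player addObserver:self
             forKeyPath:@"status"
                options:NSKeyValueObservingOptionNew
                context:StatusObservationContext];
}

- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context
{
    if (context != StatusObservationContext) {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
        return;
    }

    AVPlayer *player = (AVPlayer *)object;
    if (player.status != AVPlayerStatusReadyToPlay)
        return;

    // For live sources AVAsset.tracks tends to stay empty; AVPlayerItem.tracks
    // is populated by the time the item is ready to play.
    BOOL hasAudio = NO;
    BOOL hasVideo = NO;
    for (AVPlayerItemTrack *track in player.currentItem.tracks) {
        AVAssetTrack *assetTrack = track.assetTrack;
        if (!assetTrack)
            continue;
        if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio])
            hasAudio = YES;
        if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo])
            hasVideo = YES;
    }
    NSLog(@"audio available: %d, video available: %d", hasAudio, hasVideo);
}

@end

The diff below applies the same idea inside processLoadStateChange(), and additionally uses the first video track's naturalSize to set the bounds of the AVPlayerLayer.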


@@ -156,6 +156,9 @@ private:
         QByteArray rawData;
     };
 
+    void setAudioAvailable(bool available);
+    void setVideoAvailable(bool available);
+
     AVFMediaPlayerService *m_service;
     AVFVideoOutput *m_videoOutput;


@@ -70,15 +70,11 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     AVPlayerItem *m_playerItem;
     AVPlayerLayer *m_playerLayer;
     NSURL *m_URL;
-    bool m_audioAvailable;
-    bool m_videoAvailable;
 }
 
 @property (readonly, getter=player) AVPlayer* m_player;
 @property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
 @property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
-@property (readonly, getter=audioAvailable) bool m_audioAvailable;
-@property (readonly, getter=videoAvailable) bool m_videoAvailable;
 @property (readonly, getter=session) AVFMediaPlayerSession* m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session;
@@ -96,7 +92,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 @implementation AVFMediaPlayerSessionObserver
 
-@synthesize m_player, m_playerItem, m_playerLayer, m_audioAvailable, m_videoAvailable, m_session;
+@synthesize m_player, m_playerItem, m_playerLayer, m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session
 {
@@ -186,18 +182,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         return;
     }
 
-    m_audioAvailable = false;
-    m_videoAvailable = false;
-
-    //Check each track of asset for audio and video content
-    NSArray *tracks = [asset tracks];
-    for (AVAssetTrack *track in tracks) {
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
-            m_audioAvailable = true;
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
-            m_videoAvailable = true;
-    }
-
     //At this point we're ready to set up for playback of the asset.
     //Stop observing our prior AVPlayerItem, if we have one.
     if (m_playerItem)
@@ -258,18 +242,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
         [m_playerLayer retain];
         m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-
-        //Get the native size of the new item, and reset the bounds of the player layer
-        AVAsset *asset = m_playerItem.asset;
-        if (asset) {
-            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-            if ([tracks count]) {
-                AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-            }
-        }
+        m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
     }
 
     //Observe the AVPlayer "currentItem" property to find out when any
@@ -366,22 +339,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     {
         AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
         if (m_playerItem != newPlayerItem)
-        {
             m_playerItem = newPlayerItem;
-
-            //Get the native size of the new item, and reset the bounds of the player layer
-            //AVAsset *asset = m_playerItem.asset;
-            AVAsset *asset = [m_playerItem asset];
-            if (asset) {
-                NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-                if ([tracks count]) {
-                    AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                    m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                    m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-                }
-            }
-        }
 
         if (self.session)
             QMetaObject::invokeMethod(m_session, "processCurrentItemChanged", Qt::AutoConnection);
     }
@@ -513,6 +472,9 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
     m_resources = content;
     m_mediaStream = stream;
 
+    setAudioAvailable(false);
+    setVideoAvailable(false);
+
     QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
 
     if (content.isNull() || content.canonicalUrl().isEmpty()) {
@@ -582,14 +544,32 @@ bool AVFMediaPlayerSession::isMuted() const
     return m_muted;
 }
 
+void AVFMediaPlayerSession::setAudioAvailable(bool available)
+{
+    if (m_audioAvailable == available)
+        return;
+
+    m_audioAvailable = available;
+    Q_EMIT audioAvailableChanged(available);
+}
+
 bool AVFMediaPlayerSession::isAudioAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer audioAvailable];
+    return m_audioAvailable;
 }
 
+void AVFMediaPlayerSession::setVideoAvailable(bool available)
+{
+    if (m_videoAvailable == available)
+        return;
+
+    m_videoAvailable = available;
+    Q_EMIT videoAvailableChanged(available);
+}
+
 bool AVFMediaPlayerSession::isVideoAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer videoAvailable];
+    return m_videoAvailable;
 }
 
 bool AVFMediaPlayerSession::isSeekable() const
@@ -802,16 +782,37 @@ void AVFMediaPlayerSession::processLoadStateChange()
     bool isPlaying = (m_state != QMediaPlayer::StoppedState);
 
     if (currentStatus == AVPlayerStatusReadyToPlay) {
+        AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
+        if (playerItem) {
+            // Check each track for audio and video content
+            AVAssetTrack *videoTrack = nil;
+            NSArray *tracks = playerItem.tracks;
+            for (AVPlayerItemTrack *track in tracks) {
+                AVAssetTrack *assetTrack = track.assetTrack;
+                if (assetTrack) {
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio])
+                        setAudioAvailable(true);
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
+                        setVideoAvailable(true);
+                        if (!videoTrack)
+                            videoTrack = assetTrack;
+                    }
+                }
+            }
+            // Get the native size of the video, and reset the bounds of the player layer
+            AVPlayerLayer *playerLayer = [(AVFMediaPlayerSessionObserver*)m_observer playerLayer];
+            if (videoTrack && playerLayer) {
+                playerLayer.bounds = CGRectMake(0.0f, 0.0f,
+                                                videoTrack.naturalSize.width,
+                                                videoTrack.naturalSize.height);
+            }
+        }
+
         qint64 currentDuration = duration();
         if (m_duration != currentDuration)
             Q_EMIT durationChanged(m_duration = currentDuration);
 
-        if (m_audioAvailable != isAudioAvailable())
-            Q_EMIT audioAvailableChanged(m_audioAvailable = !m_audioAvailable);
-        if (m_videoAvailable != isVideoAvailable())
-            Q_EMIT videoAvailableChanged(m_videoAvailable = !m_videoAvailable);
-
         newStatus = isPlaying ? QMediaPlayer::BufferedMedia : QMediaPlayer::LoadedMedia;
 
         if (m_state == QMediaPlayer::PlayingState && [(AVFMediaPlayerSessionObserver*)m_observer player]) {