iOS video frame render implementation.

Uses CVTextureCache; iOS only for now. The OS X code could be ported, but it
will need further work to support TEXTURE_RECTANGLE in the QVideoNode classes.
When we can't share a context, it falls back to an offscreen window, FBO
rendering and grabbing a QImage.

Change-Id: I23b831fdcc63aeb1b67b7741d8d56779470240d3
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Commit 9444c8ec61 (parent 3e94b7ce2d), committed by Yoann Lopes.
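The renderer introduced below relies on CoreVideo's OpenGL ES texture cache: each decoded frame arrives as a CVPixelBufferRef and is mapped to a GL texture instead of being copied through system memory. The following is only an illustrative sketch of that mechanism, not code from this patch; the helper name is invented, and it assumes an EAGL-backed OpenGL ES 2 context is current and that the cache was created for that context.

// Hypothetical helper (not part of the patch) showing the CVOpenGLESTextureCache path.
// Assumes an EAGLContext is current on this thread and that 'cache' was created for it with
// CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &cache).
#import <CoreVideo/CoreVideo.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>

static GLuint textureFromPixelBuffer(CVOpenGLESTextureCacheRef cache,
                                     CVPixelBufferRef pixelBuffer,
                                     CVOpenGLESTextureRef *outTexture)
{
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // Wrap the BGRA pixel buffer as a GL_TEXTURE_2D without copying the pixel data.
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(
                kCFAllocatorDefault, cache, pixelBuffer, NULL,
                GL_TEXTURE_2D, GL_RGBA,
                (GLsizei)width, (GLsizei)height,
                GL_BGRA_EXT, GL_UNSIGNED_BYTE, 0, outTexture);
    if (err != kCVReturnSuccess || !*outTexture)
        return 0;

    // The caller owns *outTexture and must CFRelease() it once the frame has been
    // consumed; otherwise the texture stays alive in the cache.
    return CVOpenGLESTextureGetName(*outTexture);
}

The patch wraps the equivalent calls behind CVOGLTextureCache*/CVOGLTexture* aliases (see avfvideoframerenderer_ios.h below) so the same code can target CVOpenGLTextureCache on OS X, and it copies the pixel buffer into a QImage when no share context is available.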
avfdisplaylink.h
@@ -37,7 +37,11 @@
 #include <QtCore/qobject.h>
 #include <QtCore/qmutex.h>

+#if defined(Q_OS_IOS)
+#include <CoreVideo/CVBase.h>
+#else
 #include <QuartzCore/CVDisplayLink.h>
+#endif

 QT_BEGIN_NAMESPACE

@@ -64,7 +68,11 @@ protected:
     virtual bool event(QEvent *);

 private:
+#if defined(Q_OS_IOS)
+    void *m_displayLink;
+#else
     CVDisplayLinkRef m_displayLink;
+#endif
     QMutex m_displayLinkMutex;
     bool m_pendingDisplayLinkEvent;
     bool m_isActive;
avfdisplaylink.mm
@@ -46,8 +46,70 @@
 #include <QtCore/qdebug.h>
 #endif

+#if defined(Q_OS_IOS)
+#import <QuartzCore/CADisplayLink.h>
+#import <Foundation/NSRunLoop.h>
+#define _m_displayLink static_cast<DisplayLinkObserver*>(m_displayLink)
+#else
+#endif

 QT_USE_NAMESPACE

+#if defined(Q_OS_IOS)
+@interface DisplayLinkObserver : NSObject
+{
+    AVFDisplayLink *m_avfDisplayLink;
+    CADisplayLink *m_displayLink;
+}
+
+- (void)start;
+- (void)stop;
+- (void)displayLinkNotification:(CADisplayLink *)sender;
+
+@end
+
+@implementation DisplayLinkObserver
+
+- (id)initWithAVFDisplayLink:(AVFDisplayLink *)link
+{
+    self = [super init];
+
+    if (self) {
+        m_avfDisplayLink = link;
+        m_displayLink = [[CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkNotification:)] retain];
+    }
+
+    return self;
+}
+
+- (void) dealloc
+{
+    if (m_displayLink) {
+        [m_displayLink release];
+        m_displayLink = NULL;
+    }
+
+    [super dealloc];
+}
+
+- (void)start
+{
+    [m_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)stop
+{
+    [m_displayLink removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)displayLinkNotification:(CADisplayLink *)sender
+{
+    Q_UNUSED(sender);
+    m_avfDisplayLink->displayLinkEvent(nullptr);
+}
+
+@end
+#else
 static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
                                       const CVTimeStamp *inNow,
                                       const CVTimeStamp *inOutputTime,
@@ -65,12 +127,17 @@ static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
     link->displayLinkEvent(inOutputTime);
     return kCVReturnSuccess;
 }
+#endif

 AVFDisplayLink::AVFDisplayLink(QObject *parent)
     : QObject(parent)
+    , m_displayLink(0)
     , m_pendingDisplayLinkEvent(false)
     , m_isActive(false)
 {
+#if defined(Q_OS_IOS)
+    m_displayLink = [[DisplayLinkObserver alloc] initWithAVFDisplayLink:this];
+#else
     // create display link for the main display
     CVDisplayLinkCreateWithCGDisplay(kCGDirectMainDisplay, &m_displayLink);
     if (m_displayLink) {
@@ -80,6 +147,7 @@ AVFDisplayLink::AVFDisplayLink(QObject *parent)
         // set the renderer output callback function
         CVDisplayLinkSetOutputCallback(m_displayLink, &CVDisplayLinkCallback, this);
     }
+#endif
 }

 AVFDisplayLink::~AVFDisplayLink()
@@ -89,8 +157,12 @@ AVFDisplayLink::~AVFDisplayLink()
 #endif

     if (m_displayLink) {
-        CVDisplayLinkStop(m_displayLink);
+        stop();
+#if defined(Q_OS_IOS)
+        [_m_displayLink release];
+#else
         CVDisplayLinkRelease(m_displayLink);
+#endif
         m_displayLink = NULL;
     }
 }
@@ -108,20 +180,27 @@ bool AVFDisplayLink::isActive() const
 void AVFDisplayLink::start()
 {
     if (m_displayLink && !m_isActive) {
-        CVDisplayLinkStart(m_displayLink);
-        m_isActive = true;
+#if defined(Q_OS_IOS)
+        [_m_displayLink start];
+#else
+        CVDisplayLinkStart(m_displayLink);
+#endif
+        m_isActive = true;
     }
 }

 void AVFDisplayLink::stop()
 {
     if (m_displayLink && m_isActive) {
+#if defined(Q_OS_IOS)
+        [_m_displayLink stop];
+#else
         CVDisplayLinkStop(m_displayLink);
+#endif
         m_isActive = false;
     }
 }


 void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
 {
     // This function is called from a
@@ -131,7 +210,12 @@ void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
     m_displayLinkMutex.lock();
     bool pending = m_pendingDisplayLinkEvent;
     m_pendingDisplayLinkEvent = true;
+#if defined(Q_OS_IOS)
+    Q_UNUSED(ts);
+    memset(&m_frameTimeStamp, 0, sizeof(CVTimeStamp));
+#else
     m_frameTimeStamp = *ts;
+#endif
     m_displayLinkMutex.unlock();

     if (!pending)
avfmediaplayerservice.h
@@ -57,6 +57,7 @@ private:
     AVFMediaPlayerControl *m_control;
     QMediaControl *m_videoOutput;
     AVFMediaPlayerMetaDataControl *m_playerMetaDataControl;
+    bool m_enableRenderControl;
 };

 QT_END_NAMESPACE
avfmediaplayerservice.mm
@@ -43,26 +43,39 @@
 #include "avfmediaplayersession.h"
 #include "avfmediaplayercontrol.h"
 #include "avfmediaplayermetadatacontrol.h"
-#if defined(Q_OS_OSX)
-# include "avfvideooutput.h"
-# include "avfvideorenderercontrol.h"
-#endif
+#include "avfvideooutput.h"
+#include "avfvideorenderercontrol.h"
 #ifndef QT_NO_WIDGETS
 # include "avfvideowidgetcontrol.h"
 #endif
 #include "avfvideowindowcontrol.h"

+#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_8, __IPHONE_6_0)
+#import <AVFoundation/AVFoundation.h>
+#endif
+
 QT_USE_NAMESPACE

 AVFMediaPlayerService::AVFMediaPlayerService(QObject *parent)
     : QMediaService(parent)
     , m_videoOutput(0)
+    , m_enableRenderControl(true)
 {
     m_session = new AVFMediaPlayerSession(this);
     m_control = new AVFMediaPlayerControl(this);
     m_control->setSession(m_session);
     m_playerMetaDataControl = new AVFMediaPlayerMetaDataControl(m_session, this);

+#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_8, __IPHONE_6_0)
+    // AVPlayerItemVideoOutput is available in SDK
+#if QT_MAC_DEPLOYMENT_TARGET_BELOW(__MAC_10_8, __IPHONE_6_0)
+    // might not be available at runtime
+#if defined(Q_OS_IOS)
+    m_enableRenderControl = [AVPlayerItemVideoOutput class] != 0;
+#endif
+#endif
+#endif
+
     connect(m_control, SIGNAL(mediaChanged(QMediaContent)), m_playerMetaDataControl, SLOT(updateTags()));
 }

@@ -85,15 +98,16 @@ QMediaControl *AVFMediaPlayerService::requestControl(const char *name)

     if (qstrcmp(name, QMetaDataReaderControl_iid) == 0)
         return m_playerMetaDataControl;
-#if defined(Q_OS_OSX)
-    if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
+    if (m_enableRenderControl && (qstrcmp(name, QVideoRendererControl_iid) == 0)) {
         if (!m_videoOutput)
             m_videoOutput = new AVFVideoRendererControl(this);

         m_session->setVideoOutput(qobject_cast<AVFVideoOutput*>(m_videoOutput));
         return m_videoOutput;
     }
-#endif
 #ifndef QT_NO_WIDGETS
     if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
         if (!m_videoOutput)
@@ -119,11 +133,11 @@ void AVFMediaPlayerService::releaseControl(QMediaControl *control)
     qDebug() << Q_FUNC_INFO << control;
 #endif
     if (m_videoOutput == control) {
-#if defined(Q_OS_OSX)
         AVFVideoRendererControl *renderControl = qobject_cast<AVFVideoRendererControl*>(m_videoOutput);

         if (renderControl)
             renderControl->setSurface(0);
-#endif
         m_videoOutput = 0;
         m_session->setVideoOutput(0);

src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h (new file, 107 lines)
@@ -0,0 +1,107 @@
+/****************************************************************************
+**
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef AVFVIDEOFRAMERENDERER_H
+#define AVFVIDEOFRAMERENDERER_H
+
+#include <QtCore/QObject>
+#include <QtGui/QImage>
+#include <QtGui/QOpenGLContext>
+#include <QtCore/QSize>
+
+@class AVPlayerLayer;
+@class AVPlayerItemVideoOutput;
+
+QT_BEGIN_NAMESPACE
+
+class QOpenGLContext;
+class QOpenGLFramebufferObject;
+class QOpenGLShaderProgram;
+class QOffscreenSurface;
+class QAbstractVideoSurface;
+
+typedef struct __CVBuffer *CVBufferRef;
+typedef CVBufferRef CVImageBufferRef;
+typedef CVImageBufferRef CVPixelBufferRef;
+#if defined(Q_OS_IOS)
+typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
+typedef CVImageBufferRef CVOpenGLESTextureRef;
+// helpers to avoid boring if def
+typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
+typedef CVOpenGLESTextureRef CVOGLTextureRef;
+#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
+#define CVOGLTextureGetName CVOpenGLESTextureGetName
+#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
+#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
+#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
+#else
+typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
+typedef CVImageBufferRef CVOpenGLTextureRef;
+// helpers to avoid boring if def
+typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
+typedef CVOpenGLTextureRef CVOGLTextureRef;
+#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
+#define CVOGLTextureGetName CVOpenGLTextureGetName
+#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
+#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
+#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
+#endif
+
+class AVFVideoFrameRenderer : public QObject
+{
+public:
+    AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = 0);
+
+    virtual ~AVFVideoFrameRenderer();
+
+    void setPlayerLayer(AVPlayerLayer *layer);
+
+    CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer);
+    QImage renderLayerToImage(AVPlayerLayer *layer);
+
+private:
+    void initRenderer();
+    CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
+    CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
+
+    QOpenGLContext *m_glContext;
+    QOffscreenSurface *m_offscreenSurface;
+    QAbstractVideoSurface *m_surface;
+    CVOGLTextureCacheRef m_textureCache;
+    AVPlayerItemVideoOutput* m_videoOutput;
+    bool m_isContextShared;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEOFRAMERENDERER_H
avfvideoframerenderer_ios.mm (new file)
@@ -0,0 +1,261 @@
+/****************************************************************************
+**
+** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "avfvideoframerenderer_ios.h"
+
+#include <QtMultimedia/qabstractvideosurface.h>
+#include <QtGui/QOpenGLFramebufferObject>
+#include <QtGui/QOpenGLShaderProgram>
+#include <QtGui/QOffscreenSurface>
+
+#ifdef QT_DEBUG_AVF
+#include <QtCore/qdebug.h>
+#endif
+
+#import <CoreVideo/CVBase.h>
+#import <AVFoundation/AVFoundation.h>
+QT_USE_NAMESPACE
+
+AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
+    : QObject(parent)
+    , m_glContext(0)
+    , m_offscreenSurface(0)
+    , m_surface(surface)
+    , m_textureCache(0)
+    , m_videoOutput(0)
+    , m_isContextShared(true)
+{
+}
+
+AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO;
+#endif
+
+    [m_videoOutput release]; // sending to nil is fine
+    if (m_textureCache)
+        CFRelease(m_textureCache);
+    delete m_offscreenSurface;
+    delete m_glContext;
+}
+
+void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer)
+{
+    Q_UNUSED(layer)
+    if (m_videoOutput) {
+        [m_videoOutput release];
+        m_videoOutput = 0;
+        // will be re-created in first call to copyPixelBufferFromLayer
+    }
+}
+
+CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
+{
+    initRenderer();
+
+    // If the glContext isn't shared, it doesn't make sense to return a texture for us
+    if (!m_isContextShared)
+        return 0;
+
+    size_t dummyWidth = 0, dummyHeight = 0;
+    return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight);
+}
+
+static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey;
+static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
+static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY];
+
+
+CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
+                                                                 size_t& width, size_t& height)
+{
+    //Is layer valid
+    if (!layer) {
+#ifdef QT_DEBUG_AVF
+        qWarning("copyPixelBufferFromLayer: invalid layer");
+#endif
+        return 0;
+    }
+
+    if (!m_videoOutput) {
+        m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS];
+        [m_videoOutput setDelegate:nil queue:nil];
+        AVPlayerItem * item = [[layer player] currentItem];
+        [item addOutput:m_videoOutput];
+    }
+
+    CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+    CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
+    // happens when buffering / loading
+    if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
+        return 0;
+    }
+
+    CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+                                                          itemTimeForDisplay:nil];
+    if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+        qWarning("copyPixelBufferForItemTime returned nil");
+        CMTimeShow(currentCMFrameTime);
+#endif
+        return 0;
+    }
+
+    width = CVPixelBufferGetWidth(pixelBuffer);
+    height = CVPixelBufferGetHeight(pixelBuffer);
+    return pixelBuffer;
+}
+
+CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
+                                                                   size_t& width, size_t& height)
+{
+    CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+
+    if (!pixelBuffer)
+        return 0;
+
+    CVOGLTextureCacheFlush(m_textureCache, 0);
+
+    CVOGLTextureRef texture = 0;
+    CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, NULL,
+                                                           GL_TEXTURE_2D, GL_RGBA,
+                                                           (GLsizei) width, (GLsizei) height,
+                                                           GL_BGRA, GL_UNSIGNED_BYTE, 0,
+                                                           &texture);
+
+    if (!texture || err) {
+#ifdef QT_DEBUG_AVF
+        qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err);
+#endif
+    }
+
+    CVPixelBufferRelease(pixelBuffer);
+
+    return texture;
+}
+
+QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
+{
+    size_t width = 0;
+    size_t height = 0;
+    CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+
+    if (!pixelBuffer)
+        return QImage();
+
+    OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+    if (pixelFormat != kCVPixelFormatType_32BGRA) {
+#ifdef QT_DEBUG_AVF
+        qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
+#endif
+        return QImage();
+    }
+
+    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+    char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
+    size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+    // format here is not relevant, only using for storage
+    QImage img = QImage(width, height, QImage::Format_ARGB32);
+    for (size_t j = 0; j < height; j++) {
+        memcpy(img.scanLine(j), data, width * 4);
+        data += stride;
+    }
+
+    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+    CVPixelBufferRelease(pixelBuffer);
+    return img;
+}
+
+void AVFVideoFrameRenderer::initRenderer()
+{
+    // even for using a texture directly, we need to be able to make a context current,
+    // so we need an offscreen, and we shouldn't assume we can make the surface context
+    // current on that offscreen, so use our own (sharing with it). Slightly
+    // excessive but no performance penalty and makes the QImage path easier to maintain
+
+    //Make sure we have an OpenGL context to make current
+    if (!m_glContext) {
+        //Create OpenGL context and set share context from surface
+        QOpenGLContext *shareContext = 0;
+        if (m_surface) {
+            shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
+        }
+
+        m_glContext = new QOpenGLContext();
+        if (shareContext) {
+            m_glContext->setShareContext(shareContext);
+            m_isContextShared = true;
+        } else {
+#ifdef QT_DEBUG_AVF
+            qWarning("failed to get Render Thread context");
+#endif
+            m_isContextShared = false;
+        }
+        if (!m_glContext->create()) {
+#ifdef QT_DEBUG_AVF
+            qWarning("failed to create QOpenGLContext");
+#endif
+            return;
+        }
+    }
+
+    if (!m_offscreenSurface) {
+        m_offscreenSurface = new QOffscreenSurface();
+        m_offscreenSurface->setFormat(m_glContext->format());
+        m_offscreenSurface->create();
+    }
+
+    //Need current context
+    m_glContext->makeCurrent(m_offscreenSurface);
+
+    // Create a new open gl texture cache
+    CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, NULL,
+                                           [EAGLContext currentContext],
+                                           NULL, &m_textureCache);
+
+    if (err) {
+#ifdef QT_DEBUG_AVF
+        qWarning("Error at CVOGLTextureCacheCreate %d", err);
+#endif
+    }
+}
avfvideorenderercontrol.mm
@@ -41,26 +41,40 @@

 #include "avfvideorenderercontrol.h"
 #include "avfdisplaylink.h"

+#if defined(Q_OS_IOS)
+#include "avfvideoframerenderer_ios.h"
+#else
 #include "avfvideoframerenderer.h"
+#endif

 #include <QtMultimedia/qabstractvideobuffer.h>
 #include <QtMultimedia/qabstractvideosurface.h>
 #include <QtMultimedia/qvideosurfaceformat.h>

+#include <private/qimagevideobuffer_p.h>

 #include <QtCore/qdebug.h>

 #import <AVFoundation/AVFoundation.h>

 QT_USE_NAMESPACE

-class TextureVideoBuffer : public QAbstractVideoBuffer
+#if defined(Q_OS_IOS)
+class TextureCacheVideoBuffer : public QAbstractVideoBuffer
 {
 public:
-    TextureVideoBuffer(GLuint textureId)
+    TextureCacheVideoBuffer(CVOGLTextureRef texture)
         : QAbstractVideoBuffer(GLTextureHandle)
-        , m_textureId(textureId)
+        , m_texture(texture)
     {}

-    virtual ~TextureVideoBuffer() {}
+    virtual ~TextureCacheVideoBuffer()
+    {
+        // absolutely critical that we drop this
+        // reference of textures will stay in the cache
+        CFRelease(m_texture);
+    }

     MapMode mapMode() const { return NotMapped; }
     uchar *map(MapMode, int*, int*) { return 0; }
@@ -68,41 +82,39 @@ public:

     QVariant handle() const
     {
-        return QVariant::fromValue<unsigned int>(m_textureId);
+        GLuint texId = CVOGLTextureGetName(m_texture);
+        return QVariant::fromValue<unsigned int>(texId);
     }

 private:
-    GLuint m_textureId;
+    CVOGLTextureRef m_texture;
 };
+#else
-class QImageVideoBuffer : public QAbstractVideoBuffer
+class TextureVideoBuffer : public QAbstractVideoBuffer
 {
 public:
-    QImageVideoBuffer(const QImage &image)
-        : QAbstractVideoBuffer(NoHandle)
-        , m_image(image)
-        , m_mode(NotMapped)
+    TextureVideoBuffer(GLuint tex)
+        : QAbstractVideoBuffer(GLTextureHandle)
+        , m_texture(tex)
+    {}
+
+    virtual ~TextureVideoBuffer()
     {

     }

-    MapMode mapMode() const { return m_mode; }
-    uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
+    MapMode mapMode() const { return NotMapped; }
+    uchar *map(MapMode, int*, int*) { return 0; }
+    void unmap() {}
+
+    QVariant handle() const
     {
-        if (mode != NotMapped && m_mode == NotMapped) {
-            m_mode = mode;
-            return m_image.bits();
-        } else
-            return 0;
+        return QVariant::fromValue<unsigned int>(m_texture);
     }

-    void unmap() {
-        m_mode = NotMapped;
-    }
 private:
-    QImage m_image;
-    MapMode m_mode;
+    GLuint m_texture;
 };
+#endif

 AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
     : QVideoRendererControl(parent)
@@ -122,8 +134,7 @@ AVFVideoRendererControl::~AVFVideoRendererControl()
     qDebug() << Q_FUNC_INFO;
 #endif
     m_displayLink->stop();
-    if (m_playerLayer)
-        [(AVPlayerLayer*)m_playerLayer release];
+    [(AVPlayerLayer*)m_playerLayer release];
 }

 QAbstractVideoSurface *AVFVideoRendererControl::surface() const
@@ -150,8 +161,8 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
     m_surface = surface;

     //If the surface changed, then the current frame renderer is no longer valid
-    if (m_frameRenderer)
-        delete m_frameRenderer;
+    delete m_frameRenderer;
+    m_frameRenderer = 0;

     //If there is now no surface to render too
     if (m_surface == 0) {
@@ -161,6 +172,11 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)

     //Surface changed, so we need a new frame renderer
     m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this);
+#if defined(Q_OS_IOS)
+    if (m_playerLayer) {
+        m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(m_playerLayer));
+    }
+#endif

     //Check for needed formats to render as OpenGL Texture
     m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
@@ -187,6 +203,12 @@ void AVFVideoRendererControl::setLayer(void *playerLayer)
     if (m_surface && m_surface->isActive())
         m_surface->stop();

+#if defined(Q_OS_IOS)
+    if (m_frameRenderer) {
+        m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(playerLayer));
+    }
+#endif
+
     //If there is no layer to render, stop scheduling updates
     if (m_playerLayer == 0) {
         m_displayLink->stop();
@@ -216,16 +238,22 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
         return;

     if (m_enableOpenGL) {
+#if defined(Q_OS_IOS)
-        GLuint textureId = m_frameRenderer->renderLayerToTexture(playerLayer);
+        CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer);

         //Make sure we got a valid texture
-        if (textureId == 0) {
-            qWarning("renderLayerToTexture failed");
+        if (tex == 0)
             return;
-        }

-        QAbstractVideoBuffer *buffer = new TextureVideoBuffer(textureId);
+        QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex);
+#else
+        GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer);
+        //Make sure we got a valid texture
+        if (tex == 0)
+            return;
+
+        QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex);
+#endif
         QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);

         if (m_surface && frame.isValid()) {
@@ -234,8 +262,11 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)

             if (!m_surface->isActive()) {
                 QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle);
+#if defined(Q_OS_IOS)
+                format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
+#else
                 format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
+#endif
                 if (!m_surface->start(format)) {
                     //Surface doesn't support GLTextureHandle
                     qWarning("Failed to activate video surface");
@@ -250,13 +281,11 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
         QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer);

         if (frameData.isNull()) {
-            qWarning("renterLayerToImage failed");
             return;
         }

         QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
-        QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32_Premultiplied);
+        QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32);

         if (m_surface && frame.isValid()) {
             if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
                 m_surface->stop();
Project file (qmake)
@@ -44,21 +44,31 @@ OBJECTIVE_SOURCES += \
         avfvideowidget.mm
 }

-!ios {
-    LIBS += -framework QuartzCore -framework AppKit
-    HEADERS += \
-        avfvideorenderercontrol.h \
-        avfdisplaylink.h
-    OBJECTIVE_SOURCES += \
-        avfvideorenderercontrol.mm \
-        avfdisplaylink.mm
+ios {
+    contains(QT_CONFIG, opengl.*) {
+        HEADERS += \
+            avfvideoframerenderer_ios.h \
+            avfvideorenderercontrol.h \
+            avfdisplaylink.h
+
+        OBJECTIVE_SOURCES += \
+            avfvideoframerenderer_ios.mm \
+            avfvideorenderercontrol.mm \
+            avfdisplaylink.mm
+    }
+} else {
+    LIBS += -framework QuartzCore -framework AppKit
+
     contains(QT_CONFIG, opengl.*) {
         HEADERS += \
-            avfvideoframerenderer.h
+            avfvideoframerenderer.h \
+            avfvideorenderercontrol.h \
+            avfdisplaylink.h
+
         OBJECTIVE_SOURCES += \
-            avfvideoframerenderer.mm
+            avfvideoframerenderer.mm \
+            avfvideorenderercontrol.mm \
+            avfdisplaylink.mm
     }
 }
