iOS video frame renderer implementation.

Uses CVTextureCache, iOS only for now. The OS X code could be ported,
but it will need further work to support TEXTURE_RECTANGLE in the
QVideoNode classes.

When we can't share a context, we fall back to an offscreen window,
FBO rendering and grabbing a QImage.

Change-Id: I23b831fdcc63aeb1b67b7741d8d56779470240d3
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Author:       James Turner
Date:         2015-01-12 13:51:25 +00:00
Committed by: Yoann Lopes
Parent:       3e94b7ce2d
Commit:       9444c8ec61
8 changed files with 576 additions and 62 deletions
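Before the per-file diffs, here is the core CoreVideo texture-cache flow that "Uses CVTextureCache" refers to, condensed into a standalone sketch. It assumes a current EAGLContext and a 32BGRA CVPixelBufferRef, as the new renderer uses; the helper names (makeTextureCache, textureForPixelBuffer) are illustrative and not part of the commit.

// Sketch only: the real code is in AVFVideoFrameRenderer::initRenderer()
// and createCacheTextureFromLayer() further down in this commit.
#import <CoreVideo/CoreVideo.h>
#import <CoreVideo/CVOpenGLESTextureCache.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h> // for GL_BGRA_EXT

static CVOpenGLESTextureCacheRef makeTextureCache()
{
    // The cache is tied to the EAGLContext that is current when it is created.
    CVOpenGLESTextureCacheRef cache = 0;
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL,
                                                [EAGLContext currentContext],
                                                NULL, &cache);
    return (err == kCVReturnSuccess) ? cache : 0;
}

static CVOpenGLESTextureRef textureForPixelBuffer(CVOpenGLESTextureCacheRef cache,
                                                  CVPixelBufferRef buffer)
{
    // Maps a 32BGRA pixel buffer into a GL_TEXTURE_2D without copying the pixels.
    CVOpenGLESTextureRef texture = 0;
    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache, buffer, NULL,
                                                 GL_TEXTURE_2D, GL_RGBA,
                                                 (GLsizei)CVPixelBufferGetWidth(buffer),
                                                 (GLsizei)CVPixelBufferGetHeight(buffer),
                                                 GL_BGRA_EXT, GL_UNSIGNED_BYTE, 0,
                                                 &texture);
    // Bind with CVOpenGLESTextureGetName(texture); CFRelease() it once the frame
    // is done, otherwise the texture stays alive in the cache.
    return texture;
}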

avfdisplaylink.h

@@ -37,7 +37,11 @@
#include <QtCore/qobject.h>
#include <QtCore/qmutex.h>
#if defined(Q_OS_IOS)
#include <CoreVideo/CVBase.h>
#else
#include <QuartzCore/CVDisplayLink.h>
#endif
QT_BEGIN_NAMESPACE
@@ -64,7 +68,11 @@ protected:
virtual bool event(QEvent *);
private:
#if defined(Q_OS_IOS)
void *m_displayLink;
#else
CVDisplayLinkRef m_displayLink;
#endif
QMutex m_displayLinkMutex;
bool m_pendingDisplayLinkEvent;
bool m_isActive;

avfdisplaylink.mm

@@ -46,8 +46,70 @@
#include <QtCore/qdebug.h>
#endif
#if defined(Q_OS_IOS)
#import <QuartzCore/CADisplayLink.h>
#import <Foundation/NSRunLoop.h>
#define _m_displayLink static_cast<DisplayLinkObserver*>(m_displayLink)
#else
#endif
QT_USE_NAMESPACE
#if defined(Q_OS_IOS)
@interface DisplayLinkObserver : NSObject
{
AVFDisplayLink *m_avfDisplayLink;
CADisplayLink *m_displayLink;
}
- (void)start;
- (void)stop;
- (void)displayLinkNotification:(CADisplayLink *)sender;
@end
@implementation DisplayLinkObserver
- (id)initWithAVFDisplayLink:(AVFDisplayLink *)link
{
self = [super init];
if (self) {
m_avfDisplayLink = link;
m_displayLink = [[CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkNotification:)] retain];
}
return self;
}
- (void) dealloc
{
if (m_displayLink) {
[m_displayLink release];
m_displayLink = NULL;
}
[super dealloc];
}
- (void)start
{
[m_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
- (void)stop
{
[m_displayLink removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
- (void)displayLinkNotification:(CADisplayLink *)sender
{
Q_UNUSED(sender);
m_avfDisplayLink->displayLinkEvent(nullptr);
}
@end
#else
static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
const CVTimeStamp *inNow,
const CVTimeStamp *inOutputTime,
@@ -65,12 +127,17 @@ static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
link->displayLinkEvent(inOutputTime);
return kCVReturnSuccess;
}
#endif
AVFDisplayLink::AVFDisplayLink(QObject *parent)
: QObject(parent)
, m_displayLink(0)
, m_pendingDisplayLinkEvent(false)
, m_isActive(false)
{
#if defined(Q_OS_IOS)
m_displayLink = [[DisplayLinkObserver alloc] initWithAVFDisplayLink:this];
#else
// create display link for the main display
CVDisplayLinkCreateWithCGDisplay(kCGDirectMainDisplay, &m_displayLink);
if (m_displayLink) {
@@ -80,6 +147,7 @@ AVFDisplayLink::AVFDisplayLink(QObject *parent)
// set the renderer output callback function
CVDisplayLinkSetOutputCallback(m_displayLink, &CVDisplayLinkCallback, this);
}
#endif
}
AVFDisplayLink::~AVFDisplayLink()
@@ -89,8 +157,12 @@ AVFDisplayLink::~AVFDisplayLink()
#endif
if (m_displayLink) {
- CVDisplayLinkStop(m_displayLink);
stop();
#if defined(Q_OS_IOS)
[_m_displayLink release];
#else
CVDisplayLinkRelease(m_displayLink);
#endif
m_displayLink = NULL;
}
}
@@ -108,20 +180,27 @@ bool AVFDisplayLink::isActive() const
void AVFDisplayLink::start()
{
if (m_displayLink && !m_isActive) {
- CVDisplayLinkStart(m_displayLink);
- m_isActive = true;
#if defined(Q_OS_IOS)
[_m_displayLink start];
#else
CVDisplayLinkStart(m_displayLink);
#endif
m_isActive = true;
}
}
void AVFDisplayLink::stop()
{
if (m_displayLink && m_isActive) {
#if defined(Q_OS_IOS)
[_m_displayLink stop];
#else
CVDisplayLinkStop(m_displayLink);
#endif
m_isActive = false;
}
}
void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
{
// This function is called from a
@@ -131,7 +210,12 @@ void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
m_displayLinkMutex.lock();
bool pending = m_pendingDisplayLinkEvent;
m_pendingDisplayLinkEvent = true;
#if defined(Q_OS_IOS)
Q_UNUSED(ts);
memset(&m_frameTimeStamp, 0, sizeof(CVTimeStamp));
#else
m_frameTimeStamp = *ts;
#endif
m_displayLinkMutex.unlock();
if (!pending)

avfmediaplayerservice.h

@@ -57,6 +57,7 @@ private:
AVFMediaPlayerControl *m_control;
QMediaControl *m_videoOutput;
AVFMediaPlayerMetaDataControl *m_playerMetaDataControl;
bool m_enableRenderControl;
};
QT_END_NAMESPACE

avfmediaplayerservice.mm

@@ -43,26 +43,39 @@
#include "avfmediaplayersession.h"
#include "avfmediaplayercontrol.h"
#include "avfmediaplayermetadatacontrol.h"
- #if defined(Q_OS_OSX)
- # include "avfvideooutput.h"
- # include "avfvideorenderercontrol.h"
- #endif
#include "avfvideooutput.h"
#include "avfvideorenderercontrol.h"
#ifndef QT_NO_WIDGETS
# include "avfvideowidgetcontrol.h"
#endif
#include "avfvideowindowcontrol.h"
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_8, __IPHONE_6_0)
#import <AVFoundation/AVFoundation.h>
#endif
QT_USE_NAMESPACE
AVFMediaPlayerService::AVFMediaPlayerService(QObject *parent)
: QMediaService(parent)
, m_videoOutput(0)
, m_enableRenderControl(true)
{
m_session = new AVFMediaPlayerSession(this);
m_control = new AVFMediaPlayerControl(this);
m_control->setSession(m_session);
m_playerMetaDataControl = new AVFMediaPlayerMetaDataControl(m_session, this);
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_8, __IPHONE_6_0)
// AVPlayerItemVideoOutput is available in SDK
#if QT_MAC_DEPLOYMENT_TARGET_BELOW(__MAC_10_8, __IPHONE_6_0)
// might not be available at runtime
#if defined(Q_OS_IOS)
m_enableRenderControl = [AVPlayerItemVideoOutput class] != 0;
#endif
#endif
#endif
connect(m_control, SIGNAL(mediaChanged(QMediaContent)), m_playerMetaDataControl, SLOT(updateTags()));
}
@@ -85,15 +98,16 @@ QMediaControl *AVFMediaPlayerService::requestControl(const char *name)
if (qstrcmp(name, QMetaDataReaderControl_iid) == 0)
return m_playerMetaDataControl;
#if defined(Q_OS_OSX)
- if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
if (m_enableRenderControl && (qstrcmp(name, QVideoRendererControl_iid) == 0)) {
if (!m_videoOutput)
m_videoOutput = new AVFVideoRendererControl(this);
m_session->setVideoOutput(qobject_cast<AVFVideoOutput*>(m_videoOutput));
return m_videoOutput;
}
#endif
#ifndef QT_NO_WIDGETS
if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
if (!m_videoOutput)
@@ -119,11 +133,11 @@ void AVFMediaPlayerService::releaseControl(QMediaControl *control)
qDebug() << Q_FUNC_INFO << control;
#endif
if (m_videoOutput == control) {
#if defined(Q_OS_OSX)
AVFVideoRendererControl *renderControl = qobject_cast<AVFVideoRendererControl*>(m_videoOutput);
if (renderControl)
renderControl->setSurface(0);
#endif
m_videoOutput = 0;
m_session->setVideoOutput(0);
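A note on the m_enableRenderControl check added above: [AVPlayerItemVideoOutput class] != 0 works because, when the deployment target predates the SDK that introduced the class (OS X 10.8 / iOS 6), the class symbol is weakly linked and resolves to nil at runtime on older systems. A generic sketch of that pattern, illustrative rather than taken from the commit:

#import <AVFoundation/AVFoundation.h>

static bool videoOutputAvailable()
{
    // Weak-linked Objective-C class: nil at runtime on OS versions that
    // predate AVPlayerItemVideoOutput, a valid Class object otherwise.
    return [AVPlayerItemVideoOutput class] != nil;
}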

avfvideoframerenderer_ios.h

@@ -0,0 +1,107 @@
/****************************************************************************
**
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef AVFVIDEOFRAMERENDERER_H
#define AVFVIDEOFRAMERENDERER_H
#include <QtCore/QObject>
#include <QtGui/QImage>
#include <QtGui/QOpenGLContext>
#include <QtCore/QSize>
@class AVPlayerLayer;
@class AVPlayerItemVideoOutput;
QT_BEGIN_NAMESPACE
class QOpenGLContext;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class QOffscreenSurface;
class QAbstractVideoSurface;
typedef struct __CVBuffer *CVBufferRef;
typedef CVBufferRef CVImageBufferRef;
typedef CVImageBufferRef CVPixelBufferRef;
#if defined(Q_OS_IOS)
typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
typedef CVImageBufferRef CVOpenGLESTextureRef;
// helpers to avoid tedious #ifdefs
typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
typedef CVOpenGLESTextureRef CVOGLTextureRef;
#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
#define CVOGLTextureGetName CVOpenGLESTextureGetName
#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
#else
typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
typedef CVImageBufferRef CVOpenGLTextureRef;
// helpers to avoid tedious #ifdefs
typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
typedef CVOpenGLTextureRef CVOGLTextureRef;
#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
#define CVOGLTextureGetName CVOpenGLTextureGetName
#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
#endif
class AVFVideoFrameRenderer : public QObject
{
public:
AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = 0);
virtual ~AVFVideoFrameRenderer();
void setPlayerLayer(AVPlayerLayer *layer);
CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer);
QImage renderLayerToImage(AVPlayerLayer *layer);
private:
void initRenderer();
CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
QOpenGLContext *m_glContext;
QOffscreenSurface *m_offscreenSurface;
QAbstractVideoSurface *m_surface;
CVOGLTextureCacheRef m_textureCache;
AVPlayerItemVideoOutput* m_videoOutput;
bool m_isContextShared;
};
QT_END_NAMESPACE
#endif // AVFVIDEOFRAMERENDERER_H
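The commit message's TEXTURE_RECTANGLE caveat maps onto the #else branch above: on OS X a CVOpenGLTextureCache generally hands back rectangle textures rather than GL_TEXTURE_2D, so the QVideoNode classes would need to handle that target before the OS X path could reuse this renderer. A hedged sketch of the check such a port would have to make, using the CVOGLTexture* aliases declared above (not part of the commit):

static bool isPlainTexture2D(CVOGLTextureRef tex)
{
    // GL_TEXTURE_RECTANGLE targets use unnormalised texture coordinates,
    // so the scene-graph materials and shaders would need adjusting before
    // the OS X texture-cache path could feed the existing QVideoNode classes.
    return CVOGLTextureGetTarget(tex) == GL_TEXTURE_2D;
}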

avfvideoframerenderer_ios.mm

@@ -0,0 +1,261 @@
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "avfvideoframerenderer_ios.h"
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtGui/QOpenGLFramebufferObject>
#include <QtGui/QOpenGLShaderProgram>
#include <QtGui/QOffscreenSurface>
#ifdef QT_DEBUG_AVF
#include <QtCore/qdebug.h>
#endif
#import <CoreVideo/CVBase.h>
#import <AVFoundation/AVFoundation.h>
QT_USE_NAMESPACE
AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
: QObject(parent)
, m_glContext(0)
, m_offscreenSurface(0)
, m_surface(surface)
, m_textureCache(0)
, m_videoOutput(0)
, m_isContextShared(true)
{
}
AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
{
#ifdef QT_DEBUG_AVF
qDebug() << Q_FUNC_INFO;
#endif
[m_videoOutput release]; // sending to nil is fine
if (m_textureCache)
CFRelease(m_textureCache);
delete m_offscreenSurface;
delete m_glContext;
}
void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer)
{
Q_UNUSED(layer)
if (m_videoOutput) {
[m_videoOutput release];
m_videoOutput = 0;
// will be re-created in first call to copyPixelBufferFromLayer
}
}
CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
{
initRenderer();
// If the glContext isn't shared, it doesn't make sense to return a texture for us
if (!m_isContextShared)
return 0;
size_t dummyWidth = 0, dummyHeight = 0;
return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight);
}
static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey;
static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY];
CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
size_t& width, size_t& height)
{
//Is layer valid
if (!layer) {
#ifdef QT_DEBUG_AVF
qWarning("copyPixelBufferFromLayer: invalid layer");
#endif
return 0;
}
if (!m_videoOutput) {
m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS];
[m_videoOutput setDelegate:nil queue:nil];
AVPlayerItem * item = [[layer player] currentItem];
[item addOutput:m_videoOutput];
}
CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
// happens when buffering / loading
if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
return 0;
}
CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
itemTimeForDisplay:nil];
if (!pixelBuffer) {
#ifdef QT_DEBUG_AVF
qWarning("copyPixelBufferForItemTime returned nil");
CMTimeShow(currentCMFrameTime);
#endif
return 0;
}
width = CVPixelBufferGetWidth(pixelBuffer);
height = CVPixelBufferGetHeight(pixelBuffer);
return pixelBuffer;
}
CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
size_t& width, size_t& height)
{
CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
if (!pixelBuffer)
return 0;
CVOGLTextureCacheFlush(m_textureCache, 0);
CVOGLTextureRef texture = 0;
CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, NULL,
GL_TEXTURE_2D, GL_RGBA,
(GLsizei) width, (GLsizei) height,
GL_BGRA, GL_UNSIGNED_BYTE, 0,
&texture);
if (!texture || err) {
#ifdef QT_DEBUG_AVF
qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err);
#endif
}
CVPixelBufferRelease(pixelBuffer);
return texture;
}
QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
{
size_t width = 0;
size_t height = 0;
CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
if (!pixelBuffer)
return QImage();
OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (pixelFormat != kCVPixelFormatType_32BGRA) {
#ifdef QT_DEBUG_AVF
qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
#endif
return QImage();
}
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
// format here is not relevant, only using for storage
QImage img = QImage(width, height, QImage::Format_ARGB32);
for (size_t j = 0; j < height; j++) {
memcpy(img.scanLine(j), data, width * 4);
data += stride;
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
return img;
}
void AVFVideoFrameRenderer::initRenderer()
{
// even for using a texture directly, we need to be able to make a context current,
// so we need an offscreen, and we shouldn't assume we can make the surface context
// current on that offscreen, so use our own (sharing with it). Slightly
// excessive but no performance penalty and makes the QImage path easier to maintain
//Make sure we have an OpenGL context to make current
if (!m_glContext) {
//Create OpenGL context and set share context from surface
QOpenGLContext *shareContext = 0;
if (m_surface) {
shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
}
m_glContext = new QOpenGLContext();
if (shareContext) {
m_glContext->setShareContext(shareContext);
m_isContextShared = true;
} else {
#ifdef QT_DEBUG_AVF
qWarning("failed to get Render Thread context");
#endif
m_isContextShared = false;
}
if (!m_glContext->create()) {
#ifdef QT_DEBUG_AVF
qWarning("failed to create QOpenGLContext");
#endif
return;
}
}
if (!m_offscreenSurface) {
m_offscreenSurface = new QOffscreenSurface();
m_offscreenSurface->setFormat(m_glContext->format());
m_offscreenSurface->create();
}
//Need current context
m_glContext->makeCurrent(m_offscreenSurface);
// Create a new open gl texture cache
CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, NULL,
[EAGLContext currentContext],
NULL, &m_textureCache);
if (err) {
#ifdef QT_DEBUG_AVF
qWarning("Error at CVOGLTextureCacheCreate %d", err);
#endif
}
}
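As an aside on renderLayerToImage() above: the per-row memcpy could equivalently use QImage's stride-aware constructor. A hedged alternative sketch, not what the commit does, assuming data, stride, width and height as in that function and the pixel buffer still locked:

// Wrap the locked 32BGRA buffer (no copy), then deep-copy before unlocking.
QImage wrapped(reinterpret_cast<const uchar *>(data),
               int(width), int(height), int(stride),
               QImage::Format_ARGB32);
QImage img = wrapped.copy();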

avfvideorenderercontrol.mm

@@ -41,26 +41,40 @@
#include "avfvideorenderercontrol.h"
#include "avfdisplaylink.h"
#if defined(Q_OS_IOS)
#include "avfvideoframerenderer_ios.h"
#else
#include "avfvideoframerenderer.h"
#endif
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <private/qimagevideobuffer_p.h>
#include <QtCore/qdebug.h>
#import <AVFoundation/AVFoundation.h>
QT_USE_NAMESPACE
- class TextureVideoBuffer : public QAbstractVideoBuffer
#if defined(Q_OS_IOS)
class TextureCacheVideoBuffer : public QAbstractVideoBuffer
{
public:
- TextureVideoBuffer(GLuint textureId)
TextureCacheVideoBuffer(CVOGLTextureRef texture)
: QAbstractVideoBuffer(GLTextureHandle)
- , m_textureId(textureId)
, m_texture(texture)
{}
- virtual ~TextureVideoBuffer() {}
virtual ~TextureCacheVideoBuffer()
{
// absolutely critical that we drop this
// reference or textures will stay in the cache
CFRelease(m_texture);
}
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
@@ -68,41 +82,39 @@ public:
QVariant handle() const
{
- return QVariant::fromValue<unsigned int>(m_textureId);
GLuint texId = CVOGLTextureGetName(m_texture);
return QVariant::fromValue<unsigned int>(texId);
}
private:
- GLuint m_textureId;
CVOGLTextureRef m_texture;
};
- class QImageVideoBuffer : public QAbstractVideoBuffer
#else
class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- QImageVideoBuffer(const QImage &image)
- : QAbstractVideoBuffer(NoHandle)
- , m_image(image)
- , m_mode(NotMapped)
TextureVideoBuffer(GLuint tex)
: QAbstractVideoBuffer(GLTextureHandle)
, m_texture(tex)
{}
virtual ~TextureVideoBuffer()
{
}
- MapMode mapMode() const { return m_mode; }
- uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
void unmap() {}
QVariant handle() const
{
- if (mode != NotMapped && m_mode == NotMapped) {
- m_mode = mode;
- return m_image.bits();
- } else
- return 0;
return QVariant::fromValue<unsigned int>(m_texture);
}
- void unmap() {
- m_mode = NotMapped;
- }
private:
- QImage m_image;
- MapMode m_mode;
GLuint m_texture;
};
#endif
AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
@@ -122,8 +134,7 @@ AVFVideoRendererControl::~AVFVideoRendererControl()
qDebug() << Q_FUNC_INFO;
#endif
m_displayLink->stop();
- if (m_playerLayer)
- [(AVPlayerLayer*)m_playerLayer release];
[(AVPlayerLayer*)m_playerLayer release];
}
QAbstractVideoSurface *AVFVideoRendererControl::surface() const
@@ -150,8 +161,8 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
m_surface = surface;
//If the surface changed, then the current frame renderer is no longer valid
- if (m_frameRenderer)
- delete m_frameRenderer;
delete m_frameRenderer;
m_frameRenderer = 0;
//If there is now no surface to render to
if (m_surface == 0) {
@@ -161,6 +172,11 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
//Surface changed, so we need a new frame renderer
m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this);
#if defined(Q_OS_IOS)
if (m_playerLayer) {
m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(m_playerLayer));
}
#endif
//Check for needed formats to render as OpenGL Texture
m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
@@ -187,6 +203,12 @@ void AVFVideoRendererControl::setLayer(void *playerLayer)
if (m_surface && m_surface->isActive())
m_surface->stop();
#if defined(Q_OS_IOS)
if (m_frameRenderer) {
m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(playerLayer));
}
#endif
//If there is no layer to render, stop scheduling updates
if (m_playerLayer == 0) {
m_displayLink->stop();
@@ -216,16 +238,22 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
if (m_enableOpenGL) {
- GLuint textureId = m_frameRenderer->renderLayerToTexture(playerLayer);
#if defined(Q_OS_IOS)
CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer);
//Make sure we got a valid texture
- if (textureId == 0) {
- qWarning("renderLayerToTexture failed");
if (tex == 0)
return;
- }
- QAbstractVideoBuffer *buffer = new TextureVideoBuffer(textureId);
QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex);
#else
GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer);
//Make sure we got a valid texture
if (tex == 0)
return;
QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex);
#endif
QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
if (m_surface && frame.isValid()) {
@@ -234,8 +262,11 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
if (!m_surface->isActive()) {
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle);
#if defined(Q_OS_IOS)
format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
#else
format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
#endif
if (!m_surface->start(format)) {
//Surface doesn't support GLTextureHandle
qWarning("Failed to activate video surface");
@@ -250,13 +281,11 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer);
if (frameData.isNull()) {
qWarning("renterLayerToImage failed");
return;
}
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32_Premultiplied);
QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();

mediaplayer.pro

@@ -44,21 +44,31 @@ OBJECTIVE_SOURCES += \
avfvideowidget.mm
}
- !ios {
- LIBS += -framework QuartzCore -framework AppKit
ios {
contains(QT_CONFIG, opengl.*) {
HEADERS += \
avfvideoframerenderer_ios.h \
avfvideorenderercontrol.h \
avfdisplaylink.h
- HEADERS += \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
- OBJECTIVE_SOURCES += \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
OBJECTIVE_SOURCES += \
avfvideoframerenderer_ios.mm \
avfvideorenderercontrol.mm \
avfdisplaylink.mm
}
} else {
LIBS += -framework QuartzCore -framework AppKit
contains(QT_CONFIG, opengl.*) {
HEADERS += \
- avfvideoframerenderer.h
avfvideoframerenderer.h \
avfvideorenderercontrol.h \
avfdisplaylink.h
OBJECTIVE_SOURCES += \
- avfvideoframerenderer.mm
avfvideoframerenderer.mm \
avfvideorenderercontrol.mm \
avfdisplaylink.mm
}
}