AVFoundation: render camera frames using OpenGL textures on iOS.

OpenGL textures can easily be created from a CVImageBuffer using
Apple APIs. This avoids having to map the buffer into main memory
and therefore greatly improves rendering performance.
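For reference, a minimal sketch of that texture path, mirroring what
the new CVImageVideoBuffer::handle() below does ('imageBuffer' stands
for the CVImageBufferRef delivered by the capture output; error
handling trimmed):

    // One texture cache per GL context; textures created from it alias
    // the IOSurface-backed capture buffer, so no CPU copy takes place.
    CVOpenGLESTextureCacheRef cache = 0;
    CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL,
                                 [EAGLContext currentContext], NULL, &cache);

    CVOpenGLESTextureRef texture = 0;
    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache,
                                                 imageBuffer, NULL,
                                                 GL_TEXTURE_2D, GL_RGBA,
                                                 CVPixelBufferGetWidth(imageBuffer),
                                                 CVPixelBufferGetHeight(imageBuffer),
                                                 GL_BGRA, GL_UNSIGNED_BYTE, 0,
                                                 &texture);

    // The returned name binds like any regular GL_TEXTURE_2D.
    GLuint textureName = CVOpenGLESTextureGetName(texture);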

We could do the same on OS X, but there the textures always have
the GL_TEXTURE_RECTANGLE target type, and supporting that would
require changes to the QVideoFrame API and to the video node
implementations.
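For comparison, a sketch of the OS X path ('ctx' and 'pf' are an
illustrative current NSOpenGLContext and its NSOpenGLPixelFormat):

    CVOpenGLTextureCacheRef cache = 0;
    CVOpenGLTextureCacheCreate(kCFAllocatorDefault, NULL,
                               [ctx CGLContextObj], [pf CGLPixelFormatObj],
                               NULL, &cache);

    CVOpenGLTextureRef texture = 0;
    CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache,
                                               imageBuffer, NULL, &texture);

    // No target can be requested here: CVOpenGLTextureGetTarget() reports
    // GL_TEXTURE_RECTANGLE, whose non-normalized coordinates QVideoFrame
    // and the video nodes cannot express today.
    GLenum target = CVOpenGLTextureGetTarget(texture);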

Change-Id: I6dde7e8d7a27460e41523cd474c3c741affc1480
Reviewed-by: James Turner <james.turner@kdab.com>
Reviewed-by: Christian Stromme <christian.stromme@theqtcompany.com>
Author: Yoann Lopes
Date:   2015-05-29 18:02:26 +02:00
Parent: 99f6cf5f28
Commit: 46a83d5b86

3 changed files with 135 additions and 38 deletions

==== File 1 of 3 ====

@@ -63,6 +63,8 @@ public:
     AVCaptureVideoDataOutput *videoDataOutput() const;
 
+    bool supportsTextures() const { return m_supportsTextures; }
+
 #ifdef Q_OS_IOS
     AVFCaptureFramesDelegate *captureDelegate() const;
     void resetCaptureDelegate() const;
@@ -81,11 +83,18 @@ private:
     AVFCameraSession *m_cameraSession;
     AVCaptureVideoDataOutput *m_videoDataOutput;
 
+    bool m_supportsTextures;
     bool m_needsHorizontalMirroring;
+
+#ifdef Q_OS_IOS
+    CVOpenGLESTextureCacheRef m_textureCache;
+#endif
 
     QVideoFrame m_lastViewfinderFrame;
     QMutex m_vfMutex;
     dispatch_queue_t m_delegateQueue;
+
+    friend class CVImageVideoBuffer;
 };
 
 QT_END_NAMESPACE

==== File 2 of 3 ====

@@ -38,6 +38,10 @@
 #include "avfcameraservice.h"
 #include "avfcameradebug.h"
 
+#ifdef Q_OS_IOS
+#include <QtGui/qopengl.h>
+#endif
+
 #include <QtMultimedia/qabstractvideosurface.h>
 #include <QtMultimedia/qabstractvideobuffer.h>
@@ -45,20 +49,32 @@
 QT_USE_NAMESPACE
 
-class CVPixelBufferVideoBuffer : public QAbstractPlanarVideoBuffer
+class CVImageVideoBuffer : public QAbstractPlanarVideoBuffer
 {
-    friend class CVPixelBufferVideoBufferPrivate;
 public:
-    CVPixelBufferVideoBuffer(CVPixelBufferRef buffer)
+    CVImageVideoBuffer(CVImageBufferRef buffer, AVFCameraRendererControl *renderer)
+#ifndef Q_OS_IOS
         : QAbstractPlanarVideoBuffer(NoHandle)
+#else
+        : QAbstractPlanarVideoBuffer(renderer->supportsTextures()
+                                     && CVPixelBufferGetPixelFormatType(buffer) == kCVPixelFormatType_32BGRA
+                                     ? GLTextureHandle : NoHandle)
+        , m_texture(0)
+#endif
         , m_buffer(buffer)
+        , m_renderer(renderer)
         , m_mode(NotMapped)
     {
         CVPixelBufferRetain(m_buffer);
     }
 
-    virtual ~CVPixelBufferVideoBuffer()
+    ~CVImageVideoBuffer()
     {
+        CVImageVideoBuffer::unmap();
+
+#ifdef Q_OS_IOS
+        if (m_texture)
+            CFRelease(m_texture);
+#endif
         CVPixelBufferRelease(m_buffer);
     }
@@ -78,7 +94,9 @@ public:
         // For a bi-planar format we have to set the parameters correctly:
         if (mode != QAbstractVideoBuffer::NotMapped && m_mode == QAbstractVideoBuffer::NotMapped) {
-            CVPixelBufferLockBaseAddress(m_buffer, 0);
+            CVPixelBufferLockBaseAddress(m_buffer, mode == QAbstractVideoBuffer::ReadOnly
+                                                   ? kCVPixelBufferLock_ReadOnly
+                                                   : 0);
 
             if (numBytes)
                 *numBytes = CVPixelBufferGetDataSize(m_buffer);
@@ -103,8 +121,9 @@ public:
     uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
     {
         if (mode != NotMapped && m_mode == NotMapped) {
-            CVPixelBufferLockBaseAddress(m_buffer, 0);
+            CVPixelBufferLockBaseAddress(m_buffer, mode == QAbstractVideoBuffer::ReadOnly
+                                                   ? kCVPixelBufferLock_ReadOnly
+                                                   : 0);
 
             if (numBytes)
                 *numBytes = CVPixelBufferGetDataSize(m_buffer);
@@ -121,13 +140,63 @@ public:
     void unmap()
     {
         if (m_mode != NotMapped) {
+            CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QAbstractVideoBuffer::ReadOnly
+                                           ? kCVPixelBufferLock_ReadOnly
+                                           : 0);
             m_mode = NotMapped;
-            CVPixelBufferUnlockBaseAddress(m_buffer, 0);
         }
     }
 
+    QVariant handle() const
+    {
+#ifdef Q_OS_IOS
+        // Called from the render thread, so there is a current OpenGL context
+
+        if (!m_renderer->m_textureCache) {
+            CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
+                                                        NULL,
+                                                        [EAGLContext currentContext],
+                                                        NULL,
+                                                        &m_renderer->m_textureCache);
+            if (err != kCVReturnSuccess)
+                qWarning("Error creating texture cache");
+        }
+
+        if (m_renderer->m_textureCache && !m_texture) {
+            CVOpenGLESTextureCacheFlush(m_renderer->m_textureCache, 0);
+
+            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                                        m_renderer->m_textureCache,
+                                                                        m_buffer,
+                                                                        NULL,
+                                                                        GL_TEXTURE_2D,
+                                                                        GL_RGBA,
+                                                                        CVPixelBufferGetWidth(m_buffer),
+                                                                        CVPixelBufferGetHeight(m_buffer),
+                                                                        GL_BGRA,
+                                                                        GL_UNSIGNED_BYTE,
+                                                                        0,
+                                                                        &m_texture);
+            if (err != kCVReturnSuccess)
+                qWarning("Error creating texture from buffer");
+        }
+
+        if (m_texture)
+            return CVOpenGLESTextureGetName(m_texture);
+        else
+            return 0;
+#else
+        return QVariant();
+#endif
+    }
+
 private:
-    CVPixelBufferRef m_buffer;
+#ifdef Q_OS_IOS
+    mutable CVOpenGLESTextureRef m_texture;
+#endif
+    CVImageBufferRef m_buffer;
+    AVFCameraRendererControl *m_renderer;
     MapMode m_mode;
 };
@@ -171,13 +240,25 @@ private:
     int width = CVPixelBufferGetWidth(imageBuffer);
     int height = CVPixelBufferGetHeight(imageBuffer);
-    QVideoFrame::PixelFormat format =
-            AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
+    QVideoFrame::PixelFormat format;
+
+#ifdef Q_OS_IOS
+    bool useTexture = m_renderer->supportsTextures()
+            && CVPixelBufferGetPixelFormatType(imageBuffer) == kCVPixelFormatType_32BGRA;
+    if (useTexture)
+        format = QVideoFrame::Format_BGRA32;
+    else
+#endif
+    format = AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
+
     if (format == QVideoFrame::Format_Invalid)
         return;
 
-    QVideoFrame frame(new CVPixelBufferVideoBuffer(imageBuffer), QSize(width, height), format);
+    QVideoFrame frame(new CVImageVideoBuffer(imageBuffer, m_renderer),
+                      QSize(width, height),
+                      format);
+
     m_renderer->syncHandleViewfinderFrame(frame);
 }
@@ -187,7 +268,11 @@ private:
 AVFCameraRendererControl::AVFCameraRendererControl(QObject *parent)
     : QVideoRendererControl(parent)
     , m_surface(0)
+    , m_supportsTextures(false)
     , m_needsHorizontalMirroring(false)
+#ifdef Q_OS_IOS
+    , m_textureCache(0)
+#endif
 {
     m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
 }
@@ -198,6 +283,10 @@ AVFCameraRendererControl::~AVFCameraRendererControl()
     [m_viewfinderFramesDelegate release];
     if (m_delegateQueue)
         dispatch_release(m_delegateQueue);
+#ifdef Q_OS_IOS
+    if (m_textureCache)
+        CFRelease(m_textureCache);
+#endif
 }
 
 QAbstractVideoSurface *AVFCameraRendererControl::surface() const
@@ -209,6 +298,11 @@ void AVFCameraRendererControl::setSurface(QAbstractVideoSurface *surface)
 {
     if (m_surface != surface) {
         m_surface = surface;
+#ifdef Q_OS_IOS
+        m_supportsTextures = m_surface
+                ? m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGRA32)
+                : false;
+#endif
         Q_EMIT surfaceChanged(surface);
     }
 }
@@ -261,21 +355,6 @@ void AVFCameraRendererControl::syncHandleViewfinderFrame(const QVideoFrame &frame)
     m_lastViewfinderFrame = frame;
-    if (m_needsHorizontalMirroring) {
-        m_lastViewfinderFrame.map(QAbstractVideoBuffer::ReadOnly);
-
-        // no deep copy
-        QImage image(m_lastViewfinderFrame.bits(),
-                     m_lastViewfinderFrame.size().width(),
-                     m_lastViewfinderFrame.size().height(),
-                     m_lastViewfinderFrame.bytesPerLine(),
-                     QImage::Format_RGB32);
-        QImage mirrored = image.mirrored(true, false);
-
-        m_lastViewfinderFrame.unmap();
-        m_lastViewfinderFrame = QVideoFrame(mirrored);
-    }
 
     if (m_cameraSession && m_lastViewfinderFrame.isValid())
         m_cameraSession->onCameraFrameFetched(m_lastViewfinderFrame);
 }
@@ -315,7 +394,9 @@ void AVFCameraRendererControl::handleViewfinderFrame()
     }
 
     if (!m_surface->isActive()) {
-        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat());
+        QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), frame.handleType());
+        if (m_needsHorizontalMirroring)
+            format.setProperty("mirrored", true);
+
         if (!m_surface->start(format)) {
             qWarning() << "Failed to start viewfinder m_surface, format:" << format;

==== File 3 of 3 ====

@@ -573,22 +573,29 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
     if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
         // If the the pixel format is not specified or invalid, pick the preferred video surface
         // format, or if no surface is set, the preferred capture device format
 
         const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
-        QList<QVideoFrame::PixelFormat> surfaceFormats;
-        if (m_service->videoOutput() && m_service->videoOutput()->surface())
-            surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
+        QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
 
-        QVideoFrame::PixelFormat format = deviceFormats.first();
+        QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
+                                                                  : 0;
+        if (surface) {
+            if (m_service->videoOutput()->supportsTextures()) {
+                pickedFormat = QVideoFrame::Format_ARGB32;
+            } else {
+                QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
 
-        for (int i = 0; i < surfaceFormats.count(); ++i) {
-            const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
-            if (deviceFormats.contains(surfaceFormat)) {
-                format = surfaceFormat;
-                break;
+                for (int i = 0; i < surfaceFormats.count(); ++i) {
+                    const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
+                    if (deviceFormats.contains(surfaceFormat)) {
+                        pickedFormat = surfaceFormat;
+                        break;
+                    }
+                }
             }
         }
 
-        CVPixelFormatFromQtFormat(format, avfPixelFormat);
+        CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
     }
 
     if (avfPixelFormat != 0) {