Android: fixed image capture preview.

We used to generate the capture preview from the video output by
grabbing the pixels of the last frame (in an FBO). This is no longer
possible; instead, we query the camera for a preview frame, which
arrives in the NV21 format and needs to be converted to RGB.

Change-Id: I1c728b3a708a6f052a83aebf9f15f511eab7a02f
Reviewed-by: Christian Stromme <christian.stromme@digia.com>
Author: Yoann Lopes
Date: 2013-09-27 18:20:15 +02:00
Committed by: The Qt Project
Parent: be7a6241e7
Commit: 28df116570
10 changed files with 130 additions and 17 deletions
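
For reference, an NV21 frame stores a full-resolution Y (luma) plane followed by a half-resolution plane of interleaved V/U (chroma) byte pairs, one pair per 2x2 block of pixels. The following indexing sketch is illustrative only and not part of the commit (the helper names are made up); it mirrors the offsets used by the conversion helper added below.

    // NV21 layout: [ Y plane: width*height bytes ][ VU plane: width*height/2 bytes ]
    // Each interleaved V/U pair covers a 2x2 block of luma samples.
    static inline unsigned char nv21Y(const unsigned char *yuv, int x, int y, int width)
    {
        return yuv[y * width + x];                                    // one Y byte per pixel
    }

    static inline unsigned char nv21V(const unsigned char *yuv, int x, int y, int width, int height)
    {
        // chroma row = y/2; column rounded down to the even (V) byte of the pair
        return yuv[width * height + (y >> 1) * width + (x & ~1)];
    }

    static inline unsigned char nv21U(const unsigned char *yuv, int x, int y, int width, int height)
    {
        return yuv[width * height + (y >> 1) * width + (x & ~1) + 1]; // U follows V
    }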


@@ -45,7 +45,10 @@ import android.hardware.Camera;
 import android.graphics.SurfaceTexture;
 import android.util.Log;
 
-public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback, Camera.AutoFocusCallback
+public class QtCamera implements Camera.ShutterCallback,
+                                 Camera.PictureCallback,
+                                 Camera.AutoFocusCallback,
+                                 Camera.PreviewCallback
 {
     private int m_cameraId = -1;
     private Camera m_camera = null;
@@ -149,6 +152,11 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
         m_camera.cancelAutoFocus();
     }
 
+    public void requestPreviewFrame()
+    {
+        m_camera.setOneShotPreviewCallback(this);
+    }
+
     public void takePicture()
     {
         try {
@@ -170,6 +178,12 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
         notifyPictureCaptured(m_cameraId, data);
     }
 
+    @Override
+    public void onPreviewFrame(byte[] data, Camera camera)
+    {
+        notifyPreviewFrame(m_cameraId, data);
+    }
+
     @Override
     public void onAutoFocus(boolean success, Camera camera)
     {
@@ -179,4 +193,5 @@ public class QtCamera implements Camera.ShutterCallback, Camera.PictureCallback,
     private static native void notifyAutoFocusComplete(int id, boolean success);
     private static native void notifyPictureExposed(int id);
     private static native void notifyPictureCaptured(int id, byte[] data);
+    private static native void notifyPreviewFrame(int id, byte[] data);
 }


@@ -76,4 +76,29 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
     return s1.width() * s1.height() < s2.width() * s2.height();
 }
 
+void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height)
+{
+    const int frameSize = width * height;
+
+    int a = 0;
+    for (int i = 0, ci = 0; i < height; ++i, ci += 1) {
+        for (int j = 0, cj = 0; j < width; ++j, cj += 1) {
+            int y = (0xff & ((int) yuv[ci * width + cj]));
+            int v = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 0]));
+            int u = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 1]));
+            y = y < 16 ? 16 : y;
+
+            int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
+            int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
+            int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
+
+            r = qBound(0, r, 255);
+            g = qBound(0, g, 255);
+            b = qBound(0, b, 255);
+
+            rgb[a++] = 0xff000000 | (r << 16) | (g << 8) | b;
+        }
+    }
+}
+
 QT_END_NAMESPACE
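
A minimal usage sketch for the helper above (illustrative only, not part of the commit; the function name is made up): it converts a raw NV21 preview buffer into a QImage, which is essentially what the camera session code later in this commit does with the bytes delivered from Java.

    #include <QByteArray>
    #include <QImage>
    #include <QSize>

    // Assumes the declaration of qt_convert_NV21_to_ARGB32() from
    // qandroidmultimediautils.h is visible.
    QImage previewFromNV21(const QByteArray &frame, const QSize &size)
    {
        QImage image(size, QImage::Format_ARGB32);
        qt_convert_NV21_to_ARGB32(reinterpret_cast<const uchar *>(frame.constData()),
                                  reinterpret_cast<quint32 *>(image.bits()),
                                  size.width(), size.height());
        return image;
    }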


@@ -53,6 +53,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
 bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
 
+void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height);
+
 QT_END_NAMESPACE
 
 #endif // QANDROIDMULTIMEDIAUTILS_H


@@ -61,8 +61,6 @@
     virtual void setVideoSize(const QSize &) { }
     virtual void stop() { }
 
-    virtual QImage toImage() = 0;
-
 // signals:
 //     void readyChanged(bool);
 };


@@ -223,13 +223,6 @@
     m_nativeSize = QSize();
 }
 
-QImage QAndroidVideoRendererControl::toImage()
-{
-    // FIXME!!! Since we are not using a FBO anymore, we can't grab the pixels. And glGetTexImage
-    // doesn't work on GL_TEXTURE_EXTERNAL_OES
-    return QImage();
-}
-
 void QAndroidVideoRendererControl::onFrameAvailable()
 {
     if (!m_nativeSize.isValid() || !m_surface)


@@ -77,7 +77,6 @@
     bool isReady() Q_DECL_OVERRIDE;
     void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
     void stop() Q_DECL_OVERRIDE;
-    QImage toImage() Q_DECL_OVERRIDE;
 
     void customEvent(QEvent *) Q_DECL_OVERRIDE;


@@ -147,8 +147,11 @@ bool QAndroidCameraSession::open()
     if (m_camera) {
         connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
         connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
+        connect(m_camera, SIGNAL(previewFrameAvailable(QByteArray)), this, SLOT(onCameraPreviewFrameAvailable(QByteArray)));
         m_nativeOrientation = m_camera->getNativeOrientation();
         m_status = QCamera::LoadedStatus;
+        if (m_camera->getPreviewFormat() != JCamera::NV21)
+            m_camera->setPreviewFormat(JCamera::NV21);
         emit opened();
     } else {
         m_status = QCamera::UnavailableStatus;
@@ -422,6 +425,7 @@ int QAndroidCameraSession::capture(const QString &fileName)
         // adjust picture rotation depending on the device orientation
         m_camera->setRotation(currentCameraRotation());
 
+        m_camera->requestPreviewFrame();
         m_camera->takePicture();
     } else {
         emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError,
@@ -450,10 +454,6 @@ void QAndroidCameraSession::onCameraPictureExposed()
 void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
 {
     if (!m_captureCanceled) {
-        // generate a preview from the viewport
-        if (m_videoOutput)
-            emit imageCaptured(m_currentImageCaptureId, m_videoOutput->toImage());
-
         // Loading and saving the captured image can be slow, do it in a separate thread
         QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage,
                           m_currentImageCaptureId,
@@ -517,6 +517,34 @@
     }
 }
 
+void QAndroidCameraSession::onCameraPreviewFrameAvailable(const QByteArray &data)
+{
+    if (m_captureCanceled || m_readyForCapture)
+        return;
+
+    QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
+                      m_currentImageCaptureId,
+                      data);
+}
+
+void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data)
+{
+    QSize frameSize = m_camera->previewSize();
+    QImage preview(frameSize, QImage::Format_ARGB32);
+    qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
+                              (quint32 *)preview.bits(),
+                              frameSize.width(),
+                              frameSize.height());
+
+    // Preview display of front-facing cameras is flipped horizontally, but the frame data
+    // we get here is not. Flip it ourselves if the camera is front-facing to match what the user
+    // sees on the viewfinder.
+    if (m_camera->getFacing() == JCamera::CameraFacingFront)
+        preview = preview.transformed(QTransform().scale(-1, 1));
+
+    emit imageCaptured(id, preview);
+}
+
 void QAndroidCameraSession::onVideoOutputReady(bool ready)
 {
     if (m_camera && m_videoOutput && ready)


@@ -114,6 +114,7 @@ private Q_SLOTS:
     void onCameraPictureExposed();
    void onCameraPictureCaptured(const QByteArray &data);
+    void onCameraPreviewFrameAvailable(const QByteArray &data);
 
 private:
     bool open();
@@ -123,7 +124,7 @@ private:
     void stopPreview();
 
     void applyImageSettings();
-    void processPreviewImage(int id);
+    void processPreviewImage(int id, const QByteArray &data);
     void processCapturedImage(int id,
                               const QByteArray &data,
                               QCameraImageCapture::CaptureDestinations dest,


@@ -102,6 +102,18 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
     }
 }
 
+static void notifyPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data)
+{
+    JCamera *obj = g_objectMap.value(id, 0);
+    if (obj) {
+        QByteArray bytes;
+        int arrayLength = env->GetArrayLength(data);
+        bytes.resize(arrayLength);
+        env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
+        Q_EMIT obj->previewFrameAvailable(bytes);
+    }
+}
+
 JCamera::JCamera(int cameraId, jobject cam)
     : QObject()
     , QJNIObjectPrivate(cam)
@@ -225,6 +237,23 @@ QList<QSize> JCamera::getSupportedPreviewSizes()
     return list;
 }
 
+JCamera::ImageFormat JCamera::getPreviewFormat()
+{
+    if (!m_parameters.isValid())
+        return Unknown;
+
+    return JCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
+}
+
+void JCamera::setPreviewFormat(ImageFormat fmt)
+{
+    if (!m_parameters.isValid())
+        return;
+
+    m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
+    applyParameters();
+}
+
 void JCamera::setPreviewSize(const QSize &size)
 {
     if (!m_parameters.isValid())
@@ -624,6 +653,11 @@ void JCamera::setJpegQuality(int quality)
     applyParameters();
 }
 
+void JCamera::requestPreviewFrame()
+{
+    callMethod<void>("requestPreviewFrame");
+}
+
 void JCamera::takePicture()
 {
     callMethod<void>("takePicture");
@@ -672,7 +706,8 @@ QStringList JCamera::callStringListMethod(const char *methodName)
 static JNINativeMethod methods[] = {
     {"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
     {"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
-    {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}
+    {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
+    {"notifyPreviewFrame", "(I[B)V", (void *)notifyPreviewFrame}
 };
 
 bool JCamera::initJNI(JNIEnv *env)
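
The new "notifyPreviewFrame" entry in methods[] is what routes the Java onPreviewFrame() callback into the C++ wrapper. Below is a registration sketch for such a table, under the assumption that JCamera::initJNI() does something equivalent (its body is not shown in this diff; the helper name is made up).

    // Hypothetical helper: registers the methods[] table via the standard JNI
    // RegisterNatives call. 'clazz' is assumed to be the jclass of the QtCamera
    // Java class shown earlier in this commit.
    static bool registerQtCameraNatives(JNIEnv *env, jclass clazz)
    {
        return env->RegisterNatives(clazz, methods,
                                    sizeof(methods) / sizeof(methods[0])) == JNI_OK;
    }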


@@ -58,6 +58,16 @@
         CameraFacingFront = 1
     };
 
+    enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
+        Unknown = 0,
+        RGB565 = 4,
+        NV16 = 16,
+        NV21 = 17,
+        YUY2 = 20,
+        JPEG = 256,
+        YV12 = 842094169
+    };
+
     ~JCamera();
 
     static JCamera *open(int cameraId);
@@ -75,6 +85,9 @@
     QSize getPreferredPreviewSizeForVideo();
     QList<QSize> getSupportedPreviewSizes();
 
+    ImageFormat getPreviewFormat();
+    void setPreviewFormat(ImageFormat fmt);
+
     QSize previewSize() const { return m_previewSize; }
     void setPreviewSize(const QSize &size);
     void setPreviewTexture(jobject surfaceTexture);
@@ -131,6 +144,8 @@ public:
     void startPreview();
     void stopPreview();
 
+    void requestPreviewFrame();
+
     void takePicture();
 
     static bool initJNI(JNIEnv *env);
@@ -143,6 +158,8 @@ Q_SIGNALS:
     void whiteBalanceChanged();
 
+    void previewFrameAvailable(const QByteArray &data);
+
     void pictureExposed();
     void pictureCaptured(const QByteArray &data);