Merge remote-tracking branch 'origin/5.5' into 5.6

Conflicts:
	src/imports/multimedia/qdeclarativeaudio.cpp

Change-Id: I57c6252b084e4ed796f6f308b2e0c717d0f59b13
This commit is contained in:
Yoann Lopes
2015-08-24 14:31:24 +02:00
56 changed files with 2426 additions and 1986 deletions

View File

@@ -53,7 +53,8 @@ PRIVATE_HEADERS += \
qgstcodecsinfo_p.h \
qgstreamervideoprobecontrol_p.h \
qgstreameraudioprobecontrol_p.h \
qgstreamervideowindow_p.h
qgstreamervideowindow_p.h \
qgstreamervideooverlay_p.h
SOURCES += \
qgstreamerbushelper.cpp \
@@ -68,7 +69,8 @@ SOURCES += \
qgstcodecsinfo.cpp \
qgstreamervideoprobecontrol.cpp \
qgstreameraudioprobecontrol.cpp \
qgstreamervideowindow.cpp
qgstreamervideowindow.cpp \
qgstreamervideooverlay.cpp
qtHaveModule(widgets) {
QT += multimediawidgets

View File

@@ -0,0 +1,429 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qgstreamervideooverlay_p.h"
#include <QtGui/qguiapplication.h>
#include "qgstutils_p.h"
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h>
#else
#include <gst/video/videooverlay.h>
#endif
QT_BEGIN_NAMESPACE
// Maps a Qt platform plugin name to a candidate GStreamer video sink
// element that can render into a native window on that platform.
struct ElementMap
{
    const char *qtPlatform;
    const char *gstreamerElement;
};

// Known overlay-capable sinks, ordered by descending priority.
// Only the xcb (X11) platform is currently listed; findBestVideoSink()
// bails out early on any other platform.
static const ElementMap elementMap[] =
{
    { "xcb", "vaapisink" },
    { "xcb", "xvimagesink" },
    { "xcb", "ximagesink" }
};
// Creates the overlay helper around a GStreamer video sink.
// If elementName is non-empty, exactly that element is created;
// otherwise findBestVideoSink() picks a suitable sink for the platform.
// The chosen sink is inspected once for the optional properties it
// supports so the accessors below can check support cheaply.
QGstreamerVideoOverlay::QGstreamerVideoOverlay(QObject *parent, const QByteArray &elementName)
    : QObject(parent)
    , QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
    , m_videoSink(0)
    , m_isActive(false)
    , m_hasForceAspectRatio(false)
    , m_hasBrightness(false)
    , m_hasContrast(false)
    , m_hasHue(false)
    , m_hasSaturation(false)
    , m_hasShowPrerollFrame(false)
    , m_windowId(0)
    , m_aspectRatioMode(Qt::KeepAspectRatio)
    , m_brightness(0)
    , m_contrast(0)
    , m_hue(0)
    , m_saturation(0)
{
    if (!elementName.isEmpty())
        m_videoSink = gst_element_factory_make(elementName.constData(), NULL);
    else
        m_videoSink = findBestVideoSink();

    if (m_videoSink) {
        qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership

        // Watch the sink pad's caps so probeCaps() can report the
        // native video size.
        GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
        addProbeToPad(pad);
        gst_object_unref(GST_OBJECT(pad));

        // Record which optional properties this particular sink exposes.
        m_hasForceAspectRatio = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "force-aspect-ratio");
        m_hasBrightness = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness");
        m_hasContrast = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast");
        m_hasHue = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue");
        m_hasSaturation = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation");
        m_hasShowPrerollFrame = g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "show-preroll-frame");

        if (m_hasShowPrerollFrame) {
            // Preroll visibility feeds into isActive(); track changes.
            g_signal_connect(m_videoSink, "notify::show-preroll-frame",
                             G_CALLBACK(showPrerollFrameChanged), this);
        }
    }
}
// Detaches the caps probe from the sink pad and drops the reference
// taken on the sink in the constructor.
QGstreamerVideoOverlay::~QGstreamerVideoOverlay()
{
    if (m_videoSink) {
        GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
        removeProbeFromPad(pad);
        gst_object_unref(GST_OBJECT(pad));
        gst_object_unref(GST_OBJECT(m_videoSink));
    }
}
// Probes an element by attempting to bring it to the READY state; a
// sink whose backing resources are unavailable fails this transition.
// The element is returned to NULL state before reporting success.
static bool qt_gst_element_is_functioning(GstElement *element)
{
    if (gst_element_set_state(element, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS)
        return false;

    gst_element_set_state(element, GST_STATE_NULL);
    return true;
}
// Picks the most suitable overlay-capable video sink for the current
// platform. Known candidates from elementMap are tried first in
// priority order; failing that, any ranked sink factory implementing
// the video-overlay interface is probed. Returns 0 when no usable sink
// exists (ownership of a non-null result is taken by the caller).
GstElement *QGstreamerVideoOverlay::findBestVideoSink() const
{
    GstElement *choice = 0;
    QString platform = QGuiApplication::platformName();

    // We need a native window ID to use the GstVideoOverlay interface.
    // Bail out if the Qt platform plugin in use cannot provide a sensible WId.
    if (platform != QLatin1String("xcb"))
        return 0;

    // First, try some known video sinks, depending on the Qt platform plugin in use.
    for (quint32 i = 0; i < (sizeof(elementMap) / sizeof(ElementMap)); ++i) {
        if (platform == QLatin1String(elementMap[i].qtPlatform)
                && (choice = gst_element_factory_make(elementMap[i].gstreamerElement, NULL))) {
            if (qt_gst_element_is_functioning(choice))
                return choice;

            // Created but not usable (e.g. no device); discard and keep looking.
            gst_object_unref(choice);
            choice = 0;
        }
    }

    // If none of the known video sinks are available, try to find one that implements the
    // GstVideoOverlay interface and has autoplugging rank.
    GList *list = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
                                                        GST_RANK_MARGINAL);
    for (GList *item = list; item != NULL; item = item->next) {
        GstElementFactory *f = GST_ELEMENT_FACTORY(item->data);

        if (!gst_element_factory_has_interface(f, QT_GSTREAMER_VIDEOOVERLAY_INTERFACE_NAME))
            continue;

        if (GstElement *el = gst_element_factory_create(f, NULL)) {
            if (qt_gst_element_is_functioning(el)) {
                choice = el;
                break;
            }
            gst_object_unref(el);
        }
    }
    gst_plugin_feature_list_free(list);

    return choice;
}
// Returns the managed sink element (0 when no usable sink was found).
GstElement *QGstreamerVideoOverlay::videoSink() const
{
    return m_videoSink;
}
// Returns the video resolution last seen on the sink pad caps
// (invalid until probeCaps() has observed a resolution).
QSize QGstreamerVideoOverlay::nativeVideoSize() const
{
    return m_nativeVideoSize;
}
// Caches the native window id. It is pushed to the sink immediately
// only while rendering is active; otherwise processSyncMessage() will
// apply it when the sink asks for a window handle.
void QGstreamerVideoOverlay::setWindowHandle(WId id)
{
    m_windowId = id;

    if (!isActive())
        return;

    setWindowHandle_helper(id);
}
// Hands the native window id to the sink's overlay interface and then
// re-applies all cached properties.
// NOTE: the opening brace of the if statement is shared by both
// preprocessor branches; the block is closed after the property reset.
void QGstreamerVideoOverlay::setWindowHandle_helper(WId id)
{
#if GST_CHECK_VERSION(1,0,0)
    if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), id);
#else
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
# if GST_CHECK_VERSION(0,10,31)
        gst_x_overlay_set_window_handle(GST_X_OVERLAY(m_videoSink), id);
# else
        gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), id);
# endif
#endif
        // Properties need to be reset when changing the winId.
        setAspectRatioMode(m_aspectRatioMode);
        setBrightness(m_brightness);
        setContrast(m_contrast);
        setHue(m_hue);
        setSaturation(m_saturation);
    }
}
// Asks the sink to redraw the last frame; a no-op unless frames are
// currently being rendered (see isActive()).
void QGstreamerVideoOverlay::expose()
{
    if (!isActive())
        return;

#if !GST_CHECK_VERSION(1,0,0)
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
        gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
#else
    if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
        gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
    }
#endif
}
// Restricts video rendering to rect within the target window. An empty
// rect maps to (-1, -1, -1, -1), which tells the sink to use the whole
// window again.
void QGstreamerVideoOverlay::setRenderRectangle(const QRect &rect)
{
    int x = -1;
    int y = -1;
    int w = -1;
    int h = -1;

    if (!rect.isEmpty()) {
        x = rect.x();
        y = rect.y();
        w = rect.width();
        h = rect.height();
    }

#if !GST_CHECK_VERSION(1,0,0)
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
        gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), x, y , w , h);
#else
    if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
        gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink), x, y, w, h);
#endif
}
// Handles the sink's synchronous "prepare window handle" bus message by
// pushing the cached window id to the overlay. Returns true when the
// message was consumed.
bool QGstreamerVideoOverlay::processSyncMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

#if !GST_CHECK_VERSION(1,0,0)
    if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
            gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
#else
    // The message was renamed to "prepare-window-handle" in GStreamer 1.0.
    if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
            gst_structure_has_name(gst_message_get_structure(gm), "prepare-window-handle")) {
#endif
        setWindowHandle_helper(m_windowId);
        return true;
    }

    return false;
}
// Watches the asynchronous bus for state changes of the managed sink
// and refreshes the active flag accordingly. Always returns false so
// other watchers still see the message.
bool QGstreamerVideoOverlay::processBusMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

    // Guard against a null raw message, matching processSyncMessage();
    // GST_MESSAGE_TYPE dereferences its argument.
    if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED
            && GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
        updateIsActive();
    }

    return false;
}
// Caps-probe callback: derives the native video resolution from the
// sink pad caps and notifies listeners whenever it changes.
void QGstreamerVideoOverlay::probeCaps(GstCaps *caps)
{
    const QSize newSize = QGstUtils::capsCorrectedResolution(caps);

    if (m_nativeVideoSize == newSize)
        return;

    m_nativeVideoSize = newSize;
    emit nativeVideoSizeChanged();
}
// True while the sink is rendering frames (or showing a preroll frame);
// maintained by updateIsActive().
bool QGstreamerVideoOverlay::isActive() const
{
    return m_isActive;
}
// Re-evaluates whether the overlay is actively rendering, based on the
// sink's current state and (when supported) its "show-preroll-frame"
// property. Emits activeChanged() on transitions.
void QGstreamerVideoOverlay::updateIsActive()
{
    if (!m_videoSink)
        return;

    GstState state = GST_STATE(m_videoSink);
    gboolean showPreroll = true;

    if (m_hasShowPrerollFrame)
        g_object_get(G_OBJECT(m_videoSink), "show-preroll-frame", &showPreroll, NULL);

    // Active while playing, or while paused with the preroll frame visible.
    bool newIsActive = (state == GST_STATE_PLAYING || (state == GST_STATE_PAUSED && showPreroll));

    if (newIsActive != m_isActive) {
        m_isActive = newIsActive;
        emit activeChanged();
    }
}
// GObject notify callback for "show-preroll-frame" (connected in the
// constructor); re-evaluates the overlay's active state.
void QGstreamerVideoOverlay::showPrerollFrameChanged(GObject *, GParamSpec *, QGstreamerVideoOverlay *overlay)
{
    overlay->updateIsActive();
}
// Reads the current aspect-ratio behaviour back from the sink's
// "force-aspect-ratio" property. Defaults to KeepAspectRatio when the
// sink does not expose that property.
Qt::AspectRatioMode QGstreamerVideoOverlay::aspectRatioMode() const
{
    if (!m_hasForceAspectRatio)
        return Qt::KeepAspectRatio;

    gboolean keepAspect = false;
    g_object_get(G_OBJECT(m_videoSink), "force-aspect-ratio", &keepAspect, NULL);

    return keepAspect ? Qt::KeepAspectRatio : Qt::IgnoreAspectRatio;
}
// Maps Qt's aspect-ratio mode onto the sink's boolean
// "force-aspect-ratio" property, when available. The mode is always
// cached so it can be re-applied after a window handle change.
void QGstreamerVideoOverlay::setAspectRatioMode(Qt::AspectRatioMode mode)
{
    if (m_hasForceAspectRatio) {
        g_object_set(G_OBJECT(m_videoSink),
                     "force-aspect-ratio",
                     (mode == Qt::KeepAspectRatio),
                     (const char*)NULL);
    }
    m_aspectRatioMode = mode;
}
int QGstreamerVideoOverlay::brightness() const
{
int brightness = 0;
if (m_hasBrightness)
g_object_get(G_OBJECT(m_videoSink), "brightness", &brightness, NULL);
return brightness / 10;
}
// Applies brightness to the sink (its property range is scaled by 10)
// and signals the change only when the sink supports the property.
// The value is always cached for re-application after a winId change.
void QGstreamerVideoOverlay::setBrightness(int brightness)
{
    if (m_hasBrightness) {
        g_object_set(G_OBJECT(m_videoSink), "brightness", brightness * 10, NULL);
        emit brightnessChanged(brightness);
    }
    m_brightness = brightness;
}
int QGstreamerVideoOverlay::contrast() const
{
int contrast = 0;
if (m_hasContrast)
g_object_get(G_OBJECT(m_videoSink), "contrast", &contrast, NULL);
return contrast / 10;
}
// Applies contrast to the sink (its property range is scaled by 10)
// and signals the change only when the sink supports the property.
// The value is always cached for re-application after a winId change.
void QGstreamerVideoOverlay::setContrast(int contrast)
{
    if (m_hasContrast) {
        g_object_set(G_OBJECT(m_videoSink), "contrast", contrast * 10, NULL);
        emit contrastChanged(contrast);
    }
    m_contrast = contrast;
}
int QGstreamerVideoOverlay::hue() const
{
int hue = 0;
if (m_hasHue)
g_object_get(G_OBJECT(m_videoSink), "hue", &hue, NULL);
return hue / 10;
}
// Applies hue to the sink (its property range is scaled by 10) and
// signals the change only when the sink supports the property.
// The value is always cached for re-application after a winId change.
void QGstreamerVideoOverlay::setHue(int hue)
{
    if (m_hasHue) {
        g_object_set(G_OBJECT(m_videoSink), "hue", hue * 10, NULL);
        emit hueChanged(hue);
    }
    m_hue = hue;
}
int QGstreamerVideoOverlay::saturation() const
{
int saturation = 0;
if (m_hasSaturation)
g_object_get(G_OBJECT(m_videoSink), "saturation", &saturation, NULL);
return saturation / 10;
}
// Applies saturation to the sink (its property range is scaled by 10)
// and signals the change only when the sink supports the property.
// The value is always cached for re-application after a winId change.
void QGstreamerVideoOverlay::setSaturation(int saturation)
{
    if (m_hasSaturation) {
        g_object_set(G_OBJECT(m_videoSink), "saturation", saturation * 10, NULL);
        emit saturationChanged(saturation);
    }
    m_saturation = saturation;
}
QT_END_NAMESPACE

View File

@@ -32,22 +32,11 @@
****************************************************************************/
#include "qgstreamervideowidget_p.h"
#include <private/qgstutils_p.h>
#include <QtCore/qcoreevent.h>
#include <QtCore/qdebug.h>
#include <QtWidgets/qapplication.h>
#include <QtGui/qpainter.h>
#include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QT_BEGIN_NAMESPACE
class QGstreamerVideoWidget : public QWidget
@@ -82,167 +71,133 @@ public:
}
}
protected:
void paintEvent(QPaintEvent *)
void paint_helper()
{
QPainter painter(this);
painter.fillRect(rect(), palette().background());
}
protected:
void paintEvent(QPaintEvent *)
{
paint_helper();
}
QSize m_nativeSize;
};
QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent)
QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent, const QByteArray &elementName)
: QVideoWidgetControl(parent)
, m_videoSink(0)
, m_videoOverlay(this, !elementName.isEmpty() ? elementName : qgetenv("QT_GSTREAMER_WIDGET_VIDEOSINK"))
, m_widget(0)
, m_stopped(false)
, m_windowId(0)
, m_fullScreen(false)
{
// The QWidget needs to have a native X window handle to be able to use xvimagesink.
// Bail out if Qt is not using xcb (the control will then be ignored by the plugin)
if (QGuiApplication::platformName().compare(QLatin1String("xcb"), Qt::CaseInsensitive) == 0)
m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
if (m_videoSink) {
// Check if the xv sink is usable
if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
gst_object_unref(GST_OBJECT(m_videoSink));
m_videoSink = 0;
} else {
gst_element_set_state(m_videoSink, GST_STATE_NULL);
g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
}
}
connect(&m_videoOverlay, &QGstreamerVideoOverlay::activeChanged,
this, &QGstreamerVideoWidgetControl::onOverlayActiveChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::nativeVideoSizeChanged,
this, &QGstreamerVideoWidgetControl::onNativeVideoSizeChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::brightnessChanged,
this, &QGstreamerVideoWidgetControl::brightnessChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::contrastChanged,
this, &QGstreamerVideoWidgetControl::contrastChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::hueChanged,
this, &QGstreamerVideoWidgetControl::hueChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::saturationChanged,
this, &QGstreamerVideoWidgetControl::saturationChanged);
}
QGstreamerVideoWidgetControl::~QGstreamerVideoWidgetControl()
{
if (m_videoSink)
gst_object_unref(GST_OBJECT(m_videoSink));
delete m_widget;
}
void QGstreamerVideoWidgetControl::createVideoWidget()
{
if (!m_videoSink || m_widget)
if (m_widget)
return;
m_widget = new QGstreamerVideoWidget;
m_widget->installEventFilter(this);
m_windowId = m_widget->winId();
m_videoOverlay.setWindowHandle(m_windowId = m_widget->winId());
}
GstElement *QGstreamerVideoWidgetControl::videoSink()
{
return m_videoSink;
return m_videoOverlay.videoSink();
}
// Invoked when the overlay starts or stops rendering; widget paint
// attributes must track the overlay's active state.
void QGstreamerVideoWidgetControl::onOverlayActiveChanged()
{
    updateWidgetAttributes();
}
// Puts the control into the stopped state: restores normal widget
// painting and clears the widget's native size hint.
void QGstreamerVideoWidgetControl::stopRenderer()
{
    m_stopped = true;
    updateWidgetAttributes();
    m_widget->setNativeSize(QSize());
}
// Propagates the overlay's native video size to the widget's size hint.
// A valid size means frames are flowing again, so the stopped flag is
// cleared.
void QGstreamerVideoWidgetControl::onNativeVideoSizeChanged()
{
    const QSize &size = m_videoOverlay.nativeVideoSize();

    if (size.isValid())
        m_stopped = false;

    if (m_widget)
        m_widget->setNativeSize(size);
}
bool QGstreamerVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
{
if (m_widget && object == m_widget) {
if (e->type() == QEvent::ParentChange || e->type() == QEvent::Show) {
if (e->type() == QEvent::ParentChange || e->type() == QEvent::Show || e->type() == QEvent::WinIdChange) {
WId newWId = m_widget->winId();
if (newWId != m_windowId) {
m_windowId = newWId;
setOverlay();
}
if (newWId != m_windowId)
m_videoOverlay.setWindowHandle(m_windowId = newWId);
}
if (e->type() == QEvent::Show) {
// Setting these values ensures smooth resizing since it
// will prevent the system from clearing the background
m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
} else if (e->type() == QEvent::Resize) {
// This is a workaround for missing background repaints
// when reducing window size
windowExposed();
if (e->type() == QEvent::Paint) {
if (m_videoOverlay.isActive())
m_videoOverlay.expose(); // triggers a repaint of the last frame
else
m_widget->paint_helper(); // paints the black background
return true;
}
}
return false;
}
// Toggles widget paint attributes depending on whether the sink is
// currently rendering.
void QGstreamerVideoWidgetControl::updateWidgetAttributes()
{
    // When frames are being rendered (sink is active), we need the WA_PaintOnScreen attribute to
    // be set in order to avoid flickering when the widget is repainted (for example when resized).
    // We need to clear that flag when the sink is inactive to allow the widget to paint its
    // background, otherwise some garbage will be displayed.
    if (m_videoOverlay.isActive() && !m_stopped) {
        m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
        m_widget->setAttribute(Qt::WA_PaintOnScreen, true);
    } else {
        m_widget->setAttribute(Qt::WA_NoSystemBackground, false);
        m_widget->setAttribute(Qt::WA_PaintOnScreen, false);
        m_widget->update();
    }
}
bool QGstreamerVideoWidgetControl::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
#if !GST_CHECK_VERSION(1,0,0)
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
#else
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gst_message_get_structure(gm), "prepare-window-handle")) {
#endif
setOverlay();
QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
return true;
}
return false;
return m_videoOverlay.processSyncMessage(message);
}
bool QGstreamerVideoWidgetControl::processBusMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED &&
GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
GstState oldState;
GstState newState;
gst_message_parse_state_changed(gm, &oldState, &newState, 0);
if (oldState == GST_STATE_READY && newState == GST_STATE_PAUSED)
updateNativeVideoSize();
}
return false;
}
void QGstreamerVideoWidgetControl::setOverlay()
{
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
}
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#endif
}
void QGstreamerVideoWidgetControl::updateNativeVideoSize()
{
if (m_videoSink) {
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
GstCaps *caps = qt_gst_pad_get_current_caps(pad);
gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_widget->setNativeSize(QGstUtils::capsCorrectedResolution(caps));
gst_caps_unref(caps);
}
} else {
if (m_widget)
m_widget->setNativeSize(QSize());
}
}
void QGstreamerVideoWidgetControl::windowExposed()
{
#if !GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
#else
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
#endif
return m_videoOverlay.processBusMessage(message);
}
QWidget *QGstreamerVideoWidgetControl::videoWidget()
@@ -253,19 +208,12 @@ QWidget *QGstreamerVideoWidgetControl::videoWidget()
Qt::AspectRatioMode QGstreamerVideoWidgetControl::aspectRatioMode() const
{
return m_aspectRatioMode;
return m_videoOverlay.aspectRatioMode();
}
void QGstreamerVideoWidgetControl::setAspectRatioMode(Qt::AspectRatioMode mode)
{
if (m_videoSink) {
g_object_set(G_OBJECT(m_videoSink),
"force-aspect-ratio",
(mode == Qt::KeepAspectRatio),
(const char*)NULL);
}
m_aspectRatioMode = mode;
m_videoOverlay.setAspectRatioMode(mode);
}
bool QGstreamerVideoWidgetControl::isFullScreen() const
@@ -280,78 +228,42 @@ void QGstreamerVideoWidgetControl::setFullScreen(bool fullScreen)
int QGstreamerVideoWidgetControl::brightness() const
{
int brightness = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness"))
g_object_get(G_OBJECT(m_videoSink), "brightness", &brightness, NULL);
return brightness / 10;
return m_videoOverlay.brightness();
}
void QGstreamerVideoWidgetControl::setBrightness(int brightness)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness")) {
g_object_set(G_OBJECT(m_videoSink), "brightness", brightness * 10, NULL);
emit brightnessChanged(brightness);
}
m_videoOverlay.setBrightness(brightness);
}
int QGstreamerVideoWidgetControl::contrast() const
{
int contrast = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast"))
g_object_get(G_OBJECT(m_videoSink), "contrast", &contrast, NULL);
return contrast / 10;
return m_videoOverlay.contrast();
}
void QGstreamerVideoWidgetControl::setContrast(int contrast)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast")) {
g_object_set(G_OBJECT(m_videoSink), "contrast", contrast * 10, NULL);
emit contrastChanged(contrast);
}
m_videoOverlay.setContrast(contrast);
}
int QGstreamerVideoWidgetControl::hue() const
{
int hue = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue"))
g_object_get(G_OBJECT(m_videoSink), "hue", &hue, NULL);
return hue / 10;
return m_videoOverlay.hue();
}
void QGstreamerVideoWidgetControl::setHue(int hue)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue")) {
g_object_set(G_OBJECT(m_videoSink), "hue", hue * 10, NULL);
emit hueChanged(hue);
}
m_videoOverlay.setHue(hue);
}
int QGstreamerVideoWidgetControl::saturation() const
{
int saturation = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation"))
g_object_get(G_OBJECT(m_videoSink), "saturation", &saturation, NULL);
return saturation / 10;
return m_videoOverlay.saturation();
}
void QGstreamerVideoWidgetControl::setSaturation(int saturation)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation")) {
g_object_set(G_OBJECT(m_videoSink), "saturation", saturation * 10, NULL);
emit saturationChanged(saturation);
}
m_videoOverlay.setSaturation(saturation);
}
QT_END_NAMESPACE

View File

@@ -35,52 +35,33 @@
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
#include <QtGui/qguiapplication.h>
#include <gst/gst.h>
#if !GST_CHECK_VERSION(1,0,0)
#include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/propertyprobe.h>
#else
#include <gst/video/videooverlay.h>
#endif
QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName)
QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const QByteArray &elementName)
: QVideoWindowControl(parent)
, QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
, m_videoSink(0)
, m_videoOverlay(this, !elementName.isEmpty() ? elementName : qgetenv("QT_GSTREAMER_WINDOW_VIDEOSINK"))
, m_windowId(0)
, m_aspectRatioMode(Qt::KeepAspectRatio)
, m_fullScreen(false)
, m_colorKey(QColor::Invalid)
{
if (elementName) {
m_videoSink = gst_element_factory_make(elementName, NULL);
} else if (QGuiApplication::platformName().compare(QLatin1String("xcb"), Qt::CaseInsensitive) == 0) {
// We need a native X window handle to be able to use xvimagesink.
// Bail out if Qt is not using xcb (the control will then be ignored by the plugin)
m_videoSink = gst_element_factory_make("xvimagesink", NULL);
}
if (m_videoSink) {
qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
connect(&m_videoOverlay, &QGstreamerVideoOverlay::nativeVideoSizeChanged,
this, &QGstreamerVideoWindow::nativeSizeChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::brightnessChanged,
this, &QGstreamerVideoWindow::brightnessChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::contrastChanged,
this, &QGstreamerVideoWindow::contrastChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::hueChanged,
this, &QGstreamerVideoWindow::hueChanged);
connect(&m_videoOverlay, &QGstreamerVideoOverlay::saturationChanged,
this, &QGstreamerVideoWindow::saturationChanged);
}
QGstreamerVideoWindow::~QGstreamerVideoWindow()
{
if (m_videoSink) {
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
gst_object_unref(GST_OBJECT(m_videoSink));
}
}
GstElement *QGstreamerVideoWindow::videoSink()
{
return m_videoOverlay.videoSink();
}
WId QGstreamerVideoWindow::winId() const
@@ -94,17 +75,8 @@ void QGstreamerVideoWindow::setWinId(WId id)
return;
WId oldId = m_windowId;
m_videoOverlay.setWindowHandle(m_windowId = id);
m_windowId = id;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
}
#endif
if (!oldId)
emit readyChanged(true);
@@ -114,28 +86,12 @@ void QGstreamerVideoWindow::setWinId(WId id)
bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
#if GST_CHECK_VERSION(1,0,0)
const GstStructure *s = gst_message_get_structure(gm);
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(s, "prepare-window-handle") &&
m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
return m_videoOverlay.processSyncMessage(message);
}
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), m_windowId);
return true;
}
#else
if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
return true;
}
#endif
return false;
bool QGstreamerVideoWindow::processBusMessage(const QGstreamerMessage &message)
{
return m_videoOverlay.processBusMessage(message);
}
QRect QGstreamerVideoWindow::displayRect() const
@@ -145,188 +101,63 @@ QRect QGstreamerVideoWindow::displayRect() const
void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
{
m_displayRect = rect;
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
if (m_displayRect.isEmpty())
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink), -1, -1, -1, -1);
else
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink),
m_displayRect.x(),
m_displayRect.y(),
m_displayRect.width(),
m_displayRect.height());
repaint();
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
#if GST_VERSION_MICRO >= 29
if (m_displayRect.isEmpty())
gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), -1, -1, -1, -1);
else
gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink),
m_displayRect.x(),
m_displayRect.y(),
m_displayRect.width(),
m_displayRect.height());
repaint();
#endif
}
#endif
m_videoOverlay.setRenderRectangle(m_displayRect = rect);
repaint();
}
Qt::AspectRatioMode QGstreamerVideoWindow::aspectRatioMode() const
{
return m_aspectRatioMode;
return m_videoOverlay.aspectRatioMode();
}
void QGstreamerVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
{
m_aspectRatioMode = mode;
if (m_videoSink) {
g_object_set(G_OBJECT(m_videoSink),
"force-aspect-ratio",
(m_aspectRatioMode == Qt::KeepAspectRatio),
(const char*)NULL);
}
m_videoOverlay.setAspectRatioMode(mode);
}
void QGstreamerVideoWindow::repaint()
{
#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL;
GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
}
}
#else
if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
//don't call gst_x_overlay_expose if the sink is in null state
GstState state = GST_STATE_NULL;
GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
}
}
#endif
}
QColor QGstreamerVideoWindow::colorKey() const
{
if (!m_colorKey.isValid()) {
gint colorkey = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "colorkey"))
g_object_get(G_OBJECT(m_videoSink), "colorkey", &colorkey, NULL);
if (colorkey > 0)
m_colorKey.setRgb(colorkey);
}
return m_colorKey;
}
void QGstreamerVideoWindow::setColorKey(const QColor &color)
{
m_colorKey = color;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "colorkey"))
g_object_set(G_OBJECT(m_videoSink), "colorkey", color.rgba(), NULL);
}
bool QGstreamerVideoWindow::autopaintColorKey() const
{
bool enabled = true;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "autopaint-colorkey"))
g_object_get(G_OBJECT(m_videoSink), "autopaint-colorkey", &enabled, NULL);
return enabled;
}
void QGstreamerVideoWindow::setAutopaintColorKey(bool enabled)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "autopaint-colorkey"))
g_object_set(G_OBJECT(m_videoSink), "autopaint-colorkey", enabled, NULL);
m_videoOverlay.expose();
}
int QGstreamerVideoWindow::brightness() const
{
int brightness = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness"))
g_object_get(G_OBJECT(m_videoSink), "brightness", &brightness, NULL);
return brightness / 10;
return m_videoOverlay.brightness();
}
void QGstreamerVideoWindow::setBrightness(int brightness)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness")) {
g_object_set(G_OBJECT(m_videoSink), "brightness", brightness * 10, NULL);
emit brightnessChanged(brightness);
}
m_videoOverlay.setBrightness(brightness);
}
int QGstreamerVideoWindow::contrast() const
{
int contrast = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast"))
g_object_get(G_OBJECT(m_videoSink), "contrast", &contrast, NULL);
return contrast / 10;
return m_videoOverlay.contrast();
}
void QGstreamerVideoWindow::setContrast(int contrast)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast")) {
g_object_set(G_OBJECT(m_videoSink), "contrast", contrast * 10, NULL);
emit contrastChanged(contrast);
}
m_videoOverlay.setContrast(contrast);
}
int QGstreamerVideoWindow::hue() const
{
int hue = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue"))
g_object_get(G_OBJECT(m_videoSink), "hue", &hue, NULL);
return hue / 10;
return m_videoOverlay.hue();
}
void QGstreamerVideoWindow::setHue(int hue)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue")) {
g_object_set(G_OBJECT(m_videoSink), "hue", hue * 10, NULL);
emit hueChanged(hue);
}
m_videoOverlay.setHue(hue);
}
int QGstreamerVideoWindow::saturation() const
{
int saturation = 0;
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation"))
g_object_get(G_OBJECT(m_videoSink), "saturation", &saturation, NULL);
return saturation / 10;
return m_videoOverlay.saturation();
}
void QGstreamerVideoWindow::setSaturation(int saturation)
{
if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation")) {
g_object_set(G_OBJECT(m_videoSink), "saturation", saturation * 10, NULL);
emit saturationChanged(saturation);
}
m_videoOverlay.setSaturation(saturation);
}
bool QGstreamerVideoWindow::isFullScreen() const
@@ -341,28 +172,5 @@ void QGstreamerVideoWindow::setFullScreen(bool fullScreen)
QSize QGstreamerVideoWindow::nativeSize() const
{
    // Last video resolution recorded by updateNativeVideoSize() from the
    // caps probe; empty until a stream has negotiated caps.
    return m_nativeSize;
}
void QGstreamerVideoWindow::probeCaps(GstCaps *caps)
{
    // Derive the corrected video resolution from the negotiated caps and
    // queue the update so it runs in this object's own thread.
    const QSize correctedSize = QGstUtils::capsCorrectedResolution(caps);
    QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection,
                              Q_ARG(QSize, correctedSize));
}
void QGstreamerVideoWindow::updateNativeVideoSize(const QSize &size)
{
    // Store the new native size and notify listeners; no-op when unchanged.
    if (m_nativeSize == size)
        return;
    m_nativeSize = size;
    emit nativeSizeChanged();
}
GstElement *QGstreamerVideoWindow::videoSink()
{
    // Accessor for the sink element owned by this window.
    // NOTE(review): an unreachable 'return m_videoOverlay.nativeVideoSize();'
    // followed the return below (merge-conflict residue); it also returned a
    // QSize from a GstElement* function and could not compile, so it was removed.
    return m_videoSink;
}

View File

@@ -1453,6 +1453,17 @@ GstCaps *qt_gst_caps_normalize(GstCaps *caps)
#endif
}
const gchar *qt_gst_element_get_factory_name(GstElement *element)
{
    // Return the name of the factory that created the element, or null when
    // the element is null or has no factory.
    if (!element)
        return 0;
    const GstElementFactory *factory = gst_element_get_factory(element);
    if (!factory)
        return 0;
    return gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
}
QDebug operator <<(QDebug debug, GstCaps *caps)
{
if (caps) {

View File

@@ -48,9 +48,6 @@ import QtMultimedia 5.6
types.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Video {
id: video
width : 800
@@ -239,7 +236,15 @@ Item {
*/
property alias hasVideo: player.hasVideo
/* documented below due to length of metaData documentation */
/*!
\qmlproperty object Video::metaData
This property holds the meta data for the current media.
See \l{MediaPlayer::metaData}{MediaPlayer.metaData} for details about each meta data key.
\sa {QMediaMetaData}
*/
property alias metaData: player.metaData
/*!
@@ -457,540 +462,3 @@ Item {
}
}
// ***************************************
// Documentation for meta-data properties.
// ***************************************
/*!
\qmlproperty variant Video::metaData
This property holds a collection of all the meta-data for the media.
You can access individual properties like \l {Video::metaData.title}{metaData.title}
or \l {Video::metaData.trackNumber} {metaData.trackNumber}.
*/
/*!
\qmlproperty variant Video::metaData.title
This property holds the title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.subTitle
This property holds the sub-title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.author
This property holds the author of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.comment
This property holds a user comment about the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.description
This property holds a description of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.category
This property holds the category of the media
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.genre
This property holds the genre of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.year
This property holds the year of release of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.date
This property holds the date of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.userRating
This property holds a user rating of the media in the range of 0 to 100.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.keywords
This property holds a list of keywords describing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.language
This property holds the language of the media, as an ISO 639-2 code.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.publisher
This property holds the publisher of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.copyright
This property holds the media's copyright notice.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.parentalRating
This property holds the parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.ratingOrganization
This property holds the name of the rating organization responsible for the
parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.size
This property holds the size of the media in bytes.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.mediaType
This property holds the type of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.audioBitRate
This property holds the bit rate of the media's audio stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.audioCodec
This property holds the encoding of the media audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.averageLevel
This property holds the average volume level of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.channelCount
This property holds the number of channels in the media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.peakValue
This property holds the peak volume of the media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.sampleRate
This property holds the sample rate of the media's audio stream in Hertz.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.albumTitle
This property holds the title of the album the media belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.albumArtist
This property holds the name of the principal artist of the album the media
belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.contributingArtist
This property holds the names of artists contributing to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.composer
This property holds the composer of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.conductor
This property holds the conductor of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.lyrics
This property holds the lyrics to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.mood
This property holds the mood of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.trackNumber
This property holds the track number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.trackCount
This property holds the number of tracks on the album containing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.coverArtUrlSmall
This property holds the URL of a small cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.coverArtUrlLarge
This property holds the URL of a large cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.resolution
This property holds the dimension of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.pixelAspectRatio
This property holds the pixel aspect ratio of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.videoFrameRate
This property holds the frame rate of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.videoBitRate
This property holds the bit rate of the media's video stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.videoCodec
This property holds the encoding of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.posterUrl
This property holds the URL of a poster image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.chapterNumber
This property holds the chapter number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.director
This property holds the director of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.leadPerformer
This property holds the lead performer in the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.writer
This property holds the writer of the media.
\sa {QMediaMetaData}
*/
// The remaining properties are related to photos, and are technically
// available but will certainly never have values.
/*!
\qmlproperty variant Video::metaData.cameraManufacturer
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.cameraModel
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.event
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.subject
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.orientation
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.exposureTime
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.fNumber
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.exposureProgram
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.isoSpeedRatings
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.exposureBiasValue
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.dateTimeDigitized
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.subjectDistance
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.meteringMode
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.lightSource
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.flash
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.focalLength
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.exposureMode
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.whiteBalance
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.DigitalZoomRatio
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.focalLengthIn35mmFilm
\sa {QMediaMetaData::FocalLengthIn35mmFilm}
*/
/*!
\qmlproperty variant Video::metaData.sceneCaptureType
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.gainControl
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.contrast
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.saturation
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.sharpness
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant Video::metaData.deviceSettingDescription
\sa {QMediaMetaData}
*/

View File

@@ -59,12 +59,7 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_qml
\ingroup multimedia_audio_qml
This type is part of the \b{QtMultimedia 5.0} module.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Text {
text: "Click Me!";
font.pointSize: 24;
@@ -919,369 +914,124 @@ void QDeclarativeAudio::_q_mediaChanged(const QMediaContent &media)
*/
/*!
\qmlproperty variant QtMultimedia::Audio::mediaObject
This property holds the native media object.
It can be used to get a pointer to a QMediaPlayer object in order to integrate with C++ code.
\code
QObject *qmlAudio; // The QML Audio object
QMediaPlayer *player = qvariant_cast<QMediaPlayer *>(qmlAudio->property("mediaObject"));
\endcode
\note This property is not accessible from QML.
*/
/*!
\qmlpropertygroup QtMultimedia::Audio::metaData
\qmlproperty variant QtMultimedia::Audio::metaData.title
This property holds the title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.subTitle
This property holds the sub-title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.author
This property holds the author of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.comment
This property holds a user comment about the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.description
This property holds a description of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.category
This property holds the category of the media
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.genre
This property holds the genre of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.year
This property holds the year of release of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.date
This property holds the date of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.userRating
This property holds a user rating of the media in the range of 0 to 100.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.keywords
This property holds a list of keywords describing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.language
This property holds the language of the media, as an ISO 639-2 code.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.publisher
This property holds the publisher of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.copyright
This property holds the media's copyright notice.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.parentalRating
This property holds the parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.ratingOrganization
This property holds the name of the rating organization responsible for the
parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.size
This property holds the size of the media in bytes.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.mediaType
This property holds the type of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.audioBitRate
This property holds the bit rate of the media's audio stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.audioCodec
This property holds the encoding of the media audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.averageLevel
This property holds the average volume level of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.channelCount
This property holds the number of channels in the media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.peakValue
This property holds the peak volume of media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.sampleRate
This property holds the sample rate of the media's audio stream in hertz.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.albumTitle
This property holds the title of the album the media belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.albumArtist
This property holds the name of the principal artist of the album the media
belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.contributingArtist
This property holds the names of artists contributing to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.composer
This property holds the composer of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.conductor
This property holds the conductor of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.lyrics
This property holds the lyrics to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.mood
This property holds the mood of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.trackNumber
This property holds the track number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.trackCount
This property holds the number of tracks on the album containing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.coverArtUrlSmall
This property holds the URL of a small cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.coverArtUrlLarge
This property holds the URL of a large cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.resolution
This property holds the dimension of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.pixelAspectRatio
This property holds the pixel aspect ratio of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.videoFrameRate
This property holds the frame rate of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.videoBitRate
This property holds the bit rate of the media's video stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.videoCodec
This property holds the encoding of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.posterUrl
This property holds the URL of a poster image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.chapterNumber
This property holds the chapter number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.director
This property holds the director of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.leadPerformer
This property holds the lead performer in the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::Audio::metaData.writer
This property holds the writer of the media.
These properties hold the meta data for the current media.
\list
\li \c metaData.title - the title of the media.
\li \c metaData.subTitle - the sub-title of the media.
\li \c metaData.author - the author of the media.
\li \c metaData.comment - a user comment about the media.
\li \c metaData.description - a description of the media.
\li \c metaData.category - the category of the media.
\li \c metaData.genre - the genre of the media.
\li \c metaData.year - the year of release of the media.
\li \c metaData.date - the date of the media.
\li \c metaData.userRating - a user rating of the media in the range of 0 to 100.
\li \c metaData.keywords - a list of keywords describing the media.
\li \c metaData.language - the language of the media, as an ISO 639-2 code.
\li \c metaData.publisher - the publisher of the media.
\li \c metaData.copyright - the media's copyright notice.
\li \c metaData.parentalRating - the parental rating of the media.
\li \c metaData.ratingOrganization - the name of the rating organization responsible for the
parental rating of the media.
\li \c metaData.size - the size of the media in bytes.
\li \c metaData.mediaType - the type of the media.
\li \c metaData.audioBitRate - the bit rate of the media's audio stream in bits per second.
\li \c metaData.audioCodec - the encoding of the media audio stream.
\li \c metaData.averageLevel - the average volume level of the media.
\li \c metaData.channelCount - the number of channels in the media's audio stream.
\li \c metaData.peakValue - the peak volume of media's audio stream.
\li \c metaData.sampleRate - the sample rate of the media's audio stream in hertz.
\li \c metaData.albumTitle - the title of the album the media belongs to.
\li \c metaData.albumArtist - the name of the principal artist of the album the media
belongs to.
\li \c metaData.contributingArtist - the names of artists contributing to the media.
\li \c metaData.composer - the composer of the media.
\li \c metaData.conductor - the conductor of the media.
\li \c metaData.lyrics - the lyrics to the media.
\li \c metaData.mood - the mood of the media.
\li \c metaData.trackNumber - the track number of the media.
\li \c metaData.trackCount - the number of tracks on the album containing the media.
\li \c metaData.coverArtUrlSmall - the URL of a small cover art image.
\li \c metaData.coverArtUrlLarge - the URL of a large cover art image.
\li \c metaData.resolution - the dimension of an image or video.
\li \c metaData.pixelAspectRatio - the pixel aspect ratio of an image or video.
\li \c metaData.videoFrameRate - the frame rate of the media's video stream.
\li \c metaData.videoBitRate - the bit rate of the media's video stream in bits per second.
\li \c metaData.videoCodec - the encoding of the media's video stream.
\li \c metaData.posterUrl - the URL of a poster image.
\li \c metaData.chapterNumber - the chapter number of the media.
\li \c metaData.director - the director of the media.
\li \c metaData.leadPerformer - the lead performer in the media.
\li \c metaData.writer - the writer of the media.
\endlist
\sa {QMediaMetaData}
*/
///////////// MediaPlayer Docs /////////////
/*!
@@ -1294,12 +1044,7 @@ void QDeclarativeAudio::_q_mediaChanged(const QMediaContent &media)
\ingroup multimedia_audio_qml
\ingroup multimedia_video_qml
MediaPlayer is part of the \b{QtMultimedia 5.0} module.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Text {
text: "Click Me!";
font.pointSize: 24;
@@ -1321,9 +1066,6 @@ void QDeclarativeAudio::_q_mediaChanged(const QMediaContent &media)
or you can use it in conjunction with a \l VideoOutput for rendering video.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Item {
MediaPlayer {
id: mediaplayer
@@ -1664,365 +1406,119 @@ void QDeclarativeAudio::_q_mediaChanged(const QMediaContent &media)
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::mediaObject
This property holds the native media object.
It can be used to get a pointer to a QMediaPlayer object in order to integrate with C++ code.
\code
QObject *qmlMediaPlayer; // The QML MediaPlayer object
QMediaPlayer *player = qvariant_cast<QMediaPlayer *>(qmlMediaPlayer->property("mediaObject"));
\endcode
\note This property is not accessible from QML.
*/
/*!
\qmlpropertygroup QtMultimedia::MediaPlayer::metaData
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.title
This property holds the title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.subTitle
This property holds the sub-title of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.author
This property holds the author of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.comment
This property holds a user comment about the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.description
This property holds a description of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.category
This property holds the category of the media
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.genre
This property holds the genre of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.year
This property holds the year of release of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.date
This property holds the date of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.userRating
This property holds a user rating of the media in the range of 0 to 100.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.keywords
This property holds a list of keywords describing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.language
This property holds the language of the media, as an ISO 639-2 code.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.publisher
This property holds the publisher of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.copyright
This property holds the media's copyright notice.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.parentalRating
This property holds the parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.ratingOrganization
This property holds the name of the rating organization responsible for the
parental rating of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.size
This property holds the size of the media in bytes.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.mediaType
This property holds the type of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.audioBitRate
This property holds the bit rate of the media's audio stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.audioCodec
This property holds the encoding of the media audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.averageLevel
This property holds the average volume level of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.channelCount
This property holds the number of channels in the media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.peakValue
This property holds the peak volume of media's audio stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.sampleRate
This property holds the sample rate of the media's audio stream in hertz.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.albumTitle
This property holds the title of the album the media belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.albumArtist
This property holds the name of the principal artist of the album the media
belongs to.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.contributingArtist
This property holds the names of artists contributing to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.composer
This property holds the composer of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.conductor
This property holds the conductor of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.lyrics
This property holds the lyrics to the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.mood
This property holds the mood of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.trackNumber
This property holds the track number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.trackCount
This property holds the number of tracks on the album containing the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.coverArtUrlSmall
This property holds the URL of a small cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.coverArtUrlLarge
This property holds the URL of a large cover art image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.resolution
This property holds the dimension of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.pixelAspectRatio
This property holds the pixel aspect ratio of an image or video.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.videoFrameRate
This property holds the frame rate of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.videoBitRate
This property holds the bit rate of the media's video stream in bits per
second.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.videoCodec
This property holds the encoding of the media's video stream.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.posterUrl
This property holds the URL of a poster image.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.chapterNumber
This property holds the chapter number of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.director
This property holds the director of the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.leadPerformer
This property holds the lead performer in the media.
\sa {QMediaMetaData}
*/
/*!
\qmlproperty variant QtMultimedia::MediaPlayer::metaData.writer
This property holds the writer of the media.
These properties hold the meta data for the current media.
\list
\li \c metaData.title - the title of the media.
\li \c metaData.subTitle - the sub-title of the media.
\li \c metaData.author - the author of the media.
\li \c metaData.comment - a user comment about the media.
\li \c metaData.description - a description of the media.
\li \c metaData.category - the category of the media.
\li \c metaData.genre - the genre of the media.
\li \c metaData.year - the year of release of the media.
\li \c metaData.date - the date of the media.
\li \c metaData.userRating - a user rating of the media in the range of 0 to 100.
\li \c metaData.keywords - a list of keywords describing the media.
\li \c metaData.language - the language of the media, as an ISO 639-2 code.
\li \c metaData.publisher - the publisher of the media.
\li \c metaData.copyright - the media's copyright notice.
\li \c metaData.parentalRating - the parental rating of the media.
\li \c metaData.ratingOrganization - the name of the rating organization responsible for the
parental rating of the media.
\li \c metaData.size - the size of the media in bytes.
\li \c metaData.mediaType - the type of the media.
\li \c metaData.audioBitRate - the bit rate of the media's audio stream in bits per second.
\li \c metaData.audioCodec - the encoding of the media audio stream.
\li \c metaData.averageLevel - the average volume level of the media.
\li \c metaData.channelCount - the number of channels in the media's audio stream.
\li \c metaData.peakValue - the peak volume of media's audio stream.
\li \c metaData.sampleRate - the sample rate of the media's audio stream in hertz.
\li \c metaData.albumTitle - the title of the album the media belongs to.
\li \c metaData.albumArtist - the name of the principal artist of the album the media
belongs to.
\li \c metaData.contributingArtist - the names of artists contributing to the media.
\li \c metaData.composer - the composer of the media.
\li \c metaData.conductor - the conductor of the media.
\li \c metaData.lyrics - the lyrics to the media.
\li \c metaData.mood - the mood of the media.
\li \c metaData.trackNumber - the track number of the media.
\li \c metaData.trackCount - the number of tracks on the album containing the media.
\li \c metaData.coverArtUrlSmall - the URL of a small cover art image.
\li \c metaData.coverArtUrlLarge - the URL of a large cover art image.
\li \c metaData.resolution - the dimension of an image or video.
\li \c metaData.pixelAspectRatio - the pixel aspect ratio of an image or video.
\li \c metaData.videoFrameRate - the frame rate of the media's video stream.
\li \c metaData.videoBitRate - the bit rate of the media's video stream in bits per second.
\li \c metaData.videoCodec - the encoding of the media's video stream.
\li \c metaData.posterUrl - the URL of a poster image.
\li \c metaData.chapterNumber - the chapter number of the media.
\li \c metaData.director - the director of the media.
\li \c metaData.leadPerformer - the lead performer in the media.
\li \c metaData.writer - the writer of the media.
\endlist
\sa {QMediaMetaData}
*/

View File

@@ -84,10 +84,6 @@ void QDeclarativeCamera::_q_availabilityChanged(QMultimedia::AvailabilityStatus
viewfinder you can use \l VideoOutput with the Camera set as the source.
\qml
import QtQuick 2.0
import QtMultimedia 5.4
Item {
width: 640
height: 360
@@ -778,7 +774,16 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
/*!
\qmlproperty variant QtMultimedia::Camera::mediaObject
This property holds the media object for the camera.
This property holds the native media object for the camera.
It can be used to get a pointer to a QCamera object in order to integrate with C++ code.
\code
QObject *qmlCamera; // The QML Camera object
QCamera *camera = qvariant_cast<QCamera *>(qmlCamera->property("mediaObject"));
\endcode
\note This property is not accessible from QML.
*/
/*!
@@ -866,116 +871,42 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
*/
/*!
\qmlpropertygroup QtMultimedia::Camera::metaData
\qmlproperty variant QtMultimedia::Camera::metaData.cameraManufacturer
This property holds the name of the manufacturer of the camera.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.cameraModel
This property holds the name of the model of the camera.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.event
This property holds the event during which the photo or video is to be captured.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.subject
This property holds the name of the subject of the capture or recording.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.orientation
This property holds the clockwise rotation of the camera at time of capture.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.dateTimeOriginal
This property holds the initial time at which the photo or video is
captured.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsLatitude
\qmlproperty variant QtMultimedia::Camera::metaData.gpsLongitude
\qmlproperty variant QtMultimedia::Camera::metaData.gpsAltitude
These properties hold the geographic position in decimal degrees of the
camera at time of capture.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsTimestamp
This property holds the timestamp of the GPS position data.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsTrack
This property holds direction of movement of the camera at the time of
capture. It is measured in degrees clockwise from north.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsSpeed
This property holds the velocity in kilometers per hour of the camera at
time of capture.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsImgDirection
This property holds direction the camera is facing at the time of capture.
It is measured in degrees clockwise from north.
\sa {QMediaMetaData}
\since 5.4
*/
/*!
\qmlproperty variant QtMultimedia::Camera::metaData.gpsProcessingMethod
This property holds the name of the method for determining the GPS position
data.
These properties hold the meta data for the camera captures.
\list
\li \c metaData.cameraManufacturer holds the name of the manufacturer of the camera.
\li \c metaData.cameraModel holds the name of the model of the camera.
\li \c metaData.event holds the event during which the photo or video is to be captured.
\li \c metaData.subject holds the name of the subject of the capture or recording.
\li \c metaData.orientation holds the clockwise rotation of the camera at time of capture.
\li \c metaData.dateTimeOriginal holds the initial time at which the photo or video is captured.
\li \c metaData.gpsLatitude holds the latitude of the camera in decimal degrees at time of capture.
\li \c metaData.gpsLongitude holds the longitude of the camera in decimal degrees at time of capture.
\li \c metaData.gpsAltitude holds the altitude of the camera in meters at time of capture.
\li \c metaData.gpsTimestamp holds the timestamp of the GPS position data.
\li \c metaData.gpsTrack holds direction of movement of the camera at the time of
capture. It is measured in degrees clockwise from north.
\li \c metaData.gpsSpeed holds the velocity in kilometers per hour of the camera at time of capture.
\li \c metaData.gpsImgDirection holds direction the camera is facing at the time of capture.
It is measured in degrees clockwise from north.
\li \c metaData.gpsProcessingMethod holds the name of the method for determining the GPS position.
\endlist
\sa {QMediaMetaData}
\since 5.4

View File

@@ -57,9 +57,6 @@ QT_BEGIN_NAMESPACE
and cannot be created directly.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Item {
width: 640
height: 360

View File

@@ -44,8 +44,6 @@ QT_BEGIN_NAMESPACE
\ingroup camera_qml
\inqmlmodule QtMultimedia
This type is part of the \b{QtMultimedia 5.0} module.
CameraExposure allows you to adjust exposure related settings
like aperture and shutter speed, metering and ISO speed.
@@ -53,8 +51,6 @@ QT_BEGIN_NAMESPACE
\c exposure property of the a \l Camera should be used.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Camera {
id: camera

View File

@@ -44,8 +44,6 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_qml
\ingroup camera_qml
CameraFlash is part of the \b{QtMultimedia 5.0} module.
This type allows you to operate the camera flash
hardware and control the flash mode used. Not all cameras have
flash hardware (and in some cases it is shared with the
@@ -55,8 +53,6 @@ QT_BEGIN_NAMESPACE
\c flash property of a \l Camera should be used.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Camera {
id: camera
@@ -81,11 +77,7 @@ QDeclarativeCameraFlash::QDeclarativeCameraFlash(QCamera *camera, QObject *paren
QDeclarativeCameraFlash::~QDeclarativeCameraFlash()
{
}
/*!
\property bool QDeclarativeCameraFlash::ready
This property indicates whether the flash is charged.
*/
/*!
\qmlproperty bool QtMultimedia::CameraFlash::ready
@@ -95,11 +87,7 @@ bool QDeclarativeCameraFlash::isFlashReady() const
{
return m_exposure->isFlashReady();
}
/*!
\property QDeclarativeCameraFlash::mode
This property holds the camera flash mode. The mode can be one of the constants in \l QCameraExposure::FlashMode.
*/
/*!
\qmlproperty enumeration QtMultimedia::CameraFlash::mode

View File

@@ -44,8 +44,6 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_qml
\ingroup camera_qml
CameraFocus is part of the \b{QtMultimedia 5.0} module.
This type allows control over manual and automatic
focus settings, including information about any parts of the
camera frame that are selected for autofocusing.
@@ -54,8 +52,6 @@ QT_BEGIN_NAMESPACE
\c focus property of a \l Camera should be used.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Item {
width: 640

View File

@@ -52,8 +52,6 @@ QT_BEGIN_NAMESPACE
\c imageProcessing property of a \l Camera should be used.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Camera {
id: camera
@@ -220,7 +218,7 @@ void QDeclarativeCameraImageProcessing::setDenoisingLevel(qreal value)
}
/*!
\qmlproperty QtMultimedia::CameraImageProcessing::colorFilter
\qmlproperty enumeration QtMultimedia::CameraImageProcessing::colorFilter
This property holds which color filter if any will be applied to image data captured by the camera.

View File

@@ -51,9 +51,6 @@ It is not instantiable; to use it, call the members of the global \c QtMultimedi
For example:
\qml
import QtQuick 2.0
import QtMultimedia 5.4
Camera {
deviceId: QtMultimedia.defaultCamera.deviceId
}
@@ -121,9 +118,6 @@ Camera {
the active camera by selecting one of the items in the list.
\qml
import QtQuick 2.0
import QtMultimedia 5.4
Item {
Camera {

View File

@@ -45,12 +45,7 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_radio_qml
\inherits Item
Radio is part of the \b{QtMultimedia 5.0} module.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Rectangle {
width: 320
height: 480

View File

@@ -44,17 +44,12 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_radio_qml
\inherits Item
This type is part of the \b{QtMultimedia 5.0} module.
\c RadioData is your gateway to all the data available through RDS. RDS is the Radio Data System
which allows radio stations to broadcast information like the \l stationId, \l programType, \l programTypeName,
\l stationName, and \l radioText. This information can be read from the \c RadioData. It also allows
you to set whether the radio should tune to alternative frequencies if the current signal strength falls too much.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Rectangle {
width: 480
height: 320

View File

@@ -46,16 +46,12 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_qml
\c Torch is part of the \b{QtMultimedia 5.0} module.
In many cases the torch hardware is shared with camera flash functionality,
and might be automatically controlled by the device. You have control over
the power level (of course, higher power levels are brighter but reduce
battery life significantly).
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Torch {
power: 75 // 75% of full power

View File

@@ -83,8 +83,6 @@ QT_BEGIN_NAMESPACE
\ingroup multimedia_audio_qml
\inqmlmodule QtMultimedia
SoundEffect is part of the \b{QtMultimedia 5.0} module.
This type allows you to play uncompressed audio files (typically WAV files) in
a generally lower latency way, and is suitable for "feedback" type sounds in
response to user actions (e.g. virtual keyboard sounds, positive or negative

View File

@@ -72,6 +72,9 @@ static bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
static bool qt_frameRateRangeLessThan(const QCamera::FrameRateRange &s1, const QCamera::FrameRateRange &s2)
{
if (s1.maximumFrameRate == s2.maximumFrameRate)
return s1.minimumFrameRate < s2.minimumFrameRate;
return s1.maximumFrameRate < s2.maximumFrameRate;
}
@@ -658,7 +661,7 @@ QList<QCameraViewfinderSettings> QCamera::supportedViewfinderSettings(const QCam
&& (qFuzzyIsNull(settings.minimumFrameRate()) || qFuzzyCompare((float)settings.minimumFrameRate(), (float)s.minimumFrameRate()))
&& (qFuzzyIsNull(settings.maximumFrameRate()) || qFuzzyCompare((float)settings.maximumFrameRate(), (float)s.maximumFrameRate()))
&& (settings.pixelFormat() == QVideoFrame::Format_Invalid || settings.pixelFormat() == s.pixelFormat())
&& (settings.pixelAspectRatio() == QSize(1, 1) || settings.pixelAspectRatio() == s.pixelAspectRatio())) {
&& (settings.pixelAspectRatio().isEmpty() || settings.pixelAspectRatio() == s.pixelAspectRatio())) {
results.append(s);
}
}
@@ -1042,6 +1045,19 @@ void QCamera::unlock()
\sa QCamera::supportedViewfinderFrameRateRanges(), QCameraViewfinderSettings
*/
/*!
\fn QCamera::FrameRateRange::FrameRateRange()
Constructs a null frame rate range, with both minimumFrameRate and maximumFrameRate
equal to \c 0.0.
*/
/*!
\fn QCamera::FrameRateRange::FrameRateRange(qreal minimum, qreal maximum)
Constructs a frame rate range with the given \a minimum and \a maximum frame rates.
*/
/*!
\variable QCamera::FrameRateRange::minimumFrameRate
The minimum frame rate supported by the range, in frames per second.

View File

@@ -319,7 +319,7 @@ void QCameraImageProcessing::setDenoisingLevel(qreal level)
*/
/*!
\enum QCameraImageProcessing::Filter
\enum QCameraImageProcessing::ColorFilter
\value ColorFilterNone No filter is applied to images.
\value ColorFilterGrayscale A grayscale filter.

View File

@@ -50,8 +50,7 @@ public:
isNull(true),
minimumFrameRate(0.0),
maximumFrameRate(0.0),
pixelFormat(QVideoFrame::Format_Invalid),
pixelAspectRatio(1, 1)
pixelFormat(QVideoFrame::Format_Invalid)
{
}
@@ -135,6 +134,18 @@ QCameraViewfinderSettings &QCameraViewfinderSettings::operator=(const QCameraVie
return *this;
}
/*! \fn QCameraViewfinderSettings &QCameraViewfinderSettings::operator=(QCameraViewfinderSettings &&other)
Moves \a other to this viewfinder settings object and returns a reference to this object.
*/
/*!
\fn void QCameraViewfinderSettings::swap(QCameraViewfinderSettings &other)
Swaps this viewfinder settings object with \a other. This
function is very fast and never fails.
*/
/*!
\relates QCameraViewfinderSettings
\since 5.5

View File

@@ -31,10 +31,10 @@
**
****************************************************************************/
//! [complete snippet]
import QtQuick 2.0
import QtMultimedia 5.0
import QtMultimedia 5.5
//! [complete snippet]
Text {
text: "Click Me!";
font.pointSize: 24;

View File

@@ -102,9 +102,6 @@ In QML, use the \l{QtMultimedia::QtMultimedia::availableCameras}{QtMultimedia.av
property:
\qml
import QtQuick 2.0
import QtMultimedia 5.4
Item {
property bool isCameraAvailable: QtMultimedia.availableCameras.length > 0
}
@@ -163,9 +160,6 @@ In QML, you can use \l Camera and \l VideoOutput together to show a
simple viewfinder:
\qml
import QtQuick 2.0
import QtMultimedia 5.4
VideoOutput {
source: camera

View File

@@ -177,7 +177,7 @@ what changed, and what you might need to change when porting code.
\section2 QML Types
The QML types are accessed by using:
\code
import QtMultimedia 5.4
import QtMultimedia 5.5
\endcode
\annotatedlist multimedia_qml
The following types are accessed by using \l{Qt Audio Engine QML Types}{Qt Audio Engine}:

View File

@@ -54,10 +54,6 @@
\qml
import QtQuick 2.0
import QtMultimedia 5.0
// ...
Item {
width: 640
height: 360
@@ -196,9 +192,6 @@
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Item {
width: 640
height: 360

View File

@@ -46,7 +46,7 @@
import statement in your \c {.qml} file.
\code
import QtMultimedia 5.4
import QtMultimedia 5.5
\endcode
If you intend to use the C++ classes in your application, include the C++
@@ -104,12 +104,18 @@
\row
\li QAudioOutput
\li Sends audio data to an audio output device
\row
\li QAudioRecorder
\li Record media content from an audio source.
\row
\li QCamera
\li Access camera viewfinder.
\row
\li QCameraImageCapture
\li Record media content. Intended to be used with QCamera to record media.
\li Capture still images with a camera.
\row
\li QMediaRecorder
\li Record media content from a camera or radio tuner source.
\row
\li QMediaPlayer
\li Playback media from a source.
@@ -120,8 +126,8 @@
\li QRadioTuner
\li Access radio device.
\row
\li QVideoRendererControl
\li Control video data.
\li QAbstractVideoSurface
\li Base class for video presentation.
\endtable
\section1 Related Information

View File

@@ -26,7 +26,7 @@
****************************************************************************/
/*!
\qmlmodule QtMultimedia 5.4
\qmlmodule QtMultimedia 5.5
\title Qt Multimedia QML Types
\ingroup qmlmodules
\brief Provides QML types for multimedia support.
@@ -42,7 +42,7 @@ The QML types for \l{Qt Multimedia} support the basic use cases such as:
The QML types can be imported into your application using the following import
statement in your .qml file:
\code
import QtMultimedia 5.4
import QtMultimedia 5.5
\endcode
\section1 QML types

View File

@@ -0,0 +1,120 @@
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QGSTREAMERVIDEOOVERLAY_P_H
#define QGSTREAMERVIDEOOVERLAY_P_H
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
#include <QtGui/qwindowdefs.h>
#include <QtCore/qsize.h>
QT_BEGIN_NAMESPACE
class QGstreamerVideoOverlay
: public QObject
, public QGstreamerSyncMessageFilter
, public QGstreamerBusMessageFilter
, private QGstreamerBufferProbe
{
Q_OBJECT
Q_INTERFACES(QGstreamerSyncMessageFilter QGstreamerBusMessageFilter)
public:
explicit QGstreamerVideoOverlay(QObject *parent = 0, const QByteArray &elementName = QByteArray());
virtual ~QGstreamerVideoOverlay();
GstElement *videoSink() const;
QSize nativeVideoSize() const;
void setWindowHandle(WId id);
void expose();
void setRenderRectangle(const QRect &rect);
bool isActive() const;
Qt::AspectRatioMode aspectRatioMode() const;
void setAspectRatioMode(Qt::AspectRatioMode mode);
int brightness() const;
void setBrightness(int brightness);
int contrast() const;
void setContrast(int contrast);
int hue() const;
void setHue(int hue);
int saturation() const;
void setSaturation(int saturation);
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
Q_SIGNALS:
void nativeVideoSizeChanged();
void activeChanged();
void brightnessChanged(int brightness);
void contrastChanged(int contrast);
void hueChanged(int hue);
void saturationChanged(int saturation);
private:
GstElement *findBestVideoSink() const;
void setWindowHandle_helper(WId id);
void updateIsActive();
void probeCaps(GstCaps *caps);
static void showPrerollFrameChanged(GObject *, GParamSpec *, QGstreamerVideoOverlay *);
GstElement *m_videoSink;
QSize m_nativeVideoSize;
bool m_isActive;
bool m_hasForceAspectRatio;
bool m_hasBrightness;
bool m_hasContrast;
bool m_hasHue;
bool m_hasSaturation;
bool m_hasShowPrerollFrame;
WId m_windowId;
Qt::AspectRatioMode m_aspectRatioMode;
int m_brightness;
int m_contrast;
int m_hue;
int m_saturation;
};
QT_END_NAMESPACE
#endif // QGSTREAMERVIDEOOVERLAY_P_H

View File

@@ -38,6 +38,7 @@
#include "qgstreamervideorendererinterface_p.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamervideooverlay_p.h>
QT_BEGIN_NAMESPACE
@@ -52,13 +53,15 @@ class QGstreamerVideoWidgetControl
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter QGstreamerBusMessageFilter)
public:
QGstreamerVideoWidgetControl(QObject *parent = 0);
explicit QGstreamerVideoWidgetControl(QObject *parent = 0, const QByteArray &elementName = QByteArray());
virtual ~QGstreamerVideoWidgetControl();
GstElement *videoSink();
QWidget *videoWidget();
void stopRenderer();
Qt::AspectRatioMode aspectRatioMode() const;
void setAspectRatioMode(Qt::AspectRatioMode mode);
@@ -77,27 +80,27 @@ public:
int saturation() const;
void setSaturation(int saturation);
void setOverlay();
bool eventFilter(QObject *object, QEvent *event);
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
public slots:
void updateNativeVideoSize();
signals:
void sinkChanged();
void readyChanged(bool);
private Q_SLOTS:
void onOverlayActiveChanged();
void onNativeVideoSizeChanged();
private:
void createVideoWidget();
void windowExposed();
void updateWidgetAttributes();
GstElement *m_videoSink;
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
QGstreamerVideoOverlay m_videoOverlay;
QGstreamerVideoWidget *m_widget;
bool m_stopped;
WId m_windowId;
Qt::AspectRatioMode m_aspectRatioMode;
bool m_fullScreen;
};

View File

@@ -38,23 +38,22 @@
#include "qgstreamervideorendererinterface_p.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamerbufferprobe_p.h>
#include <private/qgstreamervideooverlay_p.h>
#include <QtGui/qcolor.h>
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
class QGstreamerVideoWindow : public QVideoWindowControl,
class QGstreamerVideoWindow :
public QVideoWindowControl,
public QGstreamerVideoRendererInterface,
public QGstreamerSyncMessageFilter,
private QGstreamerBufferProbe
public QGstreamerBusMessageFilter
{
Q_OBJECT
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter)
Q_PROPERTY(QColor colorKey READ colorKey WRITE setColorKey)
Q_PROPERTY(bool autopaintColorKey READ autopaintColorKey WRITE setAutopaintColorKey)
Q_INTERFACES(QGstreamerVideoRendererInterface QGstreamerSyncMessageFilter QGstreamerBusMessageFilter)
public:
QGstreamerVideoWindow(QObject *parent = 0, const char *elementName = 0);
explicit QGstreamerVideoWindow(QObject *parent = 0, const QByteArray &elementName = QByteArray());
~QGstreamerVideoWindow();
WId winId() const;
@@ -71,12 +70,6 @@ public:
Qt::AspectRatioMode aspectRatioMode() const;
void setAspectRatioMode(Qt::AspectRatioMode mode);
QColor colorKey() const;
void setColorKey(const QColor &);
bool autopaintColorKey() const;
void setAutopaintColorKey(bool);
void repaint();
int brightness() const;
@@ -96,24 +89,18 @@ public:
GstElement *videoSink();
bool processSyncMessage(const QGstreamerMessage &message);
bool processBusMessage(const QGstreamerMessage &message);
bool isReady() const { return m_windowId != 0; }
signals:
void sinkChanged();
void readyChanged(bool);
private slots:
void updateNativeVideoSize(const QSize &size);
private:
void probeCaps(GstCaps *caps);
GstElement *m_videoSink;
QGstreamerVideoOverlay m_videoOverlay;
WId m_windowId;
Qt::AspectRatioMode m_aspectRatioMode;
QRect m_displayRect;
bool m_fullScreen;
QSize m_nativeSize;
mutable QColor m_colorKey;
};

View File

@@ -61,11 +61,13 @@
# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin"
# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "videoconvert"
# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw"
# define QT_GSTREAMER_VIDEOOVERLAY_INTERFACE_NAME "GstVideoOverlay"
#else
# define QT_GSTREAMER_PLAYBIN_ELEMENT_NAME "playbin2"
# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin2"
# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "ffmpegcolorspace"
# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw-int"
# define QT_GSTREAMER_VIDEOOVERLAY_INTERFACE_NAME "GstXOverlay"
#endif
QT_BEGIN_NAMESPACE
@@ -147,6 +149,7 @@ GstStructure *qt_gst_structure_new_empty(const char *name);
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur);
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur);
GstCaps *qt_gst_caps_normalize(GstCaps *caps);
const gchar *qt_gst_element_get_factory_name(GstElement *element);
QDebug operator <<(QDebug debug, GstCaps *caps);

View File

@@ -468,6 +468,7 @@ void QMediaPlayerPrivate::disconnectPlaylist()
QObject::disconnect(playlist, SIGNAL(currentMediaChanged(QMediaContent)),
q, SLOT(_q_updateMedia(QMediaContent)));
QObject::disconnect(playlist, SIGNAL(destroyed()), q, SLOT(_q_playlistDestroyed()));
q->unbind(playlist);
}
}
@@ -475,6 +476,7 @@ void QMediaPlayerPrivate::connectPlaylist()
{
Q_Q(QMediaPlayer);
if (playlist) {
q->bind(playlist);
QObject::connect(playlist, SIGNAL(currentMediaChanged(QMediaContent)),
q, SLOT(_q_updateMedia(QMediaContent)));
QObject::connect(playlist, SIGNAL(destroyed()), q, SLOT(_q_playlistDestroyed()));
@@ -622,6 +624,8 @@ QMediaPlayer::~QMediaPlayer()
{
Q_D(QMediaPlayer);
d->disconnectPlaylist();
if (d->service) {
if (d->control)
d->service->releaseControl(d->control);

View File

@@ -106,7 +106,7 @@ Q_CONSTRUCTOR_FUNCTION(qRegisterMediaPlaylistMetaTypes)
/*!
Create a new playlist object for with the given \a parent.
Create a new playlist object with the given \a parent.
*/
QMediaPlaylist::QMediaPlaylist(QObject *parent)
@@ -214,8 +214,10 @@ bool QMediaPlaylist::setMediaObject(QMediaObject *mediaObject)
connect(d->control, SIGNAL(currentMediaChanged(QMediaContent)),
this, SIGNAL(currentMediaChanged(QMediaContent)));
if (oldSize)
if (oldSize) {
emit mediaAboutToBeRemoved(0, oldSize-1);
emit mediaRemoved(0, oldSize-1);
}
if (playlist->mediaCount()) {
emit mediaAboutToBeInserted(0,playlist->mediaCount()-1);
@@ -302,7 +304,7 @@ int QMediaPlaylist::mediaCount() const
}
/*!
Returns true if the playlist contains no items; otherwise returns false.
Returns true if the playlist contains no items, otherwise returns false.
\sa mediaCount()
*/
@@ -312,7 +314,7 @@ bool QMediaPlaylist::isEmpty() const
}
/*!
Returns true if the playlist can be modified; otherwise returns false.
Returns true if the playlist can be modified, otherwise returns false.
\sa mediaCount()
*/
@@ -333,7 +335,7 @@ QMediaContent QMediaPlaylist::media(int index) const
/*!
Append the media \a content to the playlist.
Returns true if the operation is successful, otherwise return false.
Returns true if the operation is successful, otherwise returns false.
*/
bool QMediaPlaylist::addMedia(const QMediaContent &content)
{
@@ -343,7 +345,7 @@ bool QMediaPlaylist::addMedia(const QMediaContent &content)
/*!
Append multiple media content \a items to the playlist.
Returns true if the operation is successful, otherwise return false.
Returns true if the operation is successful, otherwise returns false.
*/
bool QMediaPlaylist::addMedia(const QList<QMediaContent> &items)
{
@@ -353,23 +355,25 @@ bool QMediaPlaylist::addMedia(const QList<QMediaContent> &items)
/*!
Insert the media \a content to the playlist at position \a pos.
Returns true if the operation is successful, otherwise false.
Returns true if the operation is successful, otherwise returns false.
*/
bool QMediaPlaylist::insertMedia(int pos, const QMediaContent &content)
{
return d_func()->playlist()->insertMedia(pos, content);
QMediaPlaylistProvider *playlist = d_func()->playlist();
return playlist->insertMedia(qBound(0, pos, playlist->mediaCount()), content);
}
/*!
Insert multiple media content \a items to the playlist at position \a pos.
Returns true if the operation is successful, otherwise false.
Returns true if the operation is successful, otherwise returns false.
*/
bool QMediaPlaylist::insertMedia(int pos, const QList<QMediaContent> &items)
{
return d_func()->playlist()->insertMedia(pos, items);
QMediaPlaylistProvider *playlist = d_func()->playlist();
return playlist->insertMedia(qBound(0, pos, playlist->mediaCount()), items);
}
/*!
@@ -379,8 +383,11 @@ bool QMediaPlaylist::insertMedia(int pos, const QList<QMediaContent> &items)
*/
bool QMediaPlaylist::removeMedia(int pos)
{
Q_D(QMediaPlaylist);
return d->playlist()->removeMedia(pos);
QMediaPlaylistProvider *playlist = d_func()->playlist();
if (pos >= 0 && pos < playlist->mediaCount())
return playlist->removeMedia(pos);
else
return false;
}
/*!
@@ -390,8 +397,13 @@ bool QMediaPlaylist::removeMedia(int pos)
*/
bool QMediaPlaylist::removeMedia(int start, int end)
{
Q_D(QMediaPlaylist);
return d->playlist()->removeMedia(start, end);
QMediaPlaylistProvider *playlist = d_func()->playlist();
start = qMax(0, start);
end = qMin(end, playlist->mediaCount() - 1);
if (start <= end)
return playlist->removeMedia(start, end);
else
return false;
}
/*!

View File

@@ -53,7 +53,6 @@ public:
virtual QMediaControl* requestControl(const char *name) = 0;
#ifndef QT_NO_MEMBER_TEMPLATES
template <typename T> inline T requestControl() {
if (QMediaControl *control = requestControl(qmediacontrol_iid<T>())) {
if (T typedControl = qobject_cast<T>(control))
@@ -62,7 +61,6 @@ public:
}
return 0;
}
#endif
virtual void releaseControl(QMediaControl *control) = 0;

View File

@@ -359,19 +359,14 @@ public:
}
break;
case QMediaServiceProviderHint::Device: {
plugin = plugins[0];
foreach (QMediaServiceProviderPlugin *currentPlugin, plugins) {
QMediaServiceSupportedDevicesInterface *iface =
qobject_cast<QMediaServiceSupportedDevicesInterface*>(currentPlugin);
if (!iface) {
// the plugin may support the device,
// but this choice still can be overridden
if (iface && iface->devices(type).contains(hint.device())) {
plugin = currentPlugin;
} else {
if (iface->devices(type).contains(hint.device())) {
plugin = currentPlugin;
break;
}
break;
}
}
}
@@ -887,14 +882,6 @@ QMediaServiceProvider *QMediaServiceProvider::defaultServiceProvider()
Destroys a media service supported devices interface.
*/
/*!
\since 5.3
\fn QByteArray QMediaServiceSupportedDevicesInterface::defaultDevice(const QByteArray &service) const
Returns the default device for a \a service type.
*/
/*!
\fn QList<QByteArray> QMediaServiceSupportedDevicesInterface::devices(const QByteArray &service) const
@@ -907,6 +894,29 @@ QMediaServiceProvider *QMediaServiceProvider::defaultServiceProvider()
Returns the description of a \a device available for a \a service type.
*/
/*!
\class QMediaServiceDefaultDeviceInterface
\inmodule QtMultimedia
\brief The QMediaServiceDefaultDeviceInterface class interface
identifies the default device used by a media service plug-in.
A QMediaServiceProviderPlugin may implement this interface.
\since 5.3
*/
/*!
\fn QMediaServiceDefaultDeviceInterface::~QMediaServiceDefaultDeviceInterface()
Destroys a media service default device interface.
*/
/*!
\fn QByteArray QMediaServiceDefaultDeviceInterface::defaultDevice(const QByteArray &service) const
Returns the default device for a \a service type.
*/
/*!
\class QMediaServiceCameraInfoInterface
\inmodule QtMultimedia
@@ -918,6 +928,12 @@ QMediaServiceProvider *QMediaServiceProvider::defaultServiceProvider()
implement the QMediaServiceSupportedDevicesInterface.
*/
/*!
\fn QMediaServiceCameraInfoInterface::~QMediaServiceCameraInfoInterface()
Destroys a media service camera info interface.
*/
/*!
\fn QMediaServiceCameraInfoInterface::cameraPosition(const QByteArray &device) const

View File

@@ -276,10 +276,13 @@ QAbstractVideoFilter::~QAbstractVideoFilter()
}
/*!
\return \c true if the filter is active.
\property QAbstractVideoFilter::active
\brief the active status of the filter.
By default filters are active. When set to \c false, the filter will be
ignored by the VideoOutput type.
This is true if the filter is active, false otherwise.
By default filters are active. When set to \c false, the filter will be
ignored by the VideoOutput type.
*/
bool QAbstractVideoFilter::isActive() const
{
@@ -287,9 +290,6 @@ bool QAbstractVideoFilter::isActive() const
return d->active;
}
/*!
\internal
*/
void QAbstractVideoFilter::setActive(bool v)
{
Q_D(QAbstractVideoFilter);

View File

@@ -38,63 +38,87 @@ import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.util.Log;
import java.lang.Math;
import java.util.concurrent.locks.ReentrantLock;
public class QtCameraListener implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
{
private int m_cameraId = -1;
private byte[][] m_cameraPreviewBuffer = null;
private volatile int m_actualPreviewBuffer = 0;
private final ReentrantLock m_buffersLock = new ReentrantLock();
private boolean m_fetchEachFrame = false;
private static final String TAG = "Qt Camera";
private static final int BUFFER_POOL_SIZE = 2;
private int m_cameraId = -1;
private boolean m_notifyNewFrames = false;
private byte[][] m_previewBuffers = null;
private byte[] m_lastPreviewBuffer = null;
private Camera.Size m_previewSize = null;
private QtCameraListener(int id)
{
m_cameraId = id;
}
public void preparePreviewBuffer(Camera camera)
public void notifyNewFrames(boolean notify)
{
Camera.Size previewSize = camera.getParameters().getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int)Math.ceil(bytesPerPixel*previewSize.width*previewSize.height);
m_buffersLock.lock();
if (m_cameraPreviewBuffer == null || m_cameraPreviewBuffer[0].length < bufferSizeNeeded)
m_cameraPreviewBuffer = new byte[2][bufferSizeNeeded];
m_buffersLock.unlock();
m_notifyNewFrames = notify;
}
public void fetchEachFrame(boolean fetch)
public byte[] lastPreviewBuffer()
{
m_fetchEachFrame = fetch;
return m_lastPreviewBuffer;
}
public byte[] lockAndFetchPreviewBuffer()
public int previewWidth()
{
//This method should always be followed by unlockPreviewBuffer()
//This method is not just a getter. It also marks last preview as already seen one.
//We should reset actualBuffer flag here to make sure we will not use old preview with future captures
byte[] result = null;
m_buffersLock.lock();
result = m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 0 : 1];
m_actualPreviewBuffer = 0;
return result;
if (m_previewSize == null)
return -1;
return m_previewSize.width;
}
public void unlockPreviewBuffer()
public int previewHeight()
{
if (m_buffersLock.isHeldByCurrentThread())
m_buffersLock.unlock();
if (m_previewSize == null)
return -1;
return m_previewSize.height;
}
public byte[] callbackBuffer()
public void setupPreviewCallback(Camera camera)
{
return m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0];
// Clear previous callback (also clears added buffers)
m_lastPreviewBuffer = null;
camera.setPreviewCallbackWithBuffer(null);
final Camera.Parameters params = camera.getParameters();
m_previewSize = params.getPreviewSize();
double bytesPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8.0;
int bufferSizeNeeded = (int) Math.ceil(bytesPerPixel * m_previewSize.width * m_previewSize.height);
// We could keep the same buffers when they are already bigger than the required size
// but the Android doc says the size must match, so in doubt just replace them.
if (m_previewBuffers == null || m_previewBuffers[0].length != bufferSizeNeeded)
m_previewBuffers = new byte[BUFFER_POOL_SIZE][bufferSizeNeeded];
// Add callback and queue all buffers
camera.setPreviewCallbackWithBuffer(this);
for (byte[] buffer : m_previewBuffers)
camera.addCallbackBuffer(buffer);
}
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
// Re-enqueue the last buffer
if (m_lastPreviewBuffer != null)
camera.addCallbackBuffer(m_lastPreviewBuffer);
m_lastPreviewBuffer = data;
if (data != null && m_notifyNewFrames)
notifyNewPreviewFrame(m_cameraId, data, m_previewSize.width, m_previewSize.height);
}
@Override
@@ -109,24 +133,6 @@ public class QtCameraListener implements Camera.ShutterCallback,
notifyPictureCaptured(m_cameraId, data);
}
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
m_buffersLock.lock();
if (data != null && m_fetchEachFrame)
notifyFrameFetched(m_cameraId, data);
if (data == m_cameraPreviewBuffer[0])
m_actualPreviewBuffer = 1;
else if (data == m_cameraPreviewBuffer[1])
m_actualPreviewBuffer = 2;
else
m_actualPreviewBuffer = 0;
camera.addCallbackBuffer(m_cameraPreviewBuffer[(m_actualPreviewBuffer == 1) ? 1 : 0]);
m_buffersLock.unlock();
}
@Override
public void onAutoFocus(boolean success, Camera camera)
{
@@ -136,5 +142,5 @@ public class QtCameraListener implements Camera.ShutterCallback,
private static native void notifyAutoFocusComplete(int id, boolean success);
private static native void notifyPictureExposed(int id);
private static native void notifyPictureCaptured(int id, byte[] data);
private static native void notifyFrameFetched(int id, byte[] data);
private static native void notifyNewPreviewFrame(int id, byte[] data, int width, int height);
}

View File

@@ -206,9 +206,10 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(previewFetched(QByteArray)), this, SLOT(onCameraPreviewFetched(QByteArray)));
connect(m_camera, SIGNAL(frameFetched(QByteArray)),
this, SLOT(onCameraFrameFetched(QByteArray)),
connect(m_camera, SIGNAL(lastPreviewFrameFetched(QByteArray,int,int)),
this, SLOT(onLastPreviewFrameFetched(QByteArray,int,int)));
connect(m_camera, SIGNAL(newPreviewFrame(QByteArray,int,int)),
this, SLOT(onNewPreviewFrame(QByteArray,int,int)),
Qt::DirectConnection);
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
@@ -221,7 +222,7 @@ bool QAndroidCameraSession::open()
if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
m_camera->setPreviewFormat(AndroidCamera::NV21);
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
emit opened();
} else {
@@ -410,7 +411,7 @@ void QAndroidCameraSession::addProbe(QAndroidMediaVideoProbeControl *probe)
if (probe)
m_videoProbes << probe;
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
@@ -419,7 +420,7 @@ void QAndroidCameraSession::removeProbe(QAndroidMediaVideoProbeControl *probe)
m_videoProbesMutex.lock();
m_videoProbes.remove(probe);
if (m_camera)
m_camera->fetchEachFrame(m_videoProbes.count());
m_camera->notifyNewFrames(m_videoProbes.count());
m_videoProbesMutex.unlock();
}
@@ -562,25 +563,54 @@ void QAndroidCameraSession::onCameraPictureExposed()
m_camera->fetchLastPreviewFrame();
}
void QAndroidCameraSession::onCameraPreviewFetched(const QByteArray &preview)
void QAndroidCameraSession::onLastPreviewFrameFetched(const QByteArray &preview, int width, int height)
{
if (preview.size()) {
QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
m_currentImageCaptureId,
preview,
width,
height,
m_camera->getRotation());
}
}
void QAndroidCameraSession::onCameraFrameFetched(const QByteArray &frame)
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation)
{
emit imageCaptured(id, prepareImageFromPreviewData(data, width, height, rotation));
}
QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation)
{
QImage result(width, height, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)result.bits(),
width,
height);
QTransform transform;
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
transform.rotate(rotation);
result = result.transformed(transform);
return result;
}
void QAndroidCameraSession::onNewPreviewFrame(const QByteArray &frame, int width, int height)
{
m_videoProbesMutex.lock();
if (frame.size() && m_videoProbes.count()) {
const QSize frameSize = m_camera->previewSize();
// Bytes per line should be only for the first plane. For NV21, the Y plane has 8 bits
// per sample, so bpl == width
QVideoFrame videoFrame(new DataVideoBuffer(frame, frameSize.width()),
frameSize,
QVideoFrame videoFrame(new DataVideoBuffer(frame, width),
QSize(width, height),
QVideoFrame::Format_NV21);
foreach (QAndroidMediaVideoProbeControl *probe, m_videoProbes)
probe->newFrameProbed(videoFrame);
@@ -666,35 +696,6 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
}
void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int rotation)
{
emit imageCaptured(id, prepareImageFromPreviewData(data, rotation));
}
QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int rotation)
{
QSize frameSize = m_camera->previewSize();
QImage result(frameSize, QImage::Format_ARGB32);
qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
(quint32 *)result.bits(),
frameSize.width(),
frameSize.height());
QTransform transform;
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
transform.rotate(rotation);
result = result.transformed(transform);
return result;
}
void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
if (ready && m_state == QCamera::ActiveState)

View File

@@ -113,9 +113,9 @@ private Q_SLOTS:
void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraPictureExposed();
void onCameraPreviewFetched(const QByteArray &preview);
void onCameraFrameFetched(const QByteArray &frame);
void onCameraPictureCaptured(const QByteArray &data);
void onLastPreviewFrameFetched(const QByteArray &preview, int width, int height);
void onNewPreviewFrame(const QByteArray &frame, int width, int height);
void onCameraPreviewStarted();
void onCameraPreviewStopped();
@@ -129,8 +129,8 @@ private:
void stopPreview();
void applyImageSettings();
void processPreviewImage(int id, const QByteArray &data, int rotation);
QImage prepareImageFromPreviewData(const QByteArray &data, int rotation);
void processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation);
QImage prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation);
void processCapturedImage(int id,
const QByteArray &data,
const QSize &resolution,

View File

@@ -233,9 +233,14 @@ void QAndroidCaptureSession::start()
m_notifyTimer.start();
updateDuration();
if (m_cameraSession)
if (m_cameraSession) {
m_cameraSession->setReadyForCapture(false);
// Preview frame callback is cleared when setting up the camera with the media recorder.
// We need to reset it.
m_cameraSession->camera()->setupPreviewFrameCallback();
}
m_state = QMediaRecorder::RecordingState;
emit stateChanged(m_state);
}

View File

@@ -114,7 +114,7 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
}
}
static void notifyFrameFetched(JNIEnv *env, jobject, int id, jbyteArray data)
static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data, int width, int height)
{
QMutexLocker locker(&g_cameraMapMutex);
AndroidCamera *obj = g_cameraMap->value(id, 0);
@@ -123,7 +123,7 @@ static void notifyFrameFetched(JNIEnv *env, jobject, int id, jbyteArray data)
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
Q_EMIT obj->frameFetched(bytes);
Q_EMIT obj->newPreviewFrame(bytes, width, height);
}
}
@@ -204,7 +204,8 @@ public:
Q_INVOKABLE void takePicture();
Q_INVOKABLE void fetchEachFrame(bool fetch);
Q_INVOKABLE void setupPreviewFrameCallback();
Q_INVOKABLE void notifyNewFrames(bool notify);
Q_INVOKABLE void fetchLastPreviewFrame();
Q_INVOKABLE void applyParameters();
@@ -229,7 +230,7 @@ Q_SIGNALS:
void whiteBalanceChanged();
void previewFetched(const QByteArray &preview);
void lastPreviewFrameFetched(const QByteArray &preview, int width, int height);
};
AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
@@ -247,7 +248,7 @@ AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
connect(d, &AndroidCameraPrivate::previewFetched, this, &AndroidCamera::previewFetched);
connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
}
AndroidCamera::~AndroidCamera()
@@ -633,10 +634,16 @@ void AndroidCamera::takePicture()
QMetaObject::invokeMethod(d, "takePicture", Qt::BlockingQueuedConnection);
}
void AndroidCamera::fetchEachFrame(bool fetch)
void AndroidCamera::setupPreviewFrameCallback()
{
Q_D(AndroidCamera);
QMetaObject::invokeMethod(d, "fetchEachFrame", Q_ARG(bool, fetch));
QMetaObject::invokeMethod(d, "setupPreviewFrameCallback");
}
void AndroidCamera::notifyNewFrames(bool notify)
{
Q_D(AndroidCamera);
QMetaObject::invokeMethod(d, "notifyNewFrames", Q_ARG(bool, notify));
}
void AndroidCamera::fetchLastPreviewFrame()
@@ -1307,17 +1314,7 @@ void AndroidCameraPrivate::setJpegQuality(int quality)
void AndroidCameraPrivate::startPreview()
{
//We need to clear preview buffers queue here, but there is no method to do it
//Though just resetting preview callback do the trick
m_camera.callMethod<void>("setPreviewCallbackWithBuffer",
"(Landroid/hardware/Camera$PreviewCallback;)V",
jobject(0));
m_cameraListener.callMethod<void>("preparePreviewBuffer", "(Landroid/hardware/Camera;)V", m_camera.object());
QJNIObjectPrivate buffer = m_cameraListener.callObjectMethod<jbyteArray>("callbackBuffer");
m_camera.callMethod<void>("addCallbackBuffer", "([B)V", buffer.object());
m_camera.callMethod<void>("setPreviewCallbackWithBuffer",
"(Landroid/hardware/Camera$PreviewCallback;)V",
m_cameraListener.object());
setupPreviewFrameCallback();
m_camera.callMethod<void>("startPreview");
emit previewStarted();
}
@@ -1338,28 +1335,34 @@ void AndroidCameraPrivate::takePicture()
m_cameraListener.object());
}
void AndroidCameraPrivate::fetchEachFrame(bool fetch)
void AndroidCameraPrivate::setupPreviewFrameCallback()
{
m_cameraListener.callMethod<void>("fetchEachFrame", "(Z)V", fetch);
m_cameraListener.callMethod<void>("setupPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
}
void AndroidCameraPrivate::notifyNewFrames(bool notify)
{
m_cameraListener.callMethod<void>("notifyNewFrames", "(Z)V", notify);
}
void AndroidCameraPrivate::fetchLastPreviewFrame()
{
QJNIEnvironmentPrivate env;
QJNIObjectPrivate data = m_cameraListener.callObjectMethod("lockAndFetchPreviewBuffer", "()[B");
if (!data.isValid()) {
m_cameraListener.callMethod<void>("unlockPreviewBuffer");
QJNIObjectPrivate data = m_cameraListener.callObjectMethod("lastPreviewBuffer", "()[B");
if (!data.isValid())
return;
}
const int arrayLength = env->GetArrayLength(static_cast<jbyteArray>(data.object()));
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(static_cast<jbyteArray>(data.object()),
0,
arrayLength,
reinterpret_cast<jbyte *>(bytes.data()));
m_cameraListener.callMethod<void>("unlockPreviewBuffer");
emit previewFetched(bytes);
emit lastPreviewFrameFetched(bytes,
m_cameraListener.callMethod<jint>("previewWidth"),
m_cameraListener.callMethod<jint>("previewHeight"));
}
void AndroidCameraPrivate::applyParameters()
@@ -1404,7 +1407,7 @@ bool AndroidCamera::initJNI(JNIEnv *env)
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
{"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
{"notifyFrameFetched", "(I[B)V", (void *)notifyFrameFetched}
{"notifyNewPreviewFrame", "(I[BII)V", (void *)notifyNewPreviewFrame}
};
if (clazz && env->RegisterNatives(clazz,

View File

@@ -155,7 +155,8 @@ public:
void takePicture();
void fetchEachFrame(bool fetch);
void setupPreviewFrameCallback();
void notifyNewFrames(bool notify);
void fetchLastPreviewFrame();
QJNIObjectPrivate getCameraObject();
@@ -176,8 +177,8 @@ Q_SIGNALS:
void pictureExposed();
void pictureCaptured(const QByteArray &data);
void previewFetched(const QByteArray &preview);
void frameFetched(const QByteArray &frame);
void lastPreviewFrameFetched(const QByteArray &preview, int width, int height);
void newPreviewFrame(const QByteArray &frame, int width, int height);
private:
AndroidCamera(AndroidCameraPrivate *d, QThread *worker);

View File

@@ -646,7 +646,8 @@ bool DSCameraSession::configurePreviewFormat()
if ((m_viewfinderSettings.resolution().isEmpty() || m_viewfinderSettings.resolution() == s.resolution())
&& (qFuzzyIsNull(m_viewfinderSettings.minimumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.minimumFrameRate(), (float)s.minimumFrameRate()))
&& (qFuzzyIsNull(m_viewfinderSettings.maximumFrameRate()) || qFuzzyCompare((float)m_viewfinderSettings.maximumFrameRate(), (float)s.maximumFrameRate()))
&& (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid || m_viewfinderSettings.pixelFormat() == s.pixelFormat())) {
&& (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid || m_viewfinderSettings.pixelFormat() == s.pixelFormat())
&& (m_viewfinderSettings.pixelAspectRatio().isEmpty() || m_viewfinderSettings.pixelAspectRatio() == s.pixelAspectRatio())) {
resolvedViewfinderSettings = s;
break;
}
@@ -899,6 +900,7 @@ void DSCameraSession::updateSourceCapabilities()
settings.setMinimumFrameRate(frameRateRange.minimumFrameRate);
settings.setMaximumFrameRate(frameRateRange.maximumFrameRate);
settings.setPixelFormat(pixelFormat);
settings.setPixelAspectRatio(1, 1);
m_supportedViewfinderSettings.append(settings);
AM_MEDIA_TYPE format;

View File

@@ -34,6 +34,7 @@
#include "camerabinaudioencoder.h"
#include "camerabincontainer.h"
#include <private/qgstcodecsinfo_p.h>
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
@@ -120,8 +121,7 @@ GstEncodingProfile *CameraBinAudioEncoder::createProfile()
void CameraBinAudioEncoder::applySettings(GstElement *encoder)
{
GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
const char * const name = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
const char * const name = qt_gst_element_get_factory_name(encoder);
const bool isVorbis = qstrcmp(name, "vorbisenc") == 0;

View File

@@ -124,8 +124,8 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#else
m_videoWindow = new QGstreamerVideoWindow(this);
#endif
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -133,9 +133,8 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway. QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidgetControl->videoSink()) {
delete m_videoWidgetControl;
m_videoWidgetControl = 0;

View File

@@ -388,7 +388,7 @@ void CameraBinSession::setupCaptureResolution()
gst_caps_unref(caps);
// Special case when using mfw_v4lsrc
if (m_videoSrc && qstrcmp(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(m_videoSrc))), "mfw_v4lsrc") == 0) {
if (m_videoSrc && qstrcmp(qt_gst_element_get_factory_name(m_videoSrc), "mfw_v4lsrc") == 0) {
int capMode = 0;
if (viewfinderResolution == QSize(320, 240))
capMode = 1;
@@ -472,9 +472,7 @@ GstElement *CameraBinSession::buildCameraSource()
#if CAMERABIN_DEBUG
qDebug() << "set camera device" << m_inputDevice;
#endif
const char *const cameraSrcName = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(m_cameraSrc)));
m_usingWrapperCameraBinSrc = qstrcmp(cameraSrcName, "wrappercamerabinsrc") == 0;
m_usingWrapperCameraBinSrc = qstrcmp(qt_gst_element_get_factory_name(m_cameraSrc), "wrappercamerabinsrc") == 0;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), "video-source")) {
if (!m_videoSrc) {

View File

@@ -34,6 +34,7 @@
#include "camerabinvideoencoder.h"
#include "camerabinsession.h"
#include "camerabincontainer.h"
#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
@@ -178,8 +179,7 @@ GstEncodingProfile *CameraBinVideoEncoder::createProfile()
void CameraBinVideoEncoder::applySettings(GstElement *encoder)
{
GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
const char * const name = gst_plugin_feature_get_name(
GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
const char * const name = qt_gst_element_get_factory_name(encoder);
const int bitRate = m_actualVideoSettings.bitRate();
if (bitRate == -1) {

View File

@@ -102,8 +102,8 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
m_videoRenderer = new QGstreamerVideoRenderer(this);
m_videoWindow = new QGstreamerVideoWindow(this);
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -112,9 +112,8 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
#if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway. QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidgetControl->videoSink()) {
delete m_videoWidgetControl;
m_videoWidgetControl = 0;

View File

@@ -776,11 +776,11 @@ void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState
if (!m_waitingForEos) {
m_waitingForEos = true;
//qDebug() << "Waiting for EOS";
// Unless gstreamer is in GST_STATE_PLAYING our EOS message will not be received.
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
//with live sources it's necessary to send EOS even to pipeline
//before going to STOPPED state
gst_element_send_event(m_pipeline, gst_event_new_eos());
// Unless gstreamer is in GST_STATE_PLAYING our EOS message will not be received.
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
return;
} else {

View File

@@ -99,8 +99,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#else
m_videoWindow = new QGstreamerVideoWindow(this);
#endif
// If the GStreamer sink element is not available (xvimagesink), don't provide
// the video window control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
if (!m_videoWindow->videoSink()) {
delete m_videoWindow;
m_videoWindow = 0;
@@ -109,8 +109,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#if defined(HAVE_WIDGETS)
m_videoWidget = new QGstreamerVideoWidgetControl(this);
// If the GStreamer sink element is not available (xvimagesink or ximagesink), don't provide
// the video widget control since it won't work anyway.
// If the GStreamer video sink is not available, don't provide the video widget control since
// it won't work anyway.
// QVideoWidget will fall back to QVideoRendererControl in that case.
if (!m_videoWidget->videoSink()) {
delete m_videoWidget;

View File

@@ -58,11 +58,7 @@ Q_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video")
\ingroup multimedia_video_qml
\inqmlmodule QtMultimedia
\c VideoOutput is part of the \b{QtMultimedia 5.0} module.
\qml
import QtQuick 2.0
import QtMultimedia 5.0
Rectangle {
width: 800