Merge remote-tracking branch 'origin/5.3' into 5.4
Conflicts:
    src/gsttools/qgstreamervideoinputdevicecontrol.cpp
    src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp
    src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp

Change-Id: Ic854589562d2deeaa0ebf75840cb8a2ee32e99de
config.tests/linux_v4l/linux_v4l.pro (new file, 1 line)
@@ -0,0 +1 @@
+SOURCES += main.cpp
config.tests/linux_v4l/main.cpp (new file, 47 lines)
@@ -0,0 +1,47 @@
+/****************************************************************************
+**
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <linux/videodev2.h>
+
+int main(int argc, char** argv)
+{
+    return 0;
+}
@@ -30,8 +30,11 @@
 ** $QT_END_LICENSE$
 **
 ****************************************************************************/
+#ifndef _WIN32_WCE
 #include <wmp.h>
+#else
+#include <wmpcore.h>
+#endif
 
 int main(int, char**)
 {
@@ -3,4 +3,5 @@ CONFIG += console
 
 SOURCES += main.cpp
 
-LIBS += -lstrmiids -lole32 -lOleaut32 -luser32 -lgdi32
+LIBS += -lstrmiids -lole32 -lOleaut32
+!wince*:LIBS += -luser32 -lgdi32
dist/changes-5.3.2 (new file, 65 lines)
@@ -0,0 +1,65 @@
+Qt 5.3.2 is a bug-fix release. It maintains both forward and backward
+compatibility (source and binary) with Qt 5.3.0 and 5.3.1.
+
+For more details, refer to the online documentation included in this
+distribution. The documentation is also available online:
+
+http://qt-project.org/doc/qt-5.3
+
+The Qt version 5.3 series is binary compatible with the 5.2.x series.
+Applications compiled for 5.2 will continue to run with 5.3.
+
+Some of the changes listed in this file include issue tracking numbers
+corresponding to tasks in the Qt Bug Tracker:
+
+http://bugreports.qt-project.org/
+
+Each of these identifiers can be entered in the bug tracker to obtain more
+information about a particular change.
+
+****************************************************************************
+*                       Platform Specific Changes                          *
+****************************************************************************
+
+Android
+-------
+
+- Fixed regression causing videos recorded with the camera not to be registered with the Android
+  media scanner, making them invisible to media browsing apps.
+- Fixed crash when unloading a QCamera while a recording is active.
+- [QTBUG-39307] Setting camera parameters on the QML Camera type (e.g. digitalZoom, flashMode)
+  now works correctly when set before the camera is loaded.
+- [QTBUG-40208] QAudioOutput::setNotifyInterval() can now be used when the output is active.
+- [QTBUG-40274] Fixed metadata not being loaded by the MediaPlayer when playing a remote media,
+  from a qrc file or from assets.
+
+iOS
+---
+
+- [QTBUG-39036] Audio played using SoundEffect or QAudioOutput is now correctly muted when the
+  device is set to silent mode or when the screen is locked.
+- [QTBUG-39385] The last video frame displayed in a QML VideoOutput doesn't remain on screen
+  anymore after destroying the VideoOutput.
+
+Linux
+-----
+
+- MediaPlayer's loops property now works correctly when playing a media from a qrc file.
+- [QTBUG-29742] Fixed Qt apps hanging when audio APIs are used and PulseAudio is not running.
+- [QTBUG-39949] Fixed QMediaRecorder::setOutputLocation() not working with QUrl::fromLocalFile().
+
+OS X
+----
+
+- Application doesn't freeze anymore when changing the system audio output device while audio
+  is being played with QSoundEffect or QAudioOutput.
+- [QTBUG-38666] Video frames are now correctly positioned on screen when playing a live stream
+  in a QVideoWidget. This also applies to iOS.
+- [QTBUG-38668] Fixed crash when setting QMediaRecorder's output location to a URL containing
+  nonstandard characters.
+
+Windows
+-------
+
+- The DirectShow camera backend has been almost entirely rewritten. It doesn't provide any new
+  feature but it now works as it should.
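The QTBUG-40208 entry above is about changing QAudioOutput's notify interval while playback is already running. Below is a minimal sketch of that call pattern against the public Qt 5 API; the format values, the silent source buffer, and the interval numbers are illustrative assumptions, not taken from this commit.

#include <QtMultimedia/QAudioOutput>
#include <QtMultimedia/QAudioFormat>
#include <QtCore/QCoreApplication>
#include <QtCore/QByteArray>
#include <QtCore/QBuffer>
#include <QtCore/QtDebug>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    // 16-bit mono PCM at 8 kHz; one second of silence stands in for real audio data.
    QAudioFormat format;
    format.setSampleRate(8000);
    format.setChannelCount(1);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setSampleType(QAudioFormat::SignedInt);

    QByteArray silence(8000 * 2, '\0');
    QBuffer source(&silence);
    source.open(QIODevice::ReadOnly);

    QAudioOutput output(format);
    QObject::connect(&output, &QAudioOutput::notify, [&output]() {
        qDebug() << "processed" << output.processedUSecs() << "us";
    });

    output.setNotifyInterval(100);   // before start(): always supported
    output.start(&source);
    output.setNotifyInterval(250);   // while active: the case QTBUG-40208 fixes on Android

    return app.exec();
}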
@@ -31,7 +31,8 @@
 **
 ****************************************************************************/
 
-import QtQuick 2.0
+import QtQuick 2.1
+import QtQuick.Window 2.1
 
 Effect {
     id: root
@@ -49,6 +50,7 @@ Effect {
 
     property real posX: -1
    property real posY: -1
+    property real pixDens: Screen.pixelDensity
 
    QtObject {
        id: d
@@ -31,7 +31,8 @@
 **
 ****************************************************************************/
 
-import QtQuick 2.0
+import QtQuick 2.1
+import QtQuick.Window 2.1
 
 Effect {
     parameters: ListModel {
@@ -48,6 +49,7 @@ Effect {
     // Transform slider values, and bind result to shader uniforms
     property real amplitude: parameters.get(0).value * 0.03
     property real n: parameters.get(1).value * 7
+    property real pixDens: Screen.pixelDensity
 
     property real time: 0
     NumberAnimation on time { loops: Animation.Infinite; from: 0; to: Math.PI * 2; duration: 600 }
@@ -43,7 +43,7 @@ ListModel {
     ListElement { name: "Emboss"; source: "EffectEmboss.qml" }
     ListElement { name: "Glow"; source: "EffectGlow.qml" }
     ListElement { name: "Isolate"; source: "EffectIsolate.qml" }
-    //ListElement { name: "Magnify"; source: "EffectMagnify.qml" }
+    ListElement { name: "Magnify"; source: "EffectMagnify.qml" }
     ListElement { name: "Page curl"; source: "EffectPageCurl.qml" }
     ListElement { name: "Pixelate"; source: "EffectPixelate.qml" }
     ListElement { name: "Posterize"; source: "EffectPosterize.qml" }
@@ -50,12 +50,15 @@ uniform float targetWidth;
 uniform float targetHeight;
 uniform float posX;
 uniform float posY;
+uniform float pixDens;
 
 void main()
 {
     vec2 tc = qt_TexCoord0;
     vec2 center = vec2(posX, posY);
     vec2 xy = gl_FragCoord.xy - center.xy;
+    xy.x -= (pixDens * 14.0);
+    xy.y -= (pixDens * 29.0);
     float r = sqrt(xy.x * xy.x + xy.y * xy.y);
     if (r < radius) {
         float h = diffractionIndex * 0.5 * radius;
@@ -55,12 +55,13 @@ const int ITER = 7;
 const float RATE = 0.1;
 uniform float amplitude;
 uniform float n;
+uniform float pixDens;
 
 void main()
 {
     vec2 uv = qt_TexCoord0.xy;
     vec2 tc = uv;
-    vec2 p = vec2(-1.0 + 2.0 * gl_FragCoord.x / targetWidth, -(-1.0 + 2.0 * gl_FragCoord.y / targetHeight));
+    vec2 p = vec2(-1.0 + 2.0 * (gl_FragCoord.x - (pixDens * 14.0)) / targetWidth, -(-1.0 + 2.0 * (gl_FragCoord.y - (pixDens * 29.0)) / targetHeight));
     float diffx = 0.0;
     float diffy = 0.0;
     vec4 col;
@@ -12,10 +12,6 @@ win32 {
     qtCompileTest(evr)
 } else:mac {
     qtCompileTest(avfoundation)
-} else:android:!android-no-sdk {
-    SDK_ROOT = $$(ANDROID_SDK_ROOT)
-    isEmpty(SDK_ROOT): SDK_ROOT = $$DEFAULT_ANDROID_SDK_ROOT
-    !exists($$SDK_ROOT/platforms/android-11/android.jar): error("QtMultimedia for Android requires API level 11")
 } else:qnx {
     qtCompileTest(mmrenderer)
 } else {
@@ -25,6 +21,7 @@ win32 {
         qtCompileTest(gstreamer_photography)
         qtCompileTest(gstreamer_encodingprofiles)
        qtCompileTest(gstreamer_appsrc)
+        qtCompileTest(linux_v4l)
     }
     qtCompileTest(resourcepolicy)
     qtCompileTest(gpu_vivante)
@@ -100,6 +100,8 @@ config_gstreamer_appsrc {
     LIBS_PRIVATE += -lgstapp-0.10
 }
 
+config_linux_v4l: DEFINES += USE_V4L
+
 HEADERS += $$PRIVATE_HEADERS
 
 DESTDIR = $$QT.multimedia.libs
@@ -317,8 +317,10 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
     mMediaPlayer->setDataSource(mediaPath);
     mMediaPlayer->prepareAsync();
 
-    if (!reloading)
+    if (!reloading) {
         Q_EMIT mediaChanged(mMediaContent);
+        Q_EMIT actualMediaLocationChanged(mediaPath);
+    }
 
     resetBufferingProgress();
 }
@@ -72,6 +72,7 @@ public:
 
 Q_SIGNALS:
     void metaDataUpdated();
+    void actualMediaLocationChanged(const QString &url);
 
 public Q_SLOTS:
     void setPosition(qint64 position) Q_DECL_OVERRIDE;
@@ -45,8 +45,8 @@ QAndroidMediaService::QAndroidMediaService(QObject *parent)
 {
     mMediaControl = new QAndroidMediaPlayerControl;
     mMetadataControl = new QAndroidMetaDataReaderControl;
-    connect(mMediaControl, SIGNAL(mediaChanged(QMediaContent)),
-            mMetadataControl, SLOT(onMediaChanged(QMediaContent)));
+    connect(mMediaControl, SIGNAL(actualMediaLocationChanged(QString)),
+            mMetadataControl, SLOT(onMediaChanged(QString)));
     connect(mMediaControl, SIGNAL(metaDataUpdated()),
             mMetadataControl, SLOT(onUpdateMetaData()));
 }
@@ -93,18 +93,18 @@ QStringList QAndroidMetaDataReaderControl::availableMetaData() const
     return m_metadata.keys();
 }
 
-void QAndroidMetaDataReaderControl::onMediaChanged(const QMediaContent &media)
+void QAndroidMetaDataReaderControl::onMediaChanged(const QString &url)
 {
     if (!m_retriever)
         return;
 
-    m_mediaContent = media;
+    m_mediaLocation = url;
     updateData();
 }
 
 void QAndroidMetaDataReaderControl::onUpdateMetaData()
 {
-    if (!m_retriever || m_mediaContent.isNull())
+    if (!m_retriever || m_mediaLocation.isEmpty())
         return;
 
     updateData();
@@ -114,8 +114,8 @@ void QAndroidMetaDataReaderControl::updateData()
 {
     m_metadata.clear();
 
-    if (!m_mediaContent.isNull()) {
-        if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) {
+    if (!m_mediaLocation.isEmpty()) {
+        if (m_retriever->setDataSource(m_mediaLocation)) {
             QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType);
             if (!mimeType.isNull())
                 m_metadata.insert(QMediaMetaData::MediaType, mimeType);
@@ -54,13 +54,13 @@ public:
     QStringList availableMetaData() const Q_DECL_OVERRIDE;
 
 public Q_SLOTS:
-    void onMediaChanged(const QMediaContent &media);
+    void onMediaChanged(const QString &url);
     void onUpdateMetaData();
 
 private:
     void updateData();
 
-    QMediaContent m_mediaContent;
+    QString m_mediaLocation;
     bool m_available;
     QVariantMap m_metadata;
 
@@ -35,9 +35,24 @@
 
 #include <QtCore/private/qjnihelpers_p.h>
 #include <QtCore/private/qjni_p.h>
+#include <QtCore/QUrl>
+#include <qdebug.h>
 
 QT_BEGIN_NAMESPACE
 
+static bool exceptionCheckAndClear(JNIEnv *env)
+{
+    if (Q_UNLIKELY(env->ExceptionCheck())) {
+#ifdef QT_DEBUG
+        env->ExceptionDescribe();
+#endif // QT_DEBUG
+        env->ExceptionClear();
+        return true;
+    }
+
+    return false;
+}
+
 AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
 {
     m_metadataRetriever = QJNIObjectPrivate("android/media/MediaMetadataRetriever");
@@ -68,55 +83,105 @@ void AndroidMediaMetadataRetriever::release()
     m_metadataRetriever.callMethod<void>("release");
 }
 
-bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
+bool AndroidMediaMetadataRetriever::setDataSource(const QString &urlString)
 {
     if (!m_metadataRetriever.isValid())
         return false;
 
     QJNIEnvironmentPrivate env;
+    QUrl url(urlString);
 
-    bool loaded = false;
-
-    QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.toString());
-
-    QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri",
-                                                                       "parse",
-                                                                       "(Ljava/lang/String;)Landroid/net/Uri;",
-                                                                       string.object());
-    if (env->ExceptionCheck()) {
-        env->ExceptionClear();
+    if (url.isLocalFile()) { // also includes qrc files (copied to a temp file)
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path());
+        QJNIObjectPrivate fileInputStream("java/io/FileInputStream",
+                                          "(Ljava/lang/String;)V",
+                                          string.object());
+
+        if (exceptionCheckAndClear(env))
+            return false;
+
+        QJNIObjectPrivate fd = fileInputStream.callObjectMethod("getFD",
+                                                                "()Ljava/io/FileDescriptor;");
+        if (exceptionCheckAndClear(env)) {
+            fileInputStream.callMethod<void>("close");
+            exceptionCheckAndClear(env);
+            return false;
+        }
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/io/FileDescriptor;)V",
+                                             fd.object());
+
+        bool ok = !exceptionCheckAndClear(env);
+
+        fileInputStream.callMethod<void>("close");
+        exceptionCheckAndClear(env);
+
+        if (!ok)
+            return false;
+    } else if (url.scheme() == QLatin1String("assets")) {
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path().mid(1)); // remove first '/'
+        QJNIObjectPrivate activity(QtAndroidPrivate::activity());
+        QJNIObjectPrivate assetManager = activity.callObjectMethod("getAssets",
+                                                                   "()Landroid/content/res/AssetManager;");
+        QJNIObjectPrivate assetFd = assetManager.callObjectMethod("openFd",
+                                                                  "(Ljava/lang/String;)Landroid/content/res/AssetFileDescriptor;",
+                                                                  string.object());
+        if (exceptionCheckAndClear(env))
+            return false;
+
+        QJNIObjectPrivate fd = assetFd.callObjectMethod("getFileDescriptor",
+                                                        "()Ljava/io/FileDescriptor;");
+        if (exceptionCheckAndClear(env)) {
+            assetFd.callMethod<void>("close");
+            exceptionCheckAndClear(env);
+            return false;
+        }
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/io/FileDescriptor;JJ)V",
+                                             fd.object(),
+                                             assetFd.callMethod<jlong>("getStartOffset"),
+                                             assetFd.callMethod<jlong>("getLength"));
+
+        bool ok = !exceptionCheckAndClear(env);
+
+        assetFd.callMethod<void>("close");
+        exceptionCheckAndClear(env);
+
+        if (!ok)
+            return false;
+    } else if (QtAndroidPrivate::androidSdkVersion() >= 14) {
+        // On API levels >= 14, only setDataSource(String, Map<String, String>) accepts remote media
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString);
+        QJNIObjectPrivate hash("java/util/HashMap");
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/lang/String;Ljava/util/Map;)V",
+                                             string.object(),
+                                             hash.object());
+        if (exceptionCheckAndClear(env))
+            return false;
     } else {
+        // While on API levels < 14, only setDataSource(Context, Uri) is available and works for
+        // remote media...
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString);
+        QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri",
+                                                                           "parse",
+                                                                           "(Ljava/lang/String;)Landroid/net/Uri;",
+                                                                           string.object());
+        if (exceptionCheckAndClear(env))
+            return false;
+
         m_metadataRetriever.callMethod<void>("setDataSource",
                                              "(Landroid/content/Context;Landroid/net/Uri;)V",
                                              QtAndroidPrivate::activity(),
                                              uri.object());
-        if (env->ExceptionCheck())
-            env->ExceptionClear();
-        else
-            loaded = true;
+        if (exceptionCheckAndClear(env))
+            return false;
     }
 
-    return loaded;
-}
-
-bool AndroidMediaMetadataRetriever::setDataSource(const QString &path)
-{
-    if (!m_metadataRetriever.isValid())
-        return false;
-
-    QJNIEnvironmentPrivate env;
-
-    bool loaded = false;
-
-    m_metadataRetriever.callMethod<void>("setDataSource",
-                                         "(Ljava/lang/String;)V",
-                                         QJNIObjectPrivate::fromString(path).object());
-    if (env->ExceptionCheck())
-        env->ExceptionClear();
-    else
-        loaded = true;
-
-    return loaded;
+    return true;
 }
 
 QT_END_NAMESPACE
@@ -35,7 +35,6 @@
 #define ANDROIDMEDIAMETADATARETRIEVER_H
 
 #include <QtCore/private/qjni_p.h>
-#include <qurl.h>
 
 QT_BEGIN_NAMESPACE
 
@@ -73,8 +72,7 @@ public:
 
     QString extractMetadata(MetadataKey key);
     void release();
-    bool setDataSource(const QUrl &url);
-    bool setDataSource(const QString &path);
+    bool setDataSource(const QString &url);
 
 private:
     QJNIObjectPrivate m_metadataRetriever;
@@ -135,9 +135,9 @@ QMediaControl *AVFCameraService::requestControl(const char *name)
 void AVFCameraService::releaseControl(QMediaControl *control)
 {
     if (m_videoOutput == control) {
-        m_videoOutput = 0;
         m_session->setVideoOutput(0);
-        delete control;
+        delete m_videoOutput;
+        m_videoOutput = 0;
     }
 }
 
@@ -118,14 +118,15 @@ void AVFMediaPlayerService::releaseControl(QMediaControl *control)
 #ifdef QT_DEBUG_AVF
     qDebug() << Q_FUNC_INFO << control;
 #endif
-#if defined(Q_OS_OSX)
     if (m_videoOutput == control) {
+#if defined(Q_OS_OSX)
         AVFVideoRendererControl *renderControl = qobject_cast<AVFVideoRendererControl*>(m_videoOutput);
         if (renderControl)
             renderControl->setSurface(0);
+#endif
         m_videoOutput = 0;
         m_session->setVideoOutput(0);
+
         delete control;
     }
-#endif
 }
@@ -148,6 +148,9 @@ private:
         QByteArray rawData;
     };
 
+    void setAudioAvailable(bool available);
+    void setVideoAvailable(bool available);
+
     AVFMediaPlayerService *m_service;
     AVFVideoOutput *m_videoOutput;
 
@@ -70,15 +70,11 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     AVPlayerItem *m_playerItem;
     AVPlayerLayer *m_playerLayer;
     NSURL *m_URL;
-    bool m_audioAvailable;
-    bool m_videoAvailable;
 }
 
 @property (readonly, getter=player) AVPlayer* m_player;
 @property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
 @property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
-@property (readonly, getter=audioAvailable) bool m_audioAvailable;
-@property (readonly, getter=videoAvailable) bool m_videoAvailable;
 @property (readonly, getter=session) AVFMediaPlayerSession* m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session;
@@ -96,7 +92,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 
 @implementation AVFMediaPlayerSessionObserver
 
-@synthesize m_player, m_playerItem, m_playerLayer, m_audioAvailable, m_videoAvailable, m_session;
+@synthesize m_player, m_playerItem, m_playerLayer, m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session
 {
@@ -186,18 +182,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         return;
     }
 
-    m_audioAvailable = false;
-    m_videoAvailable = false;
-
-    //Check each track of asset for audio and video content
-    NSArray *tracks = [asset tracks];
-    for (AVAssetTrack *track in tracks) {
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
-            m_audioAvailable = true;
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
-            m_videoAvailable = true;
-    }
-
     //At this point we're ready to set up for playback of the asset.
     //Stop observing our prior AVPlayerItem, if we have one.
     if (m_playerItem)
@@ -258,18 +242,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
         [m_playerLayer retain];
         m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+        m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-        //Get the native size of the new item, and reset the bounds of the player layer
-        AVAsset *asset = m_playerItem.asset;
-        if (asset) {
-            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-            if ([tracks count]) {
-                AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-            }
-        }
-
     }
 
     //Observe the AVPlayer "currentItem" property to find out when any
@@ -366,22 +339,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 {
     AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
     if (m_playerItem != newPlayerItem)
-    {
         m_playerItem = newPlayerItem;
 
-        //Get the native size of the new item, and reset the bounds of the player layer
-        //AVAsset *asset = m_playerItem.asset;
-        AVAsset *asset = [m_playerItem asset];
-        if (asset) {
-            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-            if ([tracks count]) {
-                AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-            }
-        }
-
-    }
     if (self.session)
         QMetaObject::invokeMethod(m_session, "processCurrentItemChanged", Qt::AutoConnection);
 }
@@ -513,6 +472,9 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
     m_resources = content;
     m_mediaStream = stream;
 
+    setAudioAvailable(false);
+    setVideoAvailable(false);
+
     QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
 
     if (content.isNull() || content.canonicalUrl().isEmpty()) {
@@ -582,14 +544,32 @@ bool AVFMediaPlayerSession::isMuted() const
     return m_muted;
 }
 
+void AVFMediaPlayerSession::setAudioAvailable(bool available)
+{
+    if (m_audioAvailable == available)
+        return;
+
+    m_audioAvailable = available;
+    Q_EMIT audioAvailableChanged(available);
+}
+
 bool AVFMediaPlayerSession::isAudioAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer audioAvailable];
+    return m_audioAvailable;
+}
+
+void AVFMediaPlayerSession::setVideoAvailable(bool available)
+{
+    if (m_videoAvailable == available)
+        return;
+
+    m_videoAvailable = available;
+    Q_EMIT videoAvailableChanged(available);
 }
 
 bool AVFMediaPlayerSession::isVideoAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer videoAvailable];
+    return m_videoAvailable;
 }
 
 bool AVFMediaPlayerSession::isSeekable() const
@@ -802,16 +782,37 @@ void AVFMediaPlayerSession::processLoadStateChange()
     bool isPlaying = (m_state != QMediaPlayer::StoppedState);
 
     if (currentStatus == AVPlayerStatusReadyToPlay) {
+        AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
+        if (playerItem) {
+            // Check each track for audio and video content
+            AVAssetTrack *videoTrack = nil;
+            NSArray *tracks = playerItem.tracks;
+            for (AVPlayerItemTrack *track in tracks) {
+                AVAssetTrack *assetTrack = track.assetTrack;
+                if (assetTrack) {
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio])
+                        setAudioAvailable(true);
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
+                        setVideoAvailable(true);
+                        if (!videoTrack)
+                            videoTrack = assetTrack;
+                    }
+                }
+            }
+
+            // Get the native size of the video, and reset the bounds of the player layer
+            AVPlayerLayer *playerLayer = [(AVFMediaPlayerSessionObserver*)m_observer playerLayer];
+            if (videoTrack && playerLayer) {
+                playerLayer.bounds = CGRectMake(0.0f, 0.0f,
+                                                videoTrack.naturalSize.width,
+                                                videoTrack.naturalSize.height);
+            }
+        }
+
         qint64 currentDuration = duration();
         if (m_duration != currentDuration)
             Q_EMIT durationChanged(m_duration = currentDuration);
 
-        if (m_audioAvailable != isAudioAvailable())
-            Q_EMIT audioAvailableChanged(m_audioAvailable = !m_audioAvailable);
-
-        if (m_videoAvailable != isVideoAvailable())
-            Q_EMIT videoAvailableChanged(m_videoAvailable = !m_videoAvailable);
-
         newStatus = isPlaying ? QMediaPlayer::BufferedMedia : QMediaPlayer::LoadedMedia;
 
         if (m_state == QMediaPlayer::PlayingState && [(AVFMediaPlayerSessionObserver*)m_observer player]) {
@@ -64,8 +64,10 @@ AVFVideoWidget::~AVFVideoWidget()
     qDebug() << Q_FUNC_INFO;
 #endif
 
-    if (m_playerLayer)
+    if (m_playerLayer) {
+        [m_playerLayer removeFromSuperlayer];
         [m_playerLayer release];
+    }
 }
 
 QSize AVFVideoWidget::sizeHint() const
@@ -61,8 +61,10 @@ AVFVideoWindowControl::AVFVideoWindowControl(QObject *parent)
 
 AVFVideoWindowControl::~AVFVideoWindowControl()
 {
-    if (m_playerLayer)
+    if (m_playerLayer) {
+        [m_playerLayer removeFromSuperlayer];
         [m_playerLayer release];
+    }
 }
 
 WId AVFVideoWindowControl::winId() const
@@ -698,14 +698,14 @@ void CoreAudioOutput::audioThreadStop()
 {
     stopTimers();
     if (m_audioThreadState.testAndSetAcquire(Running, Stopped))
-        m_threadFinished.wait(&m_mutex);
+        m_threadFinished.wait(&m_mutex, 500);
 }
 
 void CoreAudioOutput::audioThreadDrain()
 {
     stopTimers();
     if (m_audioThreadState.testAndSetAcquire(Running, Draining))
-        m_threadFinished.wait(&m_mutex);
+        m_threadFinished.wait(&m_mutex, 500);
 }
 
 void CoreAudioOutput::audioDeviceStop()
@@ -83,6 +83,8 @@ config_gstreamer_photography {
     DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API
 }
 
+config_linux_v4l: DEFINES += USE_V4L
+
 OTHER_FILES += \
     camerabin.json
 
@@ -43,7 +43,10 @@
 #include <private/qgstutils_p.h>
 
 #include <private/qcore_unix_p.h>
+
+#if defined(USE_V4L)
 #include <linux/videodev2.h>
+#endif
 
 QT_BEGIN_NAMESPACE
 
@@ -15,7 +15,6 @@ HEADERS += $$PWD/qgstreamercaptureservice.h \
     $$PWD/qgstreamerrecordercontrol.h \
     $$PWD/qgstreamermediacontainercontrol.h \
     $$PWD/qgstreamercameracontrol.h \
-    $$PWD/qgstreamerv4l2input.h \
     $$PWD/qgstreamercapturemetadatacontrol.h \
     $$PWD/qgstreamerimagecapturecontrol.h \
     $$PWD/qgstreamerimageencode.h \
@@ -28,7 +27,6 @@ SOURCES += $$PWD/qgstreamercaptureservice.cpp \
     $$PWD/qgstreamerrecordercontrol.cpp \
     $$PWD/qgstreamermediacontainercontrol.cpp \
     $$PWD/qgstreamercameracontrol.cpp \
-    $$PWD/qgstreamerv4l2input.cpp \
     $$PWD/qgstreamercapturemetadatacontrol.cpp \
     $$PWD/qgstreamerimagecapturecontrol.cpp \
     $$PWD/qgstreamerimageencode.cpp \
@@ -37,13 +35,18 @@ SOURCES += $$PWD/qgstreamercaptureservice.cpp \
 # Camera usage with gstreamer needs to have
 #CONFIG += use_gstreamer_camera
 
-use_gstreamer_camera {
+use_gstreamer_camera:config_linux_v4l {
     DEFINES += USE_GSTREAMER_CAMERA
+
+    OTHER_FILES += \
+        mediacapturecamera.json
+
+    HEADERS += \
+        $$PWD/qgstreamerv4l2input.h
+    SOURCES += \
+        $$PWD/qgstreamerv4l2input.cpp
 
-    OTHER_FILES += \
-        mediacapturecamera.json
 } else {
     OTHER_FILES += \
         mediacapture.json
 }
 
@@ -40,9 +40,12 @@
 #include "qgstreamerimageencode.h"
 #include "qgstreamercameracontrol.h"
 #include <private/qgstreamerbushelper_p.h>
-#include "qgstreamerv4l2input.h"
 #include "qgstreamercapturemetadatacontrol.h"
 
+#if defined(USE_GSTREAMER_CAMERA)
+#include "qgstreamerv4l2input.h"
+#endif
+
 #include "qgstreamerimagecapturecontrol.h"
 #include <private/qgstreameraudioinputselector_p.h>
 #include <private/qgstreamervideoinputdevicecontrol_p.h>
@@ -66,7 +69,9 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
     m_cameraControl = 0;
     m_metaDataControl = 0;
 
+#if defined(USE_GSTREAMER_CAMERA)
     m_videoInput = 0;
+#endif
     m_audioInputSelector = 0;
     m_videoInputDevice = 0;
 
@@ -82,6 +87,7 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
         m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::Audio, this);
     }
 
+#if defined(USE_GSTREAMER_CAMERA)
     if (service == Q_MEDIASERVICE_CAMERA) {
         m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::AudioAndVideo, this);
         m_cameraControl = new QGstreamerCameraControl(m_captureSession);
@@ -103,6 +109,7 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
 #endif
         m_imageCaptureControl = new QGstreamerImageCaptureControl(m_captureSession);
     }
+#endif
 
     m_audioInputSelector = new QGstreamerAudioInputSelector(this);
     connect(m_audioInputSelector, SIGNAL(activeInputChanged(QString)), m_captureSession, SLOT(setCaptureDevice(QString)));
@@ -70,7 +70,9 @@ private:
 
     QGstreamerCaptureSession *m_captureSession;
     QGstreamerCameraControl *m_cameraControl;
+#if defined(USE_GSTREAMER_CAMERA)
     QGstreamerV4L2Input *m_videoInput;
+#endif
     QGstreamerCaptureMetaDataControl *m_metaDataControl;
 
     QAudioInputSelectorControl *m_audioInputSelector;
@@ -70,7 +70,8 @@ QOpenSLESAudioOutput::QOpenSLESAudioOutput(const QByteArray &device)
       m_periodSize(0),
       m_elapsedTime(0),
      m_processedBytes(0),
-      m_availableBuffers(BUFFER_COUNT)
+      m_availableBuffers(BUFFER_COUNT),
+      m_eventMask(SL_PLAYEVENT_HEADATEND)
 {
 #ifndef ANDROID
     m_streamType = -1;
@@ -190,7 +191,33 @@ int QOpenSLESAudioOutput::bufferSize() const
 
 void QOpenSLESAudioOutput::setNotifyInterval(int ms)
 {
-    m_notifyInterval = ms > 0 ? ms : 0;
+    const int newInterval = ms > 0 ? ms : 0;
+
+    if (newInterval == m_notifyInterval)
+        return;
+
+    const SLuint32 newEvenMask = newInterval == 0 ? m_eventMask & ~SL_PLAYEVENT_HEADATNEWPOS
+                                                  : m_eventMask & SL_PLAYEVENT_HEADATNEWPOS;
+
+    if (m_state == QAudio::StoppedState) {
+        m_eventMask = newEvenMask;
+        m_notifyInterval = newInterval;
+        return;
+    }
+
+    if (newEvenMask != m_eventMask
+        && SL_RESULT_SUCCESS != (*m_playItf)->SetCallbackEventsMask(m_playItf, newEvenMask)) {
+        return;
+    }
+
+    m_eventMask = newEvenMask;
+
+    if (newInterval && SL_RESULT_SUCCESS != (*m_playItf)->SetPositionUpdatePeriod(m_playItf,
+                                                                                  newInterval)) {
+        return;
+    }
+
+    m_notifyInterval = newInterval;
 }
 
 int QOpenSLESAudioOutput::notifyInterval() const
@@ -480,13 +507,12 @@ bool QOpenSLESAudioOutput::preparePlayer()
         return false;
     }
 
-    SLuint32 mask = SL_PLAYEVENT_HEADATEND;
     if (m_notifyInterval && SL_RESULT_SUCCESS == (*m_playItf)->SetPositionUpdatePeriod(m_playItf,
                                                                                        m_notifyInterval)) {
-        mask |= SL_PLAYEVENT_HEADATNEWPOS;
+        m_eventMask |= SL_PLAYEVENT_HEADATNEWPOS;
     }
 
-    if (SL_RESULT_SUCCESS != (*m_playItf)->SetCallbackEventsMask(m_playItf, mask)) {
+    if (SL_RESULT_SUCCESS != (*m_playItf)->SetCallbackEventsMask(m_playItf, m_eventMask)) {
         setError(QAudio::FatalError);
         return false;
     }
@@ -112,6 +112,7 @@ private:
     qint64 m_elapsedTime;
     qint64 m_processedBytes;
     QAtomicInt m_availableBuffers;
+    SLuint32 m_eventMask;
 
     qint32 m_streamType;
     QTime m_clockStamp;
@@ -44,7 +44,9 @@ unix:!mac:!android {
     config_alsa: SUBDIRS += alsa
 
     # v4l is turned off because it is not supported in Qt 5
-    # !maemo*:SUBDIRS += v4l
+    # config_linux_v4l {
+    #     !maemo*:SUBDIRS += v4l
+    # }
 }
 
 mac:!simulator {
@@ -404,6 +404,7 @@ QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)
     Q_UNUSED(mode)
 
     QList<QByteArray> devices;
+#ifndef Q_OS_WINCE
     //enumerate device fullnames through directshow api
     CoInitialize(NULL);
     ICreateDevEnum *pDevEnum = NULL;
@@ -455,6 +456,35 @@ QList<QByteArray> QWindowsAudioDeviceInfo::availableDevices(QAudio::Mode mode)
         }
     }
     CoUninitialize();
+#else // Q_OS_WINCE
+    if (mode == QAudio::AudioOutput) {
+        WAVEOUTCAPS woc;
+        unsigned long iNumDevs,i;
+        iNumDevs = waveOutGetNumDevs();
+        for (i=0;i<iNumDevs;i++) {
+            if (waveOutGetDevCaps(i, &woc, sizeof(WAVEOUTCAPS))
+                == MMSYSERR_NOERROR) {
+                QByteArray device;
+                QDataStream ds(&device, QIODevice::WriteOnly);
+                ds << quint32(i) << QString::fromWCharArray(woc.szPname);
+                devices.append(device);
+            }
+        }
+    } else {
+        WAVEINCAPS woc;
+        unsigned long iNumDevs,i;
+        iNumDevs = waveInGetNumDevs();
+        for (i=0;i<iNumDevs;i++) {
+            if (waveInGetDevCaps(i, &woc, sizeof(WAVEINCAPS))
+                == MMSYSERR_NOERROR) {
+                QByteArray device;
+                QDataStream ds(&device, QIODevice::WriteOnly);
+                ds << quint32(i) << QString::fromWCharArray(woc.szPname);
+                devices.append(device);
+            }
+        }
+    }
+#endif // !Q_OS_WINCE
 
     return devices;
 }
@@ -5,7 +5,8 @@ PLUGIN_TYPE = audio
 PLUGIN_CLASS_NAME = QWindowsAudioPlugin
 load(qt_plugin)
 
-LIBS += -lwinmm -lstrmiids -lole32 -loleaut32
+LIBS += -lstrmiids -lole32 -loleaut32
+!wince*:LIBS += -lwinmm
 
 HEADERS += \
     qwindowsaudioplugin.h \
@@ -1395,14 +1395,17 @@ int MFPlayerSession::bufferStatus()
     if (!m_netsourceStatistics)
         return 0;
     PROPVARIANT var;
+    PropVariantInit(&var);
     PROPERTYKEY key;
     key.fmtid = MFNETSOURCE_STATISTICS;
     key.pid = MFNETSOURCE_BUFFERPROGRESS_ID;
     int progress = -1;
-    if (SUCCEEDED(m_netsourceStatistics->GetValue(key, &var))) {
+    // GetValue returns S_FALSE if the property is not available, which has
+    // a value > 0. We therefore can't use the SUCCEEDED macro here.
+    if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
         progress = var.lVal;
+        PropVariantClear(&var);
     }
-    PropVariantClear(&var);
 
 #ifdef DEBUG_MEDIAFOUNDATION
     qDebug() << "bufferStatus: progress = " << progress;
@@ -1413,22 +1416,30 @@ int MFPlayerSession::bufferStatus()
 
 QMediaTimeRange MFPlayerSession::availablePlaybackRanges()
 {
-    if (!m_netsourceStatistics)
-        return QMediaTimeRange();
-
-    qint64 start = 0, end = 0;
-    PROPVARIANT var;
-    PROPERTYKEY key;
-    key.fmtid = MFNETSOURCE_STATISTICS;
-    key.pid = MFNETSOURCE_SEEKRANGESTART_ID;
-    if (SUCCEEDED(m_netsourceStatistics->GetValue(key, &var))) {
-        start = qint64(var.uhVal.QuadPart / 10000);
-        key.pid = MFNETSOURCE_SEEKRANGEEND_ID;
-        if (SUCCEEDED(m_netsourceStatistics->GetValue(key, &var))) {
-            end = qint64(var.uhVal.QuadPart / 10000);
+    // defaults to the whole media
+    qint64 start = 0;
+    qint64 end = qint64(m_duration / 10000);
+
+    if (m_netsourceStatistics) {
+        PROPVARIANT var;
+        PropVariantInit(&var);
+        PROPERTYKEY key;
+        key.fmtid = MFNETSOURCE_STATISTICS;
+        key.pid = MFNETSOURCE_SEEKRANGESTART_ID;
+        // GetValue returns S_FALSE if the property is not available, which has
+        // a value > 0. We therefore can't use the SUCCEEDED macro here.
+        if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+            start = qint64(var.uhVal.QuadPart / 10000);
+            PropVariantClear(&var);
+            PropVariantInit(&var);
+            key.pid = MFNETSOURCE_SEEKRANGEEND_ID;
+            if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+                end = qint64(var.uhVal.QuadPart / 10000);
+                PropVariantClear(&var);
+            }
         }
     }
-    PropVariantClear(&var);
+
     return QMediaTimeRange(start, end);
 }
 
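The comment added in both hunks above hinges on HRESULT semantics: S_FALSE (value 1) is a success code, so SUCCEEDED() accepts it even though GetValue() supplied no data, which is why the code now compares against S_OK directly. A small stand-alone sketch of that distinction; the constants come from the Windows SDK, not from this patch.

#include <windows.h>
#include <cstdio>

int main()
{
    // S_OK is 0, S_FALSE is 1; SUCCEEDED() only checks that the value is non-negative,
    // so it treats both as success.
    HRESULT values[] = { S_OK, S_FALSE, E_FAIL };
    for (HRESULT hr : values)
        std::printf("hr=0x%08lX SUCCEEDED=%d (hr == S_OK)=%d\n",
                    static_cast<unsigned long>(hr),
                    SUCCEEDED(hr) ? 1 : 0,
                    hr == S_OK ? 1 : 0);
    return 0;
}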
@@ -212,7 +212,7 @@ public:
         stride /= 4;
     }
 
-    m_width = qreal(m_frame.width() / stride);
+    m_width = qreal(m_frame.width()) / stride;
     textureSize.setWidth(stride);
 
     if (m_textureSize != textureSize) {
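The final hunk moves the cast outside the division: qreal(m_frame.width() / stride) performs integer division first and truncates, while qreal(m_frame.width()) / stride promotes the operation to floating point. A minimal illustration with made-up numbers standing in for the frame width and adjusted stride:

#include <cstdio>

int main()
{
    // Hypothetical stand-ins for m_frame.width() and stride.
    int width = 1278;
    int stride = 1280;

    double truncated = double(width / stride);   // integer division happens first -> 0.0
    double exact     = double(width) / stride;   // floating-point division -> ~0.9984

    std::printf("qreal(width / stride) = %f\n", truncated);
    std::printf("qreal(width) / stride = %f\n", exact);
    return 0;
}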