Merge remote-tracking branch 'origin/5.5' into dev

Change-Id: I17ddd1ce77e07cf75567e2dc723e29d54088f68e
This commit is contained in:
Liang Qi
2015-04-08 21:24:43 +02:00
55 changed files with 814 additions and 421 deletions

View File

@@ -135,8 +135,8 @@ void SpectrumAnalyserThread::calculateSpectrum(const QByteArray &buffer,
if (i>0 && i<m_numSamples/2) if (i>0 && i<m_numSamples/2)
imag = m_output[m_numSamples/2 + i]; imag = m_output[m_numSamples/2 + i];
const qreal magnitude = sqrt(real*real + imag*imag); const qreal magnitude = qSqrt(real*real + imag*imag);
qreal amplitude = SpectrumAnalyserMultiplier * log(magnitude); qreal amplitude = SpectrumAnalyserMultiplier * qLn(magnitude);
// Bound amplitude to [0.0, 1.0] // Bound amplitude to [0.0, 1.0]
m_spectrum[i].clipped = (amplitude > 1.0); m_spectrum[i].clipped = (amplitude > 1.0);

View File

@@ -62,8 +62,9 @@ struct PtrWrapper
template <typename T> template <typename T>
inline QDebug& operator<<(QDebug &debug, const Trace::PtrWrapper<T> &wrapper) inline QDebug& operator<<(QDebug &debug, const Trace::PtrWrapper<T> &wrapper)
{ {
debug.nospace() << "[" << (void*)wrapper.m_ptr << "]"; QDebugStateSaver saver(debug);
return debug.space(); debug.nospace() << '[' << static_cast<const void *>(wrapper.m_ptr) << ']';
return debug;
} }
template<typename T> template<typename T>

View File

@@ -62,8 +62,9 @@ struct PtrWrapper
template <typename T> template <typename T>
inline QDebug &operator<<(QDebug &debug, const Trace::PtrWrapper<T> &wrapper) inline QDebug &operator<<(QDebug &debug, const Trace::PtrWrapper<T> &wrapper)
{ {
debug.nospace() << "[" << (void*)wrapper.m_ptr << "]"; QDebugStateSaver saver(debug);
return debug.space(); debug.nospace() << '[' << static_cast<const void *>(wrapper.m_ptr) << ']';
return debug;
} }
#ifdef ENABLE_TRACE #ifdef ENABLE_TRACE

View File

@@ -34,7 +34,6 @@
#include <QDebug> #include <QDebug>
#include "qgstappsrc_p.h" #include "qgstappsrc_p.h"
#include <QtNetwork>
QGstAppSrc::QGstAppSrc(QObject *parent) QGstAppSrc::QGstAppSrc(QObject *parent)
:QObject(parent) :QObject(parent)

View File

@@ -201,6 +201,14 @@ void QVideoSurfaceGstDelegate::stop()
waitForAsyncEvent(&locker, &m_setupCondition, 500); waitForAsyncEvent(&locker, &m_setupCondition, 500);
} }
void QVideoSurfaceGstDelegate::unlock()
{
QMutexLocker locker(&m_mutex);
m_setupCondition.wakeAll();
m_renderCondition.wakeAll();
}
bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query) bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
{ {
QMutexLocker locker(&m_mutex); QMutexLocker locker(&m_mutex);
@@ -218,6 +226,7 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
{ {
QMutexLocker locker(&m_mutex); QMutexLocker locker(&m_mutex);
m_renderReturn = GST_FLOW_OK;
m_renderBuffer = buffer; m_renderBuffer = buffer;
GstFlowReturn flowReturn = waitForAsyncEvent(&locker, &m_renderCondition, 300) GstFlowReturn flowReturn = waitForAsyncEvent(&locker, &m_renderCondition, 300)
@@ -291,8 +300,11 @@ bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
gst_caps_unref(startCaps); gst_caps_unref(startCaps);
} else if (m_renderBuffer) { } else if (m_renderBuffer) {
GstBuffer *buffer = m_renderBuffer;
m_renderBuffer = 0;
m_renderReturn = GST_FLOW_ERROR;
if (m_activeRenderer && m_surface) { if (m_activeRenderer && m_surface) {
GstBuffer *buffer = m_renderBuffer;
gst_buffer_ref(buffer); gst_buffer_ref(buffer);
locker->unlock(); locker->unlock();
@@ -303,15 +315,11 @@ bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
locker->relock(); locker->relock();
m_renderReturn = rendered if (rendered)
? GST_FLOW_OK m_renderReturn = GST_FLOW_OK;
: GST_FLOW_ERROR;
m_renderCondition.wakeAll();
} else {
m_renderReturn = GST_FLOW_ERROR;
m_renderCondition.wakeAll();
} }
m_renderCondition.wakeAll();
} else { } else {
m_setupCondition.wakeAll(); m_setupCondition.wakeAll();
@@ -415,12 +423,15 @@ void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class)); sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
video_sink_class->show_frame = QGstVideoRendererSink::show_frame;
GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class); GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
base_sink_class->get_caps = QGstVideoRendererSink::get_caps; base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
base_sink_class->set_caps = QGstVideoRendererSink::set_caps; base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation; base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
base_sink_class->stop = QGstVideoRendererSink::stop; base_sink_class->stop = QGstVideoRendererSink::stop;
base_sink_class->render = QGstVideoRendererSink::render; base_sink_class->unlock = QGstVideoRendererSink::unlock;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class); GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
element_class->change_state = QGstVideoRendererSink::change_state; element_class->change_state = QGstVideoRendererSink::change_state;
@@ -517,7 +528,14 @@ gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
return TRUE; return TRUE;
} }
GstFlowReturn QGstVideoRendererSink::render(GstBaseSink *base, GstBuffer *buffer) gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
VO_SINK(base);
sink->delegate->unlock();
return TRUE;
}
GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{ {
VO_SINK(base); VO_SINK(base);
return sink->delegate->render(buffer); return sink->delegate->render(buffer);

View File

@@ -162,6 +162,15 @@ void QVideoSurfaceGstDelegate::stop()
m_started = false; m_started = false;
} }
void QVideoSurfaceGstDelegate::unlock()
{
QMutexLocker locker(&m_mutex);
m_startCanceled = true;
m_setupCondition.wakeAll();
m_renderCondition.wakeAll();
}
bool QVideoSurfaceGstDelegate::isActive() bool QVideoSurfaceGstDelegate::isActive()
{ {
QMutexLocker locker(&m_mutex); QMutexLocker locker(&m_mutex);
@@ -218,8 +227,9 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
void QVideoSurfaceGstDelegate::queuedStart() void QVideoSurfaceGstDelegate::queuedStart()
{ {
QMutexLocker locker(&m_mutex);
if (!m_startCanceled) { if (!m_startCanceled) {
QMutexLocker locker(&m_mutex);
m_started = m_surface->start(m_format); m_started = m_surface->start(m_format);
m_setupCondition.wakeAll(); m_setupCondition.wakeAll();
} }
@@ -238,6 +248,9 @@ void QVideoSurfaceGstDelegate::queuedRender()
{ {
QMutexLocker locker(&m_mutex); QMutexLocker locker(&m_mutex);
if (!m_frame.isValid())
return;
if (m_surface.isNull()) { if (m_surface.isNull()) {
qWarning() << "Rendering video frame to deleted surface, skip the frame"; qWarning() << "Rendering video frame to deleted surface, skip the frame";
m_renderReturn = GST_FLOW_OK; m_renderReturn = GST_FLOW_OK;
@@ -347,6 +360,7 @@ void QVideoSurfaceGstSink::class_init(gpointer g_class, gpointer class_data)
base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc; base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc;
base_sink_class->start = QVideoSurfaceGstSink::start; base_sink_class->start = QVideoSurfaceGstSink::start;
base_sink_class->stop = QVideoSurfaceGstSink::stop; base_sink_class->stop = QVideoSurfaceGstSink::stop;
base_sink_class->unlock = QVideoSurfaceGstSink::unlock;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class); GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
element_class->change_state = QVideoSurfaceGstSink::change_state; element_class->change_state = QVideoSurfaceGstSink::change_state;
@@ -601,6 +615,13 @@ gboolean QVideoSurfaceGstSink::stop(GstBaseSink *base)
return TRUE; return TRUE;
} }
gboolean QVideoSurfaceGstSink::unlock(GstBaseSink *base)
{
VO_SINK(base);
sink->delegate->unlock();
return TRUE;
}
GstFlowReturn QVideoSurfaceGstSink::show_frame(GstVideoSink *base, GstBuffer *buffer) GstFlowReturn QVideoSurfaceGstSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{ {
VO_SINK(base); VO_SINK(base);

View File

@@ -373,6 +373,7 @@ void QDeclarativeAudioEngine::appendFunction(QQmlListProperty<QObject> *property
if (category->name() == QLatin1String("default")) { if (category->name() == QLatin1String("default")) {
engine->m_defaultCategory = category; engine->m_defaultCategory = category;
} }
return;
} }
QDeclarativeAttenuationModel *attenModel = qobject_cast<QDeclarativeAttenuationModel*>(value); QDeclarativeAttenuationModel *attenModel = qobject_cast<QDeclarativeAttenuationModel*>(value);

View File

@@ -86,59 +86,62 @@ Q_CONSTRUCTOR_FUNCTION(qRegisterAudioMetaTypes)
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QAudio::Error error) QDebug operator<<(QDebug dbg, QAudio::Error error)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (error) { switch (error) {
case QAudio::NoError: case QAudio::NoError:
nospace << "NoError"; dbg << "NoError";
break; break;
case QAudio::OpenError: case QAudio::OpenError:
nospace << "OpenError"; dbg << "OpenError";
break; break;
case QAudio::IOError: case QAudio::IOError:
nospace << "IOError"; dbg << "IOError";
break; break;
case QAudio::UnderrunError: case QAudio::UnderrunError:
nospace << "UnderrunError"; dbg << "UnderrunError";
break; break;
case QAudio::FatalError: case QAudio::FatalError:
nospace << "FatalError"; dbg << "FatalError";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, QAudio::State state) QDebug operator<<(QDebug dbg, QAudio::State state)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (state) { switch (state) {
case QAudio::ActiveState: case QAudio::ActiveState:
nospace << "ActiveState"; dbg << "ActiveState";
break; break;
case QAudio::SuspendedState: case QAudio::SuspendedState:
nospace << "SuspendedState"; dbg << "SuspendedState";
break; break;
case QAudio::StoppedState: case QAudio::StoppedState:
nospace << "StoppedState"; dbg << "StoppedState";
break; break;
case QAudio::IdleState: case QAudio::IdleState:
nospace << "IdleState"; dbg << "IdleState";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, QAudio::Mode mode) QDebug operator<<(QDebug dbg, QAudio::Mode mode)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (mode) { switch (mode) {
case QAudio::AudioInput: case QAudio::AudioInput:
nospace << "AudioInput"; dbg << "AudioInput";
break; break;
case QAudio::AudioOutput: case QAudio::AudioOutput:
nospace << "AudioOutput"; dbg << "AudioOutput";
break; break;
} }
return nospace; return dbg;
} }
#endif #endif

View File

@@ -459,49 +459,50 @@ int QAudioFormat::bytesPerFrame() const
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QAudioFormat::Endian endian) QDebug operator<<(QDebug dbg, QAudioFormat::Endian endian)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (endian) { switch (endian) {
case QAudioFormat::BigEndian: case QAudioFormat::BigEndian:
nospace << "BigEndian"; dbg << "BigEndian";
break; break;
case QAudioFormat::LittleEndian: case QAudioFormat::LittleEndian:
nospace << "LittleEndian"; dbg << "LittleEndian";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, QAudioFormat::SampleType type) QDebug operator<<(QDebug dbg, QAudioFormat::SampleType type)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (type) { switch (type) {
case QAudioFormat::SignedInt: case QAudioFormat::SignedInt:
nospace << "SignedInt"; dbg << "SignedInt";
break; break;
case QAudioFormat::UnSignedInt: case QAudioFormat::UnSignedInt:
nospace << "UnSignedInt"; dbg << "UnSignedInt";
break; break;
case QAudioFormat::Float: case QAudioFormat::Float:
nospace << "Float"; dbg << "Float";
break; break;
default: default:
nospace << "Unknown"; dbg << "Unknown";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, const QAudioFormat &f) QDebug operator<<(QDebug dbg, const QAudioFormat &f)
{ {
dbg.nospace() << "QAudioFormat(" << f.sampleRate(); QDebugStateSaver saver(dbg);
dbg.nospace() << "Hz, " << f.sampleSize(); dbg.nospace();
dbg.nospace() << "bit, channelCount=" << f.channelCount(); dbg << "QAudioFormat(" << f.sampleRate() << "Hz, "
dbg.nospace() << ", sampleType=" << f.sampleType(); << f.sampleSize() << "bit, channelCount=" << f.channelCount()
dbg.nospace() << ", byteOrder=" << f.byteOrder(); << ", sampleType=" << f.sampleType() << ", byteOrder=" << f.byteOrder()
dbg.nospace() << ", codec=" << f.codec(); << ", codec=" << f.codec() << ')';
dbg.nospace() << ")";
return dbg.space(); return dbg;
} }
#endif #endif

View File

@@ -33,8 +33,12 @@
#include "qsamplecache_p.h" #include "qsamplecache_p.h"
#include "qwavedecoder_p.h" #include "qwavedecoder_p.h"
#include <QtNetwork>
#include <QtNetwork/QNetworkAccessManager>
#include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>
#include <QtCore/QDebug>
//#define QT_SAMPLECACHE_DEBUG //#define QT_SAMPLECACHE_DEBUG
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE

View File

@@ -44,8 +44,8 @@
#include <QtCore/qcoreapplication.h> #include <QtCore/qcoreapplication.h>
#include <qaudioformat.h> #include <qaudioformat.h>
#include <QtNetwork>
#include <QTime> #include <QTime>
#include <QTimer>
#include "qsoundeffect_pulse_p.h" #include "qsoundeffect_pulse_p.h"

View File

@@ -315,6 +315,11 @@ QMediaPlayerControl::QMediaPlayerControl(QObject *parent):
Setting the media to a null QMediaContent will cause the control to discard all Setting the media to a null QMediaContent will cause the control to discard all
information relating to the current media source and to cease all I/O operations related information relating to the current media source and to cease all I/O operations related
to that media. to that media.
Qt resource files are never passed as is. If the service supports
QMediaServiceProviderHint::StreamPlayback, a \a stream is supplied, pointing to an opened
QFile. Otherwise, the resource is copied into a temporary file and \a media contains the
url to that file.
*/ */
/*! /*!

View File

@@ -96,6 +96,7 @@ public:
bool start(GstCaps *caps); bool start(GstCaps *caps);
void stop(); void stop();
void unlock();
bool proposeAllocation(GstQuery *query); bool proposeAllocation(GstQuery *query);
GstFlowReturn render(GstBuffer *buffer); GstFlowReturn render(GstBuffer *buffer);
@@ -153,7 +154,9 @@ private:
static gboolean stop(GstBaseSink *sink); static gboolean stop(GstBaseSink *sink);
static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer); static gboolean unlock(GstBaseSink *sink);
static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
private: private:
QVideoSurfaceGstDelegate *delegate; QVideoSurfaceGstDelegate *delegate;

View File

@@ -88,6 +88,8 @@ public:
bool start(const QVideoSurfaceFormat &format, int bytesPerLine); bool start(const QVideoSurfaceFormat &format, int bytesPerLine);
void stop(); void stop();
void unlock();
bool isActive(); bool isActive();
QGstBufferPoolInterface *pool() { return m_pool; } QGstBufferPoolInterface *pool() { return m_pool; }
@@ -148,6 +150,8 @@ private:
static gboolean start(GstBaseSink *sink); static gboolean start(GstBaseSink *sink);
static gboolean stop(GstBaseSink *sink); static gboolean stop(GstBaseSink *sink);
static gboolean unlock(GstBaseSink *sink);
static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer); static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
private: private:

View File

@@ -33,7 +33,9 @@
#include "playlistfileparser_p.h" #include "playlistfileparser_p.h"
#include <qfileinfo.h> #include <qfileinfo.h>
#include <QtCore/QDebug>
#include <QtNetwork/QNetworkReply> #include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>
#include "qmediaobject_p.h" #include "qmediaobject_p.h"
#include "qmediametadata.h" #include "qmediametadata.h"

View File

@@ -45,8 +45,8 @@
// We mean it. // We mean it.
// //
#include <QtNetwork>
#include "qtmultimediadefs.h" #include "qtmultimediadefs.h"
#include <QtNetwork/QNetworkRequest>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE

View File

@@ -48,6 +48,8 @@
#include <QtCore/qtimer.h> #include <QtCore/qtimer.h>
#include <QtCore/qdebug.h> #include <QtCore/qdebug.h>
#include <QtCore/qpointer.h> #include <QtCore/qpointer.h>
#include <QtCore/qfileinfo.h>
#include <QtCore/qtemporaryfile.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -103,22 +105,30 @@ public:
: provider(0) : provider(0)
, control(0) , control(0)
, state(QMediaPlayer::StoppedState) , state(QMediaPlayer::StoppedState)
, status(QMediaPlayer::UnknownMediaStatus)
, error(QMediaPlayer::NoError) , error(QMediaPlayer::NoError)
, ignoreNextStatusChange(-1)
, playlist(0) , playlist(0)
, networkAccessControl(0) , networkAccessControl(0)
, hasStreamPlaybackFeature(false)
, nestedPlaylists(0) , nestedPlaylists(0)
{} {}
QMediaServiceProvider *provider; QMediaServiceProvider *provider;
QMediaPlayerControl* control; QMediaPlayerControl* control;
QMediaPlayer::State state; QMediaPlayer::State state;
QMediaPlayer::MediaStatus status;
QMediaPlayer::Error error; QMediaPlayer::Error error;
QString errorString; QString errorString;
int ignoreNextStatusChange;
QPointer<QObject> videoOutput; QPointer<QObject> videoOutput;
QMediaPlaylist *playlist; QMediaPlaylist *playlist;
QMediaNetworkAccessControl *networkAccessControl; QMediaNetworkAccessControl *networkAccessControl;
QVideoSurfaceOutput surfaceOutput; QVideoSurfaceOutput surfaceOutput;
bool hasStreamPlaybackFeature;
QMediaContent qrcMedia;
QScopedPointer<QFile> qrcFile;
QMediaContent rootMedia; QMediaContent rootMedia;
QMediaContent pendingPlaylist; QMediaContent pendingPlaylist;
@@ -126,6 +136,8 @@ public:
bool isInChain(QUrl url); bool isInChain(QUrl url);
int nestedPlaylists; int nestedPlaylists;
void setMedia(const QMediaContent &media, QIODevice *stream = 0);
void setPlaylist(QMediaPlaylist *playlist); void setPlaylist(QMediaPlaylist *playlist);
void setPlaylistMedia(); void setPlaylistMedia();
void loadPlaylist(); void loadPlaylist();
@@ -137,6 +149,7 @@ public:
void _q_error(int error, const QString &errorString); void _q_error(int error, const QString &errorString);
void _q_updateMedia(const QMediaContent&); void _q_updateMedia(const QMediaContent&);
void _q_playlistDestroyed(); void _q_playlistDestroyed();
void _q_handleMediaChanged(const QMediaContent&);
void _q_handlePlaylistLoaded(); void _q_handlePlaylistLoaded();
void _q_handlePlaylistLoadFailed(); void _q_handlePlaylistLoadFailed();
}; };
@@ -196,22 +209,30 @@ void QMediaPlayerPrivate::_q_stateChanged(QMediaPlayer::State ps)
} }
} }
void QMediaPlayerPrivate::_q_mediaStatusChanged(QMediaPlayer::MediaStatus status) void QMediaPlayerPrivate::_q_mediaStatusChanged(QMediaPlayer::MediaStatus s)
{ {
Q_Q(QMediaPlayer); Q_Q(QMediaPlayer);
switch (status) { if (int(s) == ignoreNextStatusChange) {
case QMediaPlayer::StalledMedia: ignoreNextStatusChange = -1;
case QMediaPlayer::BufferingMedia: return;
q->addPropertyWatch("bufferStatus");
emit q->mediaStatusChanged(status);
break;
default:
q->removePropertyWatch("bufferStatus");
emit q->mediaStatusChanged(status);
break;
} }
if (s != status) {
status = s;
switch (s) {
case QMediaPlayer::StalledMedia:
case QMediaPlayer::BufferingMedia:
q->addPropertyWatch("bufferStatus");
break;
default:
q->removePropertyWatch("bufferStatus");
break;
}
emit q->mediaStatusChanged(s);
}
} }
void QMediaPlayerPrivate::_q_error(int error, const QString &errorString) void QMediaPlayerPrivate::_q_error(int error, const QString &errorString)
@@ -276,7 +297,7 @@ void QMediaPlayerPrivate::_q_updateMedia(const QMediaContent &media)
const QMediaPlayer::State currentState = state; const QMediaPlayer::State currentState = state;
control->setMedia(media, 0); setMedia(media, 0);
if (!media.isNull()) { if (!media.isNull()) {
switch (currentState) { switch (currentState) {
@@ -297,11 +318,76 @@ void QMediaPlayerPrivate::_q_updateMedia(const QMediaContent &media)
void QMediaPlayerPrivate::_q_playlistDestroyed() void QMediaPlayerPrivate::_q_playlistDestroyed()
{ {
playlist = 0; playlist = 0;
setMedia(QMediaContent(), 0);
}
void QMediaPlayerPrivate::setMedia(const QMediaContent &media, QIODevice *stream)
{
Q_Q(QMediaPlayer);
if (!control) if (!control)
return; return;
control->setMedia(QMediaContent(), 0); QScopedPointer<QFile> file;
// Backends can't play qrc files directly.
// If the backend supports StreamPlayback, we pass a QFile for that resource.
// If it doesn't, we copy the data to a temporary file and pass its path.
if (!media.isNull() && !stream && media.canonicalUrl().scheme() == QLatin1String("qrc")) {
qrcMedia = media;
file.reset(new QFile(QLatin1Char(':') + media.canonicalUrl().path()));
if (!file->open(QFile::ReadOnly)) {
QMetaObject::invokeMethod(q, "_q_error", Qt::QueuedConnection,
Q_ARG(int, QMediaPlayer::ResourceError),
Q_ARG(QString, QObject::tr("Attempting to play invalid Qt resource")));
QMetaObject::invokeMethod(q, "_q_mediaStatusChanged", Qt::QueuedConnection,
Q_ARG(QMediaPlayer::MediaStatus, QMediaPlayer::InvalidMedia));
file.reset();
// Ignore the next NoMedia status change, we just want to clear the current media
// on the backend side since we can't load the new one and we want to be in the
// InvalidMedia status.
ignoreNextStatusChange = QMediaPlayer::NoMedia;
control->setMedia(QMediaContent(), 0);
} else if (hasStreamPlaybackFeature) {
control->setMedia(media, file.data());
} else {
QTemporaryFile *tempFile = new QTemporaryFile;
// Preserve original file extension, some backends might not load the file if it doesn't
// have an extension.
const QString suffix = QFileInfo(*file).suffix();
if (!suffix.isEmpty())
tempFile->setFileTemplate(tempFile->fileTemplate() + QLatin1Char('.') + suffix);
// Copy the qrc data into the temporary file
tempFile->open();
char buffer[4096];
while (true) {
qint64 len = file->read(buffer, sizeof(buffer));
if (len < 1)
break;
tempFile->write(buffer, len);
}
tempFile->close();
file.reset(tempFile);
control->setMedia(QMediaContent(QUrl::fromLocalFile(file->fileName())), 0);
}
} else {
qrcMedia = QMediaContent();
control->setMedia(media, stream);
}
qrcFile.swap(file); // Cleans up any previous file
}
void QMediaPlayerPrivate::_q_handleMediaChanged(const QMediaContent &media)
{
Q_Q(QMediaPlayer);
emit q->currentMediaChanged(qrcMedia.isNull() ? media : qrcMedia);
} }
void QMediaPlayerPrivate::setPlaylist(QMediaPlaylist *pls) void QMediaPlayerPrivate::setPlaylist(QMediaPlaylist *pls)
@@ -333,7 +419,7 @@ void QMediaPlayerPrivate::setPlaylistMedia()
playlist->next(); playlist->next();
} }
return; return;
} else if (control != 0) { } else {
// If we've just switched to a new playlist, // If we've just switched to a new playlist,
// then last emitted currentMediaChanged was a playlist. // then last emitted currentMediaChanged was a playlist.
// Make sure we emit currentMediaChanged if new playlist has // Make sure we emit currentMediaChanged if new playlist has
@@ -344,14 +430,14 @@ void QMediaPlayerPrivate::setPlaylistMedia()
// test.wav -- processed by backend, // test.wav -- processed by backend,
// media is not changed, // media is not changed,
// frontend needs to emit currentMediaChanged // frontend needs to emit currentMediaChanged
bool isSameMedia = (control->media() == playlist->currentMedia()); bool isSameMedia = (q->currentMedia() == playlist->currentMedia());
control->setMedia(playlist->currentMedia(), 0); setMedia(playlist->currentMedia(), 0);
if (isSameMedia) { if (isSameMedia) {
emit q->currentMediaChanged(control->media()); emit q->currentMediaChanged(q->currentMedia());
} }
} }
} else { } else {
q->setMedia(QMediaContent(), 0); setMedia(QMediaContent(), 0);
} }
} }
@@ -441,7 +527,7 @@ void QMediaPlayerPrivate::_q_handlePlaylistLoadFailed()
if (playlist) if (playlist)
playlist->next(); playlist->next();
else else
control->setMedia(QMediaContent(), 0); setMedia(QMediaContent(), 0);
} }
static QMediaService *playerService(QMediaPlayer::Flags flags) static QMediaService *playerService(QMediaPlayer::Flags flags)
@@ -484,7 +570,7 @@ QMediaPlayer::QMediaPlayer(QObject *parent, QMediaPlayer::Flags flags):
d->control = qobject_cast<QMediaPlayerControl*>(d->service->requestControl(QMediaPlayerControl_iid)); d->control = qobject_cast<QMediaPlayerControl*>(d->service->requestControl(QMediaPlayerControl_iid));
d->networkAccessControl = qobject_cast<QMediaNetworkAccessControl*>(d->service->requestControl(QMediaNetworkAccessControl_iid)); d->networkAccessControl = qobject_cast<QMediaNetworkAccessControl*>(d->service->requestControl(QMediaNetworkAccessControl_iid));
if (d->control != 0) { if (d->control != 0) {
connect(d->control, SIGNAL(mediaChanged(QMediaContent)), SIGNAL(currentMediaChanged(QMediaContent))); connect(d->control, SIGNAL(mediaChanged(QMediaContent)), SLOT(_q_handleMediaChanged(QMediaContent)));
connect(d->control, SIGNAL(stateChanged(QMediaPlayer::State)), SLOT(_q_stateChanged(QMediaPlayer::State))); connect(d->control, SIGNAL(stateChanged(QMediaPlayer::State)), SLOT(_q_stateChanged(QMediaPlayer::State)));
connect(d->control, SIGNAL(mediaStatusChanged(QMediaPlayer::MediaStatus)), connect(d->control, SIGNAL(mediaStatusChanged(QMediaPlayer::MediaStatus)),
SLOT(_q_mediaStatusChanged(QMediaPlayer::MediaStatus))); SLOT(_q_mediaStatusChanged(QMediaPlayer::MediaStatus)));
@@ -500,11 +586,16 @@ QMediaPlayer::QMediaPlayer(QObject *parent, QMediaPlayer::Flags flags):
connect(d->control, SIGNAL(playbackRateChanged(qreal)), SIGNAL(playbackRateChanged(qreal))); connect(d->control, SIGNAL(playbackRateChanged(qreal)), SIGNAL(playbackRateChanged(qreal)));
connect(d->control, SIGNAL(bufferStatusChanged(int)), SIGNAL(bufferStatusChanged(int))); connect(d->control, SIGNAL(bufferStatusChanged(int)), SIGNAL(bufferStatusChanged(int)));
if (d->control->state() == PlayingState) d->state = d->control->state();
d->status = d->control->mediaStatus();
if (d->state == PlayingState)
addPropertyWatch("position"); addPropertyWatch("position");
if (d->control->mediaStatus() == StalledMedia || d->control->mediaStatus() == BufferingMedia) if (d->status == StalledMedia || d->status == BufferingMedia)
addPropertyWatch("bufferStatus"); addPropertyWatch("bufferStatus");
d->hasStreamPlaybackFeature = d->provider->supportedFeatures(d->service).testFlag(QMediaServiceProviderHint::StreamPlayback);
} }
if (d->networkAccessControl != 0) { if (d->networkAccessControl != 0) {
connect(d->networkAccessControl, SIGNAL(configurationChanged(QNetworkConfiguration)), connect(d->networkAccessControl, SIGNAL(configurationChanged(QNetworkConfiguration)),
@@ -549,7 +640,9 @@ const QIODevice *QMediaPlayer::mediaStream() const
{ {
Q_D(const QMediaPlayer); Q_D(const QMediaPlayer);
if (d->control != 0) // When playing a resource file, we might have passed a QFile to the backend. Hide it from
// the user.
if (d->control && d->qrcMedia.isNull())
return d->control->mediaStream(); return d->control->mediaStream();
return 0; return 0;
@@ -566,7 +659,12 @@ QMediaContent QMediaPlayer::currentMedia() const
{ {
Q_D(const QMediaPlayer); Q_D(const QMediaPlayer);
if (d->control != 0) // When playing a resource file, don't return the backend's current media, which
// can be a temporary file.
if (!d->qrcMedia.isNull())
return d->qrcMedia;
if (d->control)
return d->control->media(); return d->control->media();
return QMediaContent(); return QMediaContent();
@@ -600,12 +698,7 @@ QMediaPlayer::State QMediaPlayer::state() const
QMediaPlayer::MediaStatus QMediaPlayer::mediaStatus() const QMediaPlayer::MediaStatus QMediaPlayer::mediaStatus() const
{ {
Q_D(const QMediaPlayer); return d_func()->status;
if (d->control != 0)
return d->control->mediaStatus();
return QMediaPlayer::UnknownMediaStatus;
} }
qint64 QMediaPlayer::duration() const qint64 QMediaPlayer::duration() const
@@ -877,8 +970,8 @@ void QMediaPlayer::setMedia(const QMediaContent &media, QIODevice *stream)
// reset playlist to the 1st item // reset playlist to the 1st item
media.playlist()->setCurrentIndex(0); media.playlist()->setCurrentIndex(0);
d->setPlaylist(media.playlist()); d->setPlaylist(media.playlist());
} else if (d->control != 0) { } else {
d->control->setMedia(media, stream); d->setMedia(media, stream);
} }
} }

View File

@@ -202,6 +202,7 @@ private:
Q_PRIVATE_SLOT(d_func(), void _q_error(int, const QString &)) Q_PRIVATE_SLOT(d_func(), void _q_error(int, const QString &))
Q_PRIVATE_SLOT(d_func(), void _q_updateMedia(const QMediaContent&)) Q_PRIVATE_SLOT(d_func(), void _q_updateMedia(const QMediaContent&))
Q_PRIVATE_SLOT(d_func(), void _q_playlistDestroyed()) Q_PRIVATE_SLOT(d_func(), void _q_playlistDestroyed())
Q_PRIVATE_SLOT(d_func(), void _q_handleMediaChanged(const QMediaContent&))
Q_PRIVATE_SLOT(d_func(), void _q_handlePlaylistLoaded()) Q_PRIVATE_SLOT(d_func(), void _q_handlePlaylistLoaded())
Q_PRIVATE_SLOT(d_func(), void _q_handlePlaylistLoadFailed()) Q_PRIVATE_SLOT(d_func(), void _q_handlePlaylistLoadFailed())
}; };

View File

@@ -299,7 +299,14 @@ Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, loader,
class QPluginServiceProvider : public QMediaServiceProvider class QPluginServiceProvider : public QMediaServiceProvider
{ {
QMap<QMediaService*, QMediaServiceProviderPlugin*> pluginMap; struct MediaServiceData {
QByteArray type;
QMediaServiceProviderPlugin *plugin;
MediaServiceData() : plugin(0) { }
};
QMap<const QMediaService*, MediaServiceData> mediaServiceData;
public: public:
QMediaService* requestService(const QByteArray &type, const QMediaServiceProviderHint &hint) QMediaService* requestService(const QByteArray &type, const QMediaServiceProviderHint &hint)
@@ -416,8 +423,12 @@ public:
if (plugin != 0) { if (plugin != 0) {
QMediaService *service = plugin->create(key); QMediaService *service = plugin->create(key);
if (service != 0) if (service != 0) {
pluginMap.insert(service, plugin); MediaServiceData d;
d.type = type;
d.plugin = plugin;
mediaServiceData.insert(service, d);
}
return service; return service;
} }
@@ -430,13 +441,30 @@ public:
void releaseService(QMediaService *service) void releaseService(QMediaService *service)
{ {
if (service != 0) { if (service != 0) {
QMediaServiceProviderPlugin *plugin = pluginMap.take(service); MediaServiceData d = mediaServiceData.take(service);
if (plugin != 0) if (d.plugin != 0)
plugin->release(service); d.plugin->release(service);
} }
} }
QMediaServiceProviderHint::Features supportedFeatures(const QMediaService *service) const
{
if (service) {
MediaServiceData d = mediaServiceData.value(service);
if (d.plugin) {
QMediaServiceFeaturesInterface *iface =
qobject_cast<QMediaServiceFeaturesInterface*>(d.plugin);
if (iface)
return iface->supportedFeatures(d.type);
}
}
return QMediaServiceProviderHint::Features();
}
QMultimedia::SupportEstimate hasSupport(const QByteArray &serviceType, QMultimedia::SupportEstimate hasSupport(const QByteArray &serviceType,
const QString &mimeType, const QString &mimeType,
const QStringList& codecs, const QStringList& codecs,
@@ -660,6 +688,18 @@ Q_GLOBAL_STATIC(QPluginServiceProvider, pluginProvider);
Releases a media \a service requested with requestService(). Releases a media \a service requested with requestService().
*/ */
/*!
\fn QMediaServiceProvider::supportedFeatures(const QMediaService *service) const
Returns the features supported by a given \a service.
*/
QMediaServiceProviderHint::Features QMediaServiceProvider::supportedFeatures(const QMediaService *service) const
{
Q_UNUSED(service);
return QMediaServiceProviderHint::Features(0);
}
/*! /*!
\fn QMultimedia::SupportEstimate QMediaServiceProvider::hasSupport(const QByteArray &serviceType, const QString &mimeType, const QStringList& codecs, int flags) const \fn QMultimedia::SupportEstimate QMediaServiceProvider::hasSupport(const QByteArray &serviceType, const QString &mimeType, const QStringList& codecs, int flags) const

View File

@@ -53,6 +53,8 @@ public:
virtual QMediaService* requestService(const QByteArray &type, const QMediaServiceProviderHint &hint = QMediaServiceProviderHint()) = 0; virtual QMediaService* requestService(const QByteArray &type, const QMediaServiceProviderHint &hint = QMediaServiceProviderHint()) = 0;
virtual void releaseService(QMediaService *service) = 0; virtual void releaseService(QMediaService *service) = 0;
virtual QMediaServiceProviderHint::Features supportedFeatures(const QMediaService *service) const;
virtual QMultimedia::SupportEstimate hasSupport(const QByteArray &serviceType, virtual QMultimedia::SupportEstimate hasSupport(const QByteArray &serviceType,
const QString &mimeType, const QString &mimeType,
const QStringList& codecs, const QStringList& codecs,

View File

@@ -705,11 +705,13 @@ QMediaTimeRange operator-(const QMediaTimeRange &r1, const QMediaTimeRange &r2)
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, const QMediaTimeRange &range) QDebug operator<<(QDebug dbg, const QMediaTimeRange &range)
{ {
dbg.nospace() << "QMediaTimeRange( "; QDebugStateSaver saver(dbg);
foreach (const QMediaTimeInterval &interval, range.intervals()) { dbg.nospace();
dbg.nospace() << "(" << interval.start() << ", " << interval.end() << ") "; dbg << "QMediaTimeRange( ";
} foreach (const QMediaTimeInterval &interval, range.intervals())
dbg.space() << ")"; dbg << '(' << interval.start() << ", " << interval.end() << ") ";
dbg.space();
dbg << ')';
return dbg; return dbg;
} }
#endif #endif

View File

@@ -350,33 +350,37 @@ uchar *QAbstractPlanarVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPe
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QAbstractVideoBuffer::HandleType type) QDebug operator<<(QDebug dbg, QAbstractVideoBuffer::HandleType type)
{ {
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (type) { switch (type) {
case QAbstractVideoBuffer::NoHandle: case QAbstractVideoBuffer::NoHandle:
return dbg.nospace() << "NoHandle"; return dbg << "NoHandle";
case QAbstractVideoBuffer::GLTextureHandle: case QAbstractVideoBuffer::GLTextureHandle:
return dbg.nospace() << "GLTextureHandle"; return dbg << "GLTextureHandle";
case QAbstractVideoBuffer::XvShmImageHandle: case QAbstractVideoBuffer::XvShmImageHandle:
return dbg.nospace() << "XvShmImageHandle"; return dbg << "XvShmImageHandle";
case QAbstractVideoBuffer::CoreImageHandle: case QAbstractVideoBuffer::CoreImageHandle:
return dbg.nospace() << "CoreImageHandle"; return dbg << "CoreImageHandle";
case QAbstractVideoBuffer::QPixmapHandle: case QAbstractVideoBuffer::QPixmapHandle:
return dbg.nospace() << "QPixmapHandle"; return dbg << "QPixmapHandle";
default: default:
return dbg.nospace() << QString(QLatin1String("UserHandle(%1)")).arg(int(type)).toLatin1().constData(); return dbg << "UserHandle(" << int(type) << ')';
} }
} }
QDebug operator<<(QDebug dbg, QAbstractVideoBuffer::MapMode mode) QDebug operator<<(QDebug dbg, QAbstractVideoBuffer::MapMode mode)
{ {
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (mode) { switch (mode) {
case QAbstractVideoBuffer::ReadOnly: case QAbstractVideoBuffer::ReadOnly:
return dbg.nospace() << "ReadOnly"; return dbg << "ReadOnly";
case QAbstractVideoBuffer::ReadWrite: case QAbstractVideoBuffer::ReadWrite:
return dbg.nospace() << "ReadWrite"; return dbg << "ReadWrite";
case QAbstractVideoBuffer::WriteOnly: case QAbstractVideoBuffer::WriteOnly:
return dbg.nospace() << "WriteOnly"; return dbg << "WriteOnly";
default: default:
return dbg.nospace() << "NotMapped"; return dbg << "NotMapped";
} }
} }
#endif #endif

View File

@@ -353,18 +353,26 @@ void QAbstractVideoSurface::setNativeResolution(const QSize &resolution)
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, const QAbstractVideoSurface::Error& error) QDebug operator<<(QDebug dbg, const QAbstractVideoSurface::Error& error)
{ {
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (error) { switch (error) {
case QAbstractVideoSurface::UnsupportedFormatError: case QAbstractVideoSurface::UnsupportedFormatError:
return dbg.nospace() << "UnsupportedFormatError"; dbg << "UnsupportedFormatError";
break;
case QAbstractVideoSurface::IncorrectFormatError: case QAbstractVideoSurface::IncorrectFormatError:
return dbg.nospace() << "IncorrectFormatError"; dbg << "IncorrectFormatError";
break;
case QAbstractVideoSurface::StoppedError: case QAbstractVideoSurface::StoppedError:
return dbg.nospace() << "StoppedError"; dbg << "StoppedError";
break;
case QAbstractVideoSurface::ResourceError: case QAbstractVideoSurface::ResourceError:
return dbg.nospace() << "ResourceError"; dbg << "ResourceError";
break;
default: default:
return dbg.nospace() << "NoError"; dbg << "NoError";
break;
} }
return dbg;
} }
#endif #endif

View File

@@ -1002,90 +1002,94 @@ QImage::Format QVideoFrame::imageFormatFromPixelFormat(PixelFormat format)
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf) QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf)
{ {
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (pf) { switch (pf) {
case QVideoFrame::Format_Invalid: case QVideoFrame::Format_Invalid:
return dbg.nospace() << "Format_Invalid"; return dbg << "Format_Invalid";
case QVideoFrame::Format_ARGB32: case QVideoFrame::Format_ARGB32:
return dbg.nospace() << "Format_ARGB32"; return dbg << "Format_ARGB32";
case QVideoFrame::Format_ARGB32_Premultiplied: case QVideoFrame::Format_ARGB32_Premultiplied:
return dbg.nospace() << "Format_ARGB32_Premultiplied"; return dbg << "Format_ARGB32_Premultiplied";
case QVideoFrame::Format_RGB32: case QVideoFrame::Format_RGB32:
return dbg.nospace() << "Format_RGB32"; return dbg << "Format_RGB32";
case QVideoFrame::Format_RGB24: case QVideoFrame::Format_RGB24:
return dbg.nospace() << "Format_RGB24"; return dbg << "Format_RGB24";
case QVideoFrame::Format_RGB565: case QVideoFrame::Format_RGB565:
return dbg.nospace() << "Format_RGB565"; return dbg << "Format_RGB565";
case QVideoFrame::Format_RGB555: case QVideoFrame::Format_RGB555:
return dbg.nospace() << "Format_RGB555"; return dbg << "Format_RGB555";
case QVideoFrame::Format_ARGB8565_Premultiplied: case QVideoFrame::Format_ARGB8565_Premultiplied:
return dbg.nospace() << "Format_ARGB8565_Premultiplied"; return dbg << "Format_ARGB8565_Premultiplied";
case QVideoFrame::Format_BGRA32: case QVideoFrame::Format_BGRA32:
return dbg.nospace() << "Format_BGRA32"; return dbg << "Format_BGRA32";
case QVideoFrame::Format_BGRA32_Premultiplied: case QVideoFrame::Format_BGRA32_Premultiplied:
return dbg.nospace() << "Format_BGRA32_Premultiplied"; return dbg << "Format_BGRA32_Premultiplied";
case QVideoFrame::Format_BGR32: case QVideoFrame::Format_BGR32:
return dbg.nospace() << "Format_BGR32"; return dbg << "Format_BGR32";
case QVideoFrame::Format_BGR24: case QVideoFrame::Format_BGR24:
return dbg.nospace() << "Format_BGR24"; return dbg << "Format_BGR24";
case QVideoFrame::Format_BGR565: case QVideoFrame::Format_BGR565:
return dbg.nospace() << "Format_BGR565"; return dbg << "Format_BGR565";
case QVideoFrame::Format_BGR555: case QVideoFrame::Format_BGR555:
return dbg.nospace() << "Format_BGR555"; return dbg << "Format_BGR555";
case QVideoFrame::Format_BGRA5658_Premultiplied: case QVideoFrame::Format_BGRA5658_Premultiplied:
return dbg.nospace() << "Format_BGRA5658_Premultiplied"; return dbg << "Format_BGRA5658_Premultiplied";
case QVideoFrame::Format_AYUV444: case QVideoFrame::Format_AYUV444:
return dbg.nospace() << "Format_AYUV444"; return dbg << "Format_AYUV444";
case QVideoFrame::Format_AYUV444_Premultiplied: case QVideoFrame::Format_AYUV444_Premultiplied:
return dbg.nospace() << "Format_AYUV444_Premultiplied"; return dbg << "Format_AYUV444_Premultiplied";
case QVideoFrame::Format_YUV444: case QVideoFrame::Format_YUV444:
return dbg.nospace() << "Format_YUV444"; return dbg << "Format_YUV444";
case QVideoFrame::Format_YUV420P: case QVideoFrame::Format_YUV420P:
return dbg.nospace() << "Format_YUV420P"; return dbg << "Format_YUV420P";
case QVideoFrame::Format_YV12: case QVideoFrame::Format_YV12:
return dbg.nospace() << "Format_YV12"; return dbg << "Format_YV12";
case QVideoFrame::Format_UYVY: case QVideoFrame::Format_UYVY:
return dbg.nospace() << "Format_UYVY"; return dbg << "Format_UYVY";
case QVideoFrame::Format_YUYV: case QVideoFrame::Format_YUYV:
return dbg.nospace() << "Format_YUYV"; return dbg << "Format_YUYV";
case QVideoFrame::Format_NV12: case QVideoFrame::Format_NV12:
return dbg.nospace() << "Format_NV12"; return dbg << "Format_NV12";
case QVideoFrame::Format_NV21: case QVideoFrame::Format_NV21:
return dbg.nospace() << "Format_NV21"; return dbg << "Format_NV21";
case QVideoFrame::Format_IMC1: case QVideoFrame::Format_IMC1:
return dbg.nospace() << "Format_IMC1"; return dbg << "Format_IMC1";
case QVideoFrame::Format_IMC2: case QVideoFrame::Format_IMC2:
return dbg.nospace() << "Format_IMC2"; return dbg << "Format_IMC2";
case QVideoFrame::Format_IMC3: case QVideoFrame::Format_IMC3:
return dbg.nospace() << "Format_IMC3"; return dbg << "Format_IMC3";
case QVideoFrame::Format_IMC4: case QVideoFrame::Format_IMC4:
return dbg.nospace() << "Format_IMC4"; return dbg << "Format_IMC4";
case QVideoFrame::Format_Y8: case QVideoFrame::Format_Y8:
return dbg.nospace() << "Format_Y8"; return dbg << "Format_Y8";
case QVideoFrame::Format_Y16: case QVideoFrame::Format_Y16:
return dbg.nospace() << "Format_Y16"; return dbg << "Format_Y16";
case QVideoFrame::Format_Jpeg: case QVideoFrame::Format_Jpeg:
return dbg.nospace() << "Format_Jpeg"; return dbg << "Format_Jpeg";
case QVideoFrame::Format_AdobeDng: case QVideoFrame::Format_AdobeDng:
return dbg.nospace() << "Format_AdobeDng"; return dbg << "Format_AdobeDng";
case QVideoFrame::Format_CameraRaw: case QVideoFrame::Format_CameraRaw:
return dbg.nospace() << "Format_CameraRaw"; return dbg << "Format_CameraRaw";
default: default:
return dbg.nospace() << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData(); return dbg << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData();
} }
} }
QDebug operator<<(QDebug dbg, QVideoFrame::FieldType f) QDebug operator<<(QDebug dbg, QVideoFrame::FieldType f)
{ {
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (f) { switch (f) {
case QVideoFrame::TopField: case QVideoFrame::TopField:
return dbg.nospace() << "TopField"; return dbg << "TopField";
case QVideoFrame::BottomField: case QVideoFrame::BottomField:
return dbg.nospace() << "BottomField"; return dbg << "BottomField";
case QVideoFrame::InterlacedFrame: case QVideoFrame::InterlacedFrame:
return dbg.nospace() << "InterlacedFrame"; return dbg << "InterlacedFrame";
default: default:
return dbg.nospace() << "ProgressiveFrame"; return dbg << "ProgressiveFrame";
} }
} }
@@ -1161,16 +1165,17 @@ static QString qFormatTimeStamps(qint64 start, qint64 end)
QDebug operator<<(QDebug dbg, const QVideoFrame& f) QDebug operator<<(QDebug dbg, const QVideoFrame& f)
{ {
dbg.nospace() << "QVideoFrame(" << f.size() << ", " QDebugStateSaver saver(dbg);
dbg.nospace();
dbg << "QVideoFrame(" << f.size() << ", "
<< f.pixelFormat() << ", " << f.pixelFormat() << ", "
<< f.handleType() << ", " << f.handleType() << ", "
<< f.mapMode() << ", " << f.mapMode() << ", "
<< qFormatTimeStamps(f.startTime(), f.endTime()).toLatin1().constData(); << qFormatTimeStamps(f.startTime(), f.endTime()).toLatin1().constData();
if (f.availableMetaData().count()) { if (f.availableMetaData().count())
dbg.nospace() << ", metaData: "; dbg << ", metaData: " << f.availableMetaData();
dbg.nospace() << f.availableMetaData(); dbg << ')';
} return dbg;
return dbg.nospace() << ")";
} }
#endif #endif

View File

@@ -569,61 +569,62 @@ void QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value)
#ifndef QT_NO_DEBUG_STREAM #ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs) QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (cs) { switch (cs) {
case QVideoSurfaceFormat::YCbCr_BT601: case QVideoSurfaceFormat::YCbCr_BT601:
nospace << "YCbCr_BT601"; dbg << "YCbCr_BT601";
break; break;
case QVideoSurfaceFormat::YCbCr_BT709: case QVideoSurfaceFormat::YCbCr_BT709:
nospace << "YCbCr_BT709"; dbg << "YCbCr_BT709";
break; break;
case QVideoSurfaceFormat::YCbCr_JPEG: case QVideoSurfaceFormat::YCbCr_JPEG:
nospace << "YCbCr_JPEG"; dbg << "YCbCr_JPEG";
break; break;
case QVideoSurfaceFormat::YCbCr_xvYCC601: case QVideoSurfaceFormat::YCbCr_xvYCC601:
nospace << "YCbCr_xvYCC601"; dbg << "YCbCr_xvYCC601";
break; break;
case QVideoSurfaceFormat::YCbCr_xvYCC709: case QVideoSurfaceFormat::YCbCr_xvYCC709:
nospace << "YCbCr_xvYCC709"; dbg << "YCbCr_xvYCC709";
break; break;
case QVideoSurfaceFormat::YCbCr_CustomMatrix: case QVideoSurfaceFormat::YCbCr_CustomMatrix:
nospace << "YCbCr_CustomMatrix"; dbg << "YCbCr_CustomMatrix";
break; break;
default: default:
nospace << "YCbCr_Undefined"; dbg << "YCbCr_Undefined";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::Direction dir) QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::Direction dir)
{ {
QDebug nospace = dbg.nospace(); QDebugStateSaver saver(dbg);
dbg.nospace();
switch (dir) { switch (dir) {
case QVideoSurfaceFormat::BottomToTop: case QVideoSurfaceFormat::BottomToTop:
nospace << "BottomToTop"; dbg << "BottomToTop";
break; break;
case QVideoSurfaceFormat::TopToBottom: case QVideoSurfaceFormat::TopToBottom:
nospace << "TopToBottom"; dbg << "TopToBottom";
break; break;
} }
return nospace; return dbg;
} }
QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f) QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
{ {
dbg.nospace() << "QVideoSurfaceFormat(" << f.pixelFormat(); QDebugStateSaver saver(dbg);
dbg.nospace() << ", " << f.frameSize(); dbg.nospace();
dbg.nospace() << ", viewport=" << f.viewport(); dbg << "QVideoSurfaceFormat(" << f.pixelFormat() << ", " << f.frameSize()
dbg.nospace() << ", pixelAspectRatio=" << f.pixelAspectRatio(); << ", viewport=" << f.viewport() << ", pixelAspectRatio=" << f.pixelAspectRatio()
dbg.nospace() << ", handleType=" << f.handleType(); << ", handleType=" << f.handleType() << ", yCbCrColorSpace=" << f.yCbCrColorSpace()
dbg.nospace() << ", yCbCrColorSpace=" << f.yCbCrColorSpace(); << ')';
dbg.nospace() << ")";
foreach(const QByteArray& propertyName, f.propertyNames()) foreach(const QByteArray& propertyName, f.propertyNames())
dbg << "\n " << propertyName.data() << " = " << f.property(propertyName.data()); dbg << "\n " << propertyName.data() << " = " << f.property(propertyName.data());
return dbg.space(); return dbg;
} }
#endif #endif

View File

@@ -318,8 +318,6 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized)) == 0) if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized)) == 0)
mMediaPlayer->release(); mMediaPlayer->release();
QString mediaPath;
if (mediaContent.isNull()) { if (mediaContent.isNull()) {
setMediaStatus(QMediaPlayer::NoMedia); setMediaStatus(QMediaPlayer::NoMedia);
} else { } else {
@@ -330,29 +328,17 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
return; return;
} }
const QUrl url = mediaContent.canonicalUrl();
if (url.scheme() == QLatin1String("qrc")) {
const QString path = url.toString().mid(3);
mTempFile.reset(QTemporaryFile::createNativeFile(path));
if (!mTempFile.isNull())
mediaPath = QStringLiteral("file://") + mTempFile->fileName();
} else {
mediaPath = url.toString(QUrl::FullyEncoded);
}
if (mVideoSize.isValid() && mVideoOutput) if (mVideoSize.isValid() && mVideoOutput)
mVideoOutput->setVideoSize(mVideoSize); mVideoOutput->setVideoSize(mVideoSize);
if ((mMediaPlayer->display() == 0) && mVideoOutput) if ((mMediaPlayer->display() == 0) && mVideoOutput)
mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture()); mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
mMediaPlayer->setDataSource(mediaPath); mMediaPlayer->setDataSource(mediaContent.canonicalUrl().toString(QUrl::FullyEncoded));
mMediaPlayer->prepareAsync(); mMediaPlayer->prepareAsync();
} }
if (!mReloadingMedia) { if (!mReloadingMedia)
Q_EMIT mediaChanged(mMediaContent); Q_EMIT mediaChanged(mMediaContent);
Q_EMIT actualMediaLocationChanged(mediaPath);
}
resetBufferingProgress(); resetBufferingProgress();

View File

@@ -37,7 +37,6 @@
#include <qglobal.h> #include <qglobal.h>
#include <QMediaPlayerControl> #include <QMediaPlayerControl>
#include <qsize.h> #include <qsize.h>
#include <QtCore/QTemporaryFile>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
@@ -72,7 +71,6 @@ public:
Q_SIGNALS: Q_SIGNALS:
void metaDataUpdated(); void metaDataUpdated();
void actualMediaLocationChanged(const QString &url);
public Q_SLOTS: public Q_SLOTS:
void setPosition(qint64 position) Q_DECL_OVERRIDE; void setPosition(qint64 position) Q_DECL_OVERRIDE;
@@ -112,7 +110,6 @@ private:
int mPendingVolume; int mPendingVolume;
int mPendingMute; int mPendingMute;
bool mReloadingMedia; bool mReloadingMedia;
QScopedPointer<QTemporaryFile> mTempFile;
int mActiveStateChangeNotifiers; int mActiveStateChangeNotifiers;
void setState(QMediaPlayer::State state); void setState(QMediaPlayer::State state);

View File

@@ -45,8 +45,8 @@ QAndroidMediaService::QAndroidMediaService(QObject *parent)
{ {
mMediaControl = new QAndroidMediaPlayerControl; mMediaControl = new QAndroidMediaPlayerControl;
mMetadataControl = new QAndroidMetaDataReaderControl; mMetadataControl = new QAndroidMetaDataReaderControl;
connect(mMediaControl, SIGNAL(actualMediaLocationChanged(QString)), connect(mMediaControl, SIGNAL(mediaChanged(QMediaContent)),
mMetadataControl, SLOT(onMediaChanged(QString))); mMetadataControl, SLOT(onMediaChanged(QMediaContent)));
connect(mMediaControl, SIGNAL(metaDataUpdated()), connect(mMediaControl, SIGNAL(metaDataUpdated()),
mMetadataControl, SLOT(onUpdateMetaData())); mMetadataControl, SLOT(onUpdateMetaData()));
} }

View File

@@ -93,18 +93,18 @@ QStringList QAndroidMetaDataReaderControl::availableMetaData() const
return m_metadata.keys(); return m_metadata.keys();
} }
void QAndroidMetaDataReaderControl::onMediaChanged(const QString &url) void QAndroidMetaDataReaderControl::onMediaChanged(const QMediaContent &media)
{ {
if (!m_retriever) if (!m_retriever)
return; return;
m_mediaLocation = url; m_mediaContent = media;
updateData(); updateData();
} }
void QAndroidMetaDataReaderControl::onUpdateMetaData() void QAndroidMetaDataReaderControl::onUpdateMetaData()
{ {
if (!m_retriever || m_mediaLocation.isEmpty()) if (!m_retriever || m_mediaContent.isNull())
return; return;
updateData(); updateData();
@@ -114,8 +114,8 @@ void QAndroidMetaDataReaderControl::updateData()
{ {
m_metadata.clear(); m_metadata.clear();
if (!m_mediaLocation.isEmpty()) { if (!m_mediaContent.isNull()) {
if (m_retriever->setDataSource(m_mediaLocation)) { if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) {
QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType); QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType);
if (!mimeType.isNull()) if (!mimeType.isNull())
m_metadata.insert(QMediaMetaData::MediaType, mimeType); m_metadata.insert(QMediaMetaData::MediaType, mimeType);

View File

@@ -54,13 +54,13 @@ public:
QStringList availableMetaData() const Q_DECL_OVERRIDE; QStringList availableMetaData() const Q_DECL_OVERRIDE;
public Q_SLOTS: public Q_SLOTS:
void onMediaChanged(const QString &url); void onMediaChanged(const QMediaContent &media);
void onUpdateMetaData(); void onUpdateMetaData();
private: private:
void updateData(); void updateData();
QString m_mediaLocation; QMediaContent m_mediaContent;
bool m_available; bool m_available;
QVariantMap m_metadata; QVariantMap m_metadata;

View File

@@ -83,15 +83,14 @@ void AndroidMediaMetadataRetriever::release()
m_metadataRetriever.callMethod<void>("release"); m_metadataRetriever.callMethod<void>("release");
} }
bool AndroidMediaMetadataRetriever::setDataSource(const QString &urlString) bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
{ {
if (!m_metadataRetriever.isValid()) if (!m_metadataRetriever.isValid())
return false; return false;
QJNIEnvironmentPrivate env; QJNIEnvironmentPrivate env;
QUrl url(urlString);
if (url.isLocalFile()) { // also includes qrc files (copied to a temp file) if (url.isLocalFile()) { // also includes qrc files (copied to a temp file by QMediaPlayer)
QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path()); QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path());
QJNIObjectPrivate fileInputStream("java/io/FileInputStream", QJNIObjectPrivate fileInputStream("java/io/FileInputStream",
"(Ljava/lang/String;)V", "(Ljava/lang/String;)V",
@@ -153,7 +152,7 @@ bool AndroidMediaMetadataRetriever::setDataSource(const QString &urlString)
return false; return false;
} else if (QtAndroidPrivate::androidSdkVersion() >= 14) { } else if (QtAndroidPrivate::androidSdkVersion() >= 14) {
// On API levels >= 14, only setDataSource(String, Map<String, String>) accepts remote media // On API levels >= 14, only setDataSource(String, Map<String, String>) accepts remote media
QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString); QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.toString(QUrl::FullyEncoded));
QJNIObjectPrivate hash("java/util/HashMap"); QJNIObjectPrivate hash("java/util/HashMap");
m_metadataRetriever.callMethod<void>("setDataSource", m_metadataRetriever.callMethod<void>("setDataSource",
@@ -165,7 +164,7 @@ bool AndroidMediaMetadataRetriever::setDataSource(const QString &urlString)
} else { } else {
// While on API levels < 14, only setDataSource(Context, Uri) is available and works for // While on API levels < 14, only setDataSource(Context, Uri) is available and works for
// remote media... // remote media...
QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString); QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.toString(QUrl::FullyEncoded));
QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri", QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri",
"parse", "parse",
"(Ljava/lang/String;)Landroid/net/Uri;", "(Ljava/lang/String;)Landroid/net/Uri;",

View File

@@ -72,7 +72,7 @@ public:
QString extractMetadata(MetadataKey key); QString extractMetadata(MetadataKey key);
void release(); void release();
bool setDataSource(const QString &url); bool setDataSource(const QUrl &url);
private: private:
QJNIObjectPrivate m_metadataRetriever; QJNIObjectPrivate m_metadataRetriever;

View File

@@ -32,6 +32,7 @@
****************************************************************************/ ****************************************************************************/
#include "avfcameraviewfindersettingscontrol.h" #include "avfcameraviewfindersettingscontrol.h"
#include "private/qabstractvideobuffer_p.h"
#include "avfcamerarenderercontrol.h" #include "avfcamerarenderercontrol.h"
#include "avfcamerasession.h" #include "avfcamerasession.h"
#include "avfcameraservice.h" #include "avfcameraservice.h"
@@ -39,15 +40,17 @@
#include <QtMultimedia/qabstractvideosurface.h> #include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qabstractvideobuffer.h> #include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qvideosurfaceformat.h> #include <QtMultimedia/qvideosurfaceformat.h>
QT_USE_NAMESPACE QT_USE_NAMESPACE
class CVPixelBufferVideoBuffer : public QAbstractVideoBuffer class CVPixelBufferVideoBuffer : public QAbstractPlanarVideoBuffer
{ {
friend class CVPixelBufferVideoBufferPrivate;
public: public:
CVPixelBufferVideoBuffer(CVPixelBufferRef buffer) CVPixelBufferVideoBuffer(CVPixelBufferRef buffer)
: QAbstractVideoBuffer(NoHandle) : QAbstractPlanarVideoBuffer(NoHandle)
, m_buffer(buffer) , m_buffer(buffer)
, m_mode(NotMapped) , m_mode(NotMapped)
{ {
@@ -61,6 +64,42 @@ public:
MapMode mapMode() const { return m_mode; } MapMode mapMode() const { return m_mode; }
int map(QAbstractVideoBuffer::MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
{
// We only support RGBA or NV12 (or Apple's version of NV12),
// they are either 0 planes or 2.
const size_t nPlanes = CVPixelBufferGetPlaneCount(m_buffer);
Q_ASSERT(nPlanes <= 2);
if (!nPlanes) {
data[0] = map(mode, numBytes, bytesPerLine);
return data[0] ? 1 : 0;
}
// For a bi-planar format we have to set the parameters correctly:
if (mode != QAbstractVideoBuffer::NotMapped && m_mode == QAbstractVideoBuffer::NotMapped) {
CVPixelBufferLockBaseAddress(m_buffer, 0);
if (numBytes)
*numBytes = CVPixelBufferGetDataSize(m_buffer);
if (bytesPerLine) {
// At the moment we handle only bi-planar format.
bytesPerLine[0] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, 0);
bytesPerLine[1] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, 1);
}
if (data) {
data[0] = (uchar *)CVPixelBufferGetBaseAddressOfPlane(m_buffer, 0);
data[1] = (uchar *)CVPixelBufferGetBaseAddressOfPlane(m_buffer, 1);
}
m_mode = mode;
}
return nPlanes;
}
uchar *map(MapMode mode, int *numBytes, int *bytesPerLine) uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
{ {
if (mode != NotMapped && m_mode == NotMapped) { if (mode != NotMapped && m_mode == NotMapped) {
@@ -73,7 +112,6 @@ public:
*bytesPerLine = CVPixelBufferGetBytesPerRow(m_buffer); *bytesPerLine = CVPixelBufferGetBytesPerRow(m_buffer);
m_mode = mode; m_mode = mode;
return (uchar*)CVPixelBufferGetBaseAddress(m_buffer); return (uchar*)CVPixelBufferGetBaseAddress(m_buffer);
} else { } else {
return 0; return 0;
@@ -93,6 +131,7 @@ private:
MapMode m_mode; MapMode m_mode;
}; };
@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> @interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{ {
@private @private

View File

@@ -74,6 +74,7 @@ private:
void setFramerate(qreal minFPS, qreal maxFPS, bool useActive); void setFramerate(qreal minFPS, qreal maxFPS, bool useActive);
void setPixelFormat(QVideoFrame::PixelFormat newFormat); void setPixelFormat(QVideoFrame::PixelFormat newFormat);
AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const; AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const;
QVector<QVideoFrame::PixelFormat> viewfinderPixelFormats() const;
bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const; bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const;
void applySettings(); void applySettings();
QCameraViewfinderSettings requestedSettings() const; QCameraViewfinderSettings requestedSettings() const;

View File

@@ -38,6 +38,8 @@
#include "avfcameraservice.h" #include "avfcameraservice.h"
#include "avfcameradebug.h" #include "avfcameradebug.h"
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtCore/qvariant.h> #include <QtCore/qvariant.h>
#include <QtCore/qsysinfo.h> #include <QtCore/qsysinfo.h>
#include <QtCore/qvector.h> #include <QtCore/qvector.h>
@@ -52,28 +54,6 @@ QT_BEGIN_NAMESPACE
namespace { namespace {
QVector<QVideoFrame::PixelFormat> qt_viewfinder_pixel_formats(AVCaptureVideoDataOutput *videoOutput)
{
Q_ASSERT(videoOutput);
QVector<QVideoFrame::PixelFormat> qtFormats;
NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes];
for (NSObject *obj in pixelFormats) {
if (![obj isKindOfClass:[NSNumber class]])
continue;
NSNumber *formatAsNSNumber = static_cast<NSNumber *>(obj);
// It's actually FourCharCode (== UInt32):
const QVideoFrame::PixelFormat qtFormat(AVFCameraViewfinderSettingsControl2::
QtPixelFormatFromCVFormat([formatAsNSNumber unsignedIntValue]));
if (qtFormat != QVideoFrame::Format_Invalid)
qtFormats << qtFormat;
}
return qtFormats;
}
bool qt_framerates_sane(const QCameraViewfinderSettings &settings) bool qt_framerates_sane(const QCameraViewfinderSettings &settings)
{ {
const qreal minFPS = settings.minimumFrameRate(); const qreal minFPS = settings.minimumFrameRate();
@@ -269,7 +249,8 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
QVector<AVFPSRange> framerates; QVector<AVFPSRange> framerates;
QVector<QVideoFrame::PixelFormat> pixelFormats(qt_viewfinder_pixel_formats(m_videoOutput)); QVector<QVideoFrame::PixelFormat> pixelFormats(viewfinderPixelFormats());
if (!pixelFormats.size()) if (!pixelFormats.size())
pixelFormats << QVideoFrame::Format_Invalid; // The default value. pixelFormats << QVideoFrame::Format_Invalid; // The default value.
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
@@ -397,6 +378,9 @@ QVideoFrame::PixelFormat AVFCameraViewfinderSettingsControl2::QtPixelFormatFromC
return QVideoFrame::Format_RGB24; return QVideoFrame::Format_RGB24;
case kCVPixelFormatType_24BGR: case kCVPixelFormatType_24BGR:
return QVideoFrame::Format_BGR24; return QVideoFrame::Format_BGR24;
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
return QVideoFrame::Format_NV12;
default: default:
return QVideoFrame::Format_Invalid; return QVideoFrame::Format_Invalid;
} }
@@ -414,6 +398,9 @@ bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame:
case QVideoFrame::Format_BGRA32: case QVideoFrame::Format_BGRA32:
conv = kCVPixelFormatType_32ARGB; conv = kCVPixelFormatType_32ARGB;
break; break;
case QVideoFrame::Format_NV12:
conv = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
break;
// These two formats below are not supported // These two formats below are not supported
// by QSGVideoNodeFactory_RGB, so for now I have to // by QSGVideoNodeFactory_RGB, so for now I have to
// disable them. // disable them.
@@ -467,7 +454,37 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(
return nil; return nil;
} }
bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat, unsigned &avfFormat)const QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinderPixelFormats() const
{
Q_ASSERT(m_videoOutput);
QVector<QVideoFrame::PixelFormat> qtFormats;
QList<QVideoFrame::PixelFormat> filter;
NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes];
const QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface() : 0;
if (surface)
filter = surface->supportedPixelFormats();
for (NSObject *obj in pixelFormats) {
if (![obj isKindOfClass:[NSNumber class]])
continue;
NSNumber *formatAsNSNumber = static_cast<NSNumber *>(obj);
// It's actually FourCharCode (== UInt32):
const QVideoFrame::PixelFormat qtFormat(QtPixelFormatFromCVFormat([formatAsNSNumber unsignedIntValue]));
if (qtFormat != QVideoFrame::Format_Invalid && (!surface || filter.contains(qtFormat))
&& !qtFormats.contains(qtFormat)) { // Can happen, for example, with 8BiPlanar existing in video/full range.
qtFormats << qtFormat;
}
}
return qtFormats;
}
bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat,
unsigned &avfFormat)const
{ {
Q_ASSERT(m_videoOutput); Q_ASSERT(m_videoOutput);
@@ -479,17 +496,25 @@ bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFr
if (!formats || !formats.count) if (!formats || !formats.count)
return false; return false;
if (m_service->videoOutput() && m_service->videoOutput()->surface()) {
const QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
if (!surface->supportedPixelFormats().contains(qtFormat))
return false;
}
bool found = false;
for (NSObject *obj in formats) { for (NSObject *obj in formats) {
if (![obj isKindOfClass:[NSNumber class]]) if (![obj isKindOfClass:[NSNumber class]])
continue; continue;
NSNumber *nsNum = static_cast<NSNumber *>(obj); NSNumber *nsNum = static_cast<NSNumber *>(obj);
if ([nsNum unsignedIntValue] == conv) { if ([nsNum unsignedIntValue] == conv) {
avfFormat = conv; avfFormat = conv;
return true; found = true;
} }
} }
return false; return found;
} }
void AVFCameraViewfinderSettingsControl2::applySettings() void AVFCameraViewfinderSettingsControl2::applySettings()

View File

@@ -605,21 +605,3 @@ HRESULT DirectShowIOSource::QueryDirection(PIN_DIRECTION *pPinDir)
return S_OK; return S_OK;
} }
} }
DirectShowRcSource::DirectShowRcSource(DirectShowEventLoop *loop)
: DirectShowIOSource(loop)
{
}
bool DirectShowRcSource::open(const QUrl &url)
{
m_file.moveToThread(QCoreApplication::instance()->thread());
m_file.setFileName(QLatin1Char(':') + url.path());
if (m_file.open(QIODevice::ReadOnly)) {
setDevice(&m_file);
return true;
} else {
return false;
}
}

View File

@@ -127,15 +127,4 @@ private:
QMutex m_mutex; QMutex m_mutex;
}; };
class DirectShowRcSource : public DirectShowIOSource
{
public:
DirectShowRcSource(DirectShowEventLoop *loop);
bool open(const QUrl &url);
private:
QFile m_file;
};
#endif #endif

View File

@@ -289,15 +289,6 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
fileSource->Release(); fileSource->Release();
locker->relock(); locker->relock();
} }
} else if (m_url.scheme() == QLatin1String("qrc")) {
DirectShowRcSource *rcSource = new DirectShowRcSource(m_loop);
locker->unlock();
if (rcSource->open(m_url) && SUCCEEDED(hr = m_graph->AddFilter(rcSource, L"Source")))
source = rcSource;
else
rcSource->Release();
locker->relock();
} }
if (!SUCCEEDED(hr)) { if (!SUCCEEDED(hr)) {

View File

@@ -54,7 +54,6 @@ QT_BEGIN_NAMESPACE
QGstreamerPlayerControl::QGstreamerPlayerControl(QGstreamerPlayerSession *session, QObject *parent) QGstreamerPlayerControl::QGstreamerPlayerControl(QGstreamerPlayerSession *session, QObject *parent)
: QMediaPlayerControl(parent) : QMediaPlayerControl(parent)
, m_ownStream(false)
, m_session(session) , m_session(session)
, m_userRequestedState(QMediaPlayer::StoppedState) , m_userRequestedState(QMediaPlayer::StoppedState)
, m_currentState(QMediaPlayer::StoppedState) , m_currentState(QMediaPlayer::StoppedState)
@@ -370,31 +369,6 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
emit bufferStatusChanged(0); emit bufferStatusChanged(0);
} }
if (m_stream && m_stream != stream) {
if (m_ownStream)
delete m_stream;
m_stream = 0;
m_ownStream = false;
}
// If the canonical URL refers to a Qt resource, open with QFile and use
// the stream playback capability to play.
if (stream == 0 && content.canonicalUrl().scheme() == QLatin1String("qrc")) {
stream = new QFile(QLatin1Char(':') + content.canonicalUrl().path(), this);
if (!stream->open(QIODevice::ReadOnly)) {
delete stream;
m_mediaStatus = QMediaPlayer::InvalidMedia;
m_currentResource = content;
emit mediaChanged(m_currentResource);
emit error(QMediaPlayer::FormatError, tr("Attempting to play invalid Qt resource"));
if (m_currentState != QMediaPlayer::PlayingState)
m_resources->release();
popAndNotifyState();
return;
}
m_ownStream = true;
}
m_currentResource = content; m_currentResource = content;
m_stream = stream; m_stream = stream;

View File

@@ -116,7 +116,6 @@ private:
void pushState(); void pushState();
void popAndNotifyState(); void popAndNotifyState();
bool m_ownStream;
QGstreamerPlayerSession *m_session; QGstreamerPlayerSession *m_session;
QMediaPlayer::State m_userRequestedState; QMediaPlayer::State m_userRequestedState;
QMediaPlayer::State m_currentState; QMediaPlayer::State m_currentState;

View File

@@ -162,22 +162,6 @@ QByteArray MmRendererMediaPlayerControl::resourcePathForUrl(const QUrl &url)
const QFileInfo fileInfo(relativeFilePath); const QFileInfo fileInfo(relativeFilePath);
return QFile::encodeName(QStringLiteral("file://") + fileInfo.absoluteFilePath()); return QFile::encodeName(QStringLiteral("file://") + fileInfo.absoluteFilePath());
// QRC, copy to temporary file, as mmrenderer does not support resource files
} else if (url.scheme() == QStringLiteral("qrc")) {
const QString qrcPath = ':' + url.path();
const QFileInfo resourceFileInfo(qrcPath);
m_tempMediaFileName = QDir::tempPath() + QStringLiteral("/qtmedia_") +
QUuid::createUuid().toString() + QStringLiteral(".") +
resourceFileInfo.suffix();
if (!QFile::copy(qrcPath, m_tempMediaFileName)) {
const QString errorMsg = QString("Failed to copy resource file to temporary file "
"%1 for playback").arg(m_tempMediaFileName);
qDebug() << errorMsg;
emit error(0, errorMsg);
return QByteArray();
}
return QFile::encodeName(m_tempMediaFileName);
// HTTP or similar URL // HTTP or similar URL
} else { } else {
return url.toEncoded(); return url.toEncoded();
@@ -187,7 +171,7 @@ QByteArray MmRendererMediaPlayerControl::resourcePathForUrl(const QUrl &url)
void MmRendererMediaPlayerControl::attach() void MmRendererMediaPlayerControl::attach()
{ {
// Should only be called in detached state // Should only be called in detached state
Q_ASSERT(m_audioId == -1 && !m_inputAttached && m_tempMediaFileName.isEmpty()); Q_ASSERT(m_audioId == -1 && !m_inputAttached);
if (m_media.isNull() || !m_context) { if (m_media.isNull() || !m_context) {
setMediaStatus(QMediaPlayer::NoMedia); setMediaStatus(QMediaPlayer::NoMedia);
@@ -251,10 +235,6 @@ void MmRendererMediaPlayerControl::detach()
} }
} }
if (!m_tempMediaFileName.isEmpty()) {
QFile::remove(m_tempMediaFileName);
m_tempMediaFileName.clear();
}
m_loadingTimer.stop(); m_loadingTimer.stop();
} }

View File

@@ -156,7 +156,6 @@ private:
bool m_inputAttached; bool m_inputAttached;
int m_stopEventsToIgnore; int m_stopEventsToIgnore;
int m_bufferLevel; int m_bufferLevel;
QString m_tempMediaFileName;
QTimer m_loadingTimer; QTimer m_loadingTimer;
}; };

View File

@@ -752,7 +752,7 @@ void QWinRTMediaPlayerControl::setMedia(const QMediaContent &media, QIODevice *s
} }
emit mediaChanged(media); emit mediaChanged(media);
QString urlString; QString urlString = media.canonicalUrl().toString();
if (!d->stream) { if (!d->stream) {
// If we can read the file via Qt, use the byte stream approach // If we can read the file via Qt, use the byte stream approach
foreach (const QMediaResource &resource, media.resources()) { foreach (const QMediaResource &resource, media.resources()) {

View File

@@ -444,10 +444,6 @@ void QSGVideoItemSurface::stop()
bool QSGVideoItemSurface::present(const QVideoFrame &frame) bool QSGVideoItemSurface::present(const QVideoFrame &frame)
{ {
if (!frame.isValid()) {
qWarning() << Q_FUNC_INFO << "I'm getting bad frames here...";
return false;
}
m_backend->present(frame); m_backend->present(frame);
return true; return true;
} }

View File

@@ -36,7 +36,7 @@
#define QDECLARATIVEVIDEOOUTPUT_RENDER_P_H #define QDECLARATIVEVIDEOOUTPUT_RENDER_P_H
#include "qdeclarativevideooutput_backend_p.h" #include "qdeclarativevideooutput_backend_p.h"
#include "qsgvideonode_i420.h" #include "qsgvideonode_yuv.h"
#include "qsgvideonode_rgb.h" #include "qsgvideonode_rgb.h"
#include "qsgvideonode_texture.h" #include "qsgvideonode_texture.h"
@@ -57,14 +57,14 @@ public:
QDeclarativeVideoRendererBackend(QDeclarativeVideoOutput *parent); QDeclarativeVideoRendererBackend(QDeclarativeVideoOutput *parent);
~QDeclarativeVideoRendererBackend(); ~QDeclarativeVideoRendererBackend();
bool init(QMediaService *service); bool init(QMediaService *service) Q_DECL_OVERRIDE;
void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData); void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData) Q_DECL_OVERRIDE;
void releaseSource(); void releaseSource() Q_DECL_OVERRIDE;
void releaseControl(); void releaseControl() Q_DECL_OVERRIDE;
QSize nativeSize() const; QSize nativeSize() const Q_DECL_OVERRIDE;
void updateGeometry(); void updateGeometry() Q_DECL_OVERRIDE;
QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data); QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data) Q_DECL_OVERRIDE;
QAbstractVideoSurface *videoSurface() const; QAbstractVideoSurface *videoSurface() const Q_DECL_OVERRIDE;
QRectF adjustedViewport() const Q_DECL_OVERRIDE; QRectF adjustedViewport() const Q_DECL_OVERRIDE;
QOpenGLContext *glContext() const; QOpenGLContext *glContext() const;
@@ -86,7 +86,7 @@ private:
QOpenGLContext *m_glContext; QOpenGLContext *m_glContext;
QVideoFrame m_frame; QVideoFrame m_frame;
bool m_frameChanged; bool m_frameChanged;
QSGVideoNodeFactory_I420 m_i420Factory; QSGVideoNodeFactory_YUV m_i420Factory;
QSGVideoNodeFactory_RGB m_rgbFactory; QSGVideoNodeFactory_RGB m_rgbFactory;
QSGVideoNodeFactory_Texture m_textureFactory; QSGVideoNodeFactory_Texture m_textureFactory;
QMutex m_frameMutex; QMutex m_frameMutex;

View File

@@ -46,14 +46,14 @@ public:
QDeclarativeVideoWindowBackend(QDeclarativeVideoOutput *parent); QDeclarativeVideoWindowBackend(QDeclarativeVideoOutput *parent);
~QDeclarativeVideoWindowBackend(); ~QDeclarativeVideoWindowBackend();
bool init(QMediaService *service); bool init(QMediaService *service) Q_DECL_OVERRIDE;
void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData); void itemChange(QQuickItem::ItemChange change, const QQuickItem::ItemChangeData &changeData) Q_DECL_OVERRIDE;
void releaseSource(); void releaseSource() Q_DECL_OVERRIDE;
void releaseControl(); void releaseControl() Q_DECL_OVERRIDE;
QSize nativeSize() const; QSize nativeSize() const Q_DECL_OVERRIDE;
void updateGeometry(); void updateGeometry() Q_DECL_OVERRIDE;
QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data); QSGNode *updatePaintNode(QSGNode *oldNode, QQuickItem::UpdatePaintNodeData *data) Q_DECL_OVERRIDE;
QAbstractVideoSurface *videoSurface() const; QAbstractVideoSurface *videoSurface() const Q_DECL_OVERRIDE;
QRectF adjustedViewport() const Q_DECL_OVERRIDE; QRectF adjustedViewport() const Q_DECL_OVERRIDE;
private: private:

View File

@@ -30,7 +30,7 @@
** $QT_END_LICENSE$ ** $QT_END_LICENSE$
** **
****************************************************************************/ ****************************************************************************/
#include "qsgvideonode_i420.h" #include "qsgvideonode_yuv.h"
#include <QtCore/qmutex.h> #include <QtCore/qmutex.h>
#include <QtQuick/qsgtexturematerial.h> #include <QtQuick/qsgtexturematerial.h>
#include <QtQuick/qsgmaterial.h> #include <QtQuick/qsgmaterial.h>
@@ -40,21 +40,23 @@
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_I420::supportedPixelFormats( QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
QAbstractVideoBuffer::HandleType handleType) const QAbstractVideoBuffer::HandleType handleType) const
{ {
QList<QVideoFrame::PixelFormat> formats; QList<QVideoFrame::PixelFormat> formats;
if (handleType == QAbstractVideoBuffer::NoHandle) if (handleType == QAbstractVideoBuffer::NoHandle) {
formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12; formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12
<< QVideoFrame::Format_NV12 << QVideoFrame::Format_NV21;
}
return formats; return formats;
} }
QSGVideoNode *QSGVideoNodeFactory_I420::createNode(const QVideoSurfaceFormat &format) QSGVideoNode *QSGVideoNodeFactory_YUV::createNode(const QVideoSurfaceFormat &format)
{ {
if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat())) if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return new QSGVideoNode_I420(format); return new QSGVideoNode_YUV(format);
return 0; return 0;
} }
@@ -136,12 +138,85 @@ protected:
int m_id_opacity; int m_id_opacity;
}; };
class QSGVideoMaterialShader_NV_12_21 : public QSGVideoMaterialShader_YUV420
class QSGVideoMaterial_YUV420 : public QSGMaterial
{ {
public: public:
QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format); QSGVideoMaterialShader_NV_12_21(bool isNV21) : m_isNV21(isNV21) {
~QSGVideoMaterial_YUV420(); }
virtual void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial);
protected:
virtual const char *vertexShader() const {
const char *shader =
"uniform highp mat4 qt_Matrix; \n"
"uniform highp float yWidth; \n"
"attribute highp vec4 qt_VertexPosition; \n"
"attribute highp vec2 qt_VertexTexCoord; \n"
"varying highp vec2 yTexCoord; \n"
"void main() { \n"
" yTexCoord = qt_VertexTexCoord * vec2(yWidth, 1);\n"
" gl_Position = qt_Matrix * qt_VertexPosition; \n"
"}";
return shader;
}
virtual const char *fragmentShader() const {
static const char *shaderNV12 =
"uniform sampler2D yTexture; \n"
"uniform sampler2D uvTexture; \n"
"uniform mediump mat4 colorMatrix; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 yTexCoord; \n"
"void main() \n"
"{ \n"
" mediump float Y = texture2D(yTexture, yTexCoord).r; \n"
" mediump vec2 UV = texture2D(uvTexture, yTexCoord).ra; \n"
" mediump vec4 color = vec4(Y, UV.x, UV.y, 1.); \n"
" gl_FragColor = colorMatrix * color * opacity; \n"
"}";
static const char *shaderNV21 =
"uniform sampler2D yTexture; \n"
"uniform sampler2D uvTexture; \n"
"uniform mediump mat4 colorMatrix; \n"
"uniform lowp float opacity; \n"
"varying highp vec2 yTexCoord; \n"
"void main() \n"
"{ \n"
" mediump float Y = texture2D(yTexture, yTexCoord).r; \n"
" mediump vec2 UV = texture2D(uvTexture, yTexCoord).ar; \n"
" mediump vec4 color = vec4(Y, UV.x, UV.y, 1.); \n"
" gl_FragColor = colorMatrix * color * opacity; \n"
"}";
return m_isNV21 ? shaderNV21 : shaderNV12;
}
virtual void initialize() {
m_id_yTexture = program()->uniformLocation("yTexture");
m_id_uTexture = program()->uniformLocation("uvTexture");
m_id_matrix = program()->uniformLocation("qt_Matrix");
m_id_yWidth = program()->uniformLocation("yWidth");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
}
private:
bool m_isNV21;
};
class QSGVideoMaterial_YUV : public QSGMaterial
{
public:
QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format);
~QSGVideoMaterial_YUV();
bool isNV12_21() const {
const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
return pf == QVideoFrame::Format_NV12 || pf == QVideoFrame::Format_NV21;
}
virtual QSGMaterialType *type() const { virtual QSGMaterialType *type() const {
static QSGMaterialType theType; static QSGMaterialType theType;
@@ -149,18 +224,25 @@ public:
} }
virtual QSGMaterialShader *createShader() const { virtual QSGMaterialShader *createShader() const {
const QVideoFrame::PixelFormat pf = m_format.pixelFormat();
if (isNV12_21())
return new QSGVideoMaterialShader_NV_12_21(pf == QVideoFrame::Format_NV21);
return new QSGVideoMaterialShader_YUV420; return new QSGVideoMaterialShader_YUV420;
} }
virtual int compare(const QSGMaterial *other) const { virtual int compare(const QSGMaterial *other) const {
const QSGVideoMaterial_YUV420 *m = static_cast<const QSGVideoMaterial_YUV420 *>(other); const QSGVideoMaterial_YUV *m = static_cast<const QSGVideoMaterial_YUV *>(other);
int d = m_textureIds[0] - m->m_textureIds[0]; int d = m_textureIds[0] - m->m_textureIds[0];
if (d) if (d)
return d; return d;
else if ((d = m_textureIds[1] - m->m_textureIds[1]) != 0)
d = m_textureIds[1] - m->m_textureIds[1];
if (m_textureIds.size() == 2 || d != 0)
return d; return d;
else
return m_textureIds[2] - m->m_textureIds[2]; return m_textureIds[2] - m->m_textureIds[2];
} }
void updateBlending() { void updateBlending() {
@@ -173,13 +255,12 @@ public:
} }
void bind(); void bind();
void bindTexture(int id, int w, int h, const uchar *bits); void bindTexture(int id, int w, int h, const uchar *bits, GLenum format);
QVideoSurfaceFormat m_format; QVideoSurfaceFormat m_format;
QSize m_textureSize; QSize m_textureSize;
static const uint Num_Texture_IDs = 3; QVector<GLuint> m_textureIds;
GLuint m_textureIds[Num_Texture_IDs];
qreal m_opacity; qreal m_opacity;
GLfloat m_yWidth; GLfloat m_yWidth;
@@ -190,13 +271,13 @@ public:
QMutex m_frameMutex; QMutex m_frameMutex;
}; };
QSGVideoMaterial_YUV420::QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &format) : QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
m_format(format), m_format(format),
m_opacity(1.0), m_opacity(1.0),
m_yWidth(1.0), m_yWidth(1.0),
m_uvWidth(1.0) m_uvWidth(1.0)
{ {
memset(m_textureIds, 0, sizeof(m_textureIds)); m_textureIds.resize(isNV12_21() ? 2 : 3);
switch (format.yCbCrColorSpace()) { switch (format.yCbCrColorSpace()) {
case QVideoSurfaceFormat::YCbCr_JPEG: case QVideoSurfaceFormat::YCbCr_JPEG:
@@ -225,20 +306,19 @@ QSGVideoMaterial_YUV420::QSGVideoMaterial_YUV420(const QVideoSurfaceFormat &form
setFlag(Blending, false); setFlag(Blending, false);
} }
QSGVideoMaterial_YUV420::~QSGVideoMaterial_YUV420() QSGVideoMaterial_YUV::~QSGVideoMaterial_YUV()
{ {
if (!m_textureSize.isEmpty()) { if (!m_textureSize.isEmpty()) {
if (QOpenGLContext *current = QOpenGLContext::currentContext()) if (QOpenGLContext *current = QOpenGLContext::currentContext())
current->functions()->glDeleteTextures(Num_Texture_IDs, m_textureIds); current->functions()->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
else else
qWarning() << "QSGVideoMaterial_YUV420: Cannot obtain GL context, unable to delete textures"; qWarning() << "QSGVideoMaterial_YUV: Cannot obtain GL context, unable to delete textures";
} }
} }
void QSGVideoMaterial_YUV420::bind() void QSGVideoMaterial_YUV::bind()
{ {
QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions(); QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
QMutexLocker lock(&m_frameMutex); QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) { if (m_frame.isValid()) {
if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) { if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
@@ -248,31 +328,43 @@ void QSGVideoMaterial_YUV420::bind()
// Frame has changed size, recreate textures... // Frame has changed size, recreate textures...
if (m_textureSize != m_frame.size()) { if (m_textureSize != m_frame.size()) {
if (!m_textureSize.isEmpty()) if (!m_textureSize.isEmpty())
functions->glDeleteTextures(Num_Texture_IDs, m_textureIds); functions->glDeleteTextures(m_textureIds.size(), &m_textureIds[0]);
functions->glGenTextures(Num_Texture_IDs, m_textureIds); functions->glGenTextures(m_textureIds.size(), &m_textureIds[0]);
m_textureSize = m_frame.size(); m_textureSize = m_frame.size();
} }
const int y = 0;
const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
m_uvWidth = qreal(fw) / (2 * m_frame.bytesPerLine(u));
GLint previousAlignment; GLint previousAlignment;
functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment); functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1); functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
functions->glActiveTexture(GL_TEXTURE1); if (isNV12_21()) {
bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u)); const int y = 0;
functions->glActiveTexture(GL_TEXTURE2); const int uv = 1;
bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v));
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y)); m_uvWidth = m_yWidth;
functions->glActiveTexture(GL_TEXTURE1);
bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), GL_LUMINANCE_ALPHA);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
} else {
const int y = 0;
const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
m_yWidth = qreal(fw) / m_frame.bytesPerLine(y);
m_uvWidth = qreal(fw) / (2 * m_frame.bytesPerLine(u));
functions->glActiveTexture(GL_TEXTURE1);
bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), GL_LUMINANCE);
functions->glActiveTexture(GL_TEXTURE2);
bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v), GL_LUMINANCE);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), GL_LUMINANCE);
}
functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment); functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);
m_frame.unmap(); m_frame.unmap();
} }
@@ -280,51 +372,52 @@ void QSGVideoMaterial_YUV420::bind()
} else { } else {
functions->glActiveTexture(GL_TEXTURE1); functions->glActiveTexture(GL_TEXTURE1);
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[1]); functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[1]);
functions->glActiveTexture(GL_TEXTURE2); if (!isNV12_21()) {
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[2]); functions->glActiveTexture(GL_TEXTURE2);
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[2]);
}
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[0]); functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[0]);
} }
} }
void QSGVideoMaterial_YUV420::bindTexture(int id, int w, int h, const uchar *bits) void QSGVideoMaterial_YUV::bindTexture(int id, int w, int h, const uchar *bits, GLenum format)
{ {
QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions(); QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
functions->glBindTexture(GL_TEXTURE_2D, id); functions->glBindTexture(GL_TEXTURE_2D, id);
functions->glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, bits); functions->glTexImage2D(GL_TEXTURE_2D, 0, format, w, h, 0, format, GL_UNSIGNED_BYTE, bits);
functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
} }
QSGVideoNode_I420::QSGVideoNode_I420(const QVideoSurfaceFormat &format) : QSGVideoNode_YUV::QSGVideoNode_YUV(const QVideoSurfaceFormat &format) :
m_format(format) m_format(format)
{ {
setFlag(QSGNode::OwnsMaterial); setFlag(QSGNode::OwnsMaterial);
m_material = new QSGVideoMaterial_YUV420(format); m_material = new QSGVideoMaterial_YUV(format);
setMaterial(m_material); setMaterial(m_material);
} }
QSGVideoNode_I420::~QSGVideoNode_I420() QSGVideoNode_YUV::~QSGVideoNode_YUV()
{ {
} }
void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame, FrameFlags) void QSGVideoNode_YUV::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{ {
m_material->setCurrentFrame(frame); m_material->setCurrentFrame(frame);
markDirty(DirtyMaterial); markDirty(DirtyMaterial);
} }
void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state, void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
QSGMaterial *newMaterial, QSGMaterial *newMaterial,
QSGMaterial *oldMaterial) QSGMaterial *oldMaterial)
{ {
Q_UNUSED(oldMaterial); Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV420 *mat = static_cast<QSGVideoMaterial_YUV420 *>(newMaterial); QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
program()->setUniformValue(m_id_yTexture, 0); program()->setUniformValue(m_id_yTexture, 0);
program()->setUniformValue(m_id_uTexture, 1); program()->setUniformValue(m_id_uTexture, 1);
program()->setUniformValue(m_id_vTexture, 2); program()->setUniformValue(m_id_vTexture, 2);
@@ -342,4 +435,26 @@ void QSGVideoMaterialShader_YUV420::updateState(const RenderState &state,
program()->setUniformValue(m_id_matrix, state.combinedMatrix()); program()->setUniformValue(m_id_matrix, state.combinedMatrix());
} }
void QSGVideoMaterialShader_NV_12_21::updateState(const RenderState &state,
QSGMaterial *newMaterial,
QSGMaterial *oldMaterial)
{
Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
program()->setUniformValue(m_id_yTexture, 0);
program()->setUniformValue(m_id_uTexture, 1);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
program()->setUniformValue(m_id_yWidth, mat->m_yWidth);
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
}
if (state.isMatrixDirty())
program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}
QT_END_NAMESPACE QT_END_NAMESPACE

View File

@@ -31,20 +31,20 @@
** **
****************************************************************************/ ****************************************************************************/
#ifndef QSGVIDEONODE_I420_H #ifndef QSGVIDEONODE_YUV_H
#define QSGVIDEONODE_I420_H #define QSGVIDEONODE_YUV_H
#include <private/qsgvideonode_p.h> #include <private/qsgvideonode_p.h>
#include <QtMultimedia/qvideosurfaceformat.h> #include <QtMultimedia/qvideosurfaceformat.h>
QT_BEGIN_NAMESPACE QT_BEGIN_NAMESPACE
class QSGVideoMaterial_YUV420; class QSGVideoMaterial_YUV;
class QSGVideoNode_I420 : public QSGVideoNode class QSGVideoNode_YUV : public QSGVideoNode
{ {
public: public:
QSGVideoNode_I420(const QVideoSurfaceFormat &format); QSGVideoNode_YUV(const QVideoSurfaceFormat &format);
~QSGVideoNode_I420(); ~QSGVideoNode_YUV();
virtual QVideoFrame::PixelFormat pixelFormat() const { virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat(); return m_format.pixelFormat();
@@ -58,10 +58,10 @@ private:
void bindTexture(int id, int unit, int w, int h, const uchar *bits); void bindTexture(int id, int unit, int w, int h, const uchar *bits);
QVideoSurfaceFormat m_format; QVideoSurfaceFormat m_format;
QSGVideoMaterial_YUV420 *m_material; QSGVideoMaterial_YUV *m_material;
}; };
class QSGVideoNodeFactory_I420 : public QSGVideoNodeFactoryInterface { class QSGVideoNodeFactory_YUV : public QSGVideoNodeFactoryInterface {
public: public:
QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const; QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format); QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
@@ -69,4 +69,4 @@ public:
QT_END_NAMESPACE QT_END_NAMESPACE
#endif // QSGVIDEONODE_I420_H #endif // QSGVIDEONODE_YUV_H

View File

@@ -21,7 +21,7 @@ SOURCES += \
qdeclarativevideooutput.cpp \ qdeclarativevideooutput.cpp \
qdeclarativevideooutput_render.cpp \ qdeclarativevideooutput_render.cpp \
qdeclarativevideooutput_window.cpp \ qdeclarativevideooutput_window.cpp \
qsgvideonode_i420.cpp \ qsgvideonode_yuv.cpp \
qsgvideonode_rgb.cpp \ qsgvideonode_rgb.cpp \
qsgvideonode_texture.cpp qsgvideonode_texture.cpp
@@ -29,6 +29,6 @@ HEADERS += \
$$PRIVATE_HEADERS \ $$PRIVATE_HEADERS \
qdeclarativevideooutput_render_p.h \ qdeclarativevideooutput_render_p.h \
qdeclarativevideooutput_window_p.h \ qdeclarativevideooutput_window_p.h \
qsgvideonode_i420.h \ qsgvideonode_yuv.h \
qsgvideonode_rgb.h \ qsgvideonode_rgb.h \
qsgvideonode_texture.h qsgvideonode_texture.h

View File

@@ -2,6 +2,7 @@ CONFIG += testcase no_private_qt_headers_warning
TARGET = tst_qmediaplayer TARGET = tst_qmediaplayer
QT += network multimedia-private testlib QT += network multimedia-private testlib
SOURCES += tst_qmediaplayer.cpp SOURCES += tst_qmediaplayer.cpp
RESOURCES += testdata.qrc
include (../qmultimedia_common/mock.pri) include (../qmultimedia_common/mock.pri)
include (../qmultimedia_common/mockplayer.pri) include (../qmultimedia_common/mockplayer.pri)

View File

@@ -0,0 +1,5 @@
<!DOCTYPE RCC><RCC version="1.0">
<qresource prefix="/">
<file>testdata/nokia-tune.mp3</file>
</qresource>
</RCC>

Binary file not shown.

View File

@@ -133,6 +133,8 @@ private slots:
void testPlayerFlags(); void testPlayerFlags();
void testDestructor(); void testDestructor();
void testSupportedMimeTypes(); void testSupportedMimeTypes();
void testQrc_data();
void testQrc();
private: private:
void setupCommonTestData(); void setupCommonTestData();
@@ -976,12 +978,17 @@ void tst_QMediaPlayer::testPlaylist()
player->setPlaylist(playlist); player->setPlaylist(playlist);
player->play(); player->play();
QCOMPARE(ss.count(), 1); QCOMPARE(ss.count(), 1);
QCOMPARE(ms.count(), 1);
QCOMPARE(qvariant_cast<QMediaPlayer::MediaStatus>(ms.last().value(0)), QMediaPlayer::LoadingMedia);
ms.clear();
mockService->setState(QMediaPlayer::StoppedState, QMediaPlayer::InvalidMedia); mockService->setState(QMediaPlayer::StoppedState, QMediaPlayer::InvalidMedia);
QCOMPARE(player->state(), QMediaPlayer::PlayingState); QCOMPARE(player->state(), QMediaPlayer::PlayingState);
QCOMPARE(player->mediaStatus(), QMediaPlayer::InvalidMedia); QCOMPARE(player->mediaStatus(), QMediaPlayer::LoadingMedia);
QCOMPARE(ss.count(), 1); QCOMPARE(ss.count(), 1);
QCOMPARE(ms.count(), 1); QCOMPARE(ms.count(), 2);
QCOMPARE(qvariant_cast<QMediaPlayer::MediaStatus>(ms.at(0).value(0)), QMediaPlayer::InvalidMedia);
QCOMPARE(qvariant_cast<QMediaPlayer::MediaStatus>(ms.at(1).value(0)), QMediaPlayer::LoadingMedia);
// NOTE: status should begin transitioning through to BufferedMedia. // NOTE: status should begin transitioning through to BufferedMedia.
QCOMPARE(player->currentMedia(), content1); QCOMPARE(player->currentMedia(), content1);
@@ -1210,5 +1217,84 @@ void tst_QMediaPlayer::testSupportedMimeTypes()
// This is empty on some platforms, and not on others, so can't test something here at the moment. // This is empty on some platforms, and not on others, so can't test something here at the moment.
} }
void tst_QMediaPlayer::testQrc_data()
{
QTest::addColumn<QMediaContent>("mediaContent");
QTest::addColumn<QMediaPlayer::MediaStatus>("status");
QTest::addColumn<QMediaPlayer::Error>("error");
QTest::addColumn<int>("errorCount");
QTest::addColumn<bool>("hasStreamFeature");
QTest::addColumn<QString>("backendMediaContentScheme");
QTest::addColumn<bool>("backendHasStream");
QTest::newRow("invalid") << QMediaContent(QUrl(QLatin1String("qrc:/invalid.mp3")))
<< QMediaPlayer::InvalidMedia
<< QMediaPlayer::ResourceError
<< 1 // error count
<< false // No StreamPlayback support
<< QString() // backend should not have got any media (empty URL scheme)
<< false; // backend should not have got any stream
QTest::newRow("valid+nostream") << QMediaContent(QUrl(QLatin1String("qrc:/testdata/nokia-tune.mp3")))
<< QMediaPlayer::LoadingMedia
<< QMediaPlayer::NoError
<< 0 // error count
<< false // No StreamPlayback support
<< QStringLiteral("file") // backend should have a got a temporary file
<< false; // backend should not have got any stream
QTest::newRow("valid+stream") << QMediaContent(QUrl(QLatin1String("qrc:/testdata/nokia-tune.mp3")))
<< QMediaPlayer::LoadingMedia
<< QMediaPlayer::NoError
<< 0 // error count
<< true // StreamPlayback support
<< QStringLiteral("qrc")
<< true; // backend should have got a stream (QFile opened from the resource)
}
// Verify how QMediaPlayer handles qrc: media, driven by testQrc_data():
// the status/error/signal behavior seen by the application, and the media
// (URL scheme + optional stream) actually forwarded to the backend control.
void tst_QMediaPlayer::testQrc()
{
QFETCH(QMediaContent, mediaContent);
QFETCH(QMediaPlayer::MediaStatus, status);
QFETCH(QMediaPlayer::Error, error);
QFETCH(int, errorCount);
QFETCH(bool, hasStreamFeature);
QFETCH(QString, backendMediaContentScheme);
QFETCH(bool, backendHasStream);
// Advertise StreamPlayback on the mock provider before the player is
// constructed, so the service is selected with that capability.
if (hasStreamFeature)
mockProvider->setSupportedFeatures(QMediaServiceProviderHint::StreamPlayback);
QMediaPlayer player;
mockService->setState(QMediaPlayer::PlayingState, QMediaPlayer::NoMedia);
// Spies must exist before setMedia() so no emission is missed.
QSignalSpy mediaSpy(&player, SIGNAL(currentMediaChanged(QMediaContent)));
QSignalSpy statusSpy(&player, SIGNAL(mediaStatusChanged(QMediaPlayer::MediaStatus)));
QSignalSpy errorSpy(&player, SIGNAL(error(QMediaPlayer::Error)));
player.setMedia(mediaContent);
// Resource loading is asynchronous; QTRY_COMPARE spins the event loop
// until the expected status is reached.
QTRY_COMPARE(player.mediaStatus(), status);
QCOMPARE(statusSpy.count(), 1);
QCOMPARE(qvariant_cast<QMediaPlayer::MediaStatus>(statusSpy.last().value(0)), status);
// The application-visible media is always the original qrc content,
// regardless of what was handed to the backend.
QCOMPARE(player.media(), mediaContent);
QCOMPARE(player.currentMedia(), mediaContent);
QCOMPARE(mediaSpy.count(), 1);
QCOMPARE(qvariant_cast<QMediaContent>(mediaSpy.last().value(0)), mediaContent);
QCOMPARE(player.error(), error);
QCOMPARE(errorSpy.count(), errorCount);
if (errorCount > 0) {
QCOMPARE(qvariant_cast<QMediaPlayer::Error>(errorSpy.last().value(0)), error);
QVERIFY(!player.errorString().isEmpty());
}
// Check the media actually passed to the backend
QCOMPARE(mockService->mockControl->media().canonicalUrl().scheme(), backendMediaContentScheme);
QCOMPARE(bool(mockService->mockControl->mediaStream()), backendHasStream);
}
QTEST_GUILESS_MAIN(tst_QMediaPlayer) QTEST_GUILESS_MAIN(tst_QMediaPlayer)
#include "tst_qmediaplayer.moc" #include "tst_qmediaplayer.moc"

View File

@@ -91,11 +91,10 @@ public:
{ {
_stream = stream; _stream = stream;
_media = content; _media = content;
if (_state != QMediaPlayer::StoppedState) { _mediaStatus = _media.isNull() ? QMediaPlayer::NoMedia : QMediaPlayer::LoadingMedia;
_mediaStatus = _media.isNull() ? QMediaPlayer::NoMedia : QMediaPlayer::LoadingMedia; if (_state != QMediaPlayer::StoppedState)
emit stateChanged(_state = QMediaPlayer::StoppedState); emit stateChanged(_state = QMediaPlayer::StoppedState);
emit mediaStatusChanged(_mediaStatus); emit mediaStatusChanged(_mediaStatus);
}
emit mediaChanged(_media = content); emit mediaChanged(_media = content);
} }
QIODevice *mediaStream() const { return _stream; } QIODevice *mediaStream() const { return _stream; }

View File

@@ -61,6 +61,16 @@ public:
} }
} }
// Report the features configured via setSupportedFeatures() for any service
// (the service argument is ignored by this mock).
QMediaServiceProviderHint::Features supportedFeatures(const QMediaService *) const
{
return features;
}
// Test hook: set the feature flags (e.g. StreamPlayback) that
// supportedFeatures() will report for subsequently created players.
void setSupportedFeatures(QMediaServiceProviderHint::Features f)
{
features = f;
}
QByteArray defaultDevice(const QByteArray &serviceType) const QByteArray defaultDevice(const QByteArray &serviceType) const
{ {
if (serviceType == Q_MEDIASERVICE_CAMERA) if (serviceType == Q_MEDIASERVICE_CAMERA)
@@ -97,6 +107,7 @@ public:
QMediaService *service; QMediaService *service;
bool deleteServiceOnRelease; bool deleteServiceOnRelease;
QMediaServiceProviderHint::Features features;
}; };
#endif // MOCKMEDIASERVICEPROVIDER_H #endif // MOCKMEDIASERVICEPROVIDER_H