Merge remote-tracking branch 'origin/5.4' into dev
Change-Id: I8b9177d90afac8b834d333efc6c22b6b35dceaf8

config.tests/linux_v4l/linux_v4l.pro (new file)
@@ -0,0 +1 @@
+SOURCES += main.cpp
@@ -1,6 +1,6 @@
 /****************************************************************************
 **
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
 ** Contact: http://www.qt-project.org/legal
 **
 ** This file is part of the Qt Toolkit.
@@ -39,22 +39,9 @@
 **
 ****************************************************************************/

-#include "qt7backend.h"
+#include <linux/videodev2.h>

-#import <Foundation/NSAutoreleasePool.h>
+int main(int argc, char** argv)
-#include <CoreFoundation/CFBase.h>


-QT_BEGIN_NAMESPACE

-AutoReleasePool::AutoReleasePool()
 {
-pool = (void*)[[NSAutoreleasePool alloc] init];
+return 0;
 }

-AutoReleasePool::~AutoReleasePool()
-{
-[(NSAutoreleasePool*)pool release];
-}

-QT_END_NAMESPACE
@@ -30,8 +30,11 @@
 ** $QT_END_LICENSE$
 **
 ****************************************************************************/
+#ifndef _WIN32_WCE
 #include <wmp.h>
+#else
+#include <wmpcore.h>
+#endif

 int main(int, char**)
 {
@@ -3,4 +3,5 @@ CONFIG += console

 SOURCES += main.cpp

-LIBS += -lstrmiids -lole32 -lOleaut32 -luser32 -lgdi32
+LIBS += -lstrmiids -lole32 -lOleaut32
+!wince*:LIBS += -luser32 -lgdi32

dist/changes-5.3.2 (new file)
@@ -0,0 +1,65 @@
+Qt 5.3.2 is a bug-fix release. It maintains both forward and backward
+compatibility (source and binary) with Qt 5.3.0 and 5.3.1.
+
+For more details, refer to the online documentation included in this
+distribution. The documentation is also available online:
+
+  http://qt-project.org/doc/qt-5.3
+
+The Qt version 5.3 series is binary compatible with the 5.2.x series.
+Applications compiled for 5.2 will continue to run with 5.3.
+
+Some of the changes listed in this file include issue tracking numbers
+corresponding to tasks in the Qt Bug Tracker:
+
+  http://bugreports.qt-project.org/
+
+Each of these identifiers can be entered in the bug tracker to obtain more
+information about a particular change.
+
+****************************************************************************
+* Platform Specific Changes *
+****************************************************************************
+
+Android
+-------
+
+- Fixed regression causing videos recorded with the camera not to be registered with the Android
+  media scanner, making them invisible to media browsing apps.
+- Fixed crash when unloading a QCamera while a recording is active.
+- [QTBUG-39307] Setting camera parameters on the QML Camera type (e.g. digitalZoom, flashMode)
+  now works correctly when set before the camera is loaded.
+- [QTBUG-40208] QAudioOutput::setNotifyInterval() can now be used when the output is active.
+- [QTBUG-40274] Fixed metadata not being loaded by the MediaPlayer when playing a remote media,
+  from a qrc file or from assets.
+
+iOS
+---
+
+- [QTBUG-39036] Audio played using SoundEffect or QAudioOutput is now correctly muted when the
+  device is set to silent mode or when the screen is locked.
+- [QTBUG-39385] The last video frame displayed in a QML VideoOutput doesn't remain on screen
+  anymore after destroying the VideoOutput.
+
+Linux
+-----
+
+- MediaPlayer's loops property now works correctly when playing a media from a qrc file.
+- [QTBUG-29742] Fixed Qt apps hanging when audio APIs are used and PulseAudio is not running.
+- [QTBUG-39949] Fixed QMediaRecorder::setOutputLocation() not working with QUrl::fromLocalFile().
+
+OS X
+----
+
+- Application doesn't freeze anymore when changing the system audio output device while audio
+  is being played with QSoundEffect or QAudioOutput.
+- [QTBUG-38666] Video frames are now correctly positioned on screen when playing a live stream
+  in a QVideoWidget. This also applies to iOS.
+- [QTBUG-38668] Fixed crash when setting QMediaRecorder's output location to a URL containing
+  nonstandard characters.
+
+Windows
+-------
+
+- The DirectShow camera backend has been almost entirely rewritten. It doesn't provide any new
+  feature but it now works as it should.
@@ -315,20 +315,11 @@ void InputTest::initializeAudio()
 void InputTest::createAudioInput()
 {
 m_audioInput = new QAudioInput(m_device, m_format, this);
-connect(m_audioInput, SIGNAL(notify()), SLOT(notified()));
-connect(m_audioInput, SIGNAL(stateChanged(QAudio::State)), SLOT(handleStateChanged(QAudio::State)));
 m_volumeSlider->setValue(m_audioInput->volume() * 100);
 m_audioInfo->start();
 m_audioInput->start(m_audioInfo);
 }

-void InputTest::notified()
-{
-qWarning() << "bytesReady = " << m_audioInput->bytesReady()
-<< ", " << "elapsedUSecs = " <<m_audioInput->elapsedUSecs()
-<< ", " << "processedUSecs = "<<m_audioInput->processedUSecs();
-}

 void InputTest::readMore()
 {
 if (!m_audioInput)
@@ -364,27 +355,19 @@ void InputTest::toggleSuspend()
 {
 // toggle suspend/resume
 if (m_audioInput->state() == QAudio::SuspendedState) {
-qWarning() << "status: Suspended, resume()";
 m_audioInput->resume();
 m_suspendResumeButton->setText(tr(SUSPEND_LABEL));
 } else if (m_audioInput->state() == QAudio::ActiveState) {
-qWarning() << "status: Active, suspend()";
 m_audioInput->suspend();
 m_suspendResumeButton->setText(tr(RESUME_LABEL));
 } else if (m_audioInput->state() == QAudio::StoppedState) {
-qWarning() << "status: Stopped, resume()";
 m_audioInput->resume();
 m_suspendResumeButton->setText(tr(SUSPEND_LABEL));
 } else if (m_audioInput->state() == QAudio::IdleState) {
-qWarning() << "status: IdleState";
+// no-op
 }
 }

-void InputTest::handleStateChanged(QAudio::State state)
-{
-qWarning() << "state = " << state;
-}

 void InputTest::refreshDisplay()
 {
 m_canvas->setLevel(m_audioInfo->level());
@@ -110,11 +110,9 @@ private:

 private slots:
 void refreshDisplay();
-void notified();
 void readMore();
 void toggleMode();
 void toggleSuspend();
-void handleStateChanged(QAudio::State state);
 void deviceChanged(int index);
 void sliderChanged(int value);

@@ -247,8 +247,6 @@ void AudioTest::createAudioOutput()
 delete m_audioOutput;
 m_audioOutput = 0;
 m_audioOutput = new QAudioOutput(m_device, m_format, this);
-connect(m_audioOutput, SIGNAL(notify()), SLOT(notified()));
-connect(m_audioOutput, SIGNAL(stateChanged(QAudio::State)), SLOT(handleStateChanged(QAudio::State)));
 m_generator->start();
 m_audioOutput->start(m_generator);
 m_volumeSlider->setValue(int(m_audioOutput->volume()*100.0f));
@@ -275,13 +273,6 @@ void AudioTest::volumeChanged(int value)
 m_audioOutput->setVolume(qreal(value/100.0f));
 }

-void AudioTest::notified()
-{
-qWarning() << "bytesFree = " << m_audioOutput->bytesFree()
-<< ", " << "elapsedUSecs = " << m_audioOutput->elapsedUSecs()
-<< ", " << "processedUSecs = " << m_audioOutput->processedUSecs();
-}

 void AudioTest::pullTimerExpired()
 {
 if (m_audioOutput && m_audioOutput->state() != QAudio::StoppedState) {
@@ -319,23 +310,16 @@ void AudioTest::toggleMode()
 void AudioTest::toggleSuspendResume()
 {
 if (m_audioOutput->state() == QAudio::SuspendedState) {
-qWarning() << "status: Suspended, resume()";
 m_audioOutput->resume();
 m_suspendResumeButton->setText(tr(SUSPEND_LABEL));
 } else if (m_audioOutput->state() == QAudio::ActiveState) {
-qWarning() << "status: Active, suspend()";
 m_audioOutput->suspend();
 m_suspendResumeButton->setText(tr(RESUME_LABEL));
 } else if (m_audioOutput->state() == QAudio::StoppedState) {
-qWarning() << "status: Stopped, resume()";
 m_audioOutput->resume();
 m_suspendResumeButton->setText(tr(SUSPEND_LABEL));
 } else if (m_audioOutput->state() == QAudio::IdleState) {
-qWarning() << "status: IdleState";
+// no-op
 }
 }

-void AudioTest::handleStateChanged(QAudio::State state)
-{
-qWarning() << "state = " << state;
-}
@@ -110,11 +110,9 @@ private:
 QByteArray m_buffer;

 private slots:
-void notified();
 void pullTimerExpired();
 void toggleMode();
 void toggleSuspendResume();
-void handleStateChanged(QAudio::State state);
 void deviceChanged(int index);
 void volumeChanged(int);
 };
@@ -114,6 +114,8 @@ AudioRecorder::AudioRecorder(QWidget *parent) :
 SLOT(updateProgress(qint64)));
 connect(audioRecorder, SIGNAL(statusChanged(QMediaRecorder::Status)), this,
 SLOT(updateStatus(QMediaRecorder::Status)));
+connect(audioRecorder, SIGNAL(stateChanged(QMediaRecorder::State)),
+this, SLOT(onStateChanged(QMediaRecorder::State)));
 connect(audioRecorder, SIGNAL(error(QMediaRecorder::Error)), this,
 SLOT(displayErrorMessage()));
 }
@@ -138,47 +140,44 @@ void AudioRecorder::updateStatus(QMediaRecorder::Status status)

 switch (status) {
 case QMediaRecorder::RecordingStatus:
-if (audioLevels.count() != audioRecorder->audioSettings().channelCount()) {
+statusMessage = tr("Recording to %1").arg(audioRecorder->actualLocation().toString());
-qDeleteAll(audioLevels);
-audioLevels.clear();
-for (int i = 0; i < audioRecorder->audioSettings().channelCount(); ++i) {
-QAudioLevel *level = new QAudioLevel(ui->centralwidget);
-audioLevels.append(level);
-ui->levelsLayout->addWidget(level);
-}
-}

-ui->recordButton->setText(tr("Stop"));
-ui->pauseButton->setText(tr("Pause"));
-if (audioRecorder->outputLocation().isEmpty())
-statusMessage = tr("Recording");
-else
-statusMessage = tr("Recording to %1").arg(
-audioRecorder->outputLocation().toString());
 break;
 case QMediaRecorder::PausedStatus:
 clearAudioLevels();
-ui->recordButton->setText(tr("Stop"));
-ui->pauseButton->setText(tr("Resume"));
 statusMessage = tr("Paused");
 break;
 case QMediaRecorder::UnloadedStatus:
 case QMediaRecorder::LoadedStatus:
 clearAudioLevels();
-ui->recordButton->setText(tr("Record"));
-ui->pauseButton->setText(tr("Pause"));
 statusMessage = tr("Stopped");
 default:
 break;
 }

-ui->pauseButton->setEnabled(audioRecorder->state()
-!= QMediaRecorder::StoppedState);

 if (audioRecorder->error() == QMediaRecorder::NoError)
 ui->statusbar->showMessage(statusMessage);
 }

+void AudioRecorder::onStateChanged(QMediaRecorder::State state)
+{
+switch (state) {
+case QMediaRecorder::RecordingState:
+ui->recordButton->setText(tr("Stop"));
+ui->pauseButton->setText(tr("Pause"));
+break;
+case QMediaRecorder::PausedState:
+ui->recordButton->setText(tr("Stop"));
+ui->pauseButton->setText(tr("Resume"));
+break;
+case QMediaRecorder::StoppedState:
+ui->recordButton->setText(tr("Record"));
+ui->pauseButton->setText(tr("Pause"));
+break;
+}
+
+ui->pauseButton->setEnabled(audioRecorder->state() != QMediaRecorder::StoppedState);
+}

 static QVariant boxValue(const QComboBox *box)
 {
 int idx = box->currentIndex();
@@ -347,6 +346,16 @@ QVector<qreal> getBufferLevels(const T *buffer, int frames, int channels)

 void AudioRecorder::processBuffer(const QAudioBuffer& buffer)
 {
+if (audioLevels.count() != buffer.format().channelCount()) {
+qDeleteAll(audioLevels);
+audioLevels.clear();
+for (int i = 0; i < buffer.format().channelCount(); ++i) {
+QAudioLevel *level = new QAudioLevel(ui->centralwidget);
+audioLevels.append(level);
+ui->levelsLayout->addWidget(level);
+}
+}

 QVector<qreal> levels = getBufferLevels(buffer);
 for (int i = 0; i < levels.count(); ++i)
 audioLevels.at(i)->setLevel(levels.at(i));
@@ -71,6 +71,7 @@ private slots:
 void toggleRecord();

 void updateStatus(QMediaRecorder::Status);
+void onStateChanged(QMediaRecorder::State);
 void updateProgress(qint64 pos);
 void displayErrorMessage();


BIN  examples/multimedia/video/doc/images/qmlvideo-menu.jpg  (new file, 21 KiB)
BIN  examples/multimedia/video/doc/images/qmlvideo-overlay.jpg  (new file, 23 KiB)
@@ -46,7 +46,7 @@ The following image shows the application executing the video-overlay scene,
 which creates a dummy overlay item (just a semi-transparent \l{Rectangle}),
 which moves across the \l{VideoOutput} item.

-\image qmlvideo-overlay.png
+\image qmlvideo-overlay.jpg

 \include examples-run.qdocinc

@@ -67,7 +67,7 @@ the following items:
 average over the past second.
 \endlist

-\image qmlvideo-menu.png
+\image qmlvideo-menu.jpg

 Each scene in the flickable list is implemented in its own QML file - for
 example the video-basic scene (which just displays a static \l{VideoOutput}
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!-- Created with Inkscape (http://www.inkscape.org/) -->
-
-<svg
-xmlns:dc="http://purl.org/dc/elements/1.1/"
-xmlns:cc="http://creativecommons.org/ns#"
-xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-xmlns:svg="http://www.w3.org/2000/svg"
-xmlns="http://www.w3.org/2000/svg"
-xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-width="10px"
-height="46px"
-version="1.1">
-<g>
-<defs>
-<linearGradient id="MyGradient1" x1="0%" y1="0%" x2="100%" y2="100%">
-<stop offset="0%" stop-color="lightcyan" />
-<stop offset="100%" stop-color="dodgerblue" />
-</linearGradient>
-</defs>
-<rect
-stroke="white"
-fill="url(#MyGradient1)"
-stroke-linecap="round"
-stroke-linejoin="round"
-stroke-width="2"
-width="8"
-height="44"
-x="1"
-y="1"
-rx="4"
-ry="4"/>
-</g>
-</svg>
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!-- Created with Inkscape (http://www.inkscape.org/) -->
-
-<svg
-xmlns:dc="http://purl.org/dc/elements/1.1/"
-xmlns:cc="http://creativecommons.org/ns#"
-xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-xmlns:svg="http://www.w3.org/2000/svg"
-xmlns="http://www.w3.org/2000/svg"
-xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-width="10px"
-height="46px"
-version="1.1">
-<g>
-<defs>
-<linearGradient id="MyGradient1" x1="0%" y1="0%" x2="100%" y2="100%">
-<stop offset="0%" stop-color="skyblue" />
-<stop offset="100%" stop-color="darkblue" />
-</linearGradient>
-</defs>
-<rect
-stroke="white"
-fill="url(#MyGradient1)"
-stroke-linecap="round"
-stroke-linejoin="round"
-stroke-width="2"
-width="8"
-height="44"
-x="1"
-y="1"
-rx="4"
-ry="4"/>
-</g>
-</svg>
@@ -1,5 +0,0 @@
-border.left: 10
-border.top: 12
-border.bottom: 12
-border.right: 10
-source: titlebar.png
@@ -123,13 +123,14 @@ int main(int argc, char *argv[])

 const QStringList moviesLocation = QStandardPaths::standardLocations(QStandardPaths::MoviesLocation);
 const QUrl videoPath =
 QUrl::fromLocalFile(moviesLocation.isEmpty() ?
 app.applicationDirPath() :
 moviesLocation.front());
 viewer.rootContext()->setContextProperty("videoPath", videoPath);

 QMetaObject::invokeMethod(rootObject, "init");

+viewer.setMinimumSize(QSize(640, 360));
 viewer.show();

 return app.exec();
@@ -33,30 +33,31 @@

 import QtQuick 2.0

-Rectangle {
+Item {
 id: root
-color: textColor
-radius: 0.25 * height

 property string text
-property color bgColor: "white"
+property color bgColor: "#757575"
-property color bgColorSelected: "red"
+property color bgColorSelected: "#bdbdbd"
-property color textColor: "black"
+property color textColor: "white"
+property color textColorSelected: "black"
 property alias enabled: mouseArea.enabled
+property alias radius: bgr.radius

 signal clicked

 Rectangle {
-anchors { fill: parent; margins: 1 }
+id: bgr
+anchors.fill: parent
 color: mouseArea.pressed ? bgColorSelected : bgColor
-radius: 0.25 * height
+radius: height / 15

 Text {
 id: text
 anchors.centerIn: parent
 text: root.text
-font.pixelSize: 0.5 * parent.height
+font.pixelSize: 0.4 * parent.height
-color: mouseArea.pressed ? bgColor : textColor
+color: mouseArea.pressed ? textColorSelected : textColor
 horizontalAlignment: Text.AlignHCenter
 verticalAlignment: Text.AlignVCenter
 }
@@ -97,21 +97,16 @@ Rectangle {
 }

 function initialize() {
-console.log("[qmlvideo] Content.initialize: contentType " + contentType)
 if ("video" == contentType) {
-console.log("[qmlvideo] Content.initialize: loading VideoItem.qml")
 contentLoader.source = "VideoItem.qml"
 if (Loader.Error == contentLoader.status) {
-console.log("[qmlvideo] Content.initialize: loading VideoDummy.qml")
 contentLoader.source = "VideoDummy.qml"
 dummy = true
 }
 contentLoader.item.volume = volume
 } else if ("camera" == contentType) {
-console.log("[qmlvideo] Content.initialize: loading CameraItem.qml")
 contentLoader.source = "CameraItem.qml"
 if (Loader.Error == contentLoader.status) {
-console.log("[qmlvideo] Content.initialize: loading CameraDummy.qml")
 contentLoader.source = "CameraDummy.qml"
 dummy = true
 }
@@ -127,12 +122,10 @@ Rectangle {
 if (root.autoStart)
 root.start()
 }
-console.log("[qmlvideo] Content.initialize: complete")
 root.initialized()
 }

 function start() {
-console.log("[qmlvideo] Content.start")
 if (contentLoader.item) {
 if (root.contentType == "video")
 contentLoader.item.mediaSource = root.source
@@ -142,7 +135,6 @@ Rectangle {
 }

 function stop() {
-console.log("[qmlvideo] Content.stop")
 if (contentLoader.item) {
 contentLoader.item.stop()
 if (root.contentType == "video")
@@ -1,42 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
-** Contact: http://www.qt-project.org/legal
-**
-** This file is part of the Qt Mobility Components.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and Digia. For licensing terms and
-** conditions see http://qt.digia.com/licensing. For further information
-** use the contact form at http://qt.digia.com/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Digia gives you certain additional
-** rights. These rights are described in the Digia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-import QtQuick 2.0
-import QtSystemInfo 5.0
-// NOTE: The QtSystemInfo module is not yet part of Qt 5
-
-Item {
-ScreenSaver {
-screenSaverInhibited: true
-}
-}
@@ -38,6 +38,8 @@ Rectangle {
 color: "transparent"
 opacity: 0.0
 property alias enabled: mouseArea.enabled
+property int dialogWidth: 300
+property int dialogHeight: 200
 state: enabled ? "on" : "baseState"

 states: [
@@ -70,9 +72,9 @@ Rectangle {

 Rectangle {
 anchors.centerIn: parent
-width: 300
+width: dialogWidth
-height: 200
+height: dialogHeight
-radius: 10
+radius: 5
 color: "white"

 Text {
@@ -40,6 +40,9 @@ Rectangle {

 property string folder

+property int itemHeight: Math.min(parent.width, parent.height) / 15
+property int buttonHeight: Math.min(parent.width, parent.height) / 12

 signal fileSelected(string file)

 function selectFile(file) {
@@ -66,12 +69,12 @@ Rectangle {

 Rectangle {
 id: root
-color: "white"
+color: "black"
 property bool showFocusHighlight: false
 property variant folders: folders1
 property variant view: view1
 property alias folder: folders1.folder
-property color textColor: "black"
+property color textColor: "white"

 FolderListModel {
 id: folders1
@@ -103,34 +106,39 @@ Rectangle {
 fileBrowser.selectFile(path)
 }
 width: root.width
-height: 52
+height: folderImage.height
 color: "transparent"

 Rectangle {
-id: highlight; visible: false
+id: highlight
+visible: false
 anchors.fill: parent
-color: palette.highlight
+anchors.leftMargin: 5
-gradient: Gradient {
+anchors.rightMargin: 5
-GradientStop { id: t1; position: 0.0; color: palette.highlight }
+color: "#212121"
-GradientStop { id: t2; position: 1.0; color: Qt.lighter(palette.highlight) }
-}
 }

 Item {
-width: 48; height: 48
+id: folderImage
+width: itemHeight
+height: itemHeight
 Image {
+id: folderPicture
 source: "qrc:/folder.png"
-anchors.centerIn: parent
+width: itemHeight * 0.9
+height: itemHeight * 0.9
+anchors.left: parent.left
+anchors.margins: 5
 visible: folders.isFolder(index)
 }
 }

 Text {
 id: nameText
-anchors.fill: parent; verticalAlignment: Text.AlignVCenter
+anchors.fill: parent;
+verticalAlignment: Text.AlignVCenter
 text: fileName
-anchors.leftMargin: 54
+anchors.leftMargin: itemHeight + 10
-font.pixelSize: 32
 color: (wrapper.ListView.isCurrentItem && root.showFocusHighlight) ? palette.highlightedText : textColor
 elide: Text.ElideRight
 }
@@ -142,7 +150,7 @@ Rectangle {
 root.showFocusHighlight = false;
 wrapper.ListView.view.currentIndex = index;
 }
-onClicked: { if (folders == wrapper.ListView.view.model) launch() }
+onClicked: { if (folders === wrapper.ListView.view.model) launch() }
 }

 states: [
@@ -160,17 +168,12 @@ Rectangle {
 id: view1
 anchors.top: titleBar.bottom
 anchors.bottom: cancelButton.top
-x: 0
 width: parent.width
 model: folders1
 delegate: folderDelegate
 highlight: Rectangle {
-color: palette.highlight
+color: "#212121"
 visible: root.showFocusHighlight && view1.count != 0
-gradient: Gradient {
-GradientStop { id: t1; position: 0.0; color: palette.highlight }
-GradientStop { id: t2; position: 1.0; color: Qt.lighter(palette.highlight) }
-}
 width: view1.currentItem == null ? 0 : view1.currentItem.width
 }
 highlightMoveVelocity: 1000
@@ -215,12 +218,8 @@ Rectangle {
 model: folders2
 delegate: folderDelegate
 highlight: Rectangle {
-color: palette.highlight
+color: "#212121"
 visible: root.showFocusHighlight && view2.count != 0
-gradient: Gradient {
-GradientStop { id: t1; position: 0.0; color: palette.highlight }
-GradientStop { id: t2; position: 1.0; color: Qt.lighter(palette.highlight) }
-}
 width: view1.currentItem == null ? 0 : view1.currentItem.width
 }
 highlightMoveVelocity: 1000
@@ -254,19 +253,29 @@ Rectangle {
 }

 Rectangle {
-id: cancelButton
+width: parent.width
-width: 100
+height: buttonHeight + 10
-height: titleBar.height - 7
+anchors.bottom: parent.bottom
 color: "black"
-anchors { bottom: parent.bottom; horizontalCenter: parent.horizontalCenter }
+}

+Rectangle {
+id: cancelButton
+width: parent.width
+height: buttonHeight
+color: "#212121"
+anchors.bottom: parent.bottom
+anchors.left: parent.left
+anchors.right: parent.right
+anchors.margins: 5
+radius: buttonHeight / 15

 Text {
-anchors { fill: parent; margins: 4 }
+anchors.fill: parent
 text: "Cancel"
 color: "white"
 horizontalAlignment: Text.AlignHCenter
 verticalAlignment: Text.AlignVCenter
-font.pixelSize: 20
 }

 MouseArea {
@@ -277,55 +286,66 @@ Rectangle {

 Keys.onPressed: {
 root.keyPressed(event.key);
-if (event.key == Qt.Key_Return || event.key == Qt.Key_Select || event.key == Qt.Key_Right) {
+if (event.key === Qt.Key_Return || event.key === Qt.Key_Select || event.key === Qt.Key_Right) {
 view.currentItem.launch();
 event.accepted = true;
-} else if (event.key == Qt.Key_Left) {
+} else if (event.key === Qt.Key_Left) {
 up();
 }
 }

-BorderImage {
-source: "qrc:/titlebar.sci";
+Rectangle {
-width: parent.width;
-height: 52
-y: -7
 id: titleBar
+width: parent.width
+height: buttonHeight + 10
+anchors.top: parent.top
+color: "black"

 Rectangle {
-id: upButton
+width: parent.width;
-width: 48
+height: buttonHeight
-height: titleBar.height - 7
+color: "#212121"
-color: "transparent"
+anchors.margins: 5
-Image { anchors.centerIn: parent; source: "qrc:/up.png" }
+anchors.top: parent.top
-MouseArea { id: upRegion; anchors.centerIn: parent
+anchors.left: parent.left
-width: 56
+anchors.right: parent.right
-height: 56
+radius: buttonHeight / 15
-onClicked: up()
-}
+Rectangle {
-states: [
+id: upButton
-State {
+width: buttonHeight
-name: "pressed"
+height: buttonHeight
-when: upRegion.pressed
+color: "transparent"
-PropertyChanges { target: upButton; color: palette.highlight }
+Image {
+width: itemHeight
+height: itemHeight
+anchors.centerIn: parent
+source: "qrc:/up.png"
 }
-]
+MouseArea { id: upRegion; anchors.centerIn: parent
-}
+width: buttonHeight
+height: buttonHeight
+onClicked: up()
+}
+states: [
+State {
+name: "pressed"
+when: upRegion.pressed
+PropertyChanges { target: upButton; color: palette.highlight }
+}
+]
+}

-Rectangle {
+Text {
-color: "gray"
+anchors.left: upButton.right; anchors.right: parent.right; height: parent.height
-x: 48
+anchors.leftMargin: 5; anchors.rightMargin: 5
-width: 1
+text: folders.folder
-height: 44
+color: "white"
-}
+elide: Text.ElideLeft;
+horizontalAlignment: Text.AlignLeft;
-Text {
+verticalAlignment: Text.AlignVCenter
-anchors.left: upButton.right; anchors.right: parent.right; height: parent.height
+}
-anchors.leftMargin: 4; anchors.rightMargin: 4
-text: folders.folder
-color: "white"
-elide: Text.ElideLeft; horizontalAlignment: Text.AlignRight; verticalAlignment: Text.AlignVCenter
-font.pixelSize: 32
 }
 }

@@ -366,14 +386,14 @@ Rectangle {

 function keyPressed(key) {
 switch (key) {
 case Qt.Key_Up:
 case Qt.Key_Down:
 case Qt.Key_Left:
 case Qt.Key_Right:
 root.showFocusHighlight = true;
 break;
 default:
 // do nothing
 break;
 }
 }
@@ -39,9 +39,9 @@ Rectangle {
 property alias buttonHeight: closeButton.height
 property string source1
 property string source2
-property int contentWidth: 250
+property int contentWidth: parent.width / 2
 property real volume: 0.25
-property int margins: 10
+property int margins: 5
 property QtObject content

 signal close
@@ -54,9 +54,12 @@ Rectangle {
 right: parent.right
 margins: root.margins
 }
-width: 50
+width: Math.max(parent.width, parent.height) / 12
-height: 30
+height: Math.min(parent.width, parent.height) / 12
 z: 2.0
+bgColor: "#212121"
+bgColorSelected: "#757575"
+textColorSelected: "white"
 text: "Back"
 onClicked: root.close()
 }
@@ -59,15 +59,13 @@ Scene {
 }
 text: content.started ? "Tap the screen to stop content"
 : "Tap the screen to start content"
-color: "yellow"
+color: "#e0e0e0"
-font.pixelSize: 20
 z: 2.0
 }

 MouseArea {
 anchors.fill: parent
 onClicked: {
-console.log("[qmlvideo] SceneBasic.onClicked, started = " + content.started)
 if (content.started)
 content.stop()
 else
@@ -86,8 +86,7 @@ Scene {
 margins: 20
 }
 text: "Tap on the content to toggle full-screen mode"
-color: "yellow"
+color: "#e0e0e0"
-font.pixelSize: 20
 z: 2.0
 }

@@ -91,8 +91,7 @@ Scene {
 margins: 20
 }
 text: "Tap on the content to toggle full-screen mode"
-color: "yellow"
+color: "#e0e0e0"
-font.pixelSize: 20
 z: 2.0
 }

@@ -65,8 +65,7 @@ Scene {
 margins: 20
 }
 text: content() ? content().started ? "Tap to stop" : "Tap to start" : ""
-color: "yellow"
+color: "#e0e0e0"
-font.pixelSize: 20
 }

 MouseArea {
@@ -53,7 +53,7 @@ Scene {
 y: 0.5 * parent.height
 width: content.width
 height: content.height
-color: "yellow"
+color: "#e0e0e0"
 opacity: 0.5

 SequentialAnimation on x {
@@ -56,7 +56,7 @@ Scene {
 bottom: rotateNegativeButton.top
 margins: parent.margins
 }
-width: 90
+width: Math.max(parent.width, parent.height) / 10
 height: root.buttonHeight
 text: "Rotate +" + delta
 onClicked: content.rotation = content.rotation + delta
@@ -69,7 +69,7 @@ Scene {
 verticalCenter: parent.verticalCenter
 margins: parent.margins
 }
-width: 90
+width: Math.max(parent.width, parent.height) / 10
 height: root.buttonHeight
 text: "Rotate -" + delta
 onClicked: content.rotation = content.rotation - delta
@@ -82,7 +82,7 @@ Scene {
 verticalCenter: parent.verticalCenter
 margins: parent.margins
 }
-width: 30
+width: Math.max(parent.width, parent.height) / 25
 height: root.buttonHeight
 enabled: false
 text: content.rotation % 360
@@ -39,44 +39,64 @@ Rectangle {
 property string sceneSource: ""

 ListModel {
-id: list
+id: videolist
-ListElement { name: "multi"; source: "SceneMulti.qml" }
+ListElement { name: "Multi"; source: "SceneMulti.qml" }
-ListElement { name: "video"; source: "VideoBasic.qml" }
+ListElement { name: "Video"; source: "VideoBasic.qml" }
-ListElement { name: "video-drag"; source: "VideoDrag.qml" }
+ListElement { name: "Drag"; source: "VideoDrag.qml" }
-ListElement { name: "video-fillmode"; source: "VideoFillMode.qml" }
+ListElement { name: "Fillmode"; source: "VideoFillMode.qml" }
-ListElement { name: "video-fullscreen"; source: "VideoFullScreen.qml" }
+ListElement { name: "Fullscreen"; source: "VideoFullScreen.qml" }
-ListElement { name: "video-fullscreen-inverted"; source: "VideoFullScreenInverted.qml" }
+ListElement { name: "Fullscreen-inverted"; source: "VideoFullScreenInverted.qml" }
-ListElement { name: "video-metadata"; source: "VideoMetadata.qml" }
+ListElement { name: "Metadata"; source: "VideoMetadata.qml" }
-ListElement { name: "video-move"; source: "VideoMove.qml" }
+ListElement { name: "Move"; source: "VideoMove.qml" }
-ListElement { name: "video-overlay"; source: "VideoOverlay.qml" }
+ListElement { name: "Overlay"; source: "VideoOverlay.qml" }
-ListElement { name: "video-playbackrate"; source: "VideoPlaybackRate.qml" }
+ListElement { name: "Playback Rate"; source: "VideoPlaybackRate.qml" }
-ListElement { name: "video-resize"; source: "VideoResize.qml" }
+ListElement { name: "Resize"; source: "VideoResize.qml" }
-ListElement { name: "video-rotate"; source: "VideoRotate.qml" }
+ListElement { name: "Rotate"; source: "VideoRotate.qml" }
-ListElement { name: "video-spin"; source: "VideoSpin.qml" }
+ListElement { name: "Spin"; source: "VideoSpin.qml" }
-ListElement { name: "video-seek"; source: "VideoSeek.qml" }
+ListElement { name: "Seek"; source: "VideoSeek.qml" }
-ListElement { name: "camera"; source: "CameraBasic.qml" }
+}
-ListElement { name: "camera-drag"; source: "CameraDrag.qml" }
-ListElement { name: "camera-fullscreen"; source: "CameraFullScreen.qml" }
+ListModel {
-ListElement { name: "camera-fullscreen-inverted"; source: "CameraFullScreenInverted.qml" }
+id: cameralist
-ListElement { name: "camera-move"; source: "CameraMove.qml" }
+ListElement { name: "Camera"; source: "CameraBasic.qml" }
-ListElement { name: "camera-overlay"; source: "CameraOverlay.qml" }
+ListElement { name: "Drag"; source: "CameraDrag.qml" }
-ListElement { name: "camera-resize"; source: "CameraResize.qml" }
+ListElement { name: "Fullscreen"; source: "CameraFullScreen.qml" }
-ListElement { name: "camera-rotate"; source: "CameraRotate.qml" }
+ListElement { name: "Fullscreen-inverted"; source: "CameraFullScreenInverted.qml" }
-ListElement { name: "camera-spin"; source: "CameraSpin.qml" }
+ListElement { name: "Move"; source: "CameraMove.qml" }
+ListElement { name: "Overlay"; source: "CameraOverlay.qml" }
+ListElement { name: "Resize"; source: "CameraResize.qml" }
+ListElement { name: "Rotate"; source: "CameraRotate.qml" }
+ListElement { name: "Spin"; source: "CameraSpin.qml" }
 }

 Component {
-id: delegate
+id: leftDelegate
 Item {
-id: delegateItem
+width: root.width / 2
-width: root.width
+height: 0.8 * itemHeight
-height: itemHeight

 Button {
-id: selectorItem
+anchors.fill: parent
-anchors.centerIn: parent
+anchors.margins: 5
-width: 0.9 * parent.width
+anchors.rightMargin: 2.5
-height: 0.8 * itemHeight
+anchors.bottomMargin: 0
+text: name
+onClicked: root.sceneSource = source
+}
+}
+}

+Component {
+id: rightDelegate
+Item {
+width: root.width / 2
+height: 0.8 * itemHeight

+Button {
+anchors.fill: parent
+anchors.margins: 5
+anchors.leftMargin: 2.5
+anchors.bottomMargin: 0
 text: name
 onClicked: root.sceneSource = source
 }
@@ -85,20 +105,29 @@ Rectangle {

 Flickable {
 anchors.fill: parent
-contentHeight: (itemHeight * list.count) + layout.anchors.topMargin + layout.spacing
+contentHeight: (itemHeight * videolist.count) + 10
 clip: true

-Column {
+Row {
 id: layout

 anchors {
 fill: parent
-topMargin: 10
+topMargin: 5
+bottomMargin: 5
 }

-Repeater {
+Column {
-model: list
+Repeater {
-delegate: delegate
+model: videolist
+delegate: leftDelegate
+}
+}

+Column {
+Repeater {
+model: cameralist
+delegate: rightDelegate
+}
 }
 }
 }
@@ -35,7 +35,7 @@ import QtQuick 2.0

 Item {
 id: seekControl
-height: 46
+height: Math.min(parent.width, parent.height) / 20
 property int duration: 0
 property int playPosition: 0
 property int seekPosition: 0
@@ -45,8 +45,9 @@ Item {
 Rectangle {
 id: background
 anchors.fill: parent
-color: "black"
+color: "white"
 opacity: 0.3
+radius: parent.height / 15
 }

 Rectangle {
@@ -60,7 +61,6 @@ Item {
 Text {
 width: 90
 anchors { left: parent.left; top: parent.top; bottom: parent.bottom; leftMargin: 10 }
-font { family: "Nokia Sans S60"; pixelSize: 24 }
 horizontalAlignment: Text.AlignLeft
 verticalAlignment: Text.AlignVCenter
 color: "white"
@@ -71,7 +71,6 @@ Item {
 Text {
 width: 90
 anchors { right: parent.right; top: parent.top; bottom: parent.bottom; rightMargin: 10 }
-font { family: "Nokia Sans S60"; pixelSize: 24 }
 horizontalAlignment: Text.AlignRight
 verticalAlignment: Text.AlignVCenter
 color: "white"
@@ -79,35 +78,36 @@ Item {
 text: formatTime(duration)
 }

-Image {
+Rectangle {
 id: progressHandle
-height: 46
+height: parent.height
-width: 10
+width: parent.height / 2
-source: mouseArea.pressed ? "qrc:/images/progress_handle_pressed.svg" : "qrc:/images/progress_handle.svg"
+color: "white"
+opacity: 0.5
 anchors.verticalCenter: progressBar.verticalCenter
-x: seekControl.duration == 0 ? 0 : seekControl.playPosition / seekControl.duration * 630
+x: seekControl.duration == 0 ? 0 : seekControl.playPosition / seekControl.duration * background.width

 MouseArea {
 id: mouseArea
 anchors { horizontalCenter: parent.horizontalCenter; bottom: parent.bottom }
-height: 46+16
+height: parent.height
-width: height
+width: parent.height * 2
 enabled: seekControl.enabled
 drag {
 target: progressHandle
 axis: Drag.XAxis
 minimumX: 0
-maximumX: 631
+maximumX: background.width
 }
 onPressed: {
 seekControl.seeking = true;
 }
 onCanceled: {
-seekControl.seekPosition = progressHandle.x * seekControl.duration / 630
+seekControl.seekPosition = progressHandle.x * seekControl.duration / background.width
 seekControl.seeking = false
 }
 onReleased: {
-seekControl.seekPosition = progressHandle.x * seekControl.duration / 630
+seekControl.seekPosition = progressHandle.x * seekControl.duration / background.width
 seekControl.seeking = false
 mouse.accepted = true
 }
@@ -120,7 +120,7 @@ Item {
 interval: 300
 running: seekControl.seeking
 onTriggered: {
-seekControl.seekPosition = progressHandle.x*seekControl.duration/630
+seekControl.seekPosition = progressHandle.x*seekControl.duration / background.width
 }
 }

@@ -54,16 +54,16 @@ Scene {
 verticalCenter: parent.verticalCenter
 margins: parent.margins
 }
-width: 150
+width: Math.max(parent.width, parent.height) / 5
 height: root.buttonHeight
 text: "PreserveAspectFit"
 onClicked: {
 if (!content.dummy) {
 var video = content.contentItem()
-if (video.fillMode == VideoOutput.Stretch) {
+if (video.fillMode === VideoOutput.Stretch) {
 video.fillMode = VideoOutput.PreserveAspectFit
 text = "PreserveAspectFit"
-} else if (video.fillMode == VideoOutput.PreserveAspectFit) {
+} else if (video.fillMode === VideoOutput.PreserveAspectFit) {
 video.fillMode = VideoOutput.PreserveAspectCrop
 text = "PreserveAspectCrop"
 } else {
@@ -56,55 +56,55 @@ Scene {
 Column {
 anchors.fill: parent
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Title:" + content.contentItem().metaData.title
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Size:" + content.contentItem().metaData.size
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Resolution:" + content.contentItem().metaData.resolution
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Media type:" + content.contentItem().metaData.mediaType
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Video codec:" + content.contentItem().metaData.videoCodec
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Video bit rate:" + content.contentItem().metaData.videoBitRate
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Video frame rate:" +content.contentItem().metaData.videoFrameRate
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Audio codec:" + content.contentItem().metaData.audioCodec
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Audio bit rate:" + content.contentItem().metaData.audioBitRate
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Date:" + content.contentItem().metaData.date
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Description:" + content.contentItem().metaData.description
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Copyright:" + content.contentItem().metaData.copyright
 }
 Text {
-color: "yellow"
+color: "#e0e0e0"
 text: "Seekable:" + content.contentItem().metaData.seekable
 }
 }
@@ -56,12 +56,12 @@ Scene {
 bottom: decreaseButton.top
 margins: parent.margins
 }
-width: 90
+width: Math.max(parent.width, parent.height) / 10
 height: root.buttonHeight
 text: "Increase"
 onClicked: {
 var video = content.contentItem()
-video.playbackRate = video.playbackRate + delta
+video.playbackRate += delta
 }
 }

@@ -72,12 +72,12 @@ Scene {
 verticalCenter: parent.verticalCenter
 margins: parent.margins
 }
-width: 90
+width: Math.max(parent.width, parent.height) / 10
 height: root.buttonHeight
 text: "Decrease"
 onClicked: {
 var video = content.contentItem()
-video.playbackRate = video.playbackRate - delta
+video.playbackRate -= delta
 }
 }

@@ -88,7 +88,7 @@ Scene {
 verticalCenter: parent.verticalCenter
 margins: parent.margins
 }
-width: 50
+width: Math.max(parent.width, parent.height) / 25
 height: root.buttonHeight
 enabled: false
 text: Math.round(10 * content.contentItem().playbackRate) / 10
@@ -36,6 +36,7 @@ import QtQuick 2.0
 Scene {
 id: root
 property string contentType: "video"
+contentWidth: parent.width

 Content {
 id: content
@@ -51,13 +52,12 @@ Scene {
 anchors {
 left: parent.left
 right: parent.right
-leftMargin: 100
+margins: 10
-rightMargin: 140
 bottom: parent.bottom
 }
 duration: content.contentItem() ? content.contentItem().duration : 0
 playPosition: content.contentItem() ? content.contentItem().position : 0
-onSeekPositionChanged: { content.contentItem().seek(seekPosition); }
+onSeekPositionChanged: content.contentItem().seek(seekPosition);
 }

 Component.onCompleted: root.content = content
@@ -35,28 +35,21 @@ import QtQuick 2.0

 Rectangle {
 id: root
-width: 640
+anchors.fill: parent
-height: 360
 color: "black"

 property string source1
 property string source2
-property color bgColor: "#002244"
+property color bgColor: "black"
 property real volume: 0.25
 property bool perfMonitorsLogging: false
 property bool perfMonitorsVisible: false

 QtObject {
 id: d
-property int itemHeight: 40
+property int itemHeight: root.height > root.width ? root.width / 10 : root.height / 10
 property int buttonHeight: 0.8 * itemHeight
-property int margins: 10
+property int margins: 5
-}

-// Create ScreenSaver element via Loader, so this app will still run if the
-// SystemInfo module is not available
-Loader {
-source: "DisableScreenSaver.qml"
 }

 Loader {
@@ -71,7 +64,6 @@ Rectangle {
 }

 function init() {
-console.log("[qmlvideo] performanceLoader.init logging " + root.perfMonitorsLogging + " visible " + root.perfMonitorsVisible)
 var enabled = root.perfMonitorsLogging || root.perfMonitorsVisible
 source = enabled ? "../performancemonitor/PerformanceItem.qml" : ""
 }
@@ -99,6 +91,9 @@ Rectangle {
 right: exitButton.left
 margins: d.margins
 }
+bgColor: "#212121"
+bgColorSelected: "#757575"
+textColorSelected: "white"
 height: d.buttonHeight
 text: (root.source1 == "") ? "Select file 1" : root.source1
 onClicked: fileBrowser1.show()
@@ -112,6 +107,9 @@ Rectangle {
 right: exitButton.left
 margins: d.margins
 }
+bgColor: "#212121"
+bgColorSelected: "#757575"
+textColorSelected: "white"
 height: d.buttonHeight
 text: (root.source2 == "") ? "Select file 2" : root.source2
 onClicked: fileBrowser2.show()
@@ -124,26 +122,58 @@ Rectangle {
 right: parent.right
 margins: d.margins
 }
-width: 50
+bgColor: "#212121"
+bgColorSelected: "#757575"
+textColorSelected: "white"
+width: parent.width / 10
 height: d.buttonHeight
 text: "Exit"
 onClicked: Qt.quit()
 }

+Row {
+id: modes
+anchors.top: openFile2Button.bottom
+anchors.margins: 0
+anchors.topMargin: 5
+Button {
+width: root.width / 2
+height: 0.8 * d.itemHeight
+bgColor: "#212121"
+radius: 0
+text: "Video Modes"
+enabled: false
+}
+Button {
+width: root.width / 2
+height: 0.8 * d.itemHeight
+bgColor: "#212121"
+radius: 0
+text: "Camera Modes"
+enabled: false
+}
+}

+Rectangle {
+id: divider
+height: 1
+width: parent.width
+color: "black"
+anchors.top: modes.bottom
+}

 SceneSelectionPanel {
 id: sceneSelectionPanel
 itemHeight: d.itemHeight
-color: "#004444"
+color: "#212121"
 anchors {
-top: openFile2Button.bottom
+top: divider.bottom
 left: parent.left
 right: parent.right
 bottom: parent.bottom
-margins: d.margins
 }
-radius: 10
+radius: 0
 onSceneSourceChanged: {
-console.log("[qmlvideo] main.onSceneSourceChanged source " + sceneSource)
 sceneLoader.source = sceneSource
 var scene = null
 var innerVisible = true
@@ -213,7 +243,9 @@ Rectangle {

 ErrorDialog {
 id: errorDialog
-anchors.fill: parent
+anchors.fill: root
+dialogWidth: d.itemHeight * 5
+dialogHeight: d.itemHeight * 3
 enabled: false
 }

@@ -230,7 +262,6 @@ Rectangle {
 }

 function closeScene() {
-console.log("[qmlvideo] main.closeScene")
 sceneSelectionPanel.sceneSource = ""
 }
 }
@@ -1,13 +1,8 @@
 <RCC>
 <qresource prefix="/">
 <file alias="leaves.jpg">images/leaves.jpg</file>
-<file alias="close.png">images/close.png</file>
 <file alias="folder.png">images/folder.png</file>
-<file alias="titlebar.png">images/titlebar.png</file>
-<file alias="titlebar.sci">images/titlebar.sci</file>
 <file alias="up.png">images/up.png</file>
-<file alias="progress_handle.svg">images/progress_handle.svg</file>
-<file alias="progress_handle_pressed.svg">images/progress_handle_pressed.svg</file>
 <file>qml/qmlvideo/Button.qml</file>
 <file>qml/qmlvideo/CameraBasic.qml</file>
 <file>qml/qmlvideo/CameraDrag.qml</file>
@@ -21,7 +16,6 @@
 <file>qml/qmlvideo/CameraRotate.qml</file>
 <file>qml/qmlvideo/CameraSpin.qml</file>
 <file>qml/qmlvideo/Content.qml</file>
-<file>qml/qmlvideo/DisableScreenSaver.qml</file>
 <file>qml/qmlvideo/ErrorDialog.qml</file>
 <file>qml/qmlvideo/FileBrowser.qml</file>
 <file>qml/qmlvideo/main.qml</file>
@@ -32,7 +32,6 @@
 ****************************************************************************/

 #include "filereader.h"
-#include "trace.h"

 #include <QCoreApplication>
 #include <QDir>
@@ -42,7 +41,6 @@

 QString FileReader::readFile(const QString &fileName)
 {
-qtTrace() << "FileReader::readFile" << "fileName" << fileName;
 QString content;
 QFile file(fileName);
 if (file.open(QIODevice::ReadOnly)) {
@@ -84,7 +84,6 @@ Rectangle {
 }

 onEffectSourceChanged: {
-console.log("[qmlvideofx] Content.onEffectSourceChanged " + effectSource)
 effectLoader.source = effectSource
 effectLoader.item.parent = root
 effectLoader.item.targetWidth = root.width
@@ -96,7 +95,6 @@ Rectangle {
 }

 function init() {
-console.log("[qmlvideofx] Content.init")
 openImage("qrc:/images/qt-logo.png")
 root.effectSource = "EffectPassThrough.qml"
 }
@@ -107,7 +105,6 @@ Rectangle {
 }

 function updateSource() {
-console.log("[qmlvideofx] Content.updateSource")
 if (contentLoader.item) {
 contentLoader.item.parent = root
 contentLoader.item.anchors.fill = root
@@ -118,7 +115,6 @@ Rectangle {
 }

 function openImage(path) {
-console.log("[qmlvideofx] Content.openImage \"" + path + "\"")
 stop()
 contentLoader.source = "ContentImage.qml"
 videoFramePaintedConnection.target = null
@@ -127,7 +123,6 @@ Rectangle {
 }

 function openVideo(path) {
-console.log("[qmlvideofx] Content.openVideo \"" + path + "\"")
 stop()
 contentLoader.source = "ContentVideo.qml"
 videoFramePaintedConnection.target = contentLoader.item
@@ -138,7 +133,6 @@ Rectangle {
 }

 function openCamera() {
-console.log("[qmlvideofx] Content.openCamera")
 stop()
 contentLoader.source = "ContentCamera.qml"
 videoFramePaintedConnection.target = contentLoader.item
@@ -146,7 +140,6 @@ Rectangle {
 }

 function stop() {
-console.log("[qmlvideofx] Content.stop")
 if (contentLoader.source == "ContentVideo.qml")
 contentLoader.item.stop()
 theSource.sourceItem = null
@@ -1,43 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
-** Contact: http://www.qt-project.org/legal
-**
-** This file is part of the Qt Mobility Components.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and Digia. For licensing terms and
-** conditions see http://qt.digia.com/licensing. For further information
-** use the contact form at http://qt.digia.com/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Digia gives you certain additional
-** rights. These rights are described in the Digia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-import QtQuick 2.0
-import QtMultimedia 5.0
-
-VideoOutput {
-source: camera
-
-Camera {
-id: camera
-}
-}
@@ -1,43 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
-** Contact: http://www.qt-project.org/legal
-**
-** This file is part of the Qt Mobility Components.
-**
-** $QT_BEGIN_LICENSE:LGPL21$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and Digia. For licensing terms and
-** conditions see http://qt.digia.com/licensing. For further information
-** use the contact form at http://qt.digia.com/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 or version 3 as published by the Free
-** Software Foundation and appearing in the file LICENSE.LGPLv21 and
-** LICENSE.LGPLv3 included in the packaging of this file. Please review the
-** following information to ensure the GNU Lesser General Public License
-** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Digia gives you certain additional
-** rights. These rights are described in the Digia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-import QtQuick 2.0
-import QtMobility.systeminfo 1.1
-// NOTE: The QtSystemInfo module is not yet part of Qt 5
-
-Item {
-ScreenSaver {
-screenSaverInhibited: true
-}
-}
-
@@ -31,7 +31,8 @@
 **
 ****************************************************************************/

-import QtQuick 2.0
+import QtQuick 2.1
+import QtQuick.Window 2.1

 Effect {
 id: root
@@ -49,6 +50,7 @@ Effect {

 property real posX: -1
 property real posY: -1
+property real pixDens: Screen.pixelDensity

 QtObject {
 id: d
@@ -31,7 +31,8 @@
 **
 ****************************************************************************/

-import QtQuick 2.0
+import QtQuick 2.1
+import QtQuick.Window 2.1

 Effect {
 parameters: ListModel {
@@ -48,6 +49,7 @@ Effect {
 // Transform slider values, and bind result to shader uniforms
 property real amplitude: parameters.get(0).value * 0.03
 property real n: parameters.get(1).value * 7
+property real pixDens: Screen.pixelDensity

 property real time: 0
 NumberAnimation on time { loops: Animation.Infinite; from: 0; to: Math.PI * 2; duration: 600 }
@@ -43,7 +43,7 @@ ListModel {
 ListElement { name: "Emboss"; source: "EffectEmboss.qml" }
 ListElement { name: "Glow"; source: "EffectGlow.qml" }
 ListElement { name: "Isolate"; source: "EffectIsolate.qml" }
-//ListElement { name: "Magnify"; source: "EffectMagnify.qml" }
+ListElement { name: "Magnify"; source: "EffectMagnify.qml" }
 ListElement { name: "Page curl"; source: "EffectPageCurl.qml" }
 ListElement { name: "Pixelate"; source: "EffectPixelate.qml" }
 ListElement { name: "Posterize"; source: "EffectPosterize.qml" }
@@ -160,7 +160,6 @@ Rectangle {
 Loader {
 id: performanceLoader
 function init() {
-console.log("[qmlvideofx] performanceLoader.init logging " + root.perfMonitorsLogging + " visible " + root.perfMonitorsVisible)
 var enabled = root.perfMonitorsLogging || root.perfMonitorsVisible
 source = enabled ? "../performancemonitor/PerformanceItem.qml" : ""
 }
@@ -249,11 +248,6 @@ Rectangle {
 height = windowHeight
 width = windowWidth

-console.log("[qmlvideofx] root.init")
-console.log("Height: ", Screen.desktopAvailableHeight)
-console.log("Width: ", Screen.desktopAvailableWidth)
-console.log("Pixels per mm: ", Math.ceil(Screen.pixelDensity))
-console.log("Orientation: ", Screen.orientation)
 imageFileBrowser.folder = imagePath
 videoFileBrowser.folder = videoPath
 content.init()
@@ -3,10 +3,8 @@
 <file>images/qt-logo.png</file>
 <file>qml/qmlvideofx/Button.qml</file>
 <file>qml/qmlvideofx/Content.qml</file>
-<file>qml/qmlvideofx/ContentCamera.qml</file>
 <file>qml/qmlvideofx/ContentImage.qml</file>
 <file>qml/qmlvideofx/ContentVideo.qml</file>
-<file>qml/qmlvideofx/DisableScreenSaver.qml</file>
 <file>qml/qmlvideofx/Divider.qml</file>
 <file>qml/qmlvideofx/Effect.qml</file>
 <file>qml/qmlvideofx/EffectBillboard.qml</file>
@@ -50,12 +50,15 @@ uniform float targetWidth;
 uniform float targetHeight;
 uniform float posX;
 uniform float posY;
+uniform float pixDens;

 void main()
 {
 vec2 tc = qt_TexCoord0;
 vec2 center = vec2(posX, posY);
 vec2 xy = gl_FragCoord.xy - center.xy;
+xy.x -= (pixDens * 14.0);
+xy.y -= (pixDens * 29.0);
 float r = sqrt(xy.x * xy.x + xy.y * xy.y);
 if (r < radius) {
 float h = diffractionIndex * 0.5 * radius;
@@ -55,12 +55,13 @@ const int ITER = 7;
 const float RATE = 0.1;
 uniform float amplitude;
 uniform float n;
+uniform float pixDens;

 void main()
 {
 vec2 uv = qt_TexCoord0.xy;
 vec2 tc = uv;
-vec2 p = vec2(-1.0 + 2.0 * gl_FragCoord.x / targetWidth, -(-1.0 + 2.0 * gl_FragCoord.y / targetHeight));
+vec2 p = vec2(-1.0 + 2.0 * (gl_FragCoord.x - (pixDens * 14.0)) / targetWidth, -(-1.0 + 2.0 * (gl_FragCoord.y - (pixDens * 29.0)) / targetHeight));
 float diffx = 0.0;
 float diffy = 0.0;
 vec4 col;
@@ -125,7 +125,6 @@ void FrequencyMonitorPrivate::calculateAverageFrequency()
 void FrequencyMonitorPrivate::stalled()
 {
 if (m_instantaneousFrequency) {
-qtVerboseTrace() << "FrequencyMonitor::stalled";
 m_instantaneousFrequency = 0;
 emit q_ptr->instantaneousFrequencyChanged(m_instantaneousFrequency);
 emit q_ptr->frequencyChanged();
@@ -136,7 +135,6 @@ FrequencyMonitor::FrequencyMonitor(QObject *parent)
 : QObject(parent)
 {
 d_ptr = new FrequencyMonitorPrivate(this);
-qtTrace() << "FrequencyMonitor::FrequencyMonitor";
 }

 FrequencyMonitor::~FrequencyMonitor()
@@ -46,6 +46,7 @@
 #include <QMediaService>
 #include <QMediaRecorder>
 #include <QCameraViewfinder>
+#include <QCameraInfo>
 #include <QMediaMetaData>

 #include <QMessageBox>
@@ -53,6 +54,8 @@

 #include <QtWidgets>

+Q_DECLARE_METATYPE(QCameraInfo)
+
 Camera::Camera(QWidget *parent) :
 QMainWindow(parent),
 ui(new Ui::Camera),
@@ -65,26 +68,23 @@ Camera::Camera(QWidget *parent) :
 ui->setupUi(this);

 //Camera devices:
-QByteArray cameraDevice;

 QActionGroup *videoDevicesGroup = new QActionGroup(this);
 videoDevicesGroup->setExclusive(true);
-foreach(const QByteArray &deviceName, QCamera::availableDevices()) {
+foreach (const QCameraInfo &cameraInfo, QCameraInfo::availableCameras()) {
-QString description = camera->deviceDescription(deviceName);
+QAction *videoDeviceAction = new QAction(cameraInfo.description(), videoDevicesGroup);
-QAction *videoDeviceAction = new QAction(description, videoDevicesGroup);
 videoDeviceAction->setCheckable(true);
-videoDeviceAction->setData(QVariant(deviceName));
+videoDeviceAction->setData(QVariant::fromValue(cameraInfo));
-if (cameraDevice.isEmpty()) {
+if (cameraInfo == QCameraInfo::defaultCamera())
-cameraDevice = deviceName;
 videoDeviceAction->setChecked(true);
-}
 ui->menuDevices->addAction(videoDeviceAction);
 }

 connect(videoDevicesGroup, SIGNAL(triggered(QAction*)), SLOT(updateCameraDevice(QAction*)));
 connect(ui->captureWidget, SIGNAL(currentChanged(int)), SLOT(updateCaptureMode()));

-setCamera(cameraDevice);
+setCamera(QCameraInfo::defaultCamera());
 }

 Camera::~Camera()
@@ -94,16 +94,13 @@ Camera::~Camera()
 delete camera;
 }

-void Camera::setCamera(const QByteArray &cameraDevice)
+void Camera::setCamera(const QCameraInfo &cameraInfo)
 {
 delete imageCapture;
 delete mediaRecorder;
 delete camera;

-if (cameraDevice.isEmpty())
+camera = new QCamera(cameraInfo);
-camera = new QCamera;
-else
-camera = new QCamera(cameraDevice);

 connect(camera, SIGNAL(stateChanged(QCamera::State)), this, SLOT(updateCameraState(QCamera::State)));
 connect(camera, SIGNAL(error(QCamera::Error)), this, SLOT(displayCameraError()));
@@ -398,7 +395,7 @@ void Camera::displayCameraError()

 void Camera::updateCameraDevice(QAction *action)
 {
-setCamera(action->data().toByteArray());
+setCamera(qvariant_cast<QCameraInfo>(action->data()));
 }

 void Camera::displayViewfinder()
@@ -60,7 +60,7 @@ public:
 ~Camera();

 private slots:
-void setCamera(const QByteArray &cameraDevice);
+void setCamera(const QCameraInfo &cameraInfo);

 void startCamera();
 void stopCamera();
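Background for the camera-example hunks above (an illustrative note, not additional patch content): QCameraInfo, introduced in Qt 5.3, supersedes the QByteArray device-name API that the example previously used. A minimal sketch of the enumeration pattern the example now relies on, assuming a Qt 5.3 or later build of QtMultimedia; createDefaultCamera() is a hypothetical helper name:

#include <QCamera>
#include <QCameraInfo>
#include <QDebug>

static QCamera *createDefaultCamera()
{
    // List every camera the backend reports, with its identifier and label.
    const QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
    for (const QCameraInfo &info : cameras)
        qDebug() << info.deviceName() << info.description();

    // Constructing QCamera from a QCameraInfo replaces the old
    // QCamera(const QByteArray &device) form that the hunks drop.
    return new QCamera(QCameraInfo::defaultCamera());
}

The caller owns the returned QCamera; in the example it is the Camera window that stores it and deletes it in setCamera() and the destructor.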
@@ -12,10 +12,6 @@ win32 {
 qtCompileTest(evr)
 } else:mac {
 qtCompileTest(avfoundation)
-} else:android:!android-no-sdk {
-SDK_ROOT = $$(ANDROID_SDK_ROOT)
-isEmpty(SDK_ROOT): SDK_ROOT = $$DEFAULT_ANDROID_SDK_ROOT
-!exists($$SDK_ROOT/platforms/android-11/android.jar): error("QtMultimedia for Android requires API level 11")
 } else:qnx {
 qtCompileTest(mmrenderer)
 } else {
@@ -25,6 +21,7 @@ win32 {
 qtCompileTest(gstreamer_photography)
 qtCompileTest(gstreamer_encodingprofiles)
 qtCompileTest(gstreamer_appsrc)
+qtCompileTest(linux_v4l)
 }
 qtCompileTest(resourcepolicy)
 qtCompileTest(gpu_vivante)
@@ -100,6 +100,8 @@ config_gstreamer_appsrc {
 LIBS_PRIVATE += -lgstapp-0.10
 }

+config_linux_v4l: DEFINES += USE_V4L
+
 HEADERS += $$PRIVATE_HEADERS

 DESTDIR = $$QT.multimedia.libs
@@ -42,8 +42,10 @@
 #include <QtCore/qstringlist.h>
 #include <qaudioformat.h>

-#include <private/qcore_unix_p.h>
+#ifdef USE_V4L
-#include <linux/videodev2.h>
+# include <private/qcore_unix_p.h>
+# include <linux/videodev2.h>
+#endif

 #include "qgstreamervideoinputdevicecontrol_p.h"

@@ -469,6 +471,7 @@ QVector<QGstUtils::CameraInfo> QGstUtils::enumerateCameras(GstElementFactory *fa
 }
 }

+#ifdef USE_V4L
 QDir devDir(QStringLiteral("/dev"));
 devDir.setFilter(QDir::System);

@@ -516,6 +519,7 @@ QVector<QGstUtils::CameraInfo> QGstUtils::enumerateCameras(GstElementFactory *fa
 }
 qt_safe_close(fd);
 }
+#endif // USE_V4L

 return devices;
 }
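Taken together, the hunks above make the V4L2-specific code conditional: the new linux_v4l configure test (added at the top of this change) compiles against <linux/videodev2.h>, the .pro file defines USE_V4L only when that test passes, and the guarded includes plus the /dev scan in QGstUtils::enumerateCameras() drop out cleanly otherwise. A self-contained sketch of the same guard pattern, for illustration only; the program and the /dev/video0 path are assumptions, not part of the patch:

#include <cstdio>

#ifdef USE_V4L
#  include <fcntl.h>
#  include <sys/ioctl.h>
#  include <unistd.h>
#  include <linux/videodev2.h>
#endif

int main()
{
#ifdef USE_V4L
    // With USE_V4L defined, V4L2 types and ioctls are available.
    int fd = ::open("/dev/video0", O_RDONLY);
    if (fd >= 0) {
        v4l2_capability caps = {};
        if (::ioctl(fd, VIDIOC_QUERYCAP, &caps) >= 0)
            std::printf("V4L2 device: %s\n", reinterpret_cast<const char *>(caps.card));
        ::close(fd);
    }
#else
    // Without the define the file still builds and runs, just without V4L2.
    std::printf("built without V4L2 support\n");
#endif
    return 0;
}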
@@ -56,7 +56,6 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(
 : m_surface(surface)
 , m_pool(0)
 , m_renderReturn(GST_FLOW_ERROR)
-, m_lastPrerolledBuffer(0)
 , m_bytesPerLine(0)
 , m_startCanceled(false)
 {
@@ -74,7 +73,6 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(

 QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
 {
-setLastPrerolledBuffer(0);
 }

 QList<QVideoFrame::PixelFormat> QVideoSurfaceGstDelegate::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
@@ -209,23 +207,6 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
 return m_renderReturn;
 }

-void QVideoSurfaceGstDelegate::setLastPrerolledBuffer(GstBuffer *prerolledBuffer)
-{
-// discard previously stored buffer
-if (m_lastPrerolledBuffer) {
-gst_buffer_unref(m_lastPrerolledBuffer);
-m_lastPrerolledBuffer = 0;
-}
-
-if (!prerolledBuffer)
-return;
-
-// store a reference to the buffer
-Q_ASSERT(!m_lastPrerolledBuffer);
-m_lastPrerolledBuffer = prerolledBuffer;
-gst_buffer_ref(m_lastPrerolledBuffer);
-}
-
 void QVideoSurfaceGstDelegate::queuedStart()
 {
 if (!m_startCanceled) {
@@ -397,8 +378,6 @@ QVideoSurfaceGstSink *QVideoSurfaceGstSink::createSink(QAbstractVideoSurface *su

 sink->delegate = new QVideoSurfaceGstDelegate(surface);

-g_signal_connect(G_OBJECT(sink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), sink);
-
 return sink;
 }

@@ -434,16 +413,15 @@ void QVideoSurfaceGstSink::class_init(gpointer g_class, gpointer class_data)

 sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

+GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
+video_sink_class->show_frame = QVideoSurfaceGstSink::show_frame;
+
 GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
 base_sink_class->get_caps = QVideoSurfaceGstSink::get_caps;
 base_sink_class->set_caps = QVideoSurfaceGstSink::set_caps;
 base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc;
 base_sink_class->start = QVideoSurfaceGstSink::start;
 base_sink_class->stop = QVideoSurfaceGstSink::stop;
-// base_sink_class->unlock = QVideoSurfaceGstSink::unlock; // Not implemented.
-base_sink_class->event = QVideoSurfaceGstSink::event;
-base_sink_class->preroll = QVideoSurfaceGstSink::preroll;
-base_sink_class->render = QVideoSurfaceGstSink::render;

 GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
 element_class->change_state = QVideoSurfaceGstSink::change_state;
@@ -709,27 +687,6 @@ void QVideoSurfaceGstSink::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buf
 }
 }

-void QVideoSurfaceGstSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
-{
-Q_UNUSED(o);
-Q_UNUSED(p);
-QVideoSurfaceGstSink *sink = reinterpret_cast<QVideoSurfaceGstSink *>(d);
-
-gboolean value = true; // "show-preroll-frame" property is true by default
-g_object_get(G_OBJECT(sink), "show-preroll-frame", &value, NULL);
-
-GstBuffer *buffer = sink->delegate->lastPrerolledBuffer();
-// Render the stored prerolled buffer if requested.
-// e.g. player is in stopped mode, then seek operation is requested,
-// surface now stores a prerolled frame, but doesn't display it until
-// "show-preroll-frame" property is set to "true"
-// when switching to pause or playing state.
-if (value && buffer) {
-sink->delegate->render(buffer);
-sink->delegate->setLastPrerolledBuffer(0);
-}
-}
-
 GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
 GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer)
 {
@@ -842,44 +799,9 @@ gboolean QVideoSurfaceGstSink::stop(GstBaseSink *base)
 return TRUE;
 }

-gboolean QVideoSurfaceGstSink::unlock(GstBaseSink *base)
+GstFlowReturn QVideoSurfaceGstSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
-{
-Q_UNUSED(base);
-
-return TRUE;
-}
-
-gboolean QVideoSurfaceGstSink::event(GstBaseSink *base, GstEvent *event)
-{
-// discard prerolled frame
-if (event->type == GST_EVENT_FLUSH_START) {
-VO_SINK(base);
-sink->delegate->setLastPrerolledBuffer(0);
-}
-
-return TRUE;
-}
-
-GstFlowReturn QVideoSurfaceGstSink::preroll(GstBaseSink *base, GstBuffer *buffer)
 {
 VO_SINK(base);
-
-gboolean value = true; // "show-preroll-frame" property is true by default
-g_object_get(G_OBJECT(base), "show-preroll-frame", &value, NULL);
-if (value) {
-sink->delegate->setLastPrerolledBuffer(0); // discard prerolled buffer
-return sink->delegate->render(buffer); // display frame
-}
-
-// otherwise keep a reference to the buffer to display it later
-sink->delegate->setLastPrerolledBuffer(buffer);
-return GST_FLOW_OK;
-}
-
-GstFlowReturn QVideoSurfaceGstSink::render(GstBaseSink *base, GstBuffer *buffer)
-{
-VO_SINK(base);
-sink->delegate->setLastPrerolledBuffer(0); // discard prerolled buffer
 return sink->delegate->render(buffer);
 }

@@ -281,6 +281,9 @@ void QDeclarativeRadioData::_q_availabilityChanged(QMultimedia::AvailabilityStat

 void QDeclarativeRadioData::connectSignals()
 {
+if (!m_radioData)
+return;
+
 connect(m_radioData, SIGNAL(programTypeChanged(QRadioData::ProgramType)), this,
 SLOT(_q_programTypeChanged(QRadioData::ProgramType)));

@@ -45,9 +45,6 @@
 #include "qsoundeffect_qaudio_p.h"

 #include <QtCore/qcoreapplication.h>
-#include <QtCore/qthread.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qwaitcondition.h>
 #include <QtCore/qiodevice.h>

 //#include <QDebug>
@@ -84,9 +84,6 @@ public:

 GstFlowReturn render(GstBuffer *buffer);

-GstBuffer *lastPrerolledBuffer() const { return m_lastPrerolledBuffer; }
-void setLastPrerolledBuffer(GstBuffer *lastPrerolledBuffer); // set prerolledBuffer to 0 to discard prerolled buffer
-
 private slots:
 void queuedStart();
 void queuedStop();
@@ -108,8 +105,6 @@ private:
 QVideoSurfaceFormat m_format;
 QVideoFrame m_frame;
 GstFlowReturn m_renderReturn;
-// this pointer is not 0 when there is a prerolled buffer waiting to be displayed
-GstBuffer *m_lastPrerolledBuffer;
 int m_bytesPerLine;
 bool m_started;
 bool m_startCanceled;
@@ -126,8 +121,6 @@ public:
 QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
 static void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);

-static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
-
 private:
 static GType get_type();
 static void class_init(gpointer g_class, gpointer class_data);
@@ -147,11 +140,7 @@ private:
 static gboolean start(GstBaseSink *sink);
 static gboolean stop(GstBaseSink *sink);

-static gboolean unlock(GstBaseSink *sink);
+static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
-
-static gboolean event(GstBaseSink *sink, GstEvent *event);
-static GstFlowReturn preroll(GstBaseSink *sink, GstBuffer *buffer);
-static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);

 private:
 QVideoSurfaceGstDelegate *delegate;
@@ -51,6 +51,30 @@ public:

 virtual void parseLine(int lineIndex, const QString& line, const QUrl& root) = 0;

+protected:
+QUrl expandToFullPath(const QUrl &root, const QString &line)
+{
+// On Linux, backslashes are not converted to forward slashes :/
+if (line.startsWith(QLatin1String("//")) || line.startsWith(QLatin1String("\\\\"))) {
+// Network share paths are not resolved
+return QUrl::fromLocalFile(line);
+}
+
+QUrl url(line);
+if (url.scheme().isEmpty()) {
+// Resolve it relative to root
+if (root.isLocalFile())
+return root.resolved(QUrl::fromLocalFile(line));
+else
+return root.resolved(url);
+} else if (url.scheme().length() == 1) {
+// Assume it's a drive letter for a Windows path
+url = QUrl::fromLocalFile(line);
+}
+
+return url;
+}
+
 Q_SIGNALS:
 void newItem(const QVariant& content);
 void finished();
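For context on the helper added above (and removed from its old location in the next hunk): expandToFullPath() turns one playlist entry into an absolute QUrl, treating network shares, relative entries and Windows drive-letter paths differently. A stand-alone copy of the same logic with a small hypothetical driver, shown only to illustrate the resolution rules; it is not additional patch content and needs only QtCore:

#include <QDebug>
#include <QUrl>

// Stand-alone copy of the logic the hunk above moves into ParserBase.
static QUrl expandToFullPath(const QUrl &root, const QString &line)
{
    // Network share paths ("//host/..." or "\\host\...") pass through untouched.
    if (line.startsWith(QLatin1String("//")) || line.startsWith(QLatin1String("\\\\")))
        return QUrl::fromLocalFile(line);

    QUrl url(line);
    if (url.scheme().isEmpty()) {
        // No scheme: treat the entry as relative and resolve it against the playlist's URL.
        return root.isLocalFile() ? root.resolved(QUrl::fromLocalFile(line))
                                  : root.resolved(url);
    } else if (url.scheme().length() == 1) {
        // A one-letter scheme is almost certainly a Windows drive letter ("C:...").
        url = QUrl::fromLocalFile(line);
    }
    return url;
}

int main()
{
    const QUrl root = QUrl::fromLocalFile("/music/playlists/all.pls");
    qDebug() << expandToFullPath(root, "tracks/song.mp3");           // relative entry, resolved via root
    qDebug() << expandToFullPath(root, "http://example.com/stream"); // already absolute, returned as-is
    return 0;
}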
@@ -138,29 +162,6 @@ public:
 return -1;
 }

-QUrl expandToFullPath(const QUrl& root, const QString& line)
-{
-// On Linux, backslashes are not converted to forward slashes :/
-if (line.startsWith(QLatin1String("//")) || line.startsWith(QLatin1String("\\\\"))) {
-// Network share paths are not resolved
-return QUrl::fromLocalFile(line);
-}
-
-QUrl url(line);
-if (url.scheme().isEmpty()) {
-// Resolve it relative to root
-if (root.isLocalFile())
-return root.resolved(QUrl::fromLocalFile(line));
-else
-return root.resolved(url);
-} else if (url.scheme().length() == 1) {
-// Assume it's a drive letter for a Windows path
-url = QUrl::fromLocalFile(line);
-}
-
-return url;
-}
-
 private:
 bool m_extendedFormat;
 QVariantMap m_extraInfo;
@@ -172,27 +173,9 @@ class PLSParser : public ParserBase
 public:
 PLSParser(QObject *parent)
 : ParserBase(parent)
-, m_state(Header)
-, m_count(0)
-, m_readFlags(0)
 {
 }

-enum ReadFlags
-{
-FileRead = 0x1,
-TitleRead = 0x2,
-LengthRead = 0x4,
-All = FileRead | TitleRead | LengthRead
-};
-
-enum State
-{
-Header,
-Track,
-Footer
-};
-
 /*
 *
 The format is essentially that of an INI file structured as follows:
@@ -231,89 +214,25 @@ NumberOfEntries=2
|
|||||||
|
|
||||||
Version=2
|
Version=2
|
||||||
*/
|
*/
|
||||||
inline bool containsFlag(const ReadFlags& flag)
|
void parseLine(int, const QString &line, const QUrl &root)
|
||||||
{
|
{
|
||||||
return (m_readFlags & int(flag)) == flag;
|
// We ignore everything but 'File' entries, since that's the only thing we care about.
|
||||||
|
if (!line.startsWith(QLatin1String("File")))
|
||||||
|
return;
|
||||||
|
|
||||||
|
QString value = getValue(line);
|
||||||
|
if (value.isEmpty())
|
||||||
|
return;
|
||||||
|
|
||||||
|
emit newItem(expandToFullPath(root, value));
|
||||||
}
|
}
|
||||||
|
|
||||||
inline void setFlag(const ReadFlags& flag)
|
QString getValue(const QString& line) {
|
||||||
{
|
|
||||||
m_readFlags |= int(flag);
|
|
||||||
}
|
|
||||||
|
|
||||||
void parseLine(int lineIndex, const QString& line, const QUrl&)
|
|
||||||
{
|
|
||||||
switch (m_state) {
|
|
||||||
case Header:
|
|
||||||
if (line == QLatin1String("[playlist]")) {
|
|
||||||
m_state = Track;
|
|
||||||
setCount(1);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case Track:
|
|
||||||
if (!containsFlag(FileRead) && line.startsWith(m_fileName)) {
|
|
||||||
m_item[QLatin1String("url")] = getValue(lineIndex, line);
|
|
||||||
setFlag(FileRead);
|
|
||||||
} else if (!containsFlag(TitleRead) && line.startsWith(m_titleName)) {
|
|
||||||
m_item[QMediaMetaData::Title] = getValue(lineIndex, line);
|
|
||||||
setFlag(TitleRead);
|
|
||||||
} else if (!containsFlag(LengthRead) && line.startsWith(m_lengthName)) {
|
|
||||||
//convert from seconds to miliseconds
|
|
||||||
int length = getValue(lineIndex, line).toInt();
|
|
||||||
if (length > 0)
|
|
||||||
m_item[QMediaMetaData::Duration] = length * 1000;
|
|
||||||
setFlag(LengthRead);
|
|
||||||
} else if (line.startsWith(QLatin1String("NumberOfEntries"))) {
|
|
||||||
m_state = Footer;
|
|
||||||
int entries = getValue(lineIndex, line).toInt();
|
|
||||||
int count = m_readFlags == 0 ? (m_count - 1) : m_count;
|
|
||||||
if (entries != count) {
|
|
||||||
emit error(QPlaylistFileParser::FormatError, tr("Error parsing playlist: %1, expected count = %2").
|
|
||||||
arg(line, QString::number(count)));
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (m_readFlags == int(All)) {
|
|
||||||
emit newItem(m_item);
|
|
||||||
setCount(m_count + 1);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case Footer:
|
|
||||||
if (line.startsWith(QLatin1String("Version"))) {
|
|
||||||
int version = getValue(lineIndex, line).toInt();
|
|
||||||
if (version != 2)
|
|
||||||
emit error(QPlaylistFileParser::FormatError, QString(tr("Error parsing playlist at line[%1], expected version = 2")).arg(line));
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
QString getValue(int lineIndex, const QString& line) {
|
|
||||||
int start = line.indexOf('=');
|
int start = line.indexOf('=');
|
||||||
if (start < 0) {
|
if (start < 0)
|
||||||
emit error(QPlaylistFileParser::FormatError, QString(tr("Error parsing playlist at line[%1]:%2")).arg(QString::number(lineIndex), line));
|
|
||||||
return QString();
|
return QString();
|
||||||
}
|
|
||||||
return line.midRef(start + 1).trimmed().toString();
|
return line.midRef(start + 1).trimmed().toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
void setCount(int count) {
|
|
||||||
m_count = count;
|
|
||||||
m_fileName = QStringLiteral("File%1").arg(count);
|
|
||||||
m_titleName = QStringLiteral("Title%1").arg(count);
|
|
||||||
m_lengthName = QStringLiteral("Length%1").arg(count);
|
|
||||||
m_item.clear();
|
|
||||||
m_readFlags = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
|
||||||
State m_state;
|
|
||||||
int m_count;
|
|
||||||
QString m_titleName;
|
|
||||||
QString m_fileName;
|
|
||||||
QString m_lengthName;
|
|
||||||
QVariantMap m_item;
|
|
||||||
int m_readFlags;
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ inline bool QMediaOpenGLHelper::isANGLE()
|
|||||||
#else
|
#else
|
||||||
bool isANGLE = false;
|
bool isANGLE = false;
|
||||||
|
|
||||||
# if defined(Q_OS_WIN) && (defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC))
|
# if defined(Q_OS_WIN) && !defined(Q_OS_WINCE) && (defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC))
|
||||||
if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) {
|
if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) {
|
||||||
// Although unlikely, technically LibGLES could mean a non-ANGLE EGL/GLES2 implementation too.
|
// Although unlikely, technically LibGLES could mean a non-ANGLE EGL/GLES2 implementation too.
|
||||||
// Verify that it is indeed ANGLE.
|
// Verify that it is indeed ANGLE.
|
||||||
@@ -98,7 +98,7 @@ inline bool QMediaOpenGLHelper::isANGLE()
|
|||||||
# endif // QT_OPENGL_ES_2_ANGLE_STATIC
|
# endif // QT_OPENGL_ES_2_ANGLE_STATIC
|
||||||
|
|
||||||
}
|
}
|
||||||
# endif // Q_OS_WIN && (QT_OPENGL_ES_2 || QT_OPENGL_DYNAMIC)
|
# endif // Q_OS_WIN && !Q_OS_WINCE && (QT_OPENGL_ES_2 || QT_OPENGL_DYNAMIC)
|
||||||
|
|
||||||
return isANGLE;
|
return isANGLE;
|
||||||
#endif // Q_OS_WINRT
|
#endif // Q_OS_WINRT
|
||||||
|
|||||||
@@ -760,7 +760,7 @@ qint64 QAlsaAudioInput::elapsedUSecs() const
|
|||||||
if (deviceState == QAudio::StoppedState)
|
if (deviceState == QAudio::StoppedState)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
return clockStamp.elapsed()*1000;
|
return clockStamp.elapsed() * qint64(1000);
|
||||||
}
|
}
|
||||||
|
|
||||||
void QAlsaAudioInput::reset()
|
void QAlsaAudioInput::reset()
|
||||||
|
|||||||
@@ -774,7 +774,7 @@ qint64 QAlsaAudioOutput::elapsedUSecs() const
|
|||||||
if (deviceState == QAudio::StoppedState)
|
if (deviceState == QAudio::StoppedState)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
return clockStamp.elapsed()*1000;
|
return clockStamp.elapsed() * qint64(1000);
|
||||||
}
|
}
|
||||||
|
|
||||||
void QAlsaAudioOutput::reset()
|
void QAlsaAudioOutput::reset()
|
||||||
|
|||||||
@@ -83,23 +83,6 @@ public class QtAndroidMediaPlayer
 
     private volatile int mState = State.Uninitialized;
 
-    private class ProgressWatcher
-    implements Runnable
-    {
-        @Override
-        public void run()
-        {
-            try {
-                while ((mState & (State.Started)) != 0) {
-                    onProgressUpdateNative(getCurrentPosition(), mID);
-                    Thread.sleep(1000);
-                }
-            } catch (final InterruptedException e) {
-                // Ignore
-            }
-        }
-    }
-
     /**
      * MediaPlayer OnErrorListener
      */
@@ -257,8 +240,6 @@ public class QtAndroidMediaPlayer
         try {
             mMediaPlayer.start();
             setState(State.Started);
-            Thread progressThread = new Thread(new ProgressWatcher());
-            progressThread.start();
         } catch (final IllegalStateException e) {
             Log.d(TAG, "" + e.getMessage());
         }
@@ -309,7 +290,6 @@ public class QtAndroidMediaPlayer
 
         try {
             mMediaPlayer.seekTo(msec);
-            onProgressUpdateNative(msec, mID);
         } catch (final IllegalStateException e) {
             Log.d(TAG, "" + e.getMessage());
         }
@@ -37,6 +37,36 @@
 
 QT_BEGIN_NAMESPACE
 
+class StateChangeNotifier
+{
+public:
+    StateChangeNotifier(QAndroidMediaPlayerControl *mp)
+        : mControl(mp)
+        , mPreviousState(mp->state())
+        , mPreviousMediaStatus(mp->mediaStatus())
+    {
+        ++mControl->mActiveStateChangeNotifiers;
+    }
+
+    ~StateChangeNotifier()
+    {
+        if (--mControl->mActiveStateChangeNotifiers)
+            return;
+
+        if (mPreviousState != mControl->state())
+            Q_EMIT mControl->stateChanged(mControl->state());
+
+        if (mPreviousMediaStatus != mControl->mediaStatus())
+            Q_EMIT mControl->mediaStatusChanged(mControl->mediaStatus());
+    }
+
+private:
+    QAndroidMediaPlayerControl *mControl;
+    QMediaPlayer::State mPreviousState;
+    QMediaPlayer::MediaStatus mPreviousMediaStatus;
+};
+
+
 QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
     : QMediaPlayerControl(parent),
       mMediaPlayer(new AndroidMediaPlayer),
@@ -55,7 +85,9 @@ QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
       mPendingPosition(-1),
      mPendingSetMedia(false),
      mPendingVolume(-1),
-      mPendingMute(-1)
+      mPendingMute(-1),
+      mReloadingMedia(false),
+      mActiveStateChangeNotifiers(0)
 {
     connect(mMediaPlayer,SIGNAL(bufferingChanged(qint32)),
             this,SLOT(onBufferingChanged(qint32)));
@@ -107,17 +139,14 @@ qint64 QAndroidMediaPlayerControl::position() const
     if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
         return duration();
 
-    if ((mState & (AndroidMediaPlayer::Idle
-                   | AndroidMediaPlayer::Initialized
-                   | AndroidMediaPlayer::Prepared
+    if ((mState & (AndroidMediaPlayer::Prepared
                    | AndroidMediaPlayer::Started
                    | AndroidMediaPlayer::Paused
-                   | AndroidMediaPlayer::Stopped
-                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
-        return (mPendingPosition == -1) ? 0 : mPendingPosition;
+                   | AndroidMediaPlayer::PlaybackCompleted))) {
+        return mMediaPlayer->getCurrentPosition();
     }
 
-    return (mCurrentState == QMediaPlayer::StoppedState) ? 0 : mMediaPlayer->getCurrentPosition();
+    return (mPendingPosition == -1) ? 0 : mPendingPosition;
 }
 
 void QAndroidMediaPlayerControl::setPosition(qint64 position)
@@ -127,24 +156,25 @@ void QAndroidMediaPlayerControl::setPosition(qint64 position)
 
     const int seekPosition = (position > INT_MAX) ? INT_MAX : position;
 
-    if ((mState & (AndroidMediaPlayer::Prepared
-                   | AndroidMediaPlayer::Started
-                   | AndroidMediaPlayer::Paused
-                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
-        if (mPendingPosition != seekPosition) {
-            mPendingPosition = seekPosition;
-            Q_EMIT positionChanged(seekPosition);
-        }
+    if (seekPosition == this->position())
         return;
-    }
+
+    StateChangeNotifier notifier(this);
 
     if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
         setMediaStatus(QMediaPlayer::LoadedMedia);
 
-    mMediaPlayer->seekTo(seekPosition);
+    if ((mState & (AndroidMediaPlayer::Prepared
+                   | AndroidMediaPlayer::Started
+                   | AndroidMediaPlayer::Paused
+                   | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+        mPendingPosition = seekPosition;
+    } else {
+        mMediaPlayer->seekTo(seekPosition);
 
-    if (mPendingPosition != -1) {
-        mPendingPosition = -1;
+        if (mPendingPosition != -1) {
+            mPendingPosition = -1;
+        }
     }
 
     Q_EMIT positionChanged(seekPosition);
@@ -275,9 +305,11 @@ const QIODevice *QAndroidMediaPlayerControl::mediaStream() const
 void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
                                           QIODevice *stream)
 {
-    const bool reloading = (mMediaContent == mediaContent);
+    StateChangeNotifier notifier(this);
 
-    if (!reloading) {
+    mReloadingMedia = (mMediaContent == mediaContent);
+
+    if (!mReloadingMedia) {
         mMediaContent = mediaContent;
         mMediaStream = stream;
     }
@@ -286,41 +318,45 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
     if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized)) == 0)
         mMediaPlayer->release();
 
+    QString mediaPath;
+
     if (mediaContent.isNull()) {
         setMediaStatus(QMediaPlayer::NoMedia);
-        return;
-    }
-
-    if (mVideoOutput && !mVideoOutput->isReady()) {
-        // if a video output is set but the video texture is not ready, delay loading the media
-        // since it can cause problems on some hardware
-        mPendingSetMedia = true;
-        return;
-    }
-
-    const QUrl url = mediaContent.canonicalUrl();
-    QString mediaPath;
-    if (url.scheme() == QLatin1String("qrc")) {
-        const QString path = url.toString().mid(3);
-        mTempFile.reset(QTemporaryFile::createNativeFile(path));
-        if (!mTempFile.isNull())
-            mediaPath = QStringLiteral("file://") + mTempFile->fileName();
     } else {
-        mediaPath = url.toString();
+        if (mVideoOutput && !mVideoOutput->isReady()) {
+            // if a video output is set but the video texture is not ready, delay loading the media
+            // since it can cause problems on some hardware
+            mPendingSetMedia = true;
+            return;
+        }
+
+        const QUrl url = mediaContent.canonicalUrl();
+        if (url.scheme() == QLatin1String("qrc")) {
+            const QString path = url.toString().mid(3);
+            mTempFile.reset(QTemporaryFile::createNativeFile(path));
+            if (!mTempFile.isNull())
+                mediaPath = QStringLiteral("file://") + mTempFile->fileName();
+        } else {
+            mediaPath = url.toString();
+        }
+
+        if (mVideoSize.isValid() && mVideoOutput)
+            mVideoOutput->setVideoSize(mVideoSize);
+
+        if ((mMediaPlayer->display() == 0) && mVideoOutput)
+            mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+        mMediaPlayer->setDataSource(mediaPath);
+        mMediaPlayer->prepareAsync();
     }
 
-    if (mVideoSize.isValid() && mVideoOutput)
-        mVideoOutput->setVideoSize(mVideoSize);
-
-    if ((mMediaPlayer->display() == 0) && mVideoOutput)
-        mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
-    mMediaPlayer->setDataSource(mediaPath);
-    mMediaPlayer->prepareAsync();
-
-    if (!reloading)
+    if (!mReloadingMedia) {
         Q_EMIT mediaChanged(mMediaContent);
+        Q_EMIT actualMediaLocationChanged(mediaPath);
+    }
 
     resetBufferingProgress();
+
+    mReloadingMedia = false;
 }
 
 void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
@@ -344,6 +380,8 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
 
 void QAndroidMediaPlayerControl::play()
 {
+    StateChangeNotifier notifier(this);
+
     // We need to prepare the mediaplayer again.
     if ((mState & AndroidMediaPlayer::Stopped) && !mMediaContent.isNull()) {
         setMedia(mMediaContent, mMediaStream);
@@ -364,6 +402,8 @@ void QAndroidMediaPlayerControl::play()
 
 void QAndroidMediaPlayerControl::pause()
 {
+    StateChangeNotifier notifier(this);
+
     setState(QMediaPlayer::PausedState);
 
     if ((mState & (AndroidMediaPlayer::Started
@@ -378,6 +418,8 @@ void QAndroidMediaPlayerControl::pause()
 
 void QAndroidMediaPlayerControl::stop()
 {
+    StateChangeNotifier notifier(this);
+
     setState(QMediaPlayer::StoppedState);
 
     if ((mState & (AndroidMediaPlayer::Prepared
@@ -395,6 +437,8 @@ void QAndroidMediaPlayerControl::stop()
 
 void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
 {
+    StateChangeNotifier notifier(this);
+
     Q_UNUSED(extra);
     switch (what) {
     case AndroidMediaPlayer::MEDIA_INFO_UNKNOWN:
@@ -426,6 +470,8 @@ void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
 
 void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
 {
+    StateChangeNotifier notifier(this);
+
     QString errorString;
     QMediaPlayer::Error error = QMediaPlayer::ResourceError;
 
@@ -478,6 +524,8 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
 
 void QAndroidMediaPlayerControl::onBufferingChanged(qint32 percent)
 {
+    StateChangeNotifier notifier(this);
+
     mBuffering = percent != 100;
     mBufferPercent = percent;
 
@@ -509,6 +557,8 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
         return;
     }
 
+    StateChangeNotifier notifier(this);
+
     mState = state;
     switch (mState) {
     case AndroidMediaPlayer::Idle:
@@ -516,7 +566,8 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
     case AndroidMediaPlayer::Initialized:
         break;
     case AndroidMediaPlayer::Preparing:
-        setMediaStatus(QMediaPlayer::LoadingMedia);
+        if (!mReloadingMedia)
+            setMediaStatus(QMediaPlayer::LoadingMedia);
         break;
     case AndroidMediaPlayer::Prepared:
         setMediaStatus(QMediaPlayer::LoadedMedia);
@@ -537,6 +588,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
         } else {
             setMediaStatus(QMediaPlayer::BufferedMedia);
         }
+        Q_EMIT positionChanged(position());
         break;
     case AndroidMediaPlayer::Paused:
         setState(QMediaPlayer::PausedState);
@@ -545,27 +597,32 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
         setState(QMediaPlayer::StoppedState);
         setMediaStatus(QMediaPlayer::UnknownMediaStatus);
         mMediaPlayer->release();
+        Q_EMIT positionChanged(0);
         break;
     case AndroidMediaPlayer::Stopped:
         setState(QMediaPlayer::StoppedState);
         setMediaStatus(QMediaPlayer::LoadedMedia);
-        setPosition(0);
+        Q_EMIT positionChanged(0);
         break;
     case AndroidMediaPlayer::PlaybackCompleted:
         setState(QMediaPlayer::StoppedState);
-        setPosition(0);
         setMediaStatus(QMediaPlayer::EndOfMedia);
         break;
     case AndroidMediaPlayer::Uninitialized:
-        // reset some properties
-        resetBufferingProgress();
-        mPendingPosition = -1;
-        mPendingSetMedia = false;
-        mPendingState = -1;
+        // reset some properties (unless we reload the same media)
+        if (!mReloadingMedia) {
+            resetBufferingProgress();
+            mPendingPosition = -1;
+            mPendingSetMedia = false;
+            mPendingState = -1;
 
-        setAudioAvailable(false);
-        setVideoAvailable(false);
-        setSeekable(true);
+            Q_EMIT durationChanged(0);
+            Q_EMIT positionChanged(0);
+
+            setAudioAvailable(false);
+            setVideoAvailable(false);
+            setSeekable(true);
+        }
         break;
     default:
         break;
@@ -597,7 +654,6 @@ void QAndroidMediaPlayerControl::setState(QMediaPlayer::State state)
         return;
 
     mCurrentState = state;
-    Q_EMIT stateChanged(mCurrentState);
 }
 
 void QAndroidMediaPlayerControl::setMediaStatus(QMediaPlayer::MediaStatus status)
@@ -605,14 +661,13 @@ void QAndroidMediaPlayerControl::setMediaStatus(QMediaPlayer::MediaStatus status
     if (mCurrentMediaStatus == status)
         return;
 
+    mCurrentMediaStatus = status;
+
     if (status == QMediaPlayer::NoMedia || status == QMediaPlayer::InvalidMedia)
         Q_EMIT durationChanged(0);
 
     if (status == QMediaPlayer::EndOfMedia)
-        Q_EMIT durationChanged(duration());
+        Q_EMIT positionChanged(position());
 
-    mCurrentMediaStatus = status;
-    Q_EMIT mediaStatusChanged(mCurrentMediaStatus);
-
     updateBufferStatus();
 }
@@ -72,6 +72,7 @@
 
 Q_SIGNALS:
     void metaDataUpdated();
+    void actualMediaLocationChanged(const QString &url);
 
 public Q_SLOTS:
     void setPosition(qint64 position) Q_DECL_OVERRIDE;
@@ -110,7 +111,9 @@ private:
     bool mPendingSetMedia;
     int mPendingVolume;
     int mPendingMute;
+    bool mReloadingMedia;
     QScopedPointer<QTemporaryFile> mTempFile;
+    int mActiveStateChangeNotifiers;
 
     void setState(QMediaPlayer::State state);
     void setMediaStatus(QMediaPlayer::MediaStatus status);
@@ -121,6 +124,8 @@ private:
     void resetBufferingProgress();
     void flushPendingStates();
     void updateBufferStatus();
+
+    friend class StateChangeNotifier;
 };
 
 QT_END_NAMESPACE
@@ -45,8 +45,8 @@ QAndroidMediaService::QAndroidMediaService(QObject *parent)
 {
     mMediaControl = new QAndroidMediaPlayerControl;
     mMetadataControl = new QAndroidMetaDataReaderControl;
-    connect(mMediaControl, SIGNAL(mediaChanged(QMediaContent)),
-            mMetadataControl, SLOT(onMediaChanged(QMediaContent)));
+    connect(mMediaControl, SIGNAL(actualMediaLocationChanged(QString)),
+            mMetadataControl, SLOT(onMediaChanged(QString)));
     connect(mMediaControl, SIGNAL(metaDataUpdated()),
             mMetadataControl, SLOT(onUpdateMetaData()));
 }
@@ -93,18 +93,18 @@ QStringList QAndroidMetaDataReaderControl::availableMetaData() const
     return m_metadata.keys();
 }
 
-void QAndroidMetaDataReaderControl::onMediaChanged(const QMediaContent &media)
+void QAndroidMetaDataReaderControl::onMediaChanged(const QString &url)
 {
     if (!m_retriever)
         return;
 
-    m_mediaContent = media;
+    m_mediaLocation = url;
     updateData();
 }
 
 void QAndroidMetaDataReaderControl::onUpdateMetaData()
 {
-    if (!m_retriever || m_mediaContent.isNull())
+    if (!m_retriever || m_mediaLocation.isEmpty())
         return;
 
     updateData();
@@ -114,8 +114,8 @@ void QAndroidMetaDataReaderControl::updateData()
 {
     m_metadata.clear();
 
-    if (!m_mediaContent.isNull()) {
-        if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) {
+    if (!m_mediaLocation.isEmpty()) {
+        if (m_retriever->setDataSource(m_mediaLocation)) {
             QString mimeType = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType);
             if (!mimeType.isNull())
                 m_metadata.insert(QMediaMetaData::MediaType, mimeType);
@@ -54,13 +54,13 @@ public:
     QStringList availableMetaData() const Q_DECL_OVERRIDE;
 
 public Q_SLOTS:
-    void onMediaChanged(const QMediaContent &media);
+    void onMediaChanged(const QString &url);
     void onUpdateMetaData();
 
 private:
     void updateData();
 
-    QMediaContent m_mediaContent;
+    QString m_mediaLocation;
     bool m_available;
     QVariantMap m_metadata;
 
@@ -35,9 +35,24 @@
 
 #include <QtCore/private/qjnihelpers_p.h>
 #include <QtCore/private/qjni_p.h>
+#include <QtCore/QUrl>
+#include <qdebug.h>
 
 QT_BEGIN_NAMESPACE
 
+static bool exceptionCheckAndClear(JNIEnv *env)
+{
+    if (Q_UNLIKELY(env->ExceptionCheck())) {
+#ifdef QT_DEBUG
+        env->ExceptionDescribe();
+#endif // QT_DEBUG
+        env->ExceptionClear();
+        return true;
+    }
+
+    return false;
+}
+
 AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
 {
     m_metadataRetriever = QJNIObjectPrivate("android/media/MediaMetadataRetriever");
@@ -68,55 +83,105 @@ void AndroidMediaMetadataRetriever::release()
     m_metadataRetriever.callMethod<void>("release");
 }
 
-bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
+bool AndroidMediaMetadataRetriever::setDataSource(const QString &urlString)
 {
     if (!m_metadataRetriever.isValid())
         return false;
 
     QJNIEnvironmentPrivate env;
+    QUrl url(urlString);
 
-    bool loaded = false;
-
-    QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.toString());
-
-    QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri",
-                                                                       "parse",
-                                                                       "(Ljava/lang/String;)Landroid/net/Uri;",
-                                                                       string.object());
-    if (env->ExceptionCheck()) {
-        env->ExceptionClear();
+    if (url.isLocalFile()) { // also includes qrc files (copied to a temp file)
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path());
+        QJNIObjectPrivate fileInputStream("java/io/FileInputStream",
+                                          "(Ljava/lang/String;)V",
+                                          string.object());
+
+        if (exceptionCheckAndClear(env))
+            return false;
+
+        QJNIObjectPrivate fd = fileInputStream.callObjectMethod("getFD",
+                                                                "()Ljava/io/FileDescriptor;");
+        if (exceptionCheckAndClear(env)) {
+            fileInputStream.callMethod<void>("close");
+            exceptionCheckAndClear(env);
+            return false;
+        }
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/io/FileDescriptor;)V",
+                                             fd.object());
+
+        bool ok = !exceptionCheckAndClear(env);
+
+        fileInputStream.callMethod<void>("close");
+        exceptionCheckAndClear(env);
+
+        if (!ok)
+            return false;
+    } else if (url.scheme() == QLatin1String("assets")) {
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(url.path().mid(1)); // remove first '/'
+        QJNIObjectPrivate activity(QtAndroidPrivate::activity());
+        QJNIObjectPrivate assetManager = activity.callObjectMethod("getAssets",
+                                                                   "()Landroid/content/res/AssetManager;");
+        QJNIObjectPrivate assetFd = assetManager.callObjectMethod("openFd",
+                                                                  "(Ljava/lang/String;)Landroid/content/res/AssetFileDescriptor;",
+                                                                  string.object());
+        if (exceptionCheckAndClear(env))
+            return false;
+
+        QJNIObjectPrivate fd = assetFd.callObjectMethod("getFileDescriptor",
+                                                        "()Ljava/io/FileDescriptor;");
+        if (exceptionCheckAndClear(env)) {
+            assetFd.callMethod<void>("close");
+            exceptionCheckAndClear(env);
+            return false;
+        }
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/io/FileDescriptor;JJ)V",
+                                             fd.object(),
+                                             assetFd.callMethod<jlong>("getStartOffset"),
+                                             assetFd.callMethod<jlong>("getLength"));
+
+        bool ok = !exceptionCheckAndClear(env);
+
+        assetFd.callMethod<void>("close");
+        exceptionCheckAndClear(env);
+
+        if (!ok)
+            return false;
+    } else if (QtAndroidPrivate::androidSdkVersion() >= 14) {
+        // On API levels >= 14, only setDataSource(String, Map<String, String>) accepts remote media
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString);
+        QJNIObjectPrivate hash("java/util/HashMap");
+
+        m_metadataRetriever.callMethod<void>("setDataSource",
+                                             "(Ljava/lang/String;Ljava/util/Map;)V",
+                                             string.object(),
+                                             hash.object());
+        if (exceptionCheckAndClear(env))
+            return false;
     } else {
+        // While on API levels < 14, only setDataSource(Context, Uri) is available and works for
+        // remote media...
+        QJNIObjectPrivate string = QJNIObjectPrivate::fromString(urlString);
+        QJNIObjectPrivate uri = m_metadataRetriever.callStaticObjectMethod("android/net/Uri",
                                                                            "parse",
                                                                            "(Ljava/lang/String;)Landroid/net/Uri;",
                                                                            string.object());
+        if (exceptionCheckAndClear(env))
+            return false;
+
         m_metadataRetriever.callMethod<void>("setDataSource",
                                              "(Landroid/content/Context;Landroid/net/Uri;)V",
                                              QtAndroidPrivate::activity(),
                                              uri.object());
-        if (env->ExceptionCheck())
-            env->ExceptionClear();
-        else
-            loaded = true;
+        if (exceptionCheckAndClear(env))
+            return false;
     }
 
-    return loaded;
-}
-
-bool AndroidMediaMetadataRetriever::setDataSource(const QString &path)
-{
-    if (!m_metadataRetriever.isValid())
-        return false;
-
-    QJNIEnvironmentPrivate env;
-
-    bool loaded = false;
-
-    m_metadataRetriever.callMethod<void>("setDataSource",
-                                         "(Ljava/lang/String;)V",
-                                         QJNIObjectPrivate::fromString(path).object());
-    if (env->ExceptionCheck())
-        env->ExceptionClear();
-    else
-        loaded = true;
-
-    return loaded;
+    return true;
 }
 
 QT_END_NAMESPACE
@@ -35,7 +35,6 @@
 #define ANDROIDMEDIAMETADATARETRIEVER_H
 
 #include <QtCore/private/qjni_p.h>
-#include <qurl.h>
 
 QT_BEGIN_NAMESPACE
 
@@ -73,8 +72,7 @@ public:
 
     QString extractMetadata(MetadataKey key);
     void release();
-    bool setDataSource(const QUrl &url);
-    bool setDataSource(const QString &path);
+    bool setDataSource(const QString &url);
 
 private:
     QJNIObjectPrivate m_metadataRetriever;
@@ -142,9 +142,9 @@ QMediaControl *AVFCameraService::requestControl(const char *name)
 void AVFCameraService::releaseControl(QMediaControl *control)
 {
     if (m_videoOutput == control) {
-        m_videoOutput = 0;
         m_session->setVideoOutput(0);
-        delete control;
+        delete m_videoOutput;
+        m_videoOutput = 0;
     }
     AVFMediaVideoProbeControl *videoProbe = qobject_cast<AVFMediaVideoProbeControl *>(control);
     if (videoProbe) {
@@ -118,14 +118,15 @@ void AVFMediaPlayerService::releaseControl(QMediaControl *control)
 #ifdef QT_DEBUG_AVF
     qDebug() << Q_FUNC_INFO << control;
 #endif
-#if defined(Q_OS_OSX)
     if (m_videoOutput == control) {
+#if defined(Q_OS_OSX)
         AVFVideoRendererControl *renderControl = qobject_cast<AVFVideoRendererControl*>(m_videoOutput);
         if (renderControl)
             renderControl->setSurface(0);
+#endif
         m_videoOutput = 0;
         m_session->setVideoOutput(0);
 
         delete control;
     }
-#endif
 }
@@ -97,8 +97,6 @@ public Q_SLOTS:
     void processPositionChange();
     void processMediaLoadError();
 
-    void processCurrentItemChanged();
-
 Q_SIGNALS:
     void positionChanged(qint64 position);
     void durationChanged(qint64 duration);
@@ -148,6 +146,9 @@ private:
         QByteArray rawData;
     };
 
+    void setAudioAvailable(bool available);
+    void setVideoAvailable(bool available);
+
     AVFMediaPlayerService *m_service;
     AVFVideoOutput *m_videoOutput;
 
@@ -70,15 +70,11 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     AVPlayerItem *m_playerItem;
     AVPlayerLayer *m_playerLayer;
     NSURL *m_URL;
-    bool m_audioAvailable;
-    bool m_videoAvailable;
 }
 
 @property (readonly, getter=player) AVPlayer* m_player;
 @property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
 @property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
-@property (readonly, getter=audioAvailable) bool m_audioAvailable;
-@property (readonly, getter=videoAvailable) bool m_videoAvailable;
 @property (readonly, getter=session) AVFMediaPlayerSession* m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session;
@@ -96,7 +92,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
 
 @implementation AVFMediaPlayerSessionObserver
 
-@synthesize m_player, m_playerItem, m_playerLayer, m_audioAvailable, m_videoAvailable, m_session;
+@synthesize m_player, m_playerItem, m_playerLayer, m_session;
 
 - (AVFMediaPlayerSessionObserver *) initWithMediaPlayerSession:(AVFMediaPlayerSession *)session
 {
@@ -186,18 +182,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         return;
     }
 
-    m_audioAvailable = false;
-    m_videoAvailable = false;
-
-    //Check each track of asset for audio and video content
-    NSArray *tracks = [asset tracks];
-    for (AVAssetTrack *track in tracks) {
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
-            m_audioAvailable = true;
-        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
-            m_videoAvailable = true;
-    }
-
     //At this point we're ready to set up for playback of the asset.
     //Stop observing our prior AVPlayerItem, if we have one.
     if (m_playerItem)
@@ -258,18 +242,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
         m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
         [m_playerLayer retain];
         m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-
-        //Get the native size of the new item, and reset the bounds of the player layer
-        AVAsset *asset = m_playerItem.asset;
-        if (asset) {
-            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-            if ([tracks count]) {
-                AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-            }
-        }
-
+        m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
     }
 
     //Observe the AVPlayer "currentItem" property to find out when any
@@ -322,7 +295,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     //AVPlayerItem "status" property value observer.
     if (context == AVFMediaPlayerSessionObserverStatusObservationContext)
     {
-        AVPlayerStatus status = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
+        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
         switch (status)
         {
             //Indicates that the status of the player is not yet known because
@@ -366,24 +339,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
     {
         AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
         if (m_playerItem != newPlayerItem)
-        {
             m_playerItem = newPlayerItem;
-
-            //Get the native size of the new item, and reset the bounds of the player layer
-            //AVAsset *asset = m_playerItem.asset;
-            AVAsset *asset = [m_playerItem asset];
-            if (asset) {
-                NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-                if ([tracks count]) {
-                    AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
-                    m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-                    m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
-                }
-            }
-
-        }
-        if (self.session)
-            QMetaObject::invokeMethod(m_session, "processCurrentItemChanged", Qt::AutoConnection);
     }
     else
     {
@@ -513,6 +469,9 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
     m_resources = content;
     m_mediaStream = stream;
 
+    setAudioAvailable(false);
+    setVideoAvailable(false);
+
     QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
 
     if (content.isNull() || content.canonicalUrl().isEmpty()) {
@@ -582,14 +541,32 @@ bool AVFMediaPlayerSession::isMuted() const
     return m_muted;
 }
 
+void AVFMediaPlayerSession::setAudioAvailable(bool available)
+{
+    if (m_audioAvailable == available)
+        return;
+
+    m_audioAvailable = available;
+    Q_EMIT audioAvailableChanged(available);
+}
+
 bool AVFMediaPlayerSession::isAudioAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer audioAvailable];
+    return m_audioAvailable;
+}
+
+void AVFMediaPlayerSession::setVideoAvailable(bool available)
+{
+    if (m_videoAvailable == available)
+        return;
+
+    m_videoAvailable = available;
+    Q_EMIT videoAvailableChanged(available);
 }
 
 bool AVFMediaPlayerSession::isVideoAvailable() const
 {
-    return [(AVFMediaPlayerSessionObserver*)m_observer videoAvailable];
+    return m_videoAvailable;
 }
 
 bool AVFMediaPlayerSession::isSeekable() const
@@ -802,16 +779,41 @@ void AVFMediaPlayerSession::processLoadStateChange()
     bool isPlaying = (m_state != QMediaPlayer::StoppedState);
 
     if (currentStatus == AVPlayerStatusReadyToPlay) {
+        AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
+        if (playerItem) {
+            // Check each track for audio and video content
+            AVAssetTrack *videoTrack = nil;
+            NSArray *tracks = playerItem.tracks;
+            for (AVPlayerItemTrack *track in tracks) {
+                AVAssetTrack *assetTrack = track.assetTrack;
+                if (assetTrack) {
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio])
+                        setAudioAvailable(true);
+                    if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
+                        setVideoAvailable(true);
+                        if (!videoTrack)
+                            videoTrack = assetTrack;
+                    }
+                }
+            }
+
+            // Get the native size of the video, and reset the bounds of the player layer
+            AVPlayerLayer *playerLayer = [(AVFMediaPlayerSessionObserver*)m_observer playerLayer];
+            if (videoTrack && playerLayer) {
+                playerLayer.bounds = CGRectMake(0.0f, 0.0f,
                                                 videoTrack.naturalSize.width,
                                                 videoTrack.naturalSize.height);
+
+                if (m_videoOutput && m_state != QMediaPlayer::StoppedState) {
+                    m_videoOutput->setLayer(playerLayer);
+                }
+            }
+        }
+
         qint64 currentDuration = duration();
         if (m_duration != currentDuration)
             Q_EMIT durationChanged(m_duration = currentDuration);
 
-        if (m_audioAvailable != isAudioAvailable())
-            Q_EMIT audioAvailableChanged(m_audioAvailable = !m_audioAvailable);
-
-        if (m_videoAvailable != isVideoAvailable())
-            Q_EMIT videoAvailableChanged(m_videoAvailable = !m_videoAvailable);
-
         newStatus = isPlaying ? QMediaPlayer::BufferedMedia : QMediaPlayer::LoadedMedia;
 
         if (m_state == QMediaPlayer::PlayingState && [(AVFMediaPlayerSessionObserver*)m_observer player]) {
@@ -835,17 +837,3 @@ void AVFMediaPlayerSession::processMediaLoadError()
     Q_EMIT mediaStatusChanged(m_mediaStatus = QMediaPlayer::InvalidMedia);
     Q_EMIT stateChanged(m_state = QMediaPlayer::StoppedState);
 }
-
-void AVFMediaPlayerSession::processCurrentItemChanged()
-{
-#ifdef QT_DEBUG_AVF
-    qDebug() << Q_FUNC_INFO;
-#endif
-
-    AVPlayerLayer *playerLayer = [(AVFMediaPlayerSessionObserver*)m_observer playerLayer];
-
-    if (m_videoOutput && m_state != QMediaPlayer::StoppedState) {
-        m_videoOutput->setLayer(playerLayer);
-    }
-
-}
@@ -64,8 +64,10 @@ AVFVideoWidget::~AVFVideoWidget()
     qDebug() << Q_FUNC_INFO;
 #endif
 
-    if (m_playerLayer)
+    if (m_playerLayer) {
+        [m_playerLayer removeFromSuperlayer];
         [m_playerLayer release];
+    }
 }
 
 QSize AVFVideoWidget::sizeHint() const
@@ -61,8 +61,10 @@ AVFVideoWindowControl::AVFVideoWindowControl(QObject *parent)
 
 AVFVideoWindowControl::~AVFVideoWindowControl()
 {
-    if (m_playerLayer)
+    if (m_playerLayer) {
+        [m_playerLayer removeFromSuperlayer];
         [m_playerLayer release];
+    }
 }
 
 WId AVFVideoWindowControl::winId() const
@@ -45,6 +45,7 @@
 # include "coreaudiosessionmanager.h"
 #endif
 
+#include <QtCore/QDataStream>
 #include <QtCore/QDebug>
 #include <QtCore/QSet>
 
@@ -38,6 +38,7 @@
 #include <CoreAudio/CoreAudioTypes.h>
 #include <AudioToolbox/AudioToolbox.h>
 
+#include <QtCore/QIODevice>
 #include <QtCore/QWaitCondition>
 #include <QtCore/QMutex>
 #include <QtCore/QTimer>
@@ -52,6 +52,7 @@
 #endif
 
 #include <QtMultimedia/private/qaudiohelpers_p.h>
+#include <QtCore/QDataStream>
 #include <QtCore/QDebug>
 
 QT_BEGIN_NAMESPACE
@@ -41,6 +41,7 @@
 #include <AudioUnit/AudioUnit.h>
 #include <CoreAudio/CoreAudioTypes.h>
 
+#include <QtCore/QIODevice>
 #include <QtCore/QWaitCondition>
 #include <QtCore/QMutex>
 
@@ -43,6 +43,7 @@
 #include "coreaudiodeviceinfo.h"
 #include "coreaudioutils.h"
 
+#include <QtCore/QDataStream>
 #include <QtCore/QTimer>
 #include <QtCore/QDebug>
 
@@ -698,14 +699,14 @@ void CoreAudioOutput::audioThreadStop()
 {
     stopTimers();
     if (m_audioThreadState.testAndSetAcquire(Running, Stopped))
-        m_threadFinished.wait(&m_mutex);
+        m_threadFinished.wait(&m_mutex, 500);
 }
 
 void CoreAudioOutput::audioThreadDrain()
 {
     stopTimers();
     if (m_audioThreadState.testAndSetAcquire(Running, Draining))
-        m_threadFinished.wait(&m_mutex);
+        m_threadFinished.wait(&m_mutex, 500);
 }
 
 void CoreAudioOutput::audioDeviceStop()
@@ -117,4 +117,28 @@ GstEncodingProfile *CameraBinAudioEncoder::createProfile()
     return profile;
 }
 
+void CameraBinAudioEncoder::applySettings(GstElement *encoder)
+{
+    GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
+    const char * const name = gst_plugin_feature_get_name(
+                GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
+
+    const bool isVorbis = qstrcmp(name, "vorbisenc") == 0;
+
+    const int bitRate = m_actualAudioSettings.bitRate();
+    if (!isVorbis && bitRate == -1) {
+        // Bit rate is invalid, don't evaluate the remaining conditions unless the encoder is
+        // vorbisenc which is known to accept -1 as an unspecified bitrate.
+    } else if (g_object_class_find_property(objectClass, "bitrate")) {
+        g_object_set(G_OBJECT(encoder), "bitrate", bitRate, NULL);
+    } else if (g_object_class_find_property(objectClass, "target-bitrate")) {
+        g_object_set(G_OBJECT(encoder), "target-bitrate", bitRate, NULL);
+    }
+
+    if (isVorbis) {
+        static const double qualities[] = { 0.1, 0.3, 0.5, 0.7, 1.0 };
+        g_object_set(G_OBJECT(encoder), "quality", qualities[m_actualAudioSettings.quality()], NULL);
+    }
+}
+
 QT_END_NAMESPACE
@@ -78,6 +78,8 @@ public:
 
     GstEncodingProfile *createProfile();
 
+    void applySettings(GstElement *element);
+
 Q_SIGNALS:
     void settingsChanged();
 
@@ -38,13 +38,9 @@
 
 #include "camerabinserviceplugin.h"
 
-
 #include "camerabinservice.h"
 #include <private/qgstutils_p.h>
 
-#include <private/qcore_unix_p.h>
-#include <linux/videodev2.h>
-
 QT_BEGIN_NAMESPACE
 
 template <typename T, int N> static int lengthOf(const T(&)[N]) { return N; }
@@ -98,6 +98,8 @@
 #define CAPTURE_START "start-capture"
 #define CAPTURE_STOP "stop-capture"
 
+#define FILESINK_BIN_NAME "videobin-filesink"
+
 #define CAMERABIN_IMAGE_MODE 1
 #define CAMERABIN_VIDEO_MODE 2
 
@@ -133,6 +135,7 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
     m_capsFilter(0),
     m_fileSink(0),
     m_audioEncoder(0),
+    m_videoEncoder(0),
     m_muxer(0)
 {
     if (m_sourceFactory)
@@ -140,6 +143,8 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
 
     m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
     g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
+    g_signal_connect(G_OBJECT(m_camerabin), "element-added", G_CALLBACK(elementAdded), this);
+    g_signal_connect(G_OBJECT(m_camerabin), "element-removed", G_CALLBACK(elementRemoved), this);
     qt_gst_object_ref_sink(m_camerabin);
 
     m_bus = gst_element_get_bus(m_camerabin);
@@ -344,6 +349,9 @@ void CameraBinSession::setupCaptureResolution()
     } else {
         g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, NULL, NULL);
     }
+
+    if (m_videoEncoder)
+        m_videoEncodeControl->applySettings(m_videoEncoder);
 }
 
 void CameraBinSession::setAudioCaptureCaps()
@@ -370,6 +378,9 @@ void CameraBinSession::setAudioCaptureCaps()
     GstCaps *caps = gst_caps_new_full(structure, NULL);
     g_object_set(G_OBJECT(m_camerabin), AUDIO_CAPTURE_CAPS_PROPERTY, caps, NULL);
     gst_caps_unref(caps);
+
+    if (m_audioEncoder)
+        m_audioEncodeControl->applySettings(m_audioEncoder);
 }
 
 GstElement *CameraBinSession::buildCameraSource()
@@ -712,13 +723,19 @@ void CameraBinSession::updateBusyStatus(GObject *o, GParamSpec *p, gpointer d)
 
 qint64 CameraBinSession::duration() const
 {
-    GstFormat format = GST_FORMAT_TIME;
-    gint64 duration = 0;
-
-    if ( m_camerabin && gst_element_query_position(m_camerabin, &format, &duration))
-        return duration / 1000000;
-    else
-        return 0;
+    if (m_camerabin) {
+        GstElement *fileSink = gst_bin_get_by_name(GST_BIN(m_camerabin), FILESINK_BIN_NAME);
+        if (fileSink) {
+            GstFormat format = GST_FORMAT_TIME;
+            gint64 duration = 0;
+            bool ret = gst_element_query_position(fileSink, &format, &duration);
+            gst_object_unref(GST_OBJECT(fileSink));
+            if (ret)
+                return duration / 1000000;
+        }
+    }
+
+    return 0;
 }
 
 bool CameraBinSession::isMuted() const
@@ -1293,4 +1310,32 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
     return res;
 }
 
+void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSession *session)
+{
+    GstElementFactory *factory = gst_element_get_factory(element);
+
+    if (GST_IS_BIN(element)) {
+        g_signal_connect(G_OBJECT(element), "element-added", G_CALLBACK(elementAdded), session);
+        g_signal_connect(G_OBJECT(element), "element-removed", G_CALLBACK(elementRemoved), session);
+    } else if (!factory) {
+        // no-op
+    } else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_AUDIO_ENCODER)) {
+        session->m_audioEncoder = element;
+
+        session->m_audioEncodeControl->applySettings(element);
+    } else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER)) {
+        session->m_videoEncoder = element;
+
+        session->m_videoEncodeControl->applySettings(element);
+    }
+}
+
+void CameraBinSession::elementRemoved(GstBin *, GstElement *element, CameraBinSession *session)
+{
+    if (element == session->m_audioEncoder)
+        session->m_audioEncoder = 0;
+    else if (element == session->m_videoEncoder)
+        session->m_videoEncoder = 0;
+}
+
 QT_END_NAMESPACE
@@ -190,6 +190,9 @@ private:
     void setAudioCaptureCaps();
     static void updateBusyStatus(GObject *o, GParamSpec *p, gpointer d);
 
+    static void elementAdded(GstBin *bin, GstElement *element, CameraBinSession *session);
+    static void elementRemoved(GstBin *bin, GstElement *element, CameraBinSession *session);
+
     QUrl m_sink;
     QUrl m_actualSink;
     bool m_recordingActive;
@@ -241,6 +244,7 @@ private:
     GstElement *m_capsFilter;
     GstElement *m_fileSink;
     GstElement *m_audioEncoder;
+    GstElement *m_videoEncoder;
     GstElement *m_muxer;
 
 public:
@@ -175,4 +175,46 @@ GstEncodingProfile *CameraBinVideoEncoder::createProfile()
     return (GstEncodingProfile *)profile;
 }
 
+void CameraBinVideoEncoder::applySettings(GstElement *encoder)
+{
+    GObjectClass * const objectClass = G_OBJECT_GET_CLASS(encoder);
+    const char * const name = gst_plugin_feature_get_name(
+                GST_PLUGIN_FEATURE(gst_element_get_factory(encoder)));
+
+    const int bitRate = m_actualVideoSettings.bitRate();
+    if (bitRate == -1) {
+        // Bit rate is invalid, don't evaluate the remaining conditions.
+    } else if (g_object_class_find_property(objectClass, "bitrate")) {
+        g_object_set(G_OBJECT(encoder), "bitrate", bitRate, NULL);
+    } else if (g_object_class_find_property(objectClass, "target-bitrate")) {
+        g_object_set(G_OBJECT(encoder), "target-bitrate", bitRate, NULL);
+    }
+
+    if (qstrcmp(name, "theoraenc") == 0) {
+        static const int qualities[] = { 8, 16, 32, 45, 60 };
+        g_object_set(G_OBJECT(encoder), "quality", qualities[m_actualVideoSettings.quality()], NULL);
+    } else if (qstrncmp(name, "avenc_", 6) == 0) {
+        if (g_object_class_find_property(objectClass, "pass")) {
+            static const int modes[] = { 0, 2, 512, 1024 };
+            g_object_set(G_OBJECT(encoder), "pass", modes[m_actualVideoSettings.encodingMode()], NULL);
+        }
+        if (g_object_class_find_property(objectClass, "quantizer")) {
+            static const double qualities[] = { 20, 8.0, 3.0, 2.5, 2.0 };
+            g_object_set(G_OBJECT(encoder), "quantizer", qualities[m_actualVideoSettings.quality()], NULL);
+        }
+    } else if (qstrncmp(name, "omx", 3) == 0) {
+        if (!g_object_class_find_property(objectClass, "control-rate")) {
+        } else switch (m_actualVideoSettings.encodingMode()) {
+        case QMultimedia::ConstantBitRateEncoding:
+            g_object_set(G_OBJECT(encoder), "control-rate", 2, NULL);
+            break;
+        case QMultimedia::AverageBitRateEncoding:
+            g_object_set(G_OBJECT(encoder), "control-rate", 1, NULL);
+            break;
+        default:
+            g_object_set(G_OBJECT(encoder), "control-rate", 0, NULL);
+        }
+    }
+}
+
 QT_END_NAMESPACE
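
applySettings() never assumes a particular encoder: it looks a property up on the element's GObject class first and only sets it when present, so one code path covers encoders that expose "bitrate", "target-bitrate", or neither. The same probe-then-set pattern in isolation (the helper name setIntPropertyIfPresent is illustrative, not part of the patch):

    // Set an integer property only if the element actually exposes it.
    static bool setIntPropertyIfPresent(GstElement *element, const char *property, int value)
    {
        GObjectClass *objectClass = G_OBJECT_GET_CLASS(element);
        if (!g_object_class_find_property(objectClass, property))
            return false;                       // property unknown to this encoder
        g_object_set(G_OBJECT(element), property, value, NULL);
        return true;
    }

    // Mirrors the bitrate handling above:
    //     if (!setIntPropertyIfPresent(encoder, "bitrate", bitRate))
    //         setIntPropertyIfPresent(encoder, "target-bitrate", bitRate);
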
@@ -76,6 +76,8 @@ public:
 
     GstEncodingProfile *createProfile();
 
+    void applySettings(GstElement *encoder);
+
 Q_SIGNALS:
     void settingsChanged();
 
@@ -15,7 +15,6 @@ HEADERS += $$PWD/qgstreamercaptureservice.h \
     $$PWD/qgstreamerrecordercontrol.h \
     $$PWD/qgstreamermediacontainercontrol.h \
     $$PWD/qgstreamercameracontrol.h \
-    $$PWD/qgstreamerv4l2input.h \
     $$PWD/qgstreamercapturemetadatacontrol.h \
     $$PWD/qgstreamerimagecapturecontrol.h \
     $$PWD/qgstreamerimageencode.h \
@@ -28,7 +27,6 @@ SOURCES += $$PWD/qgstreamercaptureservice.cpp \
     $$PWD/qgstreamerrecordercontrol.cpp \
     $$PWD/qgstreamermediacontainercontrol.cpp \
     $$PWD/qgstreamercameracontrol.cpp \
-    $$PWD/qgstreamerv4l2input.cpp \
     $$PWD/qgstreamercapturemetadatacontrol.cpp \
     $$PWD/qgstreamerimagecapturecontrol.cpp \
     $$PWD/qgstreamerimageencode.cpp \
@@ -37,13 +35,18 @@ SOURCES += $$PWD/qgstreamercaptureservice.cpp \
 # Camera usage with gstreamer needs to have
 #CONFIG += use_gstreamer_camera
 
-use_gstreamer_camera {
+use_gstreamer_camera:config_linux_v4l {
     DEFINES += USE_GSTREAMER_CAMERA
 
     OTHER_FILES += \
         mediacapturecamera.json
 
+    HEADERS += \
+        $$PWD/qgstreamerv4l2input.h
+    SOURCES += \
+        $$PWD/qgstreamerv4l2input.cpp
+
 } else {
     OTHER_FILES += \
         mediacapture.json
 }
@@ -40,9 +40,12 @@
 #include "qgstreamerimageencode.h"
 #include "qgstreamercameracontrol.h"
 #include <private/qgstreamerbushelper_p.h>
-#include "qgstreamerv4l2input.h"
 #include "qgstreamercapturemetadatacontrol.h"
 
+#if defined(USE_GSTREAMER_CAMERA)
+#include "qgstreamerv4l2input.h"
+#endif
+
 #include "qgstreamerimagecapturecontrol.h"
 #include <private/qgstreameraudioinputselector_p.h>
 #include <private/qgstreamervideoinputdevicecontrol_p.h>
@@ -66,7 +69,9 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
     m_cameraControl = 0;
     m_metaDataControl = 0;
 
+#if defined(USE_GSTREAMER_CAMERA)
     m_videoInput = 0;
+#endif
     m_audioInputSelector = 0;
     m_videoInputDevice = 0;
 
@@ -82,6 +87,7 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
         m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::Audio, this);
     }
 
+#if defined(USE_GSTREAMER_CAMERA)
     if (service == Q_MEDIASERVICE_CAMERA) {
         m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::AudioAndVideo, this);
         m_cameraControl = new QGstreamerCameraControl(m_captureSession);
@@ -103,6 +109,7 @@ QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObje
 #endif
         m_imageCaptureControl = new QGstreamerImageCaptureControl(m_captureSession);
     }
+#endif
 
     m_audioInputSelector = new QGstreamerAudioInputSelector(this);
     connect(m_audioInputSelector, SIGNAL(activeInputChanged(QString)), m_captureSession, SLOT(setCaptureDevice(QString)));
@@ -70,7 +70,9 @@ private:
 
     QGstreamerCaptureSession *m_captureSession;
     QGstreamerCameraControl *m_cameraControl;
+#if defined(USE_GSTREAMER_CAMERA)
     QGstreamerV4L2Input *m_videoInput;
+#endif
     QGstreamerCaptureMetaDataControl *m_metaDataControl;
 
     QAudioInputSelectorControl *m_audioInputSelector;
@@ -76,8 +76,10 @@ static const QGstreamerMetaDataKeyLookup *qt_gstreamerMetaDataKeys()
 
         // Music
         metadataKeys->insert(GST_TAG_ALBUM, QMediaMetaData::AlbumTitle);
-        metadataKeys->insert(GST_TAG_ARTIST, QMediaMetaData::AlbumArtist);
-        metadataKeys->insert(GST_TAG_PERFORMER, QMediaMetaData::ContributingArtist);
+#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 25)
+        metadataKeys->insert(GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist);
+#endif
+        metadataKeys->insert(GST_TAG_ARTIST, QMediaMetaData::ContributingArtist);
 #if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
         metadataKeys->insert(GST_TAG_COMPOSER, QMediaMetaData::Composer);
 #endif
@@ -164,6 +166,11 @@ void QGstreamerMetaDataProvider::updateTags()
         }
     }
 
+    if (oldTags.isEmpty() != m_tags.isEmpty()) {
+        emit metaDataAvailableChanged(isMetaDataAvailable());
+        changed = true;
+    }
+
     if (changed)
         emit metaDataChanged();
 }
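
updateTags() now reports availability changes separately from content changes: metaDataAvailableChanged() is emitted only when the tag map flips between empty and non-empty, while metaDataChanged() still covers any difference in values. Condensed, and assuming oldTags, m_tags and the changed flag are set up earlier in updateTags() as in the hunk above:

    // Inside QGstreamerMetaDataProvider::updateTags(), after m_tags is rebuilt:
    if (oldTags.isEmpty() != m_tags.isEmpty()) {
        emit metaDataAvailableChanged(isMetaDataAvailable());  // availability flipped
        changed = true;
    }

    if (changed)
        emit metaDataChanged();                                // some key's value differs
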
@@ -60,7 +60,6 @@ QGstreamerPlayerControl::QGstreamerPlayerControl(QGstreamerPlayerSession *sessio
     , m_currentState(QMediaPlayer::StoppedState)
     , m_mediaStatus(QMediaPlayer::NoMedia)
     , m_bufferProgress(-1)
-    , m_seekToStartPending(false)
     , m_pendingSeekPosition(-1)
     , m_setMediaPending(false)
     , m_stream(0)
@@ -69,7 +68,7 @@ QGstreamerPlayerControl::QGstreamerPlayerControl(QGstreamerPlayerSession *sessio
     Q_ASSERT(m_resources);
 
     connect(m_session, SIGNAL(positionChanged(qint64)),
-            this, SLOT(updatePosition(qint64)));
+            this, SIGNAL(positionChanged(qint64)));
     connect(m_session, SIGNAL(durationChanged(qint64)),
             this, SIGNAL(durationChanged(qint64)));
     connect(m_session, SIGNAL(mutedStateChanged(bool)),
@@ -94,8 +93,6 @@ QGstreamerPlayerControl::QGstreamerPlayerControl(QGstreamerPlayerSession *sessio
             this, SLOT(handleInvalidMedia()));
     connect(m_session, SIGNAL(playbackRateChanged(qreal)),
             this, SIGNAL(playbackRateChanged(qreal)));
-    connect(m_session, SIGNAL(seekableChanged(bool)),
-            this, SLOT(applyPendingSeek(bool)));
 
     connect(m_resources, SIGNAL(resourcesGranted()), SLOT(handleResourcesGranted()));
     //denied signal should be queued to have correct state update process,
@@ -117,7 +114,7 @@ QMediaPlayerResourceSetInterface* QGstreamerPlayerControl::resources() const
 
 qint64 QGstreamerPlayerControl::position() const
 {
-    return m_seekToStartPending ? 0 : m_session->position();
+    return m_pendingSeekPosition != -1 ? m_pendingSeekPosition : m_session->position();
 }
 
 qint64 QGstreamerPlayerControl::duration() const
@@ -183,15 +180,21 @@ void QGstreamerPlayerControl::setPosition(qint64 pos)
 
     if (m_mediaStatus == QMediaPlayer::EndOfMedia) {
         m_mediaStatus = QMediaPlayer::LoadedMedia;
-        m_seekToStartPending = true;
     }
 
-    if (m_session->isSeekable() && m_session->seek(pos)) {
-        m_seekToStartPending = false;
-    } else {
+    if (m_currentState == QMediaPlayer::StoppedState) {
         m_pendingSeekPosition = pos;
-        //don't display the first video frame since it's not what user requested.
-        m_session->showPrerollFrames(false);
+        emit positionChanged(m_pendingSeekPosition);
+    } else if (m_session->isSeekable()) {
+        m_session->showPrerollFrames(true);
+        m_session->seek(pos);
+        m_pendingSeekPosition = -1;
+    } else if (m_session->state() == QMediaPlayer::StoppedState) {
+        m_pendingSeekPosition = pos;
+        emit positionChanged(m_pendingSeekPosition);
+    } else if (m_pendingSeekPosition != -1) {
+        m_pendingSeekPosition = -1;
+        emit positionChanged(m_pendingSeekPosition);
     }
 
     popAndNotifyState();
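
With m_seekToStartPending gone, every deferred seek goes through m_pendingSeekPosition: a seek issued while the control is stopped (or the pipeline is not yet seekable) is only recorded and echoed back through positionChanged(), and the real GStreamer seek happens later, once the session reaches the paused state (see the updateSessionState() hunk further down). A condensed sketch of both halves of that bookkeeping, using the members from this file:

    // Condensed: remember the seek while stopped, report it until it is applied.
    void QGstreamerPlayerControl::setPosition(qint64 pos)      // simplified
    {
        if (m_currentState == QMediaPlayer::StoppedState) {
            m_pendingSeekPosition = pos;                        // do not touch the pipeline yet
            emit positionChanged(m_pendingSeekPosition);
        } else if (m_session->isSeekable()) {
            m_session->seek(pos);                               // live pipeline: seek immediately
            m_pendingSeekPosition = -1;
        }
    }

    qint64 QGstreamerPlayerControl::position() const
    {
        // While a seek is pending the pipeline still reports the old position,
        // so answer with the value the caller asked for instead.
        return m_pendingSeekPosition != -1 ? m_pendingSeekPosition : m_session->position();
    }
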
@@ -239,26 +242,30 @@ void QGstreamerPlayerControl::playOrPause(QMediaPlayer::State newState)
     }
 #endif
 
+    if (m_mediaStatus == QMediaPlayer::EndOfMedia && m_pendingSeekPosition == -1) {
+        m_pendingSeekPosition = 0;
+    }
+
     if (!m_resources->isGranted())
         m_resources->acquire();
 
     if (m_resources->isGranted()) {
-        if (m_seekToStartPending) {
+        // show prerolled frame if switching from stopped state
+        if (m_pendingSeekPosition == -1) {
+            m_session->showPrerollFrames(true);
+        } else if (m_session->state() == QMediaPlayer::StoppedState) {
+            // Don't evaluate the next two conditions.
+        } else if (m_session->isSeekable()) {
             m_session->pause();
-            if (!m_session->seek(0)) {
-                m_bufferProgress = -1;
-                m_session->stop();
-                m_mediaStatus = QMediaPlayer::LoadingMedia;
-            }
-            m_seekToStartPending = false;
+            m_session->showPrerollFrames(true);
+            m_session->seek(m_pendingSeekPosition);
+            m_pendingSeekPosition = -1;
+        } else {
+            m_pendingSeekPosition = -1;
         }
 
         bool ok = false;
 
-        // show prerolled frame if switching from stopped state
-        if (newState != QMediaPlayer::StoppedState && m_currentState == QMediaPlayer::StoppedState && m_pendingSeekPosition == -1)
-            m_session->showPrerollFrames(true);
-
         //To prevent displaying the first video frame when playback is resumed
         //the pipeline is paused instead of playing, seeked to requested position,
         //and after seeking is finished (position updated) playback is restarted
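
Restarting playback after EndOfMedia follows the same route: instead of stopping the pipeline and reloading the media (the removed m_bufferProgress/LoadingMedia path), playOrPause() simply queues a pending seek back to the start and lets the normal pending-seek handling apply it. In short:

    // Condensed from playOrPause() above: replay-after-end becomes a queued seek to 0.
    if (m_mediaStatus == QMediaPlayer::EndOfMedia && m_pendingSeekPosition == -1)
        m_pendingSeekPosition = 0;
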
@@ -305,7 +312,7 @@ void QGstreamerPlayerControl::stop()
         m_session->pause();
 
         if (m_mediaStatus != QMediaPlayer::EndOfMedia) {
-            m_seekToStartPending = true;
+            m_pendingSeekPosition = 0;
             emit positionChanged(position());
         }
     }
@@ -343,7 +350,7 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
 
     m_currentState = QMediaPlayer::StoppedState;
     QMediaContent oldMedia = m_currentResource;
-    m_pendingSeekPosition = -1;
+    m_pendingSeekPosition = 0;
     m_session->showPrerollFrames(false); // do not show prerolled frames until pause() or play() explicitly called
     m_setMediaPending = false;
 
@@ -390,7 +397,6 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
 
     m_currentResource = content;
     m_stream = stream;
-    m_seekToStartPending = false;
 
     QNetworkRequest request;
 
@@ -462,8 +468,21 @@ void QGstreamerPlayerControl::updateSessionState(QMediaPlayer::State state)
 {
     pushState();
 
-    if (state == QMediaPlayer::StoppedState)
+    if (state == QMediaPlayer::StoppedState) {
+        m_session->showPrerollFrames(false);
         m_currentState = QMediaPlayer::StoppedState;
+    }
+
+    if (state == QMediaPlayer::PausedState && m_currentState != QMediaPlayer::StoppedState) {
+        if (m_pendingSeekPosition != -1 && m_session->isSeekable()) {
+            m_session->showPrerollFrames(true);
+            m_session->seek(m_pendingSeekPosition);
+        }
+        m_pendingSeekPosition = -1;
+
+        if (m_currentState == QMediaPlayer::PlayingState)
+            m_session->play();
+    }
 
     updateMediaStatus();
 
@@ -512,7 +531,6 @@ void QGstreamerPlayerControl::processEOS()
     m_mediaStatus = QMediaPlayer::EndOfMedia;
     emit positionChanged(position());
     m_session->endOfMediaReset();
-    m_setMediaPending = true;
 
     if (m_currentState != QMediaPlayer::StoppedState) {
         m_currentState = QMediaPlayer::StoppedState;
@@ -549,17 +567,12 @@ void QGstreamerPlayerControl::setBufferProgress(int progress)
         emit bufferStatusChanged(m_bufferProgress);
 }
 
-void QGstreamerPlayerControl::applyPendingSeek(bool isSeekable)
-{
-    if (isSeekable && m_pendingSeekPosition != -1)
-        setPosition(m_pendingSeekPosition);
-}
-
 void QGstreamerPlayerControl::handleInvalidMedia()
 {
     pushState();
     m_mediaStatus = QMediaPlayer::InvalidMedia;
     m_currentState = QMediaPlayer::StoppedState;
+    m_setMediaPending = true;
     popAndNotifyState();
 }
 
@@ -636,24 +649,4 @@ void QGstreamerPlayerControl::popAndNotifyState()
     }
 }
 
-void QGstreamerPlayerControl::updatePosition(qint64 pos)
-{
-#ifdef DEBUG_PLAYBIN
-    qDebug() << Q_FUNC_INFO << pos/1000.0 << "pending:" << m_pendingSeekPosition/1000.0;
-#endif
-
-    if (m_pendingSeekPosition != -1) {
-        //seek request is complete, it's safe to resume playback
-        //with prerolled frame displayed
-        m_pendingSeekPosition = -1;
-        if (m_currentState != QMediaPlayer::StoppedState)
-            m_session->showPrerollFrames(true);
-        if (m_currentState == QMediaPlayer::PlayingState) {
-            m_session->play();
-        }
-    }
-
-    emit positionChanged(pos);
-}
-
 QT_END_NAMESPACE