diff --git a/examples/multimedia/video/qmlvideofilter_opencl/README b/examples/multimedia/video/qmlvideofilter_opencl/README index 96b812b8..cc351e43 100644 --- a/examples/multimedia/video/qmlvideofilter_opencl/README +++ b/examples/multimedia/video/qmlvideofilter_opencl/README @@ -3,14 +3,11 @@ which is assumed to be provided in RGB format. The OpenCL operation is done on an OpenGL texture using CL-GL interop, without any further readbacks or copies (except for the initial texture upload, when necessary). -Currently OS X, Windows with real OpenGL (opengl32.dll) and Linux (GLX only) are +Currently OS X, Windows with real OpenGL (opengl32.dll) and Linux (GLX) are supported. Note that an OpenCL implementation with GPU support is required. The platform and device selection logic supports NVIDIA, AMD and Intel. Porting to other platforms is probably simple, see clCreateContextFromType. -On Windows you may need to edit testplugin.pro to specify the location of the -OpenCL headers and libraries. - YUV formats are not supported in this example. This is probably not an issue an OS X and Windows, but will most likely disable the example on Linux. diff --git a/examples/multimedia/video/qmlvideofilter_opencl/main.cpp b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp index af5aa8f6..0ca5e35e 100644 --- a/examples/multimedia/video/qmlvideofilter_opencl/main.cpp +++ b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp @@ -181,8 +181,6 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) : CL_WGL_HDC_KHR, (cl_context_properties) wglGetCurrentDC(), 0 }; #elif defined(Q_OS_LINUX) - // An elegant alternative to glXGetCurrentContext. This will even survive - // (without interop) when using something other than GLX. QVariant nativeGLXHandle = QOpenGLContext::currentContext()->nativeHandle(); QGLXNativeContext nativeGLXContext; if (!nativeGLXHandle.isNull() && nativeGLXHandle.canConvert()) @@ -191,6 +189,7 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) : qWarning("Failed to get the underlying GLX context from the current QOpenGLContext"); cl_context_properties contextProps[] = { CL_CONTEXT_PLATFORM, (cl_context_properties) platform, CL_GL_CONTEXT_KHR, (cl_context_properties) nativeGLXContext.context(), + CL_GLX_DISPLAY_KHR, (cl_context_properties) glXGetCurrentDisplay(), 0 }; #endif @@ -203,16 +202,22 @@ CLFilterRunnable::CLFilterRunnable(CLFilter *filter) : // Get the GPU device id #if defined(Q_OS_OSX) // On OS X, get the "online" device/GPU. This is required for OpenCL/OpenGL context sharing. 
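The Linux hunk in main.cpp adds CL_GLX_DISPLAY_KHR next to CL_GL_CONTEXT_KHR when building the cl_context_properties array, which is what cl_khr_gl_sharing expects for a GLX-backed shared context. A minimal sketch of that GLX path, assuming a GLX context is current and a cl_platform_id has already been selected; for brevity it calls glXGetCurrentContext() directly instead of going through QOpenGLContext::nativeHandle() as the example does, and error handling is trimmed:

```cpp
// Sketch only: assumes a current GLX context and an already-chosen 'platform'.
#include <CL/cl.h>
#include <CL/cl_gl.h>
#include <GL/glx.h>

cl_context createSharedContext(cl_platform_id platform)
{
    cl_context_properties props[] = {
        CL_CONTEXT_PLATFORM, (cl_context_properties) platform,
        CL_GL_CONTEXT_KHR,   (cl_context_properties) glXGetCurrentContext(),
        CL_GLX_DISPLAY_KHR,  (cl_context_properties) glXGetCurrentDisplay(), // property added by this patch
        0
    };
    cl_int err = CL_SUCCESS;
    // CL_DEVICE_TYPE_GPU mirrors the example; clCreateContextFromType picks a matching device.
    cl_context ctx = clCreateContextFromType(props, CL_DEVICE_TYPE_GPU, 0, 0, &err);
    return err == CL_SUCCESS ? ctx : 0;
}
```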
- if (clGetGLContextInfoAPPLE(m_clContext, CGLGetCurrentContext(), - CL_CGL_DEVICE_FOR_CURRENT_VIRTUAL_SCREEN_APPLE, - sizeof(cl_device_id), &m_clDeviceId, NULL) != CL_SUCCESS) { + err = clGetGLContextInfoAPPLE(m_clContext, CGLGetCurrentContext(), + CL_CGL_DEVICE_FOR_CURRENT_VIRTUAL_SCREEN_APPLE, + sizeof(cl_device_id), &m_clDeviceId, 0); + if (err != CL_SUCCESS) { qWarning("Failed to get OpenCL device for current screen: %d", err); return; } #else - if (clGetDeviceIDs(platform, CL_DEVICE_TYPE_GPU, 1, &m_clDeviceId, 0) != CL_SUCCESS) { - qWarning("Failed to get OpenCL device"); - return; + clGetGLContextInfoKHR_fn getGLContextInfo = (clGetGLContextInfoKHR_fn) clGetExtensionFunctionAddress("clGetGLContextInfoKHR"); + if (!getGLContextInfo || getGLContextInfo(contextProps, CL_CURRENT_DEVICE_FOR_GL_CONTEXT_KHR, + sizeof(cl_device_id), &m_clDeviceId, 0) != CL_SUCCESS) { + err = clGetDeviceIDs(platform, CL_DEVICE_TYPE_GPU, 1, &m_clDeviceId, 0); + if (err != CL_SUCCESS) { + qWarning("Failed to get OpenCL device: %d", err); + return; + } } #endif @@ -291,15 +296,9 @@ QVideoFrame CLFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat Q_UNUSED(flags); // This example supports RGB data only, either in system memory (typical with cameras on all - // platforms) or as an OpenGL texture (e.g. video playback on OS X or on Windows with ANGLE). + // platforms) or as an OpenGL texture (e.g. video playback on OS X). // The latter is the fast path where everything happens on GPU. THe former involves a texture upload. - // ANGLE is not compatible with this example since we only do CL-GL interop, not D3D9/11. - if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) { - qWarning("ANGLE is not supported"); - return *input; - } - if (!input->isValid() || (input->handleType() != QAbstractVideoBuffer::NoHandle && input->handleType() != QAbstractVideoBuffer::GLTextureHandle)) { @@ -483,6 +482,9 @@ QVideoFrame InfoFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceForma int main(int argc, char **argv) { +#ifdef Q_OS_WIN // avoid ANGLE on Windows + QCoreApplication::setAttribute(Qt::AA_UseDesktopOpenGL); +#endif QGuiApplication app(argc, argv); qmlRegisterType("qmlvideofilter.cl.test", 1, 0, "CLFilter"); diff --git a/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro index c83929f7..f7f191d2 100644 --- a/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro +++ b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro @@ -14,9 +14,4 @@ INSTALLS += target osx: LIBS += -framework OpenCL unix: !osx: LIBS += -lOpenCL -win32:!winrt { - # Edit these as necessary - INCLUDEPATH += c:/cuda/include - LIBPATH += c:/cuda/lib/x64 - LIBS += -lopengl32 -lOpenCL -} +win32:!winrt: LIBS += -lopengl32 -lOpenCL diff --git a/src/gsttools/qgstreameraudioinputselector.cpp b/src/gsttools/qgstreameraudioinputselector.cpp index df1b9574..52587e07 100644 --- a/src/gsttools/qgstreameraudioinputselector.cpp +++ b/src/gsttools/qgstreameraudioinputselector.cpp @@ -111,36 +111,33 @@ void QGstreamerAudioInputSelector::updateAlsaDevices() { #ifdef HAVE_ALSA void **hints, **n; - int card = -1; + if (snd_device_name_hint(-1, "pcm", &hints) < 0) { + qWarning()<<"no alsa devices available"; + return; + } + n = hints; - while (snd_card_next(&card) == 0 && card >= 0) { - if (snd_device_name_hint(card, "pcm", &hints) < 0) - continue; + while (*n != NULL) { + char *name = 
snd_device_name_get_hint(*n, "NAME"); + char *descr = snd_device_name_get_hint(*n, "DESC"); + char *io = snd_device_name_get_hint(*n, "IOID"); - n = hints; - while (*n != NULL) { - char *name = snd_device_name_get_hint(*n, "NAME"); - char *descr = snd_device_name_get_hint(*n, "DESC"); - char *io = snd_device_name_get_hint(*n, "IOID"); - - if ((name != NULL) && (descr != NULL)) { - if ( io == NULL || qstrcmp(io,"Input") == 0 ) { - m_names.append(QLatin1String("alsa:")+QString::fromUtf8(name)); - m_descriptions.append(QString::fromUtf8(descr)); - } + if ((name != NULL) && (descr != NULL)) { + if ( io == NULL || qstrcmp(io,"Input") == 0 ) { + m_names.append(QLatin1String("alsa:")+QString::fromUtf8(name)); + m_descriptions.append(QString::fromUtf8(descr)); } - - if (name != NULL) - free(name); - if (descr != NULL) - free(descr); - if (io != NULL) - free(io); - ++n; } - snd_device_name_free_hint(hints); + if (name != NULL) + free(name); + if (descr != NULL) + free(descr); + if (io != NULL) + free(io); + n++; } + snd_device_name_free_hint(hints); #endif } diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp index 55a0be1e..61ba09e7 100644 --- a/src/gsttools/qgstutils.cpp +++ b/src/gsttools/qgstutils.cpp @@ -42,6 +42,7 @@ #include #include #include +#include #include #include @@ -510,6 +511,10 @@ Q_GLOBAL_STATIC(FactoryCameraInfoMap, qt_camera_device_info); QVector QGstUtils::enumerateCameras(GstElementFactory *factory) { + static QElapsedTimer camerasCacheAgeTimer; + if (camerasCacheAgeTimer.isValid() && camerasCacheAgeTimer.elapsed() > 500) // ms + qt_camera_device_info()->clear(); + FactoryCameraInfoMap::const_iterator it = qt_camera_device_info()->constFind(factory); if (it != qt_camera_device_info()->constEnd()) return *it; @@ -568,6 +573,7 @@ QVector QGstUtils::enumerateCameras(GstElementFactory *fa } if (!devices.isEmpty() || !hasVideoSource) { + camerasCacheAgeTimer.restart(); return devices; } } @@ -626,6 +632,7 @@ QVector QGstUtils::enumerateCameras(GstElementFactory *fa } qt_safe_close(fd); } + camerasCacheAgeTimer.restart(); #endif // USE_V4L return devices; diff --git a/src/multimedia/audio/qaudiobuffer.cpp b/src/multimedia/audio/qaudiobuffer.cpp index 8b4597b5..87517b5e 100644 --- a/src/multimedia/audio/qaudiobuffer.cpp +++ b/src/multimedia/audio/qaudiobuffer.cpp @@ -264,6 +264,8 @@ QAudioBuffer::QAudioBuffer(int numFrames, const QAudioFormat &format, qint64 sta QAudioBuffer &QAudioBuffer::operator =(const QAudioBuffer &other) { if (this->d != other.d) { + if (d) + d->deref(); d = QAudioBufferPrivate::acquire(other.d); } return *this; diff --git a/src/multimediawidgets/qpaintervideosurface.cpp b/src/multimediawidgets/qpaintervideosurface.cpp index ea6e75e2..2dfbf341 100644 --- a/src/multimediawidgets/qpaintervideosurface.cpp +++ b/src/multimediawidgets/qpaintervideosurface.cpp @@ -91,15 +91,16 @@ QVideoSurfaceGenericPainter::QVideoSurfaceGenericPainter() : m_imageFormat(QImage::Format_Invalid) , m_scanLineDirection(QVideoSurfaceFormat::TopToBottom) { - m_imagePixelFormats - << QVideoFrame::Format_RGB32 - << QVideoFrame::Format_ARGB32 - << QVideoFrame::Format_RGB565; + m_imagePixelFormats << QVideoFrame::Format_RGB32; + // The raster formats should be a subset of the GL formats. 
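The qgstutils.cpp hunks above bound the lifetime of the cached camera list with a static QElapsedTimer: the cache is cleared when a lookup arrives more than 500 ms after the last successful enumeration, and the timer is restarted whenever a fresh list is stored. A stand-alone sketch of the same time-bounded cache; the function and its enumerateDevices callback are illustrative stand-ins, not Qt Multimedia API:

```cpp
#include <QElapsedTimer>
#include <QStringList>

// Illustrative only: 'expireMs' and 'enumerateDevices' are placeholders for this sketch.
QStringList cachedDevices(QStringList (*enumerateDevices)(), qint64 expireMs = 500)
{
    static QStringList cache;
    static QElapsedTimer age;

    if (age.isValid() && age.elapsed() > expireMs)
        cache.clear();                 // cache is stale, force re-enumeration

    if (cache.isEmpty()) {
        cache = enumerateDevices();    // the expensive probe
        age.restart();                 // isValid() becomes true after the first restart()
    }
    return cache;
}
```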
#ifndef QT_NO_OPENGL if (QOpenGLContext::openGLModuleType() != QOpenGLContext::LibGLES) #endif m_imagePixelFormats << QVideoFrame::Format_RGB24; + + m_imagePixelFormats << QVideoFrame::Format_ARGB32 + << QVideoFrame::Format_RGB565; } QList QVideoSurfaceGenericPainter::supportedPixelFormats( @@ -1055,7 +1056,13 @@ QVideoSurfaceGlslPainter::QVideoSurfaceGlslPainter(QGLContext *context) m_imagePixelFormats << QVideoFrame::Format_RGB32 << QVideoFrame::Format_BGR32 - << QVideoFrame::Format_ARGB32 + << QVideoFrame::Format_ARGB32; + if (!context->contextHandle()->isOpenGLES()) { + m_imagePixelFormats + << QVideoFrame::Format_RGB24 + << QVideoFrame::Format_BGR24; + } + m_imagePixelFormats << QVideoFrame::Format_RGB565 << QVideoFrame::Format_YUV444 << QVideoFrame::Format_AYUV444 @@ -1064,11 +1071,6 @@ QVideoSurfaceGlslPainter::QVideoSurfaceGlslPainter(QGLContext *context) m_glPixelFormats << QVideoFrame::Format_RGB32 << QVideoFrame::Format_ARGB32; - if (!context->contextHandle()->isOpenGLES()) { - m_imagePixelFormats - << QVideoFrame::Format_RGB24 - << QVideoFrame::Format_BGR24; - } } QAbstractVideoSurface::Error QVideoSurfaceGlslPainter::start(const QVideoSurfaceFormat &format) diff --git a/src/plugins/alsa/qalsaaudiodeviceinfo.cpp b/src/plugins/alsa/qalsaaudiodeviceinfo.cpp index 717b6471..be90ca66 100644 --- a/src/plugins/alsa/qalsaaudiodeviceinfo.cpp +++ b/src/plugins/alsa/qalsaaudiodeviceinfo.cpp @@ -337,9 +337,14 @@ QList QAlsaAudioDeviceInfo::availableDevices(QAudio::Mode mode) #if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) // Create a list of all current audio devices that support mode - void **hints; + void **hints, **n; char *name, *descr, *io; - int card = -1; + + if(snd_device_name_hint(-1, "pcm", &hints) < 0) { + qWarning() << "no alsa devices available"; + return devices; + } + n = hints; if(mode == QAudio::AudioInput) { filter = "Input"; @@ -347,35 +352,28 @@ QList QAlsaAudioDeviceInfo::availableDevices(QAudio::Mode mode) filter = "Output"; } - while (snd_card_next(&card) == 0 && card >= 0) { - if (snd_device_name_hint(card, "pcm", &hints) < 0) - continue; + while (*n != NULL) { + name = snd_device_name_get_hint(*n, "NAME"); + if (name != 0 && qstrcmp(name, "null") != 0) { + descr = snd_device_name_get_hint(*n, "DESC"); + io = snd_device_name_get_hint(*n, "IOID"); - void **n = hints; - while (*n != NULL) { - name = snd_device_name_get_hint(*n, "NAME"); - if (name != 0 && qstrcmp(name, "null") != 0) { - descr = snd_device_name_get_hint(*n, "DESC"); - io = snd_device_name_get_hint(*n, "IOID"); - - if ((descr != NULL) && ((io == NULL) || (io == filter))) { - QString deviceName = QLatin1String(name); - QString deviceDescription = QLatin1String(descr); - if (deviceDescription.contains(QLatin1String("Default Audio Device"))) - devices.prepend(deviceName.toLocal8Bit().constData()); - else - devices.append(deviceName.toLocal8Bit().constData()); - } - - free(descr); - free(io); + if ((descr != NULL) && ((io == NULL) || (io == filter))) { + QString deviceName = QLatin1String(name); + QString deviceDescription = QLatin1String(descr); + if (deviceDescription.contains(QLatin1String("Default Audio Device"))) + devices.prepend(deviceName.toLocal8Bit().constData()); + else + devices.append(deviceName.toLocal8Bit().constData()); } - free(name); - ++n; - } - snd_device_name_free_hint(hints); + free(descr); + free(io); + } + free(name); + ++n; } + snd_device_name_free_hint(hints); #else int idx = 0; char* name; @@ -416,41 +414,38 @@ void QAlsaAudioDeviceInfo::checkSurround() 
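The ALSA hunks in qgstreameraudioinputselector.cpp and qalsaaudiodeviceinfo.cpp all replace the per-card snd_card_next()/snd_device_name_hint(card, ...) loop with a single snd_device_name_hint(-1, "pcm", ...) call, which also enumerates software PCMs such as "default" and the surround plugins. A minimal sketch of that hint-based enumeration, assuming alsa-lib is available (filtering on "Output" the way availableDevices() does for QAudio::AudioOutput):

```cpp
#include <alsa/asoundlib.h>
#include <cstdio>
#include <cstdlib>
#include <cstring>

int main()
{
    void **hints = nullptr;
    if (snd_device_name_hint(-1, "pcm", &hints) < 0)   // -1 = all cards, including plugin devices
        return 1;

    for (void **n = hints; *n != nullptr; ++n) {
        char *name  = snd_device_name_get_hint(*n, "NAME");
        char *descr = snd_device_name_get_hint(*n, "DESC");
        char *io    = snd_device_name_get_hint(*n, "IOID"); // NULL means the PCM does both input and output

        if (name && descr && (!io || std::strcmp(io, "Output") == 0))
            std::printf("%s: %s\n", name, descr);

        std::free(name);   // free(NULL) is a no-op, so no per-pointer checks are needed
        std::free(descr);
        std::free(io);
    }
    snd_device_name_free_hint(hints);
    return 0;
}
```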
surround51 = false; surround71 = false; - void **hints; + void **hints, **n; char *name, *descr, *io; - int card = -1; - while (snd_card_next(&card) == 0 && card >= 0) { - if (snd_device_name_hint(card, "pcm", &hints) < 0) - continue; + if(snd_device_name_hint(-1, "pcm", &hints) < 0) + return; - void **n = hints; - while (*n != NULL) { - name = snd_device_name_get_hint(*n, "NAME"); - descr = snd_device_name_get_hint(*n, "DESC"); - io = snd_device_name_get_hint(*n, "IOID"); - if((name != NULL) && (descr != NULL)) { - QString deviceName = QLatin1String(name); - if (mode == QAudio::AudioOutput) { - if(deviceName.contains(QLatin1String("surround40"))) - surround40 = true; - if(deviceName.contains(QLatin1String("surround51"))) - surround51 = true; - if(deviceName.contains(QLatin1String("surround71"))) - surround71 = true; - } + n = hints; + + while (*n != NULL) { + name = snd_device_name_get_hint(*n, "NAME"); + descr = snd_device_name_get_hint(*n, "DESC"); + io = snd_device_name_get_hint(*n, "IOID"); + if((name != NULL) && (descr != NULL)) { + QString deviceName = QLatin1String(name); + if (mode == QAudio::AudioOutput) { + if(deviceName.contains(QLatin1String("surround40"))) + surround40 = true; + if(deviceName.contains(QLatin1String("surround51"))) + surround51 = true; + if(deviceName.contains(QLatin1String("surround71"))) + surround71 = true; } - if(name != NULL) - free(name); - if(descr != NULL) - free(descr); - if(io != NULL) - free(io); - ++n; } - - snd_device_name_free_hint(hints); + if(name != NULL) + free(name); + if(descr != NULL) + free(descr); + if(io != NULL) + free(io); + ++n; } + snd_device_name_free_hint(hints); } QT_END_NAMESPACE diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp index 14a26c0b..c65dec44 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp +++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp @@ -307,7 +307,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent, { StateChangeNotifier notifier(this); - mReloadingMedia = (mMediaContent == mediaContent); + mReloadingMedia = (mMediaContent == mediaContent) && !mPendingSetMedia; if (!mReloadingMedia) { mMediaContent = mediaContent; @@ -712,8 +712,8 @@ void QAndroidMediaPlayerControl::resetBufferingProgress() void QAndroidMediaPlayerControl::flushPendingStates() { if (mPendingSetMedia) { - mPendingSetMedia = false; setMedia(mMediaContent, 0); + mPendingSetMedia = false; return; } diff --git a/src/plugins/avfoundation/camera/avfcamerasession.h b/src/plugins/avfoundation/camera/avfcamerasession.h index 4f418cb1..9fd0b182 100644 --- a/src/plugins/avfoundation/camera/avfcamerasession.h +++ b/src/plugins/avfoundation/camera/avfcamerasession.h @@ -80,6 +80,7 @@ public: void addProbe(AVFMediaVideoProbeControl *probe); void removeProbe(AVFMediaVideoProbeControl *probe); + FourCharCode defaultCodec(); public Q_SLOTS: void setState(QCamera::State state); @@ -119,6 +120,7 @@ private: QSet m_videoProbes; QMutex m_videoProbesMutex; + FourCharCode m_defaultCodec; }; QT_END_NAMESPACE diff --git a/src/plugins/avfoundation/camera/avfcamerasession.mm b/src/plugins/avfoundation/camera/avfcamerasession.mm index 98fbb986..af30dd31 100644 --- a/src/plugins/avfoundation/camera/avfcamerasession.mm +++ b/src/plugins/avfoundation/camera/avfcamerasession.mm @@ -41,6 +41,7 @@ #include "avfmediavideoprobecontrol.h" #include "avfcameraviewfindersettingscontrol.h" #include 
"avfimageencodercontrol.h" +#include "avfcamerautility.h" #include #include @@ -143,6 +144,7 @@ AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent) , m_active(false) , m_videoInput(nil) , m_audioInput(nil) + , m_defaultCodec(0) { m_captureSession = [[AVCaptureSession alloc] init]; m_observer = [[AVFCameraSessionObserver alloc] initWithCameraSession:this]; @@ -277,6 +279,8 @@ void AVFCameraSession::setState(QCamera::State newState) Q_EMIT readyToConfigureConnections(); [m_captureSession commitConfiguration]; [m_captureSession startRunning]; + m_defaultCodec = 0; + defaultCodec(); applyImageEncoderSettings(); applyViewfinderSettings(); } @@ -407,6 +411,25 @@ void AVFCameraSession::removeProbe(AVFMediaVideoProbeControl *probe) m_videoProbesMutex.unlock(); } +FourCharCode AVFCameraSession::defaultCodec() +{ + if (!m_defaultCodec) { +#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) + if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { + if (AVCaptureDevice *device = videoCaptureDevice()) { + AVCaptureDeviceFormat *format = device.activeFormat; + if (!format || !format.formatDescription) + return m_defaultCodec; + m_defaultCodec = CMVideoFormatDescriptionGetCodecType(format.formatDescription); + } + } +#else + // TODO: extract media subtype. +#endif + } + return m_defaultCodec; +} + void AVFCameraSession::onCameraFrameFetched(const QVideoFrame &frame) { m_videoProbesMutex.lock(); diff --git a/src/plugins/avfoundation/camera/avfcamerautility.h b/src/plugins/avfoundation/camera/avfcamerautility.h index 1c13736e..03a61460 100644 --- a/src/plugins/avfoundation/camera/avfcamerautility.h +++ b/src/plugins/avfoundation/camera/avfcamerautility.h @@ -97,13 +97,17 @@ AVFRational qt_float_to_rational(qreal par, int limit); #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) -bool qt_is_video_range_subtype(AVCaptureDeviceFormat *format); +QVector qt_unique_device_formats(AVCaptureDevice *captureDevice, + FourCharCode preferredFormat); QSize qt_device_format_resolution(AVCaptureDeviceFormat *format); QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format); QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format); QVector qt_device_format_framerates(AVCaptureDeviceFormat *format); -AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res); -AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, Float64 fps); +AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res, + FourCharCode preferredFormat); +AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, + FourCharCode preferredFormat, + Float64 fps); AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps); #endif diff --git a/src/plugins/avfoundation/camera/avfcamerautility.mm b/src/plugins/avfoundation/camera/avfcamerautility.mm index 554c25cb..f8d5647e 100644 --- a/src/plugins/avfoundation/camera/avfcamerautility.mm +++ b/src/plugins/avfoundation/camera/avfcamerautility.mm @@ -37,6 +37,7 @@ #include #include +#include #include #include @@ -107,19 +108,6 @@ AVFRational qt_float_to_rational(qreal par, int limit) #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) -bool qt_is_video_range_subtype(AVCaptureDeviceFormat *format) -{ - Q_ASSERT(format); -#ifdef Q_OS_IOS - // Use only 420f on iOS, not 420v. 
- const FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - return subType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; -#else - Q_UNUSED(format) -#endif - return false; -} - namespace { inline bool qt_area_sane(const QSize &size) @@ -128,40 +116,25 @@ inline bool qt_area_sane(const QSize &size) && std::numeric_limits::max() / size.width() >= size.height(); } -inline bool avf_format_compare(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2) +struct ResolutionPredicate : std::binary_function { - Q_ASSERT(f1); - Q_ASSERT(f2); - const QSize r1(qt_device_format_resolution(f1)); - const QSize r2(qt_device_format_resolution(f2)); - return r1.width() > r2.width() && r1.height() > r2.height(); -} - -QVector qt_sort_device_formats(AVCaptureDevice *captureDevice) -{ - // Select only formats with framerate ranges + sort them by resoluions, - Q_ASSERT(captureDevice); - - QVectorsorted; - - NSArray *formats = captureDevice.formats; - if (!formats || !formats.count) - return sorted; - - sorted.reserve(formats.count); - for (AVCaptureDeviceFormat *format in formats) { - if (qt_is_video_range_subtype(format)) - continue; - if (format.videoSupportedFrameRateRanges && format.videoSupportedFrameRateRanges.count) { - const QSize resolution(qt_device_format_resolution(format)); - if (!resolution.isNull() && resolution.isValid()) - sorted << format; - } + bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)const + { + Q_ASSERT(f1 && f2); + const QSize r1(qt_device_format_resolution(f1)); + const QSize r2(qt_device_format_resolution(f2)); + return r1.width() < r2.width() || (r2.width() == r1.width() && r1.height() < r2.height()); } +}; - std::sort(sorted.begin(), sorted.end(), avf_format_compare); - return sorted; -} +struct FormatHasNoFPSRange : std::unary_function +{ + bool operator() (AVCaptureDeviceFormat *format) + { + Q_ASSERT(format); + return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count; + } +}; Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps) { @@ -180,6 +153,50 @@ Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fp } // Unnamed namespace. +QVector qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter) +{ + // 'filter' is the format we prefer if we have duplicates. 
+ Q_ASSERT(captureDevice); + + QVector formats; + + if (!captureDevice.formats || !captureDevice.formats.count) + return formats; + + formats.reserve(captureDevice.formats.count); + for (AVCaptureDeviceFormat *format in captureDevice.formats) { + const QSize resolution(qt_device_format_resolution(format)); + if (resolution.isNull() || !resolution.isValid()) + continue; + formats << format; + } + + if (!formats.size()) + return formats; + + std::sort(formats.begin(), formats.end(), ResolutionPredicate()); + + QSize size(qt_device_format_resolution(formats[0])); + FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription); + int last = 0; + for (int i = 1; i < formats.size(); ++i) { + const QSize nextSize(qt_device_format_resolution(formats[i])); + if (nextSize == size) { + if (codec == filter) + continue; + formats[last] = formats[i]; + } else { + ++last; + formats[last] = formats[i]; + size = nextSize; + } + codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription); + } + formats.resize(last + 1); + + return formats; +} + QSize qt_device_format_resolution(AVCaptureDeviceFormat *format) { Q_ASSERT(format); @@ -246,7 +263,9 @@ QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format) return QSize(asRatio.first, asRatio.second); } -AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &request) +AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, + const QSize &request, + FourCharCode filter) { Q_ASSERT(captureDevice); Q_ASSERT(!request.isNull() && request.isValid()); @@ -254,9 +273,10 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev if (!captureDevice.formats || !captureDevice.formats.count) return 0; - for (AVCaptureDeviceFormat *format in captureDevice.formats) { - if (qt_is_video_range_subtype(format)) - continue; + QVector formats(qt_unique_device_formats(captureDevice, filter)); + + for (int i = 0; i < formats.size(); ++i) { + AVCaptureDeviceFormat *format = formats[i]; if (qt_device_format_resolution(format) == request) return format; // iOS only (still images). 
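The qt_unique_device_formats() body shown above sorts the device formats by resolution and then keeps one entry per resolution, preferring the format whose codec matches the filter (the session's default codec) when duplicates occur; this replaces the old iOS-only "skip 420v" heuristic. The core of that dedup pass can be shown on plain data; Format and its width/height/codec fields are stand-ins for AVCaptureDeviceFormat:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

struct Format { int width; int height; std::uint32_t codec; }; // stand-in for AVCaptureDeviceFormat

// Keep one format per resolution; when a resolution repeats, prefer 'preferredCodec'.
std::vector<Format> uniqueFormats(std::vector<Format> formats, std::uint32_t preferredCodec)
{
    if (formats.empty())
        return formats;

    std::sort(formats.begin(), formats.end(), [](const Format &a, const Format &b) {
        return a.width < b.width || (a.width == b.width && a.height < b.height);
    });

    std::size_t last = 0;
    for (std::size_t i = 1; i < formats.size(); ++i) {
        const bool sameResolution = formats[i].width  == formats[last].width
                                 && formats[i].height == formats[last].height;
        if (sameResolution) {
            if (formats[last].codec == preferredCodec)
                continue;                 // already holding the preferred duplicate, drop this one
            formats[last] = formats[i];   // otherwise the later candidate replaces it
        } else {
            formats[++last] = formats[i]; // new resolution, keep it
        }
    }
    formats.resize(last + 1);
    return formats;
}
```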
@@ -269,31 +289,30 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev typedef QPair FormatPair; - QVector formats; - formats.reserve(captureDevice.formats.count); + QVector pairs; // default|HR sizes + pairs.reserve(formats.size()); - for (AVCaptureDeviceFormat *format in captureDevice.formats) { - if (qt_is_video_range_subtype(format)) - continue; + for (int i = 0; i < formats.size(); ++i) { + AVCaptureDeviceFormat *format = formats[i]; const QSize res(qt_device_format_resolution(format)); if (!res.isNull() && res.isValid() && qt_area_sane(res)) - formats << FormatPair(res, format); + pairs << FormatPair(res, format); const QSize highRes(qt_device_format_high_resolution(format)); if (!highRes.isNull() && highRes.isValid() && qt_area_sane(highRes)) - formats << FormatPair(highRes, format); + pairs << FormatPair(highRes, format); } - if (!formats.size()) + if (!pairs.size()) return 0; - AVCaptureDeviceFormat *best = formats[0].second; - QSize next(formats[0].first); + AVCaptureDeviceFormat *best = pairs[0].second; + QSize next(pairs[0].first); int wDiff = qAbs(request.width() - next.width()); int hDiff = qAbs(request.height() - next.height()); const int area = request.width() * request.height(); int areaDiff = qAbs(area - next.width() * next.height()); - for (int i = 1; i < formats.size(); ++i) { - next = formats[i].first; + for (int i = 1; i < pairs.size(); ++i) { + next = pairs[i].first; const int newWDiff = qAbs(next.width() - request.width()); const int newHDiff = qAbs(next.height() - request.height()); const int newAreaDiff = qAbs(area - next.width() * next.height()); @@ -302,7 +321,7 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) { wDiff = newWDiff; hDiff = newHDiff; - best = formats[i].second; + best = pairs[i].second; areaDiff = newAreaDiff; } } @@ -310,15 +329,21 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev return best; } -AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, Float64 fps) +AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice, + FourCharCode filter, + Float64 fps) { Q_ASSERT(captureDevice); Q_ASSERT(fps > 0.); const qreal epsilon = 0.1; - // Sort formats by their resolution. 
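qt_find_best_resolution_match() first looks for an exact hit and otherwise scores every candidate size (including the iOS high-resolution still-image size) by how close its width, height and area are to the request, exactly as in the loop above. A self-contained sketch of that nearest-match scoring over plain sizes, assuming the candidate list has already been filtered to sane resolutions:

```cpp
#include <QSize>
#include <QVector>
#include <QtGlobal>

// Pick the candidate whose width/height/area are closest to 'request'; returns -1 if there are none.
int bestResolutionIndex(const QVector<QSize> &candidates, const QSize &request)
{
    if (candidates.isEmpty())
        return -1;

    int best = 0;
    int wDiff = qAbs(request.width() - candidates[0].width());
    int hDiff = qAbs(request.height() - candidates[0].height());
    const int area = request.width() * request.height();
    int areaDiff = qAbs(area - candidates[0].width() * candidates[0].height());

    for (int i = 1; i < candidates.size(); ++i) {
        const QSize &next = candidates[i];
        const int newWDiff = qAbs(next.width() - request.width());
        const int newHDiff = qAbs(next.height() - request.height());
        const int newAreaDiff = qAbs(area - next.width() * next.height());

        // Closer on both axes wins outright; otherwise closer on one axis *and* in area wins.
        if ((newWDiff < wDiff && newHDiff < hDiff)
            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
            wDiff = newWDiff;
            hDiff = newHDiff;
            areaDiff = newAreaDiff;
            best = i;
        }
    }
    return best;
}
```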
- const QVector sorted(qt_sort_device_formats(captureDevice)); + QVectorsorted(qt_unique_device_formats(captureDevice, filter)); + // Sort formats by their resolution in decreasing order: + std::sort(sorted.begin(), sorted.end(), std::not2(ResolutionPredicate())); + // We can use only formats with framerate ranges: + sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end()); + if (!sorted.size()) return nil; diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm index c5aa5733..250aae9c 100644 --- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm +++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm @@ -279,9 +279,11 @@ QList AVFCameraViewfinderSettingsControl2::supportedV return supportedSettings; } - for (AVCaptureDeviceFormat *format in m_captureDevice.formats) { - if (qt_is_video_range_subtype(format)) - continue; + const QVector formats(qt_unique_device_formats(m_captureDevice, + m_session->defaultCodec())); + for (int i = 0; i < formats.size(); ++i) { + AVCaptureDeviceFormat *format = formats[i]; + const QSize res(qt_device_format_resolution(format)); if (res.isNull() || !res.isValid()) continue; @@ -435,12 +437,14 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch( #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { Q_ASSERT(m_captureDevice); + Q_ASSERT(m_session); const QSize &resolution = settings.resolution(); if (!resolution.isNull() && resolution.isValid()) { // Either the exact match (including high resolution for images on iOS) // or a format with a resolution close to the requested one. - return qt_find_best_resolution_match(m_captureDevice, resolution); + return qt_find_best_resolution_match(m_captureDevice, resolution, + m_session->defaultCodec()); } // No resolution requested, what about framerates? @@ -453,7 +457,8 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch( const qreal minFPS(settings.minimumFrameRate()); const qreal maxFPS(settings.maximumFrameRate()); if (minFPS || maxFPS) - return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS); + return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS, + m_session->defaultCodec()); // Ignore PAR for the moment (PAR without resolution can // pick a format with really bad resolution). // No need to test pixel format, just return settings. 
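qt_find_best_framerate_match() now reuses qt_unique_device_formats(), sorts the result by decreasing resolution with std::not2(ResolutionPredicate()) and strips formats without framerate ranges via std::remove_if/erase. One hedged note on the patch as posted: negating a less-than comparator with std::not2 (deprecated since C++17) yields "greater or equal", which does not satisfy std::sort's strict-weak-ordering requirement, so a comparator that simply swaps its arguments is the safer way to get the descending order, as in this sketch (Fmt is a stand-in for AVCaptureDeviceFormat):

```cpp
#include <algorithm>
#include <vector>

struct Fmt { int width; int height; int fpsRanges; }; // stand-in for AVCaptureDeviceFormat

void sortAndPrune(std::vector<Fmt> &formats)
{
    // Decreasing resolution: ask "is a strictly greater than b?" instead of negating the
    // ascending predicate, which would produce ">=" and break std::sort's requirements.
    std::sort(formats.begin(), formats.end(), [](const Fmt &a, const Fmt &b) {
        return b.width < a.width || (b.width == a.width && b.height < a.height);
    });

    // Drop formats advertising no frame-rate ranges, the job FormatHasNoFPSRange does above.
    formats.erase(std::remove_if(formats.begin(), formats.end(),
                                 [](const Fmt &f) { return f.fpsRanges == 0; }),
                  formats.end());
}
```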
diff --git a/src/plugins/avfoundation/camera/avfimageencodercontrol.mm b/src/plugins/avfoundation/camera/avfimageencodercontrol.mm index ea25665e..36050c3a 100644 --- a/src/plugins/avfoundation/camera/avfimageencodercontrol.mm +++ b/src/plugins/avfoundation/camera/avfimageencodercontrol.mm @@ -48,20 +48,6 @@ QT_BEGIN_NAMESPACE -QSize qt_image_high_resolution(AVCaptureDeviceFormat *format) -{ - Q_ASSERT(format); - QSize res; -#if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0) - if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) { - const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions); - res.setWidth(hrDim.width); - res.setHeight(hrDim.height); - } -#endif - return res; -} - AVFImageEncoderControl::AVFImageEncoderControl(AVFCameraService *service) : m_service(service) { @@ -94,9 +80,11 @@ QList AVFImageEncoderControl::supportedResolutions(const QImageEncoderSet #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); - for (AVCaptureDeviceFormat *format in captureDevice.formats) { - if (qt_is_video_range_subtype(format)) - continue; + const QVector formats(qt_unique_device_formats(captureDevice, + m_service->session()->defaultCodec())); + + for (int i = 0; i < formats.size(); ++i) { + AVCaptureDeviceFormat *format = formats[i]; const QSize res(qt_device_format_resolution(format)); if (!res.isNull() && res.isValid()) @@ -108,7 +96,7 @@ QList AVFImageEncoderControl::supportedResolutions(const QImageEncoderSet // its source AVCaptureDevice instance’s activeFormat.formatDescription. However, // if you set this property to YES, the receiver emits still images at the capture // device’s highResolutionStillImageDimensions value. 
- const QSize hrRes(qt_image_high_resolution(format)); + const QSize hrRes(qt_device_format_high_resolution(format)); if (!hrRes.isNull() && hrRes.isValid()) resolutions << res; } @@ -152,7 +140,7 @@ QImageEncoderSettings AVFImageEncoderControl::imageSettings() const AVCaptureStillImageOutput *stillImageOutput = m_service->imageCaptureControl()->stillImageOutput(); if (stillImageOutput.highResolutionStillImageOutputEnabled) - res = qt_image_high_resolution(captureDevice.activeFormat); + res = qt_device_format_high_resolution(captureDevice.activeFormat); } #endif if (res.isNull() || !res.isValid()) { @@ -179,7 +167,6 @@ void AVFImageEncoderControl::setImageSettings(const QImageEncoderSettings &setti return; m_settings = settings; - applySettings(); } @@ -223,7 +210,8 @@ void AVFImageEncoderControl::applySettings() #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); - AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res); + AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res, + m_service->session()->defaultCodec()); if (!match) { qDebugCamera() << Q_FUNC_INFO << "unsupported resolution:" << res; @@ -242,7 +230,7 @@ void AVFImageEncoderControl::applySettings() #if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0) if (QSysInfo::MacintoshVersion >= QSysInfo::MV_IOS_8_0) { AVCaptureStillImageOutput *imageOutput = m_service->imageCaptureControl()->stillImageOutput(); - if (res == qt_image_high_resolution(captureDevice.activeFormat)) + if (res == qt_device_format_high_resolution(captureDevice.activeFormat)) imageOutput.highResolutionStillImageOutputEnabled = YES; else imageOutput.highResolutionStillImageOutputEnabled = NO; diff --git a/src/plugins/plugins.pro b/src/plugins/plugins.pro index aa28bdd1..dc197918 100644 --- a/src/plugins/plugins.pro +++ b/src/plugins/plugins.pro @@ -6,7 +6,11 @@ TEMPLATE = subdirs -SUBDIRS += m3u videonode +SUBDIRS += m3u + +qtHaveModule(quick) { + SUBDIRS += videonode +} android { SUBDIRS += android opensles diff --git a/src/plugins/wmf/decoder/mfaudiodecodercontrol.cpp b/src/plugins/wmf/decoder/mfaudiodecodercontrol.cpp index a29bebbf..d0aed239 100644 --- a/src/plugins/wmf/decoder/mfaudiodecodercontrol.cpp +++ b/src/plugins/wmf/decoder/mfaudiodecodercontrol.cpp @@ -41,7 +41,11 @@ MFAudioDecoderControl::MFAudioDecoderControl(QObject *parent) , m_resampler(0) , m_state(QAudioDecoder::StoppedState) , m_device(0) + , m_mfInputStreamID(0) + , m_mfOutputStreamID(0) , m_bufferReady(false) + , m_duration(0) + , m_position(0) , m_loadingSource(false) , m_mfOutputType(0) , m_convertSample(0) @@ -53,8 +57,6 @@ MFAudioDecoderControl::MFAudioDecoderControl(QObject *parent) qCritical("MFAudioDecoderControl: Failed to create resampler(CLSID_CResamplerMediaObject)!"); return; } - m_mfInputStreamID = 0; - m_mfOutputStreamID = 0; m_resampler->AddInputStreams(1, &m_mfInputStreamID); connect(m_sourceResolver, SIGNAL(mediaSourceReady()), this, SLOT(handleMediaSourceReady())); diff --git a/tests/auto/unit/qmediaplayer/tst_qmediaplayer.cpp b/tests/auto/unit/qmediaplayer/tst_qmediaplayer.cpp index 982ff7ec..2082fe14 100644 --- a/tests/auto/unit/qmediaplayer/tst_qmediaplayer.cpp +++ b/tests/auto/unit/qmediaplayer/tst_qmediaplayer.cpp @@ -81,31 +81,46 @@ class tst_QMediaPlayer: public QObject Q_OBJECT public slots: - void 
initTestCase_data(); void initTestCase(); void cleanupTestCase(); void init(); void cleanup(); private slots: + void testNullService_data(); void testNullService(); void testValid(); + void testMedia_data(); void testMedia(); + void testDuration_data(); void testDuration(); + void testPosition_data(); void testPosition(); + void testVolume_data(); void testVolume(); + void testMuted_data(); void testMuted(); void testIsAvailable(); + void testVideoAvailable_data(); void testVideoAvailable(); + void testBufferStatus_data(); void testBufferStatus(); + void testSeekable_data(); void testSeekable(); + void testPlaybackRate_data(); void testPlaybackRate(); + void testError_data(); void testError(); + void testErrorString_data(); void testErrorString(); void testService(); + void testPlay_data(); void testPlay(); + void testPause_data(); void testPause(); + void testStop_data(); void testStop(); + void testMediaStatus_data(); void testMediaStatus(); void testPlaylist(); void testNetworkAccess(); @@ -120,12 +135,14 @@ private slots: void testSupportedMimeTypes(); private: + void setupCommonTestData(); + MockMediaServiceProvider *mockProvider; MockMediaPlayerService *mockService; QMediaPlayer *player; }; -void tst_QMediaPlayer::initTestCase_data() +void tst_QMediaPlayer::setupCommonTestData() { QTest::addColumn("valid"); QTest::addColumn("state"); @@ -193,6 +210,11 @@ void tst_QMediaPlayer::cleanup() delete mockService; } +void tst_QMediaPlayer::testNullService_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testNullService() { mockProvider->service = 0; @@ -217,7 +239,7 @@ void tst_QMediaPlayer::testNullService() QCOMPARE(player.availability(), QMultimedia::ServiceMissing); { - QFETCH_GLOBAL(QMediaContent, mediaContent); + QFETCH(QMediaContent, mediaContent); QSignalSpy spy(&player, SIGNAL(currentMediaChanged(QMediaContent))); QFile file; @@ -249,8 +271,8 @@ void tst_QMediaPlayer::testNullService() QCOMPARE(stateSpy.count(), 0); QCOMPARE(statusSpy.count(), 0); } { - QFETCH_GLOBAL(int, volume); - QFETCH_GLOBAL(bool, muted); + QFETCH(int, volume); + QFETCH(bool, muted); QSignalSpy volumeSpy(&player, SIGNAL(volumeChanged(int))); QSignalSpy mutingSpy(&player, SIGNAL(mutedChanged(bool))); @@ -263,7 +285,7 @@ void tst_QMediaPlayer::testNullService() QCOMPARE(player.isMuted(), false); QCOMPARE(mutingSpy.count(), 0); } { - QFETCH_GLOBAL(qint64, position); + QFETCH(qint64, position); QSignalSpy spy(&player, SIGNAL(positionChanged(qint64))); @@ -271,7 +293,7 @@ void tst_QMediaPlayer::testNullService() QCOMPARE(player.position(), qint64(0)); QCOMPARE(spy.count(), 0); } { - QFETCH_GLOBAL(qreal, playbackRate); + QFETCH(qreal, playbackRate); QSignalSpy spy(&player, SIGNAL(playbackRateChanged(qreal))); @@ -307,16 +329,21 @@ void tst_QMediaPlayer::testNullService() void tst_QMediaPlayer::testValid() { /* - QFETCH_GLOBAL(bool, valid); + QFETCH(bool, valid); mockService->setIsValid(valid); QCOMPARE(player->isValid(), valid); */ } +void tst_QMediaPlayer::testMedia_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testMedia() { - QFETCH_GLOBAL(QMediaContent, mediaContent); + QFETCH(QMediaContent, mediaContent); mockService->setMedia(mediaContent); QCOMPARE(player->currentMedia(), mediaContent); @@ -327,20 +354,30 @@ void tst_QMediaPlayer::testMedia() QCOMPARE((QBuffer*)player->mediaStream(), &stream); } +void tst_QMediaPlayer::testDuration_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testDuration() { - QFETCH_GLOBAL(qint64, duration); + QFETCH(qint64, duration); 
mockService->setDuration(duration); QVERIFY(player->duration() == duration); } +void tst_QMediaPlayer::testPosition_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testPosition() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(bool, seekable); - QFETCH_GLOBAL(qint64, position); - QFETCH_GLOBAL(qint64, duration); + QFETCH(bool, valid); + QFETCH(bool, seekable); + QFETCH(qint64, position); + QFETCH(qint64, duration); mockService->setIsValid(valid); mockService->setSeekable(seekable); @@ -384,10 +421,15 @@ void tst_QMediaPlayer::testPosition() } } +void tst_QMediaPlayer::testVolume_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testVolume() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(int, volume); + QFETCH(bool, valid); + QFETCH(int, volume); mockService->setVolume(volume); QVERIFY(player->volume() == volume); @@ -415,11 +457,16 @@ void tst_QMediaPlayer::testVolume() } } +void tst_QMediaPlayer::testMuted_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testMuted() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(bool, muted); - QFETCH_GLOBAL(int, volume); + QFETCH(bool, valid); + QFETCH(bool, muted); + QFETCH(int, volume); if (valid) { mockService->setMuted(muted); @@ -434,34 +481,54 @@ void tst_QMediaPlayer::testMuted() } } +void tst_QMediaPlayer::testVideoAvailable_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testVideoAvailable() { - QFETCH_GLOBAL(bool, videoAvailable); + QFETCH(bool, videoAvailable); mockService->setVideoAvailable(videoAvailable); QVERIFY(player->isVideoAvailable() == videoAvailable); } +void tst_QMediaPlayer::testBufferStatus_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testBufferStatus() { - QFETCH_GLOBAL(int, bufferStatus); + QFETCH(int, bufferStatus); mockService->setBufferStatus(bufferStatus); QVERIFY(player->bufferStatus() == bufferStatus); } +void tst_QMediaPlayer::testSeekable_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testSeekable() { - QFETCH_GLOBAL(bool, seekable); + QFETCH(bool, seekable); mockService->setSeekable(seekable); QVERIFY(player->isSeekable() == seekable); } +void tst_QMediaPlayer::testPlaybackRate_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testPlaybackRate() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(qreal, playbackRate); + QFETCH(bool, valid); + QFETCH(qreal, playbackRate); if (valid) { mockService->setPlaybackRate(playbackRate); @@ -474,17 +541,27 @@ void tst_QMediaPlayer::testPlaybackRate() } } +void tst_QMediaPlayer::testError_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testError() { - QFETCH_GLOBAL(QMediaPlayer::Error, error); + QFETCH(QMediaPlayer::Error, error); mockService->setError(error); QVERIFY(player->error() == error); } +void tst_QMediaPlayer::testErrorString_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testErrorString() { - QFETCH_GLOBAL(QString, errorString); + QFETCH(QString, errorString); mockService->setErrorString(errorString); QVERIFY(player->errorString() == errorString); @@ -499,7 +576,7 @@ void tst_QMediaPlayer::testIsAvailable() void tst_QMediaPlayer::testService() { /* - QFETCH_GLOBAL(bool, valid); + QFETCH(bool, valid); mockService->setIsValid(valid); @@ -510,11 +587,16 @@ void tst_QMediaPlayer::testService() */ } +void tst_QMediaPlayer::testPlay_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testPlay() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(QMediaContent, mediaContent); - QFETCH_GLOBAL(QMediaPlayer::State, state); + QFETCH(bool, 
valid); + QFETCH(QMediaContent, mediaContent); + QFETCH(QMediaPlayer::State, state); mockService->setIsValid(valid); mockService->setState(state); @@ -536,11 +618,16 @@ void tst_QMediaPlayer::testPlay() } } +void tst_QMediaPlayer::testPause_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testPause() { - QFETCH_GLOBAL(bool, valid); - QFETCH_GLOBAL(QMediaContent, mediaContent); - QFETCH_GLOBAL(QMediaPlayer::State, state); + QFETCH(bool, valid); + QFETCH(QMediaContent, mediaContent); + QFETCH(QMediaPlayer::State, state); mockService->setIsValid(valid); mockService->setState(state); @@ -562,10 +649,15 @@ void tst_QMediaPlayer::testPause() } } +void tst_QMediaPlayer::testStop_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testStop() { - QFETCH_GLOBAL(QMediaContent, mediaContent); - QFETCH_GLOBAL(QMediaPlayer::State, state); + QFETCH(QMediaContent, mediaContent); + QFETCH(QMediaPlayer::State, state); mockService->setState(state); mockService->setMedia(mediaContent); @@ -586,9 +678,14 @@ void tst_QMediaPlayer::testStop() } } +void tst_QMediaPlayer::testMediaStatus_data() +{ + setupCommonTestData(); +} + void tst_QMediaPlayer::testMediaStatus() { - QFETCH_GLOBAL(int, bufferStatus); + QFETCH(int, bufferStatus); int bufferSignals = 0; player->setNotifyInterval(10);
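The test refactor replaces the single initTestCase_data() table (consumed through QFETCH_GLOBAL) with per-test _data() slots that all call setupCommonTestData() and fetch rows with plain QFETCH, so each test owns its data set instead of every test being run against the global table. A minimal, self-contained sketch of that Qt Test pattern; the class name and columns are illustrative, not the ones from tst_qmediaplayer:

```cpp
#include <QtTest/QtTest>

class tst_Sketch : public QObject
{
    Q_OBJECT

private slots:
    void testVolume_data() { setupCommonTestData(); } // each test gets its own _data() slot
    void testVolume()
    {
        QFETCH(int, volume);          // QFETCH reads the column from *this* test's data table
        QVERIFY(volume >= 0 && volume <= 100);
    }

private:
    void setupCommonTestData()        // shared table builder, called from every _data() slot
    {
        QTest::addColumn<int>("volume");
        QTest::newRow("muted")   << 0;
        QTest::newRow("half")    << 50;
        QTest::newRow("maximum") << 100;
    }
};

QTEST_GUILESS_MAIN(tst_Sketch)
#include "main.moc"                   // assumes this sketch lives in main.cpp
```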