/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Copyright (C) 2016 Research In Motion
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdeclarativevideooutput_p.h"

#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_window_p.h"
#include <private/qvideooutputorientationhandler_p.h>
#include <QtMultimedia/qmediaobject.h>
#include <QtMultimedia/qmediaservice.h>
#include <private/qmediapluginloader_p.h>
#include <QtCore/qloggingcategory.h>

static void initResource() {
    Q_INIT_RESOURCE(qtmultimediaquicktools);
}

QT_BEGIN_NAMESPACE

Q_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video")

/*!
    \qmltype VideoOutput
    //! \instantiates QDeclarativeVideoOutput
    \brief Render video or camera viewfinder.

    \ingroup multimedia_qml
    \ingroup multimedia_video_qml
    \inqmlmodule QtMultimedia

    \qml

    Rectangle {
        width: 800
        height: 600
        color: "black"

        MediaPlayer {
            id: player
            source: "file://video.webm"
            autoPlay: true
        }

        VideoOutput {
            id: videoOutput
            source: player
            anchors.fill: parent
        }
    }

    \endqml

    The VideoOutput item supports untransformed, stretched, and uniformly scaled video presentation.
    For a description of stretched and uniformly scaled presentation, see the \l fillMode property
    description.

    The VideoOutput item works with backends that support either QVideoRendererControl or
    QVideoWindowControl. If the backend only supports QVideoWindowControl, the video is rendered
    onto an overlay window that is layered on top of the QtQuick window. Due to the nature of
    video overlays, certain features are not available for this kind of backend:
    \list
    \li Some transformations, such as rotations
    \li Having other QtQuick items on top of the VideoOutput item
    \endlist
    Most backends, however, do support QVideoRendererControl and therefore do not have the
    limitations listed above.

    \sa MediaPlayer, Camera

\omit
    \section1 Screen Saver

    If it is likely that an application will be playing video for an extended
    period of time without user interaction, it may be necessary to disable
    the platform's screen saver. The \l ScreenSaver (from \l QtSystemInfo)
    may be used to disable the screensaver in this fashion:

    \qml
    import QtSystemInfo 5.0

    ScreenSaver { screenSaverEnabled: false }
    \endqml
\endomit
*/

// TODO: Restore Qt System Info docs when the module is released

/*!
    \internal
    \class QDeclarativeVideoOutput
    \brief The QDeclarativeVideoOutput class provides a video output item.
*/

QDeclarativeVideoOutput::QDeclarativeVideoOutput(QQuickItem *parent) :
    QQuickItem(parent),
    m_sourceType(NoSource),
    m_fillMode(PreserveAspectFit),
    m_geometryDirty(true),
    m_orientation(0),
    m_autoOrientation(false),
    m_screenOrientationHandler(0)
{
    initResource();
    setFlag(ItemHasContents, true);
    createBackend(nullptr);
}

QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
{
    m_backend.reset();
    m_source.clear();
    _q_updateMediaObject();
}

/*!
    \qmlproperty object QtMultimedia::VideoOutput::videoSurface
    \since 5.15

    This property holds the underlying video surface that can be used
    to render the video frames to this VideoOutput element.
    It is similar to setting a QObject with a \c videoSurface property as the source,
    in which case this video surface will be set on it.
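
    For example, assuming a hypothetical \c FrameProvider type implemented in C++,
    registered with the QML engine, and exposing a writable \c videoSurface property,
    the surface of this item could be handed to it like this (a sketch, not part of Qt):

    \qml
    VideoOutput {
        id: videoOutput
        anchors.fill: parent
    }

    FrameProvider {
        // FrameProvider is a hypothetical C++ type; any object with a writable
        // videoSurface property accepting a QAbstractVideoSurface* works the same way.
        videoSurface: videoOutput.videoSurface
    }
    \endqml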

    \sa source
*/

QAbstractVideoSurface *QDeclarativeVideoOutput::videoSurface() const
{
    return m_backend ? m_backend->videoSurface() : nullptr;
}

/*!
    \qmlproperty variant QtMultimedia::VideoOutput::source

    This property holds the source item that provides the video frames, such as a
    MediaPlayer or a Camera.

    If you are extending your own C++ classes to interoperate with VideoOutput, you can
    either provide a QObject based class with a \c mediaObject property that exposes a
    QMediaObject derived class that has a QVideoRendererControl available, or you can
    provide a QObject based class with a writable \c videoSurface property that can
    accept a QAbstractVideoSurface based class and can follow the correct protocol to
    deliver QVideoFrames to it.
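
    For example, a \l Camera can be assigned directly as the source to display a viewfinder:

    \qml
    Camera {
        id: camera
    }

    VideoOutput {
        source: camera
        anchors.fill: parent
    }
    \endqml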
*/

void QDeclarativeVideoOutput::setSource(QObject *source)
{
    qCDebug(qLcVideo) << "source is" << source;

    if (source == m_source.data())
        return;

    if (m_source && m_sourceType == MediaObjectSource) {
        disconnect(m_source.data(), 0, this, SLOT(_q_updateMediaObject()));
        disconnect(m_source.data(), 0, this, SLOT(_q_updateCameraInfo()));
    }

    if (m_backend)
        m_backend->releaseSource();

    m_source = source;

    if (m_source) {
        const QMetaObject *metaObject = m_source.data()->metaObject();

        int mediaObjectPropertyIndex = metaObject->indexOfProperty("mediaObject");
        if (mediaObjectPropertyIndex != -1) {
            const QMetaProperty mediaObjectProperty = metaObject->property(mediaObjectPropertyIndex);

            if (mediaObjectProperty.hasNotifySignal()) {
                QMetaMethod method = mediaObjectProperty.notifySignal();
                QMetaObject::connect(m_source.data(), method.methodIndex(),
                                     this, this->metaObject()->indexOfSlot("_q_updateMediaObject()"),
                                     Qt::DirectConnection, 0);
            }

            int deviceIdPropertyIndex = metaObject->indexOfProperty("deviceId");
            if (deviceIdPropertyIndex != -1) { // Camera source
                const QMetaProperty deviceIdProperty = metaObject->property(deviceIdPropertyIndex);

                if (deviceIdProperty.hasNotifySignal()) {
                    QMetaMethod method = deviceIdProperty.notifySignal();
                    QMetaObject::connect(m_source.data(), method.methodIndex(),
                                         this, this->metaObject()->indexOfSlot("_q_updateCameraInfo()"),
                                         Qt::DirectConnection, 0);
                }
            }

            m_sourceType = MediaObjectSource;
        } else if (metaObject->indexOfProperty("videoSurface") != -1) {
            m_source.data()->setProperty("videoSurface",
                                         QVariant::fromValue<QAbstractVideoSurface *>(videoSurface()));
            m_sourceType = VideoSurfaceSource;
        } else {
            m_sourceType = NoSource;
        }
    } else {
        m_sourceType = NoSource;
    }

    _q_updateMediaObject();
    emit sourceChanged();
}

Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, videoBackendFactoryLoader,
        (QDeclarativeVideoBackendFactoryInterface_iid, QLatin1String("video/declarativevideobackend"), Qt::CaseInsensitive))

bool QDeclarativeVideoOutput::createBackend(QMediaService *service)
{
    bool backendAvailable = false;

    const auto instances = videoBackendFactoryLoader()->instances(QLatin1String("declarativevideobackend"));
    for (QObject *instance : instances) {
        if (QDeclarativeVideoBackendFactoryInterface *plugin = qobject_cast<QDeclarativeVideoBackendFactoryInterface*>(instance)) {
            if (!m_backend)
                m_backend.reset(plugin->create(this));
            if (m_backend && m_backend->init(service)) {
                backendAvailable = true;
                break;
            }
        }
    }
#if QT_CONFIG(opengl)
    if (!backendAvailable) {
        if (!m_backend)
            m_backend.reset(new QDeclarativeVideoRendererBackend(this));
        if (m_backend->init(service))
            backendAvailable = true;
    }
#endif

    // QDeclarativeVideoWindowBackend only works when there is a service with a QVideoWindowControl.
    // Without a service, the QDeclarativeVideoRendererBackend should always work.
    if (!backendAvailable) {
        Q_ASSERT(service);
        m_backend.reset(new QDeclarativeVideoWindowBackend(this));
        if (m_backend->init(service))
            backendAvailable = true;
    }

    if (backendAvailable) {
        // Since a new backend has been created, its geometry needs to be updated.
        m_geometryDirty = true;

        m_backend->clearFilters();
        for (int i = 0; i < m_filters.count(); ++i)
            m_backend->appendFilter(m_filters[i]);
    } else {
        qWarning() << Q_FUNC_INFO << "Media service has neither renderer nor window control available.";
        m_backend.reset();
    }

    return backendAvailable;
}

void QDeclarativeVideoOutput::_q_updateMediaObject()
{
    QMediaObject *mediaObject = 0;

    if (m_source)
        mediaObject = qobject_cast<QMediaObject*>(m_source.data()->property("mediaObject").value<QObject*>());

    qCDebug(qLcVideo) << "media object is" << mediaObject;

    if (m_mediaObject.data() == mediaObject)
        return;

    m_mediaObject.clear();
    m_service.clear();

    if (mediaObject) {
        if (QMediaService *service = mediaObject->service()) {
            if (createBackend(service)) {
                m_service = service;
                m_mediaObject = mediaObject;
            }
        }
    }

    _q_updateCameraInfo();
}

void QDeclarativeVideoOutput::_q_updateCameraInfo()
{
    if (m_mediaObject) {
        const QCamera *camera = qobject_cast<const QCamera *>(m_mediaObject);
        if (camera) {
            QCameraInfo info(*camera);

            if (m_cameraInfo != info) {
                m_cameraInfo = info;

                // The camera position and orientation need to be taken into account for
                // the viewport auto orientation.
                if (m_autoOrientation)
                    _q_screenOrientationChanged(m_screenOrientationHandler->currentOrientation());
            }
        }
    } else {
        m_cameraInfo = QCameraInfo();
    }
}

/*!
    \qmlproperty enumeration QtMultimedia::VideoOutput::fillMode

    Set this property to define how the video is scaled to fit the target area.

    \list
    \li Stretch - the video is scaled to fit.
    \li PreserveAspectFit - the video is scaled uniformly to fit without cropping.
    \li PreserveAspectCrop - the video is scaled uniformly to fill, cropping if necessary.
    \endlist

    The default fill mode is PreserveAspectFit.
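
    For example, the video can be scaled to fill the whole item, cropping if necessary:

    \qml
    MediaPlayer {
        id: player
        source: "file://video.webm"
        autoPlay: true
    }

    VideoOutput {
        source: player
        fillMode: VideoOutput.PreserveAspectCrop
        anchors.fill: parent
    }
    \endqml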
*/

QDeclarativeVideoOutput::FillMode QDeclarativeVideoOutput::fillMode() const
{
    return m_fillMode;
}

void QDeclarativeVideoOutput::setFillMode(FillMode mode)
{
    if (mode == m_fillMode)
        return;

    m_fillMode = mode;
    m_geometryDirty = true;
    update();

    emit fillModeChanged(mode);
}

void QDeclarativeVideoOutput::_q_updateNativeSize()
{
    if (!m_backend)
        return;

    QSize size = m_backend->nativeSize();
    if (!qIsDefaultAspect(m_orientation)) {
        size.transpose();
    }

    if (m_nativeSize != size) {
        m_nativeSize = size;

        m_geometryDirty = true;

        setImplicitWidth(size.width());
        setImplicitHeight(size.height());

        emit sourceRectChanged();
    }
}

/* Based on fill mode and our size, figure out the source/dest rects */
void QDeclarativeVideoOutput::_q_updateGeometry()
{
    const QRectF rect(0, 0, width(), height());
    const QRectF absoluteRect(x(), y(), width(), height());

    if (!m_geometryDirty && m_lastRect == absoluteRect)
        return;

    QRectF oldContentRect(m_contentRect);

    m_geometryDirty = false;
    m_lastRect = absoluteRect;

    if (m_nativeSize.isEmpty()) {
        // This is necessary for the item to receive the first
        // paint event and to configure the video surface.
        m_contentRect = rect;
    } else if (m_fillMode == Stretch) {
        m_contentRect = rect;
    } else if (m_fillMode == PreserveAspectFit || m_fillMode == PreserveAspectCrop) {
        QSizeF scaled = m_nativeSize;
        scaled.scale(rect.size(), m_fillMode == PreserveAspectFit ?
                     Qt::KeepAspectRatio : Qt::KeepAspectRatioByExpanding);

        m_contentRect = QRectF(QPointF(), scaled);
        m_contentRect.moveCenter(rect.center());
    }

    if (m_backend) {
        if (!m_backend->videoSurface() || m_backend->videoSurface()->isActive())
            m_backend->updateGeometry();
        else
            m_geometryDirty = true;
    }

    if (m_contentRect != oldContentRect)
        emit contentRectChanged();
}

void QDeclarativeVideoOutput::_q_screenOrientationChanged(int orientation)
{
    // If the source is a camera, take its sensor position and orientation into account.
    if (!m_cameraInfo.isNull()) {
        switch (m_cameraInfo.position()) {
        case QCamera::FrontFace:
            // Front-facing cameras are flipped horizontally, compensate for the mirroring.
            orientation += (360 - m_cameraInfo.orientation());
            break;
        case QCamera::BackFace:
        default:
            orientation += m_cameraInfo.orientation();
            break;
        }
    }

    setOrientation(orientation % 360);
}

/*!
    \qmlproperty int QtMultimedia::VideoOutput::orientation

    In some cases the source video stream requires a certain
    orientation to be correct. This includes
    sources like a camera viewfinder, where the displayed
    viewfinder should match reality, no matter what rotation
    the rest of the user interface has.

    This property allows you to apply a rotation (in steps
    of 90 degrees) to compensate for any user interface
    rotation, with positive values in the anti-clockwise direction.

    The orientation change will also affect the mapping
    of coordinates from source to viewport.
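
    For example, if the rest of the user interface is rotated 90 degrees clockwise, the
    viewfinder can be rotated back to compensate (an illustrative sketch):

    \qml
    Camera {
        id: camera
    }

    VideoOutput {
        source: camera
        orientation: 90    // rotate 90 degrees anti-clockwise to compensate
        anchors.fill: parent
    }
    \endqml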

    \sa autoOrientation
*/
int QDeclarativeVideoOutput::orientation() const
{
    return m_orientation;
}

void QDeclarativeVideoOutput::setOrientation(int orientation)
{
    // Make sure it's a multiple of 90.
    if (orientation % 90)
        return;

    // If there's no actual change, return.
    if (m_orientation == orientation)
        return;

    // If the new orientation has the same effect
    // as the old one, don't update the video node.
    if ((m_orientation % 360) == (orientation % 360)) {
        m_orientation = orientation;
        emit orientationChanged();
        return;
    }

    m_geometryDirty = true;

    // Otherwise, this is a new orientation.
    // See if we need to change the aspect ratio orientation too.
    bool oldAspect = qIsDefaultAspect(m_orientation);
    bool newAspect = qIsDefaultAspect(orientation);

    m_orientation = orientation;

    if (oldAspect != newAspect) {
        m_nativeSize.transpose();

        setImplicitWidth(m_nativeSize.width());
        setImplicitHeight(m_nativeSize.height());

        // The source rectangle does not change with orientation.
    }

    update();
    emit orientationChanged();
}

/*!
    \qmlproperty bool QtMultimedia::VideoOutput::autoOrientation

    This property allows you to enable and disable automatic orientation
    of the video stream, so that its orientation always matches
    the orientation of the screen. If \c autoOrientation is enabled,
    the \c orientation property is overwritten.

    By default \c autoOrientation is disabled.
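
    A typical use is a camera viewfinder that should stay upright as the device is rotated:

    \qml
    Camera {
        id: camera
    }

    VideoOutput {
        source: camera
        autoOrientation: true
        anchors.fill: parent
    }
    \endqml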

    \sa orientation
    \since 5.2
*/
bool QDeclarativeVideoOutput::autoOrientation() const
{
    return m_autoOrientation;
}

void QDeclarativeVideoOutput::setAutoOrientation(bool autoOrientation)
{
    if (autoOrientation == m_autoOrientation)
        return;

    m_autoOrientation = autoOrientation;
    if (m_autoOrientation) {
        m_screenOrientationHandler = new QVideoOutputOrientationHandler(this);
        connect(m_screenOrientationHandler, SIGNAL(orientationChanged(int)),
                this, SLOT(_q_screenOrientationChanged(int)));

        _q_screenOrientationChanged(m_screenOrientationHandler->currentOrientation());
    } else {
        disconnect(m_screenOrientationHandler, SIGNAL(orientationChanged(int)),
                   this, SLOT(_q_screenOrientationChanged(int)));
        m_screenOrientationHandler->deleteLater();
        m_screenOrientationHandler = 0;
    }

    emit autoOrientationChanged();
}

/*!
    \qmlproperty rectangle QtMultimedia::VideoOutput::contentRect

    This property holds the item coordinates of the area that
    would contain video to render. With certain fill modes,
    this rectangle will be larger than the visible area of the
    \c VideoOutput.

    This property is useful when other coordinates are specified
    in terms of the source dimensions - this applies to relative
    (normalized) frame coordinates in the range of 0 to 1.0.

    Areas outside this rectangle are transparent.
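
    For example, a border can be drawn around the area that actually contains video
    (this sketch assumes the VideoOutput has the id \c videoOutput):

    \qml
    Rectangle {
        // videoOutput is assumed to be the id of the VideoOutput item
        color: "transparent"
        border.color: "red"
        x: videoOutput.contentRect.x
        y: videoOutput.contentRect.y
        width: videoOutput.contentRect.width
        height: videoOutput.contentRect.height
    }
    \endqml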

    \sa mapRectToItem(), mapPointToItem()
*/
QRectF QDeclarativeVideoOutput::contentRect() const
{
    return m_contentRect;
}

/*!
    \qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect

    This property holds the area of the source video
    content that is considered for rendering. The
    values are in source pixel coordinates, adjusted for
    the source's pixel aspect ratio.

    Note that typically the top left corner of this rectangle
    will be \c {0,0} while the width and height will be the
    width and height of the input content. The values only
    differ when the video source has a viewport set.

    The orientation setting does not affect this rectangle.

    \sa QVideoSurfaceFormat::pixelAspectRatio()
    \sa QVideoSurfaceFormat::viewport()
*/
QRectF QDeclarativeVideoOutput::sourceRect() const
{
    // We might have to transpose back.
    QSizeF size = m_nativeSize;
    if (!qIsDefaultAspect(m_orientation)) {
        size.transpose();
    }

    // No backend? Just assume no viewport.
    if (!m_nativeSize.isValid() || !m_backend) {
        return QRectF(QPointF(), size);
    }

    // Take the viewport into account for the top left position.
    // m_nativeSize is already adjusted to the viewport, as it originates
    // from QVideoSurfaceFormat::sizeHint(), which includes the pixel aspect
    // ratio and the viewport.
    const QRectF viewport = m_backend->adjustedViewport();
    Q_ASSERT(viewport.size() == size);
    return QRectF(viewport.topLeft(), size);
}

/*!
    \qmlmethod QPointF QtMultimedia::VideoOutput::mapNormalizedPointToItem(const QPointF &point) const

    Given normalized coordinates \a point (that is, each
    component in the range of 0 to 1.0), return the mapped point
    that it corresponds to (in item coordinates).
    This mapping is affected by the orientation.

    Depending on the fill mode, this point may lie outside the rendered
    rectangle.
 */
QPointF QDeclarativeVideoOutput::mapNormalizedPointToItem(const QPointF &point) const
{
    qreal dx = point.x();
    qreal dy = point.y();

    if (qIsDefaultAspect(m_orientation)) {
        dx *= m_contentRect.width();
        dy *= m_contentRect.height();
    } else {
        dx *= m_contentRect.height();
        dy *= m_contentRect.width();
    }

    switch (qNormalizedOrientation(m_orientation)) {
    case 0:
    default:
        return m_contentRect.topLeft() + QPointF(dx, dy);
    case 90:
        return m_contentRect.bottomLeft() + QPointF(dy, -dx);
    case 180:
        return m_contentRect.bottomRight() + QPointF(-dx, -dy);
    case 270:
        return m_contentRect.topRight() + QPointF(-dy, dx);
    }
}

/*!
    \qmlmethod QRectF QtMultimedia::VideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const

    Given a rectangle \a rectangle in normalized
    coordinates (that is, each component in the range of 0 to 1.0),
    return the mapped rectangle that it corresponds to (in item coordinates).
    This mapping is affected by the orientation.

    Depending on the fill mode, this rectangle may extend outside the rendered
    rectangle.
 */
QRectF QDeclarativeVideoOutput::mapNormalizedRectToItem(const QRectF &rectangle) const
{
    return QRectF(mapNormalizedPointToItem(rectangle.topLeft()),
                  mapNormalizedPointToItem(rectangle.bottomRight())).normalized();
}

/*!
    \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSource(const QPointF &point) const

    Given a point \a point in item coordinates, return the
    corresponding point in source coordinates. This mapping is
    affected by the orientation.

    If the supplied point lies outside the rendered area, the returned
    point will be outside the source rectangle.
 */
QPointF QDeclarativeVideoOutput::mapPointToSource(const QPointF &point) const
{
    QPointF norm = mapPointToSourceNormalized(point);

    if (qIsDefaultAspect(m_orientation))
        return QPointF(norm.x() * m_nativeSize.width(), norm.y() * m_nativeSize.height());
    else
        return QPointF(norm.x() * m_nativeSize.height(), norm.y() * m_nativeSize.width());
}

/*!
    \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSource(const QRectF &rectangle) const

    Given a rectangle \a rectangle in item coordinates, return the
    corresponding rectangle in source coordinates. This mapping is
    affected by the orientation.

    If the supplied rectangle lies outside the rendered area, the returned
    rectangle will be outside the source rectangle.
 */
QRectF QDeclarativeVideoOutput::mapRectToSource(const QRectF &rectangle) const
{
    return QRectF(mapPointToSource(rectangle.topLeft()),
                  mapPointToSource(rectangle.bottomRight())).normalized();
}

/*!
    \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToSourceNormalized(const QPointF &point) const

    Given a point \a point in item coordinates, return the
    corresponding point in normalized source coordinates. This mapping is
    affected by the orientation.

    If the supplied point lies outside the rendered area, the returned
    point will be outside the source rectangle. No clamping is performed.
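
    For example, a tap on the item can be translated into a custom focus point for a
    camera (this sketch assumes a \l Camera with the id \c camera is the source and the
    VideoOutput has the id \c videoOutput):

    \qml
    MouseArea {
        anchors.fill: videoOutput
        onClicked: {
            camera.focus.focusPointMode = Camera.FocusPointCustom
            camera.focus.customFocusPoint =
                    videoOutput.mapPointToSourceNormalized(Qt.point(mouse.x, mouse.y))
        }
    }
    \endqml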
 */
QPointF QDeclarativeVideoOutput::mapPointToSourceNormalized(const QPointF &point) const
{
    if (m_contentRect.isEmpty())
        return QPointF();

    // Normalize the item source point
    qreal nx = (point.x() - m_contentRect.left()) / m_contentRect.width();
    qreal ny = (point.y() - m_contentRect.top()) / m_contentRect.height();

    const qreal one(1.0f);

    // For now, the origin of the source rectangle is 0,0
    switch (qNormalizedOrientation(m_orientation)) {
    case 0:
    default:
        return QPointF(nx, ny);
    case 90:
        return QPointF(one - ny, nx);
    case 180:
        return QPointF(one - nx, one - ny);
    case 270:
        return QPointF(ny, one - nx);
    }
}

/*!
    \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const

    Given a rectangle \a rectangle in item coordinates, return the
    corresponding rectangle in normalized source coordinates. This mapping is
    affected by the orientation.

    If the supplied rectangle lies outside the rendered area, the returned
    rectangle will be outside the source rectangle. No clamping is performed.
 */
QRectF QDeclarativeVideoOutput::mapRectToSourceNormalized(const QRectF &rectangle) const
{
    return QRectF(mapPointToSourceNormalized(rectangle.topLeft()),
                  mapPointToSourceNormalized(rectangle.bottomRight())).normalized();
}

QDeclarativeVideoOutput::SourceType QDeclarativeVideoOutput::sourceType() const
{
    return m_sourceType;
}

/*!
    \qmlmethod QPointF QtMultimedia::VideoOutput::mapPointToItem(const QPointF &point) const

    Given a point \a point in source coordinates, return the
    corresponding point in item coordinates. This mapping is
    affected by the orientation.

    Depending on the fill mode, this point may lie outside the rendered
    rectangle.
 */
QPointF QDeclarativeVideoOutput::mapPointToItem(const QPointF &point) const
{
    if (m_nativeSize.isEmpty())
        return QPointF();

    // Just normalize and use that function.
    // m_nativeSize is transposed in some orientations.
    if (qIsDefaultAspect(m_orientation))
        return mapNormalizedPointToItem(QPointF(point.x() / m_nativeSize.width(), point.y() / m_nativeSize.height()));
    else
        return mapNormalizedPointToItem(QPointF(point.x() / m_nativeSize.height(), point.y() / m_nativeSize.width()));
}

/*!
    \qmlmethod QRectF QtMultimedia::VideoOutput::mapRectToItem(const QRectF &rectangle) const

    Given a rectangle \a rectangle in source coordinates, return the
    corresponding rectangle in item coordinates. This mapping is
    affected by the orientation.

    Depending on the fill mode, this rectangle may extend outside the rendered
    rectangle.
 */
QRectF QDeclarativeVideoOutput::mapRectToItem(const QRectF &rectangle) const
{
    return QRectF(mapPointToItem(rectangle.topLeft()),
                  mapPointToItem(rectangle.bottomRight())).normalized();
}

QSGNode *QDeclarativeVideoOutput::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *data)
{
    _q_updateGeometry();

    if (!m_backend)
        return 0;

    return m_backend->updatePaintNode(oldNode, data);
}

void QDeclarativeVideoOutput::itemChange(QQuickItem::ItemChange change,
                                         const QQuickItem::ItemChangeData &changeData)
{
    if (m_backend)
        m_backend->itemChange(change, changeData);
}

void QDeclarativeVideoOutput::releaseResources()
{
    if (m_backend)
        m_backend->releaseResources();
}

void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry)
{
    Q_UNUSED(newGeometry);
    Q_UNUSED(oldGeometry);

    QQuickItem::geometryChanged(newGeometry, oldGeometry);

    // Explicitly listen to geometry changes here. This is needed since changing the position does
    // not trigger a call to updatePaintNode().
    // We need to react to position changes though, as the window backend's display rect gets
    // changed in that situation.
    _q_updateGeometry();
}

/*!
    \qmlproperty list<object> QtMultimedia::VideoOutput::filters

    This property holds the list of video filters that are run on the video
    frames. The order of the filters in the list matches the order in which
    they will be invoked on the video frames. The objects in the list must be
    instances of a subclass of QAbstractVideoFilter.
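
    For example, assuming a hypothetical \c Grayscale filter implemented in C++ as a
    QAbstractVideoFilter subclass and registered with the QML engine:

    \qml
    Grayscale {
        id: grayscaleFilter  // hypothetical QAbstractVideoFilter subclass registered from C++
    }

    MediaPlayer {
        id: player
        source: "file://video.webm"
        autoPlay: true
    }

    VideoOutput {
        source: player
        filters: [ grayscaleFilter ]
        anchors.fill: parent
    }
    \endqml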

    \sa QAbstractVideoFilter
*/

QQmlListProperty<QAbstractVideoFilter> QDeclarativeVideoOutput::filters()
{
    return QQmlListProperty<QAbstractVideoFilter>(this, 0, filter_append, filter_count, filter_at, filter_clear);
}

void QDeclarativeVideoOutput::filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value)
{
    QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
    self->m_filters.append(value);
    if (self->m_backend)
        self->m_backend->appendFilter(value);
}

int QDeclarativeVideoOutput::filter_count(QQmlListProperty<QAbstractVideoFilter> *property)
{
    QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
    return self->m_filters.count();
}

QAbstractVideoFilter *QDeclarativeVideoOutput::filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index)
{
    QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
    return self->m_filters.at(index);
}

void QDeclarativeVideoOutput::filter_clear(QQmlListProperty<QAbstractVideoFilter> *property)
{
    QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
    self->m_filters.clear();
    if (self->m_backend)
        self->m_backend->clearFilters();
}

void QDeclarativeVideoOutput::_q_invalidateSceneGraph()
{
    if (m_backend)
        m_backend->invalidateSceneGraph();
}

/*!
    \qmlproperty enumeration QtMultimedia::VideoOutput::flushMode
    \since 5.13

    Set this property to define what \c VideoOutput should show
    when playback is finished or stopped.

    \list
    \li EmptyFrame - clears the video output.
    \li FirstFrame - shows the first valid frame.
    \li LastFrame - shows the last valid frame.
    \endlist

    The default flush mode is EmptyFrame.
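
    For example, to keep showing the last frame after playback has finished:

    \qml
    MediaPlayer {
        id: player
        source: "file://video.webm"
        autoPlay: true
    }

    VideoOutput {
        source: player
        flushMode: VideoOutput.LastFrame
        anchors.fill: parent
    }
    \endqml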
*/

void QDeclarativeVideoOutput::setFlushMode(FlushMode mode)
{
    if (m_flushMode == mode)
        return;

    m_flushMode = mode;
    emit flushModeChanged();
}

QT_END_NAMESPACE


source code of qtmultimedia/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp