/****************************************************************************
**
** Copyright (C) 2016 Jolla Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include <qabstractvideosurface.h>
#include <qvideoframe.h>
#include <QDebug>
#include <QMap>
#include <QThread>
#include <QEvent>
#include <QCoreApplication>

#include <private/qmediapluginloader_p.h>
#include "qgstvideobuffer_p.h"

#include "qgstvideorenderersink_p.h"

#include <gst/video/video.h>

#include "qgstutils_p.h"

#if QT_CONFIG(gstreamer_gl)
#include <QOpenGLContext>
#include <QGuiApplication>
#include <QWindow>
#include <qpa/qplatformnativeinterface.h>

#include <gst/gl/gstglconfig.h>

#if GST_GL_HAVE_WINDOW_X11
# include <gst/gl/x11/gstgldisplay_x11.h>
#endif
#if GST_GL_HAVE_PLATFORM_EGL
# include <gst/gl/egl/gstgldisplay_egl.h>
#endif
#if GST_CHECK_VERSION(1,11,1) && GST_GL_HAVE_WINDOW_WAYLAND
# include <gst/gl/wayland/gstgldisplay_wayland.h>
#endif
#endif // #if QT_CONFIG(gstreamer_gl)

//#define DEBUG_VIDEO_SURFACE_SINK

QT_BEGIN_NAMESPACE

QGstDefaultVideoRenderer::QGstDefaultVideoRenderer()
{
}

QGstDefaultVideoRenderer::~QGstDefaultVideoRenderer()
{
}

GstCaps *QGstDefaultVideoRenderer::getCaps(QAbstractVideoSurface *surface)
{
#if QT_CONFIG(gstreamer_gl)
    if (QGstUtils::useOpenGL()) {
        m_handleType = QAbstractVideoBuffer::GLTextureHandle;
        auto formats = surface->supportedPixelFormats(m_handleType);
        // Even if the surface does not support GL textures,
        // glupload will be added to the pipeline and GLMemory will be requested.
        // The data is then uploaded to GL textures
        // and downloaded again when the buffer is used for rendering.
        if (formats.isEmpty()) {
            m_handleType = QAbstractVideoBuffer::NoHandle;
            formats = surface->supportedPixelFormats(m_handleType);
        }

        GstCaps *caps = QGstUtils::capsForFormats(formats);
        for (guint i = 0; i < gst_caps_get_size(caps); ++i)
            gst_caps_set_features(caps, i, gst_caps_features_from_string("memory:GLMemory"));

        return caps;
    }
#endif
    return QGstUtils::capsForFormats(surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle));
}

bool QGstDefaultVideoRenderer::start(QAbstractVideoSurface *surface, GstCaps *caps)
{
    m_flushed = true;
    m_format = QGstUtils::formatForCaps(caps, &m_videoInfo, m_handleType);

    return m_format.isValid() && surface->start(m_format);
}

void QGstDefaultVideoRenderer::stop(QAbstractVideoSurface *surface)
{
    m_flushed = true;
    if (surface)
        surface->stop();
}

bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer *buffer)
{
    m_flushed = false;

    QGstVideoBuffer *videoBuffer = nullptr;
#if QT_CONFIG(gstreamer_gl)
    if (m_format.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
        GstGLMemory *glmem = GST_GL_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
        guint textureId = gst_gl_memory_get_texture_id(glmem);
        videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_format.handleType(), textureId);
    }
#endif

    if (!videoBuffer)
        videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo);

    auto meta = gst_buffer_get_video_crop_meta(buffer);
    if (meta) {
        QRect vp(meta->x, meta->y, meta->width, meta->height);
        if (m_format.viewport() != vp) {
#ifdef DEBUG_VIDEO_SURFACE_SINK
            qDebug() << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" << meta->width << " | " << meta->x << "x" << meta->y << "]";
#endif
            // Update the viewport if the crop metadata differs from the current format.
            m_format.setViewport(vp);
            surface->start(m_format);
        }
    }

    QVideoFrame frame(
                videoBuffer,
                m_format.frameSize(),
                m_format.pixelFormat());
    QGstUtils::setFrameTimeStamps(&frame, buffer);

    return surface->present(frame);
}

void QGstDefaultVideoRenderer::flush(QAbstractVideoSurface *surface)
{
    if (surface && !m_flushed)
        surface->present(QVideoFrame());
    m_flushed = true;
}

bool QGstDefaultVideoRenderer::proposeAllocation(GstQuery *)
{
    return true;
}

Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, rendererLoader,
        (QGstVideoRendererInterface_iid, QLatin1String("video/gstvideorenderer"), Qt::CaseInsensitive))

QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface)
    : m_surface(surface)
{
    const auto instances = rendererLoader()->instances(QGstVideoRendererPluginKey);
    for (QObject *instance : instances) {
        auto plugin = qobject_cast<QGstVideoRendererInterface*>(instance);
        if (QGstVideoRenderer *renderer = plugin ? plugin->createRenderer() : nullptr)
            m_renderers.append(renderer);
    }

    m_renderers.append(new QGstDefaultVideoRenderer);
    updateSupportedFormats();
    connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
}
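
/*
    Illustrative sketch only (assumed names, not compiled here): a plugin loaded
    through the "video/gstvideorenderer" key above would supply a renderer that
    mirrors the calls made on QGstVideoRenderer in this file; the exact base-class
    declaration comes from Qt Multimedia's private plugin headers.

        class MyRenderer : public QGstVideoRenderer
        {
        public:
            GstCaps *getCaps(QAbstractVideoSurface *surface) override;
            bool start(QAbstractVideoSurface *surface, GstCaps *caps) override;
            void stop(QAbstractVideoSurface *surface) override;
            bool present(QAbstractVideoSurface *surface, GstBuffer *buffer) override;
            void flush(QAbstractVideoSurface *surface) override;
            bool proposeAllocation(GstQuery *query) override;
        };

    Plugin-provided renderers are consulted before the QGstDefaultVideoRenderer
    that is appended last.
*/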

QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
{
    qDeleteAll(m_renderers);

    if (m_surfaceCaps)
        gst_caps_unref(m_surfaceCaps);
    if (m_startCaps)
        gst_caps_unref(m_startCaps);
#if QT_CONFIG(gstreamer_gl)
    if (m_gstGLDisplayContext)
        gst_object_unref(m_gstGLDisplayContext);
#endif
}

GstCaps *QVideoSurfaceGstDelegate::caps()
{
    QMutexLocker locker(&m_mutex);

    gst_caps_ref(m_surfaceCaps);

    return m_surfaceCaps;
}

bool QVideoSurfaceGstDelegate::start(GstCaps *caps)
{
    QMutexLocker locker(&m_mutex);

    if (m_activeRenderer) {
        m_flush = true;
        m_stop = true;
    }

    if (m_startCaps)
        gst_caps_unref(m_startCaps);
    m_startCaps = caps;
    gst_caps_ref(m_startCaps);

    /*
    Waiting for start() to be invoked in the main thread may block
    if gstreamer blocks the main thread until this call is finished.
    This situation is rare and usually caused by setState(Null)
    while the pipeline is being prerolled.

    The proper solution to this involves controlling the gstreamer pipeline from
    a thread other than the video surface's thread.

    Currently start() fails if wait() times out.
    */
    if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && m_startCaps) {
        qWarning() << "Failed to start video surface because the main thread is blocked.";
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }

    return m_activeRenderer != 0;
}

void QVideoSurfaceGstDelegate::stop()
{
    QMutexLocker locker(&m_mutex);

    if (!m_activeRenderer)
        return;

    m_flush = true;
    m_stop = true;

    if (m_startCaps) {
        gst_caps_unref(m_startCaps);
        m_startCaps = 0;
    }

    waitForAsyncEvent(&locker, &m_setupCondition, 500);
}

void QVideoSurfaceGstDelegate::unlock()
{
    QMutexLocker locker(&m_mutex);

    m_setupCondition.wakeAll();
    m_renderCondition.wakeAll();
}

bool QVideoSurfaceGstDelegate::proposeAllocation(GstQuery *query)
{
    QMutexLocker locker(&m_mutex);

    if (QGstVideoRenderer *pool = m_activeRenderer) {
        locker.unlock();

        return pool->proposeAllocation(query);
    } else {
        return false;
    }
}

void QVideoSurfaceGstDelegate::flush()
{
    QMutexLocker locker(&m_mutex);

    m_flush = true;
    m_renderBuffer = 0;
    m_renderCondition.wakeAll();

    notify();
}

GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
{
    QMutexLocker locker(&m_mutex);

    m_renderReturn = GST_FLOW_OK;
    m_renderBuffer = buffer;

    waitForAsyncEvent(&locker, &m_renderCondition, 300);

    m_renderBuffer = 0;

    return m_renderReturn;
}

#if QT_CONFIG(gstreamer_gl)
static GstGLContext *gstGLDisplayContext(QAbstractVideoSurface *surface)
{
    auto glContext = qobject_cast<QOpenGLContext*>(surface->property("GLContext").value<QObject*>());
    // Context is not ready yet.
    if (!glContext)
        return nullptr;

    GstGLDisplay *display = nullptr;
    const QString platform = QGuiApplication::platformName();
    const char *contextName = "eglcontext";
    GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
    QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();

#if GST_GL_HAVE_WINDOW_X11
    if (platform == QLatin1String("xcb")) {
        if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGL) {
            contextName = "glxcontext";
            glPlatform = GST_GL_PLATFORM_GLX;
        }

        display = (GstGLDisplay *)gst_gl_display_x11_new_with_display(
            (Display *)pni->nativeResourceForIntegration("display"));
    }
#endif

#if GST_GL_HAVE_PLATFORM_EGL
    if (!display && platform == QLatin1String("eglfs")) {
        display = (GstGLDisplay *)gst_gl_display_egl_new_with_egl_display(
            pni->nativeResourceForIntegration("egldisplay"));
    }
#endif

#if GST_CHECK_VERSION(1,11,1)
#if GST_GL_HAVE_WINDOW_WAYLAND
    if (!display && platform.startsWith(QLatin1String("wayland"))) {
        const char *displayName = (platform == QLatin1String("wayland"))
            ? "display" : "egldisplay";

        display = (GstGLDisplay *)gst_gl_display_wayland_new_with_display(
            (struct wl_display *)pni->nativeResourceForIntegration(displayName));
    }
#endif
#endif

    if (!display) {
        qWarning() << "Could not create GstGLDisplay";
        return nullptr;
    }

    void *nativeContext = pni->nativeResourceForContext(contextName, glContext);
    if (!nativeContext)
        qWarning() << "Could not find resource for" << contextName;

    GstGLContext *appContext = gst_gl_context_new_wrapped(display, (guintptr)nativeContext, glPlatform, GST_GL_API_ANY);
    if (!appContext)
        qWarning() << "Could not create wrapped context for platform:" << glPlatform;

    GstGLContext *displayContext = nullptr;
    GError *error = nullptr;
    gst_gl_display_create_context(display, appContext, &displayContext, &error);
    if (error) {
        qWarning() << "Could not create display context:" << error->message;
        g_clear_error(&error);
    }

    if (appContext)
        gst_object_unref(appContext);

    gst_object_unref(display);

    return displayContext;
}
#endif // #if QT_CONFIG(gstreamer_gl)

bool QVideoSurfaceGstDelegate::query(GstQuery *query)
{
#if QT_CONFIG(gstreamer_gl)
    if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
        const gchar *type;
        gst_query_parse_context_type(query, &type);

        if (strcmp(type, "gst.gl.local_context") != 0)
            return false;

        if (!m_gstGLDisplayContext)
            m_gstGLDisplayContext = gstGLDisplayContext(m_surface);

        // No context yet.
        if (!m_gstGLDisplayContext)
            return false;

        GstContext *context = nullptr;
        gst_query_parse_context(query, &context);
        context = context ? gst_context_copy(context) : gst_context_new(type, FALSE);
        GstStructure *structure = gst_context_writable_structure(context);
#if GST_CHECK_VERSION(1,11,1)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, m_gstGLDisplayContext, nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_gstGLDisplayContext, nullptr);
#endif
        gst_query_set_context(query, context);
        gst_context_unref(context);

        return m_gstGLDisplayContext;
    }
#else
    Q_UNUSED(query);
#endif
    return false;
}

bool QVideoSurfaceGstDelegate::event(QEvent *event)
{
    if (event->type() == QEvent::UpdateRequest) {
        QMutexLocker locker(&m_mutex);

        if (m_notified) {
            while (handleEvent(&locker)) {}
            m_notified = false;
        }
        return true;
    } else {
        return QObject::event(event);
    }
}

bool QVideoSurfaceGstDelegate::handleEvent(QMutexLocker *locker)
{
    if (m_flush) {
        m_flush = false;
        if (m_activeRenderer) {
            locker->unlock();

            m_activeRenderer->flush(m_surface);
        }
    } else if (m_stop) {
        m_stop = false;

        if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();

            activePool->stop(m_surface);

            locker->relock();
        }
    } else if (m_startCaps) {
        Q_ASSERT(!m_activeRenderer);

        GstCaps * const startCaps = m_startCaps;
        m_startCaps = 0;

        if (m_renderer && m_surface) {
            locker->unlock();

            const bool started = m_renderer->start(m_surface, startCaps);

            locker->relock();

            m_activeRenderer = started
                    ? m_renderer
                    : 0;
        } else if (QGstVideoRenderer * const activePool = m_activeRenderer) {
            m_activeRenderer = 0;
            locker->unlock();

            activePool->stop(m_surface);

            locker->relock();
        }

        gst_caps_unref(startCaps);
    } else if (m_renderBuffer) {
        GstBuffer *buffer = m_renderBuffer;
        m_renderBuffer = 0;
        m_renderReturn = GST_FLOW_ERROR;

        if (m_activeRenderer && m_surface) {
            gst_buffer_ref(buffer);

            locker->unlock();

            const bool rendered = m_activeRenderer->present(m_surface, buffer);

            gst_buffer_unref(buffer);

            locker->relock();

            if (rendered)
                m_renderReturn = GST_FLOW_OK;
        }

        m_renderCondition.wakeAll();
    } else {
        m_setupCondition.wakeAll();

        return false;
    }
    return true;
}

void QVideoSurfaceGstDelegate::notify()
{
    if (!m_notified) {
        m_notified = true;
        QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
    }
}

bool QVideoSurfaceGstDelegate::waitForAsyncEvent(
        QMutexLocker *locker, QWaitCondition *condition, unsigned long time)
{
    if (QThread::currentThread() == thread()) {
        while (handleEvent(locker)) {}
        m_notified = false;

        return true;
    } else {
        notify();

        return condition->wait(&m_mutex, time);
    }
}

void QVideoSurfaceGstDelegate::updateSupportedFormats()
{
    if (m_surfaceCaps) {
        gst_caps_unref(m_surfaceCaps);
        m_surfaceCaps = 0;
    }

    for (QGstVideoRenderer *pool : qAsConst(m_renderers)) {
        if (GstCaps *caps = pool->getCaps(m_surface)) {
            if (gst_caps_is_empty(caps)) {
                gst_caps_unref(caps);
                continue;
            }

            if (m_surfaceCaps)
                gst_caps_unref(m_surfaceCaps);

            m_renderer = pool;
            m_surfaceCaps = caps;
            break;
        } else {
            gst_caps_unref(caps);
        }
    }
}

static GstVideoSinkClass *sink_parent_class;
static QAbstractVideoSurface *current_surface;

#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))

QGstVideoRendererSink *QGstVideoRendererSink::createSink(QAbstractVideoSurface *surface)
{
    setSurface(surface);
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(
            g_object_new(QGstVideoRendererSink::get_type(), 0));

    g_signal_connect(G_OBJECT(sink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), sink);

    return sink;
}
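
/*
    Usage sketch (illustrative, assumed names such as "mySurface", "pipeline" and
    "decoder"; not compiled here): creating the sink programmatically and adding
    it to an existing GStreamer pipeline.

        QAbstractVideoSurface *mySurface = ...;  // an application-provided surface
        GstElement *sink = GST_ELEMENT(QGstVideoRendererSink::createSink(mySurface));
        gst_bin_add(GST_BIN(pipeline), sink);
        gst_element_link(decoder, sink);
*/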

void QGstVideoRendererSink::setSurface(QAbstractVideoSurface *surface)
{
    current_surface = surface;
    get_type();
}

GType QGstVideoRendererSink::get_type()
{
    static GType type = 0;

    if (type == 0) {
        static const GTypeInfo info =
        {
            sizeof(QGstVideoRendererSinkClass),     // class_size
            base_init,                              // base_init
            nullptr,                                // base_finalize
            class_init,                             // class_init
            nullptr,                                // class_finalize
            nullptr,                                // class_data
            sizeof(QGstVideoRendererSink),          // instance_size
            0,                                      // n_preallocs
            instance_init,                          // instance_init
            0                                       // value_table
        };

        type = g_type_register_static(
                GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink", &info, GTypeFlags(0));

        // Register the sink type so it can be used in custom pipelines.
        // Once a surface has been set, the sink is ready to use.
        gst_element_register(nullptr, "qtvideosink", GST_RANK_PRIMARY, type);
    }

    return type;
}
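
/*
    Usage sketch (illustrative, assumed name "mySurface"; not compiled here):
    because get_type() registers the element as "qtvideosink", a surface set via
    setSurface() can also be driven from a textual pipeline description.

        QGstVideoRendererSink::setSurface(mySurface);
        GstElement *pipeline = gst_parse_launch(
                "videotestsrc ! videoconvert ! qtvideosink", nullptr);
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
*/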

void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);

    sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

    GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
    video_sink_class->show_frame = QGstVideoRendererSink::show_frame;

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->stop = QGstVideoRendererSink::stop;
    base_sink_class->unlock = QGstVideoRendererSink::unlock;
    base_sink_class->query = QGstVideoRendererSink::query;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;
    gst_element_class_set_metadata(element_class,
            "Qt built-in video renderer sink",
            "Sink/Video",
            "Qt default built-in video renderer sink",
            "The Qt Company");

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
}

void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}

struct NullSurface : QAbstractVideoSurface
{
    NullSurface(QObject *parent = nullptr) : QAbstractVideoSurface(parent) { }

    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType) const override
    {
        return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB32;
    }

    bool present(const QVideoFrame &) override
    {
        return true;
    }
};

void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    Q_UNUSED(g_class);
    VO_SINK(instance);

    if (!current_surface) {
        qWarning() << "Using qtvideosink element without a video surface";
        static NullSurface nullSurface;
        current_surface = &nullSurface;
    }

    sink->delegate = new QVideoSurfaceGstDelegate(current_surface);
    sink->delegate->moveToThread(current_surface->thread());
    current_surface = nullptr;
}

void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->delegate;

    // Chain up
    G_OBJECT_CLASS(sink_parent_class)->finalize(object);
}

void QGstVideoRendererSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
{
    Q_UNUSED(o);
    Q_UNUSED(p);
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(d);

    gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);

    if (!showPrerollFrame) {
        GstState state = GST_STATE_VOID_PENDING;
        GstClockTime timeout = 10000000; // 10 ms
        gst_element_get_state(GST_ELEMENT(sink), &state, nullptr, timeout);
        // show-preroll-frame being set to 'false' while in GST_STATE_PAUSED means
        // the QMediaPlayer was stopped from the paused state.
        // We need to flush the current frame.
        if (state == GST_STATE_PAUSED)
            sink->delegate->flush();
    }
}

GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(element);

    gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(element), "show-preroll-frame", &showPrerollFrame, nullptr);

    // If show-preroll-frame is 'false' when transitioning from GST_STATE_PLAYING to
    // GST_STATE_PAUSED, it means the QMediaPlayer was stopped.
    // We need to flush the current frame.
    if (transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED && !showPrerollFrame)
        sink->delegate->flush();

    return GST_ELEMENT_CLASS(sink_parent_class)->change_state(element, transition);
}

GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
{
    VO_SINK(base);

    GstCaps *caps = sink->delegate->caps();
    GstCaps *unfiltered = caps;
    if (filter) {
        caps = gst_caps_intersect(unfiltered, filter);
        gst_caps_unref(unfiltered);
    }

    return caps;
}

gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *caps)
{
    VO_SINK(base);

#ifdef DEBUG_VIDEO_SURFACE_SINK
    qDebug() << "set_caps:";
    qDebug() << caps;
#endif

    if (!caps) {
        sink->delegate->stop();

        return TRUE;
    } else if (sink->delegate->start(caps)) {
        return TRUE;
    } else {
        return FALSE;
    }
}

gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->delegate->proposeAllocation(query);
}

gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
    VO_SINK(base);
    sink->delegate->stop();
    return TRUE;
}

gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
    VO_SINK(base);
    sink->delegate->unlock();
    return TRUE;
}

GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->delegate->render(buffer);
}

gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    if (sink->delegate->query(query))
        return TRUE;

    return GST_BASE_SINK_CLASS(sink_parent_class)->query(base, query);
}

QT_END_NAMESPACE

