/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "qgstreamercapturesession.h"
#include "qgstreamerrecordercontrol.h"
#include "qgstreamermediacontainercontrol.h"
#include "qgstreameraudioencode.h"
#include "qgstreamervideoencode.h"
#include "qgstreamerimageencode.h"
#include <qmediarecorder.h>
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstutils_p.h>

#include <gst/gsttagsetter.h>
#include <gst/gstversion.h>
#include <gst/video/video.h>

#include <QtCore/qdebug.h>
#include <QtCore/qurl.h>
#include <QtCore/qset.h>
#include <QCoreApplication>
#include <QtCore/qmetaobject.h>
#include <QtCore/qfile.h>
#include <QtCore/qdir.h> // QDir::currentPath() is used when resolving the output location
#include <QtGui/qimage.h>

QT_BEGIN_NAMESPACE

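// Owns the top-level GStreamer capture pipeline. The constructor creates the
// pipeline and its bus helper, installs this object as a bus message filter,
// and creates the encode/recorder/container controls that later provide the
// encoder and muxer elements used by buildEncodeBin().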
QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
    :QObject(parent),
     m_state(StoppedState),
     m_pendingState(StoppedState),
     m_waitingForEos(false),
     m_pipelineMode(EmptyPipeline),
     m_captureMode(captureMode),
     m_audioProbe(0),
     m_audioInputFactory(0),
     m_audioPreviewFactory(0),
     m_videoInputFactory(0),
     m_viewfinder(0),
     m_viewfinderInterface(0),
     m_audioSrc(0),
     m_audioTee(0),
     m_audioPreviewQueue(0),
     m_audioPreview(0),
     m_audioVolume(0),
     m_muted(false),
     m_volume(1.0),
     m_videoSrc(0),
     m_videoTee(0),
     m_videoPreviewQueue(0),
     m_videoPreview(0),
     m_imageCaptureBin(0),
     m_encodeBin(0),
     m_passImage(false),
     m_passPrerollImage(false)
{
    m_pipeline = gst_pipeline_new("media-capture-pipeline");
    qt_gst_object_ref_sink(m_pipeline);

    m_bus = gst_element_get_bus(m_pipeline);
    m_busHelper = new QGstreamerBusHelper(m_bus, this);
    m_busHelper->installMessageFilter(this);

    m_audioEncodeControl = new QGstreamerAudioEncode(this);
    m_videoEncodeControl = new QGstreamerVideoEncode(this);
    m_imageEncodeControl = new QGstreamerImageEncode(this);
    m_recorderControl = new QGstreamerRecorderControl(this);
    connect(m_recorderControl, &QGstreamerRecorderControl::error, [](int e, const QString &str) {
        qWarning() << QMediaRecorder::Error(e) << ":" << str.toLatin1().constData();
    });
    m_mediaContainerControl = new QGstreamerMediaContainerControl(this);
}

QGstreamerCaptureSession::~QGstreamerCaptureSession()
{
    setState(StoppedState);
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(m_bus));
    gst_object_unref(GST_OBJECT(m_pipeline));
}

void QGstreamerCaptureSession::setCaptureMode(CaptureMode mode)
{
    m_captureMode = mode;
}

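// Builds the recording branch of the pipeline. The bin always contains a muxer
// (chosen by the media container control) linked to a filesink, plus optional
// audio and video branches exposed through "audiosink"/"videosink" ghost pads.
// Roughly (an illustrative sketch only; the actual elements depend on the
// configured settings):
//   audiosink -> audioconvert ! queue ! volume ! <audio encoder> ! muxer
//   videosink -> queue ! videoconvert ! videoscale ! <video encoder> ! muxer
//   muxer ! filesink location=<output file>
// Returns 0 if any required element cannot be created or linked.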
GstElement *QGstreamerCaptureSession::buildEncodeBin()
{
    GstElement *encodeBin = gst_bin_new("encode-bin");

    GstElement *muxer = gst_element_factory_make(m_mediaContainerControl->formatElementName().constData(), "muxer");
    if (!muxer) {
        qWarning() << "Could not create a media muxer element:" << m_mediaContainerControl->formatElementName();
        gst_object_unref(encodeBin);
        return 0;
    }

    // Output location was rejected in setOutputlocation() if not a local file
    QUrl actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(m_sink);
    GstElement *fileSink = gst_element_factory_make("filesink", "filesink");
    g_object_set(G_OBJECT(fileSink), "location", QFile::encodeName(actualSink.toLocalFile()).constData(), NULL);
    gst_bin_add_many(GST_BIN(encodeBin), muxer, fileSink, NULL);

    if (!gst_element_link(muxer, fileSink)) {
        gst_object_unref(encodeBin);
        return 0;
    }

    if (m_captureMode & Audio) {
        GstElement *audioConvert = gst_element_factory_make("audioconvert", "audioconvert");
        GstElement *audioQueue = gst_element_factory_make("queue", "audio-encode-queue");
        m_audioVolume = gst_element_factory_make("volume", "volume");
        gst_bin_add_many(GST_BIN(encodeBin), audioConvert, audioQueue, m_audioVolume, NULL);

        GstElement *audioEncoder = m_audioEncodeControl->createEncoder();
        if (!audioEncoder) {
            gst_object_unref(encodeBin);
            qWarning() << "Could not create an audio encoder element:" << m_audioEncodeControl->audioSettings().codec();
            return 0;
        }

        gst_bin_add(GST_BIN(encodeBin), audioEncoder);

        if (!gst_element_link_many(audioConvert, audioQueue, m_audioVolume, audioEncoder, muxer, NULL)) {
            m_audioVolume = 0;
            gst_object_unref(encodeBin);
            return 0;
        }

        g_object_set(G_OBJECT(m_audioVolume), "mute", m_muted, NULL);
        g_object_set(G_OBJECT(m_audioVolume), "volume", m_volume, NULL);

        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(audioConvert, "sink");
        gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("audiosink", pad));
        gst_object_unref(GST_OBJECT(pad));
    }

    if (m_captureMode & Video) {
        GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue");
        GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-encoder");
        GstElement *videoscale = gst_element_factory_make("videoscale", "videoscale-encoder");
        gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, NULL);

        GstElement *videoEncoder = m_videoEncodeControl->createEncoder();
        if (!videoEncoder) {
            gst_object_unref(encodeBin);
            qWarning() << "Could not create a video encoder element:" << m_videoEncodeControl->videoSettings().codec();
            return 0;
        }

        gst_bin_add(GST_BIN(encodeBin), videoEncoder);

        if (!gst_element_link_many(videoQueue, colorspace, videoscale, videoEncoder, muxer, NULL)) {
            gst_object_unref(encodeBin);
            return 0;
        }

        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(videoQueue, "sink");
        gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("videosink", pad));
        gst_object_unref(GST_OBJECT(pad));
    }

    return encodeBin;
}

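// Creates the audio capture source. A custom input factory takes precedence;
// otherwise the element is chosen from the device string prefix ("alsa:",
// "oss:" or "pulseaudio:"), falling back to autoaudiosrc. If creation fails,
// an error is emitted and a fakesrc is used so the rest of the graph can still
// be linked.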
GstElement *QGstreamerCaptureSession::buildAudioSrc()
{
    GstElement *audioSrc = 0;
    if (m_audioInputFactory)
        audioSrc = m_audioInputFactory->buildElement();
    else {
        QString elementName = "alsasrc";
        QString device;

        if (m_captureDevice.startsWith("alsa:")) {
            device = m_captureDevice.mid(QString("alsa:").length());
        } else if (m_captureDevice.startsWith("oss:")) {
            elementName = "osssrc";
            device = m_captureDevice.mid(QString("oss:").length());
        } else if (m_captureDevice.startsWith("pulseaudio:")) {
            elementName = "pulsesrc";
        } else {
            elementName = "autoaudiosrc";
        }

        audioSrc = gst_element_factory_make(elementName.toLatin1().constData(), "audio_src");
        if (audioSrc && !device.isEmpty())
            g_object_set(G_OBJECT(audioSrc), "device", device.toLocal8Bit().constData(), NULL);
    }

    if (!audioSrc) {
        emit error(int(QMediaRecorder::ResourceError), tr("Could not create an audio source element"));
        audioSrc = gst_element_factory_make("fakesrc", NULL);
    }

    return audioSrc;
}

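// The audio preview is a fakesink unless a preview factory is installed; the
// disabled branch below shows how a libvisual scope could be wired in instead.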
GstElement *QGstreamerCaptureSession::buildAudioPreview()
{
    GstElement *previewElement = 0;

    if (m_audioPreviewFactory) {
        previewElement = m_audioPreviewFactory->buildElement();
    } else {


#if 1
        previewElement = gst_element_factory_make("fakesink", "audio-preview");
#else
        GstElement *bin = gst_bin_new("audio-preview-bin");
        GstElement *visual = gst_element_factory_make("libvisual_lv_scope", "audio-preview");
        GstElement *sink = gst_element_factory_make("ximagesink", NULL);
        gst_bin_add_many(GST_BIN(bin), visual, sink, NULL);
        gst_element_link_many(visual, sink, NULL);


        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(visual, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("audiosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
#endif
    }

    return previewElement;
}

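// Creates the video capture source: the input factory if one is set, otherwise
// a videotestsrc placeholder (a real camera source such as v4l2src is left
// commented out below).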
GstElement *QGstreamerCaptureSession::buildVideoSrc()
{
    GstElement *videoSrc = 0;
    if (m_videoInputFactory) {
        videoSrc = m_videoInputFactory->buildElement();
    } else {
        videoSrc = gst_element_factory_make("videotestsrc", "video_test_src");
        //videoSrc = gst_element_factory_make("v4l2src", "video_test_src");
    }

    return videoSrc;
}

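// Builds the viewfinder branch: videoconvert ! capsfilter ! <viewfinder sink>.
// The capsfilter constrains the preview to the resolution and frame rate of
// the current video (or image) encoder settings, when those are set. Without
// a viewfinder interface a fakesink is used instead so the graph still links.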
GstElement *QGstreamerCaptureSession::buildVideoPreview()
{
    GstElement *previewElement = 0;

    if (m_viewfinderInterface) {
        GstElement *bin = gst_bin_new("video-preview-bin");
        GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
        GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
        GstElement *preview = m_viewfinderInterface->videoSink();

        gst_bin_add_many(GST_BIN(bin), colorspace, capsFilter, preview, NULL);
        gst_element_link(colorspace, capsFilter);
        gst_element_link(capsFilter, preview);

        QSize resolution;
        qreal frameRate = 0;

        if (m_captureMode & Video) {
            QVideoEncoderSettings videoSettings = m_videoEncodeControl->videoSettings();
            resolution = videoSettings.resolution();
            frameRate = videoSettings.frameRate();
        } else if (m_captureMode & Image) {
            resolution = m_imageEncodeControl->imageSettings().resolution();
        }

        GstCaps *caps = QGstUtils::videoFilterCaps();

        if (!resolution.isEmpty()) {
            gst_caps_set_simple(caps, "width", G_TYPE_INT, resolution.width(), NULL);
            gst_caps_set_simple(caps, "height", G_TYPE_INT, resolution.height(), NULL);
        }
        if (frameRate > 0.001) {
            QPair<int,int> rate = m_videoEncodeControl->rateAsRational();

            //qDebug() << "frame rate:" << num << denum;

            gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
        }

        //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);

        g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);

        gst_caps_unref(caps);

        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
    } else {
#if 1
        previewElement = gst_element_factory_make("fakesink", "video-preview");
#else
        GstElement *bin = gst_bin_new("video-preview-bin");
        GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
        GstElement *preview = gst_element_factory_make("ximagesink", "video-preview");
        gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL);
        gst_element_link(colorspace, preview);

        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
#endif
    }

    return previewElement;
}

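// Pad probe callbacks for the image capture branch: probeCaps() caches the
// negotiated video info (GStreamer 1.x only) and probeBuffer() converts at
// most one buffer per capture request into a QImage, emitting imageExposed()
// and imageCaptured() through queued invocations since it runs on a streaming
// thread.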
void QGstreamerCaptureSession::probeCaps(GstCaps *caps)
{
#if GST_CHECK_VERSION(1,0,0)
    gst_video_info_from_caps(&m_previewInfo, caps);
#else
    Q_UNUSED(caps);
#endif
}

bool QGstreamerCaptureSession::probeBuffer(GstBuffer *buffer)
{
    if (m_passPrerollImage) {
        m_passImage = false;
        m_passPrerollImage = false;

        return true;
    } else if (!m_passImage) {
        return false;
    }

    m_passImage = false;

#if GST_CHECK_VERSION(1,0,0)
    QImage img = QGstUtils::bufferToImage(buffer, m_previewInfo);
#else
    QImage img = QGstUtils::bufferToImage(buffer);
#endif

    if (img.isNull())
        return true;

    static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageExposed);
    exposedSignal.invoke(this,
                         Qt::QueuedConnection,
                         Q_ARG(int,m_imageRequestId));

    static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageCaptured);
    capturedSignal.invoke(this,
                          Qt::QueuedConnection,
                          Q_ARG(int,m_imageRequestId),
                          Q_ARG(QImage,img));

    return true;
}

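// "handoff" callback of the image capture fakesink: writes the encoded JPEG
// buffer to the requested file and emits imageSaved() via a queued invocation,
// as this is called from a GStreamer streaming thread.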
static gboolean saveImageFilter(GstElement *element,
                                GstBuffer *buffer,
                                GstPad *pad,
                                void *appdata)
{
    Q_UNUSED(element);
    Q_UNUSED(pad);
    QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata;

    QString fileName = session->m_imageFileName;

    if (!fileName.isEmpty()) {
        QFile f(fileName);
        if (f.open(QFile::WriteOnly)) {
#if GST_CHECK_VERSION(1,0,0)
            GstMapInfo info;
            if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
                f.write(reinterpret_cast<const char *>(info.data), info.size);
                gst_buffer_unmap(buffer, &info);
            }
#else
            f.write(reinterpret_cast<const char *>(buffer->data), buffer->size);
#endif
            f.close();

            static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageSaved);
            savedSignal.invoke(session,
                               Qt::QueuedConnection,
                               Q_ARG(int,session->m_imageRequestId),
                               Q_ARG(QString,fileName));
        }
    }

    return TRUE;
}

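// Builds the still-image branch: queue ! videoconvert ! jpegenc ! fakesink.
// A probe on the queue's src pad filters buffers (see probeBuffer()) and the
// fakesink's handoff signal stores the encoded image (see saveImageFilter()).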
GstElement *QGstreamerCaptureSession::buildImageCapture()
{
    GstElement *bin = gst_bin_new("image-capture-bin");
    GstElement *queue = gst_element_factory_make("queue", "queue-image-capture");
    GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-image-capture");
    GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder");
    GstElement *sink = gst_element_factory_make("fakesink", "sink-image-capture");

    GstPad *pad = gst_element_get_static_pad(queue, "src");
    Q_ASSERT(pad);

    addProbeToPad(pad, false);

    gst_object_unref(GST_OBJECT(pad));

    g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
    g_signal_connect(G_OBJECT(sink), "handoff", G_CALLBACK(saveImageFilter), this);

    gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL);
    gst_element_link_many(queue, colorspace, encoder, sink, NULL);

    // add ghostpads
    pad = gst_element_get_static_pad(queue, "sink");
    Q_ASSERT(pad);
    gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("imagesink", pad));
    gst_object_unref(GST_OBJECT(pad));

    m_passImage = false;
    m_passPrerollImage = true;
    m_imageFileName = QString();

    return bin;
}

void QGstreamerCaptureSession::captureImage(int requestId, const QString &fileName)
{
    m_imageRequestId = requestId;
    m_imageFileName = fileName;
    m_passImage = true;
}


#define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} }
#define UNREF_ELEMENT(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }

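// Tears down the current graph and assembles the one required by newMode:
// EmptyPipeline, PreviewPipeline (sources + previews + image capture),
// RecordingPipeline (sources + encode bin) or PreviewAndRecordingPipeline
// (sources teed into both the previews and the encode bin). Returns false and
// falls back to an empty pipeline if any element fails to create or link.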
bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
{
    removeAudioBufferProbe();
    REMOVE_ELEMENT(m_audioSrc);
    REMOVE_ELEMENT(m_audioPreview);
    REMOVE_ELEMENT(m_audioPreviewQueue);
    REMOVE_ELEMENT(m_audioTee);
    REMOVE_ELEMENT(m_videoSrc);
    REMOVE_ELEMENT(m_videoPreview);
    REMOVE_ELEMENT(m_videoPreviewQueue);
    REMOVE_ELEMENT(m_videoTee);
    REMOVE_ELEMENT(m_encodeBin);
    REMOVE_ELEMENT(m_imageCaptureBin);
    m_audioVolume = 0;

    bool ok = true;

    switch (newMode) {
        case EmptyPipeline:
            break;
        case PreviewPipeline:
            if (m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                m_audioPreview = buildAudioPreview();

                ok &= m_audioSrc && m_audioPreview;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
                    ok &= gst_element_link(m_audioSrc, m_audioPreview);
                } else {
                    UNREF_ELEMENT(m_audioSrc);
                    UNREF_ELEMENT(m_audioPreview);
                }
            }
            if (m_captureMode & Video || m_captureMode & Image) {
                m_videoSrc = buildVideoSrc();
                m_videoTee = gst_element_factory_make("tee", "video-preview-tee");
                m_videoPreviewQueue = gst_element_factory_make("queue", "video-preview-queue");
                m_videoPreview = buildVideoPreview();
                m_imageCaptureBin = buildImageCapture();

                ok &= m_videoSrc && m_videoTee && m_videoPreviewQueue && m_videoPreview && m_imageCaptureBin;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
                                     m_videoPreviewQueue, m_videoPreview,
                                     m_imageCaptureBin, NULL);

                    ok &= gst_element_link(m_videoSrc, m_videoTee);
                    ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
                    ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
                    ok &= gst_element_link(m_videoTee, m_imageCaptureBin);
                } else {
                    UNREF_ELEMENT(m_videoSrc);
                    UNREF_ELEMENT(m_videoTee);
                    UNREF_ELEMENT(m_videoPreviewQueue);
                    UNREF_ELEMENT(m_videoPreview);
                    UNREF_ELEMENT(m_imageCaptureBin);
                }
            }
            break;
        case RecordingPipeline:
            m_encodeBin = buildEncodeBin();
            gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);

            if (m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                ok &= m_audioSrc != 0;

                gst_bin_add(GST_BIN(m_pipeline), m_audioSrc);
                ok &= gst_element_link(m_audioSrc, m_encodeBin);
            }

            if (m_captureMode & Video) {
                m_videoSrc = buildVideoSrc();
                ok &= m_videoSrc != 0;

                gst_bin_add(GST_BIN(m_pipeline), m_videoSrc);
                ok &= gst_element_link(m_videoSrc, m_encodeBin);
            }

            if (!m_metaData.isEmpty())
                setMetaData(m_metaData);

            break;
        case PreviewAndRecordingPipeline:
            m_encodeBin = buildEncodeBin();
            if (m_encodeBin)
                gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);

            ok &= m_encodeBin != 0;

            if (ok && m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                m_audioPreview = buildAudioPreview();
                m_audioTee = gst_element_factory_make("tee", NULL);
                m_audioPreviewQueue = gst_element_factory_make("queue", NULL);

                ok &= m_audioSrc && m_audioPreview && m_audioTee && m_audioPreviewQueue;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioTee,
                                     m_audioPreviewQueue, m_audioPreview, NULL);
                    ok &= gst_element_link(m_audioSrc, m_audioTee);
                    ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
                    ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
                    ok &= gst_element_link(m_audioTee, m_encodeBin);
                } else {
                    UNREF_ELEMENT(m_audioSrc);
                    UNREF_ELEMENT(m_audioPreview);
                    UNREF_ELEMENT(m_audioTee);
                    UNREF_ELEMENT(m_audioPreviewQueue);
                }
            }

            if (ok && (m_captureMode & Video || m_captureMode & Image)) {
                m_videoSrc = buildVideoSrc();
                m_videoPreview = buildVideoPreview();
                m_videoTee = gst_element_factory_make("tee", NULL);
                m_videoPreviewQueue = gst_element_factory_make("queue", NULL);

                ok &= m_videoSrc && m_videoPreview && m_videoTee && m_videoPreviewQueue;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
                                     m_videoPreviewQueue, m_videoPreview, NULL);
                    ok &= gst_element_link(m_videoSrc, m_videoTee);
                    ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
                    ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
                } else {
                    UNREF_ELEMENT(m_videoSrc);
                    UNREF_ELEMENT(m_videoTee);
                    UNREF_ELEMENT(m_videoPreviewQueue);
                    UNREF_ELEMENT(m_videoPreview);
                }

                if (ok && (m_captureMode & Video))
                    ok &= gst_element_link(m_videoTee, m_encodeBin);
            }

            if (!m_metaData.isEmpty())
                setMetaData(m_metaData);


            break;
    }

    if (!ok) {
        emit error(int(QMediaRecorder::FormatError), tr("Failed to build media capture pipeline."));
    }

    dumpGraph(QString("rebuild_graph_%1_%2").arg(m_pipelineMode).arg(newMode));
#ifdef QT_GST_CAPTURE_DEBUG
    if (m_encodeBin) {
        QString fileName = QString("rebuild_graph_encode_%1_%2").arg(m_pipelineMode).arg(newMode);
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(m_encodeBin), GST_DEBUG_GRAPH_SHOW_ALL, fileName.toLatin1());
    }
#endif

    if (ok) {
        addAudioBufferProbe();
        m_pipelineMode = newMode;
    } else {
        m_pipelineMode = EmptyPipeline;

        REMOVE_ELEMENT(m_audioSrc);
        REMOVE_ELEMENT(m_audioPreview);
        REMOVE_ELEMENT(m_audioPreviewQueue);
        REMOVE_ELEMENT(m_audioTee);
        REMOVE_ELEMENT(m_videoSrc);
        REMOVE_ELEMENT(m_videoPreview);
        REMOVE_ELEMENT(m_videoPreviewQueue);
        REMOVE_ELEMENT(m_videoTee);
        REMOVE_ELEMENT(m_encodeBin);
    }

    return ok;
}

void QGstreamerCaptureSession::dumpGraph(const QString &fileName)
{
#ifdef QT_GST_CAPTURE_DEBUG
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(m_pipeline),
                              GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
                              fileName.toLatin1());
#else
    Q_UNUSED(fileName)
#endif
}

QUrl QGstreamerCaptureSession::outputLocation() const
{
    return m_sink;
}

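// Only local (or relative) file URLs are accepted; relative paths are resolved
// against the current directory when the encode bin is built.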
bool QGstreamerCaptureSession::setOutputLocation(const QUrl& sink)
{
    if (!sink.isRelative() && !sink.isLocalFile()) {
        qWarning("Output location must be a local file");
        return false;
    }

    m_sink = sink;
    return true;
}

void QGstreamerCaptureSession::setAudioInput(QGstreamerElementFactory *audioInput)
{
    m_audioInputFactory = audioInput;
}

void QGstreamerCaptureSession::setAudioPreview(QGstreamerElementFactory *audioPreview)
{
    m_audioPreviewFactory = audioPreview;
}

void QGstreamerCaptureSession::setVideoInput(QGstreamerVideoInput *videoInput)
{
    m_videoInputFactory = videoInput;
}

void QGstreamerCaptureSession::setVideoPreview(QObject *viewfinder)
{
    m_viewfinderInterface = qobject_cast<QGstreamerVideoRendererInterface*>(viewfinder);
    if (!m_viewfinderInterface)
        viewfinder = 0;

    if (m_viewfinder != viewfinder) {
        bool oldReady = isReady();

        if (m_viewfinder) {
            disconnect(m_viewfinder, SIGNAL(sinkChanged()),
                       this, SIGNAL(viewfinderChanged()));
            disconnect(m_viewfinder, SIGNAL(readyChanged(bool)),
                       this, SIGNAL(readyChanged(bool)));

            m_busHelper->removeMessageFilter(m_viewfinder);
        }

        m_viewfinder = viewfinder;
        //m_viewfinderHasChanged = true;

        if (m_viewfinder) {
            connect(m_viewfinder, SIGNAL(sinkChanged()),
                    this, SIGNAL(viewfinderChanged()));
            connect(m_viewfinder, SIGNAL(readyChanged(bool)),
                    this, SIGNAL(readyChanged(bool)));

            m_busHelper->installMessageFilter(m_viewfinder);
        }

        emit viewfinderChanged();
        if (oldReady != isReady())
            emit readyChanged(isReady());
    }
}

bool QGstreamerCaptureSession::isReady() const
{
    //it's possible to use QCamera without any viewfinder attached
    return !m_viewfinderInterface || m_viewfinderInterface->isReady();
}

QGstreamerCaptureSession::State QGstreamerCaptureSession::state() const
{
    return m_state;
}

QGstreamerCaptureSession::State QGstreamerCaptureSession::pendingState() const
{
    return m_pendingState;
}

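// Maps the requested session state to a pipeline mode and rebuilds the graph
// when the mode changes. Leaving a recording pipeline first sends EOS while
// the pipeline is still PLAYING (required for live sources) and defers the
// actual transition until the EOS message arrives on the bus.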
void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState)
{
    if (newState == m_pendingState && !m_waitingForEos)
        return;

    m_pendingState = newState;

    PipelineMode newMode = EmptyPipeline;

    switch (newState) {
        case PausedState:
        case RecordingState:
            newMode = PreviewAndRecordingPipeline;
            break;
        case PreviewState:
            newMode = PreviewPipeline;
            break;
        case StoppedState:
            newMode = EmptyPipeline;
            break;
    }

    if (newMode != m_pipelineMode) {
        if (m_pipelineMode == PreviewAndRecordingPipeline) {
            if (!m_waitingForEos) {
                m_waitingForEos = true;
                //qDebug() << "Waiting for EOS";
                // Unless gstreamer is in GST_STATE_PLAYING our EOS message will not be received.
                gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
                //with live sources it's necessary to send EOS even to pipeline
                //before going to STOPPED state
                gst_element_send_event(m_pipeline, gst_event_new_eos());

                return;
            } else {
                m_waitingForEos = false;
                //qDebug() << "EOS received";
            }
        }

        //select suitable default codecs/containers, if necessary
        m_recorderControl->applySettings();

        gst_element_set_state(m_pipeline, GST_STATE_NULL);

        if (!rebuildGraph(newMode)) {
            m_pendingState = StoppedState;
            m_state = StoppedState;
            emit stateChanged(StoppedState);

            return;
        }
    }

    switch (newState) {
        case PausedState:
            gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
            break;
        case RecordingState:
        case PreviewState:
            gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
            break;
        case StoppedState:
            gst_element_set_state(m_pipeline, GST_STATE_NULL);
    }

    //we have to do it here, since gstreamer will not emit bus messages any more
    if (newState == StoppedState) {
        m_state = StoppedState;
        emit stateChanged(StoppedState);
    }
}


qint64 QGstreamerCaptureSession::duration() const
{
    gint64 duration = 0;
    if (m_encodeBin && qt_gst_element_query_position(m_encodeBin, GST_FORMAT_TIME, &duration))
        return duration / 1000000;
    else
        return 0;
}

void QGstreamerCaptureSession::setCaptureDevice(const QString &deviceName)
{
    m_captureDevice = deviceName;
}

void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &data)
{
    //qDebug() << "QGstreamerCaptureSession::setMetaData" << data;
    m_metaData = data;

    if (m_encodeBin)
        QGstUtils::setMetaData(GST_BIN(m_encodeBin), data);
}

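// Bus message filter: reports errors, completes a deferred state change once
// the EOS requested by setState() arrives, and tracks pipeline state changes
// to keep m_state in sync and emit stateChanged().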
bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

    if (gm) {
        if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
            GError *err;
            gchar *debug;
            gst_message_parse_error(gm, &err, &debug);
            emit error(int(QMediaRecorder::ResourceError), QString::fromUtf8(err->message));
            g_error_free(err);
            g_free(debug);
        }

        if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
            switch (GST_MESSAGE_TYPE(gm)) {
            case GST_MESSAGE_DURATION:
                break;

            case GST_MESSAGE_EOS:
                if (m_waitingForEos)
                    setState(m_pendingState);
                break;

            case GST_MESSAGE_STATE_CHANGED:
                {

                    GstState oldState;
                    GstState newState;
                    GstState pending;

                    gst_message_parse_state_changed(gm, &oldState, &newState, &pending);

                    QStringList states;
                    states << "GST_STATE_VOID_PENDING" << "GST_STATE_NULL" << "GST_STATE_READY" << "GST_STATE_PAUSED" << "GST_STATE_PLAYING";

                    /*
                    qDebug() << QString("state changed: old: %1 new: %2 pending: %3") \
                            .arg(states[oldState]) \
                            .arg(states[newState]) \
                            .arg(states[pending]);

                    #define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v)))

                    qDebug() << "Current session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
                    qDebug() << "Pending session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_pendingState);
                    */

                    switch (newState) {
                    case GST_STATE_VOID_PENDING:
                    case GST_STATE_NULL:
                    case GST_STATE_READY:
                        if (m_state != StoppedState && m_pendingState == StoppedState) {
                            emit stateChanged(m_state = StoppedState);
                            dumpGraph("stopped");
                        }
                        break;
                    case GST_STATE_PAUSED:
                        if (m_state != PausedState && m_pendingState == PausedState)
                            emit stateChanged(m_state = PausedState);
                        dumpGraph("paused");

                        if (m_pipelineMode == RecordingPipeline && !m_metaData.isEmpty())
                            setMetaData(m_metaData);
                        break;
                    case GST_STATE_PLAYING:
                        {
                            if ((m_pendingState == PreviewState || m_pendingState == RecordingState) &&
                                m_state != m_pendingState)
                            {
                                m_state = m_pendingState;
                                emit stateChanged(m_state);
                            }

                            if (m_pipelineMode == PreviewPipeline)
                                dumpGraph("preview");
                            else
                                dumpGraph("recording");
                        }
                        break;
                    }
                }
                break;
            default:
                break;
            }
            //qDebug() << "New session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
        }
    }
    return false;
}

void QGstreamerCaptureSession::setMuted(bool muted)
{
    if (bool(m_muted) != muted) {
        m_muted = muted;
        if (m_audioVolume)
            g_object_set(G_OBJECT(m_audioVolume), "mute", m_muted, NULL);

        emit mutedChanged(muted);
    }
}

void QGstreamerCaptureSession::setVolume(qreal volume)
{
    if (!qFuzzyCompare(double(volume), m_volume)) {
        m_volume = volume;
        if (m_audioVolume)
            g_object_set(G_OBJECT(m_audioVolume), "volume", m_volume, NULL);

        emit volumeChanged(volume);
    }
}

void QGstreamerCaptureSession::addProbe(QGstreamerAudioProbeControl* probe)
{
    Q_ASSERT(!m_audioProbe);
    m_audioProbe = probe;
    addAudioBufferProbe();
}

void QGstreamerCaptureSession::removeProbe(QGstreamerAudioProbeControl* probe)
{
    Q_ASSERT(m_audioProbe == probe);
    removeAudioBufferProbe();
    m_audioProbe = 0;
}

GstPad *QGstreamerCaptureSession::getAudioProbePad()
{
    // first see if preview element is available
    if (m_audioPreview) {
        GstPad *pad = gst_element_get_static_pad(m_audioPreview, "sink");
        if (pad)
            return pad;
    }

    // preview element is not available,
    // try to use sink pin of audio encoder.
    if (m_encodeBin) {
        GstElement *audioEncoder = gst_bin_get_by_name(GST_BIN(m_encodeBin), "audio-encoder-bin");
        if (audioEncoder) {
            GstPad *pad = gst_element_get_static_pad(audioEncoder, "sink");
            gst_object_unref(audioEncoder);
            if (pad)
                return pad;
        }
    }

    return 0;
}

void QGstreamerCaptureSession::removeAudioBufferProbe()
{
    if (!m_audioProbe)
        return;

    GstPad *pad = getAudioProbePad();
    if (pad) {
        m_audioProbe->removeProbeFromPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

void QGstreamerCaptureSession::addAudioBufferProbe()
{
    if (!m_audioProbe)
        return;

    GstPad *pad = getAudioProbePad();
    if (pad) {
        m_audioProbe->addProbeToPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

QT_END_NAMESPACE
