// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstvideobuffer_p.h"
#include "qgstreamervideosink_p.h"
#include <private/qvideotexturehelper_p.h>
#include <qpa/qplatformnativeinterface.h>
#include <qguiapplication.h>

#include <gst/video/video.h>
#include <gst/video/video-frame.h>
#include <gst/video/gstvideometa.h>
#include <gst/pbutils/gstpluginsbaseversion.h>

#include "qgstutils_p.h"

#if QT_CONFIG(gstreamer_gl)
#include <rhi/qrhi.h>
#include <QtGui/qopenglcontext.h>
#include <QtGui/qopenglfunctions.h>
#include <QtGui/qopengl.h>

#include <gst/gl/gstglconfig.h>
#include <gst/gl/gstglmemory.h>
#include <gst/gl/gstglsyncmeta.h>
#if QT_CONFIG(linux_dmabuf)
#include <gst/allocators/gstdmabuf.h>
#endif

#include <EGL/egl.h>
#include <EGL/eglext.h>
#endif

QT_BEGIN_NAMESPACE

// keep things building without drm_fourcc.h
#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
                                 ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))

#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */

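// QGstVideoBuffer wraps a GstBuffer as a QAbstractVideoBuffer. It takes an extra
// reference on the buffer for its own lifetime and, when the sink provides an RHI
// and the memory is not plain CPU memory, advertises the frame as an RHI texture
// handle instead of CPU-mapped data.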
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
                                 const QVideoFrameFormat &frameFormat,
                                 QGstCaps::MemoryFormat format)
    : QAbstractVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory) ?
                           QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink ? sink->rhi() : nullptr)
    , memoryFormat(format)
    , m_frameFormat(frameFormat)
    , m_rhi(sink ? sink->rhi() : nullptr)
    , m_videoInfo(info)
    , m_buffer(buffer)
{
    gst_buffer_ref(m_buffer);
    if (sink) {
        eglDisplay = sink->eglDisplay();
        eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
    }
}

QGstVideoBuffer::~QGstVideoBuffer()
{
    unmap();

    gst_buffer_unref(m_buffer);
}


QVideoFrame::MapMode QGstVideoBuffer::mapMode() const
{
    return m_mode;
}

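// Maps the underlying GstBuffer into CPU-accessible memory. Encoded buffers
// (no video planes described in the caps) are mapped as a single opaque block;
// raw video is mapped per plane, with stride and plane height taken from the
// GstVideoFrame.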
QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
{
    const GstMapFlags flags = GstMapFlags(((mode & QVideoFrame::ReadOnly) ? GST_MAP_READ : 0)
                | ((mode & QVideoFrame::WriteOnly) ? GST_MAP_WRITE : 0));

    MapData mapData;
    if (mode == QVideoFrame::NotMapped || m_mode != QVideoFrame::NotMapped)
        return mapData;

    if (m_videoInfo.finfo->n_planes == 0) {         // Encoded
        if (gst_buffer_map(m_buffer, &m_frame.map[0], flags)) {
            mapData.nPlanes = 1;
            mapData.bytesPerLine[0] = -1;
            mapData.size[0] = m_frame.map[0].size;
            mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);

            m_mode = mode;
        }
    } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer, flags)) {
        mapData.nPlanes = GST_VIDEO_FRAME_N_PLANES(&m_frame);

        for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
            mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
            mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
            mapData.size[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
        }

        m_mode = mode;
    }
    return mapData;
}

void QGstVideoBuffer::unmap()
{
    if (m_mode != QVideoFrame::NotMapped) {
        if (m_videoInfo.finfo->n_planes == 0)
            gst_buffer_unmap(m_buffer, &m_frame.map[0]);
        else
            gst_video_frame_unmap(&m_frame);
    }
    m_mode = QVideoFrame::NotMapped;
}

#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(linux_dmabuf)
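// Translates the GStreamer video format of a single plane into the DRM fourcc
// expected by EGL_LINUX_DRM_FOURCC_EXT when importing that plane as a dmabuf.
// Multi-plane YUV formats are imported plane by plane, so e.g. NV12 maps to R8
// for the Y plane and to the two-channel RG/GR fourcc for the interleaved UV plane.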
static int
fourccFromVideoInfo(const GstVideoInfo * info, int plane)
{
    GstVideoFormat format = GST_VIDEO_INFO_FORMAT (info);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    const gint rgba_fourcc = DRM_FORMAT_ABGR8888;
    const gint rgb_fourcc = DRM_FORMAT_BGR888;
    const gint rg_fourcc = DRM_FORMAT_GR88;
#else
    const gint rgba_fourcc = DRM_FORMAT_RGBA8888;
    const gint rgb_fourcc = DRM_FORMAT_RGB888;
    const gint rg_fourcc = DRM_FORMAT_RG88;
#endif

    GST_DEBUG ("Getting DRM fourcc for %s plane %i",
               gst_video_format_to_string (format), plane);

    switch (format) {
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
        return DRM_FORMAT_RGB565;

    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
        return rgb_fourcc;

    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_AYUV:
#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
    case GST_VIDEO_FORMAT_VUYA:
#endif
        return rgba_fourcc;

    case GST_VIDEO_FORMAT_GRAY8:
        return DRM_FORMAT_R8;

    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
        return rg_fourcc;

    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
        return plane == 0 ? DRM_FORMAT_R8 : rg_fourcc;

    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
        return DRM_FORMAT_R8;

#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
    case GST_VIDEO_FORMAT_BGR10A2_LE:
        return DRM_FORMAT_BGRA1010102;
#endif

//    case GST_VIDEO_FORMAT_RGB10A2_LE:
//        return DRM_FORMAT_RGBA1010102;

    case GST_VIDEO_FORMAT_P010_10LE:
//    case GST_VIDEO_FORMAT_P012_LE:
//    case GST_VIDEO_FORMAT_P016_LE:
        return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_GR1616;

    case GST_VIDEO_FORMAT_P010_10BE:
//    case GST_VIDEO_FORMAT_P012_BE:
//    case GST_VIDEO_FORMAT_P016_BE:
        return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_RG1616;

    default:
        GST_ERROR ("Unsupported format for DMABuf.");
        return -1;
    }
}
#endif

#if QT_CONFIG(gstreamer_gl)
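// GlTextures carries the raw GL texture names for each plane of a frame.
// QGstQVideoFrameTextures wraps those names as QRhiTexture objects so the rest of
// Qt Multimedia can sample them; texture names generated by us (owned == true, the
// dmabuf path) are deleted again in the destructor.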
struct GlTextures
{
    uint count = 0;
    bool owned = false;
    std::array<guint32, QVideoTextureHelper::TextureDescription::maxPlanes> names;
};

class QGstQVideoFrameTextures : public QVideoFrameTextures
{
public:
    QGstQVideoFrameTextures(QRhi *rhi, QSize size, QVideoFrameFormat::PixelFormat format, GlTextures &textures)
        : m_rhi(rhi)
        , m_glTextures(textures)
    {
        auto desc = QVideoTextureHelper::textureDescription(format);
        for (uint i = 0; i < textures.count; ++i) {
            QSize planeSize(desc->widthForPlane(size.width(), int(i)),
                            desc->heightForPlane(size.height(), int(i)));
            m_textures[i].reset(rhi->newTexture(desc->textureFormat[i], planeSize, 1, {}));
            m_textures[i]->createFrom({textures.names[i], 0});
        }
    }

    ~QGstQVideoFrameTextures()
    {
        m_rhi->makeThreadLocalNativeContextCurrent();
        auto ctx = QOpenGLContext::currentContext();
        if (m_glTextures.owned && ctx)
            ctx->functions()->glDeleteTextures(int(m_glTextures.count), m_glTextures.names.data());
    }

    QRhiTexture *texture(uint plane) const override
    {
        return plane < m_glTextures.count ? m_textures[plane].get() : nullptr;
    }

private:
    QRhi *m_rhi = nullptr;
    GlTextures m_glTextures;
    std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes];
};


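// For GstGLMemory-backed buffers the texture names already exist in a GStreamer GL
// context: map the frame with GST_MAP_GL, set and wait on a GL sync point so the
// producer has finished rendering, then read the texture id of each plane.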
static GlTextures mapFromGlTexture(GstBuffer *buffer, GstVideoFrame &frame, GstVideoInfo &videoInfo)
{
    auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
    if (!mem)
        return {};

    if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ|GST_MAP_GL))) {
        qWarning() << "Could not map GL textures";
        return {};
    }

    auto *sync_meta = gst_buffer_get_gl_sync_meta(buffer);
    GstBuffer *sync_buffer = nullptr;
    if (!sync_meta) {
        sync_buffer = gst_buffer_new();
        sync_meta = gst_buffer_add_gl_sync_meta(mem->context, sync_buffer);
    }
    gst_gl_sync_meta_set_sync_point (sync_meta, mem->context);
    gst_gl_sync_meta_wait (sync_meta, mem->context);
    if (sync_buffer)
        gst_buffer_unref(sync_buffer);

    GlTextures textures;
    textures.count = frame.info.finfo->n_planes;

    for (uint i = 0; i < textures.count; ++i)
        textures.names[i] = *(guint32 *)frame.data[i];

    gst_video_frame_unmap(&frame);

    return textures;
}

#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
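// For dmabuf-backed buffers each plane's file descriptor is imported into EGL as an
// EGLImage (EGL_LINUX_DMA_BUF_EXT) and attached to a freshly generated GL texture via
// the glEGLImageTargetTexture2DOES entry point supplied by the sink. The resulting
// texture names are owned by us and are released later by QGstQVideoFrameTextures.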
static GlTextures mapFromDmaBuffer(QRhi *rhi, GstBuffer *buffer, GstVideoFrame &frame,
                                   GstVideoInfo &videoInfo, Qt::HANDLE eglDisplay,
                                   QFunctionPointer eglImageTargetTexture2D)
{
    Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0)));
    Q_ASSERT(eglDisplay);
    Q_ASSERT(eglImageTargetTexture2D);

    auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
    auto glContext = nativeHandles->context;
    if (!glContext) {
        qWarning() << "no GL context";
        return {};
    }

    if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ))) {
        qDebug() << "Couldn't map DMA video frame";
        return {};
    }

    GlTextures textures = {};
    textures.owned = true;
    textures.count = GST_VIDEO_FRAME_N_PLANES(&frame);
    // int width = GST_VIDEO_FRAME_WIDTH(&frame);
    // int height = GST_VIDEO_FRAME_HEIGHT(&frame);
    Q_ASSERT(GST_VIDEO_FRAME_N_PLANES(&frame) == gst_buffer_n_memory(buffer));

    QOpenGLFunctions functions(glContext);
    functions.glGenTextures(int(textures.count), textures.names.data());

    // qDebug() << Qt::hex << "glGenTextures: glerror" << glGetError() << "egl error" << eglGetError();
    // qDebug() << "converting DMA buffer nPlanes=" << nPlanes << m_textures[0] << m_textures[1] << m_textures[2];

    for (int i = 0; i < int(textures.count); ++i) {
        auto offset = GST_VIDEO_FRAME_PLANE_OFFSET(&frame, i);
        auto stride = GST_VIDEO_FRAME_PLANE_STRIDE(&frame, i);
        int planeWidth = GST_VIDEO_FRAME_COMP_WIDTH(&frame, i);
        int planeHeight = GST_VIDEO_FRAME_COMP_HEIGHT(&frame, i);
        auto mem = gst_buffer_peek_memory(buffer, i);
        int fd = gst_dmabuf_memory_get_fd(mem);

        // qDebug() << "    plane" << i << "size" << width << height << "stride" << stride << "offset" << offset << "fd=" << fd;
        // ### do we need to open/close the fd?
        // ### can we convert several planes at once?
        // Get the correct DRM_FORMATs from the texture format in the description
        EGLAttrib const attribute_list[] = {
            EGL_WIDTH, planeWidth,
            EGL_HEIGHT, planeHeight,
            EGL_LINUX_DRM_FOURCC_EXT, fourccFromVideoInfo(&videoInfo, i),
            EGL_DMA_BUF_PLANE0_FD_EXT, fd,
            EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLAttrib)offset,
            EGL_DMA_BUF_PLANE0_PITCH_EXT, stride,
            EGL_NONE
        };
        EGLImage image = eglCreateImage(eglDisplay,
                                        EGL_NO_CONTEXT,
                                        EGL_LINUX_DMA_BUF_EXT,
                                        nullptr,
                                        attribute_list);
        if (image == EGL_NO_IMAGE_KHR) {
            qWarning() << "could not create EGL image for plane" << i << Qt::hex << eglGetError();
        }
        // qDebug() << Qt::hex << "eglCreateImage: glerror" << glGetError() << "egl error" << eglGetError();
        functions.glBindTexture(GL_TEXTURE_2D, textures.names[i]);
        // qDebug() << Qt::hex << "bind texture: glerror" << glGetError() << "egl error" << eglGetError();
        auto EGLImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglImageTargetTexture2D;
        EGLImageTargetTexture2D(GL_TEXTURE_2D, image);
        // qDebug() << Qt::hex << "glerror" << glGetError() << "egl error" << eglGetError();
        eglDestroyImage(eglDisplay, image);
    }
    gst_video_frame_unmap(&frame);

    return textures;
}
#endif
#endif

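// Maps the buffer to RHI textures. Depending on how the buffer's memory was
// negotiated, this either reuses the GL textures GStreamer already holds or imports
// the dmabuf planes via EGL, and wraps the result as QGstQVideoFrameTextures.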
std::unique_ptr<QVideoFrameTextures> QGstVideoBuffer::mapTextures(QRhi *rhi)
{
    if (!rhi)
        return {};

#if QT_CONFIG(gstreamer_gl)
    GlTextures textures = {};
    if (memoryFormat == QGstCaps::GLTexture) {
        textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo);
    }
#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
    else if (memoryFormat == QGstCaps::DMABuf) {
        textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay, eglImageTargetTexture2D);
    }
#endif
    if (textures.count > 0)
        return std::make_unique<QGstQVideoFrameTextures>(rhi, QSize{m_videoInfo.width, m_videoInfo.height},
                                                         m_frameFormat.pixelFormat(), textures);
#endif
    return {};
}

QT_END_NAMESPACE
