1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include <config.h>
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <string.h>
28#include <sys/mman.h>
29#include <sys/ioctl.h>
30#include <stdio.h>
31
32#ifdef HAVE_GUDEV
33#include <gudev/gudev.h>
34#endif
35
36#include "ext/videodev2.h"
37#include "gstamlv4l2object.h"
38
39#include "gst/gst-i18n-plugin.h"
40
41#include <gst/video/video.h>
42#include <gst/allocators/gstdmabuf.h>
43
44GST_DEBUG_CATEGORY_EXTERN(aml_v4l2_debug);
45#define GST_CAT_DEFAULT aml_v4l2_debug
46
47#define DEFAULT_PROP_DEVICE_NAME NULL
48#define DEFAULT_PROP_DEVICE_FD -1
49#define DEFAULT_PROP_FLAGS 0
50#define DEFAULT_PROP_TV_NORM 0
51#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
52
53#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
54#define DEFAULT_EXTRA_CAPTURE_BUF_SIZE 3
55
56#define V4L2_CONFIG_PARM_DECODE_CFGINFO (1 << 0)
57#define V4L2_CONFIG_PARM_DECODE_PSINFO (1 << 1)
58#define V4L2_CONFIG_PARM_DECODE_HDRINFO (1 << 2)
59#define V4L2_CONFIG_PARM_DECODE_CNTINFO (1 << 3)
60
61enum
62{
63 PROP_0,
64 V4L2_STD_OBJECT_PROPS,
65};
66
67/*
68 * common format / caps utilities:
69 */
70typedef enum
71{
72 GST_V4L2_RAW = 1 << 0,
73 GST_V4L2_CODEC = 1 << 1,
74 GST_V4L2_TRANSPORT = 1 << 2,
75 GST_V4L2_NO_PARSE = 1 << 3,
76 GST_V4L2_ALL = 0xffff
77} GstAmlV4L2FormatFlags;
78
79typedef struct
80{
81 guint32 format;
82 gboolean dimensions;
83 GstAmlV4L2FormatFlags flags;
84} GstAmlV4L2FormatDesc;
85
86static const GstAmlV4L2FormatDesc gst_aml_v4l2_formats[] = {
87 /* RGB formats */
88 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
102
103 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
104 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
105 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
109
110 /* Grey formats */
111 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
112 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
113 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
114 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
118 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
119
120 /* Palette formats */
121 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
122
123 /* Chrominance formats */
124 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
125
126 /* Luminance+Chrominance formats */
127 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
128 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
129 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
130 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
131 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
132 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
142 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
143 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
144 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
148
149 /* two planes -- one Y, one Cr + Cb interleaved */
150 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
151 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
153 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
154 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
155 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
156 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
157 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
158 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
162
163 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
164 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
165 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
166 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
167 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
168
169 /* compressed formats */
170 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
171 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
172 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
173 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
174 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
175 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
176 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
179 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
180 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
181 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
182 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
183 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
184 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
185 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
186 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
188 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
189 {V4L2_PIX_FMT_AV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
190
191 /* Vendor-specific formats */
192 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
193 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
194 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
195 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
196};
197
198#define GST_AML_V4L2_FORMAT_COUNT (G_N_ELEMENTS(gst_aml_v4l2_formats))
199
200static GSList *gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object);
201
202#define GST_TYPE_AML_V4L2_DEVICE_FLAGS (gst_aml_v4l2_device_get_type())
203static GType
204gst_aml_v4l2_device_get_type(void)
205{
206 static GType v4l2_device_type = 0;
207
208 if (v4l2_device_type == 0)
209 {
210 static const GFlagsValue values[] = {
211 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
212 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
213 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
214
215 {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
216 {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
217
218 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
219 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
220
221 {0, NULL, NULL}};
222
223 v4l2_device_type =
224 g_flags_register_static("GstAmlV4l2DeviceTypeFlags", values);
225 }
226
227 return v4l2_device_type;
228}
229
230GType gst_aml_v4l2_io_mode_get_type(void)
231{
232 static GType v4l2_io_mode = 0;
233
234 if (!v4l2_io_mode)
235 {
236 static const GEnumValue io_modes[] = {
237 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
238 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
239 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
240 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
241 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
242 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
243 "dmabuf-import"},
244
245 {0, NULL, NULL}};
246 v4l2_io_mode = g_enum_register_static("GstAmlV4l2IOMode", io_modes);
247 }
248 return v4l2_io_mode;
249}
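/* Note: the nicknames registered above ("auto", "rw", "mmap", "userptr",
 * "dmabuf", "dmabuf-import") are the string values accepted by the
 * io-mode / capture-io-mode / output-io-mode properties installed below,
 * e.g. capture-io-mode=dmabuf-import on a gst-launch-1.0 command line. */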
250
251void gst_aml_v4l2_object_install_properties_helper(GObjectClass *gobject_class,
252 const char *default_device)
253{
254 g_object_class_install_property(gobject_class, PROP_DEVICE,
255 g_param_spec_string("device", "Device", "Device location",
256 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
257 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
258 g_param_spec_string("device-name", "Device name",
259 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
260 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
261 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
262 g_param_spec_int("device-fd", "File descriptor",
263 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
264 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
265 g_object_class_install_property(gobject_class, PROP_FLAGS,
266 g_param_spec_flags("flags", "Flags", "Device type flags",
267 GST_TYPE_AML_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
268 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
269
270 /**
271 * GstV4l2Src:brightness:
272 *
273 * Picture brightness, or more precisely, the black level
274 */
275 g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
276 g_param_spec_int("brightness", "Brightness",
277 "Picture brightness, or more precisely, the black level", G_MININT,
278 G_MAXINT, 0,
279 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
280 /**
281 * GstV4l2Src:contrast:
282 *
283 * Picture contrast or luma gain
284 */
285 g_object_class_install_property(gobject_class, PROP_CONTRAST,
286 g_param_spec_int("contrast", "Contrast",
287 "Picture contrast or luma gain", G_MININT,
288 G_MAXINT, 0,
289 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
290 /**
291 * GstV4l2Src:saturation:
292 *
293 * Picture color saturation or chroma gain
294 */
295 g_object_class_install_property(gobject_class, PROP_SATURATION,
296 g_param_spec_int("saturation", "Saturation",
297 "Picture color saturation or chroma gain", G_MININT,
298 G_MAXINT, 0,
299 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
300 /**
301 * GstV4l2Src:hue:
302 *
303 * Hue or color balance
304 */
305 g_object_class_install_property(gobject_class, PROP_HUE,
306 g_param_spec_int("hue", "Hue",
307 "Hue or color balance", G_MININT,
308 G_MAXINT, 0,
309 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
310
311 /**
312 * GstV4l2Src:io-mode:
313 *
314 * IO Mode
315 */
316 g_object_class_install_property(gobject_class, PROP_IO_MODE,
317 g_param_spec_enum("io-mode", "IO mode",
318 "I/O mode",
319 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
320 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
321
322 /**
323 * GstV4l2Src:extra-controls:
324 *
325 * Additional v4l2 controls for the device. The controls are identified
326 * by the control name (lowercase with '_' for any non-alphanumeric
327 * characters).
328 *
329 * Since: 1.2
330 */
331 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
332 g_param_spec_boxed("extra-controls", "Extra Controls",
333 "Extra v4l2 controls (CIDs) for the device",
334 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
335
336 /**
337 * GstV4l2Src:pixel-aspect-ratio:
338 *
339 * The pixel aspect ratio of the device. This overwrites the pixel aspect
340 * ratio queried from the device.
341 *
342 * Since: 1.2
343 */
344 g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
345 g_param_spec_string("pixel-aspect-ratio", "Pixel Aspect Ratio",
346 "Overwrite the pixel aspect ratio of the device", "1/1",
347 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
348
349 /**
350 * GstV4l2Src:force-aspect-ratio:
351 *
352 * When enabled, the pixel aspect ratio queried from the device or set
353 * with the pixel-aspect-ratio property will be enforced.
354 *
355 * Since: 1.2
356 */
357 g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
358 g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
359 "When enabled, the pixel aspect ratio will be enforced", TRUE,
360 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
361}
362
363void gst_aml_v4l2_object_install_m2m_properties_helper(GObjectClass *gobject_class)
364{
365 g_object_class_install_property(gobject_class, PROP_DEVICE,
366 g_param_spec_string("device", "Device", "Device location",
367 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
368
369 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
370 g_param_spec_string("device-name", "Device name",
371 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
372 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
373
374 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
375 g_param_spec_int("device-fd", "File descriptor",
376 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
377 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
378
379 g_object_class_install_property(gobject_class, PROP_OUTPUT_IO_MODE,
380 g_param_spec_enum("output-io-mode", "Output IO mode",
381 "Output side I/O mode (matches sink pad)",
382 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
383 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
384
385 g_object_class_install_property(gobject_class, PROP_CAPTURE_IO_MODE,
386 g_param_spec_enum("capture-io-mode", "Capture IO mode",
387 "Capture I/O mode (matches src pad)",
388 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
389 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
390
391 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
392 g_param_spec_boxed("extra-controls", "Extra Controls",
393 "Extra v4l2 controls (CIDs) for the device",
394 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
395
396 g_object_class_install_property(gobject_class, PROP_DUMP_FRAME_LOCATION,
397 g_param_spec_string("dump-frame-location", "dump frame location",
398 "Location of the file to write decoder frames", NULL,
399 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
400}
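/* Illustrative only: with `dec` standing in for an instance of whatever
 * element calls this helper (the variable name is a placeholder, not part of
 * this file), the properties installed above are set the usual GObject way:
 *
 *   g_object_set (dec,
 *       "capture-io-mode", GST_V4L2_IO_DMABUF,
 *       "dump-frame-location", "/tmp/dec-frames.yuv",
 *       NULL);
 */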
401
402/* Support for 32-bit off_t: this wrapper casts off_t to gint64 */
403#ifdef HAVE_LIBV4L2
404#if SIZEOF_OFF_T < 8
405
406static gpointer
407v4l2_mmap_wrapper(gpointer start, gsize length, gint prot, gint flags, gint fd,
408 off_t offset)
409{
410 return v4l2_mmap(start, length, prot, flags, fd, (gint64)offset);
411}
412
413#define v4l2_mmap v4l2_mmap_wrapper
414
415#endif /* SIZEOF_OFF_T < 8 */
416#endif /* HAVE_LIBV4L2 */
417
418GstAmlV4l2Object *
419gst_aml_v4l2_object_new(GstElement *element,
420 GstObject *debug_object,
421 enum v4l2_buf_type type,
422 const char *default_device,
423 GstAmlV4l2GetInOutFunction get_in_out_func,
424 GstAmlV4l2SetInOutFunction set_in_out_func,
425 GstAmlV4l2UpdateFpsFunction update_fps_func)
426{
427 GstAmlV4l2Object *v4l2object;
428
429 /*
430 * some default values
431 */
432 v4l2object = g_new0(GstAmlV4l2Object, 1);
433
434 if ((V4L2_BUF_TYPE_VIDEO_CAPTURE == type || V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type))
435 {
436 const char *default_mode = getenv("GST_DEFAULT_V4L2_BUF_MODE");
437 GST_DEBUG("amlmodbuf GST_DEFAULT_V4L2_BUF_MODE:%s", default_mode);
438 if (default_mode)
439 {
440 if (strcmp(default_mode, "DMA_BUF_IMPORT") == 0)
441 v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
442 else if (strcmp(default_mode, "DMA_BUF") == 0)
443 v4l2object->req_mode = GST_V4L2_IO_DMABUF;
444 GST_DEBUG("amlmodbuf set default buf default_mode:%d", v4l2object->req_mode);
445 }
446 }
447
448 v4l2object->type = type;
449 v4l2object->formats = NULL;
450
451 v4l2object->element = element;
452 v4l2object->dbg_obj = debug_object;
453 v4l2object->get_in_out_func = get_in_out_func;
454 v4l2object->set_in_out_func = set_in_out_func;
455 v4l2object->update_fps_func = update_fps_func;
456
457 v4l2object->video_fd = -1;
458 v4l2object->active = FALSE;
459 v4l2object->videodev = g_strdup(default_device);
460
461 v4l2object->norms = NULL;
462 v4l2object->channels = NULL;
463 v4l2object->colors = NULL;
464
465 v4l2object->keep_aspect = TRUE;
466
467 v4l2object->n_v4l2_planes = 0;
468
469 v4l2object->no_initial_format = FALSE;
470
471 /* We now disable libv4l2 by default, but have an env to enable it. */
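    /* e.g. exporting GST_V4L2_USE_LIBV4L2 (any value) in the environment
     * routes the device calls below through libv4l2 instead of the raw
     * syscalls. */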
472#ifdef HAVE_LIBV4L2
473 if (g_getenv("GST_V4L2_USE_LIBV4L2"))
474 {
475 v4l2object->fd_open = v4l2_fd_open;
476 v4l2object->close = v4l2_close;
477 v4l2object->dup = v4l2_dup;
478 v4l2object->ioctl = v4l2_ioctl;
479 v4l2object->read = v4l2_read;
480 v4l2object->mmap = v4l2_mmap;
481 v4l2object->munmap = v4l2_munmap;
482 }
483 else
484#endif
485 {
486 v4l2object->fd_open = NULL;
487 v4l2object->close = close;
488 v4l2object->dup = dup;
489 v4l2object->ioctl = ioctl;
490 v4l2object->read = read;
491 v4l2object->mmap = mmap;
492 v4l2object->munmap = munmap;
493 }
494 v4l2object->poll = gst_poll_new(TRUE);
495 v4l2object->can_wait_event = FALSE;
496 v4l2object->can_poll_device = TRUE;
497 v4l2object->tvin_port = -1;
498
499 v4l2object->dumpframefile = NULL;
500
501 return v4l2object;
502}
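/* Illustrative construction sketch (the names `self`, `get_cb`, `set_cb` and
 * `update_fps_cb` are placeholders, not symbols from this file): a decoder
 * element embedding this object would typically do something like
 *
 *   self->v4l2capture = gst_aml_v4l2_object_new (GST_ELEMENT (self),
 *       GST_OBJECT (self), V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, NULL,
 *       get_cb, set_cb, update_fps_cb);
 *
 * For capture-type objects, exporting GST_DEFAULT_V4L2_BUF_MODE=DMA_BUF or
 * GST_DEFAULT_V4L2_BUF_MODE=DMA_BUF_IMPORT pre-selects the matching req_mode,
 * as handled at the top of this constructor. */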
503
504static gboolean gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object);
505
506void gst_aml_v4l2_object_destroy(GstAmlV4l2Object *v4l2object)
507{
508 g_return_if_fail(v4l2object != NULL);
509
510 g_free(v4l2object->videodev);
511
512 g_free(v4l2object->channel);
513
514 if (v4l2object->formats)
515 {
516 gst_aml_v4l2_object_clear_format_list(v4l2object);
517 }
518
519 if (v4l2object->probed_caps)
520 {
521 gst_caps_unref(v4l2object->probed_caps);
522 }
523
524 if (v4l2object->extra_controls)
525 {
526 gst_structure_free(v4l2object->extra_controls);
527 }
528
529 gst_poll_free(v4l2object->poll);
530
531 g_free(v4l2object->dumpframefile);
532
533 g_free(v4l2object);
534}
535
536static gboolean
537gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object)
538{
539 g_slist_foreach(v4l2object->formats, (GFunc)g_free, NULL);
540 g_slist_free(v4l2object->formats);
541 v4l2object->formats = NULL;
542
543 return TRUE;
544}
545
546static gint
547gst_aml_v4l2_object_prop_to_cid(guint prop_id)
548{
549 gint cid = -1;
550
551 switch (prop_id)
552 {
553 case PROP_BRIGHTNESS:
554 cid = V4L2_CID_BRIGHTNESS;
555 break;
556 case PROP_CONTRAST:
557 cid = V4L2_CID_CONTRAST;
558 break;
559 case PROP_SATURATION:
560 cid = V4L2_CID_SATURATION;
561 break;
562 case PROP_HUE:
563 cid = V4L2_CID_HUE;
564 break;
565 default:
566 GST_WARNING("unmapped property id: %d", prop_id);
567 }
568 return cid;
569}
570
571gboolean
572gst_aml_v4l2_object_set_property_helper(GstAmlV4l2Object *v4l2object,
573 guint prop_id, const GValue *value, GParamSpec *pspec)
574{
575 switch (prop_id)
576 {
577 case PROP_DEVICE:
578 g_free(v4l2object->videodev);
579 v4l2object->videodev = g_value_dup_string(value);
580 break;
581 case PROP_BRIGHTNESS:
582 case PROP_CONTRAST:
583 case PROP_SATURATION:
584 case PROP_HUE:
585 {
586 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
587
588 if (cid != -1)
589 {
590 if (GST_AML_V4L2_IS_OPEN(v4l2object))
591 {
592 gst_aml_v4l2_set_attribute(v4l2object, cid, g_value_get_int(value));
593 }
594 }
595 return TRUE;
596 }
597 break;
598 case PROP_IO_MODE:
599 v4l2object->req_mode = g_value_get_enum(value);
600 break;
601 case PROP_CAPTURE_IO_MODE:
602 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
603 v4l2object->req_mode = g_value_get_enum(value);
604 break;
605 case PROP_OUTPUT_IO_MODE:
606 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
607 v4l2object->req_mode = g_value_get_enum(value);
608 break;
609 case PROP_EXTRA_CONTROLS:
610 {
611 const GstStructure *s = gst_value_get_structure(value);
612
613 if (v4l2object->extra_controls)
614 gst_structure_free(v4l2object->extra_controls);
615
616 v4l2object->extra_controls = s ? gst_structure_copy(s) : NULL;
617 if (GST_AML_V4L2_IS_OPEN(v4l2object))
618 gst_aml_v4l2_set_controls(v4l2object, v4l2object->extra_controls);
619 break;
620 }
621 case PROP_PIXEL_ASPECT_RATIO:
622 if (v4l2object->par)
623 {
624 g_value_unset(v4l2object->par);
625 g_free(v4l2object->par);
626 }
627 v4l2object->par = g_new0(GValue, 1);
628 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
629 if (!g_value_transform(value, v4l2object->par))
630 {
631 g_warning("Could not transform string to aspect ratio");
632 gst_value_set_fraction(v4l2object->par, 1, 1);
633 }
634
635 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set PAR to %d/%d",
636 gst_value_get_fraction_numerator(v4l2object->par),
637 gst_value_get_fraction_denominator(v4l2object->par));
638 break;
639 case PROP_FORCE_ASPECT_RATIO:
640 v4l2object->keep_aspect = g_value_get_boolean(value);
641 break;
642 case PROP_DUMP_FRAME_LOCATION:
643 g_free(v4l2object->dumpframefile);
644 v4l2object->dumpframefile = g_value_dup_string(value);
645 break;
646 default:
647 return FALSE;
648 break;
649 }
650 return TRUE;
651}
652
653gboolean
654gst_aml_v4l2_object_get_property_helper(GstAmlV4l2Object *v4l2object,
655 guint prop_id, GValue *value, GParamSpec *pspec)
656{
657 switch (prop_id)
658 {
659 case PROP_DEVICE:
660 g_value_set_string(value, v4l2object->videodev);
661 break;
662 case PROP_DEVICE_NAME:
663 {
664 const guchar *name = NULL;
665
666 if (GST_AML_V4L2_IS_OPEN(v4l2object))
667 name = v4l2object->vcap.card;
668
669 g_value_set_string(value, (gchar *)name);
670 break;
671 }
672 case PROP_DEVICE_FD:
673 {
674 if (GST_AML_V4L2_IS_OPEN(v4l2object))
675 g_value_set_int(value, v4l2object->video_fd);
676 else
677 g_value_set_int(value, DEFAULT_PROP_DEVICE_FD);
678 break;
679 }
680 case PROP_FLAGS:
681 {
682 guint flags = 0;
683
684 if (GST_AML_V4L2_IS_OPEN(v4l2object))
685 {
686 flags |= v4l2object->device_caps &
687 (V4L2_CAP_VIDEO_CAPTURE |
688 V4L2_CAP_VIDEO_OUTPUT |
689 V4L2_CAP_VIDEO_OVERLAY |
690 V4L2_CAP_VBI_CAPTURE |
691 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
692
693 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
694 flags |= V4L2_CAP_VIDEO_CAPTURE;
695
696 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
697 flags |= V4L2_CAP_VIDEO_OUTPUT;
698 }
699 g_value_set_flags(value, flags);
700 break;
701 }
702 case PROP_BRIGHTNESS:
703 case PROP_CONTRAST:
704 case PROP_SATURATION:
705 case PROP_HUE:
706 {
707 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
708
709 if (cid != -1)
710 {
711 if (GST_AML_V4L2_IS_OPEN(v4l2object))
712 {
713 gint v;
714 if (gst_aml_v4l2_get_attribute(v4l2object, cid, &v))
715 {
716 g_value_set_int(value, v);
717 }
718 }
719 }
720 return TRUE;
721 }
722 break;
723 case PROP_IO_MODE:
724 g_value_set_enum(value, v4l2object->req_mode);
725 break;
726 case PROP_CAPTURE_IO_MODE:
727 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
728 g_value_set_enum(value, v4l2object->req_mode);
729 break;
730 case PROP_OUTPUT_IO_MODE:
731 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
732 g_value_set_enum(value, v4l2object->req_mode);
733 break;
734 case PROP_EXTRA_CONTROLS:
735 gst_value_set_structure(value, v4l2object->extra_controls);
736 break;
737 case PROP_PIXEL_ASPECT_RATIO:
738 if (v4l2object->par)
739 g_value_transform(v4l2object->par, value);
740 break;
741 case PROP_FORCE_ASPECT_RATIO:
742 g_value_set_boolean(value, v4l2object->keep_aspect);
743 break;
744 case PROP_DUMP_FRAME_LOCATION:
745 g_value_set_string(value, v4l2object->dumpframefile);
746 break;
747 default:
748 return FALSE;
749 break;
750 }
751 return TRUE;
752}
753
754static void
755gst_aml_v4l2_get_driver_min_buffers(GstAmlV4l2Object *v4l2object)
756{
757 struct v4l2_control control = {
758 0,
759 };
760
761 g_return_if_fail(GST_AML_V4L2_IS_OPEN(v4l2object));
762
763 if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
764 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
765 else
766 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
767
768 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0)
769 {
770 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
771 "driver requires a minimum of %d buffers", control.value);
772 v4l2object->min_buffers = control.value;
773 }
774 else
775 {
776 v4l2object->min_buffers = 0;
777 }
778}
779
780gboolean
781gst_aml_v4l2_object_open(GstAmlV4l2Object *v4l2object)
782{
783 if (!gst_aml_v4l2_open(v4l2object))
784 return FALSE;
785
786 return TRUE;
787}
788
789gboolean
790gst_aml_v4l2_object_open_shared(GstAmlV4l2Object *v4l2object, GstAmlV4l2Object *other)
791{
792 gboolean ret;
793
794 ret = gst_aml_v4l2_dup(v4l2object, other);
795
796 if (ret && !V4L2_TYPE_IS_OUTPUT(v4l2object->type))
797 {
798 gst_poll_fd_init(&v4l2object->pollfd);
799 v4l2object->pollfd.fd = v4l2object->video_fd;
800 gst_poll_add_fd(v4l2object->poll, &v4l2object->pollfd);
801 /* used for dequeue event */
802 gst_poll_fd_ctl_read(v4l2object->poll, &v4l2object->pollfd, TRUE);
803 gst_poll_fd_ctl_pri(v4l2object->poll, &v4l2object->pollfd, TRUE);
804 }
805
806 return ret;
807}
808
809gboolean
810gst_aml_v4l2_object_close(GstAmlV4l2Object *v4l2object)
811{
812 if (!gst_aml_v4l2_close(v4l2object))
813 return FALSE;
814
815 gst_caps_replace(&v4l2object->probed_caps, NULL);
816
817 /* reset our copy of the device caps */
818 v4l2object->device_caps = 0;
819
820 if (v4l2object->formats)
821 {
822 gst_aml_v4l2_object_clear_format_list(v4l2object);
823 }
824
825 if (v4l2object->par)
826 {
827 g_value_unset(v4l2object->par);
828 g_free(v4l2object->par);
829 v4l2object->par = NULL;
830 }
831
832 if (v4l2object->channel)
833 {
834 g_free(v4l2object->channel);
835 v4l2object->channel = NULL;
836 }
837
838 return TRUE;
839}
840
841static struct v4l2_fmtdesc *
842gst_aml_v4l2_object_get_format_from_fourcc(GstAmlV4l2Object *v4l2object,
843 guint32 fourcc)
844{
845 struct v4l2_fmtdesc *fmt;
846 GSList *walk;
847
848 if (fourcc == 0)
849 return NULL;
850
851 walk = gst_aml_v4l2_object_get_format_list(v4l2object);
852 while (walk)
853 {
854 fmt = (struct v4l2_fmtdesc *)walk->data;
855 if (fmt->pixelformat == fourcc)
856 return fmt;
857 /* special case for jpeg */
858 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
859 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
860 fmt->pixelformat == V4L2_PIX_FMT_PJPG)
861 {
862 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
863 fourcc == V4L2_PIX_FMT_PJPG)
864 {
865 return fmt;
866 }
867 }
868 walk = g_slist_next(walk);
869 }
870
871 return NULL;
872}
873
874/* completely made-up ranking; the values themselves are meaningless */
875/* These ranks MUST be X such that X<<15 fits on a signed int - see
876 the comment at the end of gst_aml_v4l2_object_format_get_rank. */
877#define YUV_BASE_RANK 1000
878#define JPEG_BASE_RANK 500
879#define DV_BASE_RANK 200
880#define RGB_BASE_RANK 100
881#define YUV_ODD_BASE_RANK 50
882#define RGB_ODD_BASE_RANK 25
883#define BAYER_BASE_RANK 15
884#define S910_BASE_RANK 10
885#define GREY_BASE_RANK 5
886#define PWC_BASE_RANK 1
887
888static gint
889gst_aml_v4l2_object_format_get_rank(const struct v4l2_fmtdesc *fmt)
890{
891 guint32 fourcc = fmt->pixelformat;
892 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
893 gint rank = 0;
894
895 switch (fourcc)
896 {
897 case V4L2_PIX_FMT_MJPEG:
898 case V4L2_PIX_FMT_PJPG:
899 rank = JPEG_BASE_RANK;
900 break;
901 case V4L2_PIX_FMT_JPEG:
902 rank = JPEG_BASE_RANK + 1;
903 break;
904 case V4L2_PIX_FMT_MPEG: /* MPEG */
905 rank = JPEG_BASE_RANK + 2;
906 break;
907
908 case V4L2_PIX_FMT_RGB332:
909 case V4L2_PIX_FMT_ARGB555:
910 case V4L2_PIX_FMT_XRGB555:
911 case V4L2_PIX_FMT_RGB555:
912 case V4L2_PIX_FMT_ARGB555X:
913 case V4L2_PIX_FMT_XRGB555X:
914 case V4L2_PIX_FMT_RGB555X:
915 case V4L2_PIX_FMT_BGR666:
916 case V4L2_PIX_FMT_RGB565:
917 case V4L2_PIX_FMT_RGB565X:
918 case V4L2_PIX_FMT_RGB444:
919 case V4L2_PIX_FMT_Y4:
920 case V4L2_PIX_FMT_Y6:
921 case V4L2_PIX_FMT_Y10:
922 case V4L2_PIX_FMT_Y12:
923 case V4L2_PIX_FMT_Y10BPACK:
924 case V4L2_PIX_FMT_YUV555:
925 case V4L2_PIX_FMT_YUV565:
926 case V4L2_PIX_FMT_YUV32:
927 case V4L2_PIX_FMT_NV12MT_16X16:
928 case V4L2_PIX_FMT_NV42:
929 case V4L2_PIX_FMT_H264_MVC:
930 rank = RGB_ODD_BASE_RANK;
931 break;
932
933 case V4L2_PIX_FMT_RGB24:
934 case V4L2_PIX_FMT_BGR24:
935 rank = RGB_BASE_RANK - 1;
936 break;
937
938 case V4L2_PIX_FMT_RGB32:
939 case V4L2_PIX_FMT_BGR32:
940 case V4L2_PIX_FMT_ABGR32:
941 case V4L2_PIX_FMT_XBGR32:
942 case V4L2_PIX_FMT_ARGB32:
943 case V4L2_PIX_FMT_XRGB32:
944 rank = RGB_BASE_RANK;
945 break;
946
947 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
948 rank = GREY_BASE_RANK;
949 break;
950
951 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
952 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
953 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
954 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
955 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
956 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
957 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
958 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
959 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
960 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
961 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
962 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
963 rank = YUV_ODD_BASE_RANK;
964 break;
965
966 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
967 rank = YUV_BASE_RANK + 3;
968 break;
969 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
970 rank = YUV_BASE_RANK + 2;
971 break;
972 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
973 case V4L2_PIX_FMT_YUV420M:
974 rank = YUV_BASE_RANK + 7;
975 break;
976 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
977 rank = YUV_BASE_RANK + 10;
978 break;
979 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
980 rank = YUV_BASE_RANK + 6;
981 break;
982 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
983 rank = YUV_BASE_RANK + 9;
984 break;
985 case V4L2_PIX_FMT_YUV444:
986 rank = YUV_BASE_RANK + 6;
987 break;
988 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
989 rank = YUV_BASE_RANK + 5;
990 break;
991 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
992 rank = YUV_BASE_RANK + 4;
993 break;
994 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
995 rank = YUV_BASE_RANK + 8;
996 break;
997
998 case V4L2_PIX_FMT_DV:
999 rank = DV_BASE_RANK;
1000 break;
1001
1002 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1003 rank = 0;
1004 break;
1005
1006 case V4L2_PIX_FMT_SBGGR8:
1007 case V4L2_PIX_FMT_SGBRG8:
1008 case V4L2_PIX_FMT_SGRBG8:
1009 case V4L2_PIX_FMT_SRGGB8:
1010 rank = BAYER_BASE_RANK;
1011 break;
1012
1013 case V4L2_PIX_FMT_SN9C10X:
1014 rank = S910_BASE_RANK;
1015 break;
1016
1017 case V4L2_PIX_FMT_PWC1:
1018 rank = PWC_BASE_RANK;
1019 break;
1020 case V4L2_PIX_FMT_PWC2:
1021 rank = PWC_BASE_RANK;
1022 break;
1023
1024 default:
1025 rank = 0;
1026 break;
1027 }
1028
1029 /* All ranks are below 1<<15 so a shift by 15
1030 * will a) make all non-emulated formats larger
1031 * than emulated and b) will not overflow
1032 */
1033 if (!emulated)
1034 rank <<= 15;
1035
1036 return rank;
1037}
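/* Worked example of the shift above: an emulated YUYV scores
 * YUV_BASE_RANK + 10 = 1010, while even the lowest-ranked native format,
 * e.g. PWC1 (PWC_BASE_RANK = 1), scores 1 << 15 = 32768 after the shift.
 * Every base rank stays below 1 << 15, so any non-emulated format sorts
 * ahead of any emulated one in format_cmp_func() below. */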
1038
1039static gint
1040format_cmp_func(gconstpointer a, gconstpointer b)
1041{
1042 const struct v4l2_fmtdesc *fa = a;
1043 const struct v4l2_fmtdesc *fb = b;
1044
1045 if (fa->pixelformat == fb->pixelformat)
1046 return 0;
1047
1048 return gst_aml_v4l2_object_format_get_rank(fb) -
1049 gst_aml_v4l2_object_format_get_rank(fa);
1050}
1051
1052/******************************************************
1053 * gst_aml_v4l2_object_fill_format_list():
1054 * create list of supported capture formats
1055 * return value: TRUE on success, FALSE on error
1056 ******************************************************/
1057static gboolean
1058gst_aml_v4l2_object_fill_format_list(GstAmlV4l2Object *v4l2object,
1059 enum v4l2_buf_type type)
1060{
1061 gint n;
1062 struct v4l2_fmtdesc *format;
1063
1064 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "getting src format enumerations");
1065
1066 /* format enumeration */
1067 for (n = 0;; n++)
1068 {
1069 format = g_new0(struct v4l2_fmtdesc, 1);
1070
1071 format->index = n;
1072 format->type = type;
1073
1074 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0)
1075 {
1076 if (errno == EINVAL)
1077 {
1078 g_free(format);
1079 break; /* end of enumeration */
1080 }
1081 else
1082 {
1083 goto failed;
1084 }
1085 }
1086
1087 GST_LOG_OBJECT(v4l2object->dbg_obj, "index: %u", format->index);
1088 GST_LOG_OBJECT(v4l2object->dbg_obj, "type: %d", format->type);
1089 GST_LOG_OBJECT(v4l2object->dbg_obj, "flags: %08x", format->flags);
1090 GST_LOG_OBJECT(v4l2object->dbg_obj, "description: '%s'",
1091 format->description);
1092 GST_LOG_OBJECT(v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1093 GST_FOURCC_ARGS(format->pixelformat));
1094
1095 /* sort formats according to our preference; we do this, because caps
1096 * are probed in the order the formats are in the list, and the order of
1097 * formats in the final probed caps matters for things like fixation */
1098 v4l2object->formats = g_slist_insert_sorted(v4l2object->formats, format,
1099 (GCompareFunc)format_cmp_func);
1100 }
1101
1102#ifndef GST_DISABLE_GST_DEBUG
1103 {
1104 GSList *l;
1105
1106 GST_INFO_OBJECT(v4l2object->dbg_obj, "got %d format(s):", n);
1107 for (l = v4l2object->formats; l != NULL; l = l->next)
1108 {
1109 format = l->data;
1110
1111 GST_INFO_OBJECT(v4l2object->dbg_obj,
1112 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS(format->pixelformat),
1113 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1114 }
1115 }
1116#endif
1117
1118 return TRUE;
1119
1120 /* ERRORS */
1121failed:
1122{
1123 g_free(format);
1124
1125 if (!v4l2object->element)
1126 return FALSE;
1127
1128 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
1129 (_("Failed to enumerate possible video formats device '%s' can work "
1130 "with"),
1131 v4l2object->videodev),
1132 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1133 n, v4l2object->videodev, errno, g_strerror(errno)));
1134
1135 return FALSE;
1136}
1137}
1138
1139/*
1140 * Get the list of supported capture formats, a list of
1141 * <code>struct v4l2_fmtdesc</code>.
1142 */
1143static GSList *
1144gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object)
1145{
1146 if (!v4l2object->formats)
1147 {
1148
1149 /* check usual way */
1150 gst_aml_v4l2_object_fill_format_list(v4l2object, v4l2object->type);
1151
1152 /* if our driver supports multi-planar
1153 * and formats are still empty, we can work around a driver bug
1154 * by also looking up formats as if our device did not support
1155 * multiplanar */
1156 if (!v4l2object->formats)
1157 {
1158 switch (v4l2object->type)
1159 {
1160 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1161 gst_aml_v4l2_object_fill_format_list(v4l2object,
1162 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1163 break;
1164
1165 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1166 gst_aml_v4l2_object_fill_format_list(v4l2object,
1167 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1168 break;
1169
1170 default:
1171 break;
1172 }
1173 }
1174 }
1175 return v4l2object->formats;
1176}
1177
1178static GstVideoFormat
1179gst_aml_v4l2_object_v4l2fourcc_to_video_format(guint32 fourcc)
1180{
1181 GstVideoFormat format;
1182
1183 switch (fourcc)
1184 {
1185 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1186 format = GST_VIDEO_FORMAT_GRAY8;
1187 break;
1188 case V4L2_PIX_FMT_Y16:
1189 format = GST_VIDEO_FORMAT_GRAY16_LE;
1190 break;
1191 case V4L2_PIX_FMT_Y16_BE:
1192 format = GST_VIDEO_FORMAT_GRAY16_BE;
1193 break;
1194 case V4L2_PIX_FMT_XRGB555:
1195 case V4L2_PIX_FMT_RGB555:
1196 format = GST_VIDEO_FORMAT_RGB15;
1197 break;
1198 case V4L2_PIX_FMT_XRGB555X:
1199 case V4L2_PIX_FMT_RGB555X:
1200 format = GST_VIDEO_FORMAT_BGR15;
1201 break;
1202 case V4L2_PIX_FMT_RGB565:
1203 format = GST_VIDEO_FORMAT_RGB16;
1204 break;
1205 case V4L2_PIX_FMT_RGB24:
1206 format = GST_VIDEO_FORMAT_RGB;
1207 break;
1208 case V4L2_PIX_FMT_BGR24:
1209 format = GST_VIDEO_FORMAT_BGR;
1210 break;
1211 case V4L2_PIX_FMT_XRGB32:
1212 case V4L2_PIX_FMT_RGB32:
1213 format = GST_VIDEO_FORMAT_xRGB;
1214 break;
1215 case V4L2_PIX_FMT_XBGR32:
1216 case V4L2_PIX_FMT_BGR32:
1217 format = GST_VIDEO_FORMAT_BGRx;
1218 break;
1219 case V4L2_PIX_FMT_ABGR32:
1220 format = GST_VIDEO_FORMAT_BGRA;
1221 break;
1222 case V4L2_PIX_FMT_ARGB32:
1223 format = GST_VIDEO_FORMAT_ARGB;
1224 break;
1225 case V4L2_PIX_FMT_NV12:
1226 case V4L2_PIX_FMT_NV12M:
1227 format = GST_VIDEO_FORMAT_NV12;
1228 break;
1229 case V4L2_PIX_FMT_NV12MT:
1230 format = GST_VIDEO_FORMAT_NV12_64Z32;
1231 break;
1232 case V4L2_PIX_FMT_NV21:
1233 case V4L2_PIX_FMT_NV21M:
1234 format = GST_VIDEO_FORMAT_NV21;
1235 break;
1236 case V4L2_PIX_FMT_YVU410:
1237 format = GST_VIDEO_FORMAT_YVU9;
1238 break;
1239 case V4L2_PIX_FMT_YUV410:
1240 format = GST_VIDEO_FORMAT_YUV9;
1241 break;
1242 case V4L2_PIX_FMT_YUV420:
1243 case V4L2_PIX_FMT_YUV420M:
1244 format = GST_VIDEO_FORMAT_I420;
1245 break;
1246 case V4L2_PIX_FMT_YUYV:
1247 format = GST_VIDEO_FORMAT_YUY2;
1248 break;
1249 case V4L2_PIX_FMT_YVU420:
1250 format = GST_VIDEO_FORMAT_YV12;
1251 break;
1252 case V4L2_PIX_FMT_UYVY:
1253 format = GST_VIDEO_FORMAT_UYVY;
1254 break;
1255 case V4L2_PIX_FMT_YUV411P:
1256 format = GST_VIDEO_FORMAT_Y41B;
1257 break;
1258 case V4L2_PIX_FMT_YUV422P:
1259 format = GST_VIDEO_FORMAT_Y42B;
1260 break;
1261 case V4L2_PIX_FMT_YVYU:
1262 format = GST_VIDEO_FORMAT_YVYU;
1263 break;
1264 case V4L2_PIX_FMT_NV16:
1265 case V4L2_PIX_FMT_NV16M:
1266 format = GST_VIDEO_FORMAT_NV16;
1267 break;
1268 case V4L2_PIX_FMT_NV61:
1269 case V4L2_PIX_FMT_NV61M:
1270 format = GST_VIDEO_FORMAT_NV61;
1271 break;
1272 case V4L2_PIX_FMT_NV24:
1273 format = GST_VIDEO_FORMAT_NV24;
1274 break;
1275 default:
1276 format = GST_VIDEO_FORMAT_UNKNOWN;
1277 break;
1278 }
1279
1280 return format;
1281}
1282
1283static gboolean
1284gst_amL_v4l2_object_v4l2fourcc_is_rgb(guint32 fourcc)
1285{
1286 gboolean ret = FALSE;
1287
1288 switch (fourcc)
1289 {
1290 case V4L2_PIX_FMT_XRGB555:
1291 case V4L2_PIX_FMT_RGB555:
1292 case V4L2_PIX_FMT_XRGB555X:
1293 case V4L2_PIX_FMT_RGB555X:
1294 case V4L2_PIX_FMT_RGB565:
1295 case V4L2_PIX_FMT_RGB24:
1296 case V4L2_PIX_FMT_BGR24:
1297 case V4L2_PIX_FMT_XRGB32:
1298 case V4L2_PIX_FMT_RGB32:
1299 case V4L2_PIX_FMT_XBGR32:
1300 case V4L2_PIX_FMT_BGR32:
1301 case V4L2_PIX_FMT_ABGR32:
1302 case V4L2_PIX_FMT_ARGB32:
1303 case V4L2_PIX_FMT_SBGGR8:
1304 case V4L2_PIX_FMT_SGBRG8:
1305 case V4L2_PIX_FMT_SGRBG8:
1306 case V4L2_PIX_FMT_SRGGB8:
1307 ret = TRUE;
1308 break;
1309 default:
1310 break;
1311 }
1312
1313 return ret;
1314}
1315
1316static GstStructure *
1317gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(guint32 fourcc)
1318{
1319 GstStructure *structure = NULL;
1320
1321 switch (fourcc)
1322 {
1323 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1324 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1325 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1326 structure = gst_structure_new_empty("image/jpeg");
1327 break;
1328 case V4L2_PIX_FMT_MPEG1:
1329 structure = gst_structure_new("video/mpeg",
1330 "mpegversion", G_TYPE_INT, 1, NULL);
1331 break;
1332 case V4L2_PIX_FMT_MPEG2:
1333 structure = gst_structure_new("video/mpeg",
1334 "mpegversion", G_TYPE_INT, 2, NULL);
1335 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1336 GST_DEBUG("aml set mpeg2 systemstream to false");
1337 break;
1338 case V4L2_PIX_FMT_MPEG4:
1339 case V4L2_PIX_FMT_XVID:
1340 structure = gst_structure_new("video/mpeg",
1341 "mpegversion", G_TYPE_INT, 4, "systemstream",
1342 G_TYPE_BOOLEAN, FALSE, NULL);
1343 break;
1344 case V4L2_PIX_FMT_FWHT:
1345 structure = gst_structure_new_empty("video/x-fwht");
1346 break;
1347 case V4L2_PIX_FMT_H263:
1348 structure = gst_structure_new("video/x-h263",
1349 "variant", G_TYPE_STRING, "itu", NULL);
1350 break;
1351 case V4L2_PIX_FMT_H264: /* H.264 */
1352 structure = gst_structure_new("video/x-h264",
1353 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1354 G_TYPE_STRING, "au", NULL);
1355 break;
1356 case V4L2_PIX_FMT_H264_NO_SC:
1357 structure = gst_structure_new("video/x-h264",
1358 "stream-format", G_TYPE_STRING, "avc", "alignment",
1359 G_TYPE_STRING, "au", NULL);
1360 break;
1361 case V4L2_PIX_FMT_HEVC: /* H.265 */
1362 structure = gst_structure_new("video/x-h265",
1363 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1364 G_TYPE_STRING, "au", NULL);
1365 break;
1366 case V4L2_PIX_FMT_VC1_ANNEX_G:
1367 case V4L2_PIX_FMT_VC1_ANNEX_L:
1368 structure = gst_structure_new("video/x-wmv",
1369 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1370 break;
1371 case V4L2_PIX_FMT_VP8:
1372 structure = gst_structure_new_empty("video/x-vp8");
1373 break;
1374 case V4L2_PIX_FMT_VP9:
1375 structure = gst_structure_new_empty("video/x-vp9");
1376 break;
1377 case V4L2_PIX_FMT_AV1:
1378 structure = gst_structure_new_empty("video/x-av1");
1379 break;
1380 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1381 case V4L2_PIX_FMT_Y16:
1382 case V4L2_PIX_FMT_Y16_BE:
1383 case V4L2_PIX_FMT_XRGB555:
1384 case V4L2_PIX_FMT_RGB555:
1385 case V4L2_PIX_FMT_XRGB555X:
1386 case V4L2_PIX_FMT_RGB555X:
1387 case V4L2_PIX_FMT_RGB565:
1388 case V4L2_PIX_FMT_RGB24:
1389 case V4L2_PIX_FMT_BGR24:
1390 case V4L2_PIX_FMT_RGB32:
1391 case V4L2_PIX_FMT_XRGB32:
1392 case V4L2_PIX_FMT_ARGB32:
1393 case V4L2_PIX_FMT_BGR32:
1394 case V4L2_PIX_FMT_XBGR32:
1395 case V4L2_PIX_FMT_ABGR32:
1396 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1397 case V4L2_PIX_FMT_NV12M:
1398 case V4L2_PIX_FMT_NV12MT:
1399 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1400 case V4L2_PIX_FMT_NV21M:
1401 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1402 case V4L2_PIX_FMT_NV16M:
1403 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1404 case V4L2_PIX_FMT_NV61M:
1405 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1406 case V4L2_PIX_FMT_YVU410:
1407 case V4L2_PIX_FMT_YUV410:
1408 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1409 case V4L2_PIX_FMT_YUV420M:
1410 case V4L2_PIX_FMT_YUYV:
1411 case V4L2_PIX_FMT_YVU420:
1412 case V4L2_PIX_FMT_UYVY:
1413 case V4L2_PIX_FMT_YUV422P:
1414 case V4L2_PIX_FMT_YVYU:
1415 case V4L2_PIX_FMT_YUV411P:
1416 {
1417 GstVideoFormat format;
1418 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fourcc);
1419 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1420 structure = gst_structure_new("video/x-raw",
1421 "format", G_TYPE_STRING, gst_video_format_to_string(format), NULL);
1422 break;
1423 }
1424 case V4L2_PIX_FMT_DV:
1425 structure =
1426 gst_structure_new("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1427 NULL);
1428 break;
1429 case V4L2_PIX_FMT_MPEG: /* MPEG */
1430 structure = gst_structure_new("video/mpegts",
1431 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1432 break;
1433 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1434 break;
1435 case V4L2_PIX_FMT_SBGGR8:
1436 case V4L2_PIX_FMT_SGBRG8:
1437 case V4L2_PIX_FMT_SGRBG8:
1438 case V4L2_PIX_FMT_SRGGB8:
1439 structure = gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
1440 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" : fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg"
1441 : fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg"
1442 :
1443 /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb",
1444 NULL);
1445 break;
1446 case V4L2_PIX_FMT_SN9C10X:
1447 structure = gst_structure_new_empty("video/x-sonix");
1448 break;
1449 case V4L2_PIX_FMT_PWC1:
1450 structure = gst_structure_new_empty("video/x-pwc1");
1451 break;
1452 case V4L2_PIX_FMT_PWC2:
1453 structure = gst_structure_new_empty("video/x-pwc2");
1454 break;
1455 case V4L2_PIX_FMT_RGB332:
1456 case V4L2_PIX_FMT_BGR666:
1457 case V4L2_PIX_FMT_ARGB555X:
1458 case V4L2_PIX_FMT_RGB565X:
1459 case V4L2_PIX_FMT_RGB444:
1460 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1461 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1462 case V4L2_PIX_FMT_Y4:
1463 case V4L2_PIX_FMT_Y6:
1464 case V4L2_PIX_FMT_Y10:
1465 case V4L2_PIX_FMT_Y12:
1466 case V4L2_PIX_FMT_Y10BPACK:
1467 case V4L2_PIX_FMT_YUV444:
1468 case V4L2_PIX_FMT_YUV555:
1469 case V4L2_PIX_FMT_YUV565:
1470 case V4L2_PIX_FMT_Y41P:
1471 case V4L2_PIX_FMT_YUV32:
1472 case V4L2_PIX_FMT_NV12MT_16X16:
1473 case V4L2_PIX_FMT_NV42:
1474 case V4L2_PIX_FMT_H264_MVC:
1475 default:
1476 GST_DEBUG("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1477 fourcc, GST_FOURCC_ARGS(fourcc));
1478 break;
1479 }
1480
1481 return structure;
1482}
1483
1484GstStructure *
1485gst_aml_v4l2_object_v4l2fourcc_to_structure(guint32 fourcc)
1486{
1487 GstStructure *template;
1488 gint i;
1489
1490 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(fourcc);
1491
1492 if (template == NULL)
1493 goto done;
1494
1495 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1496 {
1497 if (gst_aml_v4l2_formats[i].format != fourcc)
1498 continue;
1499
1500 if (gst_aml_v4l2_formats[i].dimensions)
1501 {
1502 gst_structure_set(template,
1503 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1504 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1505 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1506 }
1507 break;
1508 }
1509
1510done:
1511 return template;
1512}
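/* For instance, V4L2_PIX_FMT_NV12 (a raw format with dimensions == TRUE in
 * the table above) yields "video/x-raw, format=NV12" with full width, height
 * and framerate ranges appended, while V4L2_PIX_FMT_H264 (dimensions == FALSE)
 * stays a bare "video/x-h264, stream-format=byte-stream, alignment=au"
 * structure. */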
1513
1514static GstCaps *
1515gst_aml_v4l2_object_get_caps_helper(GstAmlV4L2FormatFlags flags)
1516{
1517 GstStructure *structure;
1518 GstCaps *caps;
1519 guint i;
1520
1521 caps = gst_caps_new_empty();
1522 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1523 {
1524
1525 if ((gst_aml_v4l2_formats[i].flags & flags) == 0)
1526 continue;
1527
1528 structure =
1529 gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(gst_aml_v4l2_formats[i].format);
1530
1531 if (structure)
1532 {
1533 GstStructure *alt_s = NULL;
1534
1535 if (gst_aml_v4l2_formats[i].dimensions)
1536 {
1537 gst_structure_set(structure,
1538 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1539 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1540 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1541 }
1542
1543 switch (gst_aml_v4l2_formats[i].format)
1544 {
1545 case V4L2_PIX_FMT_RGB32:
1546 alt_s = gst_structure_copy(structure);
1547 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1548 break;
1549 case V4L2_PIX_FMT_BGR32:
1550 alt_s = gst_structure_copy(structure);
1551 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1552 default:
1553 break;
1554 }
1555
1556 gst_caps_append_structure(caps, structure);
1557
1558 if (alt_s)
1559 gst_caps_append_structure(caps, alt_s);
1560 }
1561 }
1562
1563 return gst_caps_simplify(caps);
1564}
1565
1566GstCaps *
1567gst_aml_v4l2_object_get_all_caps(void)
1568{
1569 static GstCaps *caps = NULL;
1570
1571 if (g_once_init_enter(&caps))
1572 {
1573 GstCaps *all_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_ALL);
1574 GST_MINI_OBJECT_FLAG_SET(all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1575 g_once_init_leave(&caps, all_caps);
1576 }
1577
1578 return caps;
1579}
1580
1581GstCaps *
1582gst_aml_v4l2_object_get_raw_caps(void)
1583{
1584 static GstCaps *caps = NULL;
1585
1586 if (g_once_init_enter(&caps))
1587 {
1588 GstCaps *raw_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_RAW);
1589 GST_MINI_OBJECT_FLAG_SET(raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1590 g_once_init_leave(&caps, raw_caps);
1591 }
1592
1593 return caps;
1594}
1595
1596GstCaps *
1597gst_aml_v4l2_object_get_codec_caps(void)
1598{
1599 static GstCaps *caps = NULL;
1600
1601 if (g_once_init_enter(&caps))
1602 {
1603 GstCaps *codec_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_CODEC);
1604 GST_MINI_OBJECT_FLAG_SET(codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1605 g_once_init_leave(&caps, codec_caps);
1606 }
1607
1608 return caps;
1609}
1610
1611/* collect data for the given caps
1612 * @caps: given input caps
1613 * @format: location for the matching v4l2 format descriptor
1614 * @info: location for the parsed video info (width, height, framerate
1615 * and expected frame size, 0 if unknown)
1617 */
1618static gboolean
1619gst_aml_v4l2_object_get_caps_info(GstAmlV4l2Object *v4l2object, GstCaps *caps,
1620 struct v4l2_fmtdesc **format, GstVideoInfo *info)
1621{
1622 GstStructure *structure;
1623 guint32 fourcc = 0, fourcc_nc = 0;
1624 const gchar *mimetype;
1625 struct v4l2_fmtdesc *fmt = NULL;
1626
1627 structure = gst_caps_get_structure(caps, 0);
1628
1629 mimetype = gst_structure_get_name(structure);
1630
1631 if (!gst_video_info_from_caps(info, caps))
1632 goto invalid_format;
1633
1634 if (g_str_equal(mimetype, "video/x-raw"))
1635 {
1636 switch (GST_VIDEO_INFO_FORMAT(info))
1637 {
1638 case GST_VIDEO_FORMAT_I420:
1639 fourcc = V4L2_PIX_FMT_YUV420;
1640 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1641 break;
1642 case GST_VIDEO_FORMAT_YUY2:
1643 fourcc = V4L2_PIX_FMT_YUYV;
1644 break;
1645 case GST_VIDEO_FORMAT_UYVY:
1646 fourcc = V4L2_PIX_FMT_UYVY;
1647 break;
1648 case GST_VIDEO_FORMAT_YV12:
1649 fourcc = V4L2_PIX_FMT_YVU420;
1650 break;
1651 case GST_VIDEO_FORMAT_Y41B:
1652 fourcc = V4L2_PIX_FMT_YUV411P;
1653 break;
1654 case GST_VIDEO_FORMAT_Y42B:
1655 fourcc = V4L2_PIX_FMT_YUV422P;
1656 break;
1657 case GST_VIDEO_FORMAT_NV12:
1658 fourcc = V4L2_PIX_FMT_NV12;
1659 fourcc_nc = V4L2_PIX_FMT_NV12M;
1660 break;
1661 case GST_VIDEO_FORMAT_NV12_64Z32:
1662 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1663 break;
1664 case GST_VIDEO_FORMAT_NV21:
1665 fourcc = V4L2_PIX_FMT_NV21;
1666 fourcc_nc = V4L2_PIX_FMT_NV21M;
1667 break;
1668 case GST_VIDEO_FORMAT_NV16:
1669 fourcc = V4L2_PIX_FMT_NV16;
1670 fourcc_nc = V4L2_PIX_FMT_NV16M;
1671 break;
1672 case GST_VIDEO_FORMAT_NV61:
1673 fourcc = V4L2_PIX_FMT_NV61;
1674 fourcc_nc = V4L2_PIX_FMT_NV61M;
1675 break;
1676 case GST_VIDEO_FORMAT_NV24:
1677 fourcc = V4L2_PIX_FMT_NV24;
1678 break;
1679 case GST_VIDEO_FORMAT_YVYU:
1680 fourcc = V4L2_PIX_FMT_YVYU;
1681 break;
1682 case GST_VIDEO_FORMAT_RGB15:
1683 fourcc = V4L2_PIX_FMT_RGB555;
1684 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1685 break;
1686 case GST_VIDEO_FORMAT_RGB16:
1687 fourcc = V4L2_PIX_FMT_RGB565;
1688 break;
1689 case GST_VIDEO_FORMAT_RGB:
1690 fourcc = V4L2_PIX_FMT_RGB24;
1691 break;
1692 case GST_VIDEO_FORMAT_BGR:
1693 fourcc = V4L2_PIX_FMT_BGR24;
1694 break;
1695 case GST_VIDEO_FORMAT_xRGB:
1696 fourcc = V4L2_PIX_FMT_RGB32;
1697 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1698 break;
1699 case GST_VIDEO_FORMAT_ARGB:
1700 fourcc = V4L2_PIX_FMT_RGB32;
1701 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1702 break;
1703 case GST_VIDEO_FORMAT_BGRx:
1704 fourcc = V4L2_PIX_FMT_BGR32;
1705 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1706 break;
1707 case GST_VIDEO_FORMAT_BGRA:
1708 fourcc = V4L2_PIX_FMT_BGR32;
1709 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1710 break;
1711 case GST_VIDEO_FORMAT_GRAY8:
1712 fourcc = V4L2_PIX_FMT_GREY;
1713 break;
1714 case GST_VIDEO_FORMAT_GRAY16_LE:
1715 fourcc = V4L2_PIX_FMT_Y16;
1716 break;
1717 case GST_VIDEO_FORMAT_GRAY16_BE:
1718 fourcc = V4L2_PIX_FMT_Y16_BE;
1719 break;
1720 case GST_VIDEO_FORMAT_BGR15:
1721 fourcc = V4L2_PIX_FMT_RGB555X;
1722 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1723 break;
1724 default:
1725 break;
1726 }
1727 }
1728 else
1729 {
1730 if (g_str_equal(mimetype, "video/mpegts"))
1731 {
1732 fourcc = V4L2_PIX_FMT_MPEG;
1733 }
1734 else if (g_str_equal(mimetype, "video/x-dv"))
1735 {
1736 fourcc = V4L2_PIX_FMT_DV;
1737 }
1738 else if (g_str_equal(mimetype, "image/jpeg"))
1739 {
1740 fourcc = V4L2_PIX_FMT_JPEG;
1741 }
1742 else if (g_str_equal(mimetype, "video/mpeg"))
1743 {
1744 gint version;
1745 if (gst_structure_get_int(structure, "mpegversion", &version))
1746 {
1747 switch (version)
1748 {
1749 case 1:
1750 fourcc = V4L2_PIX_FMT_MPEG1;
1751 break;
1752 case 2:
1753 fourcc = V4L2_PIX_FMT_MPEG2;
1754 break;
1755 case 4:
1756 fourcc = V4L2_PIX_FMT_MPEG4;
1757 fourcc_nc = V4L2_PIX_FMT_XVID;
1758 break;
1759 default:
1760 break;
1761 }
1762 }
1763 }
1764 else if (g_str_equal(mimetype, "video/x-fwht"))
1765 {
1766 fourcc = V4L2_PIX_FMT_FWHT;
1767 }
1768 else if (g_str_equal(mimetype, "video/x-h263"))
1769 {
1770 fourcc = V4L2_PIX_FMT_H263;
1771 }
1772 else if (g_str_equal(mimetype, "video/x-h264"))
1773 {
1774 const gchar *stream_format =
1775 gst_structure_get_string(structure, "stream-format");
1776 if (g_str_equal(stream_format, "avc"))
1777 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1778 else
1779 fourcc = V4L2_PIX_FMT_H264;
1780 }
1781 else if (g_str_equal(mimetype, "video/x-h265"))
1782 {
1783 fourcc = V4L2_PIX_FMT_HEVC;
1784 }
1785 else if (g_str_equal(mimetype, "video/x-vp8"))
1786 {
1787 fourcc = V4L2_PIX_FMT_VP8;
1788 }
1789 else if (g_str_equal(mimetype, "video/x-vp9"))
1790 {
1791 fourcc = V4L2_PIX_FMT_VP9;
1792 }
1793 else if (g_str_equal(mimetype, "video/x-av1"))
1794 {
1795 fourcc = V4L2_PIX_FMT_AV1;
1796 }
1797 else if (g_str_equal(mimetype, "video/x-bayer"))
1798 {
1799 const gchar *format = gst_structure_get_string(structure, "format");
1800 if (format)
1801 {
1802 if (!g_ascii_strcasecmp(format, "bggr"))
1803 fourcc = V4L2_PIX_FMT_SBGGR8;
1804 else if (!g_ascii_strcasecmp(format, "gbrg"))
1805 fourcc = V4L2_PIX_FMT_SGBRG8;
1806 else if (!g_ascii_strcasecmp(format, "grbg"))
1807 fourcc = V4L2_PIX_FMT_SGRBG8;
1808 else if (!g_ascii_strcasecmp(format, "rggb"))
1809 fourcc = V4L2_PIX_FMT_SRGGB8;
1810 }
1811 }
1812 else if (g_str_equal(mimetype, "video/x-sonix"))
1813 {
1814 fourcc = V4L2_PIX_FMT_SN9C10X;
1815 }
1816 else if (g_str_equal(mimetype, "video/x-pwc1"))
1817 {
1818 fourcc = V4L2_PIX_FMT_PWC1;
1819 }
1820 else if (g_str_equal(mimetype, "video/x-pwc2"))
1821 {
1822 fourcc = V4L2_PIX_FMT_PWC2;
1823 }
1824 }
1825
1826 /* Prefer the non-contiguous if supported */
1827 v4l2object->prefered_non_contiguous = TRUE;
1828
1829 if (fourcc_nc)
1830 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc_nc);
1831 else if (fourcc == 0)
1832 goto unhandled_format;
1833
1834 if (fmt == NULL)
1835 {
1836 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc);
1837 v4l2object->prefered_non_contiguous = FALSE;
1838 }
1839
1840 if (fmt == NULL)
1841 goto unsupported_format;
1842
1843 *format = fmt;
1844
1845 return TRUE;
1846
1847 /* ERRORS */
1848invalid_format:
1849{
1850 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "invalid format");
1851 return FALSE;
1852}
1853unhandled_format:
1854{
1855 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "unhandled format");
1856 return FALSE;
1857}
1858unsupported_format:
1859{
1860 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "unsupported format");
1861 return FALSE;
1862}
1863}
1864
1865static gboolean
1866gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
1867 guint32 pixelformat, gint *width, gint *height);
1868
1869static void
1870gst_aml_v4l2_object_add_aspect_ratio(GstAmlV4l2Object *v4l2object, GstStructure *s)
1871{
1872 if (v4l2object->keep_aspect && v4l2object->par)
1873 gst_structure_set_value(s, "pixel-aspect-ratio", v4l2object->par);
1874}
1875
1876/* returns TRUE if the value was changed in place, otherwise FALSE */
1877static gboolean
1878gst_aml_v4l2src_value_simplify(GValue *val)
1879{
1880 /* simplify list of one value to one value */
1881 if (GST_VALUE_HOLDS_LIST(val) && gst_value_list_get_size(val) == 1)
1882 {
1883 const GValue *list_val;
1884 GValue new_val = G_VALUE_INIT;
1885
1886 list_val = gst_value_list_get_value(val, 0);
1887 g_value_init(&new_val, G_VALUE_TYPE(list_val));
1888 g_value_copy(list_val, &new_val);
1889 g_value_unset(val);
1890 *val = new_val;
1891 return TRUE;
1892 }
1893
1894 return FALSE;
1895}
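/* e.g. a GValue holding the one-element list { 30/1 } collapses to the plain
 * fraction 30/1, so later caps handling sees a simple value instead of a
 * list. */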
1896
1897static gboolean
1898gst_aml_v4l2_object_get_interlace_mode(enum v4l2_field field,
1899 GstVideoInterlaceMode *interlace_mode)
1900{
1901 switch (field)
1902 {
1903 case V4L2_FIELD_ANY:
1904 GST_ERROR("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
1905 /* fallthrough */
1906 case V4L2_FIELD_NONE:
1907 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1908 return TRUE;
1909 case V4L2_FIELD_INTERLACED:
1910 case V4L2_FIELD_INTERLACED_TB:
1911 case V4L2_FIELD_INTERLACED_BT:
1912 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1913 return TRUE;
1914 default:
1915 GST_ERROR("Unknown enum v4l2_field %d", field);
1916 return FALSE;
1917 }
1918}
1919
1920static gboolean
1921gst_aml_v4l2_object_get_colorspace(struct v4l2_format *fmt,
1922 GstVideoColorimetry *cinfo)
1923{
1924 gboolean is_rgb =
1925        gst_aml_v4l2_object_v4l2fourcc_is_rgb(fmt->fmt.pix.pixelformat);
1926 enum v4l2_colorspace colorspace;
1927 enum v4l2_quantization range;
1928 enum v4l2_ycbcr_encoding matrix;
1929 enum v4l2_xfer_func transfer;
1930 gboolean ret = TRUE;
1931
1932 if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
1933 {
1934 colorspace = fmt->fmt.pix_mp.colorspace;
1935 range = fmt->fmt.pix_mp.quantization;
1936 matrix = fmt->fmt.pix_mp.ycbcr_enc;
1937 transfer = fmt->fmt.pix_mp.xfer_func;
1938 }
1939 else
1940 {
1941 colorspace = fmt->fmt.pix.colorspace;
1942 range = fmt->fmt.pix.quantization;
1943 matrix = fmt->fmt.pix.ycbcr_enc;
1944 transfer = fmt->fmt.pix.xfer_func;
1945 }
1946    GST_DEBUG("colorspace:%d, range:%d, matrix:%d, transfer:%d", colorspace, range, matrix, transfer);
1947 GST_DEBUG("cinfo update 1 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
1948
1949 /* First step, set the defaults for each primaries */
1950 switch (colorspace)
1951 {
1952 case V4L2_COLORSPACE_SMPTE170M:
1953 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1954 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1955 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1956 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
1957 break;
1958 case V4L2_COLORSPACE_REC709:
1959 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1960 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
1961 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1962 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1963 break;
1964 case V4L2_COLORSPACE_SRGB:
1965 case V4L2_COLORSPACE_JPEG:
1966 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
1967 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1968 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
1969 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1970 break;
1971 case V4L2_COLORSPACE_OPRGB:
1972 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1973 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1974 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
1975 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
1976 break;
1977 case V4L2_COLORSPACE_BT2020:
1978 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1979 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
1980 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
1981 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
1982 break;
1983 case V4L2_COLORSPACE_SMPTE240M:
1984 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1985 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
1986 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
1987 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
1988 break;
1989 case V4L2_COLORSPACE_470_SYSTEM_M:
1990 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1991 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1992 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1993 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
1994 break;
1995 case V4L2_COLORSPACE_470_SYSTEM_BG:
1996 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1997 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1998 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1999 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
2000 break;
2001 case V4L2_COLORSPACE_RAW:
2002 /* Explicitly unknown */
2003 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2004 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2005 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2006 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
2007 break;
2008 default:
2009 GST_DEBUG("Unknown enum v4l2_colorspace %d", colorspace);
2010 ret = FALSE;
2011 break;
2012 }
2013    GST_DEBUG("cinfo update 2 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
2014
2015 if (!ret)
2016 goto done;
2017
2018 /* Second step, apply any custom variation */
2019 switch (range)
2020 {
2021 case V4L2_QUANTIZATION_FULL_RANGE:
2022 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2023 break;
2024 case V4L2_QUANTIZATION_LIM_RANGE:
2025 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2026 break;
2027 case V4L2_QUANTIZATION_DEFAULT:
2028 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2029 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2030 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2031 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601 || matrix == V4L2_YCBCR_ENC_XV709 || colorspace == V4L2_COLORSPACE_JPEG)
2032 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2033 else
2034 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2035 break;
2036 default:
2037 GST_WARNING("Unknown enum v4l2_quantization value %d", range);
2038 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2039 break;
2040 }
2041    GST_DEBUG("cinfo update 3 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
2042
2043 switch (matrix)
2044 {
2045 case V4L2_YCBCR_ENC_XV601:
2046 case V4L2_YCBCR_ENC_SYCC:
2047 GST_FIXME("XV601 and SYCC not defined, assuming 601");
2048 /* fallthrough */
2049 case V4L2_YCBCR_ENC_601:
2050 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2051 break;
2052 case V4L2_YCBCR_ENC_XV709:
2053 GST_FIXME("XV709 not defined, assuming 709");
2054 /* fallthrough */
2055 case V4L2_YCBCR_ENC_709:
2056 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2057 break;
2058 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2059 GST_FIXME("BT2020 with constant luma is not defined, assuming BT2020");
2060 /* fallthrough */
2061 case V4L2_YCBCR_ENC_BT2020:
2062 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2063 break;
2064 case V4L2_YCBCR_ENC_SMPTE240M:
2065 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2066 break;
2067 case V4L2_YCBCR_ENC_DEFAULT:
2068 /* nothing, just use defaults for colorspace */
2069 break;
2070 default:
2071 GST_WARNING("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2072 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2073 break;
2074 }
2075    GST_DEBUG("cinfo update 4 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
2076
2077 /* Set identity matrix for R'G'B' formats to avoid creating
2078 * confusion. This though is cosmetic as it's now properly ignored by
2079 * the video info API and videoconvert. */
2080 if (is_rgb)
2081 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2082
2083 switch (transfer)
2084 {
2085 case V4L2_XFER_FUNC_709:
2086 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2087 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2088 else
2089 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2090 break;
2091 case V4L2_XFER_FUNC_SRGB:
2092 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2093 break;
2094 case V4L2_XFER_FUNC_OPRGB:
2095 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2096 break;
2097 case V4L2_XFER_FUNC_SMPTE240M:
2098 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2099 break;
2100 case V4L2_XFER_FUNC_NONE:
2101 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2102 break;
2103 case V4L2_XFER_FUNC_DEFAULT:
2104 /* nothing, just use defaults for colorspace */
2105 break;
2106 default:
2107 GST_WARNING("Unknown enum v4l2_xfer_func value %d", transfer);
2108 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2109 break;
2110 }
2111    GST_DEBUG("cinfo update 5 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
2112
2113done:
2114 return ret;
2115}
2116
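/* Thin wrapper around VIDIOC_TRY_FMT. If the driver does not implement
 * TRY_FMT (ENOTTY) and the object is not active yet, fall back to probing
 * with VIDIOC_S_FMT instead. */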
2117static int
2118gst_aml_v4l2_object_try_fmt(GstAmlV4l2Object *v4l2object,
2119 struct v4l2_format *try_fmt)
2120{
2121 int fd = v4l2object->video_fd;
2122 struct v4l2_format fmt;
2123 int r;
2124
2125 memcpy(&fmt, try_fmt, sizeof(fmt));
2126 r = v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &fmt);
2127
2128 if (r < 0 && errno == ENOTTY)
2129 {
2130 /* The driver might not implement TRY_FMT, in which case we will try
2131 S_FMT to probe */
2132 if (GST_AML_V4L2_IS_ACTIVE(v4l2object))
2133 goto error;
2134
2135 memcpy(&fmt, try_fmt, sizeof(fmt));
2136 r = v4l2object->ioctl(fd, VIDIOC_S_FMT, &fmt);
2137 }
2138 memcpy(try_fmt, &fmt, sizeof(fmt));
2139
2140 return r;
2141
2142error:
2143 memcpy(try_fmt, &fmt, sizeof(fmt));
2144 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2145 "Unable to try format: %s", g_strerror(errno));
2146 return r;
2147}
2148
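/* Probe V4L2_FIELD_NONE and V4L2_FIELD_INTERLACED with TRY_FMT and record the
 * supported interlace modes in the caps structure. */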
2149static void
2150gst_aml_v4l2_object_add_interlace_mode(GstAmlV4l2Object *v4l2object,
2151 GstStructure *s, guint32 width, guint32 height, guint32 pixelformat)
2152{
2153 struct v4l2_format fmt;
2154 GValue interlace_formats = {
2155 0,
2156 };
2157 enum v4l2_field formats[] = {V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED};
2158 gsize i;
2159 GstVideoInterlaceMode interlace_mode, prev = -1;
2160
2161 if (!g_str_equal(gst_structure_get_name(s), "video/x-raw"))
2162 return;
2163
2164 if (v4l2object->never_interlaced)
2165 {
2166 gst_structure_set(s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2167 return;
2168 }
2169
2170 g_value_init(&interlace_formats, GST_TYPE_LIST);
2171
2172 /* Try twice - once for NONE, once for INTERLACED. */
2173 for (i = 0; i < G_N_ELEMENTS(formats); i++)
2174 {
2175 memset(&fmt, 0, sizeof(fmt));
2176 fmt.type = v4l2object->type;
2177 fmt.fmt.pix.width = width;
2178 fmt.fmt.pix.height = height;
2179 fmt.fmt.pix.pixelformat = pixelformat;
2180 fmt.fmt.pix.field = formats[i];
2181
2182 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0 &&
2183 gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode) && prev != interlace_mode)
2184 {
2185 GValue interlace_enum = {
2186 0,
2187 };
2188 const gchar *mode_string;
2189 g_value_init(&interlace_enum, G_TYPE_STRING);
2190 mode_string = gst_video_interlace_mode_to_string(interlace_mode);
2191 g_value_set_string(&interlace_enum, mode_string);
2192 gst_value_list_append_and_take_value(&interlace_formats,
2193 &interlace_enum);
2194 prev = interlace_mode;
2195 }
2196 }
2197
2198 if (gst_aml_v4l2src_value_simplify(&interlace_formats) || gst_value_list_get_size(&interlace_formats) > 0)
2199 gst_structure_take_value(s, "interlace-mode", &interlace_formats);
2200 else
2201 GST_WARNING_OBJECT(v4l2object, "Failed to determine interlace mode");
2202
2203 return;
2204}
2205
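/* Append the string form of the colorimetry to the GValue list unless an
 * equal entry is already present. */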
2206static void
2207gst_aml_v4l2_object_fill_colorimetry_list(GValue *list,
2208 GstVideoColorimetry *cinfo)
2209{
2210 GValue colorimetry = G_VALUE_INIT;
2211 guint size;
2212 guint i;
2213 gboolean found = FALSE;
2214
2215 g_value_init(&colorimetry, G_TYPE_STRING);
2216 g_value_take_string(&colorimetry, gst_video_colorimetry_to_string(cinfo));
2217    GST_DEBUG("fill colorimetry:%s into list", gst_video_colorimetry_to_string(cinfo));
2218
2219 /* only insert if no duplicate */
2220 size = gst_value_list_get_size(list);
2221 for (i = 0; i < size; i++)
2222 {
2223 const GValue *tmp;
2224
2225 tmp = gst_value_list_get_value(list, i);
2226 if (gst_value_compare(&colorimetry, tmp) == GST_VALUE_EQUAL)
2227 {
2228 found = TRUE;
2229 break;
2230 }
2231 }
2232
2233 if (!found)
2234 gst_value_list_append_and_take_value(list, &colorimetry);
2235 else
2236 g_value_unset(&colorimetry);
2237}
2238
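/* Build the "colorimetry" field for the caps structure: start from the
 * device's default colorspace reported by TRY_FMT, then probe the other V4L2
 * colorspaces, and finally append the two colorimetry combinations
 * (2:3:14:7 and 2:6:13:7) hard-coded below. */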
2239static void
2240gst_aml_v4l2_object_add_colorspace(GstAmlV4l2Object *v4l2object, GstStructure *s,
2241 guint32 width, guint32 height, guint32 pixelformat)
2242{
2243 struct v4l2_format fmt;
2244 GValue list = G_VALUE_INIT;
2245 GstVideoColorimetry cinfo;
2246 enum v4l2_colorspace req_cspace;
2247
2248 memset(&fmt, 0, sizeof(fmt));
2249 fmt.type = v4l2object->type;
2250 fmt.fmt.pix.width = width;
2251 fmt.fmt.pix.height = height;
2252 fmt.fmt.pix.pixelformat = pixelformat;
2253
2254 g_value_init(&list, GST_TYPE_LIST);
2255
2256 /* step 1: get device default colorspace and insert it first as
2257 * it should be the preferred one */
2258    GST_DEBUG("try for pixel format");
2259    if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2260 {
2261 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2262 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2263 }
2264
2265 /* step 2: probe all colorspace other than default
2266 * We don't probe all colorspace, range, matrix and transfer combination to
2267 * avoid ioctl flooding which could greatly increase initialization time
2268 * with low-speed devices (UVC...) */
2269 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2270 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++)
2271 {
2272        GST_DEBUG("try for pixel format in while loop: %d", req_cspace);
2273        /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2274 if (req_cspace == V4L2_COLORSPACE_BT878)
2275 continue;
2276
2277 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2278 fmt.fmt.pix_mp.colorspace = req_cspace;
2279 else
2280 fmt.fmt.pix.colorspace = req_cspace;
2281
2282 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2283 {
2284            GST_DEBUG("try for pixel format in while loop: %d tried ok", req_cspace);
2285            enum v4l2_colorspace colorspace;
2286
2287 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2288 colorspace = fmt.fmt.pix_mp.colorspace;
2289 else
2290 colorspace = fmt.fmt.pix.colorspace;
2291
2292 if (colorspace == req_cspace)
2293 {
2294 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2295 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2296 }
2297 }
2298 }
2299
2300    GST_DEBUG("deal: caps with colorimetry 2,3,14,7");
2301    cinfo.range = 2;
2302 cinfo.matrix = 3;
2303 cinfo.transfer = 14;
2304 cinfo.primaries = 7;
2305 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2306
2307    GST_DEBUG("deal: caps with colorimetry 2,6,13,7");
2308 cinfo.range = 2;
2309 cinfo.matrix = 6;
2310 cinfo.transfer = 13;
2311 cinfo.primaries = 7;
2312 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2313
2314    if (gst_value_list_get_size(&list) > 0)
2315 gst_structure_take_value(s, "colorimetry", &list);
2316 else
2317 g_value_unset(&list);
2318
2319 return;
2320}
2321
2322/* The frame interval enumeration code first appeared in Linux 2.6.19. */
2323static GstStructure *
2324gst_aml_v4l2_object_probe_caps_for_format_and_size(GstAmlV4l2Object *v4l2object,
2325 guint32 pixelformat,
2326 guint32 width, guint32 height, const GstStructure *template)
2327{
2328 gint fd = v4l2object->video_fd;
2329 struct v4l2_frmivalenum ival;
2330 guint32 num, denom;
2331 GstStructure *s;
2332 GValue rates = {
2333 0,
2334 };
2335
2336 memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
2337 ival.index = 0;
2338 ival.pixel_format = pixelformat;
2339 ival.width = width;
2340 ival.height = height;
2341
2342 GST_LOG_OBJECT(v4l2object->dbg_obj,
2343 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2344 GST_FOURCC_ARGS(pixelformat));
2345
2346 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2347 * fraction to get framerate */
2348 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2349 goto enum_frameintervals_failed;
2350
2351 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
2352 {
2353 GValue rate = {
2354 0,
2355 };
2356
2357 g_value_init(&rates, GST_TYPE_LIST);
2358 g_value_init(&rate, GST_TYPE_FRACTION);
2359
2360 do
2361 {
2362 num = ival.discrete.numerator;
2363 denom = ival.discrete.denominator;
2364
2365 if (num > G_MAXINT || denom > G_MAXINT)
2366 {
2367 /* let us hope we don't get here... */
2368 num >>= 1;
2369 denom >>= 1;
2370 }
2371
2372 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2373 denom, num);
2374
2375 /* swap to get the framerate */
2376 gst_value_set_fraction(&rate, denom, num);
2377 gst_value_list_append_value(&rates, &rate);
2378
2379 ival.index++;
2380 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2381 }
2382 else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
2383 {
2384 GValue min = {
2385 0,
2386 };
2387 GValue step = {
2388 0,
2389 };
2390 GValue max = {
2391 0,
2392 };
2393 gboolean added = FALSE;
2394 guint32 minnum, mindenom;
2395 guint32 maxnum, maxdenom;
2396
2397 g_value_init(&rates, GST_TYPE_LIST);
2398
2399 g_value_init(&min, GST_TYPE_FRACTION);
2400 g_value_init(&step, GST_TYPE_FRACTION);
2401 g_value_init(&max, GST_TYPE_FRACTION);
2402
2403 /* get the min */
2404 minnum = ival.stepwise.min.numerator;
2405 mindenom = ival.stepwise.min.denominator;
2406 if (minnum > G_MAXINT || mindenom > G_MAXINT)
2407 {
2408 minnum >>= 1;
2409 mindenom >>= 1;
2410 }
2411 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2412 minnum, mindenom);
2413 gst_value_set_fraction(&min, minnum, mindenom);
2414
2415 /* get the max */
2416 maxnum = ival.stepwise.max.numerator;
2417 maxdenom = ival.stepwise.max.denominator;
2418 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2419 {
2420 maxnum >>= 1;
2421 maxdenom >>= 1;
2422 }
2423
2424 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2425 maxnum, maxdenom);
2426 gst_value_set_fraction(&max, maxnum, maxdenom);
2427
2428 /* get the step */
2429 num = ival.stepwise.step.numerator;
2430 denom = ival.stepwise.step.denominator;
2431 if (num > G_MAXINT || denom > G_MAXINT)
2432 {
2433 num >>= 1;
2434 denom >>= 1;
2435 }
2436
2437 if (num == 0 || denom == 0)
2438 {
2439 /* in this case we have a wrong fraction or no step, set the step to max
2440 * so that we only add the min value in the loop below */
2441 num = maxnum;
2442 denom = maxdenom;
2443 }
2444
2445 /* since we only have gst_value_fraction_subtract and not add, negate the
2446 * numerator */
2447 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2448 num, denom);
2449 gst_value_set_fraction(&step, -num, denom);
2450
2451 while (gst_value_compare(&min, &max) != GST_VALUE_GREATER_THAN)
2452 {
2453 GValue rate = {
2454 0,
2455 };
2456
2457 num = gst_value_get_fraction_numerator(&min);
2458 denom = gst_value_get_fraction_denominator(&min);
2459 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2460 denom, num);
2461
2462 /* invert to get the framerate */
2463 g_value_init(&rate, GST_TYPE_FRACTION);
2464 gst_value_set_fraction(&rate, denom, num);
2465 gst_value_list_append_value(&rates, &rate);
2466 added = TRUE;
2467
2468 /* we're actually adding because step was negated above. This is because
2469 * there is no _add function... */
2470 if (!gst_value_fraction_subtract(&min, &min, &step))
2471 {
2472 GST_WARNING_OBJECT(v4l2object->dbg_obj, "could not step fraction!");
2473 break;
2474 }
2475 }
2476 if (!added)
2477 {
2478 /* no range was added, leave the default range from the template */
2479 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2480 "no range added, leaving default");
2481 g_value_unset(&rates);
2482 }
2483 }
2484 else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
2485 {
2486 guint32 maxnum, maxdenom;
2487
2488 g_value_init(&rates, GST_TYPE_FRACTION_RANGE);
2489
2490 num = ival.stepwise.min.numerator;
2491 denom = ival.stepwise.min.denominator;
2492 if (num > G_MAXINT || denom > G_MAXINT)
2493 {
2494 num >>= 1;
2495 denom >>= 1;
2496 }
2497
2498 maxnum = ival.stepwise.max.numerator;
2499 maxdenom = ival.stepwise.max.denominator;
2500 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2501 {
2502 maxnum >>= 1;
2503 maxdenom >>= 1;
2504 }
2505
2506 GST_LOG_OBJECT(v4l2object->dbg_obj,
2507 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2508 num);
2509
2510 gst_value_set_fraction_range_full(&rates, maxdenom, maxnum, denom, num);
2511 }
2512 else
2513 {
2514 goto unknown_type;
2515 }
2516
2517return_data:
2518 s = gst_structure_copy(template);
2519 gst_structure_set(s, "width", G_TYPE_INT, (gint)width,
2520 "height", G_TYPE_INT, (gint)height, NULL);
2521
2522 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, s);
2523
2524 if (!v4l2object->skip_try_fmt_probes)
2525 {
2526 gst_aml_v4l2_object_add_interlace_mode(v4l2object, s, width, height,
2527 pixelformat);
2528 gst_aml_v4l2_object_add_colorspace(v4l2object, s, width, height, pixelformat);
2529 }
2530
2531 if (G_IS_VALUE(&rates))
2532 {
2533 gst_aml_v4l2src_value_simplify(&rates);
2534 /* only change the framerate on the template when we have a valid probed new
2535 * value */
2536 gst_structure_take_value(s, "framerate", &rates);
2537 }
2538 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2539 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2540 {
2541 gst_structure_set(s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2542 1, NULL);
2543 }
2544 return s;
2545
2546 /* ERRORS */
2547enum_frameintervals_failed:
2548{
2549 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2550 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2551 GST_FOURCC_ARGS(pixelformat), width, height);
2552 goto return_data;
2553}
2554unknown_type:
2555{
2556 /* I don't see how this is actually an error, we ignore the format then */
2557 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2558 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2559 GST_FOURCC_ARGS(pixelformat), width, height, ival.type);
2560 return NULL;
2561}
2562}
2563
2564static gint
2565sort_by_frame_size(GstStructure *s1, GstStructure *s2)
2566{
2567 int w1, h1, w2, h2;
2568
2569 gst_structure_get_int(s1, "width", &w1);
2570 gst_structure_get_int(s1, "height", &h1);
2571 gst_structure_get_int(s2, "width", &w2);
2572 gst_structure_get_int(s2, "height", &h2);
2573
2574 /* I think it's safe to assume that this won't overflow for a while */
2575 return ((w2 * h2) - (w1 * h1));
2576}
2577
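/* Final per-structure fixups before appending to the caps: mark parsable
 * encoded formats on the output queue as "parsed", and duplicate the
 * structure with an alpha format (ARGB/BGRA) when the device reports an
 * alpha component. */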
2578static void
2579gst_aml_v4l2_object_update_and_append(GstAmlV4l2Object *v4l2object,
2580 guint32 format, GstCaps *caps, GstStructure *s)
2581{
2582 GstStructure *alt_s = NULL;
2583
2584    /* Encoded streams on the output buffer need to be parsed */
2585 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2586 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
2587 {
2588 gint i = 0;
2589
2590 for (; i < GST_AML_V4L2_FORMAT_COUNT; i++)
2591 {
2592 if (format == gst_aml_v4l2_formats[i].format &&
2593 gst_aml_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2594 !(gst_aml_v4l2_formats[i].flags & GST_V4L2_NO_PARSE))
2595 {
2596 gst_structure_set(s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2597 break;
2598 }
2599 }
2600 }
2601
2602 if (v4l2object->has_alpha_component &&
2603 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2604 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE))
2605 {
2606 switch (format)
2607 {
2608 case V4L2_PIX_FMT_RGB32:
2609 alt_s = gst_structure_copy(s);
2610 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2611 break;
2612 case V4L2_PIX_FMT_BGR32:
2613 alt_s = gst_structure_copy(s);
2614 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2615 break;
2616 default:
2617 break;
2618 }
2619 }
2620
2621 gst_caps_append_structure(caps, s);
2622
2623 if (alt_s)
2624 gst_caps_append_structure(caps, alt_s);
2625}
2626
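/* Enumerate the frame sizes (discrete, stepwise or continuous) supported for
 * the given pixelformat and turn them into caps structures, ordered from the
 * largest to the smallest resolution. Falls back to probing the nearest
 * min/max size when frame size enumeration fails or yields nothing. */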
2627static GstCaps *
2628gst_aml_v4l2_object_probe_caps_for_format(GstAmlV4l2Object *v4l2object,
2629 guint32 pixelformat, const GstStructure *template)
2630{
2631 GstCaps *ret = gst_caps_new_empty();
2632 GstStructure *tmp;
2633 gint fd = v4l2object->video_fd;
2634 struct v4l2_frmsizeenum size;
2635 GList *results = NULL;
2636 guint32 w, h;
2637
2638 if (pixelformat == GST_MAKE_FOURCC('M', 'P', 'E', 'G'))
2639 {
2640 gst_caps_append_structure(ret, gst_structure_copy(template));
2641 return ret;
2642 }
2643
2644 memset(&size, 0, sizeof(struct v4l2_frmsizeenum));
2645 size.index = 0;
2646 size.pixel_format = pixelformat;
2647
2648 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2649 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2650 GST_FOURCC_ARGS(pixelformat));
2651
2652 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2653 goto enum_framesizes_failed;
2654
2655 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE)
2656 {
2657 do
2658 {
2659 GST_LOG_OBJECT(v4l2object->dbg_obj, "got discrete frame size %dx%d",
2660 size.discrete.width, size.discrete.height);
2661
2662 w = MIN(size.discrete.width, G_MAXINT);
2663 h = MIN(size.discrete.height, G_MAXINT);
2664
2665 if (w && h)
2666 {
2667 tmp =
2668 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2669 pixelformat, w, h, template);
2670
2671 if (tmp)
2672 results = g_list_prepend(results, tmp);
2673 }
2674
2675 size.index++;
2676 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2677 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2678 "done iterating discrete frame sizes");
2679 }
2680 else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
2681 {
2682 guint32 maxw, maxh, step_w, step_h;
2683
2684 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have stepwise frame sizes:");
2685 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2686 size.stepwise.min_width);
2687 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2688 size.stepwise.min_height);
2689 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2690 size.stepwise.max_width);
2691        GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2692 size.stepwise.max_height);
2693 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step width: %d",
2694 size.stepwise.step_width);
2695 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step height: %d",
2696 size.stepwise.step_height);
2697
2698 w = MAX(size.stepwise.min_width, 1);
2699 h = MAX(size.stepwise.min_height, 1);
2700 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2701 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2702
2703 step_w = MAX(size.stepwise.step_width, 1);
2704 step_h = MAX(size.stepwise.step_height, 1);
2705
2706 /* FIXME: check for sanity and that min/max are multiples of the steps */
2707
2708 /* we only query details for the max width/height since it's likely the
2709 * most restricted if there are any resolution-dependent restrictions */
2710 tmp = gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2711 pixelformat, maxw, maxh, template);
2712
2713 if (tmp)
2714 {
2715 GValue step_range = G_VALUE_INIT;
2716
2717 g_value_init(&step_range, GST_TYPE_INT_RANGE);
2718 gst_value_set_int_range_step(&step_range, w, maxw, step_w);
2719 gst_structure_set_value(tmp, "width", &step_range);
2720
2721 gst_value_set_int_range_step(&step_range, h, maxh, step_h);
2722 gst_structure_take_value(tmp, "height", &step_range);
2723
2724 /* no point using the results list here, since there's only one struct */
2725 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2726 }
2727 }
2728 else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)
2729 {
2730 guint32 maxw, maxh;
2731
2732 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have continuous frame sizes:");
2733 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2734 size.stepwise.min_width);
2735 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2736 size.stepwise.min_height);
2737 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2738 size.stepwise.max_width);
2739        GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2740 size.stepwise.max_height);
2741
2742 w = MAX(size.stepwise.min_width, 1);
2743 h = MAX(size.stepwise.min_height, 1);
2744 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2745 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2746
2747 tmp =
2748 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object, pixelformat,
2749 w, h, template);
2750 if (tmp)
2751 {
2752 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, (gint)w,
2753 (gint)maxw, "height", GST_TYPE_INT_RANGE, (gint)h, (gint)maxh,
2754 NULL);
2755
2756 /* no point using the results list here, since there's only one struct */
2757 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2758 }
2759 }
2760 else
2761 {
2762 goto unknown_type;
2763 }
2764
2765 /* we use an intermediary list to store and then sort the results of the
2766 * probing because we can't make any assumptions about the order in which
2767 * the driver will give us the sizes, but we want the final caps to contain
2768 * the results starting with the highest resolution and having the lowest
2769 * resolution last, since order in caps matters for things like fixation. */
2770 results = g_list_sort(results, (GCompareFunc)sort_by_frame_size);
2771 while (results != NULL)
2772 {
2773 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret,
2774 results->data);
2775 results = g_list_delete_link(results, results);
2776 }
2777
2778 if (gst_caps_is_empty(ret))
2779 goto enum_framesizes_no_results;
2780
2781 return ret;
2782
2783 /* ERRORS */
2784enum_framesizes_failed:
2785{
2786 /* I don't see how this is actually an error */
2787 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2788 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2789 " (%s)",
2790 GST_FOURCC_ARGS(pixelformat), g_strerror(errno));
2791 goto default_frame_sizes;
2792}
2793enum_framesizes_no_results:
2794{
2795 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2796 * question doesn't actually support it yet */
2797 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2798 "No results for pixelformat %" GST_FOURCC_FORMAT
2799 " enumerating frame sizes, trying fallback",
2800 GST_FOURCC_ARGS(pixelformat));
2801 goto default_frame_sizes;
2802}
2803unknown_type:
2804{
2805 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2806 "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
2807 ": %u",
2808 GST_FOURCC_ARGS(pixelformat), size.type);
2809 goto default_frame_sizes;
2810}
2811
2812default_frame_sizes:
2813{
2814 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2815
2816 /* This code is for Linux < 2.6.19 */
2817 min_w = min_h = 1;
2818 max_w = max_h = GST_AML_V4L2_MAX_SIZE;
2819 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &min_w,
2820 &min_h))
2821 {
2822 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2823 "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2824 }
2825 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &max_w,
2826 &max_h))
2827 {
2828 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2829 "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2830 }
2831
2832 tmp = gst_structure_copy(template);
2833 if (fix_num)
2834 {
2835 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2836 fix_denom, NULL);
2837 }
2838 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2839 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2840 {
2841 /* if norm can't be used, copy the template framerate */
2842 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2843 G_MAXINT, 1, NULL);
2844 }
2845
2846 if (min_w == max_w)
2847 gst_structure_set(tmp, "width", G_TYPE_INT, max_w, NULL);
2848 else
2849 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2850
2851 if (min_h == max_h)
2852 gst_structure_set(tmp, "height", G_TYPE_INT, max_h, NULL);
2853 else
2854 gst_structure_set(tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2855
2856 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, tmp);
2857
2858 if (!v4l2object->skip_try_fmt_probes)
2859 {
2860 /* We could consider setting interlace mode from min and max. */
2861 gst_aml_v4l2_object_add_interlace_mode(v4l2object, tmp, max_w, max_h,
2862 pixelformat);
2863 /* We could consider to check colorspace for min too, in case it depends on
2864 * the size. But in this case, min and max could not be enough */
2865 gst_aml_v4l2_object_add_colorspace(v4l2object, tmp, max_w, max_h,
2866 pixelformat);
2867 }
2868
2869 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2870 return ret;
2871}
2872}
2873
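/* Ask the driver, via TRY_FMT, for the closest supported size to the
 * requested width/height for the given pixelformat. */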
2874static gboolean
2875gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
2876 guint32 pixelformat, gint *width, gint *height)
2877{
2878 struct v4l2_format fmt;
2879 gboolean ret = FALSE;
2880 GstVideoInterlaceMode interlace_mode;
2881
2882 g_return_val_if_fail(width != NULL, FALSE);
2883 g_return_val_if_fail(height != NULL, FALSE);
2884
2885 GST_LOG_OBJECT(v4l2object->dbg_obj,
2886 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2887 *width, *height, GST_FOURCC_ARGS(pixelformat));
2888
2889 memset(&fmt, 0, sizeof(struct v4l2_format));
2890
2891 /* get size delimiters */
2892 memset(&fmt, 0, sizeof(fmt));
2893 fmt.type = v4l2object->type;
2894 fmt.fmt.pix.width = *width;
2895 fmt.fmt.pix.height = *height;
2896 fmt.fmt.pix.pixelformat = pixelformat;
2897 fmt.fmt.pix.field = V4L2_FIELD_ANY;
2898
2899 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) < 0)
2900 goto error;
2901
2902 GST_LOG_OBJECT(v4l2object->dbg_obj,
2903 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2904
2905 *width = fmt.fmt.pix.width;
2906 *height = fmt.fmt.pix.height;
2907
2908 if (!gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode))
2909 {
2910 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2911 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2912 GST_FOURCC_ARGS(pixelformat), *width, *height, fmt.fmt.pix.field);
2913 goto error;
2914 }
2915
2916 ret = TRUE;
2917
2918error:
2919 if (!ret)
2920 {
2921 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2922 "Unable to try format: %s", g_strerror(errno));
2923 }
2924
2925 return ret;
2926}
2927
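/* DMABUF export is usable only when the format is not emulated by libv4l2
 * and VIDIOC_EXPBUF is implemented (anything but ENOTTY counts as support). */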
2928static gboolean
2929gst_aml_v4l2_object_is_dmabuf_supported(GstAmlV4l2Object *v4l2object)
2930{
2931 gboolean ret = TRUE;
2932 struct v4l2_exportbuffer expbuf = {
2933 .type = v4l2object->type,
2934 .index = -1,
2935 .plane = -1,
2936 .flags = O_CLOEXEC | O_RDWR,
2937 };
2938
2939 if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED)
2940 {
2941 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2942 "libv4l2 converter detected, disabling DMABuf");
2943 ret = FALSE;
2944 }
2945
2946 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
2947 v4l2object->ioctl(v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
2948 if (errno == ENOTTY)
2949 ret = FALSE;
2950
2951 return ret;
2952}
2953
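/* Select the IO mode (RW, MMAP or DMABUF) from the device capabilities and
 * the requested mode, then create the buffer pool and mark the object
 * active. */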
2954static gboolean
2955gst_aml_v4l2_object_setup_pool(GstAmlV4l2Object *v4l2object, GstCaps *caps)
2956{
2957 GstAmlV4l2IOMode mode;
2958
2959 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "initializing the %s system",
2960 V4L2_TYPE_IS_OUTPUT(v4l2object->type) ? "output" : "capture");
2961
2962 GST_AML_V4L2_CHECK_OPEN(v4l2object);
2963 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
2964
2965 /* find transport */
2966 mode = v4l2object->req_mode;
2967
2968 if (v4l2object->device_caps & V4L2_CAP_READWRITE)
2969 {
2970 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2971 mode = GST_V4L2_IO_RW;
2972 }
2973 else if (v4l2object->req_mode == GST_V4L2_IO_RW)
2974 goto method_not_supported;
2975
2976 if (v4l2object->device_caps & V4L2_CAP_STREAMING)
2977 {
2978 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2979 {
2980 if (!V4L2_TYPE_IS_OUTPUT(v4l2object->type) &&
2981 gst_aml_v4l2_object_is_dmabuf_supported(v4l2object))
2982 {
2983 mode = GST_V4L2_IO_DMABUF;
2984 }
2985 else
2986 {
2987 mode = GST_V4L2_IO_MMAP;
2988 }
2989 }
2990 }
2991 else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
2992 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
2993 goto method_not_supported;
2994
2995 /* if still no transport selected, error out */
2996 if (mode == GST_V4L2_IO_AUTO)
2997 goto no_supported_capture_method;
2998
2999 GST_INFO_OBJECT(v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
3000 v4l2object->mode = mode;
3001
3002 /* If min_buffers is not set, the driver either does not support the control or
3003 it has not been asked yet via propose_allocation/decide_allocation. */
3004 if (!v4l2object->min_buffers)
3005 gst_aml_v4l2_get_driver_min_buffers(v4l2object);
3006
3007 /* Map the buffers */
3008 GST_LOG_OBJECT(v4l2object->dbg_obj, "initiating buffer pool");
3009
3010 if (!(v4l2object->pool = gst_aml_v4l2_buffer_pool_new(v4l2object, caps)))
3011 goto buffer_pool_new_failed;
3012
3013 GST_AML_V4L2_SET_ACTIVE(v4l2object);
3014
3015 return TRUE;
3016
3017 /* ERRORS */
3018buffer_pool_new_failed:
3019{
3020 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3021 (_("Could not map buffers from device '%s'"),
3022 v4l2object->videodev),
3023 ("Failed to create buffer pool: %s", g_strerror(errno)));
3024 return FALSE;
3025}
3026method_not_supported:
3027{
3028 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3029 (_("The driver of device '%s' does not support the IO method %d"),
3030 v4l2object->videodev, mode),
3031 (NULL));
3032 return FALSE;
3033}
3034no_supported_capture_method:
3035{
3036 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3037 (_("The driver of device '%s' does not support any known IO "
3038 "method."),
3039 v4l2object->videodev),
3040 (NULL));
3041 return FALSE;
3042}
3043}
3044
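/* Store the stride of the given plane in the video info; for tiled formats
 * the stride is encoded as a tile count in each direction. */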
3045static void
3046gst_aml_v4l2_object_set_stride(GstVideoInfo *info, GstVideoAlignment *align,
3047 gint plane, gint stride)
3048{
3049 const GstVideoFormatInfo *finfo = info->finfo;
3050
3051 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3052 {
3053 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3054
3055 ws = GST_VIDEO_FORMAT_INFO_TILE_WS(finfo);
3056 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3057 tile_height = 1 << hs;
3058
3059 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, plane,
3060 info->height + align->padding_top + align->padding_bottom);
3061 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3062
3063 x_tiles = stride >> ws;
3064 y_tiles = padded_height >> hs;
3065 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles);
3066 }
3067 else
3068 {
3069 info->stride[plane] = stride;
3070 }
3071}
3072
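/* When the driver exposes a single contiguous v4l2 plane, derive the
 * per-plane strides and offsets of the GstVideoInfo from the base stride. */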
3073static void
3074gst_aml_v4l2_object_extrapolate_info(GstAmlV4l2Object *v4l2object,
3075 GstVideoInfo *info, GstVideoAlignment *align, gint stride)
3076{
3077 const GstVideoFormatInfo *finfo = info->finfo;
3078 gint i, estride, padded_height;
3079 gsize offs = 0;
3080
3081 g_return_if_fail(v4l2object->n_v4l2_planes == 1);
3082
3083 padded_height = info->height + align->padding_top + align->padding_bottom;
3084
3085 for (i = 0; i < finfo->n_planes; i++)
3086 {
3087 estride = gst_aml_v4l2_object_extrapolate_stride(finfo, i, stride);
3088
3089 gst_aml_v4l2_object_set_stride(info, align, i, estride);
3090
3091 info->offset[i] = offs;
3092 offs += estride *
3093 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, padded_height);
3094
3095 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3096 "Extrapolated for plane %d with base stride %d: "
3097 "stride %d, offset %" G_GSIZE_FORMAT,
3098 i, stride, info->stride[i],
3099 info->offset[i]);
3100 }
3101
3102    /* Update the image size according to the amount of data we are going to
3103     * read/write. This works around bugs in drivers where the sizeimage provided
3104     * by TRY/S_FMT represents the buffer length (maximum size) rather than the
3105     * expected bytesused (buffer size). */
3106 if (offs < info->size)
3107 info->size = offs;
3108}
3109
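/* Record the format negotiated with the driver: compute padding, strides and
 * offsets, decide whether GstVideoMeta is needed, and cache the frame
 * duration derived from the framerate. */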
3110static void
3111gst_aml_v4l2_object_save_format(GstAmlV4l2Object *v4l2object,
3112 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3113 GstVideoInfo *info, GstVideoAlignment *align)
3114{
3115 const GstVideoFormatInfo *finfo = info->finfo;
3116 gboolean standard_stride = TRUE;
3117 gint stride, pstride, padded_width, padded_height, i;
3118
3119 if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_ENCODED)
3120 {
3121 v4l2object->n_v4l2_planes = 1;
3122 info->size = format->fmt.pix.sizeimage;
3123 goto store_info;
3124 }
3125
3126 /* adjust right padding */
3127 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3128 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3129 else
3130 stride = format->fmt.pix.bytesperline;
3131
3132 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE(finfo, 0);
3133 if (pstride)
3134 {
3135 padded_width = stride / pstride;
3136 }
3137 else
3138 {
3139 /* pstride can be 0 for complex formats */
3140 GST_WARNING_OBJECT(v4l2object->element,
3141 "format %s has a pstride of 0, cannot compute padded with",
3142 gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(info)));
3143 padded_width = stride;
3144 }
3145
3146 if (padded_width < format->fmt.pix.width)
3147 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3148 "Driver bug detected, stride (%d) is too small for the width (%d)",
3149 padded_width, format->fmt.pix.width);
3150
3151 align->padding_right = padded_width - info->width - align->padding_left;
3152
3153 /* adjust bottom padding */
3154 padded_height = format->fmt.pix.height;
3155
3156 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3157 {
3158 guint hs, tile_height;
3159
3160 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3161 tile_height = 1 << hs;
3162
3163 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3164 }
3165
3166 align->padding_bottom = padded_height - info->height - align->padding_top;
3167
3168 /* setup the strides and offset */
3169 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3170 {
3171 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3172
3173 /* figure out the frame layout */
3174 v4l2object->n_v4l2_planes = MAX(1, pix_mp->num_planes);
3175 info->size = 0;
3176 for (i = 0; i < v4l2object->n_v4l2_planes; i++)
3177 {
3178 stride = pix_mp->plane_fmt[i].bytesperline;
3179
3180 if (info->stride[i] != stride)
3181 standard_stride = FALSE;
3182
3183 gst_aml_v4l2_object_set_stride(info, align, i, stride);
3184 info->offset[i] = info->size;
3185 info->size += pix_mp->plane_fmt[i].sizeimage;
3186 }
3187
3188        /* Extrapolate stride if planar formats are being set in 1 v4l2 plane */
3189 if (v4l2object->n_v4l2_planes < finfo->n_planes)
3190 {
3191 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3192 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3193 }
3194 }
3195 else
3196 {
3197 /* only one plane in non-MPLANE mode */
3198 v4l2object->n_v4l2_planes = 1;
3199 info->size = format->fmt.pix.sizeimage;
3200 stride = format->fmt.pix.bytesperline;
3201
3202 if (info->stride[0] != stride)
3203 standard_stride = FALSE;
3204
3205 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3206 }
3207
3208 /* adjust the offset to take into account left and top */
3209 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3210 {
3211 if ((align->padding_left + align->padding_top) > 0)
3212 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3213 "Left and top padding is not permitted for tiled formats");
3214 }
3215 else
3216 {
3217 for (i = 0; i < finfo->n_planes; i++)
3218 {
3219 gint vedge, hedge;
3220
3221            /* FIXME we assume one plane per component, which is true for all the
3222             * formats we currently support. */
3223
3224 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, i, align->padding_left);
3225 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, align->padding_top);
3226
3227 info->offset[i] += (vedge * info->stride[i]) +
3228 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE(info, i));
3229 }
3230 }
3231
3232store_info:
3233 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3234 info->size);
3235
3236 /* to avoid copies we need video meta if there is padding */
3237 v4l2object->need_video_meta =
3238 ((align->padding_top + align->padding_left + align->padding_right +
3239 align->padding_bottom) != 0);
3240
3241 /* ... or if stride is non "standard" */
3242 if (!standard_stride)
3243 v4l2object->need_video_meta = TRUE;
3244
3245 /* ... or also video meta if we use multiple, non-contiguous, planes */
3246 if (v4l2object->n_v4l2_planes > 1)
3247 v4l2object->need_video_meta = TRUE;
3248
3249 v4l2object->info = *info;
3250 v4l2object->align = *align;
3251 v4l2object->format = *format;
3252 v4l2object->fmtdesc = fmtdesc;
3253
3254 /* if we have a framerate pre-calculate duration */
3255 if (info->fps_n > 0 && info->fps_d > 0)
3256 {
3257 v4l2object->duration = gst_util_uint64_scale_int(GST_SECOND, info->fps_d,
3258 info->fps_n);
3259 }
3260 else
3261 {
3262 v4l2object->duration = GST_CLOCK_TIME_NONE;
3263 }
3264}
3265
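/* Extrapolate the stride of the given plane from the plane 0 stride; the
 * interleaved chroma plane of the NV* formats gets twice the
 * component-scaled width. */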
3266gint gst_aml_v4l2_object_extrapolate_stride(const GstVideoFormatInfo *finfo,
3267 gint plane, gint stride)
3268{
3269 gint estride;
3270
3271 switch (finfo->format)
3272 {
3273 case GST_VIDEO_FORMAT_NV12:
3274 case GST_VIDEO_FORMAT_NV12_64Z32:
3275 case GST_VIDEO_FORMAT_NV21:
3276 case GST_VIDEO_FORMAT_NV16:
3277 case GST_VIDEO_FORMAT_NV61:
3278 case GST_VIDEO_FORMAT_NV24:
3279 estride = (plane == 0 ? 1 : 2) *
3280 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3281 break;
3282 default:
3283 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3284 break;
3285 }
3286
3287 return estride;
3288}
3289
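/* Check whether the colorimetry string from the caps matches cinfo, also
 * accepting the incomplete 1:4:0:0 colorimetry produced by jpegdec when the
 * device expects full JPEG colorimetry (1:4:7:1). */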
3290static gboolean
3291gst_aml_v4l2_video_colorimetry_matches(const GstVideoColorimetry *cinfo,
3292 const gchar *color)
3293{
3294 GstVideoColorimetry ci;
3295 static const GstVideoColorimetry ci_likely_jpeg = {
3296 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3297 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN};
3298 static const GstVideoColorimetry ci_jpeg = {
3299 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3300 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709};
3301
3302 if (!gst_video_colorimetry_from_string(&ci, color))
3303 return FALSE;
3304
3305 if (gst_video_colorimetry_is_equal(&ci, cinfo))
3306 return TRUE;
3307
3308 /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
3309 if (gst_video_colorimetry_is_equal(&ci, &ci_likely_jpeg) && gst_video_colorimetry_is_equal(cinfo, &ci_jpeg))
3310 return TRUE;
3311
3312 return FALSE;
3313}
3314
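/* Amlogic-specific decoder configuration, sent with VIDIOC_S_PARM through the
 * raw_data area of struct v4l2_streamparm (struct aml_dec_params):
 * - double-write mode, taken from the V4L2_SET_AMLOGIC_DW_MODE environment
 *   variable when it holds a supported value (0-4 or 16);
 * - the HDR signal_type word, assembled below from the caps colorimetry
 *   (range, matrix, transfer and primaries bit fields);
 * - mastering display colour volume, parsed from the
 *   "mastering-display-metadata" caps field.
 * The colorimetry and mastering-display-metadata fields are then removed from
 * the caps so they do not take part in downstream negotiation. */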
3315static void
3316set_amlogic_vdec_parm(GstAmlV4l2Object *v4l2object, struct v4l2_streamparm *streamparm, GstCaps *caps)
3317{
3318 struct aml_dec_params *decParm = (struct aml_dec_params *)streamparm->parm.raw_data;
3319 const char *env;
3320
3321 decParm->cfg.metadata_config_flag = 1 << 13;
3322
3323 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3324 {
3325 env = getenv("V4L2_SET_AMLOGIC_DW_MODE");
3326 if (env)
3327 {
3328 int dwMode = atoi(env);
3329 switch (dwMode)
3330 {
3331 case 0:
3332 case 1:
3333 case 2:
3334 case 3:
3335 case 4:
3336 case 16:
3337 decParm->cfg.double_write_mode = dwMode;
3338 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3339 break;
3340 }
3341 }
3342
3343 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_PARM, streamparm) < 0)
3344 {
3345 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3346 }
3347 else
3348 {
3349 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Set dwMode to %d", decParm->cfg.double_write_mode);
3350 }
3351
3352 GstStructure *structure= gst_caps_get_structure(caps, 0);
3353 if (structure == NULL)
3354 {
3355 return;
3356 }
3357 if ( gst_structure_has_field(structure, "colorimetry") )
3358 {
3359 const char *colorimetry= gst_structure_get_string(structure,"colorimetry");
3360 GstVideoColorimetry vci = {0};
3361 if ( colorimetry && gst_video_colorimetry_from_string( &vci, colorimetry ))
3362 {
3363 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_HDRINFO;
3364 decParm->hdr.signal_type= (1<<29); /* present flag */
3365            /* set default values to stay in sync with the driver's HDR info */
3366 decParm->hdr.signal_type |= (5<<26) | (1<<24);
3367
3368 gint hdrColorimetry[4] = {0};
3369 hdrColorimetry[0]= (int)vci.range;
3370 hdrColorimetry[1]= (int)vci.matrix;
3371 hdrColorimetry[2]= (int)vci.transfer;
3372 hdrColorimetry[3]= (int)vci.primaries;
3373 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "colorimetry: [%d,%d,%d,%d]",
3374 hdrColorimetry[0],
3375 hdrColorimetry[1],
3376 hdrColorimetry[2],
3377 hdrColorimetry[3] );
3378 /* range */
3379 switch ( hdrColorimetry[0] )
3380 {
3381 case 1:
3382 case 2:
3383 decParm->hdr.signal_type |= ((hdrColorimetry[0] % 2)<<25);
3384 break;
3385 default:
3386 break;
3387 }
3388 /* matrix coefficient */
3389 switch ( hdrColorimetry[1] )
3390 {
3391 case 1: /* RGB */
3392 decParm->hdr.signal_type |= 0;
3393 break;
3394 case 2: /* FCC */
3395 decParm->hdr.signal_type |= 4;
3396 break;
3397 case 3: /* BT709 */
3398 decParm->hdr.signal_type |= 1;
3399 break;
3400 case 4: /* BT601 */
3401 decParm->hdr.signal_type |= 3;
3402 break;
3403 case 5: /* SMPTE240M */
3404 decParm->hdr.signal_type |= 7;
3405 break;
3406 case 6: /* BT2020 */
3407 decParm->hdr.signal_type |= 9;
3408 break;
3409 default: /* unknown */
3410 decParm->hdr.signal_type |= 2;
3411 break;
3412 }
3413 /* transfer function */
3414 switch ( hdrColorimetry[2] )
3415 {
3416 case 5: /* BT709 */
3417 decParm->hdr.signal_type |= (1<<8);
3418 break;
3419 case 6: /* SMPTE240M */
3420 decParm->hdr.signal_type |= (7<<8);
3421 break;
3422 case 9: /* LOG100 */
3423 decParm->hdr.signal_type |= (9<<8);
3424 break;
3425 case 10: /* LOG316 */
3426 decParm->hdr.signal_type |= (10<<8);
3427 break;
3428 case 12: /* BT2020_12 */
3429 decParm->hdr.signal_type |= (15<<8);
3430 break;
3431 case 11: /* BT2020_10 */
3432 decParm->hdr.signal_type |= (14<<8);
3433 break;
3434 case 13: /* SMPTE2084 */
3435 decParm->hdr.signal_type |= (16<<8);
3436 break;
3437 case 14: /* ARIB_STD_B67 */
3438 decParm->hdr.signal_type |= (18<<8);
3439 break;
3440 #if ((GST_VERSION_MAJOR == 1) && (GST_VERSION_MINOR >= 18))
3441 case 16: /* BT601 */
3442 decParm->hdr.signal_type |= (3<<8);
3443 break;
3444 #endif
3445 case 1: /* GAMMA10 */
3446 case 2: /* GAMMA18 */
3447 case 3: /* GAMMA20 */
3448 case 4: /* GAMMA22 */
3449 case 7: /* SRGB */
3450 case 8: /* GAMMA28 */
3451 case 15: /* ADOBERGB */
3452 default:
3453 break;
3454 }
3455 /* primaries */
3456 switch ( hdrColorimetry[3] )
3457 {
3458 case 1: /* BT709 */
3459 decParm->hdr.signal_type |= ((1<<24)|(1<<16));
3460 break;
3461 case 2: /* BT470M */
3462 decParm->hdr.signal_type |= ((1<<24)|(4<<16));
3463 break;
3464 case 3: /* BT470BG */
3465 decParm->hdr.signal_type |= ((1<<24)|(5<<16));
3466 break;
3467 case 4: /* SMPTE170M */
3468 decParm->hdr.signal_type |= ((1<<24)|(6<<16));
3469 break;
3470 case 5: /* SMPTE240M */
3471 decParm->hdr.signal_type |= ((1<<24)|(7<<16));
3472 break;
3473 case 6: /* FILM */
3474 decParm->hdr.signal_type |= ((1<<24)|(8<<16));
3475 break;
3476 case 7: /* BT2020 */
3477 decParm->hdr.signal_type |= ((1<<24)|(9<<16));
3478 break;
3479 case 8: /* ADOBERGB */
3480 default:
3481 break;
3482 }
3483 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR signal_type %X", decParm->hdr.signal_type);
3484 }
3485
3486 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "got caps %" GST_PTR_FORMAT, caps);
3487 GstStructure *st = gst_caps_get_structure(caps, 0);
3488 GstCapsFeatures *features = gst_caps_get_features(caps, 0);
3489
3490 if (gst_structure_has_field(st, "colorimetry"))
3491 {
3492 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "have colorimetry");
3493 }
3494
3495 if (st && features)
3496 {
3497 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "trace in remove colorimetry");
3498 gst_structure_remove_field(st, "colorimetry");
3499 gst_caps_features_remove(features, "colorimetry");
3500 }
3501 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove colorimetry %" GST_PTR_FORMAT, caps);
3502 }
3503
3504 if ( gst_structure_has_field(structure, "mastering-display-metadata") )
3505 {
3506 const char *masteringDisplay= gst_structure_get_string(structure,"mastering-display-metadata");
3507 float hdrMasteringDisplay[10];
3508 if ( masteringDisplay && sscanf( masteringDisplay, "%f:%f:%f:%f:%f:%f:%f:%f:%f:%f",
3509 &hdrMasteringDisplay[0],
3510 &hdrMasteringDisplay[1],
3511 &hdrMasteringDisplay[2],
3512 &hdrMasteringDisplay[3],
3513 &hdrMasteringDisplay[4],
3514 &hdrMasteringDisplay[5],
3515 &hdrMasteringDisplay[6],
3516 &hdrMasteringDisplay[7],
3517 &hdrMasteringDisplay[8],
3518 &hdrMasteringDisplay[9] ) == 10 )
3519 {
3520 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "mastering display [%f,%f,%f,%f,%f,%f,%f,%f,%f,%f]",
3521 hdrMasteringDisplay[0],
3522 hdrMasteringDisplay[1],
3523 hdrMasteringDisplay[2],
3524 hdrMasteringDisplay[3],
3525 hdrMasteringDisplay[4],
3526 hdrMasteringDisplay[5],
3527 hdrMasteringDisplay[6],
3528 hdrMasteringDisplay[7],
3529 hdrMasteringDisplay[8],
3530 hdrMasteringDisplay[9] );
3531
3532 decParm->hdr.color_parms.present_flag= 1;
3533 decParm->hdr.color_parms.primaries[2][0]= (uint32_t)(hdrMasteringDisplay[0]*50000); /* R.x */
3534 decParm->hdr.color_parms.primaries[2][1]= (uint32_t)(hdrMasteringDisplay[1]*50000); /* R.y */
3535 decParm->hdr.color_parms.primaries[0][0]= (uint32_t)(hdrMasteringDisplay[2]*50000); /* G.x */
3536 decParm->hdr.color_parms.primaries[0][1]= (uint32_t)(hdrMasteringDisplay[3]*50000); /* G.y */
3537 decParm->hdr.color_parms.primaries[1][0]= (uint32_t)(hdrMasteringDisplay[4]*50000); /* B.x */
3538 decParm->hdr.color_parms.primaries[1][1]= (uint32_t)(hdrMasteringDisplay[5]*50000); /* B.y */
3539 decParm->hdr.color_parms.white_point[0]= (uint32_t)(hdrMasteringDisplay[6]*50000);
3540 decParm->hdr.color_parms.white_point[1]= (uint32_t)(hdrMasteringDisplay[7]*50000);
3541 decParm->hdr.color_parms.luminance[0]= (uint32_t)(hdrMasteringDisplay[8]);
3542 decParm->hdr.color_parms.luminance[1]= (uint32_t)(hdrMasteringDisplay[9]);
3543 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: primaries %X %X %X %X %X %X",
3544 decParm->hdr.color_parms.primaries[2][0],
3545 decParm->hdr.color_parms.primaries[2][1],
3546 decParm->hdr.color_parms.primaries[0][0],
3547 decParm->hdr.color_parms.primaries[0][1],
3548 decParm->hdr.color_parms.primaries[1][0],
3549 decParm->hdr.color_parms.primaries[1][1] );
3550 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: white point: %X %X",
3551 decParm->hdr.color_parms.white_point[0],
3552 decParm->hdr.color_parms.white_point[1] );
3553 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: luminance: %X %X",
3554 decParm->hdr.color_parms.luminance[0],
3555 decParm->hdr.color_parms.luminance[1] );
3556 }
3557
3558 GstStructure *st = gst_caps_get_structure(caps, 0);
3559 GstCapsFeatures * features = gst_caps_get_features(caps, 0);
3560 if (st && features)
3561 {
3562 gst_structure_remove_fields(st, "mastering-display-metadata", NULL);
3563 gst_caps_features_remove(features, "mastering-display-metadata");
3564 }
3565 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove mastering-display-metadata %" GST_PTR_FORMAT, caps);
3566 }
3567    }
3568}
3569
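/* Core format negotiation: convert the caps into a struct v4l2_format (plus
 * the Amlogic stream parameters above), map the GStreamer colorimetry onto
 * the V4L2 colorspace/quantization/ycbcr_enc/xfer_func fields, and apply the
 * result with TRY_FMT or S_FMT depending on try_only. */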
3570static gboolean
3571gst_aml_v4l2_object_set_format_full(GstAmlV4l2Object *v4l2object, GstCaps *caps,
3572 gboolean try_only, GstAmlV4l2Error *error)
3573{
3574 gint fd = v4l2object->video_fd;
3575 struct v4l2_format format;
3576 struct v4l2_streamparm streamparm;
3577 enum v4l2_field field;
3578 guint32 pixelformat;
3579 struct v4l2_fmtdesc *fmtdesc;
3580 GstVideoInfo info;
3581 GstVideoAlignment align;
3582 gint width, height, fps_n, fps_d;
3583 gint n_v4l_planes;
3584 gint i = 0;
3585 gboolean is_mplane;
3586 enum v4l2_colorspace colorspace = 0;
3587 enum v4l2_quantization range = 0;
3588 enum v4l2_ycbcr_encoding matrix = 0;
3589 enum v4l2_xfer_func transfer = 0;
3590 GstStructure *s;
3591 gboolean disable_colorimetry = FALSE;
3592
3593 g_return_val_if_fail(!v4l2object->skip_try_fmt_probes ||
3594 gst_caps_is_writable(caps),
3595 FALSE);
3596
3597 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3598 if (!try_only)
3599 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3600
3601 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3602 streamparm.type = v4l2object->type;
3603    set_amlogic_vdec_parm(v4l2object, &streamparm, caps);
3604
3605 is_mplane = V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type);
3606
3607 gst_video_info_init(&info);
3608 gst_video_alignment_reset(&align);
3609
3610 if (!gst_aml_v4l2_object_get_caps_info(v4l2object, caps, &fmtdesc, &info))
3611 goto invalid_caps;
3612
3613 pixelformat = fmtdesc->pixelformat;
3614 width = GST_VIDEO_INFO_WIDTH(&info);
3615 height = GST_VIDEO_INFO_HEIGHT(&info);
3616 fps_n = GST_VIDEO_INFO_FPS_N(&info);
3617 fps_d = GST_VIDEO_INFO_FPS_D(&info);
3618
3619    /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
3620     * or if contiguous layout is preferred */
3621 n_v4l_planes = GST_VIDEO_INFO_N_PLANES(&info);
3622 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3623 n_v4l_planes = 1;
3624
3625 if (GST_VIDEO_INFO_IS_INTERLACED(&info))
3626 {
3627 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "interlaced video");
3628 /* ideally we would differentiate between types of interlaced video
3629 * but there is not sufficient information in the caps..
3630 */
3631 field = V4L2_FIELD_INTERLACED;
3632 }
3633 else
3634 {
3635 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "progressive video");
3636 field = V4L2_FIELD_NONE;
3637 }
3638
3639 /* We first pick the main colorspace from the primaries */
3640 switch (info.colorimetry.primaries)
3641 {
3642 case GST_VIDEO_COLOR_PRIMARIES_BT709:
3643 /* There are two colorspaces using these primaries; use the range to
3644 * differentiate */
3645 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3646 colorspace = V4L2_COLORSPACE_REC709;
3647 else
3648 colorspace = V4L2_COLORSPACE_SRGB;
3649 break;
3650 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3651 colorspace = V4L2_COLORSPACE_BT2020;
3652 break;
3653 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3654 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3655 break;
3656 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3657 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3658 break;
3659 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3660 colorspace = V4L2_COLORSPACE_SMPTE170M;
3661 break;
3662 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3663 colorspace = V4L2_COLORSPACE_SMPTE240M;
3664 break;
3665
3666 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3667 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3668 /* We don't know, we will guess */
3669 break;
3670
3671 default:
3672 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3673 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3674 break;
3675 }
3676
3677 switch (info.colorimetry.range)
3678 {
3679 case GST_VIDEO_COLOR_RANGE_0_255:
3680 range = V4L2_QUANTIZATION_FULL_RANGE;
3681 break;
3682 case GST_VIDEO_COLOR_RANGE_16_235:
3683 range = V4L2_QUANTIZATION_LIM_RANGE;
3684 break;
3685 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3686 /* We let the driver pick a default one */
3687 break;
3688 default:
3689 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3690 "Unknown colorimetry range %d", info.colorimetry.range);
3691 break;
3692 }
3693
3694 switch (info.colorimetry.matrix)
3695 {
3696 case GST_VIDEO_COLOR_MATRIX_RGB:
3697 /* Unspecified, leave to default */
3698 break;
3699 /* FCC is about the same as BT601 with fewer digits */
3700 case GST_VIDEO_COLOR_MATRIX_FCC:
3701 case GST_VIDEO_COLOR_MATRIX_BT601:
3702 matrix = V4L2_YCBCR_ENC_601;
3703 break;
3704 case GST_VIDEO_COLOR_MATRIX_BT709:
3705 matrix = V4L2_YCBCR_ENC_709;
3706 break;
3707 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3708 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3709 break;
3710 case GST_VIDEO_COLOR_MATRIX_BT2020:
3711 matrix = V4L2_YCBCR_ENC_BT2020;
3712 break;
3713 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3714 /* We let the driver pick a default one */
3715 break;
3716 default:
3717 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3718 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3719 break;
3720 }
3721
3722 switch (info.colorimetry.transfer)
3723 {
3724 case GST_VIDEO_TRANSFER_GAMMA18:
3725 case GST_VIDEO_TRANSFER_GAMMA20:
3726 case GST_VIDEO_TRANSFER_GAMMA22:
3727 case GST_VIDEO_TRANSFER_GAMMA28:
3728 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3729 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3730 /* fallthrough */
3731 case GST_VIDEO_TRANSFER_GAMMA10:
3732 transfer = V4L2_XFER_FUNC_NONE;
3733 break;
3734 case GST_VIDEO_TRANSFER_BT2020_12:
3735 case GST_VIDEO_TRANSFER_BT709:
3736 transfer = V4L2_XFER_FUNC_709;
3737 break;
3738 case GST_VIDEO_TRANSFER_SMPTE240M:
3739 transfer = V4L2_XFER_FUNC_SMPTE240M;
3740 break;
3741 case GST_VIDEO_TRANSFER_SRGB:
3742 transfer = V4L2_XFER_FUNC_SRGB;
3743 break;
3744 case GST_VIDEO_TRANSFER_LOG100:
3745 case GST_VIDEO_TRANSFER_LOG316:
3746 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3747 "LOG 100, 316 transfer functions not supported");
3748 /* FIXME No known sensible default, maybe AdobeRGB ? */
3749 break;
3750 case GST_VIDEO_TRANSFER_UNKNOWN:
3751 /* We let the driver pick a default one */
3752 break;
3753 default:
3754 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3755 "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
3756 break;
3757 }
3758
3759 if (colorspace == 0)
3760 {
3761 /* Try to guess colorspace according to pixelformat and size */
3762 if (GST_VIDEO_INFO_IS_YUV(&info))
3763 {
3764 if (range == V4L2_QUANTIZATION_FULL_RANGE && matrix == V4L2_YCBCR_ENC_601 && transfer == 0)
3765 {
3766 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3767 * function most likely is JPEG */
3768 colorspace = V4L2_COLORSPACE_JPEG;
3769 transfer = V4L2_XFER_FUNC_SRGB;
3770 }
3771 else
3772 {
3773 /* SD streams likely use SMPTE170M and HD streams REC709 */
3774 if (width <= 720 && height <= 576)
3775 colorspace = V4L2_COLORSPACE_SMPTE170M;
3776 else
3777 colorspace = V4L2_COLORSPACE_REC709;
3778 }
3779 }
3780 else if (GST_VIDEO_INFO_IS_RGB(&info))
3781 {
3782 colorspace = V4L2_COLORSPACE_SRGB;
3783 transfer = V4L2_XFER_FUNC_NONE;
3784 }
3785 }
3786
3787 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format %dx%d, format "
3788 "%" GST_FOURCC_FORMAT " stride: %d",
3789 width, height,
3790 GST_FOURCC_ARGS(pixelformat), GST_VIDEO_INFO_PLANE_STRIDE(&info, 0));
3791
3792 memset(&format, 0x00, sizeof(struct v4l2_format));
3793 format.type = v4l2object->type;
3794
3795 if (is_mplane)
3796 {
3797 format.type = v4l2object->type;
3798 format.fmt.pix_mp.pixelformat = pixelformat;
3799 format.fmt.pix_mp.width = width;
3800 format.fmt.pix_mp.height = height;
3801 format.fmt.pix_mp.field = field;
3802 format.fmt.pix_mp.num_planes = n_v4l_planes;
3803
3804 /* try to ask our preferred stride but it's not a failure if not
3805 * accepted */
3806 for (i = 0; i < n_v4l_planes; i++)
3807 {
3808 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, i);
3809
3810 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3811 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3812
3813 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
3814 }
3815
3816 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3817 {
3818 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3819 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
3820 else
3821 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3822 }
3823 }
3824 else
3825 {
3826 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
3827
3828 format.type = v4l2object->type;
3829
3830 format.fmt.pix.width = width;
3831 format.fmt.pix.height = height;
3832 format.fmt.pix.pixelformat = pixelformat;
3833 format.fmt.pix.field = field;
3834
3835 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3836 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3837
3838 /* try to ask our preferred stride */
3839 format.fmt.pix.bytesperline = stride;
3840
3841 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3842 {
3843 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3844 format.fmt.pix.sizeimage = 1;
3845 else
3846 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
3847 }
3848 }
3849
3850 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format is %dx%d, format "
3851 "%" GST_FOURCC_FORMAT ", nb planes %d",
3852 format.fmt.pix.width,
3853 format.fmt.pix_mp.height,
3854 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3855 is_mplane ? format.fmt.pix_mp.num_planes : 1);
3856
3857#ifndef GST_DISABLE_GST_DEBUG
3858 if (is_mplane)
3859 {
3860 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3861 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3862 format.fmt.pix_mp.plane_fmt[i].bytesperline);
3863 }
3864 else
3865 {
3866 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3867 format.fmt.pix.bytesperline);
3868 }
3869#endif
3870
3871 if (is_mplane)
3872 {
3873 format.fmt.pix_mp.colorspace = colorspace;
3874 format.fmt.pix_mp.quantization = range;
3875 format.fmt.pix_mp.ycbcr_enc = matrix;
3876 format.fmt.pix_mp.xfer_func = transfer;
3877 }
3878 else
3879 {
3880 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
3881 format.fmt.pix.colorspace = colorspace;
3882 format.fmt.pix.quantization = range;
3883 format.fmt.pix.ycbcr_enc = matrix;
3884 format.fmt.pix.xfer_func = transfer;
3885 }
3886
3887 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
3888 colorspace, range, matrix, transfer);
3889
3890 if (try_only)
3891 {
3892 if (v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &format) < 0)
3893 goto try_fmt_failed;
3894 }
3895 else
3896 {
3897 if (v4l2object->ioctl(fd, VIDIOC_S_FMT, &format) < 0)
3898 goto set_fmt_failed;
3899 }
3900
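    /* Read back the colorimetry the driver actually selected; it may legally
     * differ from what we asked for. */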
3901 if (is_mplane)
3902 {
3903 colorspace = format.fmt.pix_mp.colorspace;
3904 range = format.fmt.pix_mp.quantization;
3905 matrix = format.fmt.pix_mp.ycbcr_enc;
3906 transfer = format.fmt.pix_mp.xfer_func;
3907 }
3908 else
3909 {
3910 colorspace = format.fmt.pix.colorspace;
3911 range = format.fmt.pix.quantization;
3912 matrix = format.fmt.pix.ycbcr_enc;
3913 transfer = format.fmt.pix.xfer_func;
3914 }
3915
3916 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got format of %dx%d, format "
3917 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
3918 format.fmt.pix.width, format.fmt.pix_mp.height,
3919 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3920 is_mplane ? format.fmt.pix_mp.num_planes : 1,
3921 colorspace, range, matrix, transfer);
3922
3923#ifndef GST_DISABLE_GST_DEBUG
3924 if (is_mplane)
3925 {
3926 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3927 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3928 format.fmt.pix_mp.plane_fmt[i].bytesperline,
3929 format.fmt.pix_mp.plane_fmt[i].sizeimage);
3930 }
3931 else
3932 {
3933 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3934 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
3935 }
3936#endif
3937
3938 if (format.fmt.pix.pixelformat != pixelformat)
3939 goto invalid_pixelformat;
3940
3941 /* Only negotiate size with raw data.
3942 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
3943 * in ASF mode for example, there is also no reason for a driver to
3944 * change the size. */
3945 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED)
3946 {
3947 /* We can crop larger images */
3948 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
3949 goto invalid_dimensions;
3950
3951 /* Note, this will be adjusted if upstream has non-centered cropping. */
3952 align.padding_top = 0;
3953 align.padding_bottom = format.fmt.pix.height - height;
3954 align.padding_left = 0;
3955 align.padding_right = format.fmt.pix.width - width;
3956 }
3957
3958 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
3959 goto invalid_planes;
3960
3961 /* used to check colorimetry and interlace mode fields presence */
3962 s = gst_caps_get_structure(caps, 0);
3963
3964 if (!gst_aml_v4l2_object_get_interlace_mode(format.fmt.pix.field,
3965 &info.interlace_mode))
3966 goto invalid_field;
3967 if (gst_structure_has_field(s, "interlace-mode"))
3968 {
3969 if (format.fmt.pix.field != field)
3970 goto invalid_field;
3971 }
3972
3973 if (gst_aml_v4l2_object_get_colorspace(&format, &info.colorimetry))
3974 {
3975 if (gst_structure_has_field(s, "colorimetry"))
3976 {
xuesong.jiange1a19662022-06-21 20:30:22 +08003977 if (!gst_aml_v4l2_video_colorimetry_matches(&info.colorimetry, gst_structure_get_string(s, "colorimetry")))
3978 {
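                /* Amlogic: tolerate a colorimetry mismatch here instead of
                 * failing negotiation; the stricter check is kept below as the
                 * (currently unused) invalid_colorimetry error path. */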
3979 // goto invalid_colorimetry;
3980 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003981 }
3982 }
3983 else
3984 {
3985 /* The driver (or libv4l2) is misbehaving, just ignore colorimetry from
3986 * the TRY_FMT */
3987 disable_colorimetry = TRUE;
3988 if (gst_structure_has_field(s, "colorimetry"))
3989 gst_structure_remove_field(s, "colorimetry");
3990 }
3991
3992 /* In case we have skipped the try_fmt probes, we'll need to set the
3993 * colorimetry and interlace-mode back into the caps. */
3994 if (v4l2object->skip_try_fmt_probes)
3995 {
3996 if (!disable_colorimetry && !gst_structure_has_field(s, "colorimetry"))
3997 {
3998 gchar *str = gst_video_colorimetry_to_string(&info.colorimetry);
3999 gst_structure_set(s, "colorimetry", G_TYPE_STRING, str, NULL);
4000 g_free(str);
4001 }
4002
4003 if (!gst_structure_has_field(s, "interlace-mode"))
4004 gst_structure_set(s, "interlace-mode", G_TYPE_STRING,
4005 gst_video_interlace_mode_to_string(info.interlace_mode), NULL);
4006 }
4007
4008 if (try_only) /* good enough for trying only */
4009 return TRUE;
4010
4011 if (GST_VIDEO_INFO_HAS_ALPHA(&info))
4012 {
4013 struct v4l2_control ctl = {
4014 0,
4015 };
4016 ctl.id = V4L2_CID_ALPHA_COMPONENT;
4017 ctl.value = 0xff;
4018
4019 if (v4l2object->ioctl(fd, VIDIOC_S_CTRL, &ctl) < 0)
4020 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4021 "Failed to set alpha component value");
4022 }
4023
4024 /* Is there a reason we require the caller to always specify a framerate? */
4025 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
4026 fps_d);
4027
4028 if (v4l2object->ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
4029 goto get_parm_failed;
4030
4031 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
4032 {
4033 GST_VIDEO_INFO_FPS_N(&info) =
4034 streamparm.parm.capture.timeperframe.denominator;
4035 GST_VIDEO_INFO_FPS_D(&info) =
4036 streamparm.parm.capture.timeperframe.numerator;
4037
4038 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got capture framerate: %u/%u",
4039 streamparm.parm.capture.timeperframe.denominator,
4040 streamparm.parm.capture.timeperframe.numerator);
4041
4042 /* We used to skip frame rate setup if the camera was already setup
4043 * with the requested frame rate. This breaks some cameras though,
4044 * causing them to not output data (several models of Thinkpad cameras
4045 * have this problem at least).
4046 * So, don't skip. */
4047 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
4048 fps_n, fps_d);
4049 /* We want to change the frame rate, so check whether we can. Some cheap USB
4050 * cameras don't have the capability */
4051 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4052 {
4053 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4054 "Not setting capture framerate (not supported)");
4055 goto done;
4056 }
4057
4058 /* Note: V4L2 wants the frame interval, we have the frame rate */
4059 streamparm.parm.capture.timeperframe.numerator = fps_d;
4060 streamparm.parm.capture.timeperframe.denominator = fps_n;
4061
4062 /* some cheap USB cams won't accept any change */
4063 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4064 goto set_parm_failed;
4065
4066 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
4067 streamparm.parm.capture.timeperframe.denominator > 0)
4068 {
4069 /* get new values */
4070 fps_d = streamparm.parm.capture.timeperframe.numerator;
4071 fps_n = streamparm.parm.capture.timeperframe.denominator;
4072
4073 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set capture framerate to %u/%u",
4074 fps_n, fps_d);
4075 }
4076 else
4077 {
4078 /* fix v4l2 capture driver to provide framerate values */
4079 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4080 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
4081 }
4082
4083 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4084 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4085 }
4086 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
4087 {
4088 GST_VIDEO_INFO_FPS_N(&info) =
4089 streamparm.parm.output.timeperframe.denominator;
4090 GST_VIDEO_INFO_FPS_D(&info) =
4091 streamparm.parm.output.timeperframe.numerator;
4092
4093 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got output framerate: %u/%u",
4094 streamparm.parm.output.timeperframe.denominator,
4095 streamparm.parm.output.timeperframe.numerator);
4096
4097 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting output framerate to %u/%u",
4098 fps_n, fps_d);
4099 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4100 {
4101 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4102 "Not setting output framerate (not supported)");
4103 goto done;
4104 }
4105
4106 /* Note: V4L2 wants the frame interval, we have the frame rate */
4107 streamparm.parm.output.timeperframe.numerator = fps_d;
4108 streamparm.parm.output.timeperframe.denominator = fps_n;
4109
4110 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4111 goto set_parm_failed;
4112
4113 if (streamparm.parm.output.timeperframe.numerator > 0 &&
4114 streamparm.parm.output.timeperframe.denominator > 0)
4115 {
4116 /* get new values */
4117 fps_d = streamparm.parm.output.timeperframe.numerator;
4118 fps_n = streamparm.parm.output.timeperframe.denominator;
4119
4120 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set output framerate to %u/%u",
4121 fps_n, fps_d);
4122 }
4123 else
4124 {
4125 /* fix v4l2 output driver to provide framerate values */
4126 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4127 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
4128 }
4129
4130 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4131 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4132 }
4133
4134done:
4135 /* add boolean return, so we can fail on driver bugs */
4136 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &format, &info, &align);
4137
4138 /* now configure the pool */
4139 if (!gst_aml_v4l2_object_setup_pool(v4l2object, caps))
4140 goto pool_failed;
4141
4142 return TRUE;
4143
4144 /* ERRORS */
4145invalid_caps:
4146{
4147 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
4148 caps);
4149
4150 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4151 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
4152 return FALSE;
4153}
4154try_fmt_failed:
4155{
4156 if (errno == EINVAL)
4157 {
4158 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4159 (_("Device '%s' has no supported format"), v4l2object->videodev),
4160 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4161 GST_FOURCC_ARGS(pixelformat), width, height,
4162 g_strerror(errno)));
4163 }
4164 else
4165 {
4166 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4167 (_("Device '%s' failed during initialization"),
4168 v4l2object->videodev),
4169 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4170 GST_FOURCC_ARGS(pixelformat), width, height,
4171 g_strerror(errno)));
4172 }
4173 return FALSE;
4174}
4175set_fmt_failed:
4176{
4177 if (errno == EBUSY)
4178 {
4179 GST_AML_V4L2_ERROR(error, RESOURCE, BUSY,
4180 (_("Device '%s' is busy"), v4l2object->videodev),
4181 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4182 GST_FOURCC_ARGS(pixelformat), width, height,
4183 g_strerror(errno)));
4184 }
4185 else if (errno == EINVAL)
4186 {
4187 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4188 (_("Device '%s' has no supported format"), v4l2object->videodev),
4189 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4190 GST_FOURCC_ARGS(pixelformat), width, height,
4191 g_strerror(errno)));
4192 }
4193 else
4194 {
4195 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4196 (_("Device '%s' failed during initialization"),
4197 v4l2object->videodev),
4198 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4199 GST_FOURCC_ARGS(pixelformat), width, height,
4200 g_strerror(errno)));
4201 }
4202 return FALSE;
4203}
4204invalid_dimensions:
4205{
4206 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4207 (_("Device '%s' cannot capture at %dx%d"),
4208 v4l2object->videodev, width, height),
4209 ("Tried to capture at %dx%d, but device returned size %dx%d",
4210 width, height, format.fmt.pix.width, format.fmt.pix.height));
4211 return FALSE;
4212}
4213invalid_pixelformat:
4214{
4215 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4216 (_("Device '%s' cannot capture in the specified format"),
4217 v4l2object->videodev),
4218 ("Tried to capture in %" GST_FOURCC_FORMAT
4219 ", but device returned format"
4220 " %" GST_FOURCC_FORMAT,
4221 GST_FOURCC_ARGS(pixelformat),
4222 GST_FOURCC_ARGS(format.fmt.pix.pixelformat)));
4223 return FALSE;
4224}
4225invalid_planes:
4226{
4227 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4228 (_("Device '%s' does support non-contiguous planes"),
4229 v4l2object->videodev),
4230 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
4231 return FALSE;
4232}
4233invalid_field:
4234{
4235 enum v4l2_field wanted_field;
4236
4237 if (is_mplane)
4238 wanted_field = format.fmt.pix_mp.field;
4239 else
4240 wanted_field = format.fmt.pix.field;
4241
4242 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4243 (_("Device '%s' does not support %s interlacing"),
4244 v4l2object->videodev,
4245 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
4246 ("Device wants %s interlacing",
4247 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4248 return FALSE;
4249}
4250invalid_colorimetry:
4251{
4252 gchar *wanted_colorimetry;
4253
4254 wanted_colorimetry = gst_video_colorimetry_to_string(&info.colorimetry);
4255
4256 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4257 (_("Device '%s' does not support %s colorimetry"),
4258 v4l2object->videodev, gst_structure_get_string(s, "colorimetry")),
4259 ("Device wants %s colorimetry", wanted_colorimetry));
4260
4261 g_free(wanted_colorimetry);
4262 return FALSE;
4263}
4264get_parm_failed:
4265{
4266 /* it's possible that this call is not supported */
4267 if (errno != EINVAL && errno != ENOTTY)
4268 {
4269 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4270 (_("Could not get parameters on device '%s'"),
4271 v4l2object->videodev),
4272 GST_ERROR_SYSTEM);
4273 }
4274 goto done;
4275}
4276set_parm_failed:
4277{
4278 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4279 (_("Video device did not accept new frame rate setting.")),
4280 GST_ERROR_SYSTEM);
4281 goto done;
4282}
4283pool_failed:
4284{
4285 /* setup_pool already sent the error */
4286 return FALSE;
4287}
4288}
4289
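/* Typical usage (sketch): a decoder element negotiates a side of the device
 * with these two entry points, first probing with try_format() and then
 * committing with set_format(). The error-init macro shown here is only an
 * assumption for illustration; use whatever GstAmlV4l2Error handling the
 * element already has.
 *
 *   GstAmlV4l2Error err = GST_AML_V4L2_ERROR_INIT;   // assumed init macro
 *   if (gst_aml_v4l2_object_try_format (obj, caps, &err) &&
 *       gst_aml_v4l2_object_set_format (obj, caps, &err))
 *     ; // format accepted, the buffer pool is set up by set_format()
 */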
4290gboolean
4291gst_aml_v4l2_object_set_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4292 GstAmlV4l2Error *error)
4293{
4294 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4295 caps);
4296 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, FALSE, error);
4297}
4298
4299gboolean
4300gst_aml_v4l2_object_try_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4301 GstAmlV4l2Error *error)
4302{
4303 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4304 caps);
4305 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, TRUE, error);
4306}
4307
4308GstFlowReturn
4309gst_aml_v4l2_object_poll(GstAmlV4l2Object *v4l2object)
4310{
4311 gint ret;
4312
4313 if (!v4l2object->can_poll_device)
4314 goto done;
4315
4316 GST_LOG_OBJECT(v4l2object, "polling device");
4317
4318again:
4319 ret = gst_poll_wait(v4l2object->poll, GST_CLOCK_TIME_NONE);
4320 if (G_UNLIKELY(ret < 0))
4321 {
4322 switch (errno)
4323 {
4324 case EBUSY:
4325 goto stopped;
4326 case EAGAIN:
4327 case EINTR:
4328 goto again;
4329 case ENXIO:
4330 GST_WARNING_OBJECT(v4l2object,
4331 "v4l2 device doesn't support polling. Disabling"
4332 " using libv4l2 in this case may cause deadlocks");
4333 v4l2object->can_poll_device = FALSE;
4334 goto done;
4335 default:
4336 goto select_error;
4337 }
4338 }
4339
4340done:
4341 return GST_FLOW_OK;
4342
4343 /* ERRORS */
4344stopped:
4345{
4346 GST_DEBUG_OBJECT(v4l2object, "stop called");
4347 return GST_FLOW_FLUSHING;
4348}
4349select_error:
4350{
4351 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ, (NULL),
4352 ("poll error %d: %s (%d)", ret, g_strerror(errno), errno));
4353 return GST_FLOW_ERROR;
4354}
4355}
4356
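/* Dequeue one pending V4L2 event (after blocking on the fd via the poll above)
 * and translate V4L2_EVENT_SOURCE_CHANGE and V4L2_EVENT_EOS into the custom
 * GST_AML_V4L2_FLOW_* returns used by the callers. */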
4357GstFlowReturn
4358gst_aml_v4l2_object_dqevent(GstAmlV4l2Object *v4l2object)
4359{
4360 GstFlowReturn res;
4361 struct v4l2_event evt;
4362
4363 if ((res = gst_aml_v4l2_object_poll(v4l2object)) != GST_FLOW_OK)
4364 goto poll_failed;
4365
4366 memset(&evt, 0x00, sizeof(struct v4l2_event));
4367 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DQEVENT, &evt) < 0)
4368 goto dqevent_failed;
4369
4370 switch (evt.type)
4371 {
4372 case V4L2_EVENT_SOURCE_CHANGE:
4373 return GST_AML_V4L2_FLOW_SOURCE_CHANGE;
4374 break;
4375 case V4L2_EVENT_EOS:
4376 return GST_AML_V4L2_FLOW_LAST_BUFFER;
4377 break;
4378 default:
4379 break;
4380 }
4381
4382 return GST_FLOW_OK;
4383
4384 /* ERRORS */
4385poll_failed:
4386{
4387 GST_DEBUG_OBJECT(v4l2object, "poll error %s", gst_flow_get_name(res));
4388 return res;
4389}
4390dqevent_failed:
4391{
4392 return GST_FLOW_ERROR;
4393}
4394}
4395
4396/**
4397 * gst_aml_v4l2_object_acquire_format:
4398 * @v4l2object the object
4399 * @info a GstVideoInfo to be filled
4400 *
4401 * Acquire the driver chosen format. This is useful in decoder or encoder elements where
4402 * the output format is chosen by the HW.
4403 *
4404 * Returns: %TRUE on success, %FALSE on failure.
4405 */
4406gboolean
4407gst_aml_v4l2_object_acquire_format(GstAmlV4l2Object *v4l2object, GstVideoInfo *info)
4408{
4409 struct v4l2_fmtdesc *fmtdesc;
4410 struct v4l2_format fmt;
4411 struct v4l2_crop crop;
4412 struct v4l2_selection sel;
4413 struct v4l2_rect *r = NULL;
4414 GstVideoFormat format;
4415 guint width, height;
4416 GstVideoAlignment align;
4417
4418 gst_video_info_init(info);
4419 gst_video_alignment_reset(&align);
4420
4421 memset(&fmt, 0x00, sizeof(struct v4l2_format));
4422 fmt.type = v4l2object->type;
4423 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4424 goto get_fmt_failed;
4425
4426 fmtdesc = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object,
4427 fmt.fmt.pix.pixelformat);
4428 if (fmtdesc == NULL)
4429 goto unsupported_format;
4430
4431 /* No need to care about mplane, the first four params are the same */
4432 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fmt.fmt.pix.pixelformat);
4433
4434 /* fails if we do not translate the fmt.pix.pixelformat to GstVideoFormat */
4435 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4436 goto unsupported_format;
4437
4438 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4439 goto invalid_dimensions;
4440
4441 width = fmt.fmt.pix.width;
4442 height = fmt.fmt.pix.height;
4443
4444 /* Use the default compose rectangle */
4445 memset(&sel, 0, sizeof(struct v4l2_selection));
4446 sel.type = v4l2object->type;
4447 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4448 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
4449 {
4450 r = &sel.r;
4451 }
4452 else
4453 {
4454 /* For ancient kernels, fall back to G_CROP */
4455 memset(&crop, 0, sizeof(struct v4l2_crop));
4456 crop.type = v4l2object->type;
4457 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4458 r = &crop.c;
4459 }
4460 if (r)
4461 {
4462 align.padding_left = r->left;
4463 align.padding_top = r->top;
4464 align.padding_right = width - r->width - r->left;
4465 align.padding_bottom = height - r->height - r->top;
4466 width = r->width;
4467 height = r->height;
4468 }
4469
4470 gst_video_info_set_format(info, format, width, height);
4471
4472 switch (fmt.fmt.pix.field)
4473 {
4474 case V4L2_FIELD_ANY:
4475 case V4L2_FIELD_NONE:
4476 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4477 break;
4478 case V4L2_FIELD_INTERLACED:
4479 case V4L2_FIELD_INTERLACED_TB:
4480 case V4L2_FIELD_INTERLACED_BT:
4481 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4482 break;
4483 default:
4484 goto unsupported_field;
4485 }
4486
4487 gst_aml_v4l2_object_get_colorspace(&fmt, &info->colorimetry);
4488
4489 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &fmt, info, &align);
4490
4491 /* Shall we setup the pool ? */
4492
4493 return TRUE;
4494
4495get_fmt_failed:
4496{
4497 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4498 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4499 return FALSE;
4500}
4501invalid_dimensions:
4502{
4503 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4504 (_("Video device returned invalid dimensions.")),
4505 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4506 fmt.fmt.pix.height));
4507 return FALSE;
4508}
4509unsupported_field:
4510{
4511 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4512 (_("Video device uses an unsupported interlacing method.")),
4513 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4514 return FALSE;
4515}
4516unsupported_format:
4517{
4518 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4519 (_("Video device uses an unsupported pixel format.")),
4520 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4521 GST_FOURCC_ARGS(fmt.fmt.pix.pixelformat)));
4522 return FALSE;
4523}
4524}
4525
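/* Apply the alignment padding as a crop rectangle on the device, preferring
 * VIDIOC_S_SELECTION and falling back to the legacy VIDIOC_S_CROP/G_CROP pair
 * when the driver returns ENOTTY. */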
4526gboolean
4527gst_aml_v4l2_object_set_crop(GstAmlV4l2Object *obj)
4528{
4529 struct v4l2_selection sel = {0};
4530 struct v4l2_crop crop = {0};
4531
4532 sel.type = obj->type;
4533 sel.target = V4L2_SEL_TGT_CROP;
4534 sel.flags = 0;
4535 sel.r.left = obj->align.padding_left;
4536 sel.r.top = obj->align.padding_top;
4537 sel.r.width = obj->info.width;
4538 sel.r.height = obj->info.height;
4539
4540 crop.type = obj->type;
4541 crop.c = sel.r;
4542
4543 if (obj->align.padding_left + obj->align.padding_top +
4544 obj->align.padding_right + obj->align.padding_bottom ==
4545 0)
4546 {
4547 GST_DEBUG_OBJECT(obj->dbg_obj, "no cropping needed");
4548 return TRUE;
4549 }
4550
4551 GST_DEBUG_OBJECT(obj->dbg_obj,
4552 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4553 crop.c.width, crop.c.height);
4554
4555 if (obj->ioctl(obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0)
4556 {
4557 if (errno != ENOTTY)
4558 {
4559 GST_WARNING_OBJECT(obj->dbg_obj,
4560 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4561 g_strerror(errno));
4562 return FALSE;
4563 }
4564 else
4565 {
4566 if (obj->ioctl(obj->video_fd, VIDIOC_S_CROP, &crop) < 0)
4567 {
4568 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_S_CROP failed");
4569 return FALSE;
4570 }
4571
4572 if (obj->ioctl(obj->video_fd, VIDIOC_G_CROP, &crop) < 0)
4573 {
4574 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_G_CROP failed");
4575 return FALSE;
4576 }
4577
4578 sel.r = crop.c;
4579 }
4580 }
4581
4582 GST_DEBUG_OBJECT(obj->dbg_obj,
4583 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4584 crop.c.width, crop.c.height);
4585
4586 return TRUE;
4587}
4588
4589gboolean
4590gst_aml_v4l2_object_caps_equal(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4591{
4592 GstStructure *config;
4593 GstCaps *oldcaps;
4594 gboolean ret;
4595
4596 if (!v4l2object->pool)
4597 return FALSE;
4598
4599 config = gst_buffer_pool_get_config(v4l2object->pool);
4600 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4601
4602 ret = oldcaps && gst_caps_is_equal(caps, oldcaps);
4603
4604 gst_structure_free(config);
4605
4606 return ret;
4607}
4608
4609gboolean
4610gst_aml_v4l2_object_caps_is_subset(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4611{
4612 GstStructure *config;
4613 GstCaps *oldcaps;
4614 gboolean ret;
4615
4616 if (!v4l2object->pool)
4617 return FALSE;
4618
4619 config = gst_buffer_pool_get_config(v4l2object->pool);
4620 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4621
4622 ret = oldcaps && gst_caps_is_subset(oldcaps, caps);
4623
4624 gst_structure_free(config);
4625
4626 return ret;
4627}
4628
4629GstCaps *
4630gst_aml_v4l2_object_get_current_caps(GstAmlV4l2Object *v4l2object)
4631{
4632 GstStructure *config;
4633 GstCaps *oldcaps;
4634
4635 if (!v4l2object->pool)
4636 return NULL;
4637
4638 config = gst_buffer_pool_get_config(v4l2object->pool);
4639 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4640
4641 if (oldcaps)
4642 gst_caps_ref(oldcaps);
4643
4644 gst_structure_free(config);
4645
4646 return oldcaps;
4647}
4648
4649gboolean
4650gst_aml_v4l2_object_unlock(GstAmlV4l2Object *v4l2object)
4651{
4652 gboolean ret = TRUE;
4653
4654 GST_LOG_OBJECT(v4l2object->dbg_obj, "start flushing");
4655
4656 gst_poll_set_flushing(v4l2object->poll, TRUE);
4657
4658 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4659 gst_buffer_pool_set_flushing(v4l2object->pool, TRUE);
4660
4661 return ret;
4662}
4663
4664gboolean
4665gst_aml_v4l2_object_unlock_stop(GstAmlV4l2Object *v4l2object)
4666{
4667 gboolean ret = TRUE;
4668
4669 GST_LOG_OBJECT(v4l2object->dbg_obj, "stop flushing");
4670
4671 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4672 gst_buffer_pool_set_flushing(v4l2object->pool, FALSE);
4673
4674 gst_poll_set_flushing(v4l2object->poll, FALSE);
4675
4676 return ret;
4677}
4678
4679gboolean
4680gst_aml_v4l2_object_stop(GstAmlV4l2Object *v4l2object)
4681{
4682 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "stopping");
4683
4684 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
4685 goto done;
4686 if (!GST_AML_V4L2_IS_ACTIVE(v4l2object))
4687 goto done;
4688
4689 if (v4l2object->pool)
4690 {
4691 if (!gst_aml_v4l2_buffer_pool_orphan(&v4l2object->pool))
4692 {
4693 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "deactivating pool");
4694 gst_buffer_pool_set_active(v4l2object->pool, FALSE);
4695 gst_object_unref(v4l2object->pool);
4696 }
4697 v4l2object->pool = NULL;
4698 }
4699
4700 GST_AML_V4L2_SET_INACTIVE(v4l2object);
4701
4702done:
4703 return TRUE;
4704}
4705
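/* Build the caps this device can produce: every driver-reported format is
 * probed for its sizes/framerates and appended twice, once as plain
 * system-memory caps and once with the memory:DMABuf caps feature. */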
4706GstCaps *
4707gst_aml_v4l2_object_probe_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4708{
4709 GstCaps *ret;
4710 GSList *walk;
4711 GSList *formats;
4712
4713 formats = gst_aml_v4l2_object_get_format_list(v4l2object);
4714
4715 ret = gst_caps_new_empty();
4716
4717 if (v4l2object->keep_aspect && !v4l2object->par)
4718 {
4719 struct v4l2_cropcap cropcap;
4720
4721 memset(&cropcap, 0, sizeof(cropcap));
4722
4723 cropcap.type = v4l2object->type;
4724 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
4725 {
4726 if (errno != ENOTTY)
4727 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4728 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4729 g_strerror(errno));
4730 }
4731 else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator)
4732 {
4733 v4l2object->par = g_new0(GValue, 1);
4734 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
4735 gst_value_set_fraction(v4l2object->par, cropcap.pixelaspect.numerator,
4736 cropcap.pixelaspect.denominator);
4737 }
4738 }
4739
4740 for (walk = formats; walk; walk = walk->next)
4741 {
4742 struct v4l2_fmtdesc *format;
4743 GstStructure *template;
4744 GstCaps *tmp, *tmp2;
4745
4746 format = (struct v4l2_fmtdesc *)walk->data;
4747
4748 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(format->pixelformat);
4749
4750 if (!template)
4751 {
4752 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4753 "unknown format %" GST_FOURCC_FORMAT,
4754 GST_FOURCC_ARGS(format->pixelformat));
4755 continue;
4756 }
4757
4758 /* If we have a filter, check if we need to probe this format or not */
4759 if (filter)
4760 {
4761 GstCaps *format_caps = gst_caps_new_empty();
4762
4763 gst_caps_append_structure(format_caps, gst_structure_copy(template));
xuesong.jiange1a19662022-06-21 20:30:22 +08004764 GST_INFO_OBJECT(v4l2object->dbg_obj, "format_caps: %" GST_PTR_FORMAT, format_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004765
4766 if (!gst_caps_can_intersect(format_caps, filter))
4767 {
4768 gst_caps_unref(format_caps);
4769 gst_structure_free(template);
4770 continue;
4771 }
4772
4773 gst_caps_unref(format_caps);
4774 }
4775
4776 tmp = gst_aml_v4l2_object_probe_caps_for_format(v4l2object,
4777 format->pixelformat, template);
xuesong.jiange1a19662022-06-21 20:30:22 +08004778 GST_INFO_OBJECT(v4l2object->dbg_obj, "tmp caps: %" GST_PTR_FORMAT, tmp);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004779
4780 if (tmp)
4781 {
4782 tmp2 = gst_caps_copy(tmp);
4783 gst_caps_set_features_simple(tmp2, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
4784 gst_caps_append(ret, tmp);
4785 gst_caps_append(ret, tmp2);
4786 }
4787
4788 gst_structure_free(template);
4789 }
4790
4791 if (filter)
4792 {
4793 GstCaps *tmp;
4794
4795 tmp = ret;
4796 ret = gst_caps_intersect_full(filter, ret, GST_CAPS_INTERSECT_FIRST);
4797 gst_caps_unref(tmp);
4798 }
4799
4800 GST_INFO_OBJECT(v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
4801
4802 return ret;
4803}
4804
4805GstCaps *
4806gst_aml_v4l2_object_get_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4807{
4808 GstCaps *ret;
4809
4810 if (v4l2object->probed_caps == NULL)
4811 v4l2object->probed_caps = gst_aml_v4l2_object_probe_caps(v4l2object, NULL);
4812
4813 if (filter)
4814 {
4815 ret = gst_caps_intersect_full(filter, v4l2object->probed_caps,
4816 GST_CAPS_INTERSECT_FIRST);
4817 }
4818 else
4819 {
4820 ret = gst_caps_ref(v4l2object->probed_caps);
4821 }
4822
4823 return ret;
4824}
4825
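/* Answer a downstream ALLOCATION query on the capture side: pick between our
 * own V4L2 buffer pool and a downstream-provided pool depending on the io-mode
 * and whether downstream handles GstVideoMeta, then size and configure both. */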
4826gboolean
4827gst_aml_v4l2_object_decide_allocation(GstAmlV4l2Object *obj, GstQuery *query)
4828{
4829 GstCaps *caps;
4830 GstBufferPool *pool = NULL, *other_pool = NULL;
4831 GstStructure *config;
4832 guint size, min, max, own_min = 0;
4833 gboolean update;
4834 gboolean has_video_meta;
4835 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
4836 GstAllocator *allocator = NULL;
4837 GstAllocationParams params = {0};
4838
4839 GST_DEBUG_OBJECT(obj->dbg_obj, "decide allocation");
4840
4841 g_return_val_if_fail(obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
4842 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
4843 FALSE);
4844
4845 gst_query_parse_allocation(query, &caps, NULL);
4846
4847 if (obj->pool == NULL)
4848 {
4849 if (!gst_aml_v4l2_object_setup_pool(obj, caps))
4850 goto pool_failed;
4851 }
4852
4853 if (gst_query_get_n_allocation_params(query) > 0)
4854 gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
4855
4856 if (gst_query_get_n_allocation_pools(query) > 0)
4857 {
4858 gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
4859 update = TRUE;
4860 }
4861 else
4862 {
4863 pool = NULL;
4864 min = max = 0;
4865 size = 0;
4866 update = FALSE;
4867 }
4868
4869 GST_DEBUG_OBJECT(obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool);
4870
4871 has_video_meta =
4872 gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
4873
4874 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
4875
4876 gst_aml_v4l2_get_driver_min_buffers(obj);
4877 /* We can't share our own pool if it would exceed the V4L2 capacity */
4878 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
4879 can_share_own_pool = FALSE;
4880
4881 /* select a pool */
4882 switch (obj->mode)
4883 {
4884 case GST_V4L2_IO_RW:
4885 if (pool)
4886 {
4887 /* in READ/WRITE mode, prefer a downstream pool because our own pool
4888 * doesn't help much, we have to write to it as well */
4889 GST_DEBUG_OBJECT(obj->dbg_obj,
4890 "read/write mode: using downstream pool");
4891 /* use the biggest size, when we use our own pool we can't really do any
4892 * other size than what the hardware gives us but for downstream pools
4893 * we can try */
4894 size = MAX(size, obj->info.size);
4895 }
4896 else if (can_share_own_pool)
4897 {
4898 /* no downstream pool, use our own then */
4899 GST_DEBUG_OBJECT(obj->dbg_obj,
4900 "read/write mode: no downstream pool, using our own");
4901 pool = gst_object_ref(obj->pool);
4902 size = obj->info.size;
4903 pushing_from_our_pool = TRUE;
4904 }
4905 break;
4906
4907 case GST_V4L2_IO_USERPTR:
4908 case GST_V4L2_IO_DMABUF_IMPORT:
4909 /* in importing mode, prefer our own pool, and pass the other pool to
4910 * our own, so it can serve itself */
4911 if (pool == NULL)
4912 goto no_downstream_pool;
4913 gst_aml_v4l2_buffer_pool_set_other_pool(GST_AML_V4L2_BUFFER_POOL(obj->pool),
4914 pool);
4915 other_pool = pool;
4916 gst_object_unref(pool);
4917 pool = gst_object_ref(obj->pool);
4918 size = obj->info.size;
4919 break;
4920
4921 case GST_V4L2_IO_MMAP:
4922 case GST_V4L2_IO_DMABUF:
4923 /* in streaming mode, prefer our own pool */
4924 /* Check if we can use it ... */
4925 if (can_share_own_pool)
4926 {
4927 if (pool)
4928 gst_object_unref(pool);
4929 pool = gst_object_ref(obj->pool);
4930 size = obj->info.size;
4931 GST_DEBUG_OBJECT(obj->dbg_obj,
4932 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
4933 pushing_from_our_pool = TRUE;
4934 }
4935 else if (pool)
4936 {
4937 GST_DEBUG_OBJECT(obj->dbg_obj,
4938 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
4939 pool);
4940 }
4941 else
4942 {
4943 GST_DEBUG_OBJECT(obj->dbg_obj,
4944 "streaming mode: no usable pool, copying to generic pool");
4945 size = MAX(size, obj->info.size);
4946 }
4947 break;
4948 case GST_V4L2_IO_AUTO:
4949 default:
4950 GST_WARNING_OBJECT(obj->dbg_obj, "unhandled mode");
4951 break;
4952 }
4953
4954 if (size == 0)
4955 goto no_size;
4956
4957 /* If pushing from our own pool, configure it with queried minimum,
4958 * otherwise use the minimum required */
4959 if (pushing_from_our_pool)
4960 {
4961 /* When pushing from our own pool, we need what the downstream pool needs
4962 * to be able to fill the pipeline, plus the minimum the decoder requires
4963 * according to the driver, plus 2 more, so we don't end up with everything
4964 * downstream or held by the decoder. We account for 2 buffers for v4l2 so
4965 * that while one is being pushed downstream the other one can already be
4966 * queued for the next frame. */
4967 own_min = min + obj->min_buffers + 2;
4968
4969 /* If no allocation parameters were provided, allow for a few more
4970 * buffers and enable copy threshold */
4971 if (!update)
4972 {
4973 own_min += 2;
4974 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4975 TRUE);
4976 }
4977 else
4978 {
4979 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4980 FALSE);
4981 }
4982 }
4983 else
4984 {
4985 /* In this case we'll have to configure two buffer pools. For our buffer
4986 * pool, we'll need what the driver wants, and one more, so we can dequeue */
4987 own_min = obj->min_buffers + 1;
4988 own_min = MAX(own_min, GST_AML_V4L2_MIN_BUFFERS);
4989
4990 /* for the downstream pool, we keep what downstream wants, though ensure
4991 * at least a minimum if downstream didn't suggest anything (we are
4992 * expecting the base class to create a default one for the context) */
4993 min = MAX(min, GST_AML_V4L2_MIN_BUFFERS);
4994
4995 /* To import we need the other pool to hold at least own_min */
4996 if (obj->pool == pool)
4997 min += own_min;
4998 }
4999
5000 /* Request a bigger max, if one was suggested but it's too small */
5001 if (max != 0)
5002 max = MAX(min, max);
5003
5004 /* First step, configure our own pool */
5005 config = gst_buffer_pool_get_config(obj->pool);
5006
5007 if (obj->need_video_meta || has_video_meta)
5008 {
5009 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5010 gst_buffer_pool_config_add_option(config,
5011 GST_BUFFER_POOL_OPTION_VIDEO_META);
5012 }
5013
5014 gst_buffer_pool_config_set_allocator(config, allocator, &params);
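    /* Amlogic tweak: pad the capture pool with DEFAULT_EXTRA_CAPTURE_BUF_SIZE
     * extra buffers on top of the computed minimum, and pin the maximum to the
     * same value. */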
5015 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5016
5017 GST_DEBUG_OBJECT(obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config);
5018
5019 /* Our pool often needs to adjust the values */
5020 if (!gst_buffer_pool_set_config(obj->pool, config))
5021 {
5022 config = gst_buffer_pool_get_config(obj->pool);
5023
5024 GST_DEBUG_OBJECT(obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config);
5025
5026 /* our pool will adjust the maximum buffer, which we are fine with */
5027 if (!gst_buffer_pool_set_config(obj->pool, config))
5028 goto config_failed;
5029 }
5030
5031 /* Now configure the other pool if different */
5032 if (obj->pool != pool)
5033 other_pool = pool;
5034
5035 if (other_pool)
5036 {
5037 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)obj->element;
5038 if (self->is_secure_path)
5039 {
5040 params.flags |= GST_MEMORY_FLAG_LAST << 1; // in drmallocator GST_MEMORY_FLAG_LAST << 1 represents GST_MEMORY_FLAG_SECURE
5041 GST_DEBUG_OBJECT(obj, "set secure flag for drmbufferpool flag:0x%x", params.flags);
5042 }
5043 config = gst_buffer_pool_get_config(other_pool);
5044 gst_buffer_pool_config_set_allocator(config, allocator, &params);
5045 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5046 gst_buffer_pool_config_set_video_alignment(config, &obj->align);
5047
5048 GST_DEBUG_OBJECT(obj->dbg_obj, "setting other pool config to %" GST_PTR_FORMAT, config);
5049
5050 /* if downstream supports video metadata, add this to the pool config */
5051 if (has_video_meta)
5052 {
5053 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5054 gst_buffer_pool_config_add_option(config,
5055 GST_BUFFER_POOL_OPTION_VIDEO_META);
5056 }
5057
5058 if (!gst_buffer_pool_set_config(other_pool, config))
5059 {
5060 config = gst_buffer_pool_get_config(other_pool);
5061
5062 if (!gst_buffer_pool_config_validate_params(config, caps, size, min,
5063 max))
5064 {
5065 gst_structure_free(config);
5066 goto config_failed;
5067 }
5068
5069 if (!gst_buffer_pool_set_config(other_pool, config))
5070 goto config_failed;
5071 }
5072 }
5073
5074 if (pool)
5075 {
5076 /* For simplicity, simply read back the active configuration, so our base
5077 * class gets the right information */
5078 config = gst_buffer_pool_get_config(pool);
5079 gst_buffer_pool_config_get_params(config, NULL, &size, &min, &max);
5080 gst_structure_free(config);
5081 }
5082
5083 if (update)
5084 gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
5085 else
5086 gst_query_add_allocation_pool(query, pool, size, min, max);
5087
5088 if (allocator)
5089 gst_object_unref(allocator);
5090
5091 if (pool)
5092 gst_object_unref(pool);
5093
5094 return TRUE;
5095
5096pool_failed:
5097{
5098 /* setup_pool already sent the error */
5099 goto cleanup;
5100}
5101config_failed:
5102{
5103 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5104 (_("Failed to configure internal buffer pool.")), (NULL));
5105 goto cleanup;
5106}
5107no_size:
5108{
5109 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5110 (_("Video device did not suggest any buffer size.")), (NULL));
5111 goto cleanup;
5112}
5113cleanup:
5114{
5115 if (allocator)
5116 gst_object_unref(allocator);
5117
5118 if (pool)
5119 gst_object_unref(pool);
5120 return FALSE;
5121}
5122no_downstream_pool:
5123{
5124 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5125 (_("No downstream pool to import from.")),
5126 ("When importing DMABUF or USERPTR, we need a pool to import from"));
5127 return FALSE;
5128}
5129}
5130
5131gboolean
5132gst_aml_v4l2_object_propose_allocation(GstAmlV4l2Object *obj, GstQuery *query)
5133{
5134 GstBufferPool *pool;
5135 /* we need at least 2 buffers to operate */
5136 guint size, min, max;
5137 GstCaps *caps;
5138 gboolean need_pool;
5139
5140 /* Set defaults allocation parameters */
5141 size = obj->info.size;
5142 min = GST_AML_V4L2_MIN_BUFFERS;
5143 max = VIDEO_MAX_FRAME;
5144
5145 gst_query_parse_allocation(query, &caps, &need_pool);
5146
5147 if (caps == NULL)
5148 goto no_caps;
5149
5150 switch (obj->mode)
5151 {
5152 case GST_V4L2_IO_MMAP:
5153 case GST_V4L2_IO_DMABUF:
5154 if ((pool = obj->pool))
5155 gst_object_ref(pool);
5156 break;
5157 default:
5158 pool = NULL;
5159 break;
5160 }
5161
5162 if (pool != NULL)
5163 {
5164 GstCaps *pcaps;
5165 GstStructure *config;
5166
5167 /* we had a pool, check caps */
5168 config = gst_buffer_pool_get_config(pool);
5169 gst_buffer_pool_config_get_params(config, &pcaps, NULL, NULL, NULL);
5170
5171 GST_DEBUG_OBJECT(obj->dbg_obj,
5172 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
5173 if (!gst_caps_is_equal(caps, pcaps))
5174 {
5175 gst_structure_free(config);
5176 gst_object_unref(pool);
5177 goto different_caps;
5178 }
5179 gst_structure_free(config);
5180 }
5181 gst_aml_v4l2_get_driver_min_buffers(obj);
5182
5183 min = MAX(obj->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
5184
5185 gst_query_add_allocation_pool(query, pool, size, min, max);
5186
5187 /* we also support various metadata */
5188 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
5189
5190 if (pool)
5191 gst_object_unref(pool);
5192
5193 return TRUE;
5194
5195 /* ERRORS */
5196no_caps:
5197{
5198 GST_DEBUG_OBJECT(obj->dbg_obj, "no caps specified");
5199 return FALSE;
5200}
5201different_caps:
5202{
5203 /* different caps, we can't use this pool */
5204 GST_DEBUG_OBJECT(obj->dbg_obj, "pool has different caps");
5205 return FALSE;
5206}
5207}
5208
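/* Decide whether a downstream buffer can be imported directly (USERPTR or
 * DMABUF import modes only). Larger downstream strides/offsets are pushed to
 * the driver with VIDIOC_S_FMT; smaller ones, a mismatching plane count or
 * non-DMABuf memory make the import fail. */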
5209gboolean
5210gst_aml_v4l2_object_try_import(GstAmlV4l2Object *obj, GstBuffer *buffer)
5211{
5212 GstVideoMeta *vmeta;
5213 guint n_mem = gst_buffer_n_memory(buffer);
5214
5215 /* only import if requested */
5216 switch (obj->mode)
5217 {
5218 case GST_V4L2_IO_USERPTR:
5219 case GST_V4L2_IO_DMABUF_IMPORT:
5220 break;
5221 default:
5222 GST_DEBUG_OBJECT(obj->dbg_obj,
5223 "The io-mode does not enable importation");
5224 return FALSE;
5225 }
5226
5227 vmeta = gst_buffer_get_video_meta(buffer);
5228 if (!vmeta && obj->need_video_meta)
5229 {
5230 GST_DEBUG_OBJECT(obj->dbg_obj, "Downstream buffer uses standard "
5231 "stride/offset while the driver does not.");
5232 return FALSE;
5233 }
5234
5235 /* we need matching strides/offsets and size */
5236 if (vmeta)
5237 {
5238 guint p;
5239 gboolean need_fmt_update = FALSE;
5240
5241 if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES(&obj->info))
5242 {
5243 GST_WARNING_OBJECT(obj->dbg_obj,
5244 "Cannot import buffers with different number planes");
5245 return FALSE;
5246 }
5247
5248 for (p = 0; p < vmeta->n_planes; p++)
5249 {
5250 if (vmeta->stride[p] < obj->info.stride[p])
5251 {
5252 GST_DEBUG_OBJECT(obj->dbg_obj,
5253 "Not importing as remote stride %i is smaller then %i on plane %u",
5254 vmeta->stride[p], obj->info.stride[p], p);
5255 return FALSE;
5256 }
5257 else if (vmeta->stride[p] > obj->info.stride[p])
5258 {
5259 need_fmt_update = TRUE;
5260 }
5261
5262 if (vmeta->offset[p] < obj->info.offset[p])
5263 {
5264 GST_DEBUG_OBJECT(obj->dbg_obj,
5265 "Not importing as offset %" G_GSIZE_FORMAT
5266 " is smaller then %" G_GSIZE_FORMAT " on plane %u",
5267 vmeta->offset[p], obj->info.offset[p], p);
5268 return FALSE;
5269 }
5270 else if (vmeta->offset[p] > obj->info.offset[p])
5271 {
5272 need_fmt_update = TRUE;
5273 }
5274 }
5275
5276 if (need_fmt_update)
5277 {
5278 struct v4l2_format format;
5279 gint wanted_stride[GST_VIDEO_MAX_PLANES] = {
5280 0,
5281 };
5282
5283 format = obj->format;
5284
5285 /* update the current format with the stride we want to import from */
5286 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5287 {
5288 guint i;
5289
5290 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted strides:");
5291
5292 for (i = 0; i < obj->n_v4l2_planes; i++)
5293 {
5294 gint stride = vmeta->stride[i];
5295
5296 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5297 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5298
5299 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
5300 wanted_stride[i] = stride;
5301 GST_DEBUG_OBJECT(obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
5302 }
5303 }
5304 else
5305 {
5306 gint stride = vmeta->stride[0];
5307
5308 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted stride: %i", stride);
5309
5310 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5311 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5312
5313 format.fmt.pix.bytesperline = stride;
5314 wanted_stride[0] = stride;
5315 }
5316
5317 if (obj->ioctl(obj->video_fd, VIDIOC_S_FMT, &format) < 0)
5318 {
5319 GST_WARNING_OBJECT(obj->dbg_obj,
5320 "Something went wrong trying to update current format: %s",
5321 g_strerror(errno));
5322 return FALSE;
5323 }
5324
5325 gst_aml_v4l2_object_save_format(obj, obj->fmtdesc, &format, &obj->info,
5326 &obj->align);
5327
5328 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5329 {
5330 guint i;
5331
5332 for (i = 0; i < obj->n_v4l2_planes; i++)
5333 {
5334 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i])
5335 {
5336 GST_DEBUG_OBJECT(obj->dbg_obj,
5337 "[%i] Driver did not accept the new stride (wants %i, got %i)",
5338 i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
5339 wanted_stride[i]);
5340 return FALSE;
5341 }
5342 }
5343 }
5344 else
5345 {
5346 if (format.fmt.pix.bytesperline != wanted_stride[0])
5347 {
5348 GST_DEBUG_OBJECT(obj->dbg_obj,
5349 "Driver did not accept the new stride (wants %i, got %i)",
5350 format.fmt.pix.bytesperline, wanted_stride[0]);
5351 return FALSE;
5352 }
5353 }
5354 }
5355 }
5356
5357 /* we can always import a single-memory buffer, but otherwise we need the
5358 * same number of memory objects. */
5359 if (n_mem != 1 && n_mem != obj->n_v4l2_planes)
5360 {
5361 GST_DEBUG_OBJECT(obj->dbg_obj, "Can only import %i memory, "
5362 "buffers contains %u memory",
5363 obj->n_v4l2_planes, n_mem);
5364 return FALSE;
5365 }
5366
5367 /* For DMABuf importation we need DMABuf of course */
5368 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT)
5369 {
5370 guint i;
5371
5372 for (i = 0; i < n_mem; i++)
5373 {
5374 GstMemory *mem = gst_buffer_peek_memory(buffer, i);
5375
5376 if (!gst_is_dmabuf_memory(mem))
5377 {
5378 GST_DEBUG_OBJECT(obj->dbg_obj, "Cannot import non-DMABuf memory.");
5379 return FALSE;
5380 }
5381 }
5382 }
5383
5384 /* for the remaining, only the kernel driver can tell */
5385 return TRUE;
5386}
5387
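/* When the output side imports DMABufs, the stream is treated as secure video:
 * query the vendor AML_V4L2_SET_DRMMODE control and, if the driver exposes it,
 * enable it so the decoder switches to DRM (secure) mode. */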
5388gboolean gst_aml_v4l2_set_drm_mode(GstAmlV4l2Object *v4l2object)
5389{
5390 /* On Amlogic, an output object using GST_V4L2_IO_DMABUF_IMPORT implies secure memory */
5391 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
5392 {
5393 int rc;
5394 struct v4l2_queryctrl queryctrl;
5395 struct v4l2_control control;
5396
5397 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)v4l2object->element;
5398 self->is_secure_path = TRUE;
5399
5400#define V4L2_CID_USER_AMLOGIC_BASE (V4L2_CID_USER_BASE + 0x1100)
5401#define AML_V4L2_SET_DRMMODE (V4L2_CID_USER_AMLOGIC_BASE + 0)
5402 memset(&queryctrl, 0, sizeof(queryctrl));
5403 queryctrl.id = AML_V4L2_SET_DRMMODE;
5404
5405 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_QUERYCTRL, &queryctrl);
5406 if (rc == 0)
5407 {
5408 if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED))
5409 {
5410 memset(&control, 0, sizeof(control));
5411 control.id = AML_V4L2_SET_DRMMODE;
5412 control.value = 1;
5413 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_CTRL, &control);
5414 if (rc != 0)
5415 {
5416 GST_ERROR_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE fail: rc %d", rc);
5417 return FALSE;
5418 }
5419 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE set succ");
5420 return TRUE;
5421 }
5422 else
5423 {
5424 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE is disabled");
5425 return TRUE;
5426 }
5427 }
5428 else
5429 {
5430 GST_ERROR_OBJECT(v4l2object, "VIDIOC_QUERYCTRL for AML_V4L2_SET_DRMMODE fail");
5431 return FALSE;
5432 }
5433 }
5434 else
5435 {
5436 GST_DEBUG_OBJECT(v4l2object, "req mode is not GST_V4L2_IO_DMABUF_IMPORT, DRM mode does not need to be configured");
5437 return TRUE;
5438 }
5439}