 1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include <config.h>
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <string.h>
28#include <sys/mman.h>
29#include <sys/ioctl.h>
 30#include <stdio.h>
 31
32#ifdef HAVE_GUDEV
33#include <gudev/gudev.h>
34#endif
35
36#include "ext/videodev2.h"
37#include "gstamlv4l2object.h"
38
39#include "gst/gst-i18n-plugin.h"
40
41#include <gst/video/video.h>
42#include <gst/allocators/gstdmabuf.h>
43
44GST_DEBUG_CATEGORY_EXTERN(aml_v4l2_debug);
45#define GST_CAT_DEFAULT aml_v4l2_debug
46
47#define DEFAULT_PROP_DEVICE_NAME NULL
48#define DEFAULT_PROP_DEVICE_FD -1
49#define DEFAULT_PROP_FLAGS 0
50#define DEFAULT_PROP_TV_NORM 0
51#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
52
53#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
54#define DEFAULT_EXTRA_CAPTURE_BUF_SIZE 3
55
 56#define V4L2_CONFIG_PARM_DECODE_CFGINFO (1 << 0)
57#define V4L2_CONFIG_PARM_DECODE_PSINFO (1 << 1)
58#define V4L2_CONFIG_PARM_DECODE_HDRINFO (1 << 2)
59#define V4L2_CONFIG_PARM_DECODE_CNTINFO (1 << 3)
60
 61enum
62{
63 PROP_0,
64 V4L2_STD_OBJECT_PROPS,
65};
66
67/*
68 * common format / caps utilities:
69 */
70typedef enum
71{
72 GST_V4L2_RAW = 1 << 0,
73 GST_V4L2_CODEC = 1 << 1,
74 GST_V4L2_TRANSPORT = 1 << 2,
75 GST_V4L2_NO_PARSE = 1 << 3,
76 GST_V4L2_ALL = 0xffff
77} GstAmlV4L2FormatFlags;
78
79typedef struct
80{
81 guint32 format;
82 gboolean dimensions;
83 GstAmlV4L2FormatFlags flags;
84} GstAmlV4L2FormatDesc;
85
86static const GstAmlV4L2FormatDesc gst_aml_v4l2_formats[] = {
87 /* RGB formats */
88 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
102
103 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
104 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
105 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
109
110 /* Grey formats */
111 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
112 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
113 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
114 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
118 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
119
120 /* Palette formats */
121 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
122
123 /* Chrominance formats */
124 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
125
126 /* Luminance+Chrominance formats */
127 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
128 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
129 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
130 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
131 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
132 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
142 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
143 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
144 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
148
149 /* two planes -- one Y, one Cr + Cb interleaved */
150 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
151 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
153 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
154 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
155 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
156 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
157 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
158 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
162
163 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
164 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
165 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
166 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
167 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
168
169 /* compressed formats */
170 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
171 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
172 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
173 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
174 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
175 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
176 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
179 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
180 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
181 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
182 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
183 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
184 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
185 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
186 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
188 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
189 {V4L2_PIX_FMT_AV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
190
191 /* Vendor-specific formats */
192 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
193 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
194 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
195 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
196};
197
198#define GST_AML_V4L2_FORMAT_COUNT (G_N_ELEMENTS(gst_aml_v4l2_formats))
199
200static GSList *gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object);
201
202#define GST_TYPE_AML_V4L2_DEVICE_FLAGS (gst_aml_v4l2_device_get_type())
203static GType
204gst_aml_v4l2_device_get_type(void)
205{
206 static GType v4l2_device_type = 0;
207
208 if (v4l2_device_type == 0)
209 {
210 static const GFlagsValue values[] = {
211 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
212 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
213 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
214
215 {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
216 {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
217
218 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
219 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
220
221 {0, NULL, NULL}};
222
223 v4l2_device_type =
224 g_flags_register_static("GstAmlV4l2DeviceTypeFlags", values);
225 }
226
227 return v4l2_device_type;
228}
229
230GType gst_aml_v4l2_io_mode_get_type(void)
231{
232 static GType v4l2_io_mode = 0;
233
234 if (!v4l2_io_mode)
235 {
236 static const GEnumValue io_modes[] = {
237 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
238 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
239 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
240 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
241 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
242 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
243 "dmabuf-import"},
244
245 {0, NULL, NULL}};
246 v4l2_io_mode = g_enum_register_static("GstAmlV4l2IOMode", io_modes);
247 }
248 return v4l2_io_mode;
249}
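/* Illustrative note (an assumption, not part of the original source): the enum
 * nicks registered above ("auto", "mmap", "dmabuf", "dmabuf-import", ...) are
 * the strings GStreamer's property deserialization accepts, so a pipeline
 * could request a mode on the command line with something like
 *
 *   gst-launch-1.0 ... ! <amlv4l2 decoder element> capture-io-mode=dmabuf-import ! ...
 *
 * The element name is a placeholder; "capture-io-mode" itself is installed
 * below in gst_aml_v4l2_object_install_m2m_properties_helper().
 */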
250
251void gst_aml_v4l2_object_install_properties_helper(GObjectClass *gobject_class,
252 const char *default_device)
253{
254 g_object_class_install_property(gobject_class, PROP_DEVICE,
255 g_param_spec_string("device", "Device", "Device location",
256 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
257 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
258 g_param_spec_string("device-name", "Device name",
259 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
260 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
261 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
262 g_param_spec_int("device-fd", "File descriptor",
263 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
264 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
265 g_object_class_install_property(gobject_class, PROP_FLAGS,
266 g_param_spec_flags("flags", "Flags", "Device type flags",
267 GST_TYPE_AML_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
268 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
269
270 /**
271 * GstV4l2Src:brightness:
272 *
273 * Picture brightness, or more precisely, the black level
274 */
275 g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
276 g_param_spec_int("brightness", "Brightness",
277 "Picture brightness, or more precisely, the black level", G_MININT,
278 G_MAXINT, 0,
279 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
280 /**
281 * GstV4l2Src:contrast:
282 *
283 * Picture contrast or luma gain
284 */
285 g_object_class_install_property(gobject_class, PROP_CONTRAST,
286 g_param_spec_int("contrast", "Contrast",
287 "Picture contrast or luma gain", G_MININT,
288 G_MAXINT, 0,
289 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
290 /**
291 * GstV4l2Src:saturation:
292 *
293 * Picture color saturation or chroma gain
294 */
295 g_object_class_install_property(gobject_class, PROP_SATURATION,
296 g_param_spec_int("saturation", "Saturation",
297 "Picture color saturation or chroma gain", G_MININT,
298 G_MAXINT, 0,
299 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
300 /**
301 * GstV4l2Src:hue:
302 *
303 * Hue or color balance
304 */
305 g_object_class_install_property(gobject_class, PROP_HUE,
306 g_param_spec_int("hue", "Hue",
307 "Hue or color balance", G_MININT,
308 G_MAXINT, 0,
309 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
310
311 /**
312 * GstV4l2Src:io-mode:
313 *
314 * IO Mode
315 */
316 g_object_class_install_property(gobject_class, PROP_IO_MODE,
317 g_param_spec_enum("io-mode", "IO mode",
318 "I/O mode",
319 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
320 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
321
322 /**
323 * GstV4l2Src:extra-controls:
324 *
325 * Additional v4l2 controls for the device. The controls are identified
326 * by the control name (lowercase with '_' for any non-alphanumeric
327 * characters).
328 *
329 * Since: 1.2
330 */
331 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
332 g_param_spec_boxed("extra-controls", "Extra Controls",
333 "Extra v4l2 controls (CIDs) for the device",
334 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
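    /* Illustrative sketch (assumed application-side usage, not code from this
     * file): the boxed GstStructure can be filled with V4L2 control names,
     * e.g.
     *
     *   GstStructure *ctrls =
     *       gst_structure_new ("controls",
     *           "brightness", G_TYPE_INT, 128, NULL);
     *   g_object_set (element, "extra-controls", ctrls, NULL);
     *   gst_structure_free (ctrls);
     *
     * "element", the control name and the value are placeholders; which
     * controls actually exist depends on the driver.
     */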
335
336 /**
337 * GstV4l2Src:pixel-aspect-ratio:
338 *
339 * The pixel aspect ratio of the device. This overwrites the pixel aspect
340 * ratio queried from the device.
341 *
342 * Since: 1.2
343 */
344 g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
345 g_param_spec_string("pixel-aspect-ratio", "Pixel Aspect Ratio",
346 "Overwrite the pixel aspect ratio of the device", "1/1",
347 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
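    /* Illustrative sketch: this is a plain string property whose setter
     * transforms the value into a GST_TYPE_FRACTION, so an application could
     * override the device PAR with e.g.
     *
     *   g_object_set (element, "pixel-aspect-ratio", "16/15", NULL);
     *
     * "element" and the 16/15 ratio are placeholders for this example.
     */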
348
349 /**
350 * GstV4l2Src:force-aspect-ratio:
351 *
352 * When enabled, the pixel aspect ratio queried from the device or set
353 * with the pixel-aspect-ratio property will be enforced.
354 *
355 * Since: 1.2
356 */
357 g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
358 g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
359 "When enabled, the pixel aspect ratio will be enforced", TRUE,
360 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
361}
362
363void gst_aml_v4l2_object_install_m2m_properties_helper(GObjectClass *gobject_class)
364{
365 g_object_class_install_property(gobject_class, PROP_DEVICE,
366 g_param_spec_string("device", "Device", "Device location",
367 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
368
369 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
370 g_param_spec_string("device-name", "Device name",
371 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
372 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
373
374 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
375 g_param_spec_int("device-fd", "File descriptor",
376 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
377 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
378
379 g_object_class_install_property(gobject_class, PROP_OUTPUT_IO_MODE,
380 g_param_spec_enum("output-io-mode", "Output IO mode",
381 "Output side I/O mode (matches sink pad)",
382 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
383 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
384
385 g_object_class_install_property(gobject_class, PROP_CAPTURE_IO_MODE,
386 g_param_spec_enum("capture-io-mode", "Capture IO mode",
387 "Capture I/O mode (matches src pad)",
388 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
389 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
390
391 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
392 g_param_spec_boxed("extra-controls", "Extra Controls",
393 "Extra v4l2 controls (CIDs) for the device",
394 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
395
396 g_object_class_install_property(gobject_class, PROP_DUMP_FRAME_LOCATION,
397 g_param_spec_string("dump-frame-location", "dump frame location",
398 "Location of the file to write decoder frames", NULL,
399 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
400}
401
402/* Support for 32bit off_t, this wrapper is casting off_t to gint64 */
403#ifdef HAVE_LIBV4L2
404#if SIZEOF_OFF_T < 8
405
406static gpointer
407v4l2_mmap_wrapper(gpointer start, gsize length, gint prot, gint flags, gint fd,
408 off_t offset)
409{
410 return v4l2_mmap(start, length, prot, flags, fd, (gint64)offset);
411}
412
413#define v4l2_mmap v4l2_mmap_wrapper
414
415#endif /* SIZEOF_OFF_T < 8 */
416#endif /* HAVE_LIBV4L2 */
417
418GstAmlV4l2Object *
419gst_aml_v4l2_object_new(GstElement *element,
420 GstObject *debug_object,
421 enum v4l2_buf_type type,
422 const char *default_device,
423 GstAmlV4l2GetInOutFunction get_in_out_func,
424 GstAmlV4l2SetInOutFunction set_in_out_func,
425 GstAmlV4l2UpdateFpsFunction update_fps_func)
426{
427 GstAmlV4l2Object *v4l2object;
428
429 /*
430 * some default values
431 */
432 v4l2object = g_new0(GstAmlV4l2Object, 1);
433
434 if ((V4L2_BUF_TYPE_VIDEO_CAPTURE == type || V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type))
435 {
436 const char *default_mode = getenv("GST_DEFAULT_V4L2_BUF_MODE");
 437 GST_DEBUG("amlmodbuf GST_DEFAULT_V4L2_BUF_MODE:%s", default_mode);
438 if (default_mode)
439 {
440 if (strcmp(default_mode, "DMA_BUF_IMPORT") == 0)
441 v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
442 else if (strcmp(default_mode, "DMA_BUF") == 0)
443 v4l2object->req_mode = GST_V4L2_IO_DMABUF;
444 GST_DEBUG("amlmodbuf set default buf default_mode:%d", v4l2object->req_mode);
445 }
446 }
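    /* Illustrative sketch: only "DMA_BUF_IMPORT" and "DMA_BUF" are recognized
     * above, so a capture-side default could be forced from the environment
     * before starting the application, e.g.
     *
     *   export GST_DEFAULT_V4L2_BUF_MODE=DMA_BUF_IMPORT
     *   gst-launch-1.0 <pipeline using this element>
     *
     * The pipeline is a placeholder; any other value leaves req_mode at its
     * zero-initialized default (GST_V4L2_IO_AUTO).
     */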
447
448 v4l2object->type = type;
449 v4l2object->formats = NULL;
450
451 v4l2object->element = element;
452 v4l2object->dbg_obj = debug_object;
453 v4l2object->get_in_out_func = get_in_out_func;
454 v4l2object->set_in_out_func = set_in_out_func;
455 v4l2object->update_fps_func = update_fps_func;
456
457 v4l2object->video_fd = -1;
458 v4l2object->active = FALSE;
459 v4l2object->videodev = g_strdup(default_device);
460
461 v4l2object->norms = NULL;
462 v4l2object->channels = NULL;
463 v4l2object->colors = NULL;
464
465 v4l2object->keep_aspect = TRUE;
466
467 v4l2object->n_v4l2_planes = 0;
468
469 v4l2object->no_initial_format = FALSE;
470
471 /* We now disable libv4l2 by default, but have an env to enable it. */
472#ifdef HAVE_LIBV4L2
473 if (g_getenv("GST_V4L2_USE_LIBV4L2"))
474 {
475 v4l2object->fd_open = v4l2_fd_open;
476 v4l2object->close = v4l2_close;
477 v4l2object->dup = v4l2_dup;
478 v4l2object->ioctl = v4l2_ioctl;
479 v4l2object->read = v4l2_read;
480 v4l2object->mmap = v4l2_mmap;
481 v4l2object->munmap = v4l2_munmap;
482 }
483 else
484#endif
485 {
486 v4l2object->fd_open = NULL;
487 v4l2object->close = close;
488 v4l2object->dup = dup;
489 v4l2object->ioctl = ioctl;
490 v4l2object->read = read;
491 v4l2object->mmap = mmap;
492 v4l2object->munmap = munmap;
493 }
494 v4l2object->poll = gst_poll_new(TRUE);
495 v4l2object->can_wait_event = FALSE;
496 v4l2object->can_poll_device = TRUE;
497 v4l2object->tvin_port = -1;
498
499 v4l2object->dumpframefile = NULL;
500
501 return v4l2object;
502}
503
504static gboolean gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object);
505
506void gst_aml_v4l2_object_destroy(GstAmlV4l2Object *v4l2object)
507{
508 g_return_if_fail(v4l2object != NULL);
509
510 g_free(v4l2object->videodev);
511
512 g_free(v4l2object->channel);
513
514 if (v4l2object->formats)
515 {
516 gst_aml_v4l2_object_clear_format_list(v4l2object);
517 }
518
519 if (v4l2object->probed_caps)
520 {
521 gst_caps_unref(v4l2object->probed_caps);
522 }
523
524 if (v4l2object->extra_controls)
525 {
526 gst_structure_free(v4l2object->extra_controls);
527 }
528
529 gst_poll_free(v4l2object->poll);
530
531 g_free(v4l2object->dumpframefile);
532
533 g_free(v4l2object);
534}
535
536static gboolean
537gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object)
538{
539 g_slist_foreach(v4l2object->formats, (GFunc)g_free, NULL);
540 g_slist_free(v4l2object->formats);
541 v4l2object->formats = NULL;
542
543 return TRUE;
544}
545
546static gint
547gst_aml_v4l2_object_prop_to_cid(guint prop_id)
548{
549 gint cid = -1;
550
551 switch (prop_id)
552 {
553 case PROP_BRIGHTNESS:
554 cid = V4L2_CID_BRIGHTNESS;
555 break;
556 case PROP_CONTRAST:
557 cid = V4L2_CID_CONTRAST;
558 break;
559 case PROP_SATURATION:
560 cid = V4L2_CID_SATURATION;
561 break;
562 case PROP_HUE:
563 cid = V4L2_CID_HUE;
564 break;
565 default:
566 GST_WARNING("unmapped property id: %d", prop_id);
567 }
568 return cid;
569}
570
571gboolean
572gst_aml_v4l2_object_set_property_helper(GstAmlV4l2Object *v4l2object,
573 guint prop_id, const GValue *value, GParamSpec *pspec)
574{
575 switch (prop_id)
576 {
577 case PROP_DEVICE:
578 g_free(v4l2object->videodev);
579 v4l2object->videodev = g_value_dup_string(value);
580 break;
581 case PROP_BRIGHTNESS:
582 case PROP_CONTRAST:
583 case PROP_SATURATION:
584 case PROP_HUE:
585 {
586 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
587
588 if (cid != -1)
589 {
590 if (GST_AML_V4L2_IS_OPEN(v4l2object))
591 {
592 gst_aml_v4l2_set_attribute(v4l2object, cid, g_value_get_int(value));
593 }
594 }
595 return TRUE;
596 }
597 break;
598 case PROP_IO_MODE:
599 v4l2object->req_mode = g_value_get_enum(value);
600 break;
601 case PROP_CAPTURE_IO_MODE:
602 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
603 v4l2object->req_mode = g_value_get_enum(value);
604 break;
605 case PROP_OUTPUT_IO_MODE:
606 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
607 v4l2object->req_mode = g_value_get_enum(value);
608 break;
609 case PROP_EXTRA_CONTROLS:
610 {
611 const GstStructure *s = gst_value_get_structure(value);
612
613 if (v4l2object->extra_controls)
614 gst_structure_free(v4l2object->extra_controls);
615
616 v4l2object->extra_controls = s ? gst_structure_copy(s) : NULL;
617 if (GST_AML_V4L2_IS_OPEN(v4l2object))
618 gst_aml_v4l2_set_controls(v4l2object, v4l2object->extra_controls);
619 break;
620 }
621 case PROP_PIXEL_ASPECT_RATIO:
622 if (v4l2object->par)
623 {
624 g_value_unset(v4l2object->par);
625 g_free(v4l2object->par);
626 }
627 v4l2object->par = g_new0(GValue, 1);
628 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
629 if (!g_value_transform(value, v4l2object->par))
630 {
631 g_warning("Could not transform string to aspect ratio");
632 gst_value_set_fraction(v4l2object->par, 1, 1);
633 }
634
635 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set PAR to %d/%d",
636 gst_value_get_fraction_numerator(v4l2object->par),
637 gst_value_get_fraction_denominator(v4l2object->par));
638 break;
639 case PROP_FORCE_ASPECT_RATIO:
640 v4l2object->keep_aspect = g_value_get_boolean(value);
641 break;
642 case PROP_DUMP_FRAME_LOCATION:
643 g_free(v4l2object->dumpframefile);
644 v4l2object->dumpframefile = g_value_dup_string(value);
645 break;
646 default:
647 return FALSE;
648 break;
649 }
650 return TRUE;
651}
652
653gboolean
654gst_aml_v4l2_object_get_property_helper(GstAmlV4l2Object *v4l2object,
655 guint prop_id, GValue *value, GParamSpec *pspec)
656{
657 switch (prop_id)
658 {
659 case PROP_DEVICE:
660 g_value_set_string(value, v4l2object->videodev);
661 break;
662 case PROP_DEVICE_NAME:
663 {
664 const guchar *name = NULL;
665
666 if (GST_AML_V4L2_IS_OPEN(v4l2object))
667 name = v4l2object->vcap.card;
668
669 g_value_set_string(value, (gchar *)name);
670 break;
671 }
672 case PROP_DEVICE_FD:
673 {
674 if (GST_AML_V4L2_IS_OPEN(v4l2object))
675 g_value_set_int(value, v4l2object->video_fd);
676 else
677 g_value_set_int(value, DEFAULT_PROP_DEVICE_FD);
678 break;
679 }
680 case PROP_FLAGS:
681 {
682 guint flags = 0;
683
684 if (GST_AML_V4L2_IS_OPEN(v4l2object))
685 {
686 flags |= v4l2object->device_caps &
687 (V4L2_CAP_VIDEO_CAPTURE |
688 V4L2_CAP_VIDEO_OUTPUT |
689 V4L2_CAP_VIDEO_OVERLAY |
690 V4L2_CAP_VBI_CAPTURE |
691 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
692
693 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
694 flags |= V4L2_CAP_VIDEO_CAPTURE;
695
696 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
697 flags |= V4L2_CAP_VIDEO_OUTPUT;
698 }
699 g_value_set_flags(value, flags);
700 break;
701 }
702 case PROP_BRIGHTNESS:
703 case PROP_CONTRAST:
704 case PROP_SATURATION:
705 case PROP_HUE:
706 {
707 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
708
709 if (cid != -1)
710 {
711 if (GST_AML_V4L2_IS_OPEN(v4l2object))
712 {
713 gint v;
714 if (gst_aml_v4l2_get_attribute(v4l2object, cid, &v))
715 {
716 g_value_set_int(value, v);
717 }
718 }
719 }
720 return TRUE;
721 }
722 break;
723 case PROP_IO_MODE:
724 g_value_set_enum(value, v4l2object->req_mode);
725 break;
726 case PROP_CAPTURE_IO_MODE:
727 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
728 g_value_set_enum(value, v4l2object->req_mode);
729 break;
730 case PROP_OUTPUT_IO_MODE:
731 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
732 g_value_set_enum(value, v4l2object->req_mode);
733 break;
734 case PROP_EXTRA_CONTROLS:
735 gst_value_set_structure(value, v4l2object->extra_controls);
736 break;
737 case PROP_PIXEL_ASPECT_RATIO:
738 if (v4l2object->par)
739 g_value_transform(v4l2object->par, value);
740 break;
741 case PROP_FORCE_ASPECT_RATIO:
742 g_value_set_boolean(value, v4l2object->keep_aspect);
743 break;
744 case PROP_DUMP_FRAME_LOCATION:
745 g_value_set_string(value, v4l2object->dumpframefile);
746 break;
747 default:
748 return FALSE;
749 break;
750 }
751 return TRUE;
752}
753
754static void
755gst_aml_v4l2_get_driver_min_buffers(GstAmlV4l2Object *v4l2object)
756{
757 struct v4l2_control control = {
758 0,
759 };
760
761 g_return_if_fail(GST_AML_V4L2_IS_OPEN(v4l2object));
762
763 if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
764 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
765 else
766 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
767
768 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0)
769 {
770 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
771 "driver requires a minimum of %d buffers", control.value);
772 v4l2object->min_buffers = control.value;
773 }
774 else
775 {
776 v4l2object->min_buffers = 0;
777 }
778}
779
780gboolean
781gst_aml_v4l2_object_open(GstAmlV4l2Object *v4l2object)
782{
783 if (!gst_aml_v4l2_open(v4l2object))
784 return FALSE;
785
786 return TRUE;
787}
788
789gboolean
790gst_aml_v4l2_object_open_shared(GstAmlV4l2Object *v4l2object, GstAmlV4l2Object *other)
791{
792 gboolean ret;
793
794 ret = gst_aml_v4l2_dup(v4l2object, other);
795
796 if (ret && !V4L2_TYPE_IS_OUTPUT(v4l2object->type))
797 {
798 gst_poll_fd_init(&v4l2object->pollfd);
799 v4l2object->pollfd.fd = v4l2object->video_fd;
800 gst_poll_add_fd(v4l2object->poll, &v4l2object->pollfd);
801 /* used for dequeue event */
802 gst_poll_fd_ctl_read(v4l2object->poll, &v4l2object->pollfd, TRUE);
803 gst_poll_fd_ctl_pri(v4l2object->poll, &v4l2object->pollfd, TRUE);
804 }
805
806 return ret;
807}
808
809gboolean
810gst_aml_v4l2_object_close(GstAmlV4l2Object *v4l2object)
811{
812 if (!gst_aml_v4l2_close(v4l2object))
813 return FALSE;
814
815 gst_caps_replace(&v4l2object->probed_caps, NULL);
816
817 /* reset our copy of the device caps */
818 v4l2object->device_caps = 0;
819
820 if (v4l2object->formats)
821 {
822 gst_aml_v4l2_object_clear_format_list(v4l2object);
823 }
824
825 if (v4l2object->par)
826 {
827 g_value_unset(v4l2object->par);
828 g_free(v4l2object->par);
829 v4l2object->par = NULL;
830 }
831
832 if (v4l2object->channel)
833 {
834 g_free(v4l2object->channel);
835 v4l2object->channel = NULL;
836 }
837
838 return TRUE;
839}
840
841static struct v4l2_fmtdesc *
842gst_aml_v4l2_object_get_format_from_fourcc(GstAmlV4l2Object *v4l2object,
843 guint32 fourcc)
844{
845 struct v4l2_fmtdesc *fmt;
846 GSList *walk;
847
848 if (fourcc == 0)
849 return NULL;
850
851 walk = gst_aml_v4l2_object_get_format_list(v4l2object);
852 while (walk)
853 {
854 fmt = (struct v4l2_fmtdesc *)walk->data;
855 if (fmt->pixelformat == fourcc)
856 return fmt;
857 /* special case for jpeg */
858 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
859 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
860 fmt->pixelformat == V4L2_PIX_FMT_PJPG)
861 {
862 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
863 fourcc == V4L2_PIX_FMT_PJPG)
864 {
865 return fmt;
866 }
867 }
868 walk = g_slist_next(walk);
869 }
870
871 return NULL;
872}
873
 874/* completely made-up ranking, the values themselves are meaningless */
875/* These ranks MUST be X such that X<<15 fits on a signed int - see
876 the comment at the end of gst_aml_v4l2_object_format_get_rank. */
877#define YUV_BASE_RANK 1000
878#define JPEG_BASE_RANK 500
879#define DV_BASE_RANK 200
880#define RGB_BASE_RANK 100
881#define YUV_ODD_BASE_RANK 50
882#define RGB_ODD_BASE_RANK 25
883#define BAYER_BASE_RANK 15
884#define S910_BASE_RANK 10
885#define GREY_BASE_RANK 5
886#define PWC_BASE_RANK 1
887
888static gint
889gst_aml_v4l2_object_format_get_rank(const struct v4l2_fmtdesc *fmt)
890{
891 guint32 fourcc = fmt->pixelformat;
892 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
893 gint rank = 0;
894
895 switch (fourcc)
896 {
897 case V4L2_PIX_FMT_MJPEG:
898 case V4L2_PIX_FMT_PJPG:
899 rank = JPEG_BASE_RANK;
900 break;
901 case V4L2_PIX_FMT_JPEG:
902 rank = JPEG_BASE_RANK + 1;
903 break;
904 case V4L2_PIX_FMT_MPEG: /* MPEG */
905 rank = JPEG_BASE_RANK + 2;
906 break;
907
908 case V4L2_PIX_FMT_RGB332:
909 case V4L2_PIX_FMT_ARGB555:
910 case V4L2_PIX_FMT_XRGB555:
911 case V4L2_PIX_FMT_RGB555:
912 case V4L2_PIX_FMT_ARGB555X:
913 case V4L2_PIX_FMT_XRGB555X:
914 case V4L2_PIX_FMT_RGB555X:
915 case V4L2_PIX_FMT_BGR666:
916 case V4L2_PIX_FMT_RGB565:
917 case V4L2_PIX_FMT_RGB565X:
918 case V4L2_PIX_FMT_RGB444:
919 case V4L2_PIX_FMT_Y4:
920 case V4L2_PIX_FMT_Y6:
921 case V4L2_PIX_FMT_Y10:
922 case V4L2_PIX_FMT_Y12:
923 case V4L2_PIX_FMT_Y10BPACK:
924 case V4L2_PIX_FMT_YUV555:
925 case V4L2_PIX_FMT_YUV565:
926 case V4L2_PIX_FMT_YUV32:
927 case V4L2_PIX_FMT_NV12MT_16X16:
928 case V4L2_PIX_FMT_NV42:
929 case V4L2_PIX_FMT_H264_MVC:
930 rank = RGB_ODD_BASE_RANK;
931 break;
932
933 case V4L2_PIX_FMT_RGB24:
934 case V4L2_PIX_FMT_BGR24:
935 rank = RGB_BASE_RANK - 1;
936 break;
937
938 case V4L2_PIX_FMT_RGB32:
939 case V4L2_PIX_FMT_BGR32:
940 case V4L2_PIX_FMT_ABGR32:
941 case V4L2_PIX_FMT_XBGR32:
942 case V4L2_PIX_FMT_ARGB32:
943 case V4L2_PIX_FMT_XRGB32:
944 rank = RGB_BASE_RANK;
945 break;
946
947 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
948 rank = GREY_BASE_RANK;
949 break;
950
951 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
952 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
953 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
954 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
955 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
956 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
957 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
958 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
959 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
960 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
961 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
962 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
963 rank = YUV_ODD_BASE_RANK;
964 break;
965
966 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
967 rank = YUV_BASE_RANK + 3;
968 break;
969 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
970 rank = YUV_BASE_RANK + 2;
971 break;
972 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
973 case V4L2_PIX_FMT_YUV420M:
974 rank = YUV_BASE_RANK + 7;
975 break;
976 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
977 rank = YUV_BASE_RANK + 10;
978 break;
979 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
980 rank = YUV_BASE_RANK + 6;
981 break;
982 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
983 rank = YUV_BASE_RANK + 9;
984 break;
985 case V4L2_PIX_FMT_YUV444:
986 rank = YUV_BASE_RANK + 6;
987 break;
988 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
989 rank = YUV_BASE_RANK + 5;
990 break;
991 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
992 rank = YUV_BASE_RANK + 4;
993 break;
994 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
995 rank = YUV_BASE_RANK + 8;
996 break;
997
998 case V4L2_PIX_FMT_DV:
999 rank = DV_BASE_RANK;
1000 break;
1001
 1002 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1003 rank = 0;
1004 break;
1005
1006 case V4L2_PIX_FMT_SBGGR8:
1007 case V4L2_PIX_FMT_SGBRG8:
1008 case V4L2_PIX_FMT_SGRBG8:
1009 case V4L2_PIX_FMT_SRGGB8:
1010 rank = BAYER_BASE_RANK;
1011 break;
1012
1013 case V4L2_PIX_FMT_SN9C10X:
1014 rank = S910_BASE_RANK;
1015 break;
1016
1017 case V4L2_PIX_FMT_PWC1:
1018 rank = PWC_BASE_RANK;
1019 break;
1020 case V4L2_PIX_FMT_PWC2:
1021 rank = PWC_BASE_RANK;
1022 break;
1023
1024 default:
1025 rank = 0;
1026 break;
1027 }
1028
1029 /* All ranks are below 1<<15 so a shift by 15
1030 * will a) make all non-emulated formats larger
1031 * than emulated and b) will not overflow
1032 */
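    /* Worked example using the defines above: an emulated I420 keeps
     * rank YUV_BASE_RANK + 7 = 1007, while a native GREY format becomes
     * GREY_BASE_RANK << 15 = 163840, so every native format sorts ahead
     * of every emulated one. */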
1033 if (!emulated)
1034 rank <<= 15;
1035
1036 return rank;
1037}
1038
1039static gint
1040format_cmp_func(gconstpointer a, gconstpointer b)
1041{
1042 const struct v4l2_fmtdesc *fa = a;
1043 const struct v4l2_fmtdesc *fb = b;
1044
1045 if (fa->pixelformat == fb->pixelformat)
1046 return 0;
1047
1048 return gst_aml_v4l2_object_format_get_rank(fb) -
1049 gst_aml_v4l2_object_format_get_rank(fa);
1050}
1051
1052/******************************************************
1053 * gst_aml_v4l2_object_fill_format_list():
1054 * create list of supported capture formats
1055 * return value: TRUE on success, FALSE on error
1056 ******************************************************/
1057static gboolean
1058gst_aml_v4l2_object_fill_format_list(GstAmlV4l2Object *v4l2object,
1059 enum v4l2_buf_type type)
1060{
1061 gint n;
1062 struct v4l2_fmtdesc *format;
1063
1064 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "getting src format enumerations");
1065
1066 /* format enumeration */
1067 for (n = 0;; n++)
1068 {
1069 format = g_new0(struct v4l2_fmtdesc, 1);
1070
1071 format->index = n;
1072 format->type = type;
1073
1074 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0)
1075 {
1076 if (errno == EINVAL)
1077 {
1078 g_free(format);
1079 break; /* end of enumeration */
1080 }
1081 else
1082 {
1083 goto failed;
1084 }
1085 }
1086
1087 GST_LOG_OBJECT(v4l2object->dbg_obj, "index: %u", format->index);
1088 GST_LOG_OBJECT(v4l2object->dbg_obj, "type: %d", format->type);
1089 GST_LOG_OBJECT(v4l2object->dbg_obj, "flags: %08x", format->flags);
1090 GST_LOG_OBJECT(v4l2object->dbg_obj, "description: '%s'",
1091 format->description);
1092 GST_LOG_OBJECT(v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1093 GST_FOURCC_ARGS(format->pixelformat));
1094
1095 /* sort formats according to our preference; we do this, because caps
1096 * are probed in the order the formats are in the list, and the order of
1097 * formats in the final probed caps matters for things like fixation */
1098 v4l2object->formats = g_slist_insert_sorted(v4l2object->formats, format,
1099 (GCompareFunc)format_cmp_func);
1100 }
1101
1102#ifndef GST_DISABLE_GST_DEBUG
1103 {
1104 GSList *l;
1105
1106 GST_INFO_OBJECT(v4l2object->dbg_obj, "got %d format(s):", n);
1107 for (l = v4l2object->formats; l != NULL; l = l->next)
1108 {
1109 format = l->data;
1110
1111 GST_INFO_OBJECT(v4l2object->dbg_obj,
1112 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS(format->pixelformat),
1113 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1114 }
1115 }
1116#endif
1117
1118 return TRUE;
1119
1120 /* ERRORS */
1121failed:
1122{
1123 g_free(format);
1124
 1125 if (!v4l2object->element)
1126 return FALSE;
1127
1128 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
1129 (_("Failed to enumerate possible video formats device '%s' can work "
1130 "with"),
1131 v4l2object->videodev),
1132 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1133 n, v4l2object->videodev, errno, g_strerror(errno)));
1134
1135 return FALSE;
1136}
1137}
1138
1139/*
1140 * Get the list of supported capture formats, a list of
1141 * <code>struct v4l2_fmtdesc</code>.
1142 */
1143static GSList *
1144gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object)
1145{
1146 if (!v4l2object->formats)
1147 {
1148
1149 /* check usual way */
1150 gst_aml_v4l2_object_fill_format_list(v4l2object, v4l2object->type);
1151
1152 /* if our driver supports multi-planar
1153 * and if formats are still empty then we can workaround driver bug
1154 * by also looking up formats as if our device was not supporting
1155 * multiplanar */
1156 if (!v4l2object->formats)
1157 {
1158 switch (v4l2object->type)
1159 {
1160 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1161 gst_aml_v4l2_object_fill_format_list(v4l2object,
1162 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1163 break;
1164
1165 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1166 gst_aml_v4l2_object_fill_format_list(v4l2object,
1167 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1168 break;
1169
1170 default:
1171 break;
1172 }
1173 }
1174 }
1175 return v4l2object->formats;
1176}
1177
1178static GstVideoFormat
1179gst_aml_v4l2_object_v4l2fourcc_to_video_format(guint32 fourcc)
1180{
1181 GstVideoFormat format;
1182
1183 switch (fourcc)
1184 {
1185 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1186 format = GST_VIDEO_FORMAT_GRAY8;
1187 break;
1188 case V4L2_PIX_FMT_Y16:
1189 format = GST_VIDEO_FORMAT_GRAY16_LE;
1190 break;
1191 case V4L2_PIX_FMT_Y16_BE:
1192 format = GST_VIDEO_FORMAT_GRAY16_BE;
1193 break;
1194 case V4L2_PIX_FMT_XRGB555:
1195 case V4L2_PIX_FMT_RGB555:
1196 format = GST_VIDEO_FORMAT_RGB15;
1197 break;
1198 case V4L2_PIX_FMT_XRGB555X:
1199 case V4L2_PIX_FMT_RGB555X:
1200 format = GST_VIDEO_FORMAT_BGR15;
1201 break;
1202 case V4L2_PIX_FMT_RGB565:
1203 format = GST_VIDEO_FORMAT_RGB16;
1204 break;
1205 case V4L2_PIX_FMT_RGB24:
1206 format = GST_VIDEO_FORMAT_RGB;
1207 break;
1208 case V4L2_PIX_FMT_BGR24:
1209 format = GST_VIDEO_FORMAT_BGR;
1210 break;
1211 case V4L2_PIX_FMT_XRGB32:
1212 case V4L2_PIX_FMT_RGB32:
1213 format = GST_VIDEO_FORMAT_xRGB;
1214 break;
1215 case V4L2_PIX_FMT_XBGR32:
1216 case V4L2_PIX_FMT_BGR32:
1217 format = GST_VIDEO_FORMAT_BGRx;
1218 break;
1219 case V4L2_PIX_FMT_ABGR32:
1220 format = GST_VIDEO_FORMAT_BGRA;
1221 break;
1222 case V4L2_PIX_FMT_ARGB32:
1223 format = GST_VIDEO_FORMAT_ARGB;
1224 break;
1225 case V4L2_PIX_FMT_NV12:
1226 case V4L2_PIX_FMT_NV12M:
1227 format = GST_VIDEO_FORMAT_NV12;
1228 break;
1229 case V4L2_PIX_FMT_NV12MT:
1230 format = GST_VIDEO_FORMAT_NV12_64Z32;
1231 break;
1232 case V4L2_PIX_FMT_NV21:
1233 case V4L2_PIX_FMT_NV21M:
1234 format = GST_VIDEO_FORMAT_NV21;
1235 break;
1236 case V4L2_PIX_FMT_YVU410:
1237 format = GST_VIDEO_FORMAT_YVU9;
1238 break;
1239 case V4L2_PIX_FMT_YUV410:
1240 format = GST_VIDEO_FORMAT_YUV9;
1241 break;
1242 case V4L2_PIX_FMT_YUV420:
1243 case V4L2_PIX_FMT_YUV420M:
1244 format = GST_VIDEO_FORMAT_I420;
1245 break;
1246 case V4L2_PIX_FMT_YUYV:
1247 format = GST_VIDEO_FORMAT_YUY2;
1248 break;
1249 case V4L2_PIX_FMT_YVU420:
1250 format = GST_VIDEO_FORMAT_YV12;
1251 break;
1252 case V4L2_PIX_FMT_UYVY:
1253 format = GST_VIDEO_FORMAT_UYVY;
1254 break;
1255 case V4L2_PIX_FMT_YUV411P:
1256 format = GST_VIDEO_FORMAT_Y41B;
1257 break;
1258 case V4L2_PIX_FMT_YUV422P:
1259 format = GST_VIDEO_FORMAT_Y42B;
1260 break;
1261 case V4L2_PIX_FMT_YVYU:
1262 format = GST_VIDEO_FORMAT_YVYU;
1263 break;
1264 case V4L2_PIX_FMT_NV16:
1265 case V4L2_PIX_FMT_NV16M:
1266 format = GST_VIDEO_FORMAT_NV16;
1267 break;
1268 case V4L2_PIX_FMT_NV61:
1269 case V4L2_PIX_FMT_NV61M:
1270 format = GST_VIDEO_FORMAT_NV61;
1271 break;
1272 case V4L2_PIX_FMT_NV24:
1273 format = GST_VIDEO_FORMAT_NV24;
1274 break;
1275 default:
1276 format = GST_VIDEO_FORMAT_UNKNOWN;
1277 break;
1278 }
1279
1280 return format;
1281}
1282
1283static gboolean
1284gst_amL_v4l2_object_v4l2fourcc_is_rgb(guint32 fourcc)
1285{
1286 gboolean ret = FALSE;
1287
1288 switch (fourcc)
1289 {
1290 case V4L2_PIX_FMT_XRGB555:
1291 case V4L2_PIX_FMT_RGB555:
1292 case V4L2_PIX_FMT_XRGB555X:
1293 case V4L2_PIX_FMT_RGB555X:
1294 case V4L2_PIX_FMT_RGB565:
1295 case V4L2_PIX_FMT_RGB24:
1296 case V4L2_PIX_FMT_BGR24:
1297 case V4L2_PIX_FMT_XRGB32:
1298 case V4L2_PIX_FMT_RGB32:
1299 case V4L2_PIX_FMT_XBGR32:
1300 case V4L2_PIX_FMT_BGR32:
1301 case V4L2_PIX_FMT_ABGR32:
1302 case V4L2_PIX_FMT_ARGB32:
1303 case V4L2_PIX_FMT_SBGGR8:
1304 case V4L2_PIX_FMT_SGBRG8:
1305 case V4L2_PIX_FMT_SGRBG8:
1306 case V4L2_PIX_FMT_SRGGB8:
1307 ret = TRUE;
1308 break;
1309 default:
1310 break;
1311 }
1312
1313 return ret;
1314}
1315
1316static GstStructure *
1317gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(guint32 fourcc)
1318{
1319 GstStructure *structure = NULL;
1320
1321 switch (fourcc)
1322 {
1323 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1324 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1325 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1326 structure = gst_structure_new_empty("image/jpeg");
1327 break;
1328 case V4L2_PIX_FMT_MPEG1:
1329 structure = gst_structure_new("video/mpeg",
1330 "mpegversion", G_TYPE_INT, 1, NULL);
fei.dengb5bfaa82022-07-12 17:27:13 +08001331 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1332 GST_DEBUG("aml set mpeg1 systemstream to false");
xuesong.jiangae1548e2022-05-06 16:38:46 +08001333 break;
1334 case V4L2_PIX_FMT_MPEG2:
1335 structure = gst_structure_new("video/mpeg",
1336 "mpegversion", G_TYPE_INT, 2, NULL);
1337 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1338 GST_DEBUG("aml set mpeg2 systemstream to false");
1339 break;
1340 case V4L2_PIX_FMT_MPEG4:
1341 case V4L2_PIX_FMT_XVID:
1342 structure = gst_structure_new("video/mpeg",
1343 "mpegversion", G_TYPE_INT, 4, "systemstream",
1344 G_TYPE_BOOLEAN, FALSE, NULL);
1345 break;
1346 case V4L2_PIX_FMT_FWHT:
1347 structure = gst_structure_new_empty("video/x-fwht");
1348 break;
1349 case V4L2_PIX_FMT_H263:
1350 structure = gst_structure_new("video/x-h263",
1351 "variant", G_TYPE_STRING, "itu", NULL);
1352 break;
1353 case V4L2_PIX_FMT_H264: /* H.264 */
1354 structure = gst_structure_new("video/x-h264",
1355 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1356 G_TYPE_STRING, "au", NULL);
1357 break;
1358 case V4L2_PIX_FMT_H264_NO_SC:
1359 structure = gst_structure_new("video/x-h264",
1360 "stream-format", G_TYPE_STRING, "avc", "alignment",
1361 G_TYPE_STRING, "au", NULL);
1362 break;
1363 case V4L2_PIX_FMT_HEVC: /* H.265 */
1364 structure = gst_structure_new("video/x-h265",
1365 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1366 G_TYPE_STRING, "au", NULL);
1367 break;
1368 case V4L2_PIX_FMT_VC1_ANNEX_G:
1369 case V4L2_PIX_FMT_VC1_ANNEX_L:
1370 structure = gst_structure_new("video/x-wmv",
1371 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1372 break;
1373 case V4L2_PIX_FMT_VP8:
1374 structure = gst_structure_new_empty("video/x-vp8");
1375 break;
1376 case V4L2_PIX_FMT_VP9:
1377 structure = gst_structure_new_empty("video/x-vp9");
1378 break;
1379 case V4L2_PIX_FMT_AV1:
1380 structure = gst_structure_new_empty("video/x-av1");
1381 break;
1382 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1383 case V4L2_PIX_FMT_Y16:
1384 case V4L2_PIX_FMT_Y16_BE:
1385 case V4L2_PIX_FMT_XRGB555:
1386 case V4L2_PIX_FMT_RGB555:
1387 case V4L2_PIX_FMT_XRGB555X:
1388 case V4L2_PIX_FMT_RGB555X:
1389 case V4L2_PIX_FMT_RGB565:
1390 case V4L2_PIX_FMT_RGB24:
1391 case V4L2_PIX_FMT_BGR24:
1392 case V4L2_PIX_FMT_RGB32:
1393 case V4L2_PIX_FMT_XRGB32:
1394 case V4L2_PIX_FMT_ARGB32:
1395 case V4L2_PIX_FMT_BGR32:
1396 case V4L2_PIX_FMT_XBGR32:
1397 case V4L2_PIX_FMT_ABGR32:
1398 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1399 case V4L2_PIX_FMT_NV12M:
1400 case V4L2_PIX_FMT_NV12MT:
1401 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1402 case V4L2_PIX_FMT_NV21M:
1403 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1404 case V4L2_PIX_FMT_NV16M:
1405 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1406 case V4L2_PIX_FMT_NV61M:
1407 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1408 case V4L2_PIX_FMT_YVU410:
1409 case V4L2_PIX_FMT_YUV410:
1410 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1411 case V4L2_PIX_FMT_YUV420M:
1412 case V4L2_PIX_FMT_YUYV:
1413 case V4L2_PIX_FMT_YVU420:
1414 case V4L2_PIX_FMT_UYVY:
1415 case V4L2_PIX_FMT_YUV422P:
1416 case V4L2_PIX_FMT_YVYU:
1417 case V4L2_PIX_FMT_YUV411P:
1418 {
1419 GstVideoFormat format;
1420 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fourcc);
1421 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1422 structure = gst_structure_new("video/x-raw",
1423 "format", G_TYPE_STRING, gst_video_format_to_string(format), NULL);
1424 break;
1425 }
1426 case V4L2_PIX_FMT_DV:
1427 structure =
1428 gst_structure_new("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1429 NULL);
1430 break;
1431 case V4L2_PIX_FMT_MPEG: /* MPEG */
1432 structure = gst_structure_new("video/mpegts",
1433 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1434 break;
 1435 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1436 break;
1437 case V4L2_PIX_FMT_SBGGR8:
1438 case V4L2_PIX_FMT_SGBRG8:
1439 case V4L2_PIX_FMT_SGRBG8:
1440 case V4L2_PIX_FMT_SRGGB8:
1441 structure = gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
1442 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" : fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg"
1443 : fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg"
1444 :
1445 /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb",
1446 NULL);
1447 break;
1448 case V4L2_PIX_FMT_SN9C10X:
1449 structure = gst_structure_new_empty("video/x-sonix");
1450 break;
1451 case V4L2_PIX_FMT_PWC1:
1452 structure = gst_structure_new_empty("video/x-pwc1");
1453 break;
1454 case V4L2_PIX_FMT_PWC2:
1455 structure = gst_structure_new_empty("video/x-pwc2");
1456 break;
1457 case V4L2_PIX_FMT_RGB332:
1458 case V4L2_PIX_FMT_BGR666:
1459 case V4L2_PIX_FMT_ARGB555X:
1460 case V4L2_PIX_FMT_RGB565X:
1461 case V4L2_PIX_FMT_RGB444:
1462 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1463 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1464 case V4L2_PIX_FMT_Y4:
1465 case V4L2_PIX_FMT_Y6:
1466 case V4L2_PIX_FMT_Y10:
1467 case V4L2_PIX_FMT_Y12:
1468 case V4L2_PIX_FMT_Y10BPACK:
1469 case V4L2_PIX_FMT_YUV444:
1470 case V4L2_PIX_FMT_YUV555:
1471 case V4L2_PIX_FMT_YUV565:
1472 case V4L2_PIX_FMT_Y41P:
1473 case V4L2_PIX_FMT_YUV32:
1474 case V4L2_PIX_FMT_NV12MT_16X16:
1475 case V4L2_PIX_FMT_NV42:
1476 case V4L2_PIX_FMT_H264_MVC:
1477 default:
1478 GST_DEBUG("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1479 fourcc, GST_FOURCC_ARGS(fourcc));
1480 break;
1481 }
1482
1483 return structure;
1484}
1485
1486GstStructure *
1487gst_aml_v4l2_object_v4l2fourcc_to_structure(guint32 fourcc)
1488{
1489 GstStructure *template;
1490 gint i;
1491
1492 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(fourcc);
1493
1494 if (template == NULL)
1495 goto done;
1496
1497 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1498 {
1499 if (gst_aml_v4l2_formats[i].format != fourcc)
1500 continue;
1501
1502 if (gst_aml_v4l2_formats[i].dimensions)
1503 {
1504 gst_structure_set(template,
1505 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1506 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1507 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1508 }
1509 break;
1510 }
1511
1512done:
1513 return template;
1514}
1515
1516static GstCaps *
1517gst_aml_v4l2_object_get_caps_helper(GstAmlV4L2FormatFlags flags)
1518{
1519 GstStructure *structure;
1520 GstCaps *caps;
1521 guint i;
1522
1523 caps = gst_caps_new_empty();
1524 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1525 {
1526
1527 if ((gst_aml_v4l2_formats[i].flags & flags) == 0)
1528 continue;
1529
1530 structure =
1531 gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(gst_aml_v4l2_formats[i].format);
1532
1533 if (structure)
1534 {
1535 GstStructure *alt_s = NULL;
1536
1537 if (gst_aml_v4l2_formats[i].dimensions)
1538 {
1539 gst_structure_set(structure,
1540 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1541 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1542 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1543 }
1544
1545 switch (gst_aml_v4l2_formats[i].format)
1546 {
1547 case V4L2_PIX_FMT_RGB32:
1548 alt_s = gst_structure_copy(structure);
1549 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1550 break;
1551 case V4L2_PIX_FMT_BGR32:
1552 alt_s = gst_structure_copy(structure);
1553 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1554 default:
1555 break;
1556 }
1557
1558 gst_caps_append_structure(caps, structure);
1559
1560 if (alt_s)
1561 gst_caps_append_structure(caps, alt_s);
1562 }
1563 }
1564
1565 return gst_caps_simplify(caps);
1566}
1567
1568GstCaps *
1569gst_aml_v4l2_object_get_all_caps(void)
1570{
1571 static GstCaps *caps = NULL;
1572
1573 if (g_once_init_enter(&caps))
1574 {
1575 GstCaps *all_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_ALL);
1576 GST_MINI_OBJECT_FLAG_SET(all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1577 g_once_init_leave(&caps, all_caps);
1578 }
1579
1580 return caps;
1581}
1582
1583GstCaps *
1584gst_aml_v4l2_object_get_raw_caps(void)
1585{
1586 static GstCaps *caps = NULL;
1587
1588 if (g_once_init_enter(&caps))
1589 {
1590 GstCaps *raw_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_RAW);
1591 GST_MINI_OBJECT_FLAG_SET(raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1592 g_once_init_leave(&caps, raw_caps);
1593 }
1594
1595 return caps;
1596}
1597
1598GstCaps *
1599gst_aml_v4l2_object_get_codec_caps(void)
1600{
1601 static GstCaps *caps = NULL;
1602
1603 if (g_once_init_enter(&caps))
1604 {
1605 GstCaps *codec_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_CODEC);
1606 GST_MINI_OBJECT_FLAG_SET(codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1607 g_once_init_leave(&caps, codec_caps);
1608 }
1609
1610 return caps;
1611}
1612
 1613/* collect data for the given caps
 1614 * @caps: given input caps
 1615 * @format: location for the matching v4l2 format descriptor
 1616 * @info: location for the GstVideoInfo parsed from @caps (width, height,
 1617 * framerate and the expected frame size are all returned through @info
 1618 * rather than as separate out parameters)
 1619 */
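/* For instance, caps of "video/x-raw,format=NV12" map to V4L2_PIX_FMT_NV12
 * (with V4L2_PIX_FMT_NV12M preferred when the driver exposes the
 * non-contiguous variant), and "video/x-h264,stream-format=byte-stream"
 * maps to V4L2_PIX_FMT_H264 - see the switch statements below. */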
1620static gboolean
1621gst_aml_v4l2_object_get_caps_info(GstAmlV4l2Object *v4l2object, GstCaps *caps,
1622 struct v4l2_fmtdesc **format, GstVideoInfo *info)
1623{
1624 GstStructure *structure;
1625 guint32 fourcc = 0, fourcc_nc = 0;
1626 const gchar *mimetype;
1627 struct v4l2_fmtdesc *fmt = NULL;
1628
1629 structure = gst_caps_get_structure(caps, 0);
1630
1631 mimetype = gst_structure_get_name(structure);
1632
1633 if (!gst_video_info_from_caps(info, caps))
1634 goto invalid_format;
1635
1636 if (g_str_equal(mimetype, "video/x-raw"))
1637 {
1638 switch (GST_VIDEO_INFO_FORMAT(info))
1639 {
1640 case GST_VIDEO_FORMAT_I420:
1641 fourcc = V4L2_PIX_FMT_YUV420;
1642 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1643 break;
1644 case GST_VIDEO_FORMAT_YUY2:
1645 fourcc = V4L2_PIX_FMT_YUYV;
1646 break;
1647 case GST_VIDEO_FORMAT_UYVY:
1648 fourcc = V4L2_PIX_FMT_UYVY;
1649 break;
1650 case GST_VIDEO_FORMAT_YV12:
1651 fourcc = V4L2_PIX_FMT_YVU420;
1652 break;
1653 case GST_VIDEO_FORMAT_Y41B:
1654 fourcc = V4L2_PIX_FMT_YUV411P;
1655 break;
1656 case GST_VIDEO_FORMAT_Y42B:
1657 fourcc = V4L2_PIX_FMT_YUV422P;
1658 break;
1659 case GST_VIDEO_FORMAT_NV12:
1660 fourcc = V4L2_PIX_FMT_NV12;
1661 fourcc_nc = V4L2_PIX_FMT_NV12M;
1662 break;
1663 case GST_VIDEO_FORMAT_NV12_64Z32:
1664 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1665 break;
1666 case GST_VIDEO_FORMAT_NV21:
1667 fourcc = V4L2_PIX_FMT_NV21;
1668 fourcc_nc = V4L2_PIX_FMT_NV21M;
1669 break;
1670 case GST_VIDEO_FORMAT_NV16:
1671 fourcc = V4L2_PIX_FMT_NV16;
1672 fourcc_nc = V4L2_PIX_FMT_NV16M;
1673 break;
1674 case GST_VIDEO_FORMAT_NV61:
1675 fourcc = V4L2_PIX_FMT_NV61;
1676 fourcc_nc = V4L2_PIX_FMT_NV61M;
1677 break;
1678 case GST_VIDEO_FORMAT_NV24:
1679 fourcc = V4L2_PIX_FMT_NV24;
1680 break;
1681 case GST_VIDEO_FORMAT_YVYU:
1682 fourcc = V4L2_PIX_FMT_YVYU;
1683 break;
1684 case GST_VIDEO_FORMAT_RGB15:
1685 fourcc = V4L2_PIX_FMT_RGB555;
1686 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1687 break;
1688 case GST_VIDEO_FORMAT_RGB16:
1689 fourcc = V4L2_PIX_FMT_RGB565;
1690 break;
1691 case GST_VIDEO_FORMAT_RGB:
1692 fourcc = V4L2_PIX_FMT_RGB24;
1693 break;
1694 case GST_VIDEO_FORMAT_BGR:
1695 fourcc = V4L2_PIX_FMT_BGR24;
1696 break;
1697 case GST_VIDEO_FORMAT_xRGB:
1698 fourcc = V4L2_PIX_FMT_RGB32;
1699 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1700 break;
1701 case GST_VIDEO_FORMAT_ARGB:
1702 fourcc = V4L2_PIX_FMT_RGB32;
1703 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1704 break;
1705 case GST_VIDEO_FORMAT_BGRx:
1706 fourcc = V4L2_PIX_FMT_BGR32;
1707 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1708 break;
1709 case GST_VIDEO_FORMAT_BGRA:
1710 fourcc = V4L2_PIX_FMT_BGR32;
1711 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1712 break;
1713 case GST_VIDEO_FORMAT_GRAY8:
1714 fourcc = V4L2_PIX_FMT_GREY;
1715 break;
1716 case GST_VIDEO_FORMAT_GRAY16_LE:
1717 fourcc = V4L2_PIX_FMT_Y16;
1718 break;
1719 case GST_VIDEO_FORMAT_GRAY16_BE:
1720 fourcc = V4L2_PIX_FMT_Y16_BE;
1721 break;
1722 case GST_VIDEO_FORMAT_BGR15:
1723 fourcc = V4L2_PIX_FMT_RGB555X;
1724 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1725 break;
1726 default:
1727 break;
1728 }
1729 }
1730 else
1731 {
1732 if (g_str_equal(mimetype, "video/mpegts"))
1733 {
1734 fourcc = V4L2_PIX_FMT_MPEG;
1735 }
1736 else if (g_str_equal(mimetype, "video/x-dv"))
1737 {
1738 fourcc = V4L2_PIX_FMT_DV;
1739 }
1740 else if (g_str_equal(mimetype, "image/jpeg"))
1741 {
1742 fourcc = V4L2_PIX_FMT_JPEG;
1743 }
1744 else if (g_str_equal(mimetype, "video/mpeg"))
1745 {
1746 gint version;
1747 if (gst_structure_get_int(structure, "mpegversion", &version))
1748 {
1749 switch (version)
1750 {
1751 case 1:
1752 fourcc = V4L2_PIX_FMT_MPEG1;
1753 break;
1754 case 2:
1755 fourcc = V4L2_PIX_FMT_MPEG2;
1756 break;
1757 case 4:
1758 fourcc = V4L2_PIX_FMT_MPEG4;
1759 fourcc_nc = V4L2_PIX_FMT_XVID;
1760 break;
1761 default:
1762 break;
1763 }
1764 }
1765 }
1766 else if (g_str_equal(mimetype, "video/x-fwht"))
1767 {
1768 fourcc = V4L2_PIX_FMT_FWHT;
1769 }
1770 else if (g_str_equal(mimetype, "video/x-h263"))
1771 {
1772 fourcc = V4L2_PIX_FMT_H263;
1773 }
1774 else if (g_str_equal(mimetype, "video/x-h264"))
1775 {
1776 const gchar *stream_format =
1777 gst_structure_get_string(structure, "stream-format");
1778 if (g_str_equal(stream_format, "avc"))
1779 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1780 else
1781 fourcc = V4L2_PIX_FMT_H264;
1782 }
1783 else if (g_str_equal(mimetype, "video/x-h265"))
1784 {
1785 fourcc = V4L2_PIX_FMT_HEVC;
1786 }
1787 else if (g_str_equal(mimetype, "video/x-vp8"))
1788 {
1789 fourcc = V4L2_PIX_FMT_VP8;
1790 }
1791 else if (g_str_equal(mimetype, "video/x-vp9"))
1792 {
1793 fourcc = V4L2_PIX_FMT_VP9;
1794 }
1795 else if (g_str_equal(mimetype, "video/x-av1"))
1796 {
1797 fourcc = V4L2_PIX_FMT_AV1;
1798 }
1799 else if (g_str_equal(mimetype, "video/x-bayer"))
1800 {
 1801 const gchar *vformat = gst_structure_get_string(structure, "format");
 1802 if (vformat)
 1803 {
 1804 if (!g_ascii_strcasecmp(vformat, "bggr"))
 1805 fourcc = V4L2_PIX_FMT_SBGGR8;
 1806 else if (!g_ascii_strcasecmp(vformat, "gbrg"))
 1807 fourcc = V4L2_PIX_FMT_SGBRG8;
 1808 else if (!g_ascii_strcasecmp(vformat, "grbg"))
 1809 fourcc = V4L2_PIX_FMT_SGRBG8;
 1810 else if (!g_ascii_strcasecmp(vformat, "rggb"))
 1811 fourcc = V4L2_PIX_FMT_SRGGB8;
1812 }
1813 }
1814 else if (g_str_equal(mimetype, "video/x-sonix"))
1815 {
1816 fourcc = V4L2_PIX_FMT_SN9C10X;
1817 }
1818 else if (g_str_equal(mimetype, "video/x-pwc1"))
1819 {
1820 fourcc = V4L2_PIX_FMT_PWC1;
1821 }
1822 else if (g_str_equal(mimetype, "video/x-pwc2"))
1823 {
1824 fourcc = V4L2_PIX_FMT_PWC2;
1825 }
1826 }
1827
1828 /* Prefer the non-contiguous if supported */
1829 v4l2object->prefered_non_contiguous = TRUE;
1830
1831 if (fourcc_nc)
1832 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc_nc);
1833 else if (fourcc == 0)
1834 goto unhandled_format;
1835
1836 if (fmt == NULL)
1837 {
1838 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc);
1839 v4l2object->prefered_non_contiguous = FALSE;
1840 }
1841
1842 if (fmt == NULL)
1843 goto unsupported_format;
1844
1845 *format = fmt;
1846
1847 return TRUE;
1848
1849 /* ERRORS */
1850invalid_format:
1851{
1852 GST_DEBUG_OBJECT(v4l2object, "invalid format");
1853 return FALSE;
1854}
1855unhandled_format:
1856{
1857 GST_DEBUG_OBJECT(v4l2object, "unhandled format");
1858 return FALSE;
1859}
1860unsupported_format:
1861{
1862 GST_DEBUG_OBJECT(v4l2object, "unsupported format");
1863 return FALSE;
1864}
1865}
1866
1867static gboolean
1868gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
1869 guint32 pixelformat, gint *width, gint *height);
1870
1871static void
1872gst_aml_v4l2_object_add_aspect_ratio(GstAmlV4l2Object *v4l2object, GstStructure *s)
1873{
1874 if (v4l2object->keep_aspect && v4l2object->par)
1875 gst_structure_set_value(s, "pixel-aspect-ratio", v4l2object->par);
1876}
1877
1878/* returns TRUE if the value was changed in place, otherwise FALSE */
1879static gboolean
1880gst_aml_v4l2src_value_simplify(GValue *val)
1881{
1882 /* simplify list of one value to one value */
1883 if (GST_VALUE_HOLDS_LIST(val) && gst_value_list_get_size(val) == 1)
1884 {
1885 const GValue *list_val;
1886 GValue new_val = G_VALUE_INIT;
1887
1888 list_val = gst_value_list_get_value(val, 0);
1889 g_value_init(&new_val, G_VALUE_TYPE(list_val));
1890 g_value_copy(list_val, &new_val);
1891 g_value_unset(val);
1892 *val = new_val;
1893 return TRUE;
1894 }
1895
1896 return FALSE;
1897}
1898
1899static gboolean
1900gst_aml_v4l2_object_get_interlace_mode(enum v4l2_field field,
1901 GstVideoInterlaceMode *interlace_mode)
1902{
1903 switch (field)
1904 {
1905 case V4L2_FIELD_ANY:
1906 GST_ERROR("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
1907 /* fallthrough */
1908 case V4L2_FIELD_NONE:
1909 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1910 return TRUE;
1911 case V4L2_FIELD_INTERLACED:
1912 case V4L2_FIELD_INTERLACED_TB:
1913 case V4L2_FIELD_INTERLACED_BT:
1914 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1915 return TRUE;
1916 default:
1917 GST_ERROR("Unknown enum v4l2_field %d", field);
1918 return FALSE;
1919 }
1920}
1921
1922static gboolean
1923gst_aml_v4l2_object_get_colorspace(struct v4l2_format *fmt,
1924 GstVideoColorimetry *cinfo)
1925{
1926 gboolean is_rgb =
1927 gst_amL_v4l2_object_v4l2fourcc_is_rgb(fmt->fmt.pix.pixelformat);
1928 enum v4l2_colorspace colorspace;
1929 enum v4l2_quantization range;
1930 enum v4l2_ycbcr_encoding matrix;
1931 enum v4l2_xfer_func transfer;
1932 gboolean ret = TRUE;
1933
1934 if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
1935 {
1936 colorspace = fmt->fmt.pix_mp.colorspace;
1937 range = fmt->fmt.pix_mp.quantization;
1938 matrix = fmt->fmt.pix_mp.ycbcr_enc;
1939 transfer = fmt->fmt.pix_mp.xfer_func;
1940 }
1941 else
1942 {
1943 colorspace = fmt->fmt.pix.colorspace;
1944 range = fmt->fmt.pix.quantization;
1945 matrix = fmt->fmt.pix.ycbcr_enc;
1946 transfer = fmt->fmt.pix.xfer_func;
1947 }
xuesong.jiange1a19662022-06-21 20:30:22 +08001948 GST_DEBUG("colorspace:%d, range:%d, matrix:%d, transfer:%d", colorspace, range, matrix, transfer);
1949 GST_DEBUG("cinfo update pass 1 | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001950
1951 /* First step, set the defaults for each primaries */
1952 switch (colorspace)
1953 {
1954 case V4L2_COLORSPACE_SMPTE170M:
1955 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1956 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1957 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1958 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
1959 break;
1960 case V4L2_COLORSPACE_REC709:
1961 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1962 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
1963 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1964 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1965 break;
1966 case V4L2_COLORSPACE_SRGB:
1967 case V4L2_COLORSPACE_JPEG:
1968 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
1969 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1970 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
1971 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1972 break;
1973 case V4L2_COLORSPACE_OPRGB:
1974 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1975 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1976 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
1977 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
1978 break;
1979 case V4L2_COLORSPACE_BT2020:
1980 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1981 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
1982 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
1983 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
1984 break;
1985 case V4L2_COLORSPACE_SMPTE240M:
1986 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1987 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
1988 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
1989 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
1990 break;
1991 case V4L2_COLORSPACE_470_SYSTEM_M:
1992 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1993 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1994 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1995 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
1996 break;
1997 case V4L2_COLORSPACE_470_SYSTEM_BG:
1998 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1999 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2000 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2001 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
2002 break;
2003 case V4L2_COLORSPACE_RAW:
2004 /* Explicitly unknown */
2005 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2006 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2007 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2008 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
2009 break;
2010 default:
2011 GST_DEBUG("Unknown enum v4l2_colorspace %d", colorspace);
2012 ret = FALSE;
2013 break;
2014 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002015 GST_DEBUG("cinfo update pass 2 | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002016
2017 if (!ret)
2018 goto done;
2019
2020 /* Second step, apply any custom variation */
2021 switch (range)
2022 {
2023 case V4L2_QUANTIZATION_FULL_RANGE:
2024 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2025 break;
2026 case V4L2_QUANTIZATION_LIM_RANGE:
2027 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2028 break;
2029 case V4L2_QUANTIZATION_DEFAULT:
2030 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2031 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2032 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2033 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601 || matrix == V4L2_YCBCR_ENC_XV709 || colorspace == V4L2_COLORSPACE_JPEG)
2034 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2035 else
2036 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2037 break;
2038 default:
2039 GST_WARNING("Unknown enum v4l2_quantization value %d", range);
2040 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2041 break;
2042 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002043 GST_DEBUG("cinfo update pass 3 | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002044
2045 switch (matrix)
2046 {
2047 case V4L2_YCBCR_ENC_XV601:
2048 case V4L2_YCBCR_ENC_SYCC:
2049 GST_FIXME("XV601 and SYCC not defined, assuming 601");
2050 /* fallthrough */
2051 case V4L2_YCBCR_ENC_601:
2052 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2053 break;
2054 case V4L2_YCBCR_ENC_XV709:
2055 GST_FIXME("XV709 not defined, assuming 709");
2056 /* fallthrough */
2057 case V4L2_YCBCR_ENC_709:
2058 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2059 break;
2060 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2061 GST_FIXME("BT2020 with constant luma is not defined, assuming BT2020");
2062 /* fallthrough */
2063 case V4L2_YCBCR_ENC_BT2020:
2064 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2065 break;
2066 case V4L2_YCBCR_ENC_SMPTE240M:
2067 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2068 break;
2069 case V4L2_YCBCR_ENC_DEFAULT:
2070 /* nothing, just use defaults for colorspace */
2071 break;
2072 default:
2073 GST_WARNING("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2074 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2075 break;
2076 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002077 GST_DEBUG("cinfo update pass 4 | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002078
2079 /* Set identity matrix for R'G'B' formats to avoid creating
2080 * confusion. This though is cosmetic as it's now properly ignored by
2081 * the video info API and videoconvert. */
2082 if (is_rgb)
2083 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2084
2085 switch (transfer)
2086 {
2087 case V4L2_XFER_FUNC_709:
2088 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2089 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2090 else
2091 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2092 break;
2093 case V4L2_XFER_FUNC_SRGB:
2094 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2095 break;
2096 case V4L2_XFER_FUNC_OPRGB:
2097 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2098 break;
2099 case V4L2_XFER_FUNC_SMPTE240M:
2100 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2101 break;
2102 case V4L2_XFER_FUNC_NONE:
2103 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2104 break;
2105 case V4L2_XFER_FUNC_DEFAULT:
2106 /* nothing, just use defaults for colorspace */
2107 break;
2108 default:
2109 GST_WARNING("Unknown enum v4l2_xfer_func value %d", transfer);
2110 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2111 break;
2112 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002113 GST_DEBUG("cinfo update pass 5 | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002114
2115done:
2116 return ret;
2117}
2118
2119static int
2120gst_aml_v4l2_object_try_fmt(GstAmlV4l2Object *v4l2object,
2121 struct v4l2_format *try_fmt)
2122{
2123 int fd = v4l2object->video_fd;
2124 struct v4l2_format fmt;
2125 int r;
2126
2127 memcpy(&fmt, try_fmt, sizeof(fmt));
2128 r = v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &fmt);
2129
2130 if (r < 0 && errno == ENOTTY)
2131 {
2132 /* The driver might not implement TRY_FMT, in which case we will try
2133 S_FMT to probe */
2134 if (GST_AML_V4L2_IS_ACTIVE(v4l2object))
2135 goto error;
2136
2137 memcpy(&fmt, try_fmt, sizeof(fmt));
2138 r = v4l2object->ioctl(fd, VIDIOC_S_FMT, &fmt);
2139 }
2140 memcpy(try_fmt, &fmt, sizeof(fmt));
2141
2142 return r;
2143
2144error:
2145 memcpy(try_fmt, &fmt, sizeof(fmt));
2146 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2147 "Unable to try format: %s", g_strerror(errno));
2148 return r;
2149}
2150
2151static void
2152gst_aml_v4l2_object_add_interlace_mode(GstAmlV4l2Object *v4l2object,
2153 GstStructure *s, guint32 width, guint32 height, guint32 pixelformat)
2154{
2155 struct v4l2_format fmt;
2156 GValue interlace_formats = {
2157 0,
2158 };
2159 enum v4l2_field formats[] = {V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED};
2160 gsize i;
2161 GstVideoInterlaceMode interlace_mode, prev = -1;
2162
2163 if (!g_str_equal(gst_structure_get_name(s), "video/x-raw"))
2164 return;
2165
2166 if (v4l2object->never_interlaced)
2167 {
2168 gst_structure_set(s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2169 return;
2170 }
2171
2172 g_value_init(&interlace_formats, GST_TYPE_LIST);
2173
2174 /* Try twice - once for NONE, once for INTERLACED. */
2175 for (i = 0; i < G_N_ELEMENTS(formats); i++)
2176 {
2177 memset(&fmt, 0, sizeof(fmt));
2178 fmt.type = v4l2object->type;
2179 fmt.fmt.pix.width = width;
2180 fmt.fmt.pix.height = height;
2181 fmt.fmt.pix.pixelformat = pixelformat;
2182 fmt.fmt.pix.field = formats[i];
2183
2184 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0 &&
2185 gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode) && prev != interlace_mode)
2186 {
2187 GValue interlace_enum = {
2188 0,
2189 };
2190 const gchar *mode_string;
2191 g_value_init(&interlace_enum, G_TYPE_STRING);
2192 mode_string = gst_video_interlace_mode_to_string(interlace_mode);
2193 g_value_set_string(&interlace_enum, mode_string);
2194 gst_value_list_append_and_take_value(&interlace_formats,
2195 &interlace_enum);
2196 prev = interlace_mode;
2197 }
2198 }
2199
2200 if (gst_aml_v4l2src_value_simplify(&interlace_formats) || gst_value_list_get_size(&interlace_formats) > 0)
2201 gst_structure_take_value(s, "interlace-mode", &interlace_formats);
2202 else
2203 GST_WARNING_OBJECT(v4l2object, "Failed to determine interlace mode");
2204
2205 return;
2206}
2207
2208static void
2209gst_aml_v4l2_object_fill_colorimetry_list(GValue *list,
2210 GstVideoColorimetry *cinfo)
2211{
2212 GValue colorimetry = G_VALUE_INIT;
2213 guint size;
2214 guint i;
2215 gboolean found = FALSE;
2216
2217 g_value_init(&colorimetry, G_TYPE_STRING);
2218 g_value_take_string(&colorimetry, gst_video_colorimetry_to_string(cinfo));
xuesong.jiange1a19662022-06-21 20:30:22 +08002219 GST_DEBUG("fill colorimetry:%s into list", gst_video_colorimetry_to_string(cinfo));
xuesong.jiangae1548e2022-05-06 16:38:46 +08002220
2221 /* only insert if no duplicate */
2222 size = gst_value_list_get_size(list);
2223 for (i = 0; i < size; i++)
2224 {
2225 const GValue *tmp;
2226
2227 tmp = gst_value_list_get_value(list, i);
2228 if (gst_value_compare(&colorimetry, tmp) == GST_VALUE_EQUAL)
2229 {
2230 found = TRUE;
2231 break;
2232 }
2233 }
2234
2235 if (!found)
2236 gst_value_list_append_and_take_value(list, &colorimetry);
2237 else
2238 g_value_unset(&colorimetry);
2239}
2240
2241static void
2242gst_aml_v4l2_object_add_colorspace(GstAmlV4l2Object *v4l2object, GstStructure *s,
2243 guint32 width, guint32 height, guint32 pixelformat)
2244{
2245 struct v4l2_format fmt;
2246 GValue list = G_VALUE_INIT;
2247 GstVideoColorimetry cinfo;
2248 enum v4l2_colorspace req_cspace;
2249
2250 memset(&fmt, 0, sizeof(fmt));
2251 fmt.type = v4l2object->type;
2252 fmt.fmt.pix.width = width;
2253 fmt.fmt.pix.height = height;
2254 fmt.fmt.pix.pixelformat = pixelformat;
2255
2256 g_value_init(&list, GST_TYPE_LIST);
2257
2258 /* step 1: get device default colorspace and insert it first as
2259 * it should be the preferred one */
xuesong.jiange1a19662022-06-21 20:30:22 +08002260 GST_DEBUG("try for pixel format");
xuesong.jiangae1548e2022-05-06 16:38:46 +08002261 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2262 {
2263 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2264 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2265 }
2266
2267 /* step 2: probe all colorspace other than default
2268 * We don't probe all colorspace, range, matrix and transfer combination to
2269 * avoid ioctl flooding which could greatly increase initialization time
2270 * with low-speed devices (UVC...) */
2271 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2272 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++)
2273 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002274 GST_DEBUG("try for pixel format in loop: %d", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002275 /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2276 if (req_cspace == V4L2_COLORSPACE_BT878)
2277 continue;
2278
2279 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2280 fmt.fmt.pix_mp.colorspace = req_cspace;
2281 else
2282 fmt.fmt.pix.colorspace = req_cspace;
2283
2284 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2285 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002286 GST_DEBUG("try for pixel format in loop: %d tried ok", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002287 enum v4l2_colorspace colorspace;
2288
2289 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2290 colorspace = fmt.fmt.pix_mp.colorspace;
2291 else
2292 colorspace = fmt.fmt.pix.colorspace;
2293
2294 if (colorspace == req_cspace)
2295 {
2296 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2297 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2298 }
2299 }
2300 }
2301
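/* Additionally append a fixed set of colorimetry combinations below, written
 * as numeric GstVideoColorimetry fields (range, matrix, transfer, primaries).
 * They appear to cover common HDR signalling (e.g. 2,6,14,7 ~ limited-range
 * BT.2020 with SMPTE2084 transfer) so that such streams can still negotiate
 * when the driver does not report these combinations via TRY_FMT. */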
xuesong.jiang7b0882c2022-06-22 14:10:30 +08002302 GST_DEBUG("deal: caps with colorimetry 2,3,14,7");
xuesong.jiange1a19662022-06-21 20:30:22 +08002303 cinfo.range = 2;
2304 cinfo.matrix = 3;
2305 cinfo.transfer = 14;
2306 cinfo.primaries = 7;
2307 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2308
xuesong.jiang7b0882c2022-06-22 14:10:30 +08002309 GST_DEBUG("deal: caps with colorimetry 2,6,13,7");
2310 cinfo.range = 2;
2311 cinfo.matrix = 6;
2312 cinfo.transfer = 13;
2313 cinfo.primaries = 7;
2314 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2315
xuesong.jiang5c9aca72022-07-12 16:29:24 +08002316 GST_DEBUG("deal: caps with colorimetry 2,6,14,7");
2317 cinfo.range = 2;
2318 cinfo.matrix = 6;
2319 cinfo.transfer = 14;
2320 cinfo.primaries = 7;
2321 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2322
fei.dengccc89632022-07-15 19:10:17 +08002323 GST_DEBUG("deal: caps with colorimetry 2,6,0,7");
2324 cinfo.range = 2;
2325 cinfo.matrix = 6;
2326 cinfo.transfer = 0;
2327 cinfo.primaries = 7;
2328 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2329
fei.dengca85b052022-07-19 14:49:23 +08002330 GST_DEBUG("deal: caps with colorimetry 0,6,0,7");
2331 cinfo.range = 0;
2332 cinfo.matrix = 6;
2333 cinfo.transfer = 0;
2334 cinfo.primaries = 7;
2335 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2336
sheng.liua326d202022-07-20 14:15:34 +08002337 GST_DEBUG("deal: caps with colorimetry 2,3,0,0");
2338 cinfo.range = 2;
2339 cinfo.matrix = 3;
2340 cinfo.transfer = 0;
2341 cinfo.primaries = 0;
2342 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2343
xuesong.jiangae1548e2022-05-06 16:38:46 +08002344 if (gst_value_list_get_size(&list) > 0)
2345 gst_structure_take_value(s, "colorimetry", &list);
2346 else
2347 g_value_unset(&list);
2348
2349 return;
2350}
2351
2352/* The frame interval enumeration code first appeared in Linux 2.6.19. */
2353static GstStructure *
2354gst_aml_v4l2_object_probe_caps_for_format_and_size(GstAmlV4l2Object *v4l2object,
2355 guint32 pixelformat,
2356 guint32 width, guint32 height, const GstStructure *template)
2357{
2358 gint fd = v4l2object->video_fd;
2359 struct v4l2_frmivalenum ival;
2360 guint32 num, denom;
2361 GstStructure *s;
2362 GValue rates = {
2363 0,
2364 };
2365
2366 memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
2367 ival.index = 0;
2368 ival.pixel_format = pixelformat;
2369 ival.width = width;
2370 ival.height = height;
2371
2372 GST_LOG_OBJECT(v4l2object->dbg_obj,
2373 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2374 GST_FOURCC_ARGS(pixelformat));
2375
2376 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2377 * fraction to get framerate */
2378 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2379 goto enum_frameintervals_failed;
2380
2381 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
2382 {
2383 GValue rate = {
2384 0,
2385 };
2386
2387 g_value_init(&rates, GST_TYPE_LIST);
2388 g_value_init(&rate, GST_TYPE_FRACTION);
2389
2390 do
2391 {
2392 num = ival.discrete.numerator;
2393 denom = ival.discrete.denominator;
2394
2395 if (num > G_MAXINT || denom > G_MAXINT)
2396 {
2397 /* let us hope we don't get here... */
2398 num >>= 1;
2399 denom >>= 1;
2400 }
2401
2402 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2403 denom, num);
2404
2405 /* swap to get the framerate */
2406 gst_value_set_fraction(&rate, denom, num);
2407 gst_value_list_append_value(&rates, &rate);
2408
2409 ival.index++;
2410 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2411 }
2412 else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
2413 {
2414 GValue min = {
2415 0,
2416 };
2417 GValue step = {
2418 0,
2419 };
2420 GValue max = {
2421 0,
2422 };
2423 gboolean added = FALSE;
2424 guint32 minnum, mindenom;
2425 guint32 maxnum, maxdenom;
2426
2427 g_value_init(&rates, GST_TYPE_LIST);
2428
2429 g_value_init(&min, GST_TYPE_FRACTION);
2430 g_value_init(&step, GST_TYPE_FRACTION);
2431 g_value_init(&max, GST_TYPE_FRACTION);
2432
2433 /* get the min */
2434 minnum = ival.stepwise.min.numerator;
2435 mindenom = ival.stepwise.min.denominator;
2436 if (minnum > G_MAXINT || mindenom > G_MAXINT)
2437 {
2438 minnum >>= 1;
2439 mindenom >>= 1;
2440 }
2441 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2442 minnum, mindenom);
2443 gst_value_set_fraction(&min, minnum, mindenom);
2444
2445 /* get the max */
2446 maxnum = ival.stepwise.max.numerator;
2447 maxdenom = ival.stepwise.max.denominator;
2448 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2449 {
2450 maxnum >>= 1;
2451 maxdenom >>= 1;
2452 }
2453
2454 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2455 maxnum, maxdenom);
2456 gst_value_set_fraction(&max, maxnum, maxdenom);
2457
2458 /* get the step */
2459 num = ival.stepwise.step.numerator;
2460 denom = ival.stepwise.step.denominator;
2461 if (num > G_MAXINT || denom > G_MAXINT)
2462 {
2463 num >>= 1;
2464 denom >>= 1;
2465 }
2466
2467 if (num == 0 || denom == 0)
2468 {
2469 /* in this case we have a wrong fraction or no step, set the step to max
2470 * so that we only add the min value in the loop below */
2471 num = maxnum;
2472 denom = maxdenom;
2473 }
2474
2475 /* since we only have gst_value_fraction_subtract and not add, negate the
2476 * numerator */
2477 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2478 num, denom);
2479 gst_value_set_fraction(&step, -num, denom);
2480
2481 while (gst_value_compare(&min, &max) != GST_VALUE_GREATER_THAN)
2482 {
2483 GValue rate = {
2484 0,
2485 };
2486
2487 num = gst_value_get_fraction_numerator(&min);
2488 denom = gst_value_get_fraction_denominator(&min);
2489 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2490 denom, num);
2491
2492 /* invert to get the framerate */
2493 g_value_init(&rate, GST_TYPE_FRACTION);
2494 gst_value_set_fraction(&rate, denom, num);
2495 gst_value_list_append_value(&rates, &rate);
2496 added = TRUE;
2497
2498 /* we're actually adding because step was negated above. This is because
2499 * there is no _add function... */
2500 if (!gst_value_fraction_subtract(&min, &min, &step))
2501 {
2502 GST_WARNING_OBJECT(v4l2object->dbg_obj, "could not step fraction!");
2503 break;
2504 }
2505 }
2506 if (!added)
2507 {
2508 /* no range was added, leave the default range from the template */
2509 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2510 "no range added, leaving default");
2511 g_value_unset(&rates);
2512 }
2513 }
2514 else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
2515 {
2516 guint32 maxnum, maxdenom;
2517
2518 g_value_init(&rates, GST_TYPE_FRACTION_RANGE);
2519
2520 num = ival.stepwise.min.numerator;
2521 denom = ival.stepwise.min.denominator;
2522 if (num > G_MAXINT || denom > G_MAXINT)
2523 {
2524 num >>= 1;
2525 denom >>= 1;
2526 }
2527
2528 maxnum = ival.stepwise.max.numerator;
2529 maxdenom = ival.stepwise.max.denominator;
2530 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2531 {
2532 maxnum >>= 1;
2533 maxdenom >>= 1;
2534 }
2535
2536 GST_LOG_OBJECT(v4l2object->dbg_obj,
2537 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2538 num);
2539
2540 gst_value_set_fraction_range_full(&rates, maxdenom, maxnum, denom, num);
2541 }
2542 else
2543 {
2544 goto unknown_type;
2545 }
2546
2547return_data:
2548 s = gst_structure_copy(template);
2549 gst_structure_set(s, "width", G_TYPE_INT, (gint)width,
2550 "height", G_TYPE_INT, (gint)height, NULL);
2551
2552 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, s);
2553
2554 if (!v4l2object->skip_try_fmt_probes)
2555 {
2556 gst_aml_v4l2_object_add_interlace_mode(v4l2object, s, width, height,
2557 pixelformat);
2558 gst_aml_v4l2_object_add_colorspace(v4l2object, s, width, height, pixelformat);
2559 }
2560
2561 if (G_IS_VALUE(&rates))
2562 {
2563 gst_aml_v4l2src_value_simplify(&rates);
2564 /* only change the framerate on the template when we have a valid probed new
2565 * value */
2566 gst_structure_take_value(s, "framerate", &rates);
2567 }
2568 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2569 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2570 {
2571 gst_structure_set(s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2572 1, NULL);
2573 }
2574 return s;
2575
2576 /* ERRORS */
2577enum_frameintervals_failed:
2578{
2579 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2580 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2581 GST_FOURCC_ARGS(pixelformat), width, height);
2582 goto return_data;
2583}
2584unknown_type:
2585{
2586 /* I don't see how this is actually an error, we ignore the format then */
2587 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2588 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2589 GST_FOURCC_ARGS(pixelformat), width, height, ival.type);
2590 return NULL;
2591}
2592}
2593
2594static gint
2595sort_by_frame_size(GstStructure *s1, GstStructure *s2)
2596{
2597 int w1, h1, w2, h2;
2598
2599 gst_structure_get_int(s1, "width", &w1);
2600 gst_structure_get_int(s1, "height", &h1);
2601 gst_structure_get_int(s2, "width", &w2);
2602 gst_structure_get_int(s2, "height", &h2);
2603
2604 /* I think it's safe to assume that this won't overflow for a while */
2605 return ((w2 * h2) - (w1 * h1));
2606}
2607
2608static void
2609gst_aml_v4l2_object_update_and_append(GstAmlV4l2Object *v4l2object,
2610 guint32 format, GstCaps *caps, GstStructure *s)
2611{
2612 GstStructure *alt_s = NULL;
2613
2614 /* Encoded stream on output buffer needs to be parsed */
2615 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2616 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
2617 {
2618 gint i = 0;
2619
2620 for (; i < GST_AML_V4L2_FORMAT_COUNT; i++)
2621 {
2622 if (format == gst_aml_v4l2_formats[i].format &&
2623 gst_aml_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2624 !(gst_aml_v4l2_formats[i].flags & GST_V4L2_NO_PARSE))
2625 {
2626 gst_structure_set(s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2627 break;
2628 }
2629 }
2630 }
2631
2632 if (v4l2object->has_alpha_component &&
2633 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2634 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE))
2635 {
2636 switch (format)
2637 {
2638 case V4L2_PIX_FMT_RGB32:
2639 alt_s = gst_structure_copy(s);
2640 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2641 break;
2642 case V4L2_PIX_FMT_BGR32:
2643 alt_s = gst_structure_copy(s);
2644 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2645 break;
2646 default:
2647 break;
2648 }
2649 }
2650
2651 gst_caps_append_structure(caps, s);
2652
2653 if (alt_s)
2654 gst_caps_append_structure(caps, alt_s);
2655}
2656
2657static GstCaps *
2658gst_aml_v4l2_object_probe_caps_for_format(GstAmlV4l2Object *v4l2object,
2659 guint32 pixelformat, const GstStructure *template)
2660{
2661 GstCaps *ret = gst_caps_new_empty();
2662 GstStructure *tmp;
2663 gint fd = v4l2object->video_fd;
2664 struct v4l2_frmsizeenum size;
2665 GList *results = NULL;
2666 guint32 w, h;
2667
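/* The 'MPEG' fourcc describes a multiplexed MPEG stream with no inherent
 * width/height, so frame size enumeration is skipped and the template caps
 * are returned unchanged. */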
2668 if (pixelformat == GST_MAKE_FOURCC('M', 'P', 'E', 'G'))
2669 {
2670 gst_caps_append_structure(ret, gst_structure_copy(template));
2671 return ret;
2672 }
2673
2674 memset(&size, 0, sizeof(struct v4l2_frmsizeenum));
2675 size.index = 0;
2676 size.pixel_format = pixelformat;
2677
2678 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2679 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2680 GST_FOURCC_ARGS(pixelformat));
2681
2682 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2683 goto enum_framesizes_failed;
2684
2685 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE)
2686 {
2687 do
2688 {
2689 GST_LOG_OBJECT(v4l2object->dbg_obj, "got discrete frame size %dx%d",
2690 size.discrete.width, size.discrete.height);
2691
2692 w = MIN(size.discrete.width, G_MAXINT);
2693 h = MIN(size.discrete.height, G_MAXINT);
2694
2695 if (w && h)
2696 {
2697 tmp =
2698 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2699 pixelformat, w, h, template);
2700
2701 if (tmp)
2702 results = g_list_prepend(results, tmp);
2703 }
2704
2705 size.index++;
2706 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2707 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2708 "done iterating discrete frame sizes");
2709 }
2710 else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
2711 {
2712 guint32 maxw, maxh, step_w, step_h;
2713
2714 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have stepwise frame sizes:");
2715 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2716 size.stepwise.min_width);
2717 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2718 size.stepwise.min_height);
2719 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2720 size.stepwise.max_width);
2721 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2722 size.stepwise.max_height);
2723 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step width: %d",
2724 size.stepwise.step_width);
2725 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step height: %d",
2726 size.stepwise.step_height);
2727
2728 w = MAX(size.stepwise.min_width, 1);
2729 h = MAX(size.stepwise.min_height, 1);
2730 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2731 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2732
2733 step_w = MAX(size.stepwise.step_width, 1);
2734 step_h = MAX(size.stepwise.step_height, 1);
2735
2736 /* FIXME: check for sanity and that min/max are multiples of the steps */
2737
2738 /* we only query details for the max width/height since it's likely the
2739 * most restricted if there are any resolution-dependent restrictions */
2740 tmp = gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2741 pixelformat, maxw, maxh, template);
2742
2743 if (tmp)
2744 {
2745 GValue step_range = G_VALUE_INIT;
2746
2747 g_value_init(&step_range, GST_TYPE_INT_RANGE);
2748 gst_value_set_int_range_step(&step_range, w, maxw, step_w);
2749 gst_structure_set_value(tmp, "width", &step_range);
2750
2751 gst_value_set_int_range_step(&step_range, h, maxh, step_h);
2752 gst_structure_take_value(tmp, "height", &step_range);
2753
2754 /* no point using the results list here, since there's only one struct */
2755 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2756 }
2757 }
2758 else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)
2759 {
2760 guint32 maxw, maxh;
2761
2762 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have continuous frame sizes:");
2763 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2764 size.stepwise.min_width);
2765 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2766 size.stepwise.min_height);
2767 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2768 size.stepwise.max_width);
2769 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2770 size.stepwise.max_height);
2771
2772 w = MAX(size.stepwise.min_width, 1);
2773 h = MAX(size.stepwise.min_height, 1);
2774 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2775 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2776
2777 tmp =
2778 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object, pixelformat,
2779 w, h, template);
2780 if (tmp)
2781 {
2782 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, (gint)w,
2783 (gint)maxw, "height", GST_TYPE_INT_RANGE, (gint)h, (gint)maxh,
2784 NULL);
2785
2786 /* no point using the results list here, since there's only one struct */
2787 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2788 }
2789 }
2790 else
2791 {
2792 goto unknown_type;
2793 }
2794
2795 /* we use an intermediary list to store and then sort the results of the
2796 * probing because we can't make any assumptions about the order in which
2797 * the driver will give us the sizes, but we want the final caps to contain
2798 * the results starting with the highest resolution and having the lowest
2799 * resolution last, since order in caps matters for things like fixation. */
2800 results = g_list_sort(results, (GCompareFunc)sort_by_frame_size);
2801 while (results != NULL)
2802 {
2803 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret,
2804 results->data);
2805 results = g_list_delete_link(results, results);
2806 }
2807
2808 if (gst_caps_is_empty(ret))
2809 goto enum_framesizes_no_results;
2810
2811 return ret;
2812
2813 /* ERRORS */
2814enum_framesizes_failed:
2815{
2816 /* I don't see how this is actually an error */
2817 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2818 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2819 " (%s)",
2820 GST_FOURCC_ARGS(pixelformat), g_strerror(errno));
2821 goto default_frame_sizes;
2822}
2823enum_framesizes_no_results:
2824{
2825 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2826 * question doesn't actually support it yet */
2827 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2828 "No results for pixelformat %" GST_FOURCC_FORMAT
2829 " enumerating frame sizes, trying fallback",
2830 GST_FOURCC_ARGS(pixelformat));
2831 goto default_frame_sizes;
2832}
2833unknown_type:
2834{
2835 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2836 "Unknown frame size enum type for pixelformat %" GST_FOURCC_FORMAT
2837 ": %u",
2838 GST_FOURCC_ARGS(pixelformat), size.type);
2839 goto default_frame_sizes;
2840}
2841
2842default_frame_sizes:
2843{
2844 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2845
2846 /* This code is for Linux < 2.6.19 */
2847 min_w = min_h = 1;
2848 max_w = max_h = GST_AML_V4L2_MAX_SIZE;
2849 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &min_w,
2850 &min_h))
2851 {
2852 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2853 "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2854 }
2855 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &max_w,
2856 &max_h))
2857 {
2858 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2859 "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2860 }
2861
2862 tmp = gst_structure_copy(template);
hanghang.luo3128f102022-08-18 10:36:19 +08002863#ifdef DELETE_FOR_LGE
xuesong.jiangae1548e2022-05-06 16:38:46 +08002864 if (fix_num)
2865 {
2866 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2867 fix_denom, NULL);
2868 }
hanghang.luo3128f102022-08-18 10:36:19 +08002869 else
2870#endif
2871 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2872 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
xuesong.jiangae1548e2022-05-06 16:38:46 +08002873 {
2874 /* if norm can't be used, copy the template framerate */
2875 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2876 G_MAXINT, 1, NULL);
2877 }
2878
2879 if (min_w == max_w)
2880 gst_structure_set(tmp, "width", G_TYPE_INT, max_w, NULL);
2881 else
2882 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2883
2884 if (min_h == max_h)
2885 gst_structure_set(tmp, "height", G_TYPE_INT, max_h, NULL);
2886 else
2887 gst_structure_set(tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2888
2889 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, tmp);
2890
2891 if (!v4l2object->skip_try_fmt_probes)
2892 {
2893 /* We could consider setting interlace mode from min and max. */
2894 gst_aml_v4l2_object_add_interlace_mode(v4l2object, tmp, max_w, max_h,
2895 pixelformat);
2896 /* We could consider to check colorspace for min too, in case it depends on
2897 * the size. But in this case, min and max could not be enough */
2898 gst_aml_v4l2_object_add_colorspace(v4l2object, tmp, max_w, max_h,
2899 pixelformat);
2900 }
2901
2902 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2903 return ret;
2904}
2905}
2906
2907static gboolean
2908gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
2909 guint32 pixelformat, gint *width, gint *height)
2910{
2911 struct v4l2_format fmt;
2912 gboolean ret = FALSE;
2913 GstVideoInterlaceMode interlace_mode;
2914
2915 g_return_val_if_fail(width != NULL, FALSE);
2916 g_return_val_if_fail(height != NULL, FALSE);
2917
2918 GST_LOG_OBJECT(v4l2object->dbg_obj,
2919 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2920 *width, *height, GST_FOURCC_ARGS(pixelformat));
2921
2922 memset(&fmt, 0, sizeof(struct v4l2_format));
2923
2924 /* get size delimiters */
2925 memset(&fmt, 0, sizeof(fmt));
2926 fmt.type = v4l2object->type;
2927 fmt.fmt.pix.width = *width;
2928 fmt.fmt.pix.height = *height;
2929 fmt.fmt.pix.pixelformat = pixelformat;
2930 fmt.fmt.pix.field = V4L2_FIELD_ANY;
2931
2932 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) < 0)
2933 goto error;
2934
2935 GST_LOG_OBJECT(v4l2object->dbg_obj,
2936 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2937
2938 *width = fmt.fmt.pix.width;
2939 *height = fmt.fmt.pix.height;
2940
2941 if (!gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode))
2942 {
2943 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2944 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2945 GST_FOURCC_ARGS(pixelformat), *width, *height, fmt.fmt.pix.field);
2946 goto error;
2947 }
2948
2949 ret = TRUE;
2950
2951error:
2952 if (!ret)
2953 {
2954 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2955 "Unable to try format: %s", g_strerror(errno));
2956 }
2957
2958 return ret;
2959}
2960
2961static gboolean
2962gst_aml_v4l2_object_is_dmabuf_supported(GstAmlV4l2Object *v4l2object)
2963{
2964 gboolean ret = TRUE;
2965 struct v4l2_exportbuffer expbuf = {
2966 .type = v4l2object->type,
2967 .index = -1,
2968 .plane = -1,
2969 .flags = O_CLOEXEC | O_RDWR,
2970 };
2971
2972 if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED)
2973 {
2974 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2975 "libv4l2 converter detected, disabling DMABuf");
2976 ret = FALSE;
2977 }
2978
2979 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
2980 v4l2object->ioctl(v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
2981 if (errno == ENOTTY)
2982 ret = FALSE;
2983
2984 return ret;
2985}
2986
2987static gboolean
2988gst_aml_v4l2_object_setup_pool(GstAmlV4l2Object *v4l2object, GstCaps *caps)
2989{
2990 GstAmlV4l2IOMode mode;
2991
2992 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "initializing the %s system",
2993 V4L2_TYPE_IS_OUTPUT(v4l2object->type) ? "output" : "capture");
2994
2995 GST_AML_V4L2_CHECK_OPEN(v4l2object);
2996 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
2997
2998 /* find transport */
2999 mode = v4l2object->req_mode;
3000
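/* With GST_V4L2_IO_AUTO the preference below is DMABUF export for capture
 * when the driver supports it, otherwise MMAP; plain read/write is only
 * selected if the device advertises V4L2_CAP_READWRITE. */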
3001 if (v4l2object->device_caps & V4L2_CAP_READWRITE)
3002 {
3003 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
3004 mode = GST_V4L2_IO_RW;
3005 }
3006 else if (v4l2object->req_mode == GST_V4L2_IO_RW)
3007 goto method_not_supported;
3008
3009 if (v4l2object->device_caps & V4L2_CAP_STREAMING)
3010 {
3011 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
3012 {
3013 if (!V4L2_TYPE_IS_OUTPUT(v4l2object->type) &&
3014 gst_aml_v4l2_object_is_dmabuf_supported(v4l2object))
3015 {
3016 mode = GST_V4L2_IO_DMABUF;
3017 }
3018 else
3019 {
3020 mode = GST_V4L2_IO_MMAP;
3021 }
3022 }
3023 }
3024 else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
3025 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
3026 goto method_not_supported;
3027
3028 /* if still no transport selected, error out */
3029 if (mode == GST_V4L2_IO_AUTO)
3030 goto no_supported_capture_method;
3031
3032 GST_INFO_OBJECT(v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
3033 v4l2object->mode = mode;
3034
3035 /* If min_buffers is not set, the driver either does not support the control or
3036 it has not been asked yet via propose_allocation/decide_allocation. */
3037 if (!v4l2object->min_buffers)
3038 gst_aml_v4l2_get_driver_min_buffers(v4l2object);
3039
3040 /* Map the buffers */
3041 GST_LOG_OBJECT(v4l2object->dbg_obj, "initiating buffer pool");
3042
3043 if (!(v4l2object->pool = gst_aml_v4l2_buffer_pool_new(v4l2object, caps)))
3044 goto buffer_pool_new_failed;
3045
3046 GST_AML_V4L2_SET_ACTIVE(v4l2object);
3047
3048 return TRUE;
3049
3050 /* ERRORS */
3051buffer_pool_new_failed:
3052{
3053 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3054 (_("Could not map buffers from device '%s'"),
3055 v4l2object->videodev),
3056 ("Failed to create buffer pool: %s", g_strerror(errno)));
3057 return FALSE;
3058}
3059method_not_supported:
3060{
3061 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3062 (_("The driver of device '%s' does not support the IO method %d"),
3063 v4l2object->videodev, mode),
3064 (NULL));
3065 return FALSE;
3066}
3067no_supported_capture_method:
3068{
3069 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3070 (_("The driver of device '%s' does not support any known IO "
3071 "method."),
3072 v4l2object->videodev),
3073 (NULL));
3074 return FALSE;
3075}
3076}
3077
3078static void
3079gst_aml_v4l2_object_set_stride(GstVideoInfo *info, GstVideoAlignment *align,
3080 gint plane, gint stride)
3081{
3082 const GstVideoFormatInfo *finfo = info->finfo;
3083
3084 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3085 {
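/* For tiled formats GStreamer encodes the stride as a packed pair of
 * horizontal/vertical tile counts (GST_VIDEO_TILE_MAKE_STRIDE) rather than a
 * byte count, so convert the driver's byte stride and padded height into tile
 * units here. */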
3086 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3087
3088 ws = GST_VIDEO_FORMAT_INFO_TILE_WS(finfo);
3089 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3090 tile_height = 1 << hs;
3091
3092 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, plane,
3093 info->height + align->padding_top + align->padding_bottom);
3094 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3095
3096 x_tiles = stride >> ws;
3097 y_tiles = padded_height >> hs;
3098 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles);
3099 }
3100 else
3101 {
3102 info->stride[plane] = stride;
3103 }
3104}
3105
3106static void
3107gst_aml_v4l2_object_extrapolate_info(GstAmlV4l2Object *v4l2object,
3108 GstVideoInfo *info, GstVideoAlignment *align, gint stride)
3109{
3110 const GstVideoFormatInfo *finfo = info->finfo;
3111 gint i, estride, padded_height;
3112 gsize offs = 0;
3113
3114 g_return_if_fail(v4l2object->n_v4l2_planes == 1);
3115
3116 padded_height = info->height + align->padding_top + align->padding_bottom;
3117
3118 for (i = 0; i < finfo->n_planes; i++)
3119 {
3120 estride = gst_aml_v4l2_object_extrapolate_stride(finfo, i, stride);
3121
3122 gst_aml_v4l2_object_set_stride(info, align, i, estride);
3123
3124 info->offset[i] = offs;
3125 offs += estride *
3126 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, padded_height);
3127
3128 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3129 "Extrapolated for plane %d with base stride %d: "
3130 "stride %d, offset %" G_GSIZE_FORMAT,
3131 i, stride, info->stride[i],
3132 info->offset[i]);
3133 }
3134
3135 /* Update the image size according to the amount of data we are going to
3136 * read/write. This works around bugs in drivers where the sizeimage provided
3137 * by TRY/S_FMT represents the buffer length (maximum size) rather than the expected
3138 * bytesused (buffer size). */
3139 if (offs < info->size)
3140 info->size = offs;
3141}
3142
3143static void
3144gst_aml_v4l2_object_save_format(GstAmlV4l2Object *v4l2object,
3145 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3146 GstVideoInfo *info, GstVideoAlignment *align)
3147{
3148 const GstVideoFormatInfo *finfo = info->finfo;
3149 gboolean standard_stride = TRUE;
3150 gint stride, pstride, padded_width, padded_height, i;
3151
3152 if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_ENCODED)
3153 {
3154 v4l2object->n_v4l2_planes = 1;
3155 info->size = format->fmt.pix.sizeimage;
3156 goto store_info;
3157 }
3158
3159 /* adjust right padding */
3160 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3161 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3162 else
3163 stride = format->fmt.pix.bytesperline;
3164
3165 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE(finfo, 0);
3166 if (pstride)
3167 {
3168 padded_width = stride / pstride;
3169 }
3170 else
3171 {
3172 /* pstride can be 0 for complex formats */
3173 GST_WARNING_OBJECT(v4l2object->element,
3174 "format %s has a pstride of 0, cannot compute padded width",
3175 gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(info)));
3176 padded_width = stride;
3177 }
3178
3179 if (padded_width < format->fmt.pix.width)
3180 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3181 "Driver bug detected, stride (%d) is too small for the width (%d)",
3182 padded_width, format->fmt.pix.width);
3183
3184 align->padding_right = padded_width - info->width - align->padding_left;
3185
3186 /* adjust bottom padding */
3187 padded_height = format->fmt.pix.height;
3188
3189 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3190 {
3191 guint hs, tile_height;
3192
3193 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3194 tile_height = 1 << hs;
3195
3196 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3197 }
3198
3199 align->padding_bottom = padded_height - info->height - align->padding_top;
3200
3201 /* setup the strides and offset */
3202 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3203 {
3204 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3205
3206 /* figure out the frame layout */
3207 v4l2object->n_v4l2_planes = MAX(1, pix_mp->num_planes);
3208 info->size = 0;
3209 for (i = 0; i < v4l2object->n_v4l2_planes; i++)
3210 {
3211 stride = pix_mp->plane_fmt[i].bytesperline;
3212
3213 if (info->stride[i] != stride)
3214 standard_stride = FALSE;
3215
3216 gst_aml_v4l2_object_set_stride(info, align, i, stride);
3217 info->offset[i] = info->size;
3218 info->size += pix_mp->plane_fmt[i].sizeimage;
3219 }
3220
3221 /* Extrapolate stride if a planar format is being set in 1 v4l2 plane */
3222 if (v4l2object->n_v4l2_planes < finfo->n_planes)
3223 {
3224 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3225 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3226 }
3227 }
3228 else
3229 {
3230 /* only one plane in non-MPLANE mode */
3231 v4l2object->n_v4l2_planes = 1;
3232 info->size = format->fmt.pix.sizeimage;
3233 stride = format->fmt.pix.bytesperline;
3234
3235 if (info->stride[0] != stride)
3236 standard_stride = FALSE;
3237
3238 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3239 }
3240
3241 /* adjust the offset to take into account left and top */
3242 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3243 {
3244 if ((align->padding_left + align->padding_top) > 0)
3245 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3246 "Left and top padding is not permitted for tiled formats");
3247 }
3248 else
3249 {
3250 for (i = 0; i < finfo->n_planes; i++)
3251 {
3252 gint vedge, hedge;
3253
3254 /* FIXME we assume each plane maps to one component, as this is true for all
3255 * formats we support. */
3256
3257 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, i, align->padding_left);
3258 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, align->padding_top);
3259
3260 info->offset[i] += (vedge * info->stride[i]) +
3261 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE(info, i));
3262 }
3263 }
3264
3265store_info:
3266 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3267 info->size);
3268
3269 /* to avoid copies we need video meta if there is padding */
3270 v4l2object->need_video_meta =
3271 ((align->padding_top + align->padding_left + align->padding_right +
3272 align->padding_bottom) != 0);
3273
3274 /* ... or if stride is non "standard" */
3275 if (!standard_stride)
3276 v4l2object->need_video_meta = TRUE;
3277
3278 /* ... or also video meta if we use multiple, non-contiguous, planes */
3279 if (v4l2object->n_v4l2_planes > 1)
3280 v4l2object->need_video_meta = TRUE;
3281
3282 v4l2object->info = *info;
3283 v4l2object->align = *align;
3284 v4l2object->format = *format;
3285 v4l2object->fmtdesc = fmtdesc;
3286
3287 /* if we have a framerate pre-calculate duration */
3288 if (info->fps_n > 0 && info->fps_d > 0)
3289 {
3290 v4l2object->duration = gst_util_uint64_scale_int(GST_SECOND, info->fps_d,
3291 info->fps_n);
3292 }
3293 else
3294 {
3295 v4l2object->duration = GST_CLOCK_TIME_NONE;
3296 }
3297}
3298
3299gint gst_aml_v4l2_object_extrapolate_stride(const GstVideoFormatInfo *finfo,
3300 gint plane, gint stride)
3301{
3302 gint estride;
3303
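/* For the semi-planar NV* formats the chroma plane interleaves Cb and Cr, so
 * its stride is twice the component-scaled width; other formats use the
 * scaled width directly. */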
3304 switch (finfo->format)
3305 {
3306 case GST_VIDEO_FORMAT_NV12:
3307 case GST_VIDEO_FORMAT_NV12_64Z32:
3308 case GST_VIDEO_FORMAT_NV21:
3309 case GST_VIDEO_FORMAT_NV16:
3310 case GST_VIDEO_FORMAT_NV61:
3311 case GST_VIDEO_FORMAT_NV24:
3312 estride = (plane == 0 ? 1 : 2) *
3313 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3314 break;
3315 default:
3316 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3317 break;
3318 }
3319
3320 return estride;
3321}
3322
3323static gboolean
3324gst_aml_v4l2_video_colorimetry_matches(const GstVideoColorimetry *cinfo,
3325 const gchar *color)
3326{
3327 GstVideoColorimetry ci;
3328 static const GstVideoColorimetry ci_likely_jpeg = {
3329 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3330 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN};
3331 static const GstVideoColorimetry ci_jpeg = {
3332 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3333 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709};
3334
3335 if (!gst_video_colorimetry_from_string(&ci, color))
3336 return FALSE;
3337
3338 if (gst_video_colorimetry_is_equal(&ci, cinfo))
3339 return TRUE;
3340
3341 /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
3342 if (gst_video_colorimetry_is_equal(&ci, &ci_likely_jpeg) && gst_video_colorimetry_is_equal(cinfo, &ci_jpeg))
3343 return TRUE;
3344
3345 return FALSE;
3346}
3347
3348static void
fei.deng7c3d67f2022-11-09 11:06:05 +08003349set_amlogic_vdec_parm(GstAmlV4l2Object *v4l2object, struct v4l2_streamparm *streamparm, GstCaps *caps, guint32 pixFormat)
xuesong.jiangae1548e2022-05-06 16:38:46 +08003350{
3351 struct aml_dec_params *decParm = (struct aml_dec_params *)streamparm->parm.raw_data;
3352 const char *env;
3353
3354 decParm->cfg.metadata_config_flag = 1 << 13;
3355
3356 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3357 {
fei.dengccc89632022-07-15 19:10:17 +08003358 /*setting bit 12 to 1 makes
3359 *v4l2 output a 0 pts for the second field of an interlaced frame */
fei.denga6ae3282022-07-15 19:50:30 +08003360 //decParm->cfg.metadata_config_flag |= (1 << 12);
fei.deng7c3d67f2022-11-09 11:06:05 +08003361 decParm->parms_status = V4L2_CONFIG_PARM_DECODE_CFGINFO;
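/* Choose the decoder double-write mode per codec: MPEG/H.264 request a plain
 * (non-AFBC) write, while HEVC/VP9/AV1 let the decoder pick an AFBC output
 * with an additional 1:2 scaled write. The exact semantics of the VDEC_DW_*
 * values are defined by the Amlogic vdec driver. */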
3362 switch (pixFormat)
3363 {
3364 default:
3365 case V4L2_PIX_FMT_MPEG:
3366 case V4L2_PIX_FMT_H264:
3367 decParm->cfg.double_write_mode= VDEC_DW_NO_AFBC;
3368 break;
3369 case V4L2_PIX_FMT_HEVC:
3370 case V4L2_PIX_FMT_VP9:
3371 case V4L2_PIX_FMT_AV1:
3372 decParm->cfg.double_write_mode= VDEC_DW_AFBC_AUTO_1_2;
3373 break;
3374 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003375 env = getenv("V4L2_SET_AMLOGIC_DW_MODE");
3376 if (env)
3377 {
3378 int dwMode = atoi(env);
3379 switch (dwMode)
3380 {
3381 case 0:
3382 case 1:
3383 case 2:
3384 case 3:
3385 case 4:
3386 case 16:
3387 decParm->cfg.double_write_mode = dwMode;
3388 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3389 break;
3390 }
3391 }
3392
3393 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_PARM, streamparm) < 0)
3394 {
3395 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3396 }
3397 else
3398 {
3399 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Set dwMode to %d", decParm->cfg.double_write_mode);
3400 }
xuesong.jiange1a19662022-06-21 20:30:22 +08003401
3402 GstStructure *structure= gst_caps_get_structure(caps, 0);
3403 if (structure == NULL)
3404 {
3405 return;
3406 }
3407 if ( gst_structure_has_field(structure, "colorimetry") )
3408 {
3409 const char *colorimetry= gst_structure_get_string(structure,"colorimetry");
3410 GstVideoColorimetry vci = {0};
3411 if ( colorimetry && gst_video_colorimetry_from_string( &vci, colorimetry ))
3412 {
3413 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_HDRINFO;
3414 decParm->hdr.signal_type= (1<<29); /* present flag */
3415 /*set default values to stay in sync with the driver's hdr info*/
3416 decParm->hdr.signal_type |= (5<<26) | (1<<24);
3417
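/* Judging from the assignments below, signal_type packs a VUI-style colour
 * description: bit 29 = present flag, bit 25 = full-range flag,
 * bit 24 = colour-description-present, bits 16-23 = colour primaries,
 * bits 8-15 = transfer characteristics, bits 0-7 = matrix coefficients. */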
3418 gint hdrColorimetry[4] = {0};
3419 hdrColorimetry[0]= (int)vci.range;
3420 hdrColorimetry[1]= (int)vci.matrix;
3421 hdrColorimetry[2]= (int)vci.transfer;
3422 hdrColorimetry[3]= (int)vci.primaries;
3423 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "colorimetry: [%d,%d,%d,%d]",
3424 hdrColorimetry[0],
3425 hdrColorimetry[1],
3426 hdrColorimetry[2],
3427 hdrColorimetry[3] );
3428 /* range */
3429 switch ( hdrColorimetry[0] )
3430 {
3431 case 1:
3432 case 2:
3433 decParm->hdr.signal_type |= ((hdrColorimetry[0] % 2)<<25);
3434 break;
3435 default:
3436 break;
3437 }
3438 /* matrix coefficient */
3439 switch ( hdrColorimetry[1] )
3440 {
3441 case 1: /* RGB */
3442 decParm->hdr.signal_type |= 0;
3443 break;
3444 case 2: /* FCC */
3445 decParm->hdr.signal_type |= 4;
3446 break;
3447 case 3: /* BT709 */
3448 decParm->hdr.signal_type |= 1;
3449 break;
3450 case 4: /* BT601 */
3451 decParm->hdr.signal_type |= 3;
3452 break;
3453 case 5: /* SMPTE240M */
3454 decParm->hdr.signal_type |= 7;
3455 break;
3456 case 6: /* BT2020 */
3457 decParm->hdr.signal_type |= 9;
3458 break;
3459 default: /* unknown */
3460 decParm->hdr.signal_type |= 2;
3461 break;
3462 }
3463 /* transfer function */
3464 switch ( hdrColorimetry[2] )
3465 {
3466 case 5: /* BT709 */
3467 decParm->hdr.signal_type |= (1<<8);
3468 break;
3469 case 6: /* SMPTE240M */
3470 decParm->hdr.signal_type |= (7<<8);
3471 break;
3472 case 9: /* LOG100 */
3473 decParm->hdr.signal_type |= (9<<8);
3474 break;
3475 case 10: /* LOG316 */
3476 decParm->hdr.signal_type |= (10<<8);
3477 break;
3478 case 12: /* BT2020_12 */
3479 decParm->hdr.signal_type |= (15<<8);
3480 break;
3481 case 11: /* BT2020_10 */
3482 decParm->hdr.signal_type |= (14<<8);
3483 break;
3484 case 13: /* SMPTE2084 */
3485 decParm->hdr.signal_type |= (16<<8);
3486 break;
3487 case 14: /* ARIB_STD_B67 */
3488 decParm->hdr.signal_type |= (18<<8);
3489 break;
3490 #if ((GST_VERSION_MAJOR == 1) && (GST_VERSION_MINOR >= 18))
3491 case 16: /* BT601 */
3492 decParm->hdr.signal_type |= (3<<8);
3493 break;
3494 #endif
3495 case 1: /* GAMMA10 */
3496 case 2: /* GAMMA18 */
3497 case 3: /* GAMMA20 */
3498 case 4: /* GAMMA22 */
3499 case 7: /* SRGB */
3500 case 8: /* GAMMA28 */
3501 case 15: /* ADOBERGB */
3502 default:
3503 break;
3504 }
3505 /* primaries */
3506 switch ( hdrColorimetry[3] )
3507 {
3508 case 1: /* BT709 */
3509 decParm->hdr.signal_type |= ((1<<24)|(1<<16));
3510 break;
3511 case 2: /* BT470M */
3512 decParm->hdr.signal_type |= ((1<<24)|(4<<16));
3513 break;
3514 case 3: /* BT470BG */
3515 decParm->hdr.signal_type |= ((1<<24)|(5<<16));
3516 break;
3517 case 4: /* SMPTE170M */
3518 decParm->hdr.signal_type |= ((1<<24)|(6<<16));
3519 break;
3520 case 5: /* SMPTE240M */
3521 decParm->hdr.signal_type |= ((1<<24)|(7<<16));
3522 break;
3523 case 6: /* FILM */
3524 decParm->hdr.signal_type |= ((1<<24)|(8<<16));
3525 break;
3526 case 7: /* BT2020 */
3527 decParm->hdr.signal_type |= ((1<<24)|(9<<16));
3528 break;
3529 case 8: /* ADOBERGB */
3530 default:
3531 break;
3532 }
3533 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR signal_type %X", decParm->hdr.signal_type);
3534 }
3535
3536 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "got caps %" GST_PTR_FORMAT, caps);
3537 GstStructure *st = gst_caps_get_structure(caps, 0);
3538 GstCapsFeatures *features = gst_caps_get_features(caps, 0);
3539
3540 if (gst_structure_has_field(st, "colorimetry"))
3541 {
3542 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "have colorimetry");
3543 }
3544
3545 if (st && features)
3546 {
3547 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "trace in remove colorimetry");
3548 gst_structure_remove_field(st, "colorimetry");
3549 gst_caps_features_remove(features, "colorimetry");
3550 }
3551 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove colorimetry %" GST_PTR_FORMAT, caps);
3552 }
3553
3554 if ( gst_structure_has_field(structure, "mastering-display-metadata") )
3555 {
3556 const char *masteringDisplay= gst_structure_get_string(structure,"mastering-display-metadata");
3557 float hdrMasteringDisplay[10];
3558 if ( masteringDisplay && sscanf( masteringDisplay, "%f:%f:%f:%f:%f:%f:%f:%f:%f:%f",
3559 &hdrMasteringDisplay[0],
3560 &hdrMasteringDisplay[1],
3561 &hdrMasteringDisplay[2],
3562 &hdrMasteringDisplay[3],
3563 &hdrMasteringDisplay[4],
3564 &hdrMasteringDisplay[5],
3565 &hdrMasteringDisplay[6],
3566 &hdrMasteringDisplay[7],
3567 &hdrMasteringDisplay[8],
3568 &hdrMasteringDisplay[9] ) == 10 )
3569 {
3570 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "mastering display [%f,%f,%f,%f,%f,%f,%f,%f,%f,%f]",
3571 hdrMasteringDisplay[0],
3572 hdrMasteringDisplay[1],
3573 hdrMasteringDisplay[2],
3574 hdrMasteringDisplay[3],
3575 hdrMasteringDisplay[4],
3576 hdrMasteringDisplay[5],
3577 hdrMasteringDisplay[6],
3578 hdrMasteringDisplay[7],
3579 hdrMasteringDisplay[8],
3580 hdrMasteringDisplay[9] );
3581
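/* Chromaticity values are scaled by 50000 (units of 0.00002), matching the
 * SMPTE ST 2086 / HEVC mastering-display SEI convention; luminance values
 * are passed through as-is. */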
3582 decParm->hdr.color_parms.present_flag= 1;
3583 decParm->hdr.color_parms.primaries[2][0]= (uint32_t)(hdrMasteringDisplay[0]*50000); /* R.x */
3584 decParm->hdr.color_parms.primaries[2][1]= (uint32_t)(hdrMasteringDisplay[1]*50000); /* R.y */
3585 decParm->hdr.color_parms.primaries[0][0]= (uint32_t)(hdrMasteringDisplay[2]*50000); /* G.x */
3586 decParm->hdr.color_parms.primaries[0][1]= (uint32_t)(hdrMasteringDisplay[3]*50000); /* G.y */
3587 decParm->hdr.color_parms.primaries[1][0]= (uint32_t)(hdrMasteringDisplay[4]*50000); /* B.x */
3588 decParm->hdr.color_parms.primaries[1][1]= (uint32_t)(hdrMasteringDisplay[5]*50000); /* B.y */
3589 decParm->hdr.color_parms.white_point[0]= (uint32_t)(hdrMasteringDisplay[6]*50000);
3590 decParm->hdr.color_parms.white_point[1]= (uint32_t)(hdrMasteringDisplay[7]*50000);
3591 decParm->hdr.color_parms.luminance[0]= (uint32_t)(hdrMasteringDisplay[8]);
3592 decParm->hdr.color_parms.luminance[1]= (uint32_t)(hdrMasteringDisplay[9]);
3593 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: primaries %X %X %X %X %X %X",
3594 decParm->hdr.color_parms.primaries[2][0],
3595 decParm->hdr.color_parms.primaries[2][1],
3596 decParm->hdr.color_parms.primaries[0][0],
3597 decParm->hdr.color_parms.primaries[0][1],
3598 decParm->hdr.color_parms.primaries[1][0],
3599 decParm->hdr.color_parms.primaries[1][1] );
3600 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: white point: %X %X",
3601 decParm->hdr.color_parms.white_point[0],
3602 decParm->hdr.color_parms.white_point[1] );
3603 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: luminance: %X %X",
3604 decParm->hdr.color_parms.luminance[0],
3605 decParm->hdr.color_parms.luminance[1] );
3606 }
3607
3608 GstStructure *st = gst_caps_get_structure(caps, 0);
3609 GstCapsFeatures * features = gst_caps_get_features(caps, 0);
3610 if (st && features)
3611 {
3612 gst_structure_remove_fields(st, "mastering-display-metadata", NULL);
3613 gst_caps_features_remove(features, "mastering-display-metadata");
3614 }
3615 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove mastering-display-metadata %" GST_PTR_FORMAT, caps);
3616 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003617 }
3618}
3619
3620static gboolean
3621gst_aml_v4l2_object_set_format_full(GstAmlV4l2Object *v4l2object, GstCaps *caps,
3622 gboolean try_only, GstAmlV4l2Error *error)
3623{
3624 gint fd = v4l2object->video_fd;
3625 struct v4l2_format format;
3626 struct v4l2_streamparm streamparm;
3627 enum v4l2_field field;
3628 guint32 pixelformat;
3629 struct v4l2_fmtdesc *fmtdesc;
3630 GstVideoInfo info;
3631 GstVideoAlignment align;
3632 gint width, height, fps_n, fps_d;
3633 gint n_v4l_planes;
3634 gint i = 0;
3635 gboolean is_mplane;
3636 enum v4l2_colorspace colorspace = 0;
3637 enum v4l2_quantization range = 0;
3638 enum v4l2_ycbcr_encoding matrix = 0;
3639 enum v4l2_xfer_func transfer = 0;
3640 GstStructure *s;
3641 gboolean disable_colorimetry = FALSE;
3642
3643 g_return_val_if_fail(!v4l2object->skip_try_fmt_probes ||
3644 gst_caps_is_writable(caps),
3645 FALSE);
3646
3647 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3648 if (!try_only)
3649 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3650
xuesong.jiangae1548e2022-05-06 16:38:46 +08003651 is_mplane = V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type);
3652
3653 gst_video_info_init(&info);
3654 gst_video_alignment_reset(&align);
3655
3656 if (!gst_aml_v4l2_object_get_caps_info(v4l2object, caps, &fmtdesc, &info))
3657 goto invalid_caps;
3658
3659 pixelformat = fmtdesc->pixelformat;
3660 width = GST_VIDEO_INFO_WIDTH(&info);
3661 height = GST_VIDEO_INFO_HEIGHT(&info);
3662 fps_n = GST_VIDEO_INFO_FPS_N(&info);
3663 fps_d = GST_VIDEO_INFO_FPS_D(&info);
3664
fei.deng7c3d67f2022-11-09 11:06:05 +08003665    //set amlogic params here, because we need the pix format to set the dw mode
3666 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3667 streamparm.type = v4l2object->type;
3668 set_amlogic_vdec_parm(v4l2object, &streamparm, caps, pixelformat);
3669
xuesong.jiangae1548e2022-05-06 16:38:46 +08003670    /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
 3671     * or if contiguous is preferred */
3672 n_v4l_planes = GST_VIDEO_INFO_N_PLANES(&info);
3673 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3674 n_v4l_planes = 1;
3675
3676 if (GST_VIDEO_INFO_IS_INTERLACED(&info))
3677 {
3678 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "interlaced video");
3679 /* ideally we would differentiate between types of interlaced video
3680 * but there is not sufficient information in the caps..
 3681         * but there is not sufficient information in the caps.
3682 field = V4L2_FIELD_INTERLACED;
3683 }
3684 else
3685 {
3686 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "progressive video");
3687 field = V4L2_FIELD_NONE;
3688 }
3689
3690 /* We first pick the main colorspace from the primaries */
3691 switch (info.colorimetry.primaries)
3692 {
3693 case GST_VIDEO_COLOR_PRIMARIES_BT709:
 3694        /* There are two colorspaces using these primaries, use the range to
3695 * differentiate */
3696 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3697 colorspace = V4L2_COLORSPACE_REC709;
3698 else
3699 colorspace = V4L2_COLORSPACE_SRGB;
3700 break;
3701 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3702 colorspace = V4L2_COLORSPACE_BT2020;
3703 break;
3704 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3705 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3706 break;
3707 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3708 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3709 break;
3710 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3711 colorspace = V4L2_COLORSPACE_SMPTE170M;
3712 break;
3713 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3714 colorspace = V4L2_COLORSPACE_SMPTE240M;
3715 break;
3716
3717 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3718 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3719 /* We don't know, we will guess */
3720 break;
3721
3722 default:
3723 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3724 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3725 break;
3726 }
3727
3728 switch (info.colorimetry.range)
3729 {
3730 case GST_VIDEO_COLOR_RANGE_0_255:
3731 range = V4L2_QUANTIZATION_FULL_RANGE;
3732 break;
3733 case GST_VIDEO_COLOR_RANGE_16_235:
3734 range = V4L2_QUANTIZATION_LIM_RANGE;
3735 break;
3736 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3737 /* We let the driver pick a default one */
3738 break;
3739 default:
3740 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3741 "Unknown colorimetry range %d", info.colorimetry.range);
3742 break;
3743 }
3744
3745 switch (info.colorimetry.matrix)
3746 {
3747 case GST_VIDEO_COLOR_MATRIX_RGB:
3748 /* Unspecified, leave to default */
3749 break;
3750 /* FCC is about the same as BT601 with less digit */
3751 case GST_VIDEO_COLOR_MATRIX_FCC:
3752 case GST_VIDEO_COLOR_MATRIX_BT601:
3753 matrix = V4L2_YCBCR_ENC_601;
3754 break;
3755 case GST_VIDEO_COLOR_MATRIX_BT709:
3756 matrix = V4L2_YCBCR_ENC_709;
3757 break;
3758 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3759 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3760 break;
3761 case GST_VIDEO_COLOR_MATRIX_BT2020:
3762 matrix = V4L2_YCBCR_ENC_BT2020;
3763 break;
3764 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3765 /* We let the driver pick a default one */
3766 break;
3767 default:
3768 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3769 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3770 break;
3771 }
3772
3773 switch (info.colorimetry.transfer)
3774 {
3775 case GST_VIDEO_TRANSFER_GAMMA18:
3776 case GST_VIDEO_TRANSFER_GAMMA20:
3777 case GST_VIDEO_TRANSFER_GAMMA22:
3778 case GST_VIDEO_TRANSFER_GAMMA28:
3779 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3780 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3781 /* fallthrough */
3782 case GST_VIDEO_TRANSFER_GAMMA10:
3783 transfer = V4L2_XFER_FUNC_NONE;
3784 break;
3785 case GST_VIDEO_TRANSFER_BT2020_12:
3786 case GST_VIDEO_TRANSFER_BT709:
3787 transfer = V4L2_XFER_FUNC_709;
3788 break;
3789 case GST_VIDEO_TRANSFER_SMPTE240M:
3790 transfer = V4L2_XFER_FUNC_SMPTE240M;
3791 break;
3792 case GST_VIDEO_TRANSFER_SRGB:
3793 transfer = V4L2_XFER_FUNC_SRGB;
3794 break;
3795 case GST_VIDEO_TRANSFER_LOG100:
3796 case GST_VIDEO_TRANSFER_LOG316:
3797 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3798 "LOG 100, 316 transfer functions not supported");
3799 /* FIXME No known sensible default, maybe AdobeRGB ? */
3800 break;
3801 case GST_VIDEO_TRANSFER_UNKNOWN:
3802 /* We let the driver pick a default one */
3803 break;
3804 default:
3805 GST_WARNING_OBJECT(v4l2object->dbg_obj,
 3806                           "Unknown colorimetry transfer %d", info.colorimetry.transfer);
3807 break;
3808 }
3809
3810 if (colorspace == 0)
3811 {
3812 /* Try to guess colorspace according to pixelformat and size */
3813 if (GST_VIDEO_INFO_IS_YUV(&info))
3814 {
3815 if (range == V4L2_QUANTIZATION_FULL_RANGE && matrix == V4L2_YCBCR_ENC_601 && transfer == 0)
3816 {
3817 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3818 * function most likely is JPEG */
3819 colorspace = V4L2_COLORSPACE_JPEG;
3820 transfer = V4L2_XFER_FUNC_SRGB;
3821 }
3822 else
3823 {
3824 /* SD streams likely use SMPTE170M and HD streams REC709 */
3825 if (width <= 720 && height <= 576)
3826 colorspace = V4L2_COLORSPACE_SMPTE170M;
3827 else
3828 colorspace = V4L2_COLORSPACE_REC709;
3829 }
3830 }
3831 else if (GST_VIDEO_INFO_IS_RGB(&info))
3832 {
3833 colorspace = V4L2_COLORSPACE_SRGB;
3834 transfer = V4L2_XFER_FUNC_NONE;
3835 }
3836 }
3837
3838 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format %dx%d, format "
3839 "%" GST_FOURCC_FORMAT " stride: %d",
3840 width, height,
3841 GST_FOURCC_ARGS(pixelformat), GST_VIDEO_INFO_PLANE_STRIDE(&info, 0));
3842
3843 memset(&format, 0x00, sizeof(struct v4l2_format));
3844 format.type = v4l2object->type;
3845
3846 if (is_mplane)
3847 {
3848 format.type = v4l2object->type;
3849 format.fmt.pix_mp.pixelformat = pixelformat;
3850 format.fmt.pix_mp.width = width;
3851 format.fmt.pix_mp.height = height;
3852 format.fmt.pix_mp.field = field;
3853 format.fmt.pix_mp.num_planes = n_v4l_planes;
3854
 3855        /* try to ask our preferred stride but it's not a failure if not
3856 * accepted */
3857 for (i = 0; i < n_v4l_planes; i++)
3858 {
3859 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, i);
3860
3861 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3862 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3863
3864 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
3865 }
3866
3867 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3868 {
3869 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3870 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
3871 else
3872 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3873 }
3874 }
3875 else
3876 {
3877 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
3878
3879 format.type = v4l2object->type;
3880
3881 format.fmt.pix.width = width;
3882 format.fmt.pix.height = height;
3883 format.fmt.pix.pixelformat = pixelformat;
3884 format.fmt.pix.field = field;
3885
3886 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3887 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3888
 3889        /* try to ask our preferred stride */
3890 format.fmt.pix.bytesperline = stride;
3891
3892 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3893 {
3894 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3895 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
3896 else
3897 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3898 }
3899 }
3900
3901 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format is %dx%d, format "
3902 "%" GST_FOURCC_FORMAT ", nb planes %d",
3903 format.fmt.pix.width,
3904 format.fmt.pix_mp.height,
3905 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3906 is_mplane ? format.fmt.pix_mp.num_planes : 1);
3907
3908#ifndef GST_DISABLE_GST_DEBUG
3909 if (is_mplane)
3910 {
3911 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3912 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3913 format.fmt.pix_mp.plane_fmt[i].bytesperline);
3914 }
3915 else
3916 {
3917 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3918 format.fmt.pix.bytesperline);
3919 }
3920#endif
3921
3922 if (is_mplane)
3923 {
3924 format.fmt.pix_mp.colorspace = colorspace;
3925 format.fmt.pix_mp.quantization = range;
3926 format.fmt.pix_mp.ycbcr_enc = matrix;
3927 format.fmt.pix_mp.xfer_func = transfer;
3928 }
3929 else
3930 {
3931 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
3932 format.fmt.pix.colorspace = colorspace;
3933 format.fmt.pix.quantization = range;
3934 format.fmt.pix.ycbcr_enc = matrix;
3935 format.fmt.pix.xfer_func = transfer;
3936 }
3937
3938 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
3939 colorspace, range, matrix, transfer);
3940
3941 if (try_only)
3942 {
3943 if (v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &format) < 0)
3944 goto try_fmt_failed;
3945 }
3946 else
3947 {
3948 if (v4l2object->ioctl(fd, VIDIOC_S_FMT, &format) < 0)
3949 goto set_fmt_failed;
3950 }
3951
3952 if (is_mplane)
3953 {
3954 colorspace = format.fmt.pix_mp.colorspace;
3955 range = format.fmt.pix_mp.quantization;
3956 matrix = format.fmt.pix_mp.ycbcr_enc;
3957 transfer = format.fmt.pix_mp.xfer_func;
3958 }
3959 else
3960 {
3961 colorspace = format.fmt.pix.colorspace;
3962 range = format.fmt.pix.quantization;
3963 matrix = format.fmt.pix.ycbcr_enc;
3964 transfer = format.fmt.pix.xfer_func;
3965 }
3966
3967 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got format of %dx%d, format "
3968 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
3969 format.fmt.pix.width, format.fmt.pix_mp.height,
3970 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3971 is_mplane ? format.fmt.pix_mp.num_planes : 1,
3972 colorspace, range, matrix, transfer);
3973
3974#ifndef GST_DISABLE_GST_DEBUG
3975 if (is_mplane)
3976 {
3977 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3978 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3979 format.fmt.pix_mp.plane_fmt[i].bytesperline,
3980 format.fmt.pix_mp.plane_fmt[i].sizeimage);
3981 }
3982 else
3983 {
3984 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3985 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
3986 }
3987#endif
3988
3989 if (format.fmt.pix.pixelformat != pixelformat)
3990 goto invalid_pixelformat;
3991
3992 /* Only negotiate size with raw data.
3993 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
 3994     * in ASF mode for example, there is also no reason for a driver to
3995 * change the size. */
3996 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED)
3997 {
3998 /* We can crop larger images */
3999 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
4000 goto invalid_dimensions;
4001
4002 /* Note, this will be adjusted if upstream has non-centered cropping. */
4003 align.padding_top = 0;
4004 align.padding_bottom = format.fmt.pix.height - height;
4005 align.padding_left = 0;
4006 align.padding_right = format.fmt.pix.width - width;
4007 }
4008
4009 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
4010 goto invalid_planes;
4011
4012 /* used to check colorimetry and interlace mode fields presence */
4013 s = gst_caps_get_structure(caps, 0);
4014
4015 if (!gst_aml_v4l2_object_get_interlace_mode(format.fmt.pix.field,
4016 &info.interlace_mode))
4017 goto invalid_field;
4018 if (gst_structure_has_field(s, "interlace-mode"))
4019 {
4020 if (format.fmt.pix.field != field)
4021 goto invalid_field;
4022 }
4023
4024 if (gst_aml_v4l2_object_get_colorspace(&format, &info.colorimetry))
4025 {
4026 if (gst_structure_has_field(s, "colorimetry"))
4027 {
xuesong.jiange1a19662022-06-21 20:30:22 +08004028 if (!gst_aml_v4l2_video_colorimetry_matches(&info.colorimetry, gst_structure_get_string(s, "colorimetry")))
4029 {
4030 // goto invalid_colorimetry;
4031 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08004032 }
4033 }
4034 else
4035 {
 4036        /* The driver (or libv4l2) is misbehaving, just ignore colorimetry from
4037 * the TRY_FMT */
4038 disable_colorimetry = TRUE;
4039 if (gst_structure_has_field(s, "colorimetry"))
4040 gst_structure_remove_field(s, "colorimetry");
4041 }
4042
4043 /* In case we have skipped the try_fmt probes, we'll need to set the
4044 * colorimetry and interlace-mode back into the caps. */
4045 if (v4l2object->skip_try_fmt_probes)
4046 {
4047 if (!disable_colorimetry && !gst_structure_has_field(s, "colorimetry"))
4048 {
4049 gchar *str = gst_video_colorimetry_to_string(&info.colorimetry);
4050 gst_structure_set(s, "colorimetry", G_TYPE_STRING, str, NULL);
4051 g_free(str);
4052 }
4053
4054 if (!gst_structure_has_field(s, "interlace-mode"))
4055 gst_structure_set(s, "interlace-mode", G_TYPE_STRING,
4056 gst_video_interlace_mode_to_string(info.interlace_mode), NULL);
4057 }
4058
4059 if (try_only) /* good enough for trying only */
4060 return TRUE;
4061
4062 if (GST_VIDEO_INFO_HAS_ALPHA(&info))
4063 {
4064 struct v4l2_control ctl = {
4065 0,
4066 };
4067 ctl.id = V4L2_CID_ALPHA_COMPONENT;
4068 ctl.value = 0xff;
4069
4070 if (v4l2object->ioctl(fd, VIDIOC_S_CTRL, &ctl) < 0)
4071 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4072 "Failed to set alpha component value");
4073 }
4074
4075 /* Is there a reason we require the caller to always specify a framerate? */
4076 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
4077 fps_d);
4078
4079 if (v4l2object->ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
4080 goto get_parm_failed;
4081
4082 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
4083 {
4084 GST_VIDEO_INFO_FPS_N(&info) =
4085 streamparm.parm.capture.timeperframe.denominator;
4086 GST_VIDEO_INFO_FPS_D(&info) =
4087 streamparm.parm.capture.timeperframe.numerator;
4088
4089 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got capture framerate: %u/%u",
4090 streamparm.parm.capture.timeperframe.denominator,
4091 streamparm.parm.capture.timeperframe.numerator);
4092
 4093        /* We used to skip frame rate setup if the camera was already set up
4094 * with the requested frame rate. This breaks some cameras though,
4095 * causing them to not output data (several models of Thinkpad cameras
4096 * have this problem at least).
4097 * So, don't skip. */
4098 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
4099 fps_n, fps_d);
4100 /* We want to change the frame rate, so check whether we can. Some cheap USB
4101 * cameras don't have the capability */
4102 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4103 {
4104 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4105 "Not setting capture framerate (not supported)");
4106 goto done;
4107 }
4108
4109 /* Note: V4L2 wants the frame interval, we have the frame rate */
4110 streamparm.parm.capture.timeperframe.numerator = fps_d;
4111 streamparm.parm.capture.timeperframe.denominator = fps_n;
4112
 4113        /* some cheap USB cams won't accept any change */
4114 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4115 goto set_parm_failed;
4116
4117 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
4118 streamparm.parm.capture.timeperframe.denominator > 0)
4119 {
4120 /* get new values */
4121 fps_d = streamparm.parm.capture.timeperframe.numerator;
4122 fps_n = streamparm.parm.capture.timeperframe.denominator;
4123
4124 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set capture framerate to %u/%u",
4125 fps_n, fps_d);
4126 }
4127 else
4128 {
4129 /* fix v4l2 capture driver to provide framerate values */
4130 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4131 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
4132 }
4133
4134 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4135 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4136 }
4137 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
4138 {
4139 GST_VIDEO_INFO_FPS_N(&info) =
4140 streamparm.parm.output.timeperframe.denominator;
4141 GST_VIDEO_INFO_FPS_D(&info) =
4142 streamparm.parm.output.timeperframe.numerator;
4143
4144 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got output framerate: %u/%u",
4145 streamparm.parm.output.timeperframe.denominator,
4146 streamparm.parm.output.timeperframe.numerator);
4147
4148 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting output framerate to %u/%u",
4149 fps_n, fps_d);
4150 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4151 {
4152 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4153 "Not setting output framerate (not supported)");
4154 goto done;
4155 }
4156
4157 /* Note: V4L2 wants the frame interval, we have the frame rate */
4158 streamparm.parm.output.timeperframe.numerator = fps_d;
4159 streamparm.parm.output.timeperframe.denominator = fps_n;
4160
4161 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4162 goto set_parm_failed;
4163
4164 if (streamparm.parm.output.timeperframe.numerator > 0 &&
4165 streamparm.parm.output.timeperframe.denominator > 0)
4166 {
4167 /* get new values */
4168 fps_d = streamparm.parm.output.timeperframe.numerator;
4169 fps_n = streamparm.parm.output.timeperframe.denominator;
4170
4171 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set output framerate to %u/%u",
4172 fps_n, fps_d);
4173 }
4174 else
4175 {
4176 /* fix v4l2 output driver to provide framerate values */
4177 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4178 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
4179 }
4180
4181 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4182 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4183 }
4184
4185done:
4186 /* add boolean return, so we can fail on drivers bugs */
4187 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &format, &info, &align);
4188
4189 /* now configure the pool */
4190 if (!gst_aml_v4l2_object_setup_pool(v4l2object, caps))
4191 goto pool_failed;
4192
4193 return TRUE;
4194
4195 /* ERRORS */
4196invalid_caps:
4197{
4198 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
4199 caps);
4200
4201 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4202 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
4203 return FALSE;
4204}
4205try_fmt_failed:
4206{
4207 if (errno == EINVAL)
4208 {
4209 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4210 (_("Device '%s' has no supported format"), v4l2object->videodev),
4211 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4212 GST_FOURCC_ARGS(pixelformat), width, height,
4213 g_strerror(errno)));
4214 }
4215 else
4216 {
4217 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4218 (_("Device '%s' failed during initialization"),
4219 v4l2object->videodev),
4220 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4221 GST_FOURCC_ARGS(pixelformat), width, height,
4222 g_strerror(errno)));
4223 }
4224 return FALSE;
4225}
4226set_fmt_failed:
4227{
4228 if (errno == EBUSY)
4229 {
4230 GST_AML_V4L2_ERROR(error, RESOURCE, BUSY,
4231 (_("Device '%s' is busy"), v4l2object->videodev),
4232 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4233 GST_FOURCC_ARGS(pixelformat), width, height,
4234 g_strerror(errno)));
4235 }
4236 else if (errno == EINVAL)
4237 {
4238 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4239 (_("Device '%s' has no supported format"), v4l2object->videodev),
4240 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4241 GST_FOURCC_ARGS(pixelformat), width, height,
4242 g_strerror(errno)));
4243 }
4244 else
4245 {
4246 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4247 (_("Device '%s' failed during initialization"),
4248 v4l2object->videodev),
4249 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4250 GST_FOURCC_ARGS(pixelformat), width, height,
4251 g_strerror(errno)));
4252 }
4253 return FALSE;
4254}
4255invalid_dimensions:
4256{
4257 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4258 (_("Device '%s' cannot capture at %dx%d"),
4259 v4l2object->videodev, width, height),
4260 ("Tried to capture at %dx%d, but device returned size %dx%d",
4261 width, height, format.fmt.pix.width, format.fmt.pix.height));
4262 return FALSE;
4263}
4264invalid_pixelformat:
4265{
4266 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4267 (_("Device '%s' cannot capture in the specified format"),
4268 v4l2object->videodev),
4269 ("Tried to capture in %" GST_FOURCC_FORMAT
4270 ", but device returned format"
4271 " %" GST_FOURCC_FORMAT,
4272 GST_FOURCC_ARGS(pixelformat),
4273 GST_FOURCC_ARGS(format.fmt.pix.pixelformat)));
4274 return FALSE;
4275}
4276invalid_planes:
4277{
4278 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
 4279                       (_("Device '%s' does not support non-contiguous planes"),
4280 v4l2object->videodev),
4281 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
4282 return FALSE;
4283}
4284invalid_field:
4285{
4286 enum v4l2_field wanted_field;
4287
4288 if (is_mplane)
4289 wanted_field = format.fmt.pix_mp.field;
4290 else
4291 wanted_field = format.fmt.pix.field;
4292
4293 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4294 (_("Device '%s' does not support %s interlacing"),
4295 v4l2object->videodev,
4296 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
4297 ("Device wants %s interlacing",
4298 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4299 return FALSE;
4300}
hanghang.luo3128f102022-08-18 10:36:19 +08004301#ifdef DELETE_FOR_LGE
xuesong.jiangae1548e2022-05-06 16:38:46 +08004302invalid_colorimetry:
4303{
4304 gchar *wanted_colorimetry;
4305
4306 wanted_colorimetry = gst_video_colorimetry_to_string(&info.colorimetry);
4307
4308 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4309 (_("Device '%s' does not support %s colorimetry"),
4310 v4l2object->videodev, gst_structure_get_string(s, "colorimetry")),
4311 ("Device wants %s colorimetry", wanted_colorimetry));
4312
4313 g_free(wanted_colorimetry);
4314 return FALSE;
4315}
hanghang.luo3128f102022-08-18 10:36:19 +08004316#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08004317get_parm_failed:
4318{
4319 /* it's possible that this call is not supported */
4320 if (errno != EINVAL && errno != ENOTTY)
4321 {
4322 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4323 (_("Could not get parameters on device '%s'"),
4324 v4l2object->videodev),
4325 GST_ERROR_SYSTEM);
4326 }
4327 goto done;
4328}
4329set_parm_failed:
4330{
4331 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4332 (_("Video device did not accept new frame rate setting.")),
4333 GST_ERROR_SYSTEM);
4334 goto done;
4335}
4336pool_failed:
4337{
4338 /* setup_pool already send the error */
4339 return FALSE;
4340}
4341}
4342
4343gboolean
4344gst_aml_v4l2_object_set_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4345 GstAmlV4l2Error *error)
4346{
4347 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4348 caps);
4349 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, FALSE, error);
4350}
4351
4352gboolean
4353gst_aml_v4l2_object_try_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4354 GstAmlV4l2Error *error)
4355{
4356 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4357 caps);
4358 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, TRUE, error);
4359}
4360
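/* Block until the device fd becomes readable. Returns GST_FLOW_FLUSHING when the
 * poll is unlocked (EBUSY), and disables further polling if the driver reports
 * ENXIO. */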
4361GstFlowReturn
4362gst_aml_v4l2_object_poll(GstAmlV4l2Object *v4l2object)
4363{
4364 gint ret;
4365
4366 if (!v4l2object->can_poll_device)
4367 goto done;
4368
4369 GST_LOG_OBJECT(v4l2object, "polling device");
4370
4371again:
4372 ret = gst_poll_wait(v4l2object->poll, GST_CLOCK_TIME_NONE);
4373 if (G_UNLIKELY(ret < 0))
4374 {
4375 switch (errno)
4376 {
4377 case EBUSY:
4378 goto stopped;
4379 case EAGAIN:
4380 case EINTR:
4381 goto again;
4382 case ENXIO:
4383 GST_WARNING_OBJECT(v4l2object,
4384 "v4l2 device doesn't support polling. Disabling"
4385 " using libv4l2 in this case may cause deadlocks");
4386 v4l2object->can_poll_device = FALSE;
4387 goto done;
4388 default:
4389 goto select_error;
4390 }
4391 }
4392
4393done:
4394 return GST_FLOW_OK;
4395
4396 /* ERRORS */
4397stopped:
4398{
4399 GST_DEBUG_OBJECT(v4l2object, "stop called");
4400 return GST_FLOW_FLUSHING;
4401}
4402select_error:
4403{
4404 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ, (NULL),
4405 ("poll error %d: %s (%d)", ret, g_strerror(errno), errno));
4406 return GST_FLOW_ERROR;
4407}
4408}
4409
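/* Dequeue one pending V4L2 event and translate source-change and EOS events into
 * the corresponding custom flow returns. */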
4410GstFlowReturn
4411gst_aml_v4l2_object_dqevent(GstAmlV4l2Object *v4l2object)
4412{
4413 GstFlowReturn res;
4414 struct v4l2_event evt;
4415
4416 if ((res = gst_aml_v4l2_object_poll(v4l2object)) != GST_FLOW_OK)
4417 goto poll_failed;
4418
4419 memset(&evt, 0x00, sizeof(struct v4l2_event));
4420 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DQEVENT, &evt) < 0)
4421 goto dqevent_failed;
4422
4423 switch (evt.type)
4424 {
4425 case V4L2_EVENT_SOURCE_CHANGE:
4426 return GST_AML_V4L2_FLOW_SOURCE_CHANGE;
4427 break;
4428 case V4L2_EVENT_EOS:
4429 return GST_AML_V4L2_FLOW_LAST_BUFFER;
4430 break;
4431 default:
4432 break;
4433 }
4434
4435 return GST_FLOW_OK;
4436
4437 /* ERRORS */
4438poll_failed:
4439{
4440 GST_DEBUG_OBJECT(v4l2object, "poll error %s", gst_flow_get_name(res));
4441 return res;
4442}
4443dqevent_failed:
4444{
4445 return GST_FLOW_ERROR;
4446}
4447}
4448
4449/**
4450 * gst_aml_v4l2_object_acquire_format:
 4451 * @v4l2object: the object
 4452 * @info: a GstVideoInfo to be filled
 4453 *
 4454 * Acquire the driver-chosen format. This is useful in decoder or encoder elements where
 4455 * the output format is chosen by the HW.
4456 *
4457 * Returns: %TRUE on success, %FALSE on failure.
4458 */
4459gboolean
4460gst_aml_v4l2_object_acquire_format(GstAmlV4l2Object *v4l2object, GstVideoInfo *info)
4461{
4462 struct v4l2_fmtdesc *fmtdesc;
4463 struct v4l2_format fmt;
4464 struct v4l2_crop crop;
4465 struct v4l2_selection sel;
4466 struct v4l2_rect *r = NULL;
4467 GstVideoFormat format;
4468 guint width, height;
4469 GstVideoAlignment align;
4470
4471 gst_video_info_init(info);
4472 gst_video_alignment_reset(&align);
4473
4474 memset(&fmt, 0x00, sizeof(struct v4l2_format));
4475 fmt.type = v4l2object->type;
4476 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4477 goto get_fmt_failed;
4478
4479 fmtdesc = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object,
4480 fmt.fmt.pix.pixelformat);
4481 if (fmtdesc == NULL)
4482 goto unsupported_format;
4483
 4484    /* No need to care about mplane, the first four params are the same */
4485 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fmt.fmt.pix.pixelformat);
4486
 4487    /* fails if we do not translate the fmt.pix.pixelformat to GstVideoFormat */
4488 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4489 goto unsupported_format;
4490
4491 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4492 goto invalid_dimensions;
4493
4494 width = fmt.fmt.pix.width;
4495 height = fmt.fmt.pix.height;
4496
4497 /* Use the default compose rectangle */
4498 memset(&sel, 0, sizeof(struct v4l2_selection));
4499 sel.type = v4l2object->type;
4500 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4501 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
4502 {
4503 r = &sel.r;
4504 }
4505 else
4506 {
4507 /* For ancient kernels, fall back to G_CROP */
4508 memset(&crop, 0, sizeof(struct v4l2_crop));
4509 crop.type = v4l2object->type;
4510 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4511 r = &crop.c;
4512 }
4513 if (r)
4514 {
4515 align.padding_left = r->left;
4516 align.padding_top = r->top;
4517 align.padding_right = width - r->width - r->left;
4518 align.padding_bottom = height - r->height - r->top;
4519 width = r->width;
4520 height = r->height;
4521 }
4522
4523 gst_video_info_set_format(info, format, width, height);
4524
4525 switch (fmt.fmt.pix.field)
4526 {
4527 case V4L2_FIELD_ANY:
4528 case V4L2_FIELD_NONE:
4529 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4530 break;
4531 case V4L2_FIELD_INTERLACED:
4532 case V4L2_FIELD_INTERLACED_TB:
4533 case V4L2_FIELD_INTERLACED_BT:
4534 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4535 break;
4536 default:
4537 goto unsupported_field;
4538 }
4539
4540 gst_aml_v4l2_object_get_colorspace(&fmt, &info->colorimetry);
4541
4542 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &fmt, info, &align);
4543
4544 /* Shall we setup the pool ? */
4545
4546 return TRUE;
4547
4548get_fmt_failed:
4549{
4550 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4551 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4552 return FALSE;
4553}
4554invalid_dimensions:
4555{
4556 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4557 (_("Video device returned invalid dimensions.")),
4558 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4559 fmt.fmt.pix.height));
4560 return FALSE;
4561}
4562unsupported_field:
4563{
4564 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4565 (_("Video device uses an unsupported interlacing method.")),
4566 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4567 return FALSE;
4568}
4569unsupported_format:
4570{
4571 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4572 (_("Video device uses an unsupported pixel format.")),
4573 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4574 GST_FOURCC_ARGS(fmt.fmt.pix.pixelformat)));
4575 return FALSE;
4576}
4577}
4578
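/* Program the capture crop rectangle from the saved alignment, preferring
 * VIDIOC_S_SELECTION and falling back to the legacy VIDIOC_S_CROP ioctls when the
 * selection API is not implemented (ENOTTY). */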
4579gboolean
4580gst_aml_v4l2_object_set_crop(GstAmlV4l2Object *obj)
4581{
4582 struct v4l2_selection sel = {0};
4583 struct v4l2_crop crop = {0};
4584
4585 sel.type = obj->type;
4586 sel.target = V4L2_SEL_TGT_CROP;
4587 sel.flags = 0;
4588 sel.r.left = obj->align.padding_left;
4589 sel.r.top = obj->align.padding_top;
4590 sel.r.width = obj->info.width;
4591 sel.r.height = obj->info.height;
4592
4593 crop.type = obj->type;
4594 crop.c = sel.r;
4595
4596 if (obj->align.padding_left + obj->align.padding_top +
4597 obj->align.padding_right + obj->align.padding_bottom ==
4598 0)
4599 {
4600 GST_DEBUG_OBJECT(obj->dbg_obj, "no cropping needed");
4601 return TRUE;
4602 }
4603
4604 GST_DEBUG_OBJECT(obj->dbg_obj,
4605 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4606 crop.c.width, crop.c.height);
4607
4608 if (obj->ioctl(obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0)
4609 {
4610 if (errno != ENOTTY)
4611 {
4612 GST_WARNING_OBJECT(obj->dbg_obj,
4613 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4614 g_strerror(errno));
4615 return FALSE;
4616 }
4617 else
4618 {
4619 if (obj->ioctl(obj->video_fd, VIDIOC_S_CROP, &crop) < 0)
4620 {
4621 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_S_CROP failed");
4622 return FALSE;
4623 }
4624
4625 if (obj->ioctl(obj->video_fd, VIDIOC_G_CROP, &crop) < 0)
4626 {
4627 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_G_CROP failed");
4628 return FALSE;
4629 }
4630
4631 sel.r = crop.c;
4632 }
4633 }
4634
4635 GST_DEBUG_OBJECT(obj->dbg_obj,
4636 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4637 crop.c.width, crop.c.height);
4638
4639 return TRUE;
4640}
4641
4642gboolean
4643gst_aml_v4l2_object_caps_equal(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4644{
4645 GstStructure *config;
4646 GstCaps *oldcaps;
4647 gboolean ret;
4648
4649 if (!v4l2object->pool)
4650 return FALSE;
4651
4652 config = gst_buffer_pool_get_config(v4l2object->pool);
4653 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4654
4655 ret = oldcaps && gst_caps_is_equal(caps, oldcaps);
4656
4657 gst_structure_free(config);
4658
4659 return ret;
4660}
4661
4662gboolean
4663gst_aml_v4l2_object_caps_is_subset(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4664{
4665 GstStructure *config;
4666 GstCaps *oldcaps;
4667 gboolean ret;
4668
4669 if (!v4l2object->pool)
4670 return FALSE;
4671
4672 config = gst_buffer_pool_get_config(v4l2object->pool);
4673 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4674
4675 ret = oldcaps && gst_caps_is_subset(oldcaps, caps);
4676
4677 gst_structure_free(config);
4678
4679 return ret;
4680}
4681
4682GstCaps *
4683gst_aml_v4l2_object_get_current_caps(GstAmlV4l2Object *v4l2object)
4684{
4685 GstStructure *config;
4686 GstCaps *oldcaps;
4687
4688 if (!v4l2object->pool)
4689 return NULL;
4690
4691 config = gst_buffer_pool_get_config(v4l2object->pool);
4692 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4693
4694 if (oldcaps)
4695 gst_caps_ref(oldcaps);
4696
4697 gst_structure_free(config);
4698
4699 return oldcaps;
4700}
4701
4702gboolean
4703gst_aml_v4l2_object_unlock(GstAmlV4l2Object *v4l2object)
4704{
4705 gboolean ret = TRUE;
4706
4707 GST_LOG_OBJECT(v4l2object->dbg_obj, "start flushing");
4708
4709 gst_poll_set_flushing(v4l2object->poll, TRUE);
4710
4711 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4712 gst_buffer_pool_set_flushing(v4l2object->pool, TRUE);
4713
4714 return ret;
4715}
4716
4717gboolean
4718gst_aml_v4l2_object_unlock_stop(GstAmlV4l2Object *v4l2object)
4719{
4720 gboolean ret = TRUE;
4721
4722 GST_LOG_OBJECT(v4l2object->dbg_obj, "stop flushing");
4723
4724 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4725 gst_buffer_pool_set_flushing(v4l2object->pool, FALSE);
4726
4727 gst_poll_set_flushing(v4l2object->poll, FALSE);
4728
4729 return ret;
4730}
4731
4732gboolean
4733gst_aml_v4l2_object_stop(GstAmlV4l2Object *v4l2object)
4734{
4735 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "stopping");
4736
4737 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
4738 goto done;
4739 if (!GST_AML_V4L2_IS_ACTIVE(v4l2object))
4740 goto done;
4741
4742 if (v4l2object->pool)
4743 {
4744 if (!gst_aml_v4l2_buffer_pool_orphan(&v4l2object->pool))
4745 {
4746 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "deactivating pool");
4747 gst_buffer_pool_set_active(v4l2object->pool, FALSE);
4748 gst_object_unref(v4l2object->pool);
4749 }
4750 v4l2object->pool = NULL;
4751 }
4752
4753 GST_AML_V4L2_SET_INACTIVE(v4l2object);
4754
4755done:
4756 return TRUE;
4757}
4758
4759GstCaps *
4760gst_aml_v4l2_object_probe_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4761{
4762 GstCaps *ret;
4763 GSList *walk;
4764 GSList *formats;
4765
4766 formats = gst_aml_v4l2_object_get_format_list(v4l2object);
4767
4768 ret = gst_caps_new_empty();
4769
4770 if (v4l2object->keep_aspect && !v4l2object->par)
4771 {
4772 struct v4l2_cropcap cropcap;
4773
4774 memset(&cropcap, 0, sizeof(cropcap));
4775
4776 cropcap.type = v4l2object->type;
4777 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
4778 {
4779 if (errno != ENOTTY)
4780 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4781 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4782 g_strerror(errno));
4783 }
4784 else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator)
4785 {
4786 v4l2object->par = g_new0(GValue, 1);
4787 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
4788 gst_value_set_fraction(v4l2object->par, cropcap.pixelaspect.numerator,
4789 cropcap.pixelaspect.denominator);
4790 }
4791 }
4792
4793 for (walk = formats; walk; walk = walk->next)
4794 {
4795 struct v4l2_fmtdesc *format;
4796 GstStructure *template;
4797 GstCaps *tmp, *tmp2;
4798
4799 format = (struct v4l2_fmtdesc *)walk->data;
4800
4801 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(format->pixelformat);
4802
4803 if (!template)
4804 {
4805 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4806 "unknown format %" GST_FOURCC_FORMAT,
4807 GST_FOURCC_ARGS(format->pixelformat));
4808 continue;
4809 }
4810
4811 /* If we have a filter, check if we need to probe this format or not */
4812 if (filter)
4813 {
4814 GstCaps *format_caps = gst_caps_new_empty();
4815
4816 gst_caps_append_structure(format_caps, gst_structure_copy(template));
xuesong.jiange1a19662022-06-21 20:30:22 +08004817 GST_INFO_OBJECT(v4l2object->dbg_obj, "format_caps: %" GST_PTR_FORMAT, format_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004818
4819 if (!gst_caps_can_intersect(format_caps, filter))
4820 {
4821 gst_caps_unref(format_caps);
4822 gst_structure_free(template);
4823 continue;
4824 }
4825
4826 gst_caps_unref(format_caps);
4827 }
4828
4829 tmp = gst_aml_v4l2_object_probe_caps_for_format(v4l2object,
4830 format->pixelformat, template);
xuesong.jiange1a19662022-06-21 20:30:22 +08004831 GST_INFO_OBJECT(v4l2object->dbg_obj, "tmp caps: %" GST_PTR_FORMAT, tmp);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004832
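        /* Append each probed structure twice: once with system memory and once with
         * the memory:DMABuf caps feature. */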
4833 if (tmp)
4834 {
4835 tmp2 = gst_caps_copy(tmp);
4836 gst_caps_set_features_simple(tmp2, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
4837 gst_caps_append(ret, tmp);
4838 gst_caps_append(ret, tmp2);
4839 }
4840
4841 gst_structure_free(template);
4842 }
4843
4844 if (filter)
4845 {
4846 GstCaps *tmp;
4847
4848 tmp = ret;
4849 ret = gst_caps_intersect_full(filter, ret, GST_CAPS_INTERSECT_FIRST);
4850 gst_caps_unref(tmp);
4851 }
4852
4853 GST_INFO_OBJECT(v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
4854
4855 return ret;
4856}
4857
4858GstCaps *
4859gst_aml_v4l2_object_get_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4860{
4861 GstCaps *ret;
4862
4863 if (v4l2object->probed_caps == NULL)
4864 v4l2object->probed_caps = gst_aml_v4l2_object_probe_caps(v4l2object, NULL);
4865
4866 if (filter)
4867 {
4868 ret = gst_caps_intersect_full(filter, v4l2object->probed_caps,
4869 GST_CAPS_INTERSECT_FIRST);
4870 }
4871 else
4872 {
4873 ret = gst_caps_ref(v4l2object->probed_caps);
4874 }
4875
4876 return ret;
4877}
4878
4879gboolean
4880gst_aml_v4l2_object_decide_allocation(GstAmlV4l2Object *obj, GstQuery *query)
4881{
4882 GstCaps *caps;
4883 GstBufferPool *pool = NULL, *other_pool = NULL;
4884 GstStructure *config;
4885 guint size, min, max, own_min = 0;
4886 gboolean update;
4887 gboolean has_video_meta;
4888 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
4889 GstAllocator *allocator = NULL;
4890 GstAllocationParams params = {0};
4891
4892 GST_DEBUG_OBJECT(obj->dbg_obj, "decide allocation");
4893
4894 g_return_val_if_fail(obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
4895 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
4896 FALSE);
4897
4898 gst_query_parse_allocation(query, &caps, NULL);
4899
4900 if (obj->pool == NULL)
4901 {
4902 if (!gst_aml_v4l2_object_setup_pool(obj, caps))
4903 goto pool_failed;
4904 }
4905
4906 if (gst_query_get_n_allocation_params(query) > 0)
4907 gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
4908
4909 if (gst_query_get_n_allocation_pools(query) > 0)
4910 {
4911 gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
4912 update = TRUE;
4913 }
4914 else
4915 {
4916 pool = NULL;
4917 min = max = 0;
4918 size = 0;
4919 update = FALSE;
4920 }
4921
4922 GST_DEBUG_OBJECT(obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool);
4923
4924 has_video_meta =
4925 gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
4926
4927 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
4928
4929 gst_aml_v4l2_get_driver_min_buffers(obj);
 4930    /* We can't share our own pool, if it exceeds V4L2 capacity */
4931 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
4932 can_share_own_pool = FALSE;
4933
4934 /* select a pool */
4935 switch (obj->mode)
4936 {
4937 case GST_V4L2_IO_RW:
4938 if (pool)
4939 {
4940 /* in READ/WRITE mode, prefer a downstream pool because our own pool
4941 * doesn't help much, we have to write to it as well */
4942 GST_DEBUG_OBJECT(obj->dbg_obj,
4943 "read/write mode: using downstream pool");
 4944            /* use the biggest size, when we use our own pool we can't really do any
4945 * other size than what the hardware gives us but for downstream pools
4946 * we can try */
4947 size = MAX(size, obj->info.size);
4948 }
4949 else if (can_share_own_pool)
4950 {
4951 /* no downstream pool, use our own then */
4952 GST_DEBUG_OBJECT(obj->dbg_obj,
4953 "read/write mode: no downstream pool, using our own");
4954 pool = gst_object_ref(obj->pool);
4955 size = obj->info.size;
4956 pushing_from_our_pool = TRUE;
4957 }
4958 break;
4959
4960 case GST_V4L2_IO_USERPTR:
4961 case GST_V4L2_IO_DMABUF_IMPORT:
4962 /* in importing mode, prefer our own pool, and pass the other pool to
4963 * our own, so it can serve itself */
4964 if (pool == NULL)
4965 goto no_downstream_pool;
4966 gst_aml_v4l2_buffer_pool_set_other_pool(GST_AML_V4L2_BUFFER_POOL(obj->pool),
4967 pool);
4968 other_pool = pool;
4969 gst_object_unref(pool);
4970 pool = gst_object_ref(obj->pool);
4971 size = obj->info.size;
4972 break;
4973
4974 case GST_V4L2_IO_MMAP:
4975 case GST_V4L2_IO_DMABUF:
4976 /* in streaming mode, prefer our own pool */
4977 /* Check if we can use it ... */
4978 if (can_share_own_pool)
4979 {
4980 if (pool)
4981 gst_object_unref(pool);
4982 pool = gst_object_ref(obj->pool);
4983 size = obj->info.size;
4984 GST_DEBUG_OBJECT(obj->dbg_obj,
4985 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
4986 pushing_from_our_pool = TRUE;
4987 }
4988 else if (pool)
4989 {
4990 GST_DEBUG_OBJECT(obj->dbg_obj,
4991 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
4992 pool);
4993 }
4994 else
4995 {
4996 GST_DEBUG_OBJECT(obj->dbg_obj,
4997 "streaming mode: no usable pool, copying to generic pool");
4998 size = MAX(size, obj->info.size);
4999 }
5000 break;
5001 case GST_V4L2_IO_AUTO:
5002 default:
5003 GST_WARNING_OBJECT(obj->dbg_obj, "unhandled mode");
5004 break;
5005 }
5006
5007 if (size == 0)
5008 goto no_size;
5009
5010 /* If pushing from our own pool, configure it with queried minimum,
5011 * otherwise use the minimum required */
5012 if (pushing_from_our_pool)
5013 {
 5014        /* When pushing from our own pool, we need what the downstream one wants,
 5015         * to be able to fill the pipeline, plus the minimum required by the decoder
 5016         * according to the driver and 2 more, so we don't end up with everything
 5017         * downstream or held by the decoder. We account 2 buffers for v4l2 so that
 5018         * while one is being pushed downstream the other one can already be queued
 5019         * for the next frame. */
5020 own_min = min + obj->min_buffers + 2;
5021
 5022        /* If no allocation parameters were provided, allow for a few more
 5023         * buffers and enable the copy threshold */
5024 if (!update)
5025 {
5026 own_min += 2;
5027 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
5028 TRUE);
5029 }
5030 else
5031 {
5032 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
5033 FALSE);
5034 }
5035 }
5036 else
5037 {
 5038        /* In this case we'll have to configure two buffer pools. For our buffer
 5039         * pool, we'll need what the driver wants, and one more, so we can dequeue */
5040 own_min = obj->min_buffers + 1;
5041 own_min = MAX(own_min, GST_AML_V4L2_MIN_BUFFERS);
5042
5043 /* for the downstream pool, we keep what downstream wants, though ensure
5044 * at least a minimum if downstream didn't suggest anything (we are
5045 * expecting the base class to create a default one for the context) */
5046 min = MAX(min, GST_AML_V4L2_MIN_BUFFERS);
5047
5048 /* To import we need the other pool to hold at least own_min */
5049 if (obj->pool == pool)
5050 min += own_min;
5051 }
5052
5053 /* Request a bigger max, if one was suggested but it's too small */
5054 if (max != 0)
5055 max = MAX(min, max);
5056
5057 /* First step, configure our own pool */
5058 config = gst_buffer_pool_get_config(obj->pool);
5059
5060 if (obj->need_video_meta || has_video_meta)
5061 {
5062 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5063 gst_buffer_pool_config_add_option(config,
5064 GST_BUFFER_POOL_OPTION_VIDEO_META);
5065 }
5066
5067 gst_buffer_pool_config_set_allocator(config, allocator, &params);
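    /* Pad both the minimum and maximum with DEFAULT_EXTRA_CAPTURE_BUF_SIZE extra
     * capture buffers, presumably as headroom for frames held downstream. */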
5068 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5069
5070 GST_DEBUG_OBJECT(obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config);
5071
 5072    /* Our pool often needs to adjust the value */
5073 if (!gst_buffer_pool_set_config(obj->pool, config))
5074 {
5075 config = gst_buffer_pool_get_config(obj->pool);
5076
5077 GST_DEBUG_OBJECT(obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config);
5078
5079 /* our pool will adjust the maximum buffer, which we are fine with */
5080 if (!gst_buffer_pool_set_config(obj->pool, config))
5081 goto config_failed;
5082 }
5083
5084 /* Now configure the other pool if different */
5085 if (obj->pool != pool)
5086 other_pool = pool;
5087
5088 if (other_pool)
5089 {
5090 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)obj->element;
5091 if (self->is_secure_path)
5092 {
 5093            params.flags |= GST_MEMORY_FLAG_LAST << 1; // in drmallocator GST_MEMORY_FLAG_LAST << 1 represents GST_MEMORY_FLAG_SECURE
5094 GST_DEBUG_OBJECT(obj, "set secure flag for drmbufferpool flag:0x%x", params.flags);
5095 }
5096 config = gst_buffer_pool_get_config(other_pool);
5097 gst_buffer_pool_config_set_allocator(config, allocator, &params);
5098 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5099 gst_buffer_pool_config_set_video_alignment(config, &obj->align);
5100
5101 GST_DEBUG_OBJECT(obj->dbg_obj, "setting other pool config to %" GST_PTR_FORMAT, config);
5102
5103 /* if downstream supports video metadata, add this to the pool config */
5104 if (has_video_meta)
5105 {
5106 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5107 gst_buffer_pool_config_add_option(config,
5108 GST_BUFFER_POOL_OPTION_VIDEO_META);
5109 }
5110
5111 if (!gst_buffer_pool_set_config(other_pool, config))
5112 {
5113 config = gst_buffer_pool_get_config(other_pool);
5114
5115 if (!gst_buffer_pool_config_validate_params(config, caps, size, min,
5116 max))
5117 {
5118 gst_structure_free(config);
5119 goto config_failed;
5120 }
5121
5122 if (!gst_buffer_pool_set_config(other_pool, config))
5123 goto config_failed;
5124 }
5125 }
5126
5127 if (pool)
5128 {
5129 /* For simplicity, simply read back the active configuration, so our base
 5130         * class gets the right information */
5131 config = gst_buffer_pool_get_config(pool);
5132 gst_buffer_pool_config_get_params(config, NULL, &size, &min, &max);
5133 gst_structure_free(config);
5134 }
5135
5136 if (update)
5137 gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
5138 else
5139 gst_query_add_allocation_pool(query, pool, size, min, max);
5140
5141 if (allocator)
5142 gst_object_unref(allocator);
5143
5144 if (pool)
5145 gst_object_unref(pool);
5146
5147 return TRUE;
5148
5149pool_failed:
5150{
5151 /* setup_pool already send the error */
5152 goto cleanup;
5153}
5154config_failed:
5155{
5156 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5157 (_("Failed to configure internal buffer pool.")), (NULL));
5158 goto cleanup;
5159}
5160no_size:
5161{
5162 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5163 (_("Video device did not suggest any buffer size.")), (NULL));
5164 goto cleanup;
5165}
5166cleanup:
5167{
5168 if (allocator)
5169 gst_object_unref(allocator);
5170
5171 if (pool)
5172 gst_object_unref(pool);
5173 return FALSE;
5174}
5175no_downstream_pool:
5176{
5177 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5178 (_("No downstream pool to import from.")),
5179 ("When importing DMABUF or USERPTR, we need a pool to import from"));
5180 return FALSE;
5181}
5182}
5183
5184gboolean
5185gst_aml_v4l2_object_propose_allocation(GstAmlV4l2Object *obj, GstQuery *query)
5186{
5187 GstBufferPool *pool;
5188 /* we need at least 2 buffers to operate */
5189 guint size, min, max;
5190 GstCaps *caps;
5191 gboolean need_pool;
5192
 5193    /* Set default allocation parameters */
5194 size = obj->info.size;
5195 min = GST_AML_V4L2_MIN_BUFFERS;
5196 max = VIDEO_MAX_FRAME;
5197
5198 gst_query_parse_allocation(query, &caps, &need_pool);
5199
5200 if (caps == NULL)
5201 goto no_caps;
5202
5203 switch (obj->mode)
5204 {
5205 case GST_V4L2_IO_MMAP:
5206 case GST_V4L2_IO_DMABUF:
5207 if ((pool = obj->pool))
5208 gst_object_ref(pool);
5209 break;
5210 default:
5211 pool = NULL;
5212 break;
5213 }
5214
5215 if (pool != NULL)
5216 {
5217 GstCaps *pcaps;
5218 GstStructure *config;
5219
5220 /* we had a pool, check caps */
5221 config = gst_buffer_pool_get_config(pool);
5222 gst_buffer_pool_config_get_params(config, &pcaps, NULL, NULL, NULL);
5223
5224 GST_DEBUG_OBJECT(obj->dbg_obj,
5225 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
5226 if (!gst_caps_is_equal(caps, pcaps))
5227 {
5228 gst_structure_free(config);
5229 gst_object_unref(pool);
5230 goto different_caps;
5231 }
5232 gst_structure_free(config);
5233 }
5234 gst_aml_v4l2_get_driver_min_buffers(obj);
5235
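    /* Advertise at least the driver-reported minimum, and never fewer than
     * GST_AML_V4L2_MIN_BUFFERS. */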
5236 min = MAX(obj->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
5237
5238 gst_query_add_allocation_pool(query, pool, size, min, max);
5239
5240 /* we also support various metadata */
5241 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
5242
5243 if (pool)
5244 gst_object_unref(pool);
5245
5246 return TRUE;
5247
5248 /* ERRORS */
5249no_caps:
5250{
5251 GST_DEBUG_OBJECT(obj->dbg_obj, "no caps specified");
5252 return FALSE;
5253}
5254different_caps:
5255{
5256 /* different caps, we can't use this pool */
5257 GST_DEBUG_OBJECT(obj->dbg_obj, "pool has different caps");
5258 return FALSE;
5259}
5260}
5261
5262gboolean
5263gst_aml_v4l2_object_try_import(GstAmlV4l2Object *obj, GstBuffer *buffer)
5264{
5265 GstVideoMeta *vmeta;
5266 guint n_mem = gst_buffer_n_memory(buffer);
5267
5268 /* only import if requested */
5269 switch (obj->mode)
5270 {
5271 case GST_V4L2_IO_USERPTR:
5272 case GST_V4L2_IO_DMABUF_IMPORT:
5273 break;
5274 default:
5275 GST_DEBUG_OBJECT(obj->dbg_obj,
5276 "The io-mode does not enable importation");
5277 return FALSE;
5278 }
5279
5280 vmeta = gst_buffer_get_video_meta(buffer);
5281 if (!vmeta && obj->need_video_meta)
5282 {
5283 GST_DEBUG_OBJECT(obj->dbg_obj, "Downstream buffer uses standard "
5284 "stride/offset while the driver does not.");
5285 return FALSE;
5286 }
5287
5288 /* we need matching strides/offsets and size */
5289 if (vmeta)
5290 {
5291 guint p;
5292 gboolean need_fmt_update = FALSE;
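        /* Larger downstream strides/offsets are acceptable but require updating the
         * driver format via VIDIOC_S_FMT below; smaller ones cannot be imported. */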
5293
5294 if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES(&obj->info))
5295 {
5296 GST_WARNING_OBJECT(obj->dbg_obj,
5297 "Cannot import buffers with different number planes");
5298 return FALSE;
5299 }
5300
5301 for (p = 0; p < vmeta->n_planes; p++)
5302 {
5303 if (vmeta->stride[p] < obj->info.stride[p])
5304 {
5305 GST_DEBUG_OBJECT(obj->dbg_obj,
 5306                                 "Not importing as remote stride %i is smaller than %i on plane %u",
5307 vmeta->stride[p], obj->info.stride[p], p);
5308 return FALSE;
5309 }
5310 else if (vmeta->stride[p] > obj->info.stride[p])
5311 {
5312 need_fmt_update = TRUE;
5313 }
5314
5315 if (vmeta->offset[p] < obj->info.offset[p])
5316 {
5317 GST_DEBUG_OBJECT(obj->dbg_obj,
5318 "Not importing as offset %" G_GSIZE_FORMAT
 5319                                 " is smaller than %" G_GSIZE_FORMAT " on plane %u",
5320 vmeta->offset[p], obj->info.offset[p], p);
5321 return FALSE;
5322 }
5323 else if (vmeta->offset[p] > obj->info.offset[p])
5324 {
5325 need_fmt_update = TRUE;
5326 }
5327 }
5328
5329 if (need_fmt_update)
5330 {
5331 struct v4l2_format format;
5332 gint wanted_stride[GST_VIDEO_MAX_PLANES] = {
5333 0,
5334 };
5335
5336 format = obj->format;
5337
5338 /* update the current format with the stride we want to import from */
5339 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5340 {
5341 guint i;
5342
5343 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted strides:");
5344
5345 for (i = 0; i < obj->n_v4l2_planes; i++)
5346 {
5347 gint stride = vmeta->stride[i];
5348
5349 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5350 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5351
5352 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
5353 wanted_stride[i] = stride;
5354 GST_DEBUG_OBJECT(obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
5355 }
5356 }
5357 else
5358 {
5359 gint stride = vmeta->stride[0];
5360
5361 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted stride: %i", stride);
5362
5363 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5364 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5365
5366 format.fmt.pix.bytesperline = stride;
5367 wanted_stride[0] = stride;
5368 }
5369
5370 if (obj->ioctl(obj->video_fd, VIDIOC_S_FMT, &format) < 0)
5371 {
5372 GST_WARNING_OBJECT(obj->dbg_obj,
5373 "Something went wrong trying to update current format: %s",
5374 g_strerror(errno));
5375 return FALSE;
5376 }
5377
5378 gst_aml_v4l2_object_save_format(obj, obj->fmtdesc, &format, &obj->info,
5379 &obj->align);
5380
5381 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5382 {
5383 guint i;
5384
5385 for (i = 0; i < obj->n_v4l2_planes; i++)
5386 {
5387 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i])
5388 {
5389 GST_DEBUG_OBJECT(obj->dbg_obj,
5390 "[%i] Driver did not accept the new stride (wants %i, got %i)",
5391 i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
5392 wanted_stride[i]);
5393 return FALSE;
5394 }
5395 }
5396 }
5397 else
5398 {
5399 if (format.fmt.pix.bytesperline != wanted_stride[0])
5400 {
5401 GST_DEBUG_OBJECT(obj->dbg_obj,
5402 "Driver did not accept the new stride (wants %i, got %i)",
5403 format.fmt.pix.bytesperline, wanted_stride[0]);
5404 return FALSE;
5405 }
5406 }
5407 }
5408 }
5409
 5410    /* we can always import a single memory buffer, but otherwise we need the
 5411     * same number of memory objects. */
5412 if (n_mem != 1 && n_mem != obj->n_v4l2_planes)
5413 {
5414 GST_DEBUG_OBJECT(obj->dbg_obj, "Can only import %i memory, "
 5415                         "buffer contains %u memory",
5416 obj->n_v4l2_planes, n_mem);
5417 return FALSE;
5418 }
5419
5420 /* For DMABuf importation we need DMABuf of course */
5421 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT)
5422 {
5423 guint i;
5424
5425 for (i = 0; i < n_mem; i++)
5426 {
5427 GstMemory *mem = gst_buffer_peek_memory(buffer, i);
5428
5429 if (!gst_is_dmabuf_memory(mem))
5430 {
5431 GST_DEBUG_OBJECT(obj->dbg_obj, "Cannot import non-DMABuf memory.");
5432 return FALSE;
5433 }
5434 }
5435 }
5436
5437 /* for the remaining, only the kernel driver can tell */
5438 return TRUE;
5439}
5440
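/* Enable the Amlogic-specific AML_V4L2_SET_DRMMODE control when buffers are imported
 * as DMABUF, marking the element as running on a secure (DRM-protected) path. */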
5441gboolean gst_aml_v4l2_set_drm_mode(GstAmlV4l2Object *v4l2object)
5442{
 5443    /* On Amlogic, an output object using GST_V4L2_IO_DMABUF_IMPORT implies secure memory */
5444 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
5445 {
5446 int rc;
5447 struct v4l2_queryctrl queryctrl;
5448 struct v4l2_control control;
5449
5450 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)v4l2object->element;
5451 self->is_secure_path = TRUE;
5452
5453#define V4L2_CID_USER_AMLOGIC_BASE (V4L2_CID_USER_BASE + 0x1100)
5454#define AML_V4L2_SET_DRMMODE (V4L2_CID_USER_AMLOGIC_BASE + 0)
5455 memset(&queryctrl, 0, sizeof(queryctrl));
5456 queryctrl.id = AML_V4L2_SET_DRMMODE;
5457
5458 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_QUERYCTRL, &queryctrl);
5459 if (rc == 0)
5460 {
5461 if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED))
5462 {
5463 memset(&control, 0, sizeof(control));
5464 control.id = AML_V4L2_SET_DRMMODE;
5465 control.value = 1;
5466 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_CTRL, &control);
5467 if (rc != 0)
5468 {
5469 GST_ERROR_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE fail: rc %d", rc);
5470 return FALSE;
5471 }
5472 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE set succ");
5473 return TRUE;
5474 }
5475 else
5476 {
5477 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE is disabled");
5478 return TRUE;
5479 }
5480 }
5481 else
5482 {
5483 GST_ERROR_OBJECT(v4l2object, "VIDIOC_QUERYCTRL for AML_V4L2_SET_DRMMODE fail");
5484 return FALSE;
5485 }
5486 }
5487 else
5488 {
5489 GST_DEBUG_OBJECT(v4l2object, "req mode is not GST_V4L2_IO_DMABUF_IMPORT, DRM mode does not need to be configured");
5490 return TRUE;
5491 }
5492}