1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include <config.h>
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <string.h>
28#include <sys/mman.h>
29#include <sys/ioctl.h>
30#include <stdio.h>
31
32#ifdef HAVE_GUDEV
33#include <gudev/gudev.h>
34#endif
35
36#include "ext/videodev2.h"
37#include "gstamlv4l2object.h"
38
39#include "gst/gst-i18n-plugin.h"
40
41#include <gst/video/video.h>
42#include <gst/allocators/gstdmabuf.h>
43
44GST_DEBUG_CATEGORY_EXTERN(aml_v4l2_debug);
45#define GST_CAT_DEFAULT aml_v4l2_debug
46
47#define DEFAULT_PROP_DEVICE_NAME NULL
48#define DEFAULT_PROP_DEVICE_FD -1
49#define DEFAULT_PROP_FLAGS 0
50#define DEFAULT_PROP_TV_NORM 0
51#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
52
53#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
54#define DEFAULT_EXTRA_CAPTURE_BUF_SIZE 3
55
56#define V4L2_CONFIG_PARM_DECODE_CFGINFO (1 << 0)
57#define V4L2_CONFIG_PARM_DECODE_PSINFO (1 << 1)
58#define V4L2_CONFIG_PARM_DECODE_HDRINFO (1 << 2)
59#define V4L2_CONFIG_PARM_DECODE_CNTINFO (1 << 3)
60
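/* Usage sketch (illustrative, not from the original code): the
 * V4L2_CONFIG_PARM_DECODE_* values above are independent bit flags, so a
 * caller signalling several decode-config sections at once would OR them:
 *
 *   guint32 parm_mask = V4L2_CONFIG_PARM_DECODE_CFGINFO |
 *                       V4L2_CONFIG_PARM_DECODE_PSINFO;
 *
 * The driver-side structure these bits are written into comes from the
 * Amlogic vendor headers and is not defined in this file.
 */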
61enum
62{
63 PROP_0,
64 V4L2_STD_OBJECT_PROPS,
65};
66
67/*
68 * common format / caps utilities:
69 */
70typedef enum
71{
72 GST_V4L2_RAW = 1 << 0,
73 GST_V4L2_CODEC = 1 << 1,
74 GST_V4L2_TRANSPORT = 1 << 2,
75 GST_V4L2_NO_PARSE = 1 << 3,
76 GST_V4L2_ALL = 0xffff
77} GstAmlV4L2FormatFlags;
78
79typedef struct
80{
81 guint32 format;
82 gboolean dimensions;
83 GstAmlV4L2FormatFlags flags;
84} GstAmlV4L2FormatDesc;
85
86static const GstAmlV4L2FormatDesc gst_aml_v4l2_formats[] = {
87 /* RGB formats */
88 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
102
103 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
104 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
105 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
109
110 /* Grey formats */
111 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
112 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
113 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
114 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
118 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
119
120 /* Palette formats */
121 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
122
123 /* Chrominance formats */
124 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
125
126 /* Luminance+Chrominance formats */
127 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
128 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
129 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
130 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
131 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
132 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
142 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
143 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
144 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
148
149 /* two planes -- one Y, one Cr + Cb interleaved */
150 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
151 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
153 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
154 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
155 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
156 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
157 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
158 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
162
163 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
164 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
165 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
166 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
167 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
168
169 /* compressed formats */
170 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
171 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
172 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
173 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
174 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
175 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
176 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
179 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
180 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
181 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
182 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
183 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
184 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
185 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
186 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
188 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
189 {V4L2_PIX_FMT_AV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
190
191 /* Vendor-specific formats */
192 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
193 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
194 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
195 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
196};
197
198#define GST_AML_V4L2_FORMAT_COUNT (G_N_ELEMENTS(gst_aml_v4l2_formats))
199
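/* Usage sketch (hypothetical helper, not part of this file): the table above
 * is scanned linearly, e.g. to check whether a fourcc is known here and
 * whether it carries width/height/framerate fields:
 *
 *   static gboolean
 *   fourcc_is_raw (guint32 fourcc)
 *   {
 *     guint i;
 *     for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
 *       if (gst_aml_v4l2_formats[i].format == fourcc)
 *         return (gst_aml_v4l2_formats[i].flags & GST_V4L2_RAW) != 0;
 *     return FALSE;
 *   }
 */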
200static GSList *gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object);
201
202#define GST_TYPE_AML_V4L2_DEVICE_FLAGS (gst_aml_v4l2_device_get_type())
203static GType
204gst_aml_v4l2_device_get_type(void)
205{
206 static GType v4l2_device_type = 0;
207
208 if (v4l2_device_type == 0)
209 {
210 static const GFlagsValue values[] = {
211 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
212 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
213 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
214
215 {V4L2_CAP_VBI_CAPTURE, "Device supports VBI capture", "vbi-capture"},
216 {V4L2_CAP_VBI_OUTPUT, "Device supports VBI output", "vbi-output"},
217
218 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
219 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
220
221 {0, NULL, NULL}};
222
223 v4l2_device_type =
224 g_flags_register_static("GstAmlV4l2DeviceTypeFlags", values);
225 }
226
227 return v4l2_device_type;
228}
229
230GType gst_aml_v4l2_io_mode_get_type(void)
231{
232 static GType v4l2_io_mode = 0;
233
234 if (!v4l2_io_mode)
235 {
236 static const GEnumValue io_modes[] = {
237 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
238 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
239 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
240 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
241 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
242 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
243 "dmabuf-import"},
244
245 {0, NULL, NULL}};
246 v4l2_io_mode = g_enum_register_static("GstAmlV4l2IOMode", io_modes);
247 }
248 return v4l2_io_mode;
249}
250
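/* Usage sketch (assumption: "element" stands for any element that installs
 * the properties below). Once registered, these GTypes back the read-only
 * "flags" property and the "io-mode"/"capture-io-mode"/"output-io-mode"
 * properties, so an application can query them generically:
 *
 *   guint flags = 0;
 *   g_object_get (element, "flags", &flags, NULL);
 *   if (flags & V4L2_CAP_VIDEO_OUTPUT)
 *     g_print ("device supports video output\n");
 */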
251void gst_aml_v4l2_object_install_properties_helper(GObjectClass *gobject_class,
252 const char *default_device)
253{
254 g_object_class_install_property(gobject_class, PROP_DEVICE,
255 g_param_spec_string("device", "Device", "Device location",
256 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
257 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
258 g_param_spec_string("device-name", "Device name",
259 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
260 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
261 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
262 g_param_spec_int("device-fd", "File descriptor",
263 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
264 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
265 g_object_class_install_property(gobject_class, PROP_FLAGS,
266 g_param_spec_flags("flags", "Flags", "Device type flags",
267 GST_TYPE_AML_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
268 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
269
270 /**
271 * GstV4l2Src:brightness:
272 *
273 * Picture brightness, or more precisely, the black level
274 */
275 g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
276 g_param_spec_int("brightness", "Brightness",
277 "Picture brightness, or more precisely, the black level", G_MININT,
278 G_MAXINT, 0,
279 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
280 /**
281 * GstV4l2Src:contrast:
282 *
283 * Picture contrast or luma gain
284 */
285 g_object_class_install_property(gobject_class, PROP_CONTRAST,
286 g_param_spec_int("contrast", "Contrast",
287 "Picture contrast or luma gain", G_MININT,
288 G_MAXINT, 0,
289 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
290 /**
291 * GstV4l2Src:saturation:
292 *
293 * Picture color saturation or chroma gain
294 */
295 g_object_class_install_property(gobject_class, PROP_SATURATION,
296 g_param_spec_int("saturation", "Saturation",
297 "Picture color saturation or chroma gain", G_MININT,
298 G_MAXINT, 0,
299 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
300 /**
301 * GstV4l2Src:hue:
302 *
303 * Hue or color balance
304 */
305 g_object_class_install_property(gobject_class, PROP_HUE,
306 g_param_spec_int("hue", "Hue",
307 "Hue or color balance", G_MININT,
308 G_MAXINT, 0,
309 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
310
311 /**
312 * GstV4l2Src:io-mode:
313 *
314 * IO Mode
315 */
316 g_object_class_install_property(gobject_class, PROP_IO_MODE,
317 g_param_spec_enum("io-mode", "IO mode",
318 "I/O mode",
319 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
320 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
321
322 /**
323 * GstV4l2Src:extra-controls:
324 *
325 * Additional v4l2 controls for the device. The controls are identified
326 * by the control name (lowercase with '_' for any non-alphanumeric
327 * characters).
328 *
329 * Since: 1.2
330 */
331 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
332 g_param_spec_boxed("extra-controls", "Extra Controls",
333 "Extra v4l2 controls (CIDs) for the device",
334 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
335
336 /**
337 * GstV4l2Src:pixel-aspect-ratio:
338 *
339 * The pixel aspect ratio of the device. This overwrites the pixel aspect
340 * ratio queried from the device.
341 *
342 * Since: 1.2
343 */
344 g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
345 g_param_spec_string("pixel-aspect-ratio", "Pixel Aspect Ratio",
346 "Overwrite the pixel aspect ratio of the device", "1/1",
347 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
348
349 /**
350 * GstV4l2Src:force-aspect-ratio:
351 *
352 * When enabled, the pixel aspect ratio queried from the device or set
353 * with the pixel-aspect-ratio property will be enforced.
354 *
355 * Since: 1.2
356 */
357 g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
358 g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
359 "When enabled, the pixel aspect ratio will be enforced", TRUE,
360 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
361}
362
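/* Class-init sketch (the GstAmlV4l2Foo names are made up for illustration):
 * an element wrapping a GstAmlV4l2Object typically forwards its GObject
 * property hooks and then calls this helper once:
 *
 *   static void
 *   gst_aml_v4l2_foo_class_init (GstAmlV4l2FooClass *klass)
 *   {
 *     GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
 *
 *     gobject_class->set_property = gst_aml_v4l2_foo_set_property;
 *     gobject_class->get_property = gst_aml_v4l2_foo_get_property;
 *     gst_aml_v4l2_object_install_properties_helper (gobject_class,
 *         "/dev/video0");
 *   }
 */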
363void gst_aml_v4l2_object_install_m2m_properties_helper(GObjectClass *gobject_class)
364{
365 g_object_class_install_property(gobject_class, PROP_DEVICE,
366 g_param_spec_string("device", "Device", "Device location",
367 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
368
369 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
370 g_param_spec_string("device-name", "Device name",
371 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
372 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
373
374 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
375 g_param_spec_int("device-fd", "File descriptor",
376 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
377 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
378
379 g_object_class_install_property(gobject_class, PROP_OUTPUT_IO_MODE,
380 g_param_spec_enum("output-io-mode", "Output IO mode",
381 "Output side I/O mode (matches sink pad)",
382 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
383 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
384
385 g_object_class_install_property(gobject_class, PROP_CAPTURE_IO_MODE,
386 g_param_spec_enum("capture-io-mode", "Capture IO mode",
387 "Capture I/O mode (matches src pad)",
388 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
389 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
390
391 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
392 g_param_spec_boxed("extra-controls", "Extra Controls",
393 "Extra v4l2 controls (CIDs) for the device",
394 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
395
396 g_object_class_install_property(gobject_class, PROP_DUMP_FRAME_LOCATION,
397 g_param_spec_string("dump-frame-location", "dump frame location",
398 "Location of the file to write decoder frames", NULL,
399 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
400}
401
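/* Usage sketch (assumptions: the element name "amlv4l2dec" and the pipeline
 * fragment are examples only). The m2m properties installed above are
 * normally driven either from the command line:
 *
 *   gst-launch-1.0 ... ! amlv4l2dec capture-io-mode=dmabuf \
 *       dump-frame-location=/tmp/dec.yuv ! ...
 *
 * or programmatically:
 *
 *   g_object_set (dec, "output-io-mode", GST_V4L2_IO_MMAP,
 *       "capture-io-mode", GST_V4L2_IO_DMABUF, NULL);
 */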
402/* Support for 32bit off_t, this wrapper is casting off_t to gint64 */
403#ifdef HAVE_LIBV4L2
404#if SIZEOF_OFF_T < 8
405
406static gpointer
407v4l2_mmap_wrapper(gpointer start, gsize length, gint prot, gint flags, gint fd,
408 off_t offset)
409{
410 return v4l2_mmap(start, length, prot, flags, fd, (gint64)offset);
411}
412
413#define v4l2_mmap v4l2_mmap_wrapper
414
415#endif /* SIZEOF_OFF_T < 8 */
416#endif /* HAVE_LIBV4L2 */
417
418GstAmlV4l2Object *
419gst_aml_v4l2_object_new(GstElement *element,
420 GstObject *debug_object,
421 enum v4l2_buf_type type,
422 const char *default_device,
423 GstAmlV4l2GetInOutFunction get_in_out_func,
424 GstAmlV4l2SetInOutFunction set_in_out_func,
425 GstAmlV4l2UpdateFpsFunction update_fps_func)
426{
427 GstAmlV4l2Object *v4l2object;
428
429 /*
430 * some default values
431 */
432 v4l2object = g_new0(GstAmlV4l2Object, 1);
433
434 if ((V4L2_BUF_TYPE_VIDEO_CAPTURE == type || V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type))
435 {
436 const char *default_mode = getenv("GST_DEFAULT_V4L2_BUF_MODE");
437 GST_DEBUG("amlmodbuf GST_DEFAULT_V4L2_BUF_MODE:%s", default_mode);
438 if (default_mode)
439 {
440 if (strcmp(default_mode, "DMA_BUF_IMPORT") == 0)
441 v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
442 else if (strcmp(default_mode, "DMA_BUF") == 0)
443 v4l2object->req_mode = GST_V4L2_IO_DMABUF;
444 GST_DEBUG("amlmodbuf set default buf default_mode:%d", v4l2object->req_mode);
445 }
446 }
447
448 v4l2object->type = type;
449 v4l2object->formats = NULL;
450
451 v4l2object->element = element;
452 v4l2object->dbg_obj = debug_object;
453 v4l2object->get_in_out_func = get_in_out_func;
454 v4l2object->set_in_out_func = set_in_out_func;
455 v4l2object->update_fps_func = update_fps_func;
456
457 v4l2object->video_fd = -1;
458 v4l2object->active = FALSE;
459 v4l2object->videodev = g_strdup(default_device);
460
461 v4l2object->norms = NULL;
462 v4l2object->channels = NULL;
463 v4l2object->colors = NULL;
464
465 v4l2object->keep_aspect = TRUE;
466
467 v4l2object->n_v4l2_planes = 0;
468
469 v4l2object->no_initial_format = FALSE;
470
471 /* We now disable libv4l2 by default, but have an env to enable it. */
472#ifdef HAVE_LIBV4L2
473 if (g_getenv("GST_V4L2_USE_LIBV4L2"))
474 {
475 v4l2object->fd_open = v4l2_fd_open;
476 v4l2object->close = v4l2_close;
477 v4l2object->dup = v4l2_dup;
478 v4l2object->ioctl = v4l2_ioctl;
479 v4l2object->read = v4l2_read;
480 v4l2object->mmap = v4l2_mmap;
481 v4l2object->munmap = v4l2_munmap;
482 }
483 else
484#endif
485 {
486 v4l2object->fd_open = NULL;
487 v4l2object->close = close;
488 v4l2object->dup = dup;
489 v4l2object->ioctl = ioctl;
490 v4l2object->read = read;
491 v4l2object->mmap = mmap;
492 v4l2object->munmap = munmap;
493 }
494 v4l2object->poll = gst_poll_new(TRUE);
495 v4l2object->can_wait_event = FALSE;
496 v4l2object->can_poll_device = TRUE;
497 v4l2object->tvin_port = -1;
498
499 v4l2object->dumpframefile = NULL;
500
501 return v4l2object;
502}
503
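/* Construction sketch (hedged: the device path and the get/set callbacks are
 * placeholders; real callers live in the decoder element, not in this file).
 * A sink-pad (OUTPUT queue) object would typically be created as:
 *
 *   self->v4l2output = gst_aml_v4l2_object_new (GST_ELEMENT (self),
 *       GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
 *       V4L2_BUF_TYPE_VIDEO_OUTPUT, "/dev/video0",
 *       my_get_in_out, my_set_in_out, NULL);
 *
 * Note that the GST_DEFAULT_V4L2_BUF_MODE environment variable only changes
 * req_mode for CAPTURE-type objects, as the code above shows.
 */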
504static gboolean gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object);
505
506void gst_aml_v4l2_object_destroy(GstAmlV4l2Object *v4l2object)
507{
508 g_return_if_fail(v4l2object != NULL);
509
510 g_free(v4l2object->videodev);
511
512 g_free(v4l2object->channel);
513
514 if (v4l2object->formats)
515 {
516 gst_aml_v4l2_object_clear_format_list(v4l2object);
517 }
518
519 if (v4l2object->probed_caps)
520 {
521 gst_caps_unref(v4l2object->probed_caps);
522 }
523
524 if (v4l2object->extra_controls)
525 {
526 gst_structure_free(v4l2object->extra_controls);
527 }
528
529 gst_poll_free(v4l2object->poll);
530
531 g_free(v4l2object->dumpframefile);
532
533 g_free(v4l2object);
534}
535
536static gboolean
537gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object)
538{
539 g_slist_foreach(v4l2object->formats, (GFunc)g_free, NULL);
540 g_slist_free(v4l2object->formats);
541 v4l2object->formats = NULL;
542
543 return TRUE;
544}
545
546static gint
547gst_aml_v4l2_object_prop_to_cid(guint prop_id)
548{
549 gint cid = -1;
550
551 switch (prop_id)
552 {
553 case PROP_BRIGHTNESS:
554 cid = V4L2_CID_BRIGHTNESS;
555 break;
556 case PROP_CONTRAST:
557 cid = V4L2_CID_CONTRAST;
558 break;
559 case PROP_SATURATION:
560 cid = V4L2_CID_SATURATION;
561 break;
562 case PROP_HUE:
563 cid = V4L2_CID_HUE;
564 break;
565 default:
566 GST_WARNING("unmapped property id: %d", prop_id);
567 }
568 return cid;
569}
570
571gboolean
572gst_aml_v4l2_object_set_property_helper(GstAmlV4l2Object *v4l2object,
573 guint prop_id, const GValue *value, GParamSpec *pspec)
574{
575 switch (prop_id)
576 {
577 case PROP_DEVICE:
578 g_free(v4l2object->videodev);
579 v4l2object->videodev = g_value_dup_string(value);
580 break;
581 case PROP_BRIGHTNESS:
582 case PROP_CONTRAST:
583 case PROP_SATURATION:
584 case PROP_HUE:
585 {
586 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
587
588 if (cid != -1)
589 {
590 if (GST_AML_V4L2_IS_OPEN(v4l2object))
591 {
592 gst_aml_v4l2_set_attribute(v4l2object, cid, g_value_get_int(value));
593 }
594 }
595 return TRUE;
596 }
597 break;
598 case PROP_IO_MODE:
599 v4l2object->req_mode = g_value_get_enum(value);
600 break;
601 case PROP_CAPTURE_IO_MODE:
602 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
603 v4l2object->req_mode = g_value_get_enum(value);
604 break;
605 case PROP_OUTPUT_IO_MODE:
606 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
607 v4l2object->req_mode = g_value_get_enum(value);
608 break;
609 case PROP_EXTRA_CONTROLS:
610 {
611 const GstStructure *s = gst_value_get_structure(value);
612
613 if (v4l2object->extra_controls)
614 gst_structure_free(v4l2object->extra_controls);
615
616 v4l2object->extra_controls = s ? gst_structure_copy(s) : NULL;
617 if (GST_AML_V4L2_IS_OPEN(v4l2object))
618 gst_aml_v4l2_set_controls(v4l2object, v4l2object->extra_controls);
619 break;
620 }
621 case PROP_PIXEL_ASPECT_RATIO:
622 if (v4l2object->par)
623 {
624 g_value_unset(v4l2object->par);
625 g_free(v4l2object->par);
626 }
627 v4l2object->par = g_new0(GValue, 1);
628 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
629 if (!g_value_transform(value, v4l2object->par))
630 {
631 g_warning("Could not transform string to aspect ratio");
632 gst_value_set_fraction(v4l2object->par, 1, 1);
633 }
634
635 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set PAR to %d/%d",
636 gst_value_get_fraction_numerator(v4l2object->par),
637 gst_value_get_fraction_denominator(v4l2object->par));
638 break;
639 case PROP_FORCE_ASPECT_RATIO:
640 v4l2object->keep_aspect = g_value_get_boolean(value);
641 break;
642 case PROP_DUMP_FRAME_LOCATION:
643 g_free(v4l2object->dumpframefile);
644 v4l2object->dumpframefile = g_value_dup_string(value);
645 break;
646 default:
647 return FALSE;
648 break;
649 }
650 return TRUE;
651}
652
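/* Example of driving the "extra-controls" branch above (illustrative; valid
 * control names depend on the driver):
 *
 *   GstStructure *ctrls = gst_structure_new ("controls",
 *       "video_bitrate", G_TYPE_INT, 4000000, NULL);
 *   g_object_set (element, "extra-controls", ctrls, NULL);
 *   gst_structure_free (ctrls);
 *
 * The setter copies the structure (gst_structure_copy above), so the caller
 * keeps ownership of its own instance.
 */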
653gboolean
654gst_aml_v4l2_object_get_property_helper(GstAmlV4l2Object *v4l2object,
655 guint prop_id, GValue *value, GParamSpec *pspec)
656{
657 switch (prop_id)
658 {
659 case PROP_DEVICE:
660 g_value_set_string(value, v4l2object->videodev);
661 break;
662 case PROP_DEVICE_NAME:
663 {
664 const guchar *name = NULL;
665
666 if (GST_AML_V4L2_IS_OPEN(v4l2object))
667 name = v4l2object->vcap.card;
668
669 g_value_set_string(value, (gchar *)name);
670 break;
671 }
672 case PROP_DEVICE_FD:
673 {
674 if (GST_AML_V4L2_IS_OPEN(v4l2object))
675 g_value_set_int(value, v4l2object->video_fd);
676 else
677 g_value_set_int(value, DEFAULT_PROP_DEVICE_FD);
678 break;
679 }
680 case PROP_FLAGS:
681 {
682 guint flags = 0;
683
684 if (GST_AML_V4L2_IS_OPEN(v4l2object))
685 {
686 flags |= v4l2object->device_caps &
687 (V4L2_CAP_VIDEO_CAPTURE |
688 V4L2_CAP_VIDEO_OUTPUT |
689 V4L2_CAP_VIDEO_OVERLAY |
690 V4L2_CAP_VBI_CAPTURE |
691 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
692
693 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
694 flags |= V4L2_CAP_VIDEO_CAPTURE;
695
696 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
697 flags |= V4L2_CAP_VIDEO_OUTPUT;
698 }
699 g_value_set_flags(value, flags);
700 break;
701 }
702 case PROP_BRIGHTNESS:
703 case PROP_CONTRAST:
704 case PROP_SATURATION:
705 case PROP_HUE:
706 {
707 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
708
709 if (cid != -1)
710 {
711 if (GST_AML_V4L2_IS_OPEN(v4l2object))
712 {
713 gint v;
714 if (gst_aml_v4l2_get_attribute(v4l2object, cid, &v))
715 {
716 g_value_set_int(value, v);
717 }
718 }
719 }
720 return TRUE;
721 }
722 break;
723 case PROP_IO_MODE:
724 g_value_set_enum(value, v4l2object->req_mode);
725 break;
726 case PROP_CAPTURE_IO_MODE:
727 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
728 g_value_set_enum(value, v4l2object->req_mode);
729 break;
730 case PROP_OUTPUT_IO_MODE:
731 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
732 g_value_set_enum(value, v4l2object->req_mode);
733 break;
734 case PROP_EXTRA_CONTROLS:
735 gst_value_set_structure(value, v4l2object->extra_controls);
736 break;
737 case PROP_PIXEL_ASPECT_RATIO:
738 if (v4l2object->par)
739 g_value_transform(v4l2object->par, value);
740 break;
741 case PROP_FORCE_ASPECT_RATIO:
742 g_value_set_boolean(value, v4l2object->keep_aspect);
743 break;
744 case PROP_DUMP_FRAME_LOCATION:
745 g_value_set_string(value, v4l2object->dumpframefile);
746 break;
747 default:
748 return FALSE;
749 break;
750 }
751 return TRUE;
752}
753
754static void
755gst_aml_v4l2_get_driver_min_buffers(GstAmlV4l2Object *v4l2object)
756{
757 struct v4l2_control control = {
758 0,
759 };
760
761 g_return_if_fail(GST_AML_V4L2_IS_OPEN(v4l2object));
762
763 if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
764 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
765 else
766 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
767
768 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0)
769 {
770 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
771 "driver requires a minimum of %d buffers", control.value);
772 v4l2object->min_buffers = control.value;
773 }
774 else
775 {
776 v4l2object->min_buffers = 0;
777 }
778}
779
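/* Sketch of how the cached value is meant to be consumed (assumption: buffer
 * pool sizing happens in the caller, outside this function). The driver
 * minimum is normally combined with the element's own needs before
 * VIDIOC_REQBUFS:
 *
 *   guint count = MAX (own_min, v4l2object->min_buffers) +
 *       DEFAULT_EXTRA_CAPTURE_BUF_SIZE;
 *
 * so decode reordering/reference requirements reported by the driver are
 * never starved.
 */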
780gboolean
781gst_aml_v4l2_object_open(GstAmlV4l2Object *v4l2object)
782{
783 if (!gst_aml_v4l2_open(v4l2object))
784 return FALSE;
785
786 return TRUE;
787}
788
789gboolean
790gst_aml_v4l2_object_open_shared(GstAmlV4l2Object *v4l2object, GstAmlV4l2Object *other)
791{
792 gboolean ret;
793
794 ret = gst_aml_v4l2_dup(v4l2object, other);
795
796 if (ret && !V4L2_TYPE_IS_OUTPUT(v4l2object->type))
797 {
798 gst_poll_fd_init(&v4l2object->pollfd);
799 v4l2object->pollfd.fd = v4l2object->video_fd;
800 gst_poll_add_fd(v4l2object->poll, &v4l2object->pollfd);
801 /* used for dequeue event */
802 gst_poll_fd_ctl_read(v4l2object->poll, &v4l2object->pollfd, TRUE);
803 gst_poll_fd_ctl_pri(v4l2object->poll, &v4l2object->pollfd, TRUE);
804 }
805
806 return ret;
807}
808
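/* Poll-loop sketch for the fd added above (hedged: actual event handling is
 * implemented by the streaming thread elsewhere):
 *
 *   if (gst_poll_wait (v4l2object->poll, GST_CLOCK_TIME_NONE) > 0) {
 *     if (gst_poll_fd_has_pri (v4l2object->poll, &v4l2object->pollfd)) {
 *       // a V4L2 event (e.g. V4L2_EVENT_SOURCE_CHANGE) is pending: VIDIOC_DQEVENT
 *     }
 *     if (gst_poll_fd_can_read (v4l2object->poll, &v4l2object->pollfd)) {
 *       // a capture buffer can be dequeued with VIDIOC_DQBUF
 *     }
 *   }
 */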
809gboolean
810gst_aml_v4l2_object_close(GstAmlV4l2Object *v4l2object)
811{
812 if (!gst_aml_v4l2_close(v4l2object))
813 return FALSE;
814
815 gst_caps_replace(&v4l2object->probed_caps, NULL);
816
817 /* reset our copy of the device caps */
818 v4l2object->device_caps = 0;
819
820 if (v4l2object->formats)
821 {
822 gst_aml_v4l2_object_clear_format_list(v4l2object);
823 }
824
825 if (v4l2object->par)
826 {
827 g_value_unset(v4l2object->par);
828 g_free(v4l2object->par);
829 v4l2object->par = NULL;
830 }
831
832 if (v4l2object->channel)
833 {
834 g_free(v4l2object->channel);
835 v4l2object->channel = NULL;
836 }
837
838 return TRUE;
839}
840
841static struct v4l2_fmtdesc *
842gst_aml_v4l2_object_get_format_from_fourcc(GstAmlV4l2Object *v4l2object,
843 guint32 fourcc)
844{
845 struct v4l2_fmtdesc *fmt;
846 GSList *walk;
847
848 if (fourcc == 0)
849 return NULL;
850
851 walk = gst_aml_v4l2_object_get_format_list(v4l2object);
852 while (walk)
853 {
854 fmt = (struct v4l2_fmtdesc *)walk->data;
855 if (fmt->pixelformat == fourcc)
856 return fmt;
857 /* special case for jpeg */
858 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
859 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
860 fmt->pixelformat == V4L2_PIX_FMT_PJPG)
861 {
862 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
863 fourcc == V4L2_PIX_FMT_PJPG)
864 {
865 return fmt;
866 }
867 }
868 walk = g_slist_next(walk);
869 }
870
871 return NULL;
872}
873
874/* completely made-up ranking, the values themselves are meaningless */
875/* These ranks MUST be X such that X<<15 fits on a signed int - see
876 the comment at the end of gst_aml_v4l2_object_format_get_rank. */
877#define YUV_BASE_RANK 1000
878#define JPEG_BASE_RANK 500
879#define DV_BASE_RANK 200
880#define RGB_BASE_RANK 100
881#define YUV_ODD_BASE_RANK 50
882#define RGB_ODD_BASE_RANK 25
883#define BAYER_BASE_RANK 15
884#define S910_BASE_RANK 10
885#define GREY_BASE_RANK 5
886#define PWC_BASE_RANK 1
887
888static gint
889gst_aml_v4l2_object_format_get_rank(const struct v4l2_fmtdesc *fmt)
890{
891 guint32 fourcc = fmt->pixelformat;
892 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
893 gint rank = 0;
894
895 switch (fourcc)
896 {
897 case V4L2_PIX_FMT_MJPEG:
898 case V4L2_PIX_FMT_PJPG:
899 rank = JPEG_BASE_RANK;
900 break;
901 case V4L2_PIX_FMT_JPEG:
902 rank = JPEG_BASE_RANK + 1;
903 break;
904 case V4L2_PIX_FMT_MPEG: /* MPEG */
905 rank = JPEG_BASE_RANK + 2;
906 break;
907
908 case V4L2_PIX_FMT_RGB332:
909 case V4L2_PIX_FMT_ARGB555:
910 case V4L2_PIX_FMT_XRGB555:
911 case V4L2_PIX_FMT_RGB555:
912 case V4L2_PIX_FMT_ARGB555X:
913 case V4L2_PIX_FMT_XRGB555X:
914 case V4L2_PIX_FMT_RGB555X:
915 case V4L2_PIX_FMT_BGR666:
916 case V4L2_PIX_FMT_RGB565:
917 case V4L2_PIX_FMT_RGB565X:
918 case V4L2_PIX_FMT_RGB444:
919 case V4L2_PIX_FMT_Y4:
920 case V4L2_PIX_FMT_Y6:
921 case V4L2_PIX_FMT_Y10:
922 case V4L2_PIX_FMT_Y12:
923 case V4L2_PIX_FMT_Y10BPACK:
924 case V4L2_PIX_FMT_YUV555:
925 case V4L2_PIX_FMT_YUV565:
926 case V4L2_PIX_FMT_YUV32:
927 case V4L2_PIX_FMT_NV12MT_16X16:
928 case V4L2_PIX_FMT_NV42:
929 case V4L2_PIX_FMT_H264_MVC:
930 rank = RGB_ODD_BASE_RANK;
931 break;
932
933 case V4L2_PIX_FMT_RGB24:
934 case V4L2_PIX_FMT_BGR24:
935 rank = RGB_BASE_RANK - 1;
936 break;
937
938 case V4L2_PIX_FMT_RGB32:
939 case V4L2_PIX_FMT_BGR32:
940 case V4L2_PIX_FMT_ABGR32:
941 case V4L2_PIX_FMT_XBGR32:
942 case V4L2_PIX_FMT_ARGB32:
943 case V4L2_PIX_FMT_XRGB32:
944 rank = RGB_BASE_RANK;
945 break;
946
947 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
948 rank = GREY_BASE_RANK;
949 break;
950
951 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
952 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
953 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
954 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
955 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
956 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
957 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
958 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
959 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
960 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
961 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
962 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
963 rank = YUV_ODD_BASE_RANK;
964 break;
965
966 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
967 rank = YUV_BASE_RANK + 3;
968 break;
969 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
970 rank = YUV_BASE_RANK + 2;
971 break;
972 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
973 case V4L2_PIX_FMT_YUV420M:
974 rank = YUV_BASE_RANK + 7;
975 break;
976 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
977 rank = YUV_BASE_RANK + 10;
978 break;
979 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
980 rank = YUV_BASE_RANK + 6;
981 break;
982 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
983 rank = YUV_BASE_RANK + 9;
984 break;
985 case V4L2_PIX_FMT_YUV444:
986 rank = YUV_BASE_RANK + 6;
987 break;
988 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
989 rank = YUV_BASE_RANK + 5;
990 break;
991 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
992 rank = YUV_BASE_RANK + 4;
993 break;
994 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
995 rank = YUV_BASE_RANK + 8;
996 break;
997
998 case V4L2_PIX_FMT_DV:
999 rank = DV_BASE_RANK;
1000 break;
1001
1002 case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
1003 rank = 0;
1004 break;
1005
1006 case V4L2_PIX_FMT_SBGGR8:
1007 case V4L2_PIX_FMT_SGBRG8:
1008 case V4L2_PIX_FMT_SGRBG8:
1009 case V4L2_PIX_FMT_SRGGB8:
1010 rank = BAYER_BASE_RANK;
1011 break;
1012
1013 case V4L2_PIX_FMT_SN9C10X:
1014 rank = S910_BASE_RANK;
1015 break;
1016
1017 case V4L2_PIX_FMT_PWC1:
1018 rank = PWC_BASE_RANK;
1019 break;
1020 case V4L2_PIX_FMT_PWC2:
1021 rank = PWC_BASE_RANK;
1022 break;
1023
1024 default:
1025 rank = 0;
1026 break;
1027 }
1028
1029 /* All ranks are below 1<<15 so a shift by 15
1030 * will a) make all non-emulated formats larger
1031 * than emulated and b) will not overflow
1032 */
1033 if (!emulated)
1034 rank <<= 15;
1035
1036 return rank;
1037}
1038
1039static gint
1040format_cmp_func(gconstpointer a, gconstpointer b)
1041{
1042 const struct v4l2_fmtdesc *fa = a;
1043 const struct v4l2_fmtdesc *fb = b;
1044
1045 if (fa->pixelformat == fb->pixelformat)
1046 return 0;
1047
1048 return gst_aml_v4l2_object_format_get_rank(fb) -
1049 gst_aml_v4l2_object_format_get_rank(fa);
1050}
1051
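/* Worked example of the ranking (numbers follow the defines above): a native
 * NV12 entry scores YUV_ODD_BASE_RANK << 15 = 50 << 15 = 1638400, while an
 * emulated I420 entry stays at YUV_BASE_RANK + 7 = 1007. Any native format
 * therefore sorts ahead of any emulated one, and format_cmp_func()
 * (descending by rank) keeps the preferred formats first in the probed list.
 */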
1052/******************************************************
1053 * gst_aml_v4l2_object_fill_format_list():
1054 * create list of supported capture formats
1055 * return value: TRUE on success, FALSE on error
1056 ******************************************************/
1057static gboolean
1058gst_aml_v4l2_object_fill_format_list(GstAmlV4l2Object *v4l2object,
1059 enum v4l2_buf_type type)
1060{
1061 gint n;
1062 struct v4l2_fmtdesc *format;
1063
1064 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "getting src format enumerations");
1065
1066 /* format enumeration */
1067 for (n = 0;; n++)
1068 {
1069 format = g_new0(struct v4l2_fmtdesc, 1);
1070
1071 format->index = n;
1072 format->type = type;
1073
1074 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0)
1075 {
1076 if (errno == EINVAL)
1077 {
1078 g_free(format);
1079 break; /* end of enumeration */
1080 }
1081 else
1082 {
1083 goto failed;
1084 }
1085 }
1086
1087 GST_LOG_OBJECT(v4l2object->dbg_obj, "index: %u", format->index);
1088 GST_LOG_OBJECT(v4l2object->dbg_obj, "type: %d", format->type);
1089 GST_LOG_OBJECT(v4l2object->dbg_obj, "flags: %08x", format->flags);
1090 GST_LOG_OBJECT(v4l2object->dbg_obj, "description: '%s'",
1091 format->description);
1092 GST_LOG_OBJECT(v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1093 GST_FOURCC_ARGS(format->pixelformat));
1094
1095 /* sort formats according to our preference; we do this, because caps
1096 * are probed in the order the formats are in the list, and the order of
1097 * formats in the final probed caps matters for things like fixation */
1098 v4l2object->formats = g_slist_insert_sorted(v4l2object->formats, format,
1099 (GCompareFunc)format_cmp_func);
1100 }
1101
1102#ifndef GST_DISABLE_GST_DEBUG
1103 {
1104 GSList *l;
1105
1106 GST_INFO_OBJECT(v4l2object->dbg_obj, "got %d format(s):", n);
1107 for (l = v4l2object->formats; l != NULL; l = l->next)
1108 {
1109 format = l->data;
1110
1111 GST_INFO_OBJECT(v4l2object->dbg_obj,
1112 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS(format->pixelformat),
1113 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1114 }
1115 }
1116#endif
1117
1118 return TRUE;
1119
1120 /* ERRORS */
1121failed:
1122{
1123 g_free(format);
1124
1125 if (!v4l2object->element)
1126 return FALSE;
1127
1128 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
1129 (_("Failed to enumerate possible video formats device '%s' can work "
1130 "with"),
1131 v4l2object->videodev),
1132 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1133 n, v4l2object->videodev, errno, g_strerror(errno)));
1134
1135 return FALSE;
1136}
1137}
1138
1139/*
1140 * Get the list of supported capture formats, a list of
1141 * <code>struct v4l2_fmtdesc</code>.
1142 */
1143static GSList *
1144gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object)
1145{
1146 if (!v4l2object->formats)
1147 {
1148
1149 /* check usual way */
1150 gst_aml_v4l2_object_fill_format_list(v4l2object, v4l2object->type);
1151
1152 /* if our driver supports multi-planar
1153 * and if formats are still empty then we can workaround driver bug
1154 * by also looking up formats as if our device was not supporting
1155 * multiplanar */
1156 if (!v4l2object->formats)
1157 {
1158 switch (v4l2object->type)
1159 {
1160 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1161 gst_aml_v4l2_object_fill_format_list(v4l2object,
1162 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1163 break;
1164
1165 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1166 gst_aml_v4l2_object_fill_format_list(v4l2object,
1167 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1168 break;
1169
1170 default:
1171 break;
1172 }
1173 }
1174 }
1175 return v4l2object->formats;
1176}
1177
1178static GstVideoFormat
1179gst_aml_v4l2_object_v4l2fourcc_to_video_format(guint32 fourcc)
1180{
1181 GstVideoFormat format;
1182
1183 switch (fourcc)
1184 {
1185 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1186 format = GST_VIDEO_FORMAT_GRAY8;
1187 break;
1188 case V4L2_PIX_FMT_Y16:
1189 format = GST_VIDEO_FORMAT_GRAY16_LE;
1190 break;
1191 case V4L2_PIX_FMT_Y16_BE:
1192 format = GST_VIDEO_FORMAT_GRAY16_BE;
1193 break;
1194 case V4L2_PIX_FMT_XRGB555:
1195 case V4L2_PIX_FMT_RGB555:
1196 format = GST_VIDEO_FORMAT_RGB15;
1197 break;
1198 case V4L2_PIX_FMT_XRGB555X:
1199 case V4L2_PIX_FMT_RGB555X:
1200 format = GST_VIDEO_FORMAT_BGR15;
1201 break;
1202 case V4L2_PIX_FMT_RGB565:
1203 format = GST_VIDEO_FORMAT_RGB16;
1204 break;
1205 case V4L2_PIX_FMT_RGB24:
1206 format = GST_VIDEO_FORMAT_RGB;
1207 break;
1208 case V4L2_PIX_FMT_BGR24:
1209 format = GST_VIDEO_FORMAT_BGR;
1210 break;
1211 case V4L2_PIX_FMT_XRGB32:
1212 case V4L2_PIX_FMT_RGB32:
1213 format = GST_VIDEO_FORMAT_xRGB;
1214 break;
1215 case V4L2_PIX_FMT_XBGR32:
1216 case V4L2_PIX_FMT_BGR32:
1217 format = GST_VIDEO_FORMAT_BGRx;
1218 break;
1219 case V4L2_PIX_FMT_ABGR32:
1220 format = GST_VIDEO_FORMAT_BGRA;
1221 break;
1222 case V4L2_PIX_FMT_ARGB32:
1223 format = GST_VIDEO_FORMAT_ARGB;
1224 break;
1225 case V4L2_PIX_FMT_NV12:
1226 case V4L2_PIX_FMT_NV12M:
1227 format = GST_VIDEO_FORMAT_NV12;
1228 break;
1229 case V4L2_PIX_FMT_NV12MT:
1230 format = GST_VIDEO_FORMAT_NV12_64Z32;
1231 break;
1232 case V4L2_PIX_FMT_NV21:
1233 case V4L2_PIX_FMT_NV21M:
1234 format = GST_VIDEO_FORMAT_NV21;
1235 break;
1236 case V4L2_PIX_FMT_YVU410:
1237 format = GST_VIDEO_FORMAT_YVU9;
1238 break;
1239 case V4L2_PIX_FMT_YUV410:
1240 format = GST_VIDEO_FORMAT_YUV9;
1241 break;
1242 case V4L2_PIX_FMT_YUV420:
1243 case V4L2_PIX_FMT_YUV420M:
1244 format = GST_VIDEO_FORMAT_I420;
1245 break;
1246 case V4L2_PIX_FMT_YUYV:
1247 format = GST_VIDEO_FORMAT_YUY2;
1248 break;
1249 case V4L2_PIX_FMT_YVU420:
1250 format = GST_VIDEO_FORMAT_YV12;
1251 break;
1252 case V4L2_PIX_FMT_UYVY:
1253 format = GST_VIDEO_FORMAT_UYVY;
1254 break;
1255 case V4L2_PIX_FMT_YUV411P:
1256 format = GST_VIDEO_FORMAT_Y41B;
1257 break;
1258 case V4L2_PIX_FMT_YUV422P:
1259 format = GST_VIDEO_FORMAT_Y42B;
1260 break;
1261 case V4L2_PIX_FMT_YVYU:
1262 format = GST_VIDEO_FORMAT_YVYU;
1263 break;
1264 case V4L2_PIX_FMT_NV16:
1265 case V4L2_PIX_FMT_NV16M:
1266 format = GST_VIDEO_FORMAT_NV16;
1267 break;
1268 case V4L2_PIX_FMT_NV61:
1269 case V4L2_PIX_FMT_NV61M:
1270 format = GST_VIDEO_FORMAT_NV61;
1271 break;
1272 case V4L2_PIX_FMT_NV24:
1273 format = GST_VIDEO_FORMAT_NV24;
1274 break;
1275 default:
1276 format = GST_VIDEO_FORMAT_UNKNOWN;
1277 break;
1278 }
1279
1280 return format;
1281}
1282
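/* Example (illustrative): both V4L2_PIX_FMT_NV12 and V4L2_PIX_FMT_NV12M map
 * to GST_VIDEO_FORMAT_NV12 here; the contiguous vs. multi-planar distinction
 * is carried by the V4L2 buffer layout, not by the GStreamer caps.
 */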
1283static gboolean
1284gst_aml_v4l2_object_v4l2fourcc_is_rgb(guint32 fourcc)
1285{
1286 gboolean ret = FALSE;
1287
1288 switch (fourcc)
1289 {
1290 case V4L2_PIX_FMT_XRGB555:
1291 case V4L2_PIX_FMT_RGB555:
1292 case V4L2_PIX_FMT_XRGB555X:
1293 case V4L2_PIX_FMT_RGB555X:
1294 case V4L2_PIX_FMT_RGB565:
1295 case V4L2_PIX_FMT_RGB24:
1296 case V4L2_PIX_FMT_BGR24:
1297 case V4L2_PIX_FMT_XRGB32:
1298 case V4L2_PIX_FMT_RGB32:
1299 case V4L2_PIX_FMT_XBGR32:
1300 case V4L2_PIX_FMT_BGR32:
1301 case V4L2_PIX_FMT_ABGR32:
1302 case V4L2_PIX_FMT_ARGB32:
1303 case V4L2_PIX_FMT_SBGGR8:
1304 case V4L2_PIX_FMT_SGBRG8:
1305 case V4L2_PIX_FMT_SGRBG8:
1306 case V4L2_PIX_FMT_SRGGB8:
1307 ret = TRUE;
1308 break;
1309 default:
1310 break;
1311 }
1312
1313 return ret;
1314}
1315
1316static GstStructure *
1317gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(guint32 fourcc)
1318{
1319 GstStructure *structure = NULL;
1320
1321 switch (fourcc)
1322 {
1323 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1324 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1325 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1326 structure = gst_structure_new_empty("image/jpeg");
1327 break;
1328 case V4L2_PIX_FMT_MPEG1:
1329 structure = gst_structure_new("video/mpeg",
1330 "mpegversion", G_TYPE_INT, 1, NULL);
1331 break;
1332 case V4L2_PIX_FMT_MPEG2:
1333 structure = gst_structure_new("video/mpeg",
1334 "mpegversion", G_TYPE_INT, 2, NULL);
1335 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1336 GST_DEBUG("aml set mpeg2 systemstream to false");
1337 break;
1338 case V4L2_PIX_FMT_MPEG4:
1339 case V4L2_PIX_FMT_XVID:
1340 structure = gst_structure_new("video/mpeg",
1341 "mpegversion", G_TYPE_INT, 4, "systemstream",
1342 G_TYPE_BOOLEAN, FALSE, NULL);
1343 break;
1344 case V4L2_PIX_FMT_FWHT:
1345 structure = gst_structure_new_empty("video/x-fwht");
1346 break;
1347 case V4L2_PIX_FMT_H263:
1348 structure = gst_structure_new("video/x-h263",
1349 "variant", G_TYPE_STRING, "itu", NULL);
1350 break;
1351 case V4L2_PIX_FMT_H264: /* H.264 */
1352 structure = gst_structure_new("video/x-h264",
1353 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1354 G_TYPE_STRING, "au", NULL);
1355 break;
1356 case V4L2_PIX_FMT_H264_NO_SC:
1357 structure = gst_structure_new("video/x-h264",
1358 "stream-format", G_TYPE_STRING, "avc", "alignment",
1359 G_TYPE_STRING, "au", NULL);
1360 break;
1361 case V4L2_PIX_FMT_HEVC: /* H.265 */
1362 structure = gst_structure_new("video/x-h265",
1363 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1364 G_TYPE_STRING, "au", NULL);
1365 break;
1366 case V4L2_PIX_FMT_VC1_ANNEX_G:
1367 case V4L2_PIX_FMT_VC1_ANNEX_L:
1368 structure = gst_structure_new("video/x-wmv",
1369 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1370 break;
1371 case V4L2_PIX_FMT_VP8:
1372 structure = gst_structure_new_empty("video/x-vp8");
1373 break;
1374 case V4L2_PIX_FMT_VP9:
1375 structure = gst_structure_new_empty("video/x-vp9");
1376 break;
1377 case V4L2_PIX_FMT_AV1:
1378 structure = gst_structure_new_empty("video/x-av1");
1379 break;
1380 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1381 case V4L2_PIX_FMT_Y16:
1382 case V4L2_PIX_FMT_Y16_BE:
1383 case V4L2_PIX_FMT_XRGB555:
1384 case V4L2_PIX_FMT_RGB555:
1385 case V4L2_PIX_FMT_XRGB555X:
1386 case V4L2_PIX_FMT_RGB555X:
1387 case V4L2_PIX_FMT_RGB565:
1388 case V4L2_PIX_FMT_RGB24:
1389 case V4L2_PIX_FMT_BGR24:
1390 case V4L2_PIX_FMT_RGB32:
1391 case V4L2_PIX_FMT_XRGB32:
1392 case V4L2_PIX_FMT_ARGB32:
1393 case V4L2_PIX_FMT_BGR32:
1394 case V4L2_PIX_FMT_XBGR32:
1395 case V4L2_PIX_FMT_ABGR32:
1396 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1397 case V4L2_PIX_FMT_NV12M:
1398 case V4L2_PIX_FMT_NV12MT:
1399 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1400 case V4L2_PIX_FMT_NV21M:
1401 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1402 case V4L2_PIX_FMT_NV16M:
1403 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1404 case V4L2_PIX_FMT_NV61M:
1405 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1406 case V4L2_PIX_FMT_YVU410:
1407 case V4L2_PIX_FMT_YUV410:
1408 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1409 case V4L2_PIX_FMT_YUV420M:
1410 case V4L2_PIX_FMT_YUYV:
1411 case V4L2_PIX_FMT_YVU420:
1412 case V4L2_PIX_FMT_UYVY:
1413 case V4L2_PIX_FMT_YUV422P:
1414 case V4L2_PIX_FMT_YVYU:
1415 case V4L2_PIX_FMT_YUV411P:
1416 {
1417 GstVideoFormat format;
1418 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fourcc);
1419 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1420 structure = gst_structure_new("video/x-raw",
1421 "format", G_TYPE_STRING, gst_video_format_to_string(format), NULL);
1422 break;
1423 }
1424 case V4L2_PIX_FMT_DV:
1425 structure =
1426 gst_structure_new("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1427 NULL);
1428 break;
1429 case V4L2_PIX_FMT_MPEG: /* MPEG */
1430 structure = gst_structure_new("video/mpegts",
1431 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1432 break;
1433 case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
1434 break;
1435 case V4L2_PIX_FMT_SBGGR8:
1436 case V4L2_PIX_FMT_SGBRG8:
1437 case V4L2_PIX_FMT_SGRBG8:
1438 case V4L2_PIX_FMT_SRGGB8:
1439 structure = gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
1440 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" : fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg"
1441 : fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg"
1442 :
1443 /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb",
1444 NULL);
1445 break;
1446 case V4L2_PIX_FMT_SN9C10X:
1447 structure = gst_structure_new_empty("video/x-sonix");
1448 break;
1449 case V4L2_PIX_FMT_PWC1:
1450 structure = gst_structure_new_empty("video/x-pwc1");
1451 break;
1452 case V4L2_PIX_FMT_PWC2:
1453 structure = gst_structure_new_empty("video/x-pwc2");
1454 break;
1455 case V4L2_PIX_FMT_RGB332:
1456 case V4L2_PIX_FMT_BGR666:
1457 case V4L2_PIX_FMT_ARGB555X:
1458 case V4L2_PIX_FMT_RGB565X:
1459 case V4L2_PIX_FMT_RGB444:
1460 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1461 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1462 case V4L2_PIX_FMT_Y4:
1463 case V4L2_PIX_FMT_Y6:
1464 case V4L2_PIX_FMT_Y10:
1465 case V4L2_PIX_FMT_Y12:
1466 case V4L2_PIX_FMT_Y10BPACK:
1467 case V4L2_PIX_FMT_YUV444:
1468 case V4L2_PIX_FMT_YUV555:
1469 case V4L2_PIX_FMT_YUV565:
1470 case V4L2_PIX_FMT_Y41P:
1471 case V4L2_PIX_FMT_YUV32:
1472 case V4L2_PIX_FMT_NV12MT_16X16:
1473 case V4L2_PIX_FMT_NV42:
1474 case V4L2_PIX_FMT_H264_MVC:
1475 default:
1476 GST_DEBUG("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1477 fourcc, GST_FOURCC_ARGS(fourcc));
1478 break;
1479 }
1480
1481 return structure;
1482}
1483
1484GstStructure *
1485gst_aml_v4l2_object_v4l2fourcc_to_structure(guint32 fourcc)
1486{
1487 GstStructure *template;
1488 gint i;
1489
1490 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(fourcc);
1491
1492 if (template == NULL)
1493 goto done;
1494
1495 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1496 {
1497 if (gst_aml_v4l2_formats[i].format != fourcc)
1498 continue;
1499
1500 if (gst_aml_v4l2_formats[i].dimensions)
1501 {
1502 gst_structure_set(template,
1503 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1504 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1505 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1506 }
1507 break;
1508 }
1509
1510done:
1511 return template;
1512}
1513
1514static GstCaps *
1515gst_aml_v4l2_object_get_caps_helper(GstAmlV4L2FormatFlags flags)
1516{
1517 GstStructure *structure;
1518 GstCaps *caps;
1519 guint i;
1520
1521 caps = gst_caps_new_empty();
1522 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1523 {
1524
1525 if ((gst_aml_v4l2_formats[i].flags & flags) == 0)
1526 continue;
1527
1528 structure =
1529 gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(gst_aml_v4l2_formats[i].format);
1530
1531 if (structure)
1532 {
1533 GstStructure *alt_s = NULL;
1534
1535 if (gst_aml_v4l2_formats[i].dimensions)
1536 {
1537 gst_structure_set(structure,
1538 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1539 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1540 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1541 }
1542
1543 switch (gst_aml_v4l2_formats[i].format)
1544 {
1545 case V4L2_PIX_FMT_RGB32:
1546 alt_s = gst_structure_copy(structure);
1547 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1548 break;
1549 case V4L2_PIX_FMT_BGR32:
1550 alt_s = gst_structure_copy(structure);
1551 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1552 default:
1553 break;
1554 }
1555
1556 gst_caps_append_structure(caps, structure);
1557
1558 if (alt_s)
1559 gst_caps_append_structure(caps, alt_s);
1560 }
1561 }
1562
1563 return gst_caps_simplify(caps);
1564}
1565
1566GstCaps *
1567gst_aml_v4l2_object_get_all_caps(void)
1568{
1569 static GstCaps *caps = NULL;
1570
1571 if (g_once_init_enter(&caps))
1572 {
1573 GstCaps *all_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_ALL);
1574 GST_MINI_OBJECT_FLAG_SET(all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1575 g_once_init_leave(&caps, all_caps);
1576 }
1577
1578 return caps;
1579}
1580
1581GstCaps *
1582gst_aml_v4l2_object_get_raw_caps(void)
1583{
1584 static GstCaps *caps = NULL;
1585
1586 if (g_once_init_enter(&caps))
1587 {
1588 GstCaps *raw_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_RAW);
1589 GST_MINI_OBJECT_FLAG_SET(raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1590 g_once_init_leave(&caps, raw_caps);
1591 }
1592
1593 return caps;
1594}
1595
1596GstCaps *
1597gst_aml_v4l2_object_get_codec_caps(void)
1598{
1599 static GstCaps *caps = NULL;
1600
1601 if (g_once_init_enter(&caps))
1602 {
1603 GstCaps *codec_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_CODEC);
1604 GST_MINI_OBJECT_FLAG_SET(codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1605 g_once_init_leave(&caps, codec_caps);
1606 }
1607
1608 return caps;
1609}
1610
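/* Consumer sketch (illustrative): the three accessors above return shared,
 * never-freed caps, which makes them convenient for quick capability checks:
 *
 *   GstCaps *codec_caps = gst_aml_v4l2_object_get_codec_caps ();
 *   if (gst_caps_can_intersect (in_caps, codec_caps))
 *     GST_DEBUG ("caps %" GST_PTR_FORMAT " look decodable", in_caps);
 *
 * Take an explicit gst_caps_ref() before storing them anywhere that will
 * later unref them.
 */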
1611/* collect data for the given caps
1612 * @caps: given input caps
1613 * @format: location for the v4l format
1614 * @w/@h: location for width and height
1615 * @fps_n/@fps_d: location for framerate
1616 * @size: location for expected size of the frame or 0 if unknown
1617 */
1618static gboolean
1619gst_aml_v4l2_object_get_caps_info(GstAmlV4l2Object *v4l2object, GstCaps *caps,
1620 struct v4l2_fmtdesc **format, GstVideoInfo *info)
1621{
1622 GstStructure *structure;
1623 guint32 fourcc = 0, fourcc_nc = 0;
1624 const gchar *mimetype;
1625 struct v4l2_fmtdesc *fmt = NULL;
1626
1627 structure = gst_caps_get_structure(caps, 0);
1628
1629 mimetype = gst_structure_get_name(structure);
1630
1631 if (!gst_video_info_from_caps(info, caps))
1632 goto invalid_format;
1633
1634 if (g_str_equal(mimetype, "video/x-raw"))
1635 {
1636 switch (GST_VIDEO_INFO_FORMAT(info))
1637 {
1638 case GST_VIDEO_FORMAT_I420:
1639 fourcc = V4L2_PIX_FMT_YUV420;
1640 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1641 break;
1642 case GST_VIDEO_FORMAT_YUY2:
1643 fourcc = V4L2_PIX_FMT_YUYV;
1644 break;
1645 case GST_VIDEO_FORMAT_UYVY:
1646 fourcc = V4L2_PIX_FMT_UYVY;
1647 break;
1648 case GST_VIDEO_FORMAT_YV12:
1649 fourcc = V4L2_PIX_FMT_YVU420;
1650 break;
1651 case GST_VIDEO_FORMAT_Y41B:
1652 fourcc = V4L2_PIX_FMT_YUV411P;
1653 break;
1654 case GST_VIDEO_FORMAT_Y42B:
1655 fourcc = V4L2_PIX_FMT_YUV422P;
1656 break;
1657 case GST_VIDEO_FORMAT_NV12:
1658 fourcc = V4L2_PIX_FMT_NV12;
1659 fourcc_nc = V4L2_PIX_FMT_NV12M;
1660 break;
1661 case GST_VIDEO_FORMAT_NV12_64Z32:
1662 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1663 break;
1664 case GST_VIDEO_FORMAT_NV21:
1665 fourcc = V4L2_PIX_FMT_NV21;
1666 fourcc_nc = V4L2_PIX_FMT_NV21M;
1667 break;
1668 case GST_VIDEO_FORMAT_NV16:
1669 fourcc = V4L2_PIX_FMT_NV16;
1670 fourcc_nc = V4L2_PIX_FMT_NV16M;
1671 break;
1672 case GST_VIDEO_FORMAT_NV61:
1673 fourcc = V4L2_PIX_FMT_NV61;
1674 fourcc_nc = V4L2_PIX_FMT_NV61M;
1675 break;
1676 case GST_VIDEO_FORMAT_NV24:
1677 fourcc = V4L2_PIX_FMT_NV24;
1678 break;
1679 case GST_VIDEO_FORMAT_YVYU:
1680 fourcc = V4L2_PIX_FMT_YVYU;
1681 break;
1682 case GST_VIDEO_FORMAT_RGB15:
1683 fourcc = V4L2_PIX_FMT_RGB555;
1684 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1685 break;
1686 case GST_VIDEO_FORMAT_RGB16:
1687 fourcc = V4L2_PIX_FMT_RGB565;
1688 break;
1689 case GST_VIDEO_FORMAT_RGB:
1690 fourcc = V4L2_PIX_FMT_RGB24;
1691 break;
1692 case GST_VIDEO_FORMAT_BGR:
1693 fourcc = V4L2_PIX_FMT_BGR24;
1694 break;
1695 case GST_VIDEO_FORMAT_xRGB:
1696 fourcc = V4L2_PIX_FMT_RGB32;
1697 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1698 break;
1699 case GST_VIDEO_FORMAT_ARGB:
1700 fourcc = V4L2_PIX_FMT_RGB32;
1701 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1702 break;
1703 case GST_VIDEO_FORMAT_BGRx:
1704 fourcc = V4L2_PIX_FMT_BGR32;
1705 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1706 break;
1707 case GST_VIDEO_FORMAT_BGRA:
1708 fourcc = V4L2_PIX_FMT_BGR32;
1709 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1710 break;
1711 case GST_VIDEO_FORMAT_GRAY8:
1712 fourcc = V4L2_PIX_FMT_GREY;
1713 break;
1714 case GST_VIDEO_FORMAT_GRAY16_LE:
1715 fourcc = V4L2_PIX_FMT_Y16;
1716 break;
1717 case GST_VIDEO_FORMAT_GRAY16_BE:
1718 fourcc = V4L2_PIX_FMT_Y16_BE;
1719 break;
1720 case GST_VIDEO_FORMAT_BGR15:
1721 fourcc = V4L2_PIX_FMT_RGB555X;
1722 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1723 break;
1724 default:
1725 break;
1726 }
1727 }
1728 else
1729 {
1730 if (g_str_equal(mimetype, "video/mpegts"))
1731 {
1732 fourcc = V4L2_PIX_FMT_MPEG;
1733 }
1734 else if (g_str_equal(mimetype, "video/x-dv"))
1735 {
1736 fourcc = V4L2_PIX_FMT_DV;
1737 }
1738 else if (g_str_equal(mimetype, "image/jpeg"))
1739 {
1740 fourcc = V4L2_PIX_FMT_JPEG;
1741 }
1742 else if (g_str_equal(mimetype, "video/mpeg"))
1743 {
1744 gint version;
1745 if (gst_structure_get_int(structure, "mpegversion", &version))
1746 {
1747 switch (version)
1748 {
1749 case 1:
1750 fourcc = V4L2_PIX_FMT_MPEG1;
1751 break;
1752 case 2:
1753 fourcc = V4L2_PIX_FMT_MPEG2;
1754 break;
1755 case 4:
1756 fourcc = V4L2_PIX_FMT_MPEG4;
1757 fourcc_nc = V4L2_PIX_FMT_XVID;
1758 break;
1759 default:
1760 break;
1761 }
1762 }
1763 }
1764 else if (g_str_equal(mimetype, "video/x-fwht"))
1765 {
1766 fourcc = V4L2_PIX_FMT_FWHT;
1767 }
1768 else if (g_str_equal(mimetype, "video/x-h263"))
1769 {
1770 fourcc = V4L2_PIX_FMT_H263;
1771 }
1772 else if (g_str_equal(mimetype, "video/x-h264"))
1773 {
1774 const gchar *stream_format =
1775 gst_structure_get_string(structure, "stream-format");
1776 if (stream_format && g_str_equal(stream_format, "avc"))
1777 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1778 else
1779 fourcc = V4L2_PIX_FMT_H264;
1780 }
1781 else if (g_str_equal(mimetype, "video/x-h265"))
1782 {
1783 fourcc = V4L2_PIX_FMT_HEVC;
1784 }
1785 else if (g_str_equal(mimetype, "video/x-vp8"))
1786 {
1787 fourcc = V4L2_PIX_FMT_VP8;
1788 }
1789 else if (g_str_equal(mimetype, "video/x-vp9"))
1790 {
1791 fourcc = V4L2_PIX_FMT_VP9;
1792 }
1793 else if (g_str_equal(mimetype, "video/x-av1"))
1794 {
1795 fourcc = V4L2_PIX_FMT_AV1;
1796 }
1797 else if (g_str_equal(mimetype, "video/x-bayer"))
1798 {
1799 const gchar *format = gst_structure_get_string(structure, "format");
1800 if (format)
1801 {
1802 if (!g_ascii_strcasecmp(format, "bggr"))
1803 fourcc = V4L2_PIX_FMT_SBGGR8;
1804 else if (!g_ascii_strcasecmp(format, "gbrg"))
1805 fourcc = V4L2_PIX_FMT_SGBRG8;
1806 else if (!g_ascii_strcasecmp(format, "grbg"))
1807 fourcc = V4L2_PIX_FMT_SGRBG8;
1808 else if (!g_ascii_strcasecmp(format, "rggb"))
1809 fourcc = V4L2_PIX_FMT_SRGGB8;
1810 }
1811 }
1812 else if (g_str_equal(mimetype, "video/x-sonix"))
1813 {
1814 fourcc = V4L2_PIX_FMT_SN9C10X;
1815 }
1816 else if (g_str_equal(mimetype, "video/x-pwc1"))
1817 {
1818 fourcc = V4L2_PIX_FMT_PWC1;
1819 }
1820 else if (g_str_equal(mimetype, "video/x-pwc2"))
1821 {
1822 fourcc = V4L2_PIX_FMT_PWC2;
1823 }
1824 }
1825
1826 /* Prefer the non-contiguous if supported */
1827 v4l2object->prefered_non_contiguous = TRUE;
1828
1829 if (fourcc_nc)
1830 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc_nc);
1831 else if (fourcc == 0)
1832 goto unhandled_format;
1833
1834 if (fmt == NULL)
1835 {
1836 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc);
1837 v4l2object->prefered_non_contiguous = FALSE;
1838 }
1839
1840 if (fmt == NULL)
1841 goto unsupported_format;
1842
1843 *format = fmt;
1844
1845 return TRUE;
1846
1847 /* ERRORS */
1848invalid_format:
1849{
1850 GST_DEBUG_OBJECT(v4l2object, "invalid format");
1851 return FALSE;
1852}
1853unhandled_format:
1854{
1855 GST_DEBUG_OBJECT(v4l2object, "unhandled format");
1856 return FALSE;
1857}
1858unsupported_format:
1859{
1860 GST_DEBUG_OBJECT(v4l2object, "unsupported format");
1861 return FALSE;
1862}
1863}
1864
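/* Call sketch (hedged: real callers live in the element code, outside this
 * file):
 *
 *   struct v4l2_fmtdesc *fmtdesc = NULL;
 *   GstVideoInfo info;
 *
 *   if (gst_aml_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
 *     GST_DEBUG ("negotiated %" GST_FOURCC_FORMAT,
 *         GST_FOURCC_ARGS (fmtdesc->pixelformat));
 *
 * The GstVideoInfo needs no separate setup, since gst_video_info_from_caps()
 * fills it inside the call.
 */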
1865static gboolean
1866gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
1867 guint32 pixelformat, gint *width, gint *height);
1868
1869static void
1870gst_aml_v4l2_object_add_aspect_ratio(GstAmlV4l2Object *v4l2object, GstStructure *s)
1871{
1872 if (v4l2object->keep_aspect && v4l2object->par)
1873 gst_structure_set_value(s, "pixel-aspect-ratio", v4l2object->par);
1874}
1875
1876/* returns TRUE if the value was changed in place, otherwise FALSE */
1877static gboolean
1878gst_aml_v4l2src_value_simplify(GValue *val)
1879{
1880 /* simplify list of one value to one value */
1881 if (GST_VALUE_HOLDS_LIST(val) && gst_value_list_get_size(val) == 1)
1882 {
1883 const GValue *list_val;
1884 GValue new_val = G_VALUE_INIT;
1885
1886 list_val = gst_value_list_get_value(val, 0);
1887 g_value_init(&new_val, G_VALUE_TYPE(list_val));
1888 g_value_copy(list_val, &new_val);
1889 g_value_unset(val);
1890 *val = new_val;
1891 return TRUE;
1892 }
1893
1894 return FALSE;
1895}
1896
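/* Example (illustrative): a probed caps field that came back as a one-entry
 * list, e.g. framerate = (fraction) { 30/1 }, is rewritten in place to the
 * plain value 30/1, which keeps later fixation and comparisons simple.
 */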
1897static gboolean
1898gst_aml_v4l2_object_get_interlace_mode(enum v4l2_field field,
1899 GstVideoInterlaceMode *interlace_mode)
1900{
1901 switch (field)
1902 {
1903 case V4L2_FIELD_ANY:
1904 GST_ERROR("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
1905 /* fallthrough */
1906 case V4L2_FIELD_NONE:
1907 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1908 return TRUE;
1909 case V4L2_FIELD_INTERLACED:
1910 case V4L2_FIELD_INTERLACED_TB:
1911 case V4L2_FIELD_INTERLACED_BT:
1912 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1913 return TRUE;
1914 default:
1915 GST_ERROR("Unknown enum v4l2_field %d", field);
1916 return FALSE;
1917 }
1918}
1919
1920static gboolean
1921gst_aml_v4l2_object_get_colorspace(struct v4l2_format *fmt,
1922 GstVideoColorimetry *cinfo)
1923{
1924 gboolean is_rgb =
1925 gst_amL_v4l2_object_v4l2fourcc_is_rgb(fmt->fmt.pix.pixelformat);
1926 enum v4l2_colorspace colorspace;
1927 enum v4l2_quantization range;
1928 enum v4l2_ycbcr_encoding matrix;
1929 enum v4l2_xfer_func transfer;
1930 gboolean ret = TRUE;
1931
1932 if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
1933 {
1934 colorspace = fmt->fmt.pix_mp.colorspace;
1935 range = fmt->fmt.pix_mp.quantization;
1936 matrix = fmt->fmt.pix_mp.ycbcr_enc;
1937 transfer = fmt->fmt.pix_mp.xfer_func;
1938 }
1939 else
1940 {
1941 colorspace = fmt->fmt.pix.colorspace;
1942 range = fmt->fmt.pix.quantization;
1943 matrix = fmt->fmt.pix.ycbcr_enc;
1944 transfer = fmt->fmt.pix.xfer_func;
1945 }
xuesong.jiange1a19662022-06-21 20:30:22 +08001946 GST_DEBUG("colorspace:%d, range:%d, matrix:%d, transfer:%d", colorspace, range, matrix, transfer);
1947 GST_DEBUG("cinfo update 1 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001948
1949 /* First step, set the defaults for each primaries */
1950 switch (colorspace)
1951 {
1952 case V4L2_COLORSPACE_SMPTE170M:
1953 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1954 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1955 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1956 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
1957 break;
1958 case V4L2_COLORSPACE_REC709:
1959 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1960 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
1961 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1962 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1963 break;
1964 case V4L2_COLORSPACE_SRGB:
1965 case V4L2_COLORSPACE_JPEG:
1966 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
1967 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1968 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
1969 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1970 break;
1971 case V4L2_COLORSPACE_OPRGB:
1972 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1973 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1974 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
1975 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
1976 break;
1977 case V4L2_COLORSPACE_BT2020:
1978 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1979 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
1980 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
1981 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
1982 break;
1983 case V4L2_COLORSPACE_SMPTE240M:
1984 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1985 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
1986 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
1987 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
1988 break;
1989 case V4L2_COLORSPACE_470_SYSTEM_M:
1990 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1991 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1992 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1993 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
1994 break;
1995 case V4L2_COLORSPACE_470_SYSTEM_BG:
1996 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1997 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1998 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1999 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
2000 break;
2001 case V4L2_COLORSPACE_RAW:
2002 /* Explicitly unknown */
2003 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2004 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2005 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2006 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
2007 break;
2008 default:
2009 GST_DEBUG("Unknown enum v4l2_colorspace %d", colorspace);
2010 ret = FALSE;
2011 break;
2012 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002013 GST_DEBUG("cinfo update 2 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002014
2015 if (!ret)
2016 goto done;
2017
2018 /* Second step, apply any custom variation */
2019 switch (range)
2020 {
2021 case V4L2_QUANTIZATION_FULL_RANGE:
2022 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2023 break;
2024 case V4L2_QUANTIZATION_LIM_RANGE:
2025 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2026 break;
2027 case V4L2_QUANTIZATION_DEFAULT:
2028 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2029 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2030 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2031 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601 || matrix == V4L2_YCBCR_ENC_XV709 || colorspace == V4L2_COLORSPACE_JPEG)
2032 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2033 else
2034 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2035 break;
2036 default:
2037 GST_WARNING("Unknown enum v4l2_quantization value %d", range);
2038 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2039 break;
2040 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002041 GST_DEBUG("cinfo update 3 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002042
2043 switch (matrix)
2044 {
2045 case V4L2_YCBCR_ENC_XV601:
2046 case V4L2_YCBCR_ENC_SYCC:
2047 GST_FIXME("XV601 and SYCC not defined, assuming 601");
2048 /* fallthrough */
2049 case V4L2_YCBCR_ENC_601:
2050 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2051 break;
2052 case V4L2_YCBCR_ENC_XV709:
2053 GST_FIXME("XV709 not defined, assuming 709");
2054 /* fallthrough */
2055 case V4L2_YCBCR_ENC_709:
2056 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2057 break;
2058 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2059 GST_FIXME("BT2020 with constant luma is not defined, assuming BT2020");
2060 /* fallthrough */
2061 case V4L2_YCBCR_ENC_BT2020:
2062 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2063 break;
2064 case V4L2_YCBCR_ENC_SMPTE240M:
2065 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2066 break;
2067 case V4L2_YCBCR_ENC_DEFAULT:
2068 /* nothing, just use defaults for colorspace */
2069 break;
2070 default:
2071 GST_WARNING("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2072 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2073 break;
2074 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002075 GST_DEBUG("cinfo update 4 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002076
2077 /* Set identity matrix for R'G'B' formats to avoid creating
2078 * confusion. This though is cosmetic as it's now properly ignored by
2079 * the video info API and videoconvert. */
2080 if (is_rgb)
2081 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2082
2083 switch (transfer)
2084 {
2085 case V4L2_XFER_FUNC_709:
2086 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2087 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2088 else
2089 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2090 break;
2091 case V4L2_XFER_FUNC_SRGB:
2092 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2093 break;
2094 case V4L2_XFER_FUNC_OPRGB:
2095 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2096 break;
2097 case V4L2_XFER_FUNC_SMPTE240M:
2098 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2099 break;
2100 case V4L2_XFER_FUNC_NONE:
2101 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2102 break;
2103 case V4L2_XFER_FUNC_DEFAULT:
2104 /* nothing, just use defaults for colorspace */
2105 break;
2106 default:
2107 GST_WARNING("Unknown enum v4l2_xfer_func value %d", transfer);
2108 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2109 break;
2110 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002111 GST_DEBUG("cinfo update 5 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002112
2113done:
2114 return ret;
2115}
2116
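/* Thin wrapper around VIDIOC_TRY_FMT. If the driver does not implement
 * TRY_FMT (ENOTTY) and the object is not active yet, fall back to S_FMT
 * so the format can still be probed. */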
2117static int
2118gst_aml_v4l2_object_try_fmt(GstAmlV4l2Object *v4l2object,
2119 struct v4l2_format *try_fmt)
2120{
2121 int fd = v4l2object->video_fd;
2122 struct v4l2_format fmt;
2123 int r;
2124
2125 memcpy(&fmt, try_fmt, sizeof(fmt));
2126 r = v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &fmt);
2127
2128 if (r < 0 && errno == ENOTTY)
2129 {
2130 /* The driver might not implement TRY_FMT, in which case we will try
2131 S_FMT to probe */
2132 if (GST_AML_V4L2_IS_ACTIVE(v4l2object))
2133 goto error;
2134
2135 memcpy(&fmt, try_fmt, sizeof(fmt));
2136 r = v4l2object->ioctl(fd, VIDIOC_S_FMT, &fmt);
2137 }
2138 memcpy(try_fmt, &fmt, sizeof(fmt));
2139
2140 return r;
2141
2142error:
2143 memcpy(try_fmt, &fmt, sizeof(fmt));
2144 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2145 "Unable to try format: %s", g_strerror(errno));
2146 return r;
2147}
2148
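/* Probe which interlace modes the driver accepts for the given size and
 * pixel format by trying V4L2_FIELD_NONE and V4L2_FIELD_INTERLACED, and
 * store the result as the "interlace-mode" field of the structure. */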
2149static void
2150gst_aml_v4l2_object_add_interlace_mode(GstAmlV4l2Object *v4l2object,
2151 GstStructure *s, guint32 width, guint32 height, guint32 pixelformat)
2152{
2153 struct v4l2_format fmt;
2154 GValue interlace_formats = {
2155 0,
2156 };
2157 enum v4l2_field formats[] = {V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED};
2158 gsize i;
2159 GstVideoInterlaceMode interlace_mode, prev = -1;
2160
2161 if (!g_str_equal(gst_structure_get_name(s), "video/x-raw"))
2162 return;
2163
2164 if (v4l2object->never_interlaced)
2165 {
2166 gst_structure_set(s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2167 return;
2168 }
2169
2170 g_value_init(&interlace_formats, GST_TYPE_LIST);
2171
2172 /* Try twice - once for NONE, once for INTERLACED. */
2173 for (i = 0; i < G_N_ELEMENTS(formats); i++)
2174 {
2175 memset(&fmt, 0, sizeof(fmt));
2176 fmt.type = v4l2object->type;
2177 fmt.fmt.pix.width = width;
2178 fmt.fmt.pix.height = height;
2179 fmt.fmt.pix.pixelformat = pixelformat;
2180 fmt.fmt.pix.field = formats[i];
2181
2182 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0 &&
2183 gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode) && prev != interlace_mode)
2184 {
2185 GValue interlace_enum = {
2186 0,
2187 };
2188 const gchar *mode_string;
2189 g_value_init(&interlace_enum, G_TYPE_STRING);
2190 mode_string = gst_video_interlace_mode_to_string(interlace_mode);
2191 g_value_set_string(&interlace_enum, mode_string);
2192 gst_value_list_append_and_take_value(&interlace_formats,
2193 &interlace_enum);
2194 prev = interlace_mode;
2195 }
2196 }
2197
2198 if (gst_aml_v4l2src_value_simplify(&interlace_formats) || gst_value_list_get_size(&interlace_formats) > 0)
2199 gst_structure_take_value(s, "interlace-mode", &interlace_formats);
2200 else
2201 GST_WARNING_OBJECT(v4l2object, "Failed to determine interlace mode");
2202
2203 return;
2204}
2205
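/* Append the colorimetry string for @cinfo to @list, skipping duplicates. */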
2206static void
2207gst_aml_v4l2_object_fill_colorimetry_list(GValue *list,
2208 GstVideoColorimetry *cinfo)
2209{
2210 GValue colorimetry = G_VALUE_INIT;
2211 guint size;
2212 guint i;
2213 gboolean found = FALSE;
2214
2215 g_value_init(&colorimetry, G_TYPE_STRING);
2216 g_value_take_string(&colorimetry, gst_video_colorimetry_to_string(cinfo));
xuesong.jiange1a19662022-06-21 20:30:22 +08002217 GST_DEBUG("fill colorimetry:%s into list", gst_video_colorimetry_to_string(cinfo));
xuesong.jiangae1548e2022-05-06 16:38:46 +08002218
2219 /* only insert if no duplicate */
2220 size = gst_value_list_get_size(list);
2221 for (i = 0; i < size; i++)
2222 {
2223 const GValue *tmp;
2224
2225 tmp = gst_value_list_get_value(list, i);
2226 if (gst_value_compare(&colorimetry, tmp) == GST_VALUE_EQUAL)
2227 {
2228 found = TRUE;
2229 break;
2230 }
2231 }
2232
2233 if (!found)
2234 gst_value_list_append_and_take_value(list, &colorimetry);
2235 else
2236 g_value_unset(&colorimetry);
2237}
2238
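/* Probe the colorimetries the driver supports for the given size and pixel
 * format and store them as the "colorimetry" field of the structure. */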
2239static void
2240gst_aml_v4l2_object_add_colorspace(GstAmlV4l2Object *v4l2object, GstStructure *s,
2241 guint32 width, guint32 height, guint32 pixelformat)
2242{
2243 struct v4l2_format fmt;
2244 GValue list = G_VALUE_INIT;
2245 GstVideoColorimetry cinfo;
2246 enum v4l2_colorspace req_cspace;
2247
2248 memset(&fmt, 0, sizeof(fmt));
2249 fmt.type = v4l2object->type;
2250 fmt.fmt.pix.width = width;
2251 fmt.fmt.pix.height = height;
2252 fmt.fmt.pix.pixelformat = pixelformat;
2253
2254 g_value_init(&list, GST_TYPE_LIST);
2255
2256 /* step 1: get device default colorspace and insert it first as
2257 * it should be the preferred one */
xuesong.jiange1a19662022-06-21 20:30:22 +08002258 GST_DEBUG("try for pixel format");
xuesong.jiangae1548e2022-05-06 16:38:46 +08002259 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2260 {
2261 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2262 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2263 }
2264
2265 /* step 2: probe all colorspaces other than the default
2266 * We don't probe every colorspace, range, matrix and transfer combination to
2267 * avoid ioctl flooding, which could greatly increase initialization time
2268 * with low-speed devices (UVC...) */
2269 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2270 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++)
2271 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002272 GST_DEBUG("try for pixel format in loop: %d", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002273 /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2274 if (req_cspace == V4L2_COLORSPACE_BT878)
2275 continue;
2276
2277 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2278 fmt.fmt.pix_mp.colorspace = req_cspace;
2279 else
2280 fmt.fmt.pix.colorspace = req_cspace;
2281
2282 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2283 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002284 GST_DEBUG("try for pixel format in loop: %d tried ok", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002285 enum v4l2_colorspace colorspace;
2286
2287 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2288 colorspace = fmt.fmt.pix_mp.colorspace;
2289 else
2290 colorspace = fmt.fmt.pix.colorspace;
2291
2292 if (colorspace == req_cspace)
2293 {
2294 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2295 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2296 }
2297 }
2298 }
2299
xuesong.jiange1a19662022-06-21 20:30:22 +08002300 /* also insert colorimetry 2:3:14:7 so caps carrying this colorimetry can negotiate */
2301 cinfo.range = 2;
2302 cinfo.matrix = 3;
2303 cinfo.transfer = 14;
2304 cinfo.primaries = 7;
2305 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2306
xuesong.jiangae1548e2022-05-06 16:38:46 +08002307 if (gst_value_list_get_size(&list) > 0)
2308 gst_structure_take_value(s, "colorimetry", &list);
2309 else
2310 g_value_unset(&list);
2311
2312 return;
2313}
2314
2315/* The frame interval enumeration code first appeared in Linux 2.6.19. */
2316static GstStructure *
2317gst_aml_v4l2_object_probe_caps_for_format_and_size(GstAmlV4l2Object *v4l2object,
2318 guint32 pixelformat,
2319 guint32 width, guint32 height, const GstStructure *template)
2320{
2321 gint fd = v4l2object->video_fd;
2322 struct v4l2_frmivalenum ival;
2323 guint32 num, denom;
2324 GstStructure *s;
2325 GValue rates = {
2326 0,
2327 };
2328
2329 memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
2330 ival.index = 0;
2331 ival.pixel_format = pixelformat;
2332 ival.width = width;
2333 ival.height = height;
2334
2335 GST_LOG_OBJECT(v4l2object->dbg_obj,
2336 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2337 GST_FOURCC_ARGS(pixelformat));
2338
2339 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2340 * fraction to get framerate */
2341 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2342 goto enum_frameintervals_failed;
2343
2344 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
2345 {
2346 GValue rate = {
2347 0,
2348 };
2349
2350 g_value_init(&rates, GST_TYPE_LIST);
2351 g_value_init(&rate, GST_TYPE_FRACTION);
2352
2353 do
2354 {
2355 num = ival.discrete.numerator;
2356 denom = ival.discrete.denominator;
2357
2358 if (num > G_MAXINT || denom > G_MAXINT)
2359 {
2360 /* let us hope we don't get here... */
2361 num >>= 1;
2362 denom >>= 1;
2363 }
2364
2365 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2366 denom, num);
2367
2368 /* swap to get the framerate */
2369 gst_value_set_fraction(&rate, denom, num);
2370 gst_value_list_append_value(&rates, &rate);
2371
2372 ival.index++;
2373 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2374 }
2375 else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
2376 {
2377 GValue min = {
2378 0,
2379 };
2380 GValue step = {
2381 0,
2382 };
2383 GValue max = {
2384 0,
2385 };
2386 gboolean added = FALSE;
2387 guint32 minnum, mindenom;
2388 guint32 maxnum, maxdenom;
2389
2390 g_value_init(&rates, GST_TYPE_LIST);
2391
2392 g_value_init(&min, GST_TYPE_FRACTION);
2393 g_value_init(&step, GST_TYPE_FRACTION);
2394 g_value_init(&max, GST_TYPE_FRACTION);
2395
2396 /* get the min */
2397 minnum = ival.stepwise.min.numerator;
2398 mindenom = ival.stepwise.min.denominator;
2399 if (minnum > G_MAXINT || mindenom > G_MAXINT)
2400 {
2401 minnum >>= 1;
2402 mindenom >>= 1;
2403 }
2404 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2405 minnum, mindenom);
2406 gst_value_set_fraction(&min, minnum, mindenom);
2407
2408 /* get the max */
2409 maxnum = ival.stepwise.max.numerator;
2410 maxdenom = ival.stepwise.max.denominator;
2411 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2412 {
2413 maxnum >>= 1;
2414 maxdenom >>= 1;
2415 }
2416
2417 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2418 maxnum, maxdenom);
2419 gst_value_set_fraction(&max, maxnum, maxdenom);
2420
2421 /* get the step */
2422 num = ival.stepwise.step.numerator;
2423 denom = ival.stepwise.step.denominator;
2424 if (num > G_MAXINT || denom > G_MAXINT)
2425 {
2426 num >>= 1;
2427 denom >>= 1;
2428 }
2429
2430 if (num == 0 || denom == 0)
2431 {
2432 /* in this case we have a wrong fraction or no step, set the step to max
2433 * so that we only add the min value in the loop below */
2434 num = maxnum;
2435 denom = maxdenom;
2436 }
2437
2438 /* since we only have gst_value_fraction_subtract and not add, negate the
2439 * numerator */
2440 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2441 num, denom);
2442 gst_value_set_fraction(&step, -num, denom);
2443
2444 while (gst_value_compare(&min, &max) != GST_VALUE_GREATER_THAN)
2445 {
2446 GValue rate = {
2447 0,
2448 };
2449
2450 num = gst_value_get_fraction_numerator(&min);
2451 denom = gst_value_get_fraction_denominator(&min);
2452 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2453 denom, num);
2454
2455 /* invert to get the framerate */
2456 g_value_init(&rate, GST_TYPE_FRACTION);
2457 gst_value_set_fraction(&rate, denom, num);
2458 gst_value_list_append_value(&rates, &rate);
2459 added = TRUE;
2460
2461 /* we're actually adding because step was negated above. This is because
2462 * there is no _add function... */
2463 if (!gst_value_fraction_subtract(&min, &min, &step))
2464 {
2465 GST_WARNING_OBJECT(v4l2object->dbg_obj, "could not step fraction!");
2466 break;
2467 }
2468 }
2469 if (!added)
2470 {
2471 /* no range was added, leave the default range from the template */
2472 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2473 "no range added, leaving default");
2474 g_value_unset(&rates);
2475 }
2476 }
2477 else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
2478 {
2479 guint32 maxnum, maxdenom;
2480
2481 g_value_init(&rates, GST_TYPE_FRACTION_RANGE);
2482
2483 num = ival.stepwise.min.numerator;
2484 denom = ival.stepwise.min.denominator;
2485 if (num > G_MAXINT || denom > G_MAXINT)
2486 {
2487 num >>= 1;
2488 denom >>= 1;
2489 }
2490
2491 maxnum = ival.stepwise.max.numerator;
2492 maxdenom = ival.stepwise.max.denominator;
2493 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2494 {
2495 maxnum >>= 1;
2496 maxdenom >>= 1;
2497 }
2498
2499 GST_LOG_OBJECT(v4l2object->dbg_obj,
2500 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2501 num);
2502
2503 gst_value_set_fraction_range_full(&rates, maxdenom, maxnum, denom, num);
2504 }
2505 else
2506 {
2507 goto unknown_type;
2508 }
2509
2510return_data:
2511 s = gst_structure_copy(template);
2512 gst_structure_set(s, "width", G_TYPE_INT, (gint)width,
2513 "height", G_TYPE_INT, (gint)height, NULL);
2514
2515 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, s);
2516
2517 if (!v4l2object->skip_try_fmt_probes)
2518 {
2519 gst_aml_v4l2_object_add_interlace_mode(v4l2object, s, width, height,
2520 pixelformat);
2521 gst_aml_v4l2_object_add_colorspace(v4l2object, s, width, height, pixelformat);
2522 }
2523
2524 if (G_IS_VALUE(&rates))
2525 {
2526 gst_aml_v4l2src_value_simplify(&rates);
2527 /* only change the framerate on the template when we have a valid probed new
2528 * value */
2529 gst_structure_take_value(s, "framerate", &rates);
2530 }
2531 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2532 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2533 {
2534 gst_structure_set(s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2535 1, NULL);
2536 }
2537 return s;
2538
2539 /* ERRORS */
2540enum_frameintervals_failed:
2541{
2542 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2543 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2544 GST_FOURCC_ARGS(pixelformat), width, height);
2545 goto return_data;
2546}
2547unknown_type:
2548{
2549 /* I don't see how this is actually an error, we ignore the format then */
2550 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2551 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2552 GST_FOURCC_ARGS(pixelformat), width, height, ival.type);
2553 return NULL;
2554}
2555}
2556
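/* GCompareFunc that orders structures by decreasing width * height, so the
 * largest resolution ends up first in the resulting caps. */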
2557static gint
2558sort_by_frame_size(GstStructure *s1, GstStructure *s2)
2559{
2560 int w1, h1, w2, h2;
2561
2562 gst_structure_get_int(s1, "width", &w1);
2563 gst_structure_get_int(s1, "height", &h1);
2564 gst_structure_get_int(s2, "width", &w2);
2565 gst_structure_get_int(s2, "height", &h2);
2566
2567 /* I think it's safe to assume that this won't overflow for a while */
2568 return ((w2 * h2) - (w1 * h1));
2569}
2570
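/* Append @s to @caps; codec formats on the output side that need parsing are
 * marked with parsed=true, and alpha-capable RGB formats on the capture side
 * additionally get an ARGB/BGRA variant. */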
2571static void
2572gst_aml_v4l2_object_update_and_append(GstAmlV4l2Object *v4l2object,
2573 guint32 format, GstCaps *caps, GstStructure *s)
2574{
2575 GstStructure *alt_s = NULL;
2576
2577 /* Encoded streams on the output buffer need to be parsed */
2578 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2579 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
2580 {
2581 gint i = 0;
2582
2583 for (; i < GST_AML_V4L2_FORMAT_COUNT; i++)
2584 {
2585 if (format == gst_aml_v4l2_formats[i].format &&
2586 gst_aml_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2587 !(gst_aml_v4l2_formats[i].flags & GST_V4L2_NO_PARSE))
2588 {
2589 gst_structure_set(s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2590 break;
2591 }
2592 }
2593 }
2594
2595 if (v4l2object->has_alpha_component &&
2596 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2597 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE))
2598 {
2599 switch (format)
2600 {
2601 case V4L2_PIX_FMT_RGB32:
2602 alt_s = gst_structure_copy(s);
2603 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2604 break;
2605 case V4L2_PIX_FMT_BGR32:
2606 alt_s = gst_structure_copy(s);
2607 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2608 break;
2609 default:
2610 break;
2611 }
2612 }
2613
2614 gst_caps_append_structure(caps, s);
2615
2616 if (alt_s)
2617 gst_caps_append_structure(caps, alt_s);
2618}
2619
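/* Enumerate the frame sizes supported for @pixelformat (discrete, stepwise
 * or continuous) and build the corresponding caps from @template, falling
 * back to a probed min/max range on drivers without VIDIOC_ENUM_FRAMESIZES. */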
2620static GstCaps *
2621gst_aml_v4l2_object_probe_caps_for_format(GstAmlV4l2Object *v4l2object,
2622 guint32 pixelformat, const GstStructure *template)
2623{
2624 GstCaps *ret = gst_caps_new_empty();
2625 GstStructure *tmp;
2626 gint fd = v4l2object->video_fd;
2627 struct v4l2_frmsizeenum size;
2628 GList *results = NULL;
2629 guint32 w, h;
2630
2631 if (pixelformat == GST_MAKE_FOURCC('M', 'P', 'E', 'G'))
2632 {
2633 gst_caps_append_structure(ret, gst_structure_copy(template));
2634 return ret;
2635 }
2636
2637 memset(&size, 0, sizeof(struct v4l2_frmsizeenum));
2638 size.index = 0;
2639 size.pixel_format = pixelformat;
2640
2641 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2642 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2643 GST_FOURCC_ARGS(pixelformat));
2644
2645 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2646 goto enum_framesizes_failed;
2647
2648 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE)
2649 {
2650 do
2651 {
2652 GST_LOG_OBJECT(v4l2object->dbg_obj, "got discrete frame size %dx%d",
2653 size.discrete.width, size.discrete.height);
2654
2655 w = MIN(size.discrete.width, G_MAXINT);
2656 h = MIN(size.discrete.height, G_MAXINT);
2657
2658 if (w && h)
2659 {
2660 tmp =
2661 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2662 pixelformat, w, h, template);
2663
2664 if (tmp)
2665 results = g_list_prepend(results, tmp);
2666 }
2667
2668 size.index++;
2669 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2670 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2671 "done iterating discrete frame sizes");
2672 }
2673 else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
2674 {
2675 guint32 maxw, maxh, step_w, step_h;
2676
2677 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have stepwise frame sizes:");
2678 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2679 size.stepwise.min_width);
2680 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2681 size.stepwise.min_height);
2682 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2683 size.stepwise.max_width);
2684 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2685 size.stepwise.max_height);
2686 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step width: %d",
2687 size.stepwise.step_width);
2688 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step height: %d",
2689 size.stepwise.step_height);
2690
2691 w = MAX(size.stepwise.min_width, 1);
2692 h = MAX(size.stepwise.min_height, 1);
2693 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2694 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2695
2696 step_w = MAX(size.stepwise.step_width, 1);
2697 step_h = MAX(size.stepwise.step_height, 1);
2698
2699 /* FIXME: check for sanity and that min/max are multiples of the steps */
2700
2701 /* we only query details for the max width/height since it's likely the
2702 * most restricted if there are any resolution-dependent restrictions */
2703 tmp = gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2704 pixelformat, maxw, maxh, template);
2705
2706 if (tmp)
2707 {
2708 GValue step_range = G_VALUE_INIT;
2709
2710 g_value_init(&step_range, GST_TYPE_INT_RANGE);
2711 gst_value_set_int_range_step(&step_range, w, maxw, step_w);
2712 gst_structure_set_value(tmp, "width", &step_range);
2713
2714 gst_value_set_int_range_step(&step_range, h, maxh, step_h);
2715 gst_structure_take_value(tmp, "height", &step_range);
2716
2717 /* no point using the results list here, since there's only one struct */
2718 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2719 }
2720 }
2721 else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)
2722 {
2723 guint32 maxw, maxh;
2724
2725 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have continuous frame sizes:");
2726 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2727 size.stepwise.min_width);
2728 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2729 size.stepwise.min_height);
2730 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2731 size.stepwise.max_width);
2732 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max height: %d",
2733 size.stepwise.max_height);
2734
2735 w = MAX(size.stepwise.min_width, 1);
2736 h = MAX(size.stepwise.min_height, 1);
2737 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2738 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2739
2740 tmp =
2741 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object, pixelformat,
2742 w, h, template);
2743 if (tmp)
2744 {
2745 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, (gint)w,
2746 (gint)maxw, "height", GST_TYPE_INT_RANGE, (gint)h, (gint)maxh,
2747 NULL);
2748
2749 /* no point using the results list here, since there's only one struct */
2750 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2751 }
2752 }
2753 else
2754 {
2755 goto unknown_type;
2756 }
2757
2758 /* we use an intermediary list to store and then sort the results of the
2759 * probing because we can't make any assumptions about the order in which
2760 * the driver will give us the sizes, but we want the final caps to contain
2761 * the results starting with the highest resolution and having the lowest
2762 * resolution last, since order in caps matters for things like fixation. */
2763 results = g_list_sort(results, (GCompareFunc)sort_by_frame_size);
2764 while (results != NULL)
2765 {
2766 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret,
2767 results->data);
2768 results = g_list_delete_link(results, results);
2769 }
2770
2771 if (gst_caps_is_empty(ret))
2772 goto enum_framesizes_no_results;
2773
2774 return ret;
2775
2776 /* ERRORS */
2777enum_framesizes_failed:
2778{
2779 /* I don't see how this is actually an error */
2780 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2781 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2782 " (%s)",
2783 GST_FOURCC_ARGS(pixelformat), g_strerror(errno));
2784 goto default_frame_sizes;
2785}
2786enum_framesizes_no_results:
2787{
2788 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2789 * question doesn't actually support it yet */
2790 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2791 "No results for pixelformat %" GST_FOURCC_FORMAT
2792 " enumerating frame sizes, trying fallback",
2793 GST_FOURCC_ARGS(pixelformat));
2794 goto default_frame_sizes;
2795}
2796unknown_type:
2797{
2798 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2799 "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
2800 ": %u",
2801 GST_FOURCC_ARGS(pixelformat), size.type);
2802 goto default_frame_sizes;
2803}
2804
2805default_frame_sizes:
2806{
2807 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2808
2809 /* This code is for Linux < 2.6.19 */
2810 min_w = min_h = 1;
2811 max_w = max_h = GST_AML_V4L2_MAX_SIZE;
2812 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &min_w,
2813 &min_h))
2814 {
2815 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2816 "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2817 }
2818 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &max_w,
2819 &max_h))
2820 {
2821 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2822 "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2823 }
2824
2825 tmp = gst_structure_copy(template);
2826 if (fix_num)
2827 {
2828 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2829 fix_denom, NULL);
2830 }
2831 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2832 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2833 {
2834 /* if norm can't be used, copy the template framerate */
2835 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2836 G_MAXINT, 1, NULL);
2837 }
2838
2839 if (min_w == max_w)
2840 gst_structure_set(tmp, "width", G_TYPE_INT, max_w, NULL);
2841 else
2842 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2843
2844 if (min_h == max_h)
2845 gst_structure_set(tmp, "height", G_TYPE_INT, max_h, NULL);
2846 else
2847 gst_structure_set(tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2848
2849 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, tmp);
2850
2851 if (!v4l2object->skip_try_fmt_probes)
2852 {
2853 /* We could consider setting interlace mode from min and max. */
2854 gst_aml_v4l2_object_add_interlace_mode(v4l2object, tmp, max_w, max_h,
2855 pixelformat);
2856 /* We could also check the colorspace for the minimum size, in case it
2857 * depends on the size. But then min and max might not be enough */
2858 gst_aml_v4l2_object_add_colorspace(v4l2object, tmp, max_w, max_h,
2859 pixelformat);
2860 }
2861
2862 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2863 return ret;
2864}
2865}
2866
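/* Ask the driver, via TRY_FMT, for the closest supported frame size to
 * *width x *height for @pixelformat and return it in the same variables. */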
2867static gboolean
2868gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
2869 guint32 pixelformat, gint *width, gint *height)
2870{
2871 struct v4l2_format fmt;
2872 gboolean ret = FALSE;
2873 GstVideoInterlaceMode interlace_mode;
2874
2875 g_return_val_if_fail(width != NULL, FALSE);
2876 g_return_val_if_fail(height != NULL, FALSE);
2877
2878 GST_LOG_OBJECT(v4l2object->dbg_obj,
2879 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2880 *width, *height, GST_FOURCC_ARGS(pixelformat));
2881
2882 memset(&fmt, 0, sizeof(struct v4l2_format));
2883
2884 /* get size delimiters */
2885 memset(&fmt, 0, sizeof(fmt));
2886 fmt.type = v4l2object->type;
2887 fmt.fmt.pix.width = *width;
2888 fmt.fmt.pix.height = *height;
2889 fmt.fmt.pix.pixelformat = pixelformat;
2890 fmt.fmt.pix.field = V4L2_FIELD_ANY;
2891
2892 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) < 0)
2893 goto error;
2894
2895 GST_LOG_OBJECT(v4l2object->dbg_obj,
2896 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2897
2898 *width = fmt.fmt.pix.width;
2899 *height = fmt.fmt.pix.height;
2900
2901 if (!gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode))
2902 {
2903 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2904 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2905 GST_FOURCC_ARGS(pixelformat), *width, *height, fmt.fmt.pix.field);
2906 goto error;
2907 }
2908
2909 ret = TRUE;
2910
2911error:
2912 if (!ret)
2913 {
2914 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2915 "Unable to try format: %s", g_strerror(errno));
2916 }
2917
2918 return ret;
2919}
2920
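/* Check whether the driver can export DMABUF handles: an (expected to fail)
 * VIDIOC_EXPBUF call that returns ENOTTY means the ioctl is not implemented. */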
2921static gboolean
2922gst_aml_v4l2_object_is_dmabuf_supported(GstAmlV4l2Object *v4l2object)
2923{
2924 gboolean ret = TRUE;
2925 struct v4l2_exportbuffer expbuf = {
2926 .type = v4l2object->type,
2927 .index = -1,
2928 .plane = -1,
2929 .flags = O_CLOEXEC | O_RDWR,
2930 };
2931
2932 if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED)
2933 {
2934 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2935 "libv4l2 converter detected, disabling DMABuf");
2936 ret = FALSE;
2937 }
2938
2939 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
2940 v4l2object->ioctl(v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
2941 if (errno == ENOTTY)
2942 ret = FALSE;
2943
2944 return ret;
2945}
2946
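/* Select the IO method (RW, MMAP or DMABUF) according to the requested mode
 * and the device capabilities, then create the buffer pool and mark the
 * object active. */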
2947static gboolean
2948gst_aml_v4l2_object_setup_pool(GstAmlV4l2Object *v4l2object, GstCaps *caps)
2949{
2950 GstAmlV4l2IOMode mode;
2951
2952 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "initializing the %s system",
2953 V4L2_TYPE_IS_OUTPUT(v4l2object->type) ? "output" : "capture");
2954
2955 GST_AML_V4L2_CHECK_OPEN(v4l2object);
2956 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
2957
2958 /* find transport */
2959 mode = v4l2object->req_mode;
2960
2961 if (v4l2object->device_caps & V4L2_CAP_READWRITE)
2962 {
2963 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2964 mode = GST_V4L2_IO_RW;
2965 }
2966 else if (v4l2object->req_mode == GST_V4L2_IO_RW)
2967 goto method_not_supported;
2968
2969 if (v4l2object->device_caps & V4L2_CAP_STREAMING)
2970 {
2971 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2972 {
2973 if (!V4L2_TYPE_IS_OUTPUT(v4l2object->type) &&
2974 gst_aml_v4l2_object_is_dmabuf_supported(v4l2object))
2975 {
2976 mode = GST_V4L2_IO_DMABUF;
2977 }
2978 else
2979 {
2980 mode = GST_V4L2_IO_MMAP;
2981 }
2982 }
2983 }
2984 else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
2985 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
2986 goto method_not_supported;
2987
2988 /* if still no transport selected, error out */
2989 if (mode == GST_V4L2_IO_AUTO)
2990 goto no_supported_capture_method;
2991
2992 GST_INFO_OBJECT(v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
2993 v4l2object->mode = mode;
2994
2995 /* If min_buffers is not set, the driver either does not support the control or
2996 it has not been asked yet via propose_allocation/decide_allocation. */
2997 if (!v4l2object->min_buffers)
2998 gst_aml_v4l2_get_driver_min_buffers(v4l2object);
2999
3000 /* Map the buffers */
3001 GST_LOG_OBJECT(v4l2object->dbg_obj, "initiating buffer pool");
3002
3003 if (!(v4l2object->pool = gst_aml_v4l2_buffer_pool_new(v4l2object, caps)))
3004 goto buffer_pool_new_failed;
3005
3006 GST_AML_V4L2_SET_ACTIVE(v4l2object);
3007
3008 return TRUE;
3009
3010 /* ERRORS */
3011buffer_pool_new_failed:
3012{
3013 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3014 (_("Could not map buffers from device '%s'"),
3015 v4l2object->videodev),
3016 ("Failed to create buffer pool: %s", g_strerror(errno)));
3017 return FALSE;
3018}
3019method_not_supported:
3020{
3021 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3022 (_("The driver of device '%s' does not support the IO method %d"),
3023 v4l2object->videodev, mode),
3024 (NULL));
3025 return FALSE;
3026}
3027no_supported_capture_method:
3028{
3029 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3030 (_("The driver of device '%s' does not support any known IO "
3031 "method."),
3032 v4l2object->videodev),
3033 (NULL));
3034 return FALSE;
3035}
3036}
3037
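/* Store @stride for @plane in @info; for tiled formats the stride is encoded
 * as a tile count using GST_VIDEO_TILE_MAKE_STRIDE. */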
3038static void
3039gst_aml_v4l2_object_set_stride(GstVideoInfo *info, GstVideoAlignment *align,
3040 gint plane, gint stride)
3041{
3042 const GstVideoFormatInfo *finfo = info->finfo;
3043
3044 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3045 {
3046 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3047
3048 ws = GST_VIDEO_FORMAT_INFO_TILE_WS(finfo);
3049 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3050 tile_height = 1 << hs;
3051
3052 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, plane,
3053 info->height + align->padding_top + align->padding_bottom);
3054 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3055
3056 x_tiles = stride >> ws;
3057 y_tiles = padded_height >> hs;
3058 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles);
3059 }
3060 else
3061 {
3062 info->stride[plane] = stride;
3063 }
3064}
3065
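/* Given the stride of the first plane of a single v4l2 plane buffer,
 * extrapolate the per-plane strides and offsets, and clamp info->size to the
 * amount of data that will actually be read or written. */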
3066static void
3067gst_aml_v4l2_object_extrapolate_info(GstAmlV4l2Object *v4l2object,
3068 GstVideoInfo *info, GstVideoAlignment *align, gint stride)
3069{
3070 const GstVideoFormatInfo *finfo = info->finfo;
3071 gint i, estride, padded_height;
3072 gsize offs = 0;
3073
3074 g_return_if_fail(v4l2object->n_v4l2_planes == 1);
3075
3076 padded_height = info->height + align->padding_top + align->padding_bottom;
3077
3078 for (i = 0; i < finfo->n_planes; i++)
3079 {
3080 estride = gst_aml_v4l2_object_extrapolate_stride(finfo, i, stride);
3081
3082 gst_aml_v4l2_object_set_stride(info, align, i, estride);
3083
3084 info->offset[i] = offs;
3085 offs += estride *
3086 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, padded_height);
3087
3088 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3089 "Extrapolated for plane %d with base stride %d: "
3090 "stride %d, offset %" G_GSIZE_FORMAT,
3091 i, stride, info->stride[i],
3092 info->offset[i]);
3093 }
3094
3095 /* Update the image size according to the amount of data we are going to
3096 * read/write. This works around bugs in drivers where the sizeimage provided
3097 * by TRY/S_FMT represents the buffer length (maximum size) rather than the
3098 * expected bytesused (buffer size). */
3099 if (offs < info->size)
3100 info->size = offs;
3101}
3102
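/* Record the negotiated format: derive padding, strides, offsets and size
 * from the v4l2 format, decide whether GstVideoMeta is required, and cache
 * everything on the object together with the pre-computed frame duration. */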
3103static void
3104gst_aml_v4l2_object_save_format(GstAmlV4l2Object *v4l2object,
3105 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3106 GstVideoInfo *info, GstVideoAlignment *align)
3107{
3108 const GstVideoFormatInfo *finfo = info->finfo;
3109 gboolean standard_stride = TRUE;
3110 gint stride, pstride, padded_width, padded_height, i;
3111
3112 if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_ENCODED)
3113 {
3114 v4l2object->n_v4l2_planes = 1;
3115 info->size = format->fmt.pix.sizeimage;
3116 goto store_info;
3117 }
3118
3119 /* adjust right padding */
3120 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3121 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3122 else
3123 stride = format->fmt.pix.bytesperline;
3124
3125 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE(finfo, 0);
3126 if (pstride)
3127 {
3128 padded_width = stride / pstride;
3129 }
3130 else
3131 {
3132 /* pstride can be 0 for complex formats */
3133 GST_WARNING_OBJECT(v4l2object->element,
3134 "format %s has a pstride of 0, cannot compute padded with",
3135 gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(info)));
3136 padded_width = stride;
3137 }
3138
3139 if (padded_width < format->fmt.pix.width)
3140 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3141 "Driver bug detected, stride (%d) is too small for the width (%d)",
3142 padded_width, format->fmt.pix.width);
3143
3144 align->padding_right = padded_width - info->width - align->padding_left;
3145
3146 /* adjust bottom padding */
3147 padded_height = format->fmt.pix.height;
3148
3149 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3150 {
3151 guint hs, tile_height;
3152
3153 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3154 tile_height = 1 << hs;
3155
3156 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3157 }
3158
3159 align->padding_bottom = padded_height - info->height - align->padding_top;
3160
3161 /* setup the strides and offset */
3162 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3163 {
3164 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3165
3166 /* figure out the frame layout */
3167 v4l2object->n_v4l2_planes = MAX(1, pix_mp->num_planes);
3168 info->size = 0;
3169 for (i = 0; i < v4l2object->n_v4l2_planes; i++)
3170 {
3171 stride = pix_mp->plane_fmt[i].bytesperline;
3172
3173 if (info->stride[i] != stride)
3174 standard_stride = FALSE;
3175
3176 gst_aml_v4l2_object_set_stride(info, align, i, stride);
3177 info->offset[i] = info->size;
3178 info->size += pix_mp->plane_fmt[i].sizeimage;
3179 }
3180
3181 /* Extrapolate strides if a planar format is being set in a single v4l2 plane */
3182 if (v4l2object->n_v4l2_planes < finfo->n_planes)
3183 {
3184 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3185 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3186 }
3187 }
3188 else
3189 {
3190 /* only one plane in non-MPLANE mode */
3191 v4l2object->n_v4l2_planes = 1;
3192 info->size = format->fmt.pix.sizeimage;
3193 stride = format->fmt.pix.bytesperline;
3194
3195 if (info->stride[0] != stride)
3196 standard_stride = FALSE;
3197
3198 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3199 }
3200
3201 /* adjust the offset to take into account left and top */
3202 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3203 {
3204 if ((align->padding_left + align->padding_top) > 0)
3205 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3206 "Left and top padding is not permitted for tiled formats");
3207 }
3208 else
3209 {
3210 for (i = 0; i < finfo->n_planes; i++)
3211 {
3212 gint vedge, hedge;
3213
3214 /* FIXME we assume one plane per component, which is true for all the
3215 * formats we currently support. */
3216
3217 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, i, align->padding_left);
3218 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, align->padding_top);
3219
3220 info->offset[i] += (vedge * info->stride[i]) +
3221 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE(info, i));
3222 }
3223 }
3224
3225store_info:
3226 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3227 info->size);
3228
3229 /* to avoid copies we need video meta if there is padding */
3230 v4l2object->need_video_meta =
3231 ((align->padding_top + align->padding_left + align->padding_right +
3232 align->padding_bottom) != 0);
3233
3234 /* ... or if stride is non "standard" */
3235 if (!standard_stride)
3236 v4l2object->need_video_meta = TRUE;
3237
3238 /* ... or also video meta if we use multiple, non-contiguous, planes */
3239 if (v4l2object->n_v4l2_planes > 1)
3240 v4l2object->need_video_meta = TRUE;
3241
3242 v4l2object->info = *info;
3243 v4l2object->align = *align;
3244 v4l2object->format = *format;
3245 v4l2object->fmtdesc = fmtdesc;
3246
3247 /* if we have a framerate pre-calculate duration */
3248 if (info->fps_n > 0 && info->fps_d > 0)
3249 {
3250 v4l2object->duration = gst_util_uint64_scale_int(GST_SECOND, info->fps_d,
3251 info->fps_n);
3252 }
3253 else
3254 {
3255 v4l2object->duration = GST_CLOCK_TIME_NONE;
3256 }
3257}
3258
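/* Extrapolate the stride of @plane from the stride of the first plane; for
 * NV12-style semi-planar formats the interleaved chroma plane needs twice
 * the scaled width. */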
3259gint gst_aml_v4l2_object_extrapolate_stride(const GstVideoFormatInfo *finfo,
3260 gint plane, gint stride)
3261{
3262 gint estride;
3263
3264 switch (finfo->format)
3265 {
3266 case GST_VIDEO_FORMAT_NV12:
3267 case GST_VIDEO_FORMAT_NV12_64Z32:
3268 case GST_VIDEO_FORMAT_NV21:
3269 case GST_VIDEO_FORMAT_NV16:
3270 case GST_VIDEO_FORMAT_NV61:
3271 case GST_VIDEO_FORMAT_NV24:
3272 estride = (plane == 0 ? 1 : 2) *
3273 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3274 break;
3275 default:
3276 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3277 break;
3278 }
3279
3280 return estride;
3281}
3282
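/* Return TRUE if the colorimetry string @color matches @cinfo, also
 * accepting the 1:4:0:0 colorimetry produced by jpegdec when the device
 * reports full JPEG colorimetry (1:4:7:1). */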
3283static gboolean
3284gst_aml_v4l2_video_colorimetry_matches(const GstVideoColorimetry *cinfo,
3285 const gchar *color)
3286{
3287 GstVideoColorimetry ci;
3288 static const GstVideoColorimetry ci_likely_jpeg = {
3289 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3290 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN};
3291 static const GstVideoColorimetry ci_jpeg = {
3292 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3293 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709};
3294
3295 if (!gst_video_colorimetry_from_string(&ci, color))
3296 return FALSE;
3297
3298 if (gst_video_colorimetry_is_equal(&ci, cinfo))
3299 return TRUE;
3300
3301 /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
3302 if (gst_video_colorimetry_is_equal(&ci, &ci_likely_jpeg) && gst_video_colorimetry_is_equal(cinfo, &ci_jpeg))
3303 return TRUE;
3304
3305 return FALSE;
3306}
3307
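/* Fill the Amlogic decoder specific parameters (double-write mode, HDR
 * signal type and mastering display info) from the environment and the caps,
 * and push them to the driver with VIDIOC_S_PARM. The aml_dec_params layout
 * and its bit fields are defined by the Amlogic vdec driver. */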
3308static void
xuesong.jiange1a19662022-06-21 20:30:22 +08003309set_amlogic_vdec_parm(GstAmlV4l2Object *v4l2object, struct v4l2_streamparm *streamparm, GstCaps *caps)
xuesong.jiangae1548e2022-05-06 16:38:46 +08003310{
3311 struct aml_dec_params *decParm = (struct aml_dec_params *)streamparm->parm.raw_data;
3312 const char *env;
3313
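    /* Amlogic-specific: bit 13 of metadata_config_flag selects behaviour
     * defined by the vdec driver's aml_dec_params interface. */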
3314 decParm->cfg.metadata_config_flag = 1 << 13;
3315
3316 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3317 {
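        /* Optionally override the decoder double-write (DW) mode from the
         * environment; only the values handled below (0-4, 16) are accepted. */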
3318 env = getenv("V4L2_SET_AMLOGIC_DW_MODE");
3319 if (env)
3320 {
3321 int dwMode = atoi(env);
3322 switch (dwMode)
3323 {
3324 case 0:
3325 case 1:
3326 case 2:
3327 case 3:
3328 case 4:
3329 case 16:
3330 decParm->cfg.double_write_mode = dwMode;
3331 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3332 break;
3333 }
3334 }
3335
3336 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_PARM, streamparm) < 0)
3337 {
3338 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3339 }
3340 else
3341 {
3342 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Set dwMode to %d", decParm->cfg.double_write_mode);
3343 }
xuesong.jiange1a19662022-06-21 20:30:22 +08003344
3345 GstStructure *structure= gst_caps_get_structure(caps, 0);
3346 if (structure == NULL)
3347 {
3348 return;
3349 }
3350 if ( gst_structure_has_field(structure, "colorimetry") )
3351 {
3352 const char *colorimetry= gst_structure_get_string(structure,"colorimetry");
3353 GstVideoColorimetry vci = {0};
3354 if ( colorimetry && gst_video_colorimetry_from_string( &vci, colorimetry ))
3355 {
3356 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_HDRINFO;
3357 decParm->hdr.signal_type= (1<<29); /* present flag */
3358 /* set default values to stay in sync with the driver's HDR info handling */
3359 decParm->hdr.signal_type |= (5<<26) | (1<<24);
3360
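            /* The switches below pack the caps colorimetry (range, matrix
             * coefficients, transfer characteristics, colour primaries) into
             * the bitstream-style signal_type word expected by the driver. */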
3361 gint hdrColorimetry[4] = {0};
3362 hdrColorimetry[0]= (int)vci.range;
3363 hdrColorimetry[1]= (int)vci.matrix;
3364 hdrColorimetry[2]= (int)vci.transfer;
3365 hdrColorimetry[3]= (int)vci.primaries;
3366 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "colorimetry: [%d,%d,%d,%d]",
3367 hdrColorimetry[0],
3368 hdrColorimetry[1],
3369 hdrColorimetry[2],
3370 hdrColorimetry[3] );
3371 /* range */
3372 switch ( hdrColorimetry[0] )
3373 {
3374 case 1:
3375 case 2:
3376 decParm->hdr.signal_type |= ((hdrColorimetry[0] % 2)<<25);
3377 break;
3378 default:
3379 break;
3380 }
3381 /* matrix coefficient */
3382 switch ( hdrColorimetry[1] )
3383 {
3384 case 1: /* RGB */
3385 decParm->hdr.signal_type |= 0;
3386 break;
3387 case 2: /* FCC */
3388 decParm->hdr.signal_type |= 4;
3389 break;
3390 case 3: /* BT709 */
3391 decParm->hdr.signal_type |= 1;
3392 break;
3393 case 4: /* BT601 */
3394 decParm->hdr.signal_type |= 3;
3395 break;
3396 case 5: /* SMPTE240M */
3397 decParm->hdr.signal_type |= 7;
3398 break;
3399 case 6: /* BT2020 */
3400 decParm->hdr.signal_type |= 9;
3401 break;
3402 default: /* unknown */
3403 decParm->hdr.signal_type |= 2;
3404 break;
3405 }
3406 /* transfer function */
3407 switch ( hdrColorimetry[2] )
3408 {
3409 case 5: /* BT709 */
3410 decParm->hdr.signal_type |= (1<<8);
3411 break;
3412 case 6: /* SMPTE240M */
3413 decParm->hdr.signal_type |= (7<<8);
3414 break;
3415 case 9: /* LOG100 */
3416 decParm->hdr.signal_type |= (9<<8);
3417 break;
3418 case 10: /* LOG316 */
3419 decParm->hdr.signal_type |= (10<<8);
3420 break;
3421 case 12: /* BT2020_12 */
3422 decParm->hdr.signal_type |= (15<<8);
3423 break;
3424 case 11: /* BT2020_10 */
3425 decParm->hdr.signal_type |= (14<<8);
3426 break;
3427 case 13: /* SMPTE2084 */
3428 decParm->hdr.signal_type |= (16<<8);
3429 break;
3430 case 14: /* ARIB_STD_B67 */
3431 decParm->hdr.signal_type |= (18<<8);
3432 break;
3433 #if ((GST_VERSION_MAJOR == 1) && (GST_VERSION_MINOR >= 18))
3434 case 16: /* BT601 */
3435 decParm->hdr.signal_type |= (3<<8);
3436 break;
3437 #endif
3438 case 1: /* GAMMA10 */
3439 case 2: /* GAMMA18 */
3440 case 3: /* GAMMA20 */
3441 case 4: /* GAMMA22 */
3442 case 7: /* SRGB */
3443 case 8: /* GAMMA28 */
3444 case 15: /* ADOBERGB */
3445 default:
3446 break;
3447 }
3448 /* primaries */
3449 switch ( hdrColorimetry[3] )
3450 {
3451 case 1: /* BT709 */
3452 decParm->hdr.signal_type |= ((1<<24)|(1<<16));
3453 break;
3454 case 2: /* BT470M */
3455 decParm->hdr.signal_type |= ((1<<24)|(4<<16));
3456 break;
3457 case 3: /* BT470BG */
3458 decParm->hdr.signal_type |= ((1<<24)|(5<<16));
3459 break;
3460 case 4: /* SMPTE170M */
3461 decParm->hdr.signal_type |= ((1<<24)|(6<<16));
3462 break;
3463 case 5: /* SMPTE240M */
3464 decParm->hdr.signal_type |= ((1<<24)|(7<<16));
3465 break;
3466 case 6: /* FILM */
3467 decParm->hdr.signal_type |= ((1<<24)|(8<<16));
3468 break;
3469 case 7: /* BT2020 */
3470 decParm->hdr.signal_type |= ((1<<24)|(9<<16));
3471 break;
3472 case 8: /* ADOBERGB */
3473 default:
3474 break;
3475 }
3476 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR signal_type %X", decParm->hdr.signal_type);
3477 }
3478
3479 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "got caps %" GST_PTR_FORMAT, caps);
3480 GstStructure *st = gst_caps_get_structure(caps, 0);
3481 GstCapsFeatures *features = gst_caps_get_features(caps, 0);
3482
3483 if (gst_structure_has_field(st, "colorimetry"))
3484 {
3485 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "have colorimetry");
3486 }
3487
3488 if (st && features)
3489 {
3490 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "trace in remove colorimetry");
3491 gst_structure_remove_field(st, "colorimetry");
3492 gst_caps_features_remove(features, "colorimetry");
3493 }
3494 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove colorimetry %" GST_PTR_FORMAT, caps);
3495 }
3496
3497 if ( gst_structure_has_field(structure, "mastering-display-metadata") )
3498 {
3499 const char *masteringDisplay= gst_structure_get_string(structure,"mastering-display-metadata");
3500 float hdrMasteringDisplay[10];
3501 if ( masteringDisplay && sscanf( masteringDisplay, "%f:%f:%f:%f:%f:%f:%f:%f:%f:%f",
3502 &hdrMasteringDisplay[0],
3503 &hdrMasteringDisplay[1],
3504 &hdrMasteringDisplay[2],
3505 &hdrMasteringDisplay[3],
3506 &hdrMasteringDisplay[4],
3507 &hdrMasteringDisplay[5],
3508 &hdrMasteringDisplay[6],
3509 &hdrMasteringDisplay[7],
3510 &hdrMasteringDisplay[8],
3511 &hdrMasteringDisplay[9] ) == 10 )
3512 {
3513 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "mastering display [%f,%f,%f,%f,%f,%f,%f,%f,%f,%f]",
3514 hdrMasteringDisplay[0],
3515 hdrMasteringDisplay[1],
3516 hdrMasteringDisplay[2],
3517 hdrMasteringDisplay[3],
3518 hdrMasteringDisplay[4],
3519 hdrMasteringDisplay[5],
3520 hdrMasteringDisplay[6],
3521 hdrMasteringDisplay[7],
3522 hdrMasteringDisplay[8],
3523 hdrMasteringDisplay[9] );
3524
3525 decParm->hdr.color_parms.present_flag= 1;
3526 decParm->hdr.color_parms.primaries[2][0]= (uint32_t)(hdrMasteringDisplay[0]*50000); /* R.x */
3527 decParm->hdr.color_parms.primaries[2][1]= (uint32_t)(hdrMasteringDisplay[1]*50000); /* R.y */
3528 decParm->hdr.color_parms.primaries[0][0]= (uint32_t)(hdrMasteringDisplay[2]*50000); /* G.x */
3529 decParm->hdr.color_parms.primaries[0][1]= (uint32_t)(hdrMasteringDisplay[3]*50000); /* G.y */
3530 decParm->hdr.color_parms.primaries[1][0]= (uint32_t)(hdrMasteringDisplay[4]*50000); /* B.x */
3531 decParm->hdr.color_parms.primaries[1][1]= (uint32_t)(hdrMasteringDisplay[5]*50000); /* B.y */
3532 decParm->hdr.color_parms.white_point[0]= (uint32_t)(hdrMasteringDisplay[6]*50000);
3533 decParm->hdr.color_parms.white_point[1]= (uint32_t)(hdrMasteringDisplay[7]*50000);
3534 decParm->hdr.color_parms.luminance[0]= (uint32_t)(hdrMasteringDisplay[8]);
3535 decParm->hdr.color_parms.luminance[1]= (uint32_t)(hdrMasteringDisplay[9]);
3536 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: primaries %X %X %X %X %X %X",
3537 decParm->hdr.color_parms.primaries[2][0],
3538 decParm->hdr.color_parms.primaries[2][1],
3539 decParm->hdr.color_parms.primaries[0][0],
3540 decParm->hdr.color_parms.primaries[0][1],
3541 decParm->hdr.color_parms.primaries[1][0],
3542 decParm->hdr.color_parms.primaries[1][1] );
3543 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: white point: %X %X",
3544 decParm->hdr.color_parms.white_point[0],
3545 decParm->hdr.color_parms.white_point[1] );
3546 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: luminance: %X %X",
3547 decParm->hdr.color_parms.luminance[0],
3548 decParm->hdr.color_parms.luminance[1] );
3549 }
3550
3551 GstStructure *st = gst_caps_get_structure(caps, 0);
3552 GstCapsFeatures * features = gst_caps_get_features(caps, 0);
3553 if (st && features)
3554 {
3555 gst_structure_remove_fields(st, "mastering-display-metadata", NULL);
3556 gst_caps_features_remove(features, "mastering-display-metadata");
3557 }
3558 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove mastering-display-metadata %" GST_PTR_FORMAT, caps);
3559 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003560 }
3561}
3562
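/* Negotiate @caps with the device: translate the GStreamer video info and
 * colorimetry into a v4l2_format and the matching Amlogic decoder parameters;
 * with @try_only the format is only probed, not programmed. */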
3563static gboolean
3564gst_aml_v4l2_object_set_format_full(GstAmlV4l2Object *v4l2object, GstCaps *caps,
3565 gboolean try_only, GstAmlV4l2Error *error)
3566{
3567 gint fd = v4l2object->video_fd;
3568 struct v4l2_format format;
3569 struct v4l2_streamparm streamparm;
3570 enum v4l2_field field;
3571 guint32 pixelformat;
3572 struct v4l2_fmtdesc *fmtdesc;
3573 GstVideoInfo info;
3574 GstVideoAlignment align;
3575 gint width, height, fps_n, fps_d;
3576 gint n_v4l_planes;
3577 gint i = 0;
3578 gboolean is_mplane;
3579 enum v4l2_colorspace colorspace = 0;
3580 enum v4l2_quantization range = 0;
3581 enum v4l2_ycbcr_encoding matrix = 0;
3582 enum v4l2_xfer_func transfer = 0;
3583 GstStructure *s;
3584 gboolean disable_colorimetry = FALSE;
3585
3586 g_return_val_if_fail(!v4l2object->skip_try_fmt_probes ||
3587 gst_caps_is_writable(caps),
3588 FALSE);
3589
3590 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3591 if (!try_only)
3592 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3593
3594 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3595 streamparm.type = v4l2object->type;
xuesong.jiange1a19662022-06-21 20:30:22 +08003596 set_amlogic_vdec_parm(v4l2object, &streamparm, caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08003597
3598 is_mplane = V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type);
3599
3600 gst_video_info_init(&info);
3601 gst_video_alignment_reset(&align);
3602
3603 if (!gst_aml_v4l2_object_get_caps_info(v4l2object, caps, &fmtdesc, &info))
3604 goto invalid_caps;
3605
3606 pixelformat = fmtdesc->pixelformat;
3607 width = GST_VIDEO_INFO_WIDTH(&info);
3608 height = GST_VIDEO_INFO_HEIGHT(&info);
3609 fps_n = GST_VIDEO_INFO_FPS_N(&info);
3610 fps_d = GST_VIDEO_INFO_FPS_D(&info);
3611
3612 /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
3613 * or if contiguous planes are preferred */
3614 n_v4l_planes = GST_VIDEO_INFO_N_PLANES(&info);
3615 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3616 n_v4l_planes = 1;
3617
3618 if (GST_VIDEO_INFO_IS_INTERLACED(&info))
3619 {
3620 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "interlaced video");
3621 /* ideally we would differentiate between types of interlaced video
3622 * but there is not sufficient information in the caps..
3623 */
3624 field = V4L2_FIELD_INTERLACED;
3625 }
3626 else
3627 {
3628 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "progressive video");
3629 field = V4L2_FIELD_NONE;
3630 }
3631
3632 /* We first pick the main colorspace from the primaries */
3633 switch (info.colorimetry.primaries)
3634 {
3635 case GST_VIDEO_COLOR_PRIMARIES_BT709:
3636 /* There are two colorspaces using these primaries; use the range to
3637 * differentiate */
3638 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3639 colorspace = V4L2_COLORSPACE_REC709;
3640 else
3641 colorspace = V4L2_COLORSPACE_SRGB;
3642 break;
3643 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3644 colorspace = V4L2_COLORSPACE_BT2020;
3645 break;
3646 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3647 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3648 break;
3649 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3650 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3651 break;
3652 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3653 colorspace = V4L2_COLORSPACE_SMPTE170M;
3654 break;
3655 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3656 colorspace = V4L2_COLORSPACE_SMPTE240M;
3657 break;
3658
3659 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3660 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3661 /* We don't know, we will guess */
3662 break;
3663
3664 default:
3665 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3666 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3667 break;
3668 }
3669
3670 switch (info.colorimetry.range)
3671 {
3672 case GST_VIDEO_COLOR_RANGE_0_255:
3673 range = V4L2_QUANTIZATION_FULL_RANGE;
3674 break;
3675 case GST_VIDEO_COLOR_RANGE_16_235:
3676 range = V4L2_QUANTIZATION_LIM_RANGE;
3677 break;
3678 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3679 /* We let the driver pick a default one */
3680 break;
3681 default:
3682 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3683 "Unknown colorimetry range %d", info.colorimetry.range);
3684 break;
3685 }
3686
3687 switch (info.colorimetry.matrix)
3688 {
3689 case GST_VIDEO_COLOR_MATRIX_RGB:
3690 /* Unspecified, leave to default */
3691 break;
3692 /* FCC is about the same as BT601 with less digit */
3693 case GST_VIDEO_COLOR_MATRIX_FCC:
3694 case GST_VIDEO_COLOR_MATRIX_BT601:
3695 matrix = V4L2_YCBCR_ENC_601;
3696 break;
3697 case GST_VIDEO_COLOR_MATRIX_BT709:
3698 matrix = V4L2_YCBCR_ENC_709;
3699 break;
3700 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3701 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3702 break;
3703 case GST_VIDEO_COLOR_MATRIX_BT2020:
3704 matrix = V4L2_YCBCR_ENC_BT2020;
3705 break;
3706 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3707 /* We let the driver pick a default one */
3708 break;
3709 default:
3710 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3711 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3712 break;
3713 }
3714
3715 switch (info.colorimetry.transfer)
3716 {
3717 case GST_VIDEO_TRANSFER_GAMMA18:
3718 case GST_VIDEO_TRANSFER_GAMMA20:
3719 case GST_VIDEO_TRANSFER_GAMMA22:
3720 case GST_VIDEO_TRANSFER_GAMMA28:
3721 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3722 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3723 /* fallthrough */
3724 case GST_VIDEO_TRANSFER_GAMMA10:
3725 transfer = V4L2_XFER_FUNC_NONE;
3726 break;
3727 case GST_VIDEO_TRANSFER_BT2020_12:
3728 case GST_VIDEO_TRANSFER_BT709:
3729 transfer = V4L2_XFER_FUNC_709;
3730 break;
3731 case GST_VIDEO_TRANSFER_SMPTE240M:
3732 transfer = V4L2_XFER_FUNC_SMPTE240M;
3733 break;
3734 case GST_VIDEO_TRANSFER_SRGB:
3735 transfer = V4L2_XFER_FUNC_SRGB;
3736 break;
3737 case GST_VIDEO_TRANSFER_LOG100:
3738 case GST_VIDEO_TRANSFER_LOG316:
3739 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3740 "LOG 100, 316 transfer functions not supported");
3741 /* FIXME No known sensible default, maybe AdobeRGB ? */
3742 break;
3743 case GST_VIDEO_TRANSFER_UNKNOWN:
3744 /* We let the driver pick a default one */
3745 break;
3746 default:
3747 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3748 "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
3749 break;
3750 }
3751
3752 if (colorspace == 0)
3753 {
3754 /* Try to guess colorspace according to pixelformat and size */
3755 if (GST_VIDEO_INFO_IS_YUV(&info))
3756 {
3757 if (range == V4L2_QUANTIZATION_FULL_RANGE && matrix == V4L2_YCBCR_ENC_601 && transfer == 0)
3758 {
3759 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3760 * function most likely is JPEG */
3761 colorspace = V4L2_COLORSPACE_JPEG;
3762 transfer = V4L2_XFER_FUNC_SRGB;
3763 }
3764 else
3765 {
3766 /* SD streams likely use SMPTE170M and HD streams REC709 */
3767 if (width <= 720 && height <= 576)
3768 colorspace = V4L2_COLORSPACE_SMPTE170M;
3769 else
3770 colorspace = V4L2_COLORSPACE_REC709;
3771 }
3772 }
3773 else if (GST_VIDEO_INFO_IS_RGB(&info))
3774 {
3775 colorspace = V4L2_COLORSPACE_SRGB;
3776 transfer = V4L2_XFER_FUNC_NONE;
3777 }
3778 }
3779
3780 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format %dx%d, format "
3781 "%" GST_FOURCC_FORMAT " stride: %d",
3782 width, height,
3783 GST_FOURCC_ARGS(pixelformat), GST_VIDEO_INFO_PLANE_STRIDE(&info, 0));
3784
3785 memset(&format, 0x00, sizeof(struct v4l2_format));
3786 format.type = v4l2object->type;
3787
3788 if (is_mplane)
3789 {
3790 format.type = v4l2object->type;
3791 format.fmt.pix_mp.pixelformat = pixelformat;
3792 format.fmt.pix_mp.width = width;
3793 format.fmt.pix_mp.height = height;
3794 format.fmt.pix_mp.field = field;
3795 format.fmt.pix_mp.num_planes = n_v4l_planes;
3796
3797 /* try to ask for our preferred stride, but it's not a failure if it is not
3798 * accepted */
3799 for (i = 0; i < n_v4l_planes; i++)
3800 {
3801 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, i);
3802
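/* Note: for tiled formats GStreamer packs the stride as a tile count, so the
 * value from GST_VIDEO_INFO_PLANE_STRIDE() is converted below back to a byte
 * stride (x-tiles shifted by the tile width) before being handed to V4L2. */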
3803 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3804 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3805
3806 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
3807 }
3808
3809 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3810 {
3811 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3812 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
3813 else
3814 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3815 }
3816 }
3817 else
3818 {
3819 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
3820
3821 format.type = v4l2object->type;
3822
3823 format.fmt.pix.width = width;
3824 format.fmt.pix.height = height;
3825 format.fmt.pix.pixelformat = pixelformat;
3826 format.fmt.pix.field = field;
3827
3828 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3829 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3830
3831 /* try to ask for our preferred stride */
3832 format.fmt.pix.bytesperline = stride;
3833
3834 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3835 {
3836 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3837 format.fmt.pix.sizeimage = 1;
3838 else
3839 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
3840 }
3841 }
3842
3843 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format is %dx%d, format "
3844 "%" GST_FOURCC_FORMAT ", nb planes %d",
3845 format.fmt.pix.width,
3846 format.fmt.pix_mp.height,
3847 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3848 is_mplane ? format.fmt.pix_mp.num_planes : 1);
3849
3850#ifndef GST_DISABLE_GST_DEBUG
3851 if (is_mplane)
3852 {
3853 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3854 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3855 format.fmt.pix_mp.plane_fmt[i].bytesperline);
3856 }
3857 else
3858 {
3859 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3860 format.fmt.pix.bytesperline);
3861 }
3862#endif
3863
3864 if (is_mplane)
3865 {
3866 format.fmt.pix_mp.colorspace = colorspace;
3867 format.fmt.pix_mp.quantization = range;
3868 format.fmt.pix_mp.ycbcr_enc = matrix;
3869 format.fmt.pix_mp.xfer_func = transfer;
3870 }
3871 else
3872 {
3873 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
3874 format.fmt.pix.colorspace = colorspace;
3875 format.fmt.pix.quantization = range;
3876 format.fmt.pix.ycbcr_enc = matrix;
3877 format.fmt.pix.xfer_func = transfer;
3878 }
3879
3880 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
3881 colorspace, range, matrix, transfer);
3882
3883 if (try_only)
3884 {
3885 if (v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &format) < 0)
3886 goto try_fmt_failed;
3887 }
3888 else
3889 {
3890 if (v4l2object->ioctl(fd, VIDIOC_S_FMT, &format) < 0)
3891 goto set_fmt_failed;
3892 }
3893
3894 if (is_mplane)
3895 {
3896 colorspace = format.fmt.pix_mp.colorspace;
3897 range = format.fmt.pix_mp.quantization;
3898 matrix = format.fmt.pix_mp.ycbcr_enc;
3899 transfer = format.fmt.pix_mp.xfer_func;
3900 }
3901 else
3902 {
3903 colorspace = format.fmt.pix.colorspace;
3904 range = format.fmt.pix.quantization;
3905 matrix = format.fmt.pix.ycbcr_enc;
3906 transfer = format.fmt.pix.xfer_func;
3907 }
3908
3909 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got format of %dx%d, format "
3910 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
3911 format.fmt.pix.width, format.fmt.pix_mp.height,
3912 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3913 is_mplane ? format.fmt.pix_mp.num_planes : 1,
3914 colorspace, range, matrix, transfer);
3915
3916#ifndef GST_DISABLE_GST_DEBUG
3917 if (is_mplane)
3918 {
3919 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3920 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3921 format.fmt.pix_mp.plane_fmt[i].bytesperline,
3922 format.fmt.pix_mp.plane_fmt[i].sizeimage);
3923 }
3924 else
3925 {
3926 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3927 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
3928 }
3929#endif
3930
3931 if (format.fmt.pix.pixelformat != pixelformat)
3932 goto invalid_pixelformat;
3933
3934 /* Only negotiate size with raw data.
3935 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
3936 * in ASF mode for example; there is also no reason for a driver to
3937 * change the size. */
3938 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED)
3939 {
3940 /* We can crop larger images */
3941 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
3942 goto invalid_dimensions;
3943
3944 /* Note, this will be adjusted if upstream has non-centered cropping. */
3945 align.padding_top = 0;
3946 align.padding_bottom = format.fmt.pix.height - height;
3947 align.padding_left = 0;
3948 align.padding_right = format.fmt.pix.width - width;
3949 }
3950
3951 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
3952 goto invalid_planes;
3953
3954 /* used to check colorimetry and interlace mode fields presence */
3955 s = gst_caps_get_structure(caps, 0);
3956
3957 if (!gst_aml_v4l2_object_get_interlace_mode(format.fmt.pix.field,
3958 &info.interlace_mode))
3959 goto invalid_field;
3960 if (gst_structure_has_field(s, "interlace-mode"))
3961 {
3962 if (format.fmt.pix.field != field)
3963 goto invalid_field;
3964 }
3965
3966 if (gst_aml_v4l2_object_get_colorspace(&format, &info.colorimetry))
3967 {
3968 if (gst_structure_has_field(s, "colorimetry"))
3969 {
xuesong.jiange1a19662022-06-21 20:30:22 +08003970 if (!gst_aml_v4l2_video_colorimetry_matches(&info.colorimetry, gst_structure_get_string(s, "colorimetry")))
3971 {
3972 // goto invalid_colorimetry; /* colorimetry mismatches are tolerated here */
3973 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003974 }
3975 }
3976 else
3977 {
3978 /* The driver (or libv4l2) is misbehaving; just ignore colorimetry from
3979 * the TRY_FMT */
3980 disable_colorimetry = TRUE;
3981 if (gst_structure_has_field(s, "colorimetry"))
3982 gst_structure_remove_field(s, "colorimetry");
3983 }
3984
3985 /* In case we have skipped the try_fmt probes, we'll need to set the
3986 * colorimetry and interlace-mode back into the caps. */
3987 if (v4l2object->skip_try_fmt_probes)
3988 {
3989 if (!disable_colorimetry && !gst_structure_has_field(s, "colorimetry"))
3990 {
3991 gchar *str = gst_video_colorimetry_to_string(&info.colorimetry);
3992 gst_structure_set(s, "colorimetry", G_TYPE_STRING, str, NULL);
3993 g_free(str);
3994 }
3995
3996 if (!gst_structure_has_field(s, "interlace-mode"))
3997 gst_structure_set(s, "interlace-mode", G_TYPE_STRING,
3998 gst_video_interlace_mode_to_string(info.interlace_mode), NULL);
3999 }
4000
4001 if (try_only) /* good enough for trying only */
4002 return TRUE;
4003
4004 if (GST_VIDEO_INFO_HAS_ALPHA(&info))
4005 {
4006 struct v4l2_control ctl = {
4007 0,
4008 };
4009 ctl.id = V4L2_CID_ALPHA_COMPONENT;
4010 ctl.value = 0xff;
4011
4012 if (v4l2object->ioctl(fd, VIDIOC_S_CTRL, &ctl) < 0)
4013 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4014 "Failed to set alpha component value");
4015 }
4016
4017 /* Is there a reason we require the caller to always specify a framerate? */
4018 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
4019 fps_d);
4020
4021 if (v4l2object->ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
4022 goto get_parm_failed;
4023
4024 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
4025 {
4026 GST_VIDEO_INFO_FPS_N(&info) =
4027 streamparm.parm.capture.timeperframe.denominator;
4028 GST_VIDEO_INFO_FPS_D(&info) =
4029 streamparm.parm.capture.timeperframe.numerator;
4030
4031 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got capture framerate: %u/%u",
4032 streamparm.parm.capture.timeperframe.denominator,
4033 streamparm.parm.capture.timeperframe.numerator);
4034
4035 /* We used to skip frame rate setup if the camera was already set up
4036 * with the requested frame rate. This breaks some cameras though,
4037 * causing them to not output data (several models of Thinkpad cameras
4038 * have this problem at least).
4039 * So, don't skip. */
4040 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
4041 fps_n, fps_d);
4042 /* We want to change the frame rate, so check whether we can. Some cheap USB
4043 * cameras don't have the capability */
4044 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4045 {
4046 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4047 "Not setting capture framerate (not supported)");
4048 goto done;
4049 }
4050
4051 /* Note: V4L2 wants the frame interval, we have the frame rate */
4052 streamparm.parm.capture.timeperframe.numerator = fps_d;
4053 streamparm.parm.capture.timeperframe.denominator = fps_n;
4054
4055 /* some cheap USB cams won't accept any change */
4056 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4057 goto set_parm_failed;
4058
4059 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
4060 streamparm.parm.capture.timeperframe.denominator > 0)
4061 {
4062 /* get new values */
4063 fps_d = streamparm.parm.capture.timeperframe.numerator;
4064 fps_n = streamparm.parm.capture.timeperframe.denominator;
4065
4066 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set capture framerate to %u/%u",
4067 fps_n, fps_d);
4068 }
4069 else
4070 {
4071 /* fix v4l2 capture driver to provide framerate values */
4072 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4073 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
4074 }
4075
4076 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4077 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4078 }
4079 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
4080 {
4081 GST_VIDEO_INFO_FPS_N(&info) =
4082 streamparm.parm.output.timeperframe.denominator;
4083 GST_VIDEO_INFO_FPS_D(&info) =
4084 streamparm.parm.output.timeperframe.numerator;
4085
4086 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got output framerate: %u/%u",
4087 streamparm.parm.output.timeperframe.denominator,
4088 streamparm.parm.output.timeperframe.numerator);
4089
4090 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting output framerate to %u/%u",
4091 fps_n, fps_d);
4092 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4093 {
4094 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4095 "Not setting output framerate (not supported)");
4096 goto done;
4097 }
4098
4099 /* Note: V4L2 wants the frame interval, we have the frame rate */
4100 streamparm.parm.output.timeperframe.numerator = fps_d;
4101 streamparm.parm.output.timeperframe.denominator = fps_n;
4102
4103 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4104 goto set_parm_failed;
4105
4106 if (streamparm.parm.output.timeperframe.numerator > 0 &&
4107 streamparm.parm.output.timeperframe.denominator > 0)
4108 {
4109 /* get new values */
4110 fps_d = streamparm.parm.output.timeperframe.numerator;
4111 fps_n = streamparm.parm.output.timeperframe.denominator;
4112
4113 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set output framerate to %u/%u",
4114 fps_n, fps_d);
4115 }
4116 else
4117 {
4118 /* fix v4l2 output driver to provide framerate values */
4119 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4120 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
4121 }
4122
4123 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4124 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4125 }
4126
4127done:
4128 /* add boolean return, so we can fail on driver bugs */
4129 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &format, &info, &align);
4130
4131 /* now configure the pool */
4132 if (!gst_aml_v4l2_object_setup_pool(v4l2object, caps))
4133 goto pool_failed;
4134
4135 return TRUE;
4136
4137 /* ERRORS */
4138invalid_caps:
4139{
4140 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
4141 caps);
4142
4143 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4144 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
4145 return FALSE;
4146}
4147try_fmt_failed:
4148{
4149 if (errno == EINVAL)
4150 {
4151 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4152 (_("Device '%s' has no supported format"), v4l2object->videodev),
4153 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4154 GST_FOURCC_ARGS(pixelformat), width, height,
4155 g_strerror(errno)));
4156 }
4157 else
4158 {
4159 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4160 (_("Device '%s' failed during initialization"),
4161 v4l2object->videodev),
4162 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4163 GST_FOURCC_ARGS(pixelformat), width, height,
4164 g_strerror(errno)));
4165 }
4166 return FALSE;
4167}
4168set_fmt_failed:
4169{
4170 if (errno == EBUSY)
4171 {
4172 GST_AML_V4L2_ERROR(error, RESOURCE, BUSY,
4173 (_("Device '%s' is busy"), v4l2object->videodev),
4174 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4175 GST_FOURCC_ARGS(pixelformat), width, height,
4176 g_strerror(errno)));
4177 }
4178 else if (errno == EINVAL)
4179 {
4180 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4181 (_("Device '%s' has no supported format"), v4l2object->videodev),
4182 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4183 GST_FOURCC_ARGS(pixelformat), width, height,
4184 g_strerror(errno)));
4185 }
4186 else
4187 {
4188 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4189 (_("Device '%s' failed during initialization"),
4190 v4l2object->videodev),
4191 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4192 GST_FOURCC_ARGS(pixelformat), width, height,
4193 g_strerror(errno)));
4194 }
4195 return FALSE;
4196}
4197invalid_dimensions:
4198{
4199 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4200 (_("Device '%s' cannot capture at %dx%d"),
4201 v4l2object->videodev, width, height),
4202 ("Tried to capture at %dx%d, but device returned size %dx%d",
4203 width, height, format.fmt.pix.width, format.fmt.pix.height));
4204 return FALSE;
4205}
4206invalid_pixelformat:
4207{
4208 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4209 (_("Device '%s' cannot capture in the specified format"),
4210 v4l2object->videodev),
4211 ("Tried to capture in %" GST_FOURCC_FORMAT
4212 ", but device returned format"
4213 " %" GST_FOURCC_FORMAT,
4214 GST_FOURCC_ARGS(pixelformat),
4215 GST_FOURCC_ARGS(format.fmt.pix.pixelformat)));
4216 return FALSE;
4217}
4218invalid_planes:
4219{
4220 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4221 (_("Device '%s' does support non-contiguous planes"),
4222 v4l2object->videodev),
4223 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
4224 return FALSE;
4225}
4226invalid_field:
4227{
4228 enum v4l2_field wanted_field;
4229
4230 if (is_mplane)
4231 wanted_field = format.fmt.pix_mp.field;
4232 else
4233 wanted_field = format.fmt.pix.field;
4234
4235 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4236 (_("Device '%s' does not support %s interlacing"),
4237 v4l2object->videodev,
4238 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
4239 ("Device wants %s interlacing",
4240 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4241 return FALSE;
4242}
4243invalid_colorimetry:
4244{
4245 gchar *wanted_colorimetry;
4246
4247 wanted_colorimetry = gst_video_colorimetry_to_string(&info.colorimetry);
4248
4249 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4250 (_("Device '%s' does not support %s colorimetry"),
4251 v4l2object->videodev, gst_structure_get_string(s, "colorimetry")),
4252 ("Device wants %s colorimetry", wanted_colorimetry));
4253
4254 g_free(wanted_colorimetry);
4255 return FALSE;
4256}
4257get_parm_failed:
4258{
4259 /* it's possible that this call is not supported */
4260 if (errno != EINVAL && errno != ENOTTY)
4261 {
4262 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4263 (_("Could not get parameters on device '%s'"),
4264 v4l2object->videodev),
4265 GST_ERROR_SYSTEM);
4266 }
4267 goto done;
4268}
4269set_parm_failed:
4270{
4271 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4272 (_("Video device did not accept new frame rate setting.")),
4273 GST_ERROR_SYSTEM);
4274 goto done;
4275}
4276pool_failed:
4277{
4278 /* setup_pool already sent the error */
4279 return FALSE;
4280}
4281}
4282
4283gboolean
4284gst_aml_v4l2_object_set_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4285 GstAmlV4l2Error *error)
4286{
4287 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4288 caps);
4289 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, FALSE, error);
4290}
4291
4292gboolean
4293gst_aml_v4l2_object_try_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4294 GstAmlV4l2Error *error)
4295{
4296 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4297 caps);
4298 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, TRUE, error);
4299}
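
/* Usage sketch (illustrative only, not called from this file): during caps
 * negotiation a caller would typically probe with the TRY variant first and
 * only commit once the caps are fixed. Here `output_obj` and `err` stand for
 * a configured GstAmlV4l2Object and a prepared GstAmlV4l2Error:
 *
 *   if (gst_aml_v4l2_object_try_format (output_obj, caps, &err) &&
 *       gst_aml_v4l2_object_set_format (output_obj, caps, &err))
 *     ...   // proceed with pool setup and allocation
 */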
4300
4301GstFlowReturn
4302gst_aml_v4l2_object_poll(GstAmlV4l2Object *v4l2object)
4303{
4304 gint ret;
4305
4306 if (!v4l2object->can_poll_device)
4307 goto done;
4308
4309 GST_LOG_OBJECT(v4l2object, "polling device");
4310
4311again:
4312 ret = gst_poll_wait(v4l2object->poll, GST_CLOCK_TIME_NONE);
4313 if (G_UNLIKELY(ret < 0))
4314 {
4315 switch (errno)
4316 {
4317 case EBUSY:
4318 goto stopped;
4319 case EAGAIN:
4320 case EINTR:
4321 goto again;
4322 case ENXIO:
4323 GST_WARNING_OBJECT(v4l2object,
4324 "v4l2 device doesn't support polling. Disabling"
4325 " using libv4l2 in this case may cause deadlocks");
4326 v4l2object->can_poll_device = FALSE;
4327 goto done;
4328 default:
4329 goto select_error;
4330 }
4331 }
4332
4333done:
4334 return GST_FLOW_OK;
4335
4336 /* ERRORS */
4337stopped:
4338{
4339 GST_DEBUG_OBJECT(v4l2object, "stop called");
4340 return GST_FLOW_FLUSHING;
4341}
4342select_error:
4343{
4344 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ, (NULL),
4345 ("poll error %d: %s (%d)", ret, g_strerror(errno), errno));
4346 return GST_FLOW_ERROR;
4347}
4348}
4349
4350GstFlowReturn
4351gst_aml_v4l2_object_dqevent(GstAmlV4l2Object *v4l2object)
4352{
4353 GstFlowReturn res;
4354 struct v4l2_event evt;
4355
4356 if ((res = gst_aml_v4l2_object_poll(v4l2object)) != GST_FLOW_OK)
4357 goto poll_failed;
4358
4359 memset(&evt, 0x00, sizeof(struct v4l2_event));
4360 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DQEVENT, &evt) < 0)
4361 goto dqevent_failed;
4362
4363 switch (evt.type)
4364 {
4365 case V4L2_EVENT_SOURCE_CHANGE:
4366 return GST_AML_V4L2_FLOW_SOURCE_CHANGE;
4367 break;
4368 case V4L2_EVENT_EOS:
4369 return GST_AML_V4L2_FLOW_LAST_BUFFER;
4370 break;
4371 default:
4372 break;
4373 }
4374
4375 return GST_FLOW_OK;
4376
4377 /* ERRORS */
4378poll_failed:
4379{
4380 GST_DEBUG_OBJECT(v4l2object, "poll error %s", gst_flow_get_name(res));
4381 return res;
4382}
4383dqevent_failed:
4384{
4385 return GST_FLOW_ERROR;
4386}
4387}
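
/* Note: GST_AML_V4L2_FLOW_SOURCE_CHANGE is typically raised on a resolution
 * change (new sequence) on the CAPTURE queue, while GST_AML_V4L2_FLOW_LAST_BUFFER
 * maps to the V4L2 EOS event, which stateful decoders typically emit once
 * draining completes. */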
4388
4389/**
4390 * gst_aml_v4l2_object_acquire_format:
4391 * @v4l2object: the object
4392 * @info: a GstVideoInfo to be filled
4393 *
4394 * Acquire the format chosen by the driver. This is useful in decoder or encoder elements
4395 * where the output format is chosen by the HW (see the usage sketch after this function).
4396 *
4397 * Returns: %TRUE on success, %FALSE on failure.
4398 */
4399gboolean
4400gst_aml_v4l2_object_acquire_format(GstAmlV4l2Object *v4l2object, GstVideoInfo *info)
4401{
4402 struct v4l2_fmtdesc *fmtdesc;
4403 struct v4l2_format fmt;
4404 struct v4l2_crop crop;
4405 struct v4l2_selection sel;
4406 struct v4l2_rect *r = NULL;
4407 GstVideoFormat format;
4408 guint width, height;
4409 GstVideoAlignment align;
4410
4411 gst_video_info_init(info);
4412 gst_video_alignment_reset(&align);
4413
4414 memset(&fmt, 0x00, sizeof(struct v4l2_format));
4415 fmt.type = v4l2object->type;
4416 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4417 goto get_fmt_failed;
4418
4419 fmtdesc = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object,
4420 fmt.fmt.pix.pixelformat);
4421 if (fmtdesc == NULL)
4422 goto unsupported_format;
4423
4424 /* No need to care about mplane, the first four params are the same */
4425 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fmt.fmt.pix.pixelformat);
4426
4427 /* fails if we do not translate the fmt.pix.pixelformat to GstVideoFormat */
4428 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4429 goto unsupported_format;
4430
4431 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4432 goto invalid_dimensions;
4433
4434 width = fmt.fmt.pix.width;
4435 height = fmt.fmt.pix.height;
4436
4437 /* Use the default compose rectangle */
4438 memset(&sel, 0, sizeof(struct v4l2_selection));
4439 sel.type = v4l2object->type;
4440 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4441 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
4442 {
4443 r = &sel.r;
4444 }
4445 else
4446 {
4447 /* For ancient kernels, fall back to G_CROP */
4448 memset(&crop, 0, sizeof(struct v4l2_crop));
4449 crop.type = v4l2object->type;
4450 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4451 r = &crop.c;
4452 }
4453 if (r)
4454 {
4455 align.padding_left = r->left;
4456 align.padding_top = r->top;
4457 align.padding_right = width - r->width - r->left;
4458 align.padding_bottom = height - r->height - r->top;
4459 width = r->width;
4460 height = r->height;
4461 }
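/* Illustrative example: a 1920x1088 coded buffer whose compose rectangle is
 * 1920x1080 at (0,0) yields padding_bottom = 8 and an effective 1920x1080. */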
4462
4463 gst_video_info_set_format(info, format, width, height);
4464
4465 switch (fmt.fmt.pix.field)
4466 {
4467 case V4L2_FIELD_ANY:
4468 case V4L2_FIELD_NONE:
4469 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4470 break;
4471 case V4L2_FIELD_INTERLACED:
4472 case V4L2_FIELD_INTERLACED_TB:
4473 case V4L2_FIELD_INTERLACED_BT:
4474 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4475 break;
4476 default:
4477 goto unsupported_field;
4478 }
4479
4480 gst_aml_v4l2_object_get_colorspace(&fmt, &info->colorimetry);
4481
4482 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &fmt, info, &align);
4483
4484 /* Shall we set up the pool? */
4485
4486 return TRUE;
4487
4488get_fmt_failed:
4489{
4490 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4491 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4492 return FALSE;
4493}
4494invalid_dimensions:
4495{
4496 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4497 (_("Video device returned invalid dimensions.")),
4498 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4499 fmt.fmt.pix.height));
4500 return FALSE;
4501}
4502unsupported_field:
4503{
4504 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4505 (_("Video device uses an unsupported interlacing method.")),
4506 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4507 return FALSE;
4508}
4509unsupported_format:
4510{
4511 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4512 (_("Video device uses an unsupported pixel format.")),
4513 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4514 GST_FOURCC_ARGS(fmt.fmt.pix.pixelformat)));
4515 return FALSE;
4516}
4517}
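
/* Usage sketch (illustrative only): a decoder would typically call this after a
 * source-change event on its CAPTURE side; `capture_obj` is assumed to be the
 * capture GstAmlV4l2Object of the element:
 *
 *   GstVideoInfo out_info;
 *   if (gst_aml_v4l2_object_dqevent (capture_obj) == GST_AML_V4L2_FLOW_SOURCE_CHANGE &&
 *       gst_aml_v4l2_object_acquire_format (capture_obj, &out_info))
 *     ...   // renegotiate downstream caps from out_info
 */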
4518
4519gboolean
4520gst_aml_v4l2_object_set_crop(GstAmlV4l2Object *obj)
4521{
4522 struct v4l2_selection sel = {0};
4523 struct v4l2_crop crop = {0};
4524
4525 sel.type = obj->type;
4526 sel.target = V4L2_SEL_TGT_CROP;
4527 sel.flags = 0;
4528 sel.r.left = obj->align.padding_left;
4529 sel.r.top = obj->align.padding_top;
4530 sel.r.width = obj->info.width;
4531 sel.r.height = obj->info.height;
4532
4533 crop.type = obj->type;
4534 crop.c = sel.r;
4535
4536 if (obj->align.padding_left + obj->align.padding_top +
4537 obj->align.padding_right + obj->align.padding_bottom ==
4538 0)
4539 {
4540 GST_DEBUG_OBJECT(obj->dbg_obj, "no cropping needed");
4541 return TRUE;
4542 }
4543
4544 GST_DEBUG_OBJECT(obj->dbg_obj,
4545 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4546 crop.c.width, crop.c.height);
4547
4548 if (obj->ioctl(obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0)
4549 {
4550 if (errno != ENOTTY)
4551 {
4552 GST_WARNING_OBJECT(obj->dbg_obj,
4553 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4554 g_strerror(errno));
4555 return FALSE;
4556 }
4557 else
4558 {
4559 if (obj->ioctl(obj->video_fd, VIDIOC_S_CROP, &crop) < 0)
4560 {
4561 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_S_CROP failed");
4562 return FALSE;
4563 }
4564
4565 if (obj->ioctl(obj->video_fd, VIDIOC_G_CROP, &crop) < 0)
4566 {
4567 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_G_CROP failed");
4568 return FALSE;
4569 }
4570
4571 sel.r = crop.c;
4572 }
4573 }
4574
4575 GST_DEBUG_OBJECT(obj->dbg_obj,
4576 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4577 crop.c.width, crop.c.height);
4578
4579 return TRUE;
4580}
4581
4582gboolean
4583gst_aml_v4l2_object_caps_equal(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4584{
4585 GstStructure *config;
4586 GstCaps *oldcaps;
4587 gboolean ret;
4588
4589 if (!v4l2object->pool)
4590 return FALSE;
4591
4592 config = gst_buffer_pool_get_config(v4l2object->pool);
4593 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4594
4595 ret = oldcaps && gst_caps_is_equal(caps, oldcaps);
4596
4597 gst_structure_free(config);
4598
4599 return ret;
4600}
4601
4602gboolean
4603gst_aml_v4l2_object_caps_is_subset(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4604{
4605 GstStructure *config;
4606 GstCaps *oldcaps;
4607 gboolean ret;
4608
4609 if (!v4l2object->pool)
4610 return FALSE;
4611
4612 config = gst_buffer_pool_get_config(v4l2object->pool);
4613 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4614
4615 ret = oldcaps && gst_caps_is_subset(oldcaps, caps);
4616
4617 gst_structure_free(config);
4618
4619 return ret;
4620}
4621
4622GstCaps *
4623gst_aml_v4l2_object_get_current_caps(GstAmlV4l2Object *v4l2object)
4624{
4625 GstStructure *config;
4626 GstCaps *oldcaps;
4627
4628 if (!v4l2object->pool)
4629 return NULL;
4630
4631 config = gst_buffer_pool_get_config(v4l2object->pool);
4632 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4633
4634 if (oldcaps)
4635 gst_caps_ref(oldcaps);
4636
4637 gst_structure_free(config);
4638
4639 return oldcaps;
4640}
4641
4642gboolean
4643gst_aml_v4l2_object_unlock(GstAmlV4l2Object *v4l2object)
4644{
4645 gboolean ret = TRUE;
4646
4647 GST_LOG_OBJECT(v4l2object->dbg_obj, "start flushing");
4648
4649 gst_poll_set_flushing(v4l2object->poll, TRUE);
4650
4651 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4652 gst_buffer_pool_set_flushing(v4l2object->pool, TRUE);
4653
4654 return ret;
4655}
4656
4657gboolean
4658gst_aml_v4l2_object_unlock_stop(GstAmlV4l2Object *v4l2object)
4659{
4660 gboolean ret = TRUE;
4661
4662 GST_LOG_OBJECT(v4l2object->dbg_obj, "stop flushing");
4663
4664 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4665 gst_buffer_pool_set_flushing(v4l2object->pool, FALSE);
4666
4667 gst_poll_set_flushing(v4l2object->poll, FALSE);
4668
4669 return ret;
4670}
4671
4672gboolean
4673gst_aml_v4l2_object_stop(GstAmlV4l2Object *v4l2object)
4674{
4675 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "stopping");
4676
4677 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
4678 goto done;
4679 if (!GST_AML_V4L2_IS_ACTIVE(v4l2object))
4680 goto done;
4681
4682 if (v4l2object->pool)
4683 {
4684 if (!gst_aml_v4l2_buffer_pool_orphan(&v4l2object->pool))
4685 {
4686 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "deactivating pool");
4687 gst_buffer_pool_set_active(v4l2object->pool, FALSE);
4688 gst_object_unref(v4l2object->pool);
4689 }
4690 v4l2object->pool = NULL;
4691 }
4692
4693 GST_AML_V4L2_SET_INACTIVE(v4l2object);
4694
4695done:
4696 return TRUE;
4697}
4698
4699GstCaps *
4700gst_aml_v4l2_object_probe_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4701{
4702 GstCaps *ret;
4703 GSList *walk;
4704 GSList *formats;
4705
4706 formats = gst_aml_v4l2_object_get_format_list(v4l2object);
4707
4708 ret = gst_caps_new_empty();
4709
4710 if (v4l2object->keep_aspect && !v4l2object->par)
4711 {
4712 struct v4l2_cropcap cropcap;
4713
4714 memset(&cropcap, 0, sizeof(cropcap));
4715
4716 cropcap.type = v4l2object->type;
4717 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
4718 {
4719 if (errno != ENOTTY)
4720 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4721 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4722 g_strerror(errno));
4723 }
4724 else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator)
4725 {
4726 v4l2object->par = g_new0(GValue, 1);
4727 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
4728 gst_value_set_fraction(v4l2object->par, cropcap.pixelaspect.numerator,
4729 cropcap.pixelaspect.denominator);
4730 }
4731 }
4732
4733 for (walk = formats; walk; walk = walk->next)
4734 {
4735 struct v4l2_fmtdesc *format;
4736 GstStructure *template;
4737 GstCaps *tmp, *tmp2;
4738
4739 format = (struct v4l2_fmtdesc *)walk->data;
4740
4741 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(format->pixelformat);
4742
4743 if (!template)
4744 {
4745 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4746 "unknown format %" GST_FOURCC_FORMAT,
4747 GST_FOURCC_ARGS(format->pixelformat));
4748 continue;
4749 }
4750
4751 /* If we have a filter, check if we need to probe this format or not */
4752 if (filter)
4753 {
4754 GstCaps *format_caps = gst_caps_new_empty();
4755
4756 gst_caps_append_structure(format_caps, gst_structure_copy(template));
xuesong.jiange1a19662022-06-21 20:30:22 +08004757 GST_INFO_OBJECT(v4l2object->dbg_obj, "format_caps: %" GST_PTR_FORMAT, format_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004758
4759 if (!gst_caps_can_intersect(format_caps, filter))
4760 {
4761 gst_caps_unref(format_caps);
4762 gst_structure_free(template);
4763 continue;
4764 }
4765
4766 gst_caps_unref(format_caps);
4767 }
4768
4769 tmp = gst_aml_v4l2_object_probe_caps_for_format(v4l2object,
4770 format->pixelformat, template);
xuesong.jiange1a19662022-06-21 20:30:22 +08004771 GST_INFO_OBJECT(v4l2object->dbg_obj, "tmp caps: %" GST_PTR_FORMAT, tmp);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004772
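/* Each probed format is appended twice: once as plain system-memory caps and
 * once tagged with the memory:DMABuf caps feature, so downstream can negotiate
 * either path. */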
4773 if (tmp)
4774 {
4775 tmp2 = gst_caps_copy(tmp);
4776 gst_caps_set_features_simple(tmp2, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
4777 gst_caps_append(ret, tmp);
4778 gst_caps_append(ret, tmp2);
4779 }
4780
4781 gst_structure_free(template);
4782 }
4783
4784 if (filter)
4785 {
4786 GstCaps *tmp;
4787
4788 tmp = ret;
4789 ret = gst_caps_intersect_full(filter, ret, GST_CAPS_INTERSECT_FIRST);
4790 gst_caps_unref(tmp);
4791 }
4792
4793 GST_INFO_OBJECT(v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
4794
4795 return ret;
4796}
4797
4798GstCaps *
4799gst_aml_v4l2_object_get_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4800{
4801 GstCaps *ret;
4802
4803 if (v4l2object->probed_caps == NULL)
4804 v4l2object->probed_caps = gst_aml_v4l2_object_probe_caps(v4l2object, NULL);
4805
4806 if (filter)
4807 {
4808 ret = gst_caps_intersect_full(filter, v4l2object->probed_caps,
4809 GST_CAPS_INTERSECT_FIRST);
4810 }
4811 else
4812 {
4813 ret = gst_caps_ref(v4l2object->probed_caps);
4814 }
4815
4816 return ret;
4817}
4818
4819gboolean
4820gst_aml_v4l2_object_decide_allocation(GstAmlV4l2Object *obj, GstQuery *query)
4821{
4822 GstCaps *caps;
4823 GstBufferPool *pool = NULL, *other_pool = NULL;
4824 GstStructure *config;
4825 guint size, min, max, own_min = 0;
4826 gboolean update;
4827 gboolean has_video_meta;
4828 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
4829 GstAllocator *allocator = NULL;
4830 GstAllocationParams params = {0};
4831
4832 GST_DEBUG_OBJECT(obj->dbg_obj, "decide allocation");
4833
4834 g_return_val_if_fail(obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
4835 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
4836 FALSE);
4837
4838 gst_query_parse_allocation(query, &caps, NULL);
4839
4840 if (obj->pool == NULL)
4841 {
4842 if (!gst_aml_v4l2_object_setup_pool(obj, caps))
4843 goto pool_failed;
4844 }
4845
4846 if (gst_query_get_n_allocation_params(query) > 0)
4847 gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
4848
4849 if (gst_query_get_n_allocation_pools(query) > 0)
4850 {
4851 gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
4852 update = TRUE;
4853 }
4854 else
4855 {
4856 pool = NULL;
4857 min = max = 0;
4858 size = 0;
4859 update = FALSE;
4860 }
4861
4862 GST_DEBUG_OBJECT(obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool);
4863
4864 has_video_meta =
4865 gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
4866
4867 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
4868
4869 gst_aml_v4l2_get_driver_min_buffers(obj);
4870 /* We can't share our own pool if it would exceed V4L2 capacity */
4871 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
4872 can_share_own_pool = FALSE;
4873
4874 /* select a pool */
4875 switch (obj->mode)
4876 {
4877 case GST_V4L2_IO_RW:
4878 if (pool)
4879 {
4880 /* in READ/WRITE mode, prefer a downstream pool because our own pool
4881 * doesn't help much, we have to write to it as well */
4882 GST_DEBUG_OBJECT(obj->dbg_obj,
4883 "read/write mode: using downstream pool");
4884 /* use the biggest size; when we use our own pool we can't really do any
4885 * other size than what the hardware gives us, but for downstream pools
4886 * we can try */
4887 size = MAX(size, obj->info.size);
4888 }
4889 else if (can_share_own_pool)
4890 {
4891 /* no downstream pool, use our own then */
4892 GST_DEBUG_OBJECT(obj->dbg_obj,
4893 "read/write mode: no downstream pool, using our own");
4894 pool = gst_object_ref(obj->pool);
4895 size = obj->info.size;
4896 pushing_from_our_pool = TRUE;
4897 }
4898 break;
4899
4900 case GST_V4L2_IO_USERPTR:
4901 case GST_V4L2_IO_DMABUF_IMPORT:
4902 /* in importing mode, prefer our own pool, and pass the other pool to
4903 * our own, so it can serve itself */
4904 if (pool == NULL)
4905 goto no_downstream_pool;
4906 gst_aml_v4l2_buffer_pool_set_other_pool(GST_AML_V4L2_BUFFER_POOL(obj->pool),
4907 pool);
4908 other_pool = pool;
4909 gst_object_unref(pool);
4910 pool = gst_object_ref(obj->pool);
4911 size = obj->info.size;
4912 break;
4913
4914 case GST_V4L2_IO_MMAP:
4915 case GST_V4L2_IO_DMABUF:
4916 /* in streaming mode, prefer our own pool */
4917 /* Check if we can use it ... */
4918 if (can_share_own_pool)
4919 {
4920 if (pool)
4921 gst_object_unref(pool);
4922 pool = gst_object_ref(obj->pool);
4923 size = obj->info.size;
4924 GST_DEBUG_OBJECT(obj->dbg_obj,
4925 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
4926 pushing_from_our_pool = TRUE;
4927 }
4928 else if (pool)
4929 {
4930 GST_DEBUG_OBJECT(obj->dbg_obj,
4931 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
4932 pool);
4933 }
4934 else
4935 {
4936 GST_DEBUG_OBJECT(obj->dbg_obj,
4937 "streaming mode: no usable pool, copying to generic pool");
4938 size = MAX(size, obj->info.size);
4939 }
4940 break;
4941 case GST_V4L2_IO_AUTO:
4942 default:
4943 GST_WARNING_OBJECT(obj->dbg_obj, "unhandled mode");
4944 break;
4945 }
4946
4947 if (size == 0)
4948 goto no_size;
4949
4950 /* If pushing from our own pool, configure it with queried minimum,
4951 * otherwise use the minimum required */
4952 if (pushing_from_our_pool)
4953 {
4954 /* When pushing from our own pool, we need what the downstream one needs to be
4955 * able to fill the pipeline, plus the minimum the decoder requires according to
4956 * the driver, plus 2 more, so we don't end up with everything held downstream or
4957 * by the decoder. We account for 2 buffers for v4l2 so that while one is being
4958 * pushed downstream the other one can already be queued for the next
4959 * frame. */
4960 own_min = min + obj->min_buffers + 2;
4961
4962 /* If no allocation parameters were provided, allow for a few more
4963 * buffers and enable the copy threshold */
4964 if (!update)
4965 {
4966 own_min += 2;
4967 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4968 TRUE);
4969 }
4970 else
4971 {
4972 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4973 FALSE);
4974 }
4975 }
4976 else
4977 {
4978 /* In this case we'll have to configure two buffer pools. For our buffer
4979 * pool we'll need what the driver requires, plus one more so we can dequeue */
4980 own_min = obj->min_buffers + 1;
4981 own_min = MAX(own_min, GST_AML_V4L2_MIN_BUFFERS);
4982
4983 /* for the downstream pool, we keep what downstream wants, though ensure
4984 * at least a minimum if downstream didn't suggest anything (we are
4985 * expecting the base class to create a default one for the context) */
4986 min = MAX(min, GST_AML_V4L2_MIN_BUFFERS);
4987
4988 /* To import we need the other pool to hold at least own_min */
4989 if (obj->pool == pool)
4990 min += own_min;
4991 }
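/* Worked example (illustrative numbers): with downstream suggesting min = 2 and
 * the driver reporting obj->min_buffers = 4, pushing from our own pool with no
 * allocation params gives own_min = 2 + 4 + 2 + 2 = 10; when copying into a
 * downstream pool instead, own_min = MAX(4 + 1, GST_AML_V4L2_MIN_BUFFERS). */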
4992
4993 /* Request a bigger max, if one was suggested but it's too small */
4994 if (max != 0)
4995 max = MAX(min, max);
4996
4997 /* First step, configure our own pool */
4998 config = gst_buffer_pool_get_config(obj->pool);
4999
5000 if (obj->need_video_meta || has_video_meta)
5001 {
5002 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5003 gst_buffer_pool_config_add_option(config,
5004 GST_BUFFER_POOL_OPTION_VIDEO_META);
5005 }
5006
5007 gst_buffer_pool_config_set_allocator(config, allocator, &params);
5008 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5009
5010 GST_DEBUG_OBJECT(obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config);
5011
5012 /* Our pool often needs to adjust the values */
5013 if (!gst_buffer_pool_set_config(obj->pool, config))
5014 {
5015 config = gst_buffer_pool_get_config(obj->pool);
5016
5017 GST_DEBUG_OBJECT(obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config);
5018
5019 /* our pool will adjust the maximum buffer, which we are fine with */
5020 if (!gst_buffer_pool_set_config(obj->pool, config))
5021 goto config_failed;
5022 }
5023
5024 /* Now configure the other pool if different */
5025 if (obj->pool != pool)
5026 other_pool = pool;
5027
5028 if (other_pool)
5029 {
5030 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)obj->element;
5031 if (self->is_secure_path)
5032 {
5033 params.flags |= GST_MEMORY_FLAG_LAST << 1; // in drmallocator GST_MEMORY_FLAG_LAST << 1 represents GST_MEMORY_FLAG_SECURE
5034 GST_DEBUG_OBJECT(obj, "set secure flag for drmbufferpool flag:0x%x", params.flags);
5035 }
5036 config = gst_buffer_pool_get_config(other_pool);
5037 gst_buffer_pool_config_set_allocator(config, allocator, &params);
5038 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
5039 gst_buffer_pool_config_set_video_alignment(config, &obj->align);
5040
5041 GST_DEBUG_OBJECT(obj->dbg_obj, "setting other pool config to %" GST_PTR_FORMAT, config);
5042
5043 /* if downstream supports video metadata, add this to the pool config */
5044 if (has_video_meta)
5045 {
5046 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5047 gst_buffer_pool_config_add_option(config,
5048 GST_BUFFER_POOL_OPTION_VIDEO_META);
5049 }
5050
5051 if (!gst_buffer_pool_set_config(other_pool, config))
5052 {
5053 config = gst_buffer_pool_get_config(other_pool);
5054
5055 if (!gst_buffer_pool_config_validate_params(config, caps, size, min,
5056 max))
5057 {
5058 gst_structure_free(config);
5059 goto config_failed;
5060 }
5061
5062 if (!gst_buffer_pool_set_config(other_pool, config))
5063 goto config_failed;
5064 }
5065 }
5066
5067 if (pool)
5068 {
5069 /* For simplicity, simply read back the active configuration, so our base
5070 * class gets the right information */
5071 config = gst_buffer_pool_get_config(pool);
5072 gst_buffer_pool_config_get_params(config, NULL, &size, &min, &max);
5073 gst_structure_free(config);
5074 }
5075
5076 if (update)
5077 gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
5078 else
5079 gst_query_add_allocation_pool(query, pool, size, min, max);
5080
5081 if (allocator)
5082 gst_object_unref(allocator);
5083
5084 if (pool)
5085 gst_object_unref(pool);
5086
5087 return TRUE;
5088
5089pool_failed:
5090{
5091 /* setup_pool already sent the error */
5092 goto cleanup;
5093}
5094config_failed:
5095{
5096 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5097 (_("Failed to configure internal buffer pool.")), (NULL));
5098 goto cleanup;
5099}
5100no_size:
5101{
5102 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5103 (_("Video device did not suggest any buffer size.")), (NULL));
5104 goto cleanup;
5105}
5106cleanup:
5107{
5108 if (allocator)
5109 gst_object_unref(allocator);
5110
5111 if (pool)
5112 gst_object_unref(pool);
5113 return FALSE;
5114}
5115no_downstream_pool:
5116{
5117 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5118 (_("No downstream pool to import from.")),
5119 ("When importing DMABUF or USERPTR, we need a pool to import from"));
5120 return FALSE;
5121}
5122}
5123
5124gboolean
5125gst_aml_v4l2_object_propose_allocation(GstAmlV4l2Object *obj, GstQuery *query)
5126{
5127 GstBufferPool *pool;
5128 /* we need at least 2 buffers to operate */
5129 guint size, min, max;
5130 GstCaps *caps;
5131 gboolean need_pool;
5132
5133 /* Set defaults allocation parameters */
5134 size = obj->info.size;
5135 min = GST_AML_V4L2_MIN_BUFFERS;
5136 max = VIDEO_MAX_FRAME;
5137
5138 gst_query_parse_allocation(query, &caps, &need_pool);
5139
5140 if (caps == NULL)
5141 goto no_caps;
5142
5143 switch (obj->mode)
5144 {
5145 case GST_V4L2_IO_MMAP:
5146 case GST_V4L2_IO_DMABUF:
5147 if ((pool = obj->pool))
5148 gst_object_ref(pool);
5149 break;
5150 default:
5151 pool = NULL;
5152 break;
5153 }
5154
5155 if (pool != NULL)
5156 {
5157 GstCaps *pcaps;
5158 GstStructure *config;
5159
5160 /* we had a pool, check caps */
5161 config = gst_buffer_pool_get_config(pool);
5162 gst_buffer_pool_config_get_params(config, &pcaps, NULL, NULL, NULL);
5163
5164 GST_DEBUG_OBJECT(obj->dbg_obj,
5165 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
5166 if (!gst_caps_is_equal(caps, pcaps))
5167 {
5168 gst_structure_free(config);
5169 gst_object_unref(pool);
5170 goto different_caps;
5171 }
5172 gst_structure_free(config);
5173 }
5174 gst_aml_v4l2_get_driver_min_buffers(obj);
5175
5176 min = MAX(obj->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
5177
5178 gst_query_add_allocation_pool(query, pool, size, min, max);
5179
5180 /* we also support various metadata */
5181 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
5182
5183 if (pool)
5184 gst_object_unref(pool);
5185
5186 return TRUE;
5187
5188 /* ERRORS */
5189no_caps:
5190{
5191 GST_DEBUG_OBJECT(obj->dbg_obj, "no caps specified");
5192 return FALSE;
5193}
5194different_caps:
5195{
5196 /* different caps, we can't use this pool */
5197 GST_DEBUG_OBJECT(obj->dbg_obj, "pool has different caps");
5198 return FALSE;
5199}
5200}
5201
5202gboolean
5203gst_aml_v4l2_object_try_import(GstAmlV4l2Object *obj, GstBuffer *buffer)
5204{
5205 GstVideoMeta *vmeta;
5206 guint n_mem = gst_buffer_n_memory(buffer);
5207
5208 /* only import if requested */
5209 switch (obj->mode)
5210 {
5211 case GST_V4L2_IO_USERPTR:
5212 case GST_V4L2_IO_DMABUF_IMPORT:
5213 break;
5214 default:
5215 GST_DEBUG_OBJECT(obj->dbg_obj,
5216 "The io-mode does not enable importation");
5217 return FALSE;
5218 }
5219
5220 vmeta = gst_buffer_get_video_meta(buffer);
5221 if (!vmeta && obj->need_video_meta)
5222 {
5223 GST_DEBUG_OBJECT(obj->dbg_obj, "Downstream buffer uses standard "
5224 "stride/offset while the driver does not.");
5225 return FALSE;
5226 }
5227
5228 /* we need matching strides/offsets and size */
5229 if (vmeta)
5230 {
5231 guint p;
5232 gboolean need_fmt_update = FALSE;
5233
5234 if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES(&obj->info))
5235 {
5236 GST_WARNING_OBJECT(obj->dbg_obj,
5237 "Cannot import buffers with different number planes");
5238 return FALSE;
5239 }
5240
5241 for (p = 0; p < vmeta->n_planes; p++)
5242 {
5243 if (vmeta->stride[p] < obj->info.stride[p])
5244 {
5245 GST_DEBUG_OBJECT(obj->dbg_obj,
5246 "Not importing as remote stride %i is smaller then %i on plane %u",
5247 vmeta->stride[p], obj->info.stride[p], p);
5248 return FALSE;
5249 }
5250 else if (vmeta->stride[p] > obj->info.stride[p])
5251 {
5252 need_fmt_update = TRUE;
5253 }
5254
5255 if (vmeta->offset[p] < obj->info.offset[p])
5256 {
5257 GST_DEBUG_OBJECT(obj->dbg_obj,
5258 "Not importing as offset %" G_GSIZE_FORMAT
5259 " is smaller then %" G_GSIZE_FORMAT " on plane %u",
5260 vmeta->offset[p], obj->info.offset[p], p);
5261 return FALSE;
5262 }
5263 else if (vmeta->offset[p] > obj->info.offset[p])
5264 {
5265 need_fmt_update = TRUE;
5266 }
5267 }
5268
5269 if (need_fmt_update)
5270 {
5271 struct v4l2_format format;
5272 gint wanted_stride[GST_VIDEO_MAX_PLANES] = {
5273 0,
5274 };
5275
5276 format = obj->format;
5277
5278 /* update the current format with the stride we want to import from */
5279 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5280 {
5281 guint i;
5282
5283 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted strides:");
5284
5285 for (i = 0; i < obj->n_v4l2_planes; i++)
5286 {
5287 gint stride = vmeta->stride[i];
5288
5289 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5290 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5291
5292 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
5293 wanted_stride[i] = stride;
5294 GST_DEBUG_OBJECT(obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
5295 }
5296 }
5297 else
5298 {
5299 gint stride = vmeta->stride[0];
5300
5301 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted stride: %i", stride);
5302
5303 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5304 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5305
5306 format.fmt.pix.bytesperline = stride;
5307 wanted_stride[0] = stride;
5308 }
5309
5310 if (obj->ioctl(obj->video_fd, VIDIOC_S_FMT, &format) < 0)
5311 {
5312 GST_WARNING_OBJECT(obj->dbg_obj,
5313 "Something went wrong trying to update current format: %s",
5314 g_strerror(errno));
5315 return FALSE;
5316 }
5317
5318 gst_aml_v4l2_object_save_format(obj, obj->fmtdesc, &format, &obj->info,
5319 &obj->align);
5320
5321 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5322 {
5323 guint i;
5324
5325 for (i = 0; i < obj->n_v4l2_planes; i++)
5326 {
5327 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i])
5328 {
5329 GST_DEBUG_OBJECT(obj->dbg_obj,
5330 "[%i] Driver did not accept the new stride (wants %i, got %i)",
5331 i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
5332 wanted_stride[i]);
5333 return FALSE;
5334 }
5335 }
5336 }
5337 else
5338 {
5339 if (format.fmt.pix.bytesperline != wanted_stride[0])
5340 {
5341 GST_DEBUG_OBJECT(obj->dbg_obj,
5342 "Driver did not accept the new stride (wants %i, got %i)",
5343 format.fmt.pix.bytesperline, wanted_stride[0]);
5344 return FALSE;
5345 }
5346 }
5347 }
5348 }
5349
5350 /* we can always import a single-memory buffer, but otherwise we need the same
5351 * number of memory objects. */
5352 if (n_mem != 1 && n_mem != obj->n_v4l2_planes)
5353 {
5354 GST_DEBUG_OBJECT(obj->dbg_obj, "Can only import %i memory, "
5355 "buffers contains %u memory",
5356 obj->n_v4l2_planes, n_mem);
5357 return FALSE;
5358 }
5359
5360 /* For DMABuf import we need DMABuf memory, of course */
5361 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT)
5362 {
5363 guint i;
5364
5365 for (i = 0; i < n_mem; i++)
5366 {
5367 GstMemory *mem = gst_buffer_peek_memory(buffer, i);
5368
5369 if (!gst_is_dmabuf_memory(mem))
5370 {
5371 GST_DEBUG_OBJECT(obj->dbg_obj, "Cannot import non-DMABuf memory.");
5372 return FALSE;
5373 }
5374 }
5375 }
5376
5377 /* for the remaining, only the kernel driver can tell */
5378 return TRUE;
5379}
5380
5381gboolean gst_aml_v4l2_set_drm_mode(GstAmlV4l2Object *v4l2object)
5382{
5383 /* On Amlogic, using GST_V4L2_IO_DMABUF_IMPORT on the output object implies secure memory */
5384 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
5385 {
5386 int rc;
5387 struct v4l2_queryctrl queryctrl;
5388 struct v4l2_control control;
5389
5390 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)v4l2object->element;
5391 self->is_secure_path = TRUE;
5392
5393#define V4L2_CID_USER_AMLOGIC_BASE (V4L2_CID_USER_BASE + 0x1100)
5394#define AML_V4L2_SET_DRMMODE (V4L2_CID_USER_AMLOGIC_BASE + 0)
5395 memset(&queryctrl, 0, sizeof(queryctrl));
5396 queryctrl.id = AML_V4L2_SET_DRMMODE;
5397
5398 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_QUERYCTRL, &queryctrl);
5399 if (rc == 0)
5400 {
5401 if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED))
5402 {
5403 memset(&control, 0, sizeof(control));
5404 control.id = AML_V4L2_SET_DRMMODE;
5405 control.value = 1;
5406 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_CTRL, &control);
5407 if (rc != 0)
5408 {
5409 GST_ERROR_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE fail: rc %d", rc);
5410 return FALSE;
5411 }
5412 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE set succ");
5413 return TRUE;
5414 }
5415 else
5416 {
5417 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE is disabled");
5418 return TRUE;
5419 }
5420 }
5421 else
5422 {
5423 GST_ERROR_OBJECT(v4l2object, "VIDIOC_QUERYCTRL for AML_V4L2_SET_DRMMODE fail");
5424 return FALSE;
5425 }
5426 }
5427 else
5428 {
5429 GST_DEBUG_OBJECT(v4l2object, "req mode is not GST_V4L2_IO_DMABUF_IMPORT, DRM mode does not need to be configured");
5430 return TRUE;
5431 }
5432}
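
/* Usage sketch (illustrative only): a decoder setting up a secure playback path
 * would typically call this right after configuring the output (sink) side,
 * once req_mode has been set to GST_V4L2_IO_DMABUF_IMPORT:
 *
 *   if (!gst_aml_v4l2_set_drm_mode (output_obj))
 *     GST_WARNING ("could not enable secure (DRM) mode");
 */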