/* GStreamer
 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <stdio.h>
#include <sys/utsname.h>

#ifdef HAVE_GUDEV
#include <gudev/gudev.h>
#endif

#include "ext/videodev2.h"
#include "gstamlv4l2object.h"

#include "gst/gst-i18n-plugin.h"

#include <gst/video/video.h>
#include <gst/allocators/gstdmabuf.h>

GST_DEBUG_CATEGORY_EXTERN(aml_v4l2_debug);
#define GST_CAT_DEFAULT aml_v4l2_debug

#define DEFAULT_PROP_DEVICE_NAME NULL
#define DEFAULT_PROP_DEVICE_FD -1
#define DEFAULT_PROP_FLAGS 0
#define DEFAULT_PROP_TV_NORM 0
#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO

#define ENCODED_BUFFER_SIZE (3 * 1024 * 1024)

#define V4L2_CONFIG_PARM_DECODE_CFGINFO (1 << 0)
#define V4L2_CONFIG_PARM_DECODE_PSINFO (1 << 1)
#define V4L2_CONFIG_PARM_DECODE_HDRINFO (1 << 2)
#define V4L2_CONFIG_PARM_DECODE_CNTINFO (1 << 3)

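/* Amlogic vendor-specific decoder controls. The base value and the +0/+7/+9
 * offsets below follow the vendor driver's private control numbering (an
 * assumption taken from the vendor headers, not part of mainline V4L2). */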
#define V4L2_CID_USER_AMLOGIC_BASE (V4L2_CID_USER_BASE + 0x1100)
#define AML_V4L2_SET_DRMMODE (V4L2_CID_USER_AMLOGIC_BASE + 0)
#define AML_V4L2_DEC_PARMS_CONFIG (V4L2_CID_USER_AMLOGIC_BASE + 7)
#define AML_V4L2_SET_STREAM_MODE (V4L2_CID_USER_AMLOGIC_BASE + 9)

enum
{
    PROP_0,
    V4L2_STD_OBJECT_PROPS,
};

/*
 * common format / caps utilities:
 */
typedef enum
{
    GST_V4L2_RAW = 1 << 0,
    GST_V4L2_CODEC = 1 << 1,
    GST_V4L2_TRANSPORT = 1 << 2,
    GST_V4L2_NO_PARSE = 1 << 3,
    GST_V4L2_ALL = 0xffff
} GstAmlV4L2FormatFlags;

typedef struct
{
    guint32 format;
    gboolean dimensions;
    GstAmlV4L2FormatFlags flags;
} GstAmlV4L2FormatDesc;

91static const GstAmlV4L2FormatDesc gst_aml_v4l2_formats[] = {
92 /* RGB formats */
93 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
102 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
103 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
104 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
105 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
107
108 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
109 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
110 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
111 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
112 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
113 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
114
115 /* Grey formats */
116 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
118 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
119 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
120 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
121 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
122 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
123 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
124
125 /* Palette formats */
126 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
127
128 /* Chrominance formats */
129 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
130
131 /* Luminance+Chrominance formats */
132 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
142 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
143 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
144 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
148 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
149 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
150 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
151 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
153
154 /* two planes -- one Y, one Cr + Cb interleaved */
155 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
156 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
157 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
158 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
162 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
163 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
164 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
165 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
166 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
167
168 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
169 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
170 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
171 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
172 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
173
174 /* compressed formats */
175 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
176 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
179 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
180 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
    {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
    {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
    {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
184 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
185 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
186 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
188 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
189 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
190 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
191 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
192 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
193 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
194 {V4L2_PIX_FMT_AV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
195
196 /* Vendor-specific formats */
197 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
198 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
199 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
200 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
201};
202
203#define GST_AML_V4L2_FORMAT_COUNT (G_N_ELEMENTS(gst_aml_v4l2_formats))
204
static GSList *gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object);
static gboolean gst_aml_v4l2_set_control(GstAmlV4l2Object *v4l2object, guint ctl);

208#define GST_TYPE_AML_V4L2_DEVICE_FLAGS (gst_aml_v4l2_device_get_type())
209static GType
210gst_aml_v4l2_device_get_type(void)
211{
212 static GType v4l2_device_type = 0;
213
214 if (v4l2_device_type == 0)
215 {
216 static const GFlagsValue values[] = {
217 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
218 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
219 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
220
221 {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
222 {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
223
224 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
225 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
226
227 {0, NULL, NULL}};
228
229 v4l2_device_type =
230 g_flags_register_static("GstAmlV4l2DeviceTypeFlags", values);
231 }
232
233 return v4l2_device_type;
234}
235
236GType gst_aml_v4l2_io_mode_get_type(void)
237{
238 static GType v4l2_io_mode = 0;
239
240 if (!v4l2_io_mode)
241 {
242 static const GEnumValue io_modes[] = {
243 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
244 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
245 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
246 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
247 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
248 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
249 "dmabuf-import"},
250
251 {0, NULL, NULL}};
252 v4l2_io_mode = g_enum_register_static("GstAmlV4l2IOMode", io_modes);
253 }
254 return v4l2_io_mode;
255}
256
257void gst_aml_v4l2_object_install_properties_helper(GObjectClass *gobject_class,
258 const char *default_device)
259{
260 g_object_class_install_property(gobject_class, PROP_DEVICE,
261 g_param_spec_string("device", "Device", "Device location",
262 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
263 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
264 g_param_spec_string("device-name", "Device name",
265 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
266 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
267 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
268 g_param_spec_int("device-fd", "File descriptor",
269 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
270 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
271 g_object_class_install_property(gobject_class, PROP_FLAGS,
272 g_param_spec_flags("flags", "Flags", "Device type flags",
273 GST_TYPE_AML_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
274 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
275
276 /**
277 * GstV4l2Src:brightness:
278 *
279 * Picture brightness, or more precisely, the black level
280 */
281 g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
282 g_param_spec_int("brightness", "Brightness",
283 "Picture brightness, or more precisely, the black level", G_MININT,
284 G_MAXINT, 0,
285 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
286 /**
287 * GstV4l2Src:contrast:
288 *
289 * Picture contrast or luma gain
290 */
291 g_object_class_install_property(gobject_class, PROP_CONTRAST,
292 g_param_spec_int("contrast", "Contrast",
293 "Picture contrast or luma gain", G_MININT,
294 G_MAXINT, 0,
295 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
296 /**
297 * GstV4l2Src:saturation:
298 *
299 * Picture color saturation or chroma gain
300 */
301 g_object_class_install_property(gobject_class, PROP_SATURATION,
302 g_param_spec_int("saturation", "Saturation",
303 "Picture color saturation or chroma gain", G_MININT,
304 G_MAXINT, 0,
305 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
306 /**
307 * GstV4l2Src:hue:
308 *
309 * Hue or color balance
310 */
311 g_object_class_install_property(gobject_class, PROP_HUE,
312 g_param_spec_int("hue", "Hue",
313 "Hue or color balance", G_MININT,
314 G_MAXINT, 0,
315 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
316
317 /**
318 * GstV4l2Src:io-mode:
319 *
320 * IO Mode
321 */
322 g_object_class_install_property(gobject_class, PROP_IO_MODE,
323 g_param_spec_enum("io-mode", "IO mode",
324 "I/O mode",
325 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
326 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
327
328 /**
329 * GstV4l2Src:extra-controls:
330 *
331 * Additional v4l2 controls for the device. The controls are identified
332 * by the control name (lowercase with '_' for any non-alphanumeric
333 * characters).
334 *
335 * Since: 1.2
336 */
337 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
338 g_param_spec_boxed("extra-controls", "Extra Controls",
339 "Extra v4l2 controls (CIDs) for the device",
340 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
341
342 /**
343 * GstV4l2Src:pixel-aspect-ratio:
344 *
345 * The pixel aspect ratio of the device. This overwrites the pixel aspect
346 * ratio queried from the device.
347 *
348 * Since: 1.2
349 */
350 g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
351 g_param_spec_string("pixel-aspect-ratio", "Pixel Aspect Ratio",
352 "Overwrite the pixel aspect ratio of the device", "1/1",
353 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
354
355 /**
356 * GstV4l2Src:force-aspect-ratio:
357 *
358 * When enabled, the pixel aspect ratio queried from the device or set
359 * with the pixel-aspect-ratio property will be enforced.
360 *
361 * Since: 1.2
362 */
363 g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
364 g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
365 "When enabled, the pixel aspect ratio will be enforced", TRUE,
366 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
367}
368
369void gst_aml_v4l2_object_install_m2m_properties_helper(GObjectClass *gobject_class)
370{
371 g_object_class_install_property(gobject_class, PROP_DEVICE,
372 g_param_spec_string("device", "Device", "Device location",
373 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
374
375 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
376 g_param_spec_string("device-name", "Device name",
377 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
378 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
379
380 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
381 g_param_spec_int("device-fd", "File descriptor",
382 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
383 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
384
385 g_object_class_install_property(gobject_class, PROP_OUTPUT_IO_MODE,
386 g_param_spec_enum("output-io-mode", "Output IO mode",
387 "Output side I/O mode (matches sink pad)",
388 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
389 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
390
391 g_object_class_install_property(gobject_class, PROP_CAPTURE_IO_MODE,
392 g_param_spec_enum("capture-io-mode", "Capture IO mode",
393 "Capture I/O mode (matches src pad)",
394 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
395 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
396
397 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
398 g_param_spec_boxed("extra-controls", "Extra Controls",
399 "Extra v4l2 controls (CIDs) for the device",
400 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
401
402 g_object_class_install_property(gobject_class, PROP_DUMP_FRAME_LOCATION,
403 g_param_spec_string("dump-frame-location", "dump frame location",
404 "Location of the file to write decoder frames", NULL,
405 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, PROP_STREAM_MODE,
                                    g_param_spec_boolean("stream-mode", "Configure v4l2 stream mode",
                                                         "TRUE for stream mode, FALSE for frame mode",
                                                         FALSE,
                                                         G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
413
414/* Support for 32bit off_t, this wrapper is casting off_t to gint64 */
415#ifdef HAVE_LIBV4L2
416#if SIZEOF_OFF_T < 8
417
418static gpointer
419v4l2_mmap_wrapper(gpointer start, gsize length, gint prot, gint flags, gint fd,
420 off_t offset)
421{
422 return v4l2_mmap(start, length, prot, flags, fd, (gint64)offset);
423}
424
425#define v4l2_mmap v4l2_mmap_wrapper
426
427#endif /* SIZEOF_OFF_T < 8 */
428#endif /* HAVE_LIBV4L2 */
429
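/* Allocate and initialise a GstAmlV4l2Object for the given element and buffer
 * queue type; the returned object must be released with
 * gst_aml_v4l2_object_destroy(). */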
430GstAmlV4l2Object *
431gst_aml_v4l2_object_new(GstElement *element,
432 GstObject *debug_object,
433 enum v4l2_buf_type type,
434 const char *default_device,
435 GstAmlV4l2GetInOutFunction get_in_out_func,
436 GstAmlV4l2SetInOutFunction set_in_out_func,
437 GstAmlV4l2UpdateFpsFunction update_fps_func)
438{
439 GstAmlV4l2Object *v4l2object;
440
441 /*
442 * some default values
443 */
444 v4l2object = g_new0(GstAmlV4l2Object, 1);
445
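    /* On the capture side, the default buffer I/O mode can be overridden with
     * the GST_DEFAULT_V4L2_BUF_MODE environment variable
     * ("DMA_BUF_IMPORT" or "DMA_BUF"). */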
446 if ((V4L2_BUF_TYPE_VIDEO_CAPTURE == type || V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type))
447 {
448 const char *default_mode = getenv("GST_DEFAULT_V4L2_BUF_MODE");
449 GST_DEBUG("amlmodbuf GST_AML_DEFAULT_V4L2_BUF_MODE:%s", default_mode);
        // v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
        if (default_mode)
452 {
453 if (strcmp(default_mode, "DMA_BUF_IMPORT") == 0)
454 v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
455 else if (strcmp(default_mode, "DMA_BUF") == 0)
456 v4l2object->req_mode = GST_V4L2_IO_DMABUF;
457 GST_DEBUG("amlmodbuf set default buf default_mode:%d", v4l2object->req_mode);
458 }
459 }
460
461 v4l2object->type = type;
462 v4l2object->formats = NULL;
463
464 v4l2object->element = element;
465 v4l2object->dbg_obj = debug_object;
466 v4l2object->get_in_out_func = get_in_out_func;
467 v4l2object->set_in_out_func = set_in_out_func;
468 v4l2object->update_fps_func = update_fps_func;
469
470 v4l2object->video_fd = -1;
471 v4l2object->active = FALSE;
472 v4l2object->videodev = g_strdup(default_device);
473
474 v4l2object->norms = NULL;
475 v4l2object->channels = NULL;
476 v4l2object->colors = NULL;
477
478 v4l2object->keep_aspect = TRUE;
    v4l2object->stream_mode = FALSE;

481 v4l2object->n_v4l2_planes = 0;
482
483 v4l2object->no_initial_format = FALSE;
484
485 /* We now disable libv4l2 by default, but have an env to enable it. */
486#ifdef HAVE_LIBV4L2
487 if (g_getenv("GST_V4L2_USE_LIBV4L2"))
488 {
489 v4l2object->fd_open = v4l2_fd_open;
490 v4l2object->close = v4l2_close;
491 v4l2object->dup = v4l2_dup;
492 v4l2object->ioctl = v4l2_ioctl;
493 v4l2object->read = v4l2_read;
494 v4l2object->mmap = v4l2_mmap;
495 v4l2object->munmap = v4l2_munmap;
496 }
497 else
498#endif
499 {
500 v4l2object->fd_open = NULL;
501 v4l2object->close = close;
502 v4l2object->dup = dup;
503 v4l2object->ioctl = ioctl;
504 v4l2object->read = read;
505 v4l2object->mmap = mmap;
506 v4l2object->munmap = munmap;
507 }
508 v4l2object->poll = gst_poll_new(TRUE);
509 v4l2object->can_wait_event = FALSE;
510 v4l2object->can_poll_device = TRUE;
511 v4l2object->tvin_port = -1;
512
513 v4l2object->dumpframefile = NULL;
514
    /* resolution switching bookkeeping */
    v4l2object->old_other_pool = NULL;
    v4l2object->old_old_other_pool = NULL;
    v4l2object->outstanding_buf_num = 0;
    return v4l2object;
520}
521
522static gboolean gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object);
523
524void gst_aml_v4l2_object_destroy(GstAmlV4l2Object *v4l2object)
525{
526 g_return_if_fail(v4l2object != NULL);
527
528 g_free(v4l2object->videodev);
529
530 g_free(v4l2object->channel);
531
532 if (v4l2object->formats)
533 {
534 gst_aml_v4l2_object_clear_format_list(v4l2object);
535 }
536
537 if (v4l2object->probed_caps)
538 {
539 gst_caps_unref(v4l2object->probed_caps);
540 }
541
542 if (v4l2object->extra_controls)
543 {
544 gst_structure_free(v4l2object->extra_controls);
545 }
546
547 gst_poll_free(v4l2object->poll);
548
549 g_free(v4l2object->dumpframefile);
550
    /* resolution switching bookkeeping */
    if (v4l2object->old_other_pool)
    {
        gst_object_unref(v4l2object->old_other_pool);
        v4l2object->old_other_pool = NULL;
    }
    if (v4l2object->old_old_other_pool)
    {
        gst_object_unref(v4l2object->old_old_other_pool);
        v4l2object->old_old_other_pool = NULL;
    }
    v4l2object->outstanding_buf_num = 0;

    g_free(v4l2object);
565}
566
567static gboolean
568gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object)
569{
570 g_slist_foreach(v4l2object->formats, (GFunc)g_free, NULL);
571 g_slist_free(v4l2object->formats);
572 v4l2object->formats = NULL;
573
574 return TRUE;
575}
576
577static gint
578gst_aml_v4l2_object_prop_to_cid(guint prop_id)
579{
580 gint cid = -1;
581
582 switch (prop_id)
583 {
584 case PROP_BRIGHTNESS:
585 cid = V4L2_CID_BRIGHTNESS;
586 break;
587 case PROP_CONTRAST:
588 cid = V4L2_CID_CONTRAST;
589 break;
590 case PROP_SATURATION:
591 cid = V4L2_CID_SATURATION;
592 break;
593 case PROP_HUE:
594 cid = V4L2_CID_HUE;
595 break;
596 default:
597 GST_WARNING("unmapped property id: %d", prop_id);
598 }
599 return cid;
600}
601
602gboolean
603gst_aml_v4l2_object_set_property_helper(GstAmlV4l2Object *v4l2object,
604 guint prop_id, const GValue *value, GParamSpec *pspec)
605{
606 switch (prop_id)
607 {
608 case PROP_DEVICE:
609 g_free(v4l2object->videodev);
610 v4l2object->videodev = g_value_dup_string(value);
611 break;
612 case PROP_BRIGHTNESS:
613 case PROP_CONTRAST:
614 case PROP_SATURATION:
615 case PROP_HUE:
616 {
617 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
618
619 if (cid != -1)
620 {
621 if (GST_AML_V4L2_IS_OPEN(v4l2object))
622 {
623 gst_aml_v4l2_set_attribute(v4l2object, cid, g_value_get_int(value));
624 }
625 }
626 return TRUE;
627 }
628 break;
629 case PROP_IO_MODE:
630 v4l2object->req_mode = g_value_get_enum(value);
631 break;
632 case PROP_CAPTURE_IO_MODE:
633 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
634 v4l2object->req_mode = g_value_get_enum(value);
635 break;
636 case PROP_OUTPUT_IO_MODE:
637 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
638 v4l2object->req_mode = g_value_get_enum(value);
639 break;
640 case PROP_EXTRA_CONTROLS:
641 {
642 const GstStructure *s = gst_value_get_structure(value);
643
644 if (v4l2object->extra_controls)
645 gst_structure_free(v4l2object->extra_controls);
646
647 v4l2object->extra_controls = s ? gst_structure_copy(s) : NULL;
648 if (GST_AML_V4L2_IS_OPEN(v4l2object))
649 gst_aml_v4l2_set_controls(v4l2object, v4l2object->extra_controls);
650 break;
651 }
652 case PROP_PIXEL_ASPECT_RATIO:
653 if (v4l2object->par)
654 {
655 g_value_unset(v4l2object->par);
656 g_free(v4l2object->par);
657 }
658 v4l2object->par = g_new0(GValue, 1);
659 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
660 if (!g_value_transform(value, v4l2object->par))
661 {
662 g_warning("Could not transform string to aspect ratio");
663 gst_value_set_fraction(v4l2object->par, 1, 1);
664 }
665
666 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set PAR to %d/%d",
667 gst_value_get_fraction_numerator(v4l2object->par),
668 gst_value_get_fraction_denominator(v4l2object->par));
669 break;
670 case PROP_FORCE_ASPECT_RATIO:
671 v4l2object->keep_aspect = g_value_get_boolean(value);
672 break;
673 case PROP_DUMP_FRAME_LOCATION:
674 g_free(v4l2object->dumpframefile);
675 v4l2object->dumpframefile = g_value_dup_string(value);
676 break;
    case PROP_STREAM_MODE:
        v4l2object->stream_mode = g_value_get_boolean(value);
        break;
    default:
681 return FALSE;
682 break;
683 }
684 return TRUE;
685}
686
687gboolean
688gst_aml_v4l2_object_get_property_helper(GstAmlV4l2Object *v4l2object,
689 guint prop_id, GValue *value, GParamSpec *pspec)
690{
691 switch (prop_id)
692 {
693 case PROP_DEVICE:
694 g_value_set_string(value, v4l2object->videodev);
695 break;
696 case PROP_DEVICE_NAME:
697 {
698 const guchar *name = NULL;
699
700 if (GST_AML_V4L2_IS_OPEN(v4l2object))
701 name = v4l2object->vcap.card;
702
703 g_value_set_string(value, (gchar *)name);
704 break;
705 }
706 case PROP_DEVICE_FD:
707 {
708 if (GST_AML_V4L2_IS_OPEN(v4l2object))
709 g_value_set_int(value, v4l2object->video_fd);
710 else
711 g_value_set_int(value, DEFAULT_PROP_DEVICE_FD);
712 break;
713 }
714 case PROP_FLAGS:
715 {
716 guint flags = 0;
717
718 if (GST_AML_V4L2_IS_OPEN(v4l2object))
719 {
720 flags |= v4l2object->device_caps &
721 (V4L2_CAP_VIDEO_CAPTURE |
722 V4L2_CAP_VIDEO_OUTPUT |
723 V4L2_CAP_VIDEO_OVERLAY |
724 V4L2_CAP_VBI_CAPTURE |
725 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
726
727 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
728 flags |= V4L2_CAP_VIDEO_CAPTURE;
729
730 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
731 flags |= V4L2_CAP_VIDEO_OUTPUT;
732 }
733 g_value_set_flags(value, flags);
734 break;
735 }
736 case PROP_BRIGHTNESS:
737 case PROP_CONTRAST:
738 case PROP_SATURATION:
739 case PROP_HUE:
740 {
741 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
742
743 if (cid != -1)
744 {
745 if (GST_AML_V4L2_IS_OPEN(v4l2object))
746 {
747 gint v;
748 if (gst_aml_v4l2_get_attribute(v4l2object, cid, &v))
749 {
750 g_value_set_int(value, v);
751 }
752 }
753 }
754 return TRUE;
755 }
756 break;
757 case PROP_IO_MODE:
758 g_value_set_enum(value, v4l2object->req_mode);
759 break;
760 case PROP_CAPTURE_IO_MODE:
761 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
762 g_value_set_enum(value, v4l2object->req_mode);
763 break;
764 case PROP_OUTPUT_IO_MODE:
765 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
766 g_value_set_enum(value, v4l2object->req_mode);
767 break;
768 case PROP_EXTRA_CONTROLS:
769 gst_value_set_structure(value, v4l2object->extra_controls);
770 break;
771 case PROP_PIXEL_ASPECT_RATIO:
772 if (v4l2object->par)
773 g_value_transform(v4l2object->par, value);
774 break;
775 case PROP_FORCE_ASPECT_RATIO:
776 g_value_set_boolean(value, v4l2object->keep_aspect);
777 break;
778 case PROP_DUMP_FRAME_LOCATION:
779 g_value_set_string(value, v4l2object->dumpframefile);
780 break;
    case PROP_STREAM_MODE:
        g_value_set_boolean(value, v4l2object->stream_mode);
        break;
    default:
785 return FALSE;
786 break;
787 }
788 return TRUE;
789}
790
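/* Query the driver for the minimum number of buffers it requires on the
 * current queue (V4L2_CID_MIN_BUFFERS_FOR_OUTPUT/CAPTURE); falls back to 0
 * when the control is not implemented. */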
791static void
792gst_aml_v4l2_get_driver_min_buffers(GstAmlV4l2Object *v4l2object)
793{
794 struct v4l2_control control = {
795 0,
796 };
797
798 g_return_if_fail(GST_AML_V4L2_IS_OPEN(v4l2object));
799
800 if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
801 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
802 else
803 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
804
805 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0)
806 {
807 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
808 "driver requires a minimum of %d buffers", control.value);
809 v4l2object->min_buffers = control.value;
810 }
811 else
812 {
813 v4l2object->min_buffers = 0;
814 }
815}
816
817gboolean
818gst_aml_v4l2_object_open(GstAmlV4l2Object *v4l2object)
819{
820 if (!gst_aml_v4l2_open(v4l2object))
821 return FALSE;
822
823 return TRUE;
824}
825
826gboolean
827gst_aml_v4l2_object_open_shared(GstAmlV4l2Object *v4l2object, GstAmlV4l2Object *other)
828{
829 gboolean ret;
830
831 ret = gst_aml_v4l2_dup(v4l2object, other);
832
833 if (ret && !V4L2_TYPE_IS_OUTPUT(v4l2object->type))
834 {
835 gst_poll_fd_init(&v4l2object->pollfd);
836 v4l2object->pollfd.fd = v4l2object->video_fd;
837 gst_poll_add_fd(v4l2object->poll, &v4l2object->pollfd);
838 /* used for dequeue event */
839 gst_poll_fd_ctl_read(v4l2object->poll, &v4l2object->pollfd, TRUE);
840 gst_poll_fd_ctl_pri(v4l2object->poll, &v4l2object->pollfd, TRUE);
841 }
842
843 return ret;
844}
845
846gboolean
847gst_aml_v4l2_object_close(GstAmlV4l2Object *v4l2object)
848{
849 if (!gst_aml_v4l2_close(v4l2object))
850 return FALSE;
851
852 gst_caps_replace(&v4l2object->probed_caps, NULL);
853
854 /* reset our copy of the device caps */
855 v4l2object->device_caps = 0;
856
857 if (v4l2object->formats)
858 {
859 gst_aml_v4l2_object_clear_format_list(v4l2object);
860 }
861
862 if (v4l2object->par)
863 {
864 g_value_unset(v4l2object->par);
865 g_free(v4l2object->par);
866 v4l2object->par = NULL;
867 }
868
869 if (v4l2object->channel)
870 {
871 g_free(v4l2object->channel);
872 v4l2object->channel = NULL;
873 }
874
875 return TRUE;
876}
877
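/* Find the driver-enumerated v4l2_fmtdesc matching a fourcc; the JPEG
 * variants (JPEG/MJPEG/PJPG) are treated as interchangeable. */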
878static struct v4l2_fmtdesc *
879gst_aml_v4l2_object_get_format_from_fourcc(GstAmlV4l2Object *v4l2object,
880 guint32 fourcc)
881{
882 struct v4l2_fmtdesc *fmt;
883 GSList *walk;
884
885 if (fourcc == 0)
886 return NULL;
887
888 walk = gst_aml_v4l2_object_get_format_list(v4l2object);
889 while (walk)
890 {
891 fmt = (struct v4l2_fmtdesc *)walk->data;
892 if (fmt->pixelformat == fourcc)
893 return fmt;
894 /* special case for jpeg */
895 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
896 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
897 fmt->pixelformat == V4L2_PIX_FMT_PJPG)
898 {
899 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
900 fourcc == V4L2_PIX_FMT_PJPG)
901 {
902 return fmt;
903 }
904 }
905 walk = g_slist_next(walk);
906 }
907
908 return NULL;
909}
910
911/* complete made up ranking, the values themselves are meaningless */
912/* These ranks MUST be X such that X<<15 fits on a signed int - see
913 the comment at the end of gst_aml_v4l2_object_format_get_rank. */
914#define YUV_BASE_RANK 1000
915#define JPEG_BASE_RANK 500
916#define DV_BASE_RANK 200
917#define RGB_BASE_RANK 100
918#define YUV_ODD_BASE_RANK 50
919#define RGB_ODD_BASE_RANK 25
920#define BAYER_BASE_RANK 15
921#define S910_BASE_RANK 10
922#define GREY_BASE_RANK 5
923#define PWC_BASE_RANK 1
924
925static gint
926gst_aml_v4l2_object_format_get_rank(const struct v4l2_fmtdesc *fmt)
927{
928 guint32 fourcc = fmt->pixelformat;
929 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
930 gint rank = 0;
931
932 switch (fourcc)
933 {
934 case V4L2_PIX_FMT_MJPEG:
935 case V4L2_PIX_FMT_PJPG:
936 rank = JPEG_BASE_RANK;
937 break;
938 case V4L2_PIX_FMT_JPEG:
939 rank = JPEG_BASE_RANK + 1;
940 break;
941 case V4L2_PIX_FMT_MPEG: /* MPEG */
942 rank = JPEG_BASE_RANK + 2;
943 break;
944
945 case V4L2_PIX_FMT_RGB332:
946 case V4L2_PIX_FMT_ARGB555:
947 case V4L2_PIX_FMT_XRGB555:
948 case V4L2_PIX_FMT_RGB555:
949 case V4L2_PIX_FMT_ARGB555X:
950 case V4L2_PIX_FMT_XRGB555X:
951 case V4L2_PIX_FMT_RGB555X:
952 case V4L2_PIX_FMT_BGR666:
953 case V4L2_PIX_FMT_RGB565:
954 case V4L2_PIX_FMT_RGB565X:
955 case V4L2_PIX_FMT_RGB444:
956 case V4L2_PIX_FMT_Y4:
957 case V4L2_PIX_FMT_Y6:
958 case V4L2_PIX_FMT_Y10:
959 case V4L2_PIX_FMT_Y12:
960 case V4L2_PIX_FMT_Y10BPACK:
961 case V4L2_PIX_FMT_YUV555:
962 case V4L2_PIX_FMT_YUV565:
963 case V4L2_PIX_FMT_YUV32:
964 case V4L2_PIX_FMT_NV12MT_16X16:
965 case V4L2_PIX_FMT_NV42:
966 case V4L2_PIX_FMT_H264_MVC:
967 rank = RGB_ODD_BASE_RANK;
968 break;
969
970 case V4L2_PIX_FMT_RGB24:
971 case V4L2_PIX_FMT_BGR24:
972 rank = RGB_BASE_RANK - 1;
973 break;
974
975 case V4L2_PIX_FMT_RGB32:
976 case V4L2_PIX_FMT_BGR32:
977 case V4L2_PIX_FMT_ABGR32:
978 case V4L2_PIX_FMT_XBGR32:
979 case V4L2_PIX_FMT_ARGB32:
980 case V4L2_PIX_FMT_XRGB32:
981 rank = RGB_BASE_RANK;
982 break;
983
984 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
985 rank = GREY_BASE_RANK;
986 break;
987
988 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
989 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
990 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
991 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
992 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
993 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
994 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
995 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
996 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
997 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
998 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
999 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1000 rank = YUV_ODD_BASE_RANK;
1001 break;
1002
1003 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
1004 rank = YUV_BASE_RANK + 3;
1005 break;
1006 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
1007 rank = YUV_BASE_RANK + 2;
1008 break;
1009 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
1010 case V4L2_PIX_FMT_YUV420M:
1011 rank = YUV_BASE_RANK + 7;
1012 break;
1013 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
1014 rank = YUV_BASE_RANK + 10;
1015 break;
1016 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
1017 rank = YUV_BASE_RANK + 6;
1018 break;
1019 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
1020 rank = YUV_BASE_RANK + 9;
1021 break;
1022 case V4L2_PIX_FMT_YUV444:
1023 rank = YUV_BASE_RANK + 6;
1024 break;
1025 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
1026 rank = YUV_BASE_RANK + 5;
1027 break;
1028 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
1029 rank = YUV_BASE_RANK + 4;
1030 break;
1031 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
1032 rank = YUV_BASE_RANK + 8;
1033 break;
1034
1035 case V4L2_PIX_FMT_DV:
1036 rank = DV_BASE_RANK;
1037 break;
1038
1039 case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
1040 rank = 0;
1041 break;
1042
1043 case V4L2_PIX_FMT_SBGGR8:
1044 case V4L2_PIX_FMT_SGBRG8:
1045 case V4L2_PIX_FMT_SGRBG8:
1046 case V4L2_PIX_FMT_SRGGB8:
1047 rank = BAYER_BASE_RANK;
1048 break;
1049
1050 case V4L2_PIX_FMT_SN9C10X:
1051 rank = S910_BASE_RANK;
1052 break;
1053
1054 case V4L2_PIX_FMT_PWC1:
1055 rank = PWC_BASE_RANK;
1056 break;
1057 case V4L2_PIX_FMT_PWC2:
1058 rank = PWC_BASE_RANK;
1059 break;
1060
1061 default:
1062 rank = 0;
1063 break;
1064 }
1065
1066 /* All ranks are below 1<<15 so a shift by 15
1067 * will a) make all non-emulated formats larger
1068 * than emulated and b) will not overflow
1069 */
1070 if (!emulated)
1071 rank <<= 15;
1072
1073 return rank;
1074}
1075
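/* GCompareFunc for the format list: orders entries by descending rank so the
 * preferred formats end up first in the probed caps. */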
1076static gint
1077format_cmp_func(gconstpointer a, gconstpointer b)
1078{
1079 const struct v4l2_fmtdesc *fa = a;
1080 const struct v4l2_fmtdesc *fb = b;
1081
1082 if (fa->pixelformat == fb->pixelformat)
1083 return 0;
1084
1085 return gst_aml_v4l2_object_format_get_rank(fb) -
1086 gst_aml_v4l2_object_format_get_rank(fa);
1087}
1088
1089/******************************************************
1090 * gst_aml_v4l2_object_fill_format_list():
1091 * create list of supported capture formats
1092 * return value: TRUE on success, FALSE on error
1093 ******************************************************/
1094static gboolean
1095gst_aml_v4l2_object_fill_format_list(GstAmlV4l2Object *v4l2object,
1096 enum v4l2_buf_type type)
1097{
1098 gint n;
1099 struct v4l2_fmtdesc *format;
1100
1101 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "getting src format enumerations");
1102
1103 /* format enumeration */
1104 for (n = 0;; n++)
1105 {
1106 format = g_new0(struct v4l2_fmtdesc, 1);
1107
1108 format->index = n;
1109 format->type = type;
1110
1111 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0)
1112 {
1113 if (errno == EINVAL)
1114 {
1115 g_free(format);
1116 break; /* end of enumeration */
1117 }
1118 else
1119 {
1120 goto failed;
1121 }
1122 }
1123
1124 GST_LOG_OBJECT(v4l2object->dbg_obj, "index: %u", format->index);
1125 GST_LOG_OBJECT(v4l2object->dbg_obj, "type: %d", format->type);
1126 GST_LOG_OBJECT(v4l2object->dbg_obj, "flags: %08x", format->flags);
1127 GST_LOG_OBJECT(v4l2object->dbg_obj, "description: '%s'",
1128 format->description);
1129 GST_LOG_OBJECT(v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1130 GST_FOURCC_ARGS(format->pixelformat));
1131

        if (V4L2_PIX_FMT_YUV420M == format->pixelformat || V4L2_PIX_FMT_YUV420 == format->pixelformat)
        {
            GST_LOG_OBJECT(v4l2object->dbg_obj, "aml v4l2 driver doesn't really support YU12 and YM12, ignore it");
            /* the descriptor is not kept in the list, so release it */
            g_free(format);
            continue;
        }

        /* sort formats according to our preference; we do this, because caps
1140 * are probed in the order the formats are in the list, and the order of
1141 * formats in the final probed caps matters for things like fixation */
1142 v4l2object->formats = g_slist_insert_sorted(v4l2object->formats, format,
1143 (GCompareFunc)format_cmp_func);
1144 }
1145
1146#ifndef GST_DISABLE_GST_DEBUG
1147 {
1148 GSList *l;
1149
1150 GST_INFO_OBJECT(v4l2object->dbg_obj, "got %d format(s):", n);
1151 for (l = v4l2object->formats; l != NULL; l = l->next)
1152 {
1153 format = l->data;
1154
1155 GST_INFO_OBJECT(v4l2object->dbg_obj,
1156 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS(format->pixelformat),
1157 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1158 }
1159 }
1160#endif
1161
1162 return TRUE;
1163
1164 /* ERRORS */
1165failed:
1166{
1167 g_free(format);
1168
1169 if (v4l2object->element)
1170 return FALSE;
1171
1172 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
1173 (_("Failed to enumerate possible video formats device '%s' can work "
1174 "with"),
1175 v4l2object->videodev),
1176 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1177 n, v4l2object->videodev, errno, g_strerror(errno)));
1178
1179 return FALSE;
1180}
1181}
1182
1183/*
1184 * Get the list of supported capture formats, a list of
1185 * <code>struct v4l2_fmtdesc</code>.
1186 */
1187static GSList *
1188gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object)
1189{
1190 if (!v4l2object->formats)
1191 {
1192
1193 /* check usual way */
1194 gst_aml_v4l2_object_fill_format_list(v4l2object, v4l2object->type);
1195
1196 /* if our driver supports multi-planar
1197 * and if formats are still empty then we can workaround driver bug
1198 * by also looking up formats as if our device was not supporting
1199 * multiplanar */
1200 if (!v4l2object->formats)
1201 {
1202 switch (v4l2object->type)
1203 {
1204 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1205 gst_aml_v4l2_object_fill_format_list(v4l2object,
1206 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1207 break;
1208
1209 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1210 gst_aml_v4l2_object_fill_format_list(v4l2object,
1211 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1212 break;
1213
1214 default:
1215 break;
1216 }
1217 }
1218 }
1219 return v4l2object->formats;
1220}
1221
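/* Map a V4L2 fourcc to the matching GstVideoFormat; unmapped fourccs yield
 * GST_VIDEO_FORMAT_UNKNOWN. */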
1222static GstVideoFormat
1223gst_aml_v4l2_object_v4l2fourcc_to_video_format(guint32 fourcc)
1224{
1225 GstVideoFormat format;
1226
1227 switch (fourcc)
1228 {
1229 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1230 format = GST_VIDEO_FORMAT_GRAY8;
1231 break;
1232 case V4L2_PIX_FMT_Y16:
1233 format = GST_VIDEO_FORMAT_GRAY16_LE;
1234 break;
1235 case V4L2_PIX_FMT_Y16_BE:
1236 format = GST_VIDEO_FORMAT_GRAY16_BE;
1237 break;
1238 case V4L2_PIX_FMT_XRGB555:
1239 case V4L2_PIX_FMT_RGB555:
1240 format = GST_VIDEO_FORMAT_RGB15;
1241 break;
1242 case V4L2_PIX_FMT_XRGB555X:
1243 case V4L2_PIX_FMT_RGB555X:
1244 format = GST_VIDEO_FORMAT_BGR15;
1245 break;
1246 case V4L2_PIX_FMT_RGB565:
1247 format = GST_VIDEO_FORMAT_RGB16;
1248 break;
1249 case V4L2_PIX_FMT_RGB24:
1250 format = GST_VIDEO_FORMAT_RGB;
1251 break;
1252 case V4L2_PIX_FMT_BGR24:
1253 format = GST_VIDEO_FORMAT_BGR;
1254 break;
1255 case V4L2_PIX_FMT_XRGB32:
1256 case V4L2_PIX_FMT_RGB32:
1257 format = GST_VIDEO_FORMAT_xRGB;
1258 break;
1259 case V4L2_PIX_FMT_XBGR32:
1260 case V4L2_PIX_FMT_BGR32:
1261 format = GST_VIDEO_FORMAT_BGRx;
1262 break;
1263 case V4L2_PIX_FMT_ABGR32:
1264 format = GST_VIDEO_FORMAT_BGRA;
1265 break;
1266 case V4L2_PIX_FMT_ARGB32:
1267 format = GST_VIDEO_FORMAT_ARGB;
1268 break;
1269 case V4L2_PIX_FMT_NV12:
1270 case V4L2_PIX_FMT_NV12M:
1271 format = GST_VIDEO_FORMAT_NV12;
1272 break;
1273 case V4L2_PIX_FMT_NV12MT:
1274 format = GST_VIDEO_FORMAT_NV12_64Z32;
1275 break;
1276 case V4L2_PIX_FMT_NV21:
1277 case V4L2_PIX_FMT_NV21M:
1278 format = GST_VIDEO_FORMAT_NV21;
1279 break;
1280 case V4L2_PIX_FMT_YVU410:
1281 format = GST_VIDEO_FORMAT_YVU9;
1282 break;
1283 case V4L2_PIX_FMT_YUV410:
1284 format = GST_VIDEO_FORMAT_YUV9;
1285 break;
1286 case V4L2_PIX_FMT_YUV420:
1287 case V4L2_PIX_FMT_YUV420M:
1288 format = GST_VIDEO_FORMAT_I420;
1289 break;
1290 case V4L2_PIX_FMT_YUYV:
1291 format = GST_VIDEO_FORMAT_YUY2;
1292 break;
1293 case V4L2_PIX_FMT_YVU420:
1294 format = GST_VIDEO_FORMAT_YV12;
1295 break;
1296 case V4L2_PIX_FMT_UYVY:
1297 format = GST_VIDEO_FORMAT_UYVY;
1298 break;
1299 case V4L2_PIX_FMT_YUV411P:
1300 format = GST_VIDEO_FORMAT_Y41B;
1301 break;
1302 case V4L2_PIX_FMT_YUV422P:
1303 format = GST_VIDEO_FORMAT_Y42B;
1304 break;
1305 case V4L2_PIX_FMT_YVYU:
1306 format = GST_VIDEO_FORMAT_YVYU;
1307 break;
1308 case V4L2_PIX_FMT_NV16:
1309 case V4L2_PIX_FMT_NV16M:
1310 format = GST_VIDEO_FORMAT_NV16;
1311 break;
1312 case V4L2_PIX_FMT_NV61:
1313 case V4L2_PIX_FMT_NV61M:
1314 format = GST_VIDEO_FORMAT_NV61;
1315 break;
1316 case V4L2_PIX_FMT_NV24:
1317 format = GST_VIDEO_FORMAT_NV24;
1318 break;
1319 default:
1320 format = GST_VIDEO_FORMAT_UNKNOWN;
1321 break;
1322 }
1323
1324 return format;
1325}
1326
1327static gboolean
1328gst_amL_v4l2_object_v4l2fourcc_is_rgb(guint32 fourcc)
1329{
1330 gboolean ret = FALSE;
1331
1332 switch (fourcc)
1333 {
1334 case V4L2_PIX_FMT_XRGB555:
1335 case V4L2_PIX_FMT_RGB555:
1336 case V4L2_PIX_FMT_XRGB555X:
1337 case V4L2_PIX_FMT_RGB555X:
1338 case V4L2_PIX_FMT_RGB565:
1339 case V4L2_PIX_FMT_RGB24:
1340 case V4L2_PIX_FMT_BGR24:
1341 case V4L2_PIX_FMT_XRGB32:
1342 case V4L2_PIX_FMT_RGB32:
1343 case V4L2_PIX_FMT_XBGR32:
1344 case V4L2_PIX_FMT_BGR32:
1345 case V4L2_PIX_FMT_ABGR32:
1346 case V4L2_PIX_FMT_ARGB32:
1347 case V4L2_PIX_FMT_SBGGR8:
1348 case V4L2_PIX_FMT_SGBRG8:
1349 case V4L2_PIX_FMT_SGRBG8:
1350 case V4L2_PIX_FMT_SRGGB8:
1351 ret = TRUE;
1352 break;
1353 default:
1354 break;
1355 }
1356
1357 return ret;
1358}
1359
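/* Build the bare caps structure (media type and codec fields only, no
 * width/height/framerate) for a V4L2 fourcc; returns NULL for fourccs that
 * cannot be expressed as caps. */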
1360static GstStructure *
1361gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(guint32 fourcc)
1362{
1363 GstStructure *structure = NULL;
1364
1365 switch (fourcc)
1366 {
1367 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1368 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1369 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1370 structure = gst_structure_new_empty("image/jpeg");
1371 break;
1372 case V4L2_PIX_FMT_MPEG1:
1373 structure = gst_structure_new("video/mpeg",
1374 "mpegversion", G_TYPE_INT, 1, NULL);
        gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
        GST_DEBUG("aml set mpeg1 systemstream to false");
        break;
1378 case V4L2_PIX_FMT_MPEG2:
1379 structure = gst_structure_new("video/mpeg",
1380 "mpegversion", G_TYPE_INT, 2, NULL);
1381 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1382 GST_DEBUG("aml set mpeg2 systemstream to false");
1383 break;
1384 case V4L2_PIX_FMT_MPEG4:
1385 case V4L2_PIX_FMT_XVID:
1386 structure = gst_structure_new("video/mpeg",
1387 "mpegversion", G_TYPE_INT, 4, "systemstream",
1388 G_TYPE_BOOLEAN, FALSE, NULL);
1389 break;
1390 case V4L2_PIX_FMT_FWHT:
1391 structure = gst_structure_new_empty("video/x-fwht");
1392 break;
1393 case V4L2_PIX_FMT_H263:
1394 structure = gst_structure_new("video/x-h263",
1395 "variant", G_TYPE_STRING, "itu", NULL);
1396 break;
1397 case V4L2_PIX_FMT_H264: /* H.264 */
1398 structure = gst_structure_new("video/x-h264",
1399 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1400 G_TYPE_STRING, "au", NULL);
1401 break;
1402 case V4L2_PIX_FMT_H264_NO_SC:
1403 structure = gst_structure_new("video/x-h264",
1404 "stream-format", G_TYPE_STRING, "avc", "alignment",
1405 G_TYPE_STRING, "au", NULL);
1406 break;
1407 case V4L2_PIX_FMT_HEVC: /* H.265 */
1408 structure = gst_structure_new("video/x-h265",
1409 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1410 G_TYPE_STRING, "au", NULL);
1411 break;
1412 case V4L2_PIX_FMT_VC1_ANNEX_G:
1413 case V4L2_PIX_FMT_VC1_ANNEX_L:
1414 structure = gst_structure_new("video/x-wmv",
1415 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1416 break;
1417 case V4L2_PIX_FMT_VP8:
1418 structure = gst_structure_new_empty("video/x-vp8");
1419 break;
1420 case V4L2_PIX_FMT_VP9:
1421 structure = gst_structure_new_empty("video/x-vp9");
1422 break;
1423 case V4L2_PIX_FMT_AV1:
1424 structure = gst_structure_new_empty("video/x-av1");
1425 break;
1426 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1427 case V4L2_PIX_FMT_Y16:
1428 case V4L2_PIX_FMT_Y16_BE:
1429 case V4L2_PIX_FMT_XRGB555:
1430 case V4L2_PIX_FMT_RGB555:
1431 case V4L2_PIX_FMT_XRGB555X:
1432 case V4L2_PIX_FMT_RGB555X:
1433 case V4L2_PIX_FMT_RGB565:
1434 case V4L2_PIX_FMT_RGB24:
1435 case V4L2_PIX_FMT_BGR24:
1436 case V4L2_PIX_FMT_RGB32:
1437 case V4L2_PIX_FMT_XRGB32:
1438 case V4L2_PIX_FMT_ARGB32:
1439 case V4L2_PIX_FMT_BGR32:
1440 case V4L2_PIX_FMT_XBGR32:
1441 case V4L2_PIX_FMT_ABGR32:
1442 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1443 case V4L2_PIX_FMT_NV12M:
1444 case V4L2_PIX_FMT_NV12MT:
1445 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1446 case V4L2_PIX_FMT_NV21M:
1447 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1448 case V4L2_PIX_FMT_NV16M:
1449 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1450 case V4L2_PIX_FMT_NV61M:
1451 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1452 case V4L2_PIX_FMT_YVU410:
1453 case V4L2_PIX_FMT_YUV410:
1454 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1455 case V4L2_PIX_FMT_YUV420M:
1456 case V4L2_PIX_FMT_YUYV:
1457 case V4L2_PIX_FMT_YVU420:
1458 case V4L2_PIX_FMT_UYVY:
1459 case V4L2_PIX_FMT_YUV422P:
1460 case V4L2_PIX_FMT_YVYU:
1461 case V4L2_PIX_FMT_YUV411P:
1462 {
1463 GstVideoFormat format;
1464 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fourcc);
1465 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1466 structure = gst_structure_new("video/x-raw",
1467 "format", G_TYPE_STRING, gst_video_format_to_string(format), NULL);
1468 break;
1469 }
1470 case V4L2_PIX_FMT_DV:
1471 structure =
1472 gst_structure_new("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1473 NULL);
1474 break;
1475 case V4L2_PIX_FMT_MPEG: /* MPEG */
1476 structure = gst_structure_new("video/mpegts",
1477 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1478 break;
1479 case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
1480 break;
1481 case V4L2_PIX_FMT_SBGGR8:
1482 case V4L2_PIX_FMT_SGBRG8:
1483 case V4L2_PIX_FMT_SGRBG8:
1484 case V4L2_PIX_FMT_SRGGB8:
1485 structure = gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
1486 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" : fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg"
1487 : fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg"
1488 :
1489 /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb",
1490 NULL);
1491 break;
1492 case V4L2_PIX_FMT_SN9C10X:
1493 structure = gst_structure_new_empty("video/x-sonix");
1494 break;
1495 case V4L2_PIX_FMT_PWC1:
1496 structure = gst_structure_new_empty("video/x-pwc1");
1497 break;
1498 case V4L2_PIX_FMT_PWC2:
1499 structure = gst_structure_new_empty("video/x-pwc2");
1500 break;
1501 case V4L2_PIX_FMT_RGB332:
1502 case V4L2_PIX_FMT_BGR666:
1503 case V4L2_PIX_FMT_ARGB555X:
1504 case V4L2_PIX_FMT_RGB565X:
1505 case V4L2_PIX_FMT_RGB444:
1506 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1507 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1508 case V4L2_PIX_FMT_Y4:
1509 case V4L2_PIX_FMT_Y6:
1510 case V4L2_PIX_FMT_Y10:
1511 case V4L2_PIX_FMT_Y12:
1512 case V4L2_PIX_FMT_Y10BPACK:
1513 case V4L2_PIX_FMT_YUV444:
1514 case V4L2_PIX_FMT_YUV555:
1515 case V4L2_PIX_FMT_YUV565:
1516 case V4L2_PIX_FMT_Y41P:
1517 case V4L2_PIX_FMT_YUV32:
1518 case V4L2_PIX_FMT_NV12MT_16X16:
1519 case V4L2_PIX_FMT_NV42:
1520 case V4L2_PIX_FMT_H264_MVC:
1521 default:
1522 GST_DEBUG("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1523 fourcc, GST_FOURCC_ARGS(fourcc));
1524 break;
1525 }
1526
1527 return structure;
1528}
1529
1530GstStructure *
1531gst_aml_v4l2_object_v4l2fourcc_to_structure(guint32 fourcc)
1532{
1533 GstStructure *template;
1534 gint i;
1535
1536 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(fourcc);
1537
1538 if (template == NULL)
1539 goto done;
1540
1541 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1542 {
1543 if (gst_aml_v4l2_formats[i].format != fourcc)
1544 continue;
1545
1546 if (gst_aml_v4l2_formats[i].dimensions)
1547 {
1548 gst_structure_set(template,
1549 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1550 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1551 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1552 }
1553 break;
1554 }
1555
1556done:
1557 return template;
1558}
1559
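/* Build a caps set from the static format table, keeping only entries whose
 * flags match; RGB32/BGR32 additionally get an alpha-enabled alternative
 * structure appended. */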
1560static GstCaps *
1561gst_aml_v4l2_object_get_caps_helper(GstAmlV4L2FormatFlags flags)
1562{
1563 GstStructure *structure;
1564 GstCaps *caps;
1565 guint i;
1566
1567 caps = gst_caps_new_empty();
1568 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1569 {
1570
1571 if ((gst_aml_v4l2_formats[i].flags & flags) == 0)
1572 continue;
1573
1574 structure =
1575 gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(gst_aml_v4l2_formats[i].format);
1576
1577 if (structure)
1578 {
1579 GstStructure *alt_s = NULL;
1580
1581 if (gst_aml_v4l2_formats[i].dimensions)
1582 {
1583 gst_structure_set(structure,
1584 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1585 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1586 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1587 }
1588
1589 switch (gst_aml_v4l2_formats[i].format)
1590 {
1591 case V4L2_PIX_FMT_RGB32:
1592 alt_s = gst_structure_copy(structure);
1593 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1594 break;
1595 case V4L2_PIX_FMT_BGR32:
1596 alt_s = gst_structure_copy(structure);
1597 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1598 default:
1599 break;
1600 }
1601
1602 gst_caps_append_structure(caps, structure);
1603
1604 if (alt_s)
1605 gst_caps_append_structure(caps, alt_s);
1606 }
1607 }
1608
1609 return gst_caps_simplify(caps);
1610}
1611
1612GstCaps *
1613gst_aml_v4l2_object_get_all_caps(void)
1614{
1615 static GstCaps *caps = NULL;
1616
1617 if (g_once_init_enter(&caps))
1618 {
1619 GstCaps *all_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_ALL);
1620 GST_MINI_OBJECT_FLAG_SET(all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1621 g_once_init_leave(&caps, all_caps);
1622 }
1623
1624 return caps;
1625}
1626
1627GstCaps *
1628gst_aml_v4l2_object_get_raw_caps(void)
1629{
1630 static GstCaps *caps = NULL;
1631
1632 if (g_once_init_enter(&caps))
1633 {
1634 GstCaps *raw_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_RAW);
1635 GST_MINI_OBJECT_FLAG_SET(raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1636 g_once_init_leave(&caps, raw_caps);
1637 }
1638
1639 return caps;
1640}
1641
1642GstCaps *
1643gst_aml_v4l2_object_get_codec_caps(void)
1644{
1645 static GstCaps *caps = NULL;
1646
1647 if (g_once_init_enter(&caps))
1648 {
1649 GstCaps *codec_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_CODEC);
1650 GST_MINI_OBJECT_FLAG_SET(codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1651 g_once_init_leave(&caps, codec_caps);
1652 }
1653
1654 return caps;
1655}
1656
1657/* collect data for the given caps
1658 * @caps: given input caps
1659 * @format: location for the v4l format
1660 * @w/@h: location for width and height
1661 * @fps_n/@fps_d: location for framerate
1662 * @size: location for expected size of the frame or 0 if unknown
1663 */
1664static gboolean
1665gst_aml_v4l2_object_get_caps_info(GstAmlV4l2Object *v4l2object, GstCaps *caps,
1666 struct v4l2_fmtdesc **format, GstVideoInfo *info)
1667{
1668 GstStructure *structure;
1669 guint32 fourcc = 0, fourcc_nc = 0;
1670 const gchar *mimetype;
1671 struct v4l2_fmtdesc *fmt = NULL;
1672
    GST_DEBUG_OBJECT(v4l2object, "got caps: %" GST_PTR_FORMAT, caps);

    structure = gst_caps_get_structure(caps, 0);
1676
1677 mimetype = gst_structure_get_name(structure);
1678
1679 if (!gst_video_info_from_caps(info, caps))
1680 goto invalid_format;
1681
1682 if (g_str_equal(mimetype, "video/x-raw"))
1683 {
1684 switch (GST_VIDEO_INFO_FORMAT(info))
1685 {
1686 case GST_VIDEO_FORMAT_I420:
1687 fourcc = V4L2_PIX_FMT_YUV420;
1688 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1689 break;
1690 case GST_VIDEO_FORMAT_YUY2:
1691 fourcc = V4L2_PIX_FMT_YUYV;
1692 break;
1693 case GST_VIDEO_FORMAT_UYVY:
1694 fourcc = V4L2_PIX_FMT_UYVY;
1695 break;
1696 case GST_VIDEO_FORMAT_YV12:
1697 fourcc = V4L2_PIX_FMT_YVU420;
1698 break;
1699 case GST_VIDEO_FORMAT_Y41B:
1700 fourcc = V4L2_PIX_FMT_YUV411P;
1701 break;
1702 case GST_VIDEO_FORMAT_Y42B:
1703 fourcc = V4L2_PIX_FMT_YUV422P;
1704 break;
1705 case GST_VIDEO_FORMAT_NV12:
1706 fourcc = V4L2_PIX_FMT_NV12;
1707 fourcc_nc = V4L2_PIX_FMT_NV12M;
1708 break;
1709 case GST_VIDEO_FORMAT_NV12_64Z32:
1710 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1711 break;
1712 case GST_VIDEO_FORMAT_NV21:
1713 fourcc = V4L2_PIX_FMT_NV21;
1714 fourcc_nc = V4L2_PIX_FMT_NV21M;
1715 break;
1716 case GST_VIDEO_FORMAT_NV16:
1717 fourcc = V4L2_PIX_FMT_NV16;
1718 fourcc_nc = V4L2_PIX_FMT_NV16M;
1719 break;
1720 case GST_VIDEO_FORMAT_NV61:
1721 fourcc = V4L2_PIX_FMT_NV61;
1722 fourcc_nc = V4L2_PIX_FMT_NV61M;
1723 break;
1724 case GST_VIDEO_FORMAT_NV24:
1725 fourcc = V4L2_PIX_FMT_NV24;
1726 break;
1727 case GST_VIDEO_FORMAT_YVYU:
1728 fourcc = V4L2_PIX_FMT_YVYU;
1729 break;
1730 case GST_VIDEO_FORMAT_RGB15:
1731 fourcc = V4L2_PIX_FMT_RGB555;
1732 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1733 break;
1734 case GST_VIDEO_FORMAT_RGB16:
1735 fourcc = V4L2_PIX_FMT_RGB565;
1736 break;
1737 case GST_VIDEO_FORMAT_RGB:
1738 fourcc = V4L2_PIX_FMT_RGB24;
1739 break;
1740 case GST_VIDEO_FORMAT_BGR:
1741 fourcc = V4L2_PIX_FMT_BGR24;
1742 break;
1743 case GST_VIDEO_FORMAT_xRGB:
1744 fourcc = V4L2_PIX_FMT_RGB32;
1745 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1746 break;
1747 case GST_VIDEO_FORMAT_ARGB:
1748 fourcc = V4L2_PIX_FMT_RGB32;
1749 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1750 break;
1751 case GST_VIDEO_FORMAT_BGRx:
1752 fourcc = V4L2_PIX_FMT_BGR32;
1753 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1754 break;
1755 case GST_VIDEO_FORMAT_BGRA:
1756 fourcc = V4L2_PIX_FMT_BGR32;
1757 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1758 break;
1759 case GST_VIDEO_FORMAT_GRAY8:
1760 fourcc = V4L2_PIX_FMT_GREY;
1761 break;
1762 case GST_VIDEO_FORMAT_GRAY16_LE:
1763 fourcc = V4L2_PIX_FMT_Y16;
1764 break;
1765 case GST_VIDEO_FORMAT_GRAY16_BE:
1766 fourcc = V4L2_PIX_FMT_Y16_BE;
1767 break;
1768 case GST_VIDEO_FORMAT_BGR15:
1769 fourcc = V4L2_PIX_FMT_RGB555X;
1770 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1771 break;
1772 default:
1773 break;
1774 }
1775 }
1776 else
1777 {
1778 if (g_str_equal(mimetype, "video/mpegts"))
1779 {
1780 fourcc = V4L2_PIX_FMT_MPEG;
1781 }
1782 else if (g_str_equal(mimetype, "video/x-dv"))
1783 {
1784 fourcc = V4L2_PIX_FMT_DV;
1785 }
1786 else if (g_str_equal(mimetype, "image/jpeg"))
1787 {
1788 fourcc = V4L2_PIX_FMT_JPEG;
1789 }
1790 else if (g_str_equal(mimetype, "video/mpeg"))
1791 {
1792 gint version;
1793 if (gst_structure_get_int(structure, "mpegversion", &version))
1794 {
1795 switch (version)
1796 {
1797 case 1:
1798 fourcc = V4L2_PIX_FMT_MPEG1;
1799 break;
1800 case 2:
1801 fourcc = V4L2_PIX_FMT_MPEG2;
1802 break;
1803 case 4:
1804 fourcc = V4L2_PIX_FMT_MPEG4;
1805 fourcc_nc = V4L2_PIX_FMT_XVID;
1806 break;
1807 default:
1808 break;
1809 }
1810 }
1811 }
1812 else if (g_str_equal(mimetype, "video/x-fwht"))
1813 {
1814 fourcc = V4L2_PIX_FMT_FWHT;
1815 }
1816 else if (g_str_equal(mimetype, "video/x-h263"))
1817 {
1818 fourcc = V4L2_PIX_FMT_H263;
1819 }
1820 else if (g_str_equal(mimetype, "video/x-h264"))
1821 {
1822 const gchar *stream_format =
1823 gst_structure_get_string(structure, "stream-format");
1824 if (g_str_equal(stream_format, "avc"))
1825 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1826 else
1827 fourcc = V4L2_PIX_FMT_H264;
1828 }
1829 else if (g_str_equal(mimetype, "video/x-h265"))
1830 {
1831 fourcc = V4L2_PIX_FMT_HEVC;
1832 }
1833 else if (g_str_equal(mimetype, "video/x-vp8"))
1834 {
1835 fourcc = V4L2_PIX_FMT_VP8;
1836 }
1837 else if (g_str_equal(mimetype, "video/x-vp9"))
1838 {
1839 fourcc = V4L2_PIX_FMT_VP9;
1840 }
1841 else if (g_str_equal(mimetype, "video/x-av1"))
1842 {
1843 fourcc = V4L2_PIX_FMT_AV1;
1844 }
1845 else if (g_str_equal(mimetype, "video/x-bayer"))
1846 {
hanghang.luo3128f102022-08-18 10:36:19 +08001847 const gchar *vformat = gst_structure_get_string(structure, "format");
1848 if (vformat)
xuesong.jiangae1548e2022-05-06 16:38:46 +08001849 {
 1850 if (!g_ascii_strcasecmp(vformat, "bggr"))
 1851 fourcc = V4L2_PIX_FMT_SBGGR8;
 1852 else if (!g_ascii_strcasecmp(vformat, "gbrg"))
 1853 fourcc = V4L2_PIX_FMT_SGBRG8;
 1854 else if (!g_ascii_strcasecmp(vformat, "grbg"))
 1855 fourcc = V4L2_PIX_FMT_SGRBG8;
 1856 else if (!g_ascii_strcasecmp(vformat, "rggb"))
 1857 fourcc = V4L2_PIX_FMT_SRGGB8;
1858 }
1859 }
1860 else if (g_str_equal(mimetype, "video/x-sonix"))
1861 {
1862 fourcc = V4L2_PIX_FMT_SN9C10X;
1863 }
1864 else if (g_str_equal(mimetype, "video/x-pwc1"))
1865 {
1866 fourcc = V4L2_PIX_FMT_PWC1;
1867 }
1868 else if (g_str_equal(mimetype, "video/x-pwc2"))
1869 {
1870 fourcc = V4L2_PIX_FMT_PWC2;
1871 }
1872 }
1873
1874 /* Prefer the non-contiguous if supported */
1875 v4l2object->prefered_non_contiguous = TRUE;
1876
1877 if (fourcc_nc)
1878 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc_nc);
1879 else if (fourcc == 0)
1880 goto unhandled_format;
1881
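 /* If the non-contiguous fourcc was not found (or none is defined),
 * fall back to the contiguous fourcc */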
1882 if (fmt == NULL)
1883 {
1884 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc);
1885 v4l2object->prefered_non_contiguous = FALSE;
1886 }
1887
1888 if (fmt == NULL)
1889 goto unsupported_format;
1890
1891 *format = fmt;
1892
1893 return TRUE;
1894
1895 /* ERRORS */
1896invalid_format:
1897{
1898 GST_DEBUG_OBJECT(v4l2object, "invalid format");
1899 return FALSE;
1900}
1901unhandled_format:
1902{
1903 GST_DEBUG_OBJECT(v4l2object, "unhandled format");
1904 return FALSE;
1905}
1906unsupported_format:
1907{
1908 GST_DEBUG_OBJECT(v4l2object, "unsupported format");
1909 return FALSE;
1910}
1911}
1912
1913static gboolean
1914gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
1915 guint32 pixelformat, gint *width, gint *height);
1916
1917static void
1918gst_aml_v4l2_object_add_aspect_ratio(GstAmlV4l2Object *v4l2object, GstStructure *s)
1919{
1920 if (v4l2object->keep_aspect && v4l2object->par)
1921 gst_structure_set_value(s, "pixel-aspect-ratio", v4l2object->par);
1922}
1923
1924/* returns TRUE if the value was changed in place, otherwise FALSE */
1925static gboolean
1926gst_aml_v4l2src_value_simplify(GValue *val)
1927{
1928 /* simplify list of one value to one value */
1929 if (GST_VALUE_HOLDS_LIST(val) && gst_value_list_get_size(val) == 1)
1930 {
1931 const GValue *list_val;
1932 GValue new_val = G_VALUE_INIT;
1933
1934 list_val = gst_value_list_get_value(val, 0);
1935 g_value_init(&new_val, G_VALUE_TYPE(list_val));
1936 g_value_copy(list_val, &new_val);
1937 g_value_unset(val);
1938 *val = new_val;
1939 return TRUE;
1940 }
1941
1942 return FALSE;
1943}
1944
1945static gboolean
1946gst_aml_v4l2_object_get_interlace_mode(enum v4l2_field field,
1947 GstVideoInterlaceMode *interlace_mode)
1948{
1949 switch (field)
1950 {
1951 case V4L2_FIELD_ANY:
1952 GST_ERROR("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
1953 /* fallthrough */
1954 case V4L2_FIELD_NONE:
1955 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1956 return TRUE;
1957 case V4L2_FIELD_INTERLACED:
1958 case V4L2_FIELD_INTERLACED_TB:
1959 case V4L2_FIELD_INTERLACED_BT:
1960 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1961 return TRUE;
1962 default:
1963 GST_ERROR("Unknown enum v4l2_field %d", field);
1964 return FALSE;
1965 }
1966}
1967
1968static gboolean
1969gst_aml_v4l2_object_get_colorspace(struct v4l2_format *fmt,
1970 GstVideoColorimetry *cinfo)
1971{
1972 gboolean is_rgb =
1973 gst_amL_v4l2_object_v4l2fourcc_is_rgb(fmt->fmt.pix.pixelformat);
1974 enum v4l2_colorspace colorspace;
1975 enum v4l2_quantization range;
1976 enum v4l2_ycbcr_encoding matrix;
1977 enum v4l2_xfer_func transfer;
1978 gboolean ret = TRUE;
1979
1980 if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
1981 {
1982 colorspace = fmt->fmt.pix_mp.colorspace;
1983 range = fmt->fmt.pix_mp.quantization;
1984 matrix = fmt->fmt.pix_mp.ycbcr_enc;
1985 transfer = fmt->fmt.pix_mp.xfer_func;
1986 }
1987 else
1988 {
1989 colorspace = fmt->fmt.pix.colorspace;
1990 range = fmt->fmt.pix.quantization;
1991 matrix = fmt->fmt.pix.ycbcr_enc;
1992 transfer = fmt->fmt.pix.xfer_func;
1993 }
xuesong.jiange1a19662022-06-21 20:30:22 +08001994 GST_DEBUG("colorspace:%d, range:%d, matrix:%d, transfer:%d", colorspace, range, matrix, transfer);
1995 GST_DEBUG("cinfo update 1 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001996
1997 /* First step, set the defaults for each primaries */
1998 switch (colorspace)
1999 {
2000 case V4L2_COLORSPACE_SMPTE170M:
2001 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2002 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2003 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2004 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
2005 break;
2006 case V4L2_COLORSPACE_REC709:
2007 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2008 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2009 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2010 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
2011 break;
2012 case V4L2_COLORSPACE_SRGB:
2013 case V4L2_COLORSPACE_JPEG:
2014 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2015 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2016 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2017 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
2018 break;
2019 case V4L2_COLORSPACE_OPRGB:
2020 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2021 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2022 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2023 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
2024 break;
2025 case V4L2_COLORSPACE_BT2020:
2026 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2027 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2028 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2029 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
2030 break;
2031 case V4L2_COLORSPACE_SMPTE240M:
2032 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2033 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2034 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2035 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
2036 break;
2037 case V4L2_COLORSPACE_470_SYSTEM_M:
2038 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2039 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2040 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2041 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
2042 break;
2043 case V4L2_COLORSPACE_470_SYSTEM_BG:
2044 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2045 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2046 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2047 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
2048 break;
2049 case V4L2_COLORSPACE_RAW:
2050 /* Explicitly unknown */
2051 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2052 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2053 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2054 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
2055 break;
2056 default:
2057 GST_DEBUG("Unknown enum v4l2_colorspace %d", colorspace);
2058 ret = FALSE;
2059 break;
2060 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002061 GST_DEBUG("cinfo update 2 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002062
2063 if (!ret)
2064 goto done;
2065
2066 /* Second step, apply any custom variation */
2067 switch (range)
2068 {
2069 case V4L2_QUANTIZATION_FULL_RANGE:
2070 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2071 break;
2072 case V4L2_QUANTIZATION_LIM_RANGE:
2073 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2074 break;
2075 case V4L2_QUANTIZATION_DEFAULT:
2076 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2077 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2078 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2079 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601 || matrix == V4L2_YCBCR_ENC_XV709 || colorspace == V4L2_COLORSPACE_JPEG)
2080 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2081 else
2082 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2083 break;
2084 default:
2085 GST_WARNING("Unknown enum v4l2_quantization value %d", range);
2086 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2087 break;
2088 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002089 GST_DEBUG("cinfo update 3 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002090
2091 switch (matrix)
2092 {
2093 case V4L2_YCBCR_ENC_XV601:
2094 case V4L2_YCBCR_ENC_SYCC:
2095 GST_FIXME("XV601 and SYCC not defined, assuming 601");
2096 /* fallthrough */
2097 case V4L2_YCBCR_ENC_601:
2098 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2099 break;
2100 case V4L2_YCBCR_ENC_XV709:
2101 GST_FIXME("XV709 not defined, assuming 709");
2102 /* fallthrough */
2103 case V4L2_YCBCR_ENC_709:
2104 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2105 break;
2106 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2107 GST_FIXME("BT2020 with constant luma is not defined, assuming BT2020");
2108 /* fallthrough */
2109 case V4L2_YCBCR_ENC_BT2020:
2110 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2111 break;
2112 case V4L2_YCBCR_ENC_SMPTE240M:
2113 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2114 break;
2115 case V4L2_YCBCR_ENC_DEFAULT:
2116 /* nothing, just use defaults for colorspace */
2117 break;
2118 default:
2119 GST_WARNING("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2120 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2121 break;
2122 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002123 GST_DEBUG("cinfo update 4 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002124
2125 /* Set identity matrix for R'G'B' formats to avoid creating
2126 * confusion. This though is cosmetic as it's now properly ignored by
2127 * the video info API and videoconvert. */
2128 if (is_rgb)
2129 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2130
2131 switch (transfer)
2132 {
2133 case V4L2_XFER_FUNC_709:
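 /* Assume the 12-bit BT2020 transfer for BT2020 content at UHD heights,
 * otherwise fall back to BT.709 */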
2134 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2135 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2136 else
2137 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2138 break;
2139 case V4L2_XFER_FUNC_SRGB:
2140 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2141 break;
2142 case V4L2_XFER_FUNC_OPRGB:
2143 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2144 break;
2145 case V4L2_XFER_FUNC_SMPTE240M:
2146 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2147 break;
2148 case V4L2_XFER_FUNC_NONE:
2149 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2150 break;
2151 case V4L2_XFER_FUNC_DEFAULT:
2152 /* nothing, just use defaults for colorspace */
2153 break;
2154 default:
2155 GST_WARNING("Unknown enum v4l2_xfer_func value %d", transfer);
2156 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2157 break;
2158 }
xuesong.jiange1a19662022-06-21 20:30:22 +08002159 GST_DEBUG("cinfo update 5 time | range:%d, matrix:%d, transfer:%d, primaries:%d", cinfo->range, cinfo->matrix, cinfo->transfer, cinfo->primaries);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002160
2161done:
2162 return ret;
2163}
2164
2165static int
2166gst_aml_v4l2_object_try_fmt(GstAmlV4l2Object *v4l2object,
2167 struct v4l2_format *try_fmt)
2168{
2169 int fd = v4l2object->video_fd;
2170 struct v4l2_format fmt;
2171 int r;
2172
2173 memcpy(&fmt, try_fmt, sizeof(fmt));
2174 r = v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &fmt);
2175
2176 if (r < 0 && errno == ENOTTY)
2177 {
2178 /* The driver might not implement TRY_FMT, in which case we will try
2179 S_FMT to probe */
2180 if (GST_AML_V4L2_IS_ACTIVE(v4l2object))
2181 goto error;
2182
2183 memcpy(&fmt, try_fmt, sizeof(fmt));
2184 r = v4l2object->ioctl(fd, VIDIOC_S_FMT, &fmt);
2185 }
2186 memcpy(try_fmt, &fmt, sizeof(fmt));
2187
2188 return r;
2189
2190error:
2191 memcpy(try_fmt, &fmt, sizeof(fmt));
2192 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2193 "Unable to try format: %s", g_strerror(errno));
2194 return r;
2195}
2196
2197static void
2198gst_aml_v4l2_object_add_interlace_mode(GstAmlV4l2Object *v4l2object,
2199 GstStructure *s, guint32 width, guint32 height, guint32 pixelformat)
2200{
2201 struct v4l2_format fmt;
2202 GValue interlace_formats = {
2203 0,
2204 };
2205 enum v4l2_field formats[] = {V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED};
2206 gsize i;
2207 GstVideoInterlaceMode interlace_mode, prev = -1;
2208
2209 if (!g_str_equal(gst_structure_get_name(s), "video/x-raw"))
2210 return;
2211
2212 if (v4l2object->never_interlaced)
2213 {
2214 gst_structure_set(s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2215 return;
2216 }
2217
2218 g_value_init(&interlace_formats, GST_TYPE_LIST);
2219
2220 /* Try twice - once for NONE, once for INTERLACED. */
2221 for (i = 0; i < G_N_ELEMENTS(formats); i++)
2222 {
2223 memset(&fmt, 0, sizeof(fmt));
2224 fmt.type = v4l2object->type;
2225 fmt.fmt.pix.width = width;
2226 fmt.fmt.pix.height = height;
2227 fmt.fmt.pix.pixelformat = pixelformat;
2228 fmt.fmt.pix.field = formats[i];
2229
2230 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0 &&
2231 gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode) && prev != interlace_mode)
2232 {
2233 GValue interlace_enum = {
2234 0,
2235 };
2236 const gchar *mode_string;
2237 g_value_init(&interlace_enum, G_TYPE_STRING);
2238 mode_string = gst_video_interlace_mode_to_string(interlace_mode);
2239 g_value_set_string(&interlace_enum, mode_string);
2240 gst_value_list_append_and_take_value(&interlace_formats,
2241 &interlace_enum);
2242 prev = interlace_mode;
2243 }
2244 }
2245
2246 if (gst_aml_v4l2src_value_simplify(&interlace_formats) || gst_value_list_get_size(&interlace_formats) > 0)
2247 gst_structure_take_value(s, "interlace-mode", &interlace_formats);
2248 else
 2249 GST_WARNING_OBJECT(v4l2object->dbg_obj, "Failed to determine interlace mode");
2250
2251 return;
2252}
2253
2254static void
2255gst_aml_v4l2_object_fill_colorimetry_list(GValue *list,
2256 GstVideoColorimetry *cinfo)
2257{
2258 GValue colorimetry = G_VALUE_INIT;
2259 guint size;
2260 guint i;
2261 gboolean found = FALSE;
2262
2263 g_value_init(&colorimetry, G_TYPE_STRING);
2264 g_value_take_string(&colorimetry, gst_video_colorimetry_to_string(cinfo));
xuesong.jiange1a19662022-06-21 20:30:22 +08002265 GST_DEBUG("fill colorimetry:%s into list", gst_video_colorimetry_to_string(cinfo));
xuesong.jiangae1548e2022-05-06 16:38:46 +08002266
2267 /* only insert if no duplicate */
2268 size = gst_value_list_get_size(list);
2269 for (i = 0; i < size; i++)
2270 {
2271 const GValue *tmp;
2272
2273 tmp = gst_value_list_get_value(list, i);
2274 if (gst_value_compare(&colorimetry, tmp) == GST_VALUE_EQUAL)
2275 {
2276 found = TRUE;
2277 break;
2278 }
2279 }
2280
2281 if (!found)
2282 gst_value_list_append_and_take_value(list, &colorimetry);
2283 else
2284 g_value_unset(&colorimetry);
2285}
2286
2287static void
2288gst_aml_v4l2_object_add_colorspace(GstAmlV4l2Object *v4l2object, GstStructure *s,
2289 guint32 width, guint32 height, guint32 pixelformat)
2290{
2291 struct v4l2_format fmt;
2292 GValue list = G_VALUE_INIT;
2293 GstVideoColorimetry cinfo;
2294 enum v4l2_colorspace req_cspace;
2295
2296 memset(&fmt, 0, sizeof(fmt));
2297 fmt.type = v4l2object->type;
2298 fmt.fmt.pix.width = width;
2299 fmt.fmt.pix.height = height;
2300 fmt.fmt.pix.pixelformat = pixelformat;
2301
2302 g_value_init(&list, GST_TYPE_LIST);
2303
2304 /* step 1: get device default colorspace and insert it first as
2305 * it should be the preferred one */
xuesong.jiange1a19662022-06-21 20:30:22 +08002306 GST_DEBUG("try for pixel format");
xuesong.jiangae1548e2022-05-06 16:38:46 +08002307 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2308 {
2309 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2310 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2311 }
2312
2313 /* step 2: probe all colorspace other than default
2314 * We don't probe all colorspace, range, matrix and transfer combination to
2315 * avoid ioctl flooding which could greatly increase initialization time
2316 * with low-speed devices (UVC...) */
2317 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2318 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++)
2319 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002320 GST_DEBUG("try for pixel format in colorspace loop: %d", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002321 /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2322 if (req_cspace == V4L2_COLORSPACE_BT878)
2323 continue;
2324
2325 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2326 fmt.fmt.pix_mp.colorspace = req_cspace;
2327 else
2328 fmt.fmt.pix.colorspace = req_cspace;
2329
2330 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2331 {
xuesong.jiange1a19662022-06-21 20:30:22 +08002332 GST_DEBUG("try for pixel format in colorspace loop: %d succeeded", req_cspace);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002333 enum v4l2_colorspace colorspace;
2334
2335 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2336 colorspace = fmt.fmt.pix_mp.colorspace;
2337 else
2338 colorspace = fmt.fmt.pix.colorspace;
2339
2340 if (colorspace == req_cspace)
2341 {
2342 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2343 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2344 }
2345 }
2346 }
2347
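 /* Append a fixed set of colorimetry combinations (values are the
 * GstVideoColorRange/Matrix/Transfer/Primaries enums) so that HDR caps
 * can still negotiate when the TRY_FMT probing above does not report them */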
xuesong.jiang7b0882c2022-06-22 14:10:30 +08002348 GST_DEBUG("deal: caps with colorimetry 2,3,14,7");
xuesong.jiange1a19662022-06-21 20:30:22 +08002349 cinfo.range = 2;
2350 cinfo.matrix = 3;
2351 cinfo.transfer = 14;
2352 cinfo.primaries = 7;
2353 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2354
xuesong.jiang7b0882c2022-06-22 14:10:30 +08002355 GST_DEBUG("deal: caps with colorimetry 2,6,13,7");
2356 cinfo.range = 2;
2357 cinfo.matrix = 6;
2358 cinfo.transfer = 13;
2359 cinfo.primaries = 7;
2360 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2361
xuesong.jiang5c9aca72022-07-12 16:29:24 +08002362 GST_DEBUG("deal: caps with colorimetry 2,6,14,7");
2363 cinfo.range = 2;
2364 cinfo.matrix = 6;
2365 cinfo.transfer = 14;
2366 cinfo.primaries = 7;
2367 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2368
fei.dengccc89632022-07-15 19:10:17 +08002369 GST_DEBUG("deal: caps with colorimetry 2,6,0,7");
2370 cinfo.range = 2;
2371 cinfo.matrix = 6;
2372 cinfo.transfer = 0;
2373 cinfo.primaries = 7;
2374 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2375
fei.dengca85b052022-07-19 14:49:23 +08002376 GST_DEBUG("deal: caps with colorimetry 0,6,0,7");
2377 cinfo.range = 0;
2378 cinfo.matrix = 6;
2379 cinfo.transfer = 0;
2380 cinfo.primaries = 7;
2381 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2382
sheng.liua326d202022-07-20 14:15:34 +08002383 GST_DEBUG("deal: caps with colorimetry 2,3,0,0");
2384 cinfo.range = 2;
2385 cinfo.matrix = 3;
2386 cinfo.transfer = 0;
2387 cinfo.primaries = 0;
2388 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2389
xuesong.jiangae1548e2022-05-06 16:38:46 +08002390 if (gst_value_list_get_size(&list) > 0)
2391 gst_structure_take_value(s, "colorimetry", &list);
2392 else
2393 g_value_unset(&list);
2394
2395 return;
2396}
2397
2398/* The frame interval enumeration code first appeared in Linux 2.6.19. */
2399static GstStructure *
2400gst_aml_v4l2_object_probe_caps_for_format_and_size(GstAmlV4l2Object *v4l2object,
2401 guint32 pixelformat,
2402 guint32 width, guint32 height, const GstStructure *template)
2403{
2404 gint fd = v4l2object->video_fd;
2405 struct v4l2_frmivalenum ival;
2406 guint32 num, denom;
2407 GstStructure *s;
2408 GValue rates = {
2409 0,
2410 };
2411
2412 memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
2413 ival.index = 0;
2414 ival.pixel_format = pixelformat;
2415 ival.width = width;
2416 ival.height = height;
2417
2418 GST_LOG_OBJECT(v4l2object->dbg_obj,
2419 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2420 GST_FOURCC_ARGS(pixelformat));
2421
2422 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2423 * fraction to get framerate */
2424 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2425 goto enum_frameintervals_failed;
2426
2427 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
2428 {
2429 GValue rate = {
2430 0,
2431 };
2432
2433 g_value_init(&rates, GST_TYPE_LIST);
2434 g_value_init(&rate, GST_TYPE_FRACTION);
2435
2436 do
2437 {
2438 num = ival.discrete.numerator;
2439 denom = ival.discrete.denominator;
2440
2441 if (num > G_MAXINT || denom > G_MAXINT)
2442 {
2443 /* let us hope we don't get here... */
2444 num >>= 1;
2445 denom >>= 1;
2446 }
2447
2448 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2449 denom, num);
2450
2451 /* swap to get the framerate */
2452 gst_value_set_fraction(&rate, denom, num);
2453 gst_value_list_append_value(&rates, &rate);
2454
2455 ival.index++;
2456 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2457 }
2458 else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
2459 {
2460 GValue min = {
2461 0,
2462 };
2463 GValue step = {
2464 0,
2465 };
2466 GValue max = {
2467 0,
2468 };
2469 gboolean added = FALSE;
2470 guint32 minnum, mindenom;
2471 guint32 maxnum, maxdenom;
2472
2473 g_value_init(&rates, GST_TYPE_LIST);
2474
2475 g_value_init(&min, GST_TYPE_FRACTION);
2476 g_value_init(&step, GST_TYPE_FRACTION);
2477 g_value_init(&max, GST_TYPE_FRACTION);
2478
2479 /* get the min */
2480 minnum = ival.stepwise.min.numerator;
2481 mindenom = ival.stepwise.min.denominator;
2482 if (minnum > G_MAXINT || mindenom > G_MAXINT)
2483 {
2484 minnum >>= 1;
2485 mindenom >>= 1;
2486 }
2487 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2488 minnum, mindenom);
2489 gst_value_set_fraction(&min, minnum, mindenom);
2490
2491 /* get the max */
2492 maxnum = ival.stepwise.max.numerator;
2493 maxdenom = ival.stepwise.max.denominator;
2494 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2495 {
2496 maxnum >>= 1;
2497 maxdenom >>= 1;
2498 }
2499
2500 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2501 maxnum, maxdenom);
2502 gst_value_set_fraction(&max, maxnum, maxdenom);
2503
2504 /* get the step */
2505 num = ival.stepwise.step.numerator;
2506 denom = ival.stepwise.step.denominator;
2507 if (num > G_MAXINT || denom > G_MAXINT)
2508 {
2509 num >>= 1;
2510 denom >>= 1;
2511 }
2512
2513 if (num == 0 || denom == 0)
2514 {
2515 /* in this case we have a wrong fraction or no step, set the step to max
2516 * so that we only add the min value in the loop below */
2517 num = maxnum;
2518 denom = maxdenom;
2519 }
2520
2521 /* since we only have gst_value_fraction_subtract and not add, negate the
2522 * numerator */
2523 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2524 num, denom);
2525 gst_value_set_fraction(&step, -num, denom);
2526
2527 while (gst_value_compare(&min, &max) != GST_VALUE_GREATER_THAN)
2528 {
2529 GValue rate = {
2530 0,
2531 };
2532
2533 num = gst_value_get_fraction_numerator(&min);
2534 denom = gst_value_get_fraction_denominator(&min);
2535 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2536 denom, num);
2537
2538 /* invert to get the framerate */
2539 g_value_init(&rate, GST_TYPE_FRACTION);
2540 gst_value_set_fraction(&rate, denom, num);
2541 gst_value_list_append_value(&rates, &rate);
2542 added = TRUE;
2543
2544 /* we're actually adding because step was negated above. This is because
2545 * there is no _add function... */
2546 if (!gst_value_fraction_subtract(&min, &min, &step))
2547 {
2548 GST_WARNING_OBJECT(v4l2object->dbg_obj, "could not step fraction!");
2549 break;
2550 }
2551 }
2552 if (!added)
2553 {
2554 /* no range was added, leave the default range from the template */
2555 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2556 "no range added, leaving default");
2557 g_value_unset(&rates);
2558 }
2559 }
2560 else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
2561 {
2562 guint32 maxnum, maxdenom;
2563
2564 g_value_init(&rates, GST_TYPE_FRACTION_RANGE);
2565
2566 num = ival.stepwise.min.numerator;
2567 denom = ival.stepwise.min.denominator;
2568 if (num > G_MAXINT || denom > G_MAXINT)
2569 {
2570 num >>= 1;
2571 denom >>= 1;
2572 }
2573
2574 maxnum = ival.stepwise.max.numerator;
2575 maxdenom = ival.stepwise.max.denominator;
2576 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2577 {
2578 maxnum >>= 1;
2579 maxdenom >>= 1;
2580 }
2581
2582 GST_LOG_OBJECT(v4l2object->dbg_obj,
2583 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2584 num);
2585
2586 gst_value_set_fraction_range_full(&rates, maxdenom, maxnum, denom, num);
2587 }
2588 else
2589 {
2590 goto unknown_type;
2591 }
2592
2593return_data:
2594 s = gst_structure_copy(template);
2595 gst_structure_set(s, "width", G_TYPE_INT, (gint)width,
2596 "height", G_TYPE_INT, (gint)height, NULL);
2597
2598 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, s);
2599
2600 if (!v4l2object->skip_try_fmt_probes)
2601 {
2602 gst_aml_v4l2_object_add_interlace_mode(v4l2object, s, width, height,
2603 pixelformat);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08002604 // gst_aml_v4l2_object_add_colorspace(v4l2object, s, width, height, pixelformat);
xuesong.jiangae1548e2022-05-06 16:38:46 +08002605 }
2606
2607 if (G_IS_VALUE(&rates))
2608 {
2609 gst_aml_v4l2src_value_simplify(&rates);
2610 /* only change the framerate on the template when we have a valid probed new
2611 * value */
2612 gst_structure_take_value(s, "framerate", &rates);
2613 }
2614 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2615 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2616 {
2617 gst_structure_set(s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2618 1, NULL);
2619 }
2620 return s;
2621
2622 /* ERRORS */
2623enum_frameintervals_failed:
2624{
2625 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2626 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2627 GST_FOURCC_ARGS(pixelformat), width, height);
2628 goto return_data;
2629}
2630unknown_type:
2631{
2632 /* I don't see how this is actually an error, we ignore the format then */
2633 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2634 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2635 GST_FOURCC_ARGS(pixelformat), width, height, ival.type);
2636 return NULL;
2637}
2638}
2639
2640static gint
2641sort_by_frame_size(GstStructure *s1, GstStructure *s2)
2642{
2643 int w1, h1, w2, h2;
2644
2645 gst_structure_get_int(s1, "width", &w1);
2646 gst_structure_get_int(s1, "height", &h1);
2647 gst_structure_get_int(s2, "width", &w2);
2648 gst_structure_get_int(s2, "height", &h2);
2649
2650 /* I think it's safe to assume that this won't overflow for a while */
2651 return ((w2 * h2) - (w1 * h1));
2652}
2653
2654static void
2655gst_aml_v4l2_object_update_and_append(GstAmlV4l2Object *v4l2object,
2656 guint32 format, GstCaps *caps, GstStructure *s)
2657{
2658 GstStructure *alt_s = NULL;
2659
 2660 /* Encoded streams on the output buffer need to be parsed */
2661 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2662 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
2663 {
2664 gint i = 0;
2665
2666 for (; i < GST_AML_V4L2_FORMAT_COUNT; i++)
2667 {
2668 if (format == gst_aml_v4l2_formats[i].format &&
2669 gst_aml_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2670 !(gst_aml_v4l2_formats[i].flags & GST_V4L2_NO_PARSE))
2671 {
2672 gst_structure_set(s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2673 break;
2674 }
2675 }
2676 }
2677
2678 if (v4l2object->has_alpha_component &&
2679 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2680 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE))
2681 {
2682 switch (format)
2683 {
2684 case V4L2_PIX_FMT_RGB32:
2685 alt_s = gst_structure_copy(s);
2686 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2687 break;
2688 case V4L2_PIX_FMT_BGR32:
2689 alt_s = gst_structure_copy(s);
2690 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2691 break;
2692 default:
2693 break;
2694 }
2695 }
2696
2697 gst_caps_append_structure(caps, s);
2698
2699 if (alt_s)
2700 gst_caps_append_structure(caps, alt_s);
2701}
2702
2703static GstCaps *
2704gst_aml_v4l2_object_probe_caps_for_format(GstAmlV4l2Object *v4l2object,
2705 guint32 pixelformat, const GstStructure *template)
2706{
2707 GstCaps *ret = gst_caps_new_empty();
2708 GstStructure *tmp;
2709 gint fd = v4l2object->video_fd;
2710 struct v4l2_frmsizeenum size;
2711 GList *results = NULL;
2712 guint32 w, h;
2713
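 /* The MPEG (transport stream) fourcc has no meaningful frame size,
 * so just return the template caps unchanged */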
2714 if (pixelformat == GST_MAKE_FOURCC('M', 'P', 'E', 'G'))
2715 {
2716 gst_caps_append_structure(ret, gst_structure_copy(template));
2717 return ret;
2718 }
2719
2720 memset(&size, 0, sizeof(struct v4l2_frmsizeenum));
2721 size.index = 0;
2722 size.pixel_format = pixelformat;
2723
2724 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2725 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2726 GST_FOURCC_ARGS(pixelformat));
2727
2728 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2729 goto enum_framesizes_failed;
2730
2731 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE)
2732 {
2733 do
2734 {
2735 GST_LOG_OBJECT(v4l2object->dbg_obj, "got discrete frame size %dx%d",
2736 size.discrete.width, size.discrete.height);
2737
2738 w = MIN(size.discrete.width, G_MAXINT);
2739 h = MIN(size.discrete.height, G_MAXINT);
2740
2741 if (w && h)
2742 {
2743 tmp =
2744 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2745 pixelformat, w, h, template);
2746
2747 if (tmp)
2748 results = g_list_prepend(results, tmp);
2749 }
2750
2751 size.index++;
2752 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2753 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2754 "done iterating discrete frame sizes");
2755 }
2756 else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
2757 {
2758 guint32 maxw, maxh, step_w, step_h;
2759
2760 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have stepwise frame sizes:");
2761 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2762 size.stepwise.min_width);
2763 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2764 size.stepwise.min_height);
2765 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2766 size.stepwise.max_width);
2767 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2768 size.stepwise.max_height);
2769 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step width: %d",
2770 size.stepwise.step_width);
2771 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step height: %d",
2772 size.stepwise.step_height);
2773
2774 w = MAX(size.stepwise.min_width, 1);
2775 h = MAX(size.stepwise.min_height, 1);
2776 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2777 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2778
hanghang.luo9edfc7d2023-05-17 07:01:05 +00002779 /* At this point the resolution range is only widened to pass caps
 2780 * negotiation; the actual resolution checks are done in
 2781 * gst_aml_v4l2_object_set_format_full().
 2782 */
2783 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "update maxw_maxh to MAX(maxw,maxh)_MAX(maxw,maxh)");
2784 maxh = MAX (maxw, maxh);
2785 maxw = maxh;
2786
xuesong.jiangae1548e2022-05-06 16:38:46 +08002787 step_w = MAX(size.stepwise.step_width, 1);
2788 step_h = MAX(size.stepwise.step_height, 1);
2789
2790 /* FIXME: check for sanity and that min/max are multiples of the steps */
2791
2792 /* we only query details for the max width/height since it's likely the
2793 * most restricted if there are any resolution-dependent restrictions */
2794 tmp = gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2795 pixelformat, maxw, maxh, template);
2796
2797 if (tmp)
2798 {
2799 GValue step_range = G_VALUE_INIT;
2800
2801 g_value_init(&step_range, GST_TYPE_INT_RANGE);
2802 gst_value_set_int_range_step(&step_range, w, maxw, step_w);
2803 gst_structure_set_value(tmp, "width", &step_range);
2804
2805 gst_value_set_int_range_step(&step_range, h, maxh, step_h);
2806 gst_structure_take_value(tmp, "height", &step_range);
2807
2808 /* no point using the results list here, since there's only one struct */
2809 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2810 }
2811 }
2812 else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)
2813 {
2814 guint32 maxw, maxh;
2815
2816 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have continuous frame sizes:");
2817 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2818 size.stepwise.min_width);
2819 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2820 size.stepwise.min_height);
2821 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2822 size.stepwise.max_width);
2823 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2824 size.stepwise.max_height);
2825
2826 w = MAX(size.stepwise.min_width, 1);
2827 h = MAX(size.stepwise.min_height, 1);
2828 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2829 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2830
2831 tmp =
2832 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object, pixelformat,
2833 w, h, template);
2834 if (tmp)
2835 {
2836 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, (gint)w,
2837 (gint)maxw, "height", GST_TYPE_INT_RANGE, (gint)h, (gint)maxh,
2838 NULL);
2839
2840 /* no point using the results list here, since there's only one struct */
2841 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2842 }
2843 }
2844 else
2845 {
2846 goto unknown_type;
2847 }
2848
2849 /* we use an intermediary list to store and then sort the results of the
2850 * probing because we can't make any assumptions about the order in which
2851 * the driver will give us the sizes, but we want the final caps to contain
2852 * the results starting with the highest resolution and having the lowest
2853 * resolution last, since order in caps matters for things like fixation. */
2854 results = g_list_sort(results, (GCompareFunc)sort_by_frame_size);
2855 while (results != NULL)
2856 {
2857 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret,
2858 results->data);
2859 results = g_list_delete_link(results, results);
2860 }
2861
2862 if (gst_caps_is_empty(ret))
2863 goto enum_framesizes_no_results;
2864
2865 return ret;
2866
2867 /* ERRORS */
2868enum_framesizes_failed:
2869{
2870 /* I don't see how this is actually an error */
2871 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2872 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2873 " (%s)",
2874 GST_FOURCC_ARGS(pixelformat), g_strerror(errno));
2875 goto default_frame_sizes;
2876}
2877enum_framesizes_no_results:
2878{
2879 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2880 * question doesn't actually support it yet */
2881 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2882 "No results for pixelformat %" GST_FOURCC_FORMAT
2883 " enumerating frame sizes, trying fallback",
2884 GST_FOURCC_ARGS(pixelformat));
2885 goto default_frame_sizes;
2886}
2887unknown_type:
2888{
2889 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2890 "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
2891 ": %u",
2892 GST_FOURCC_ARGS(pixelformat), size.type);
2893 goto default_frame_sizes;
2894}
2895
2896default_frame_sizes:
2897{
2898 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2899
2900 /* This code is for Linux < 2.6.19 */
2901 min_w = min_h = 1;
2902 max_w = max_h = GST_AML_V4L2_MAX_SIZE;
2903 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &min_w,
2904 &min_h))
2905 {
2906 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2907 "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2908 }
2909 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &max_w,
2910 &max_h))
2911 {
2912 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2913 "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2914 }
2915
2916 tmp = gst_structure_copy(template);
hanghang.luo3128f102022-08-18 10:36:19 +08002917#ifdef DELETE_FOR_LGE
xuesong.jiangae1548e2022-05-06 16:38:46 +08002918 if (fix_num)
2919 {
2920 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2921 fix_denom, NULL);
2922 }
hanghang.luo3128f102022-08-18 10:36:19 +08002923 else
2924#endif
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08002925 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2926 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
xuesong.jiangae1548e2022-05-06 16:38:46 +08002927 {
2928 /* if norm can't be used, copy the template framerate */
2929 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2930 G_MAXINT, 1, NULL);
2931 }
2932
2933 if (min_w == max_w)
2934 gst_structure_set(tmp, "width", G_TYPE_INT, max_w, NULL);
2935 else
2936 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2937
2938 if (min_h == max_h)
2939 gst_structure_set(tmp, "height", G_TYPE_INT, max_h, NULL);
2940 else
2941 gst_structure_set(tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2942
2943 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, tmp);
2944
2945 if (!v4l2object->skip_try_fmt_probes)
2946 {
2947 /* We could consider setting interlace mode from min and max. */
2948 gst_aml_v4l2_object_add_interlace_mode(v4l2object, tmp, max_w, max_h,
2949 pixelformat);
2950 /* We could consider to check colorspace for min too, in case it depends on
2951 * the size. But in this case, min and max could not be enough */
2952 gst_aml_v4l2_object_add_colorspace(v4l2object, tmp, max_w, max_h,
2953 pixelformat);
2954 }
2955
2956 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2957 return ret;
2958}
2959}
2960
2961static gboolean
2962gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
2963 guint32 pixelformat, gint *width, gint *height)
2964{
2965 struct v4l2_format fmt;
2966 gboolean ret = FALSE;
2967 GstVideoInterlaceMode interlace_mode;
2968
2969 g_return_val_if_fail(width != NULL, FALSE);
2970 g_return_val_if_fail(height != NULL, FALSE);
2971
2972 GST_LOG_OBJECT(v4l2object->dbg_obj,
2973 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2974 *width, *height, GST_FOURCC_ARGS(pixelformat));
2975
2976 memset(&fmt, 0, sizeof(struct v4l2_format));
2977
2978 /* get size delimiters */
2979 memset(&fmt, 0, sizeof(fmt));
2980 fmt.type = v4l2object->type;
2981 fmt.fmt.pix.width = *width;
2982 fmt.fmt.pix.height = *height;
2983 fmt.fmt.pix.pixelformat = pixelformat;
2984 fmt.fmt.pix.field = V4L2_FIELD_ANY;
2985
2986 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) < 0)
2987 goto error;
2988
2989 GST_LOG_OBJECT(v4l2object->dbg_obj,
2990 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2991
2992 *width = fmt.fmt.pix.width;
2993 *height = fmt.fmt.pix.height;
2994
2995 if (!gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode))
2996 {
2997 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2998 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2999 GST_FOURCC_ARGS(pixelformat), *width, *height, fmt.fmt.pix.field);
3000 goto error;
3001 }
3002
3003 ret = TRUE;
3004
3005error:
3006 if (!ret)
3007 {
3008 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3009 "Unable to try format: %s", g_strerror(errno));
3010 }
3011
3012 return ret;
3013}
3014
3015static gboolean
3016gst_aml_v4l2_object_is_dmabuf_supported(GstAmlV4l2Object *v4l2object)
3017{
3018 gboolean ret = TRUE;
3019 struct v4l2_exportbuffer expbuf = {
3020 .type = v4l2object->type,
3021 .index = -1,
3022 .plane = -1,
3023 .flags = O_CLOEXEC | O_RDWR,
3024 };
3025
3026 if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED)
3027 {
3028 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3029 "libv4l2 converter detected, disabling DMABuf");
3030 ret = FALSE;
3031 }
3032
3033 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
3034 v4l2object->ioctl(v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
3035 if (errno == ENOTTY)
3036 ret = FALSE;
3037
3038 return ret;
3039}
3040
3041static gboolean
3042gst_aml_v4l2_object_setup_pool(GstAmlV4l2Object *v4l2object, GstCaps *caps)
3043{
3044 GstAmlV4l2IOMode mode;
3045
3046 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "initializing the %s system",
3047 V4L2_TYPE_IS_OUTPUT(v4l2object->type) ? "output" : "capture");
3048
3049 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3050 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3051
3052 /* find transport */
3053 mode = v4l2object->req_mode;
3054
3055 if (v4l2object->device_caps & V4L2_CAP_READWRITE)
3056 {
3057 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
3058 mode = GST_V4L2_IO_RW;
3059 }
3060 else if (v4l2object->req_mode == GST_V4L2_IO_RW)
3061 goto method_not_supported;
3062
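 /* With streaming support and the IO mode left on auto, prefer DMABUF export
 * on the capture side when the driver supports it, otherwise use MMAP */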
3063 if (v4l2object->device_caps & V4L2_CAP_STREAMING)
3064 {
3065 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
3066 {
3067 if (!V4L2_TYPE_IS_OUTPUT(v4l2object->type) &&
3068 gst_aml_v4l2_object_is_dmabuf_supported(v4l2object))
3069 {
3070 mode = GST_V4L2_IO_DMABUF;
3071 }
3072 else
3073 {
3074 mode = GST_V4L2_IO_MMAP;
3075 }
3076 }
3077 }
3078 else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
3079 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
3080 goto method_not_supported;
3081
3082 /* if still no transport selected, error out */
3083 if (mode == GST_V4L2_IO_AUTO)
3084 goto no_supported_capture_method;
3085
3086 GST_INFO_OBJECT(v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
3087 v4l2object->mode = mode;
3088
3089 /* If min_buffers is not set, the driver either does not support the control or
3090 it has not been asked yet via propose_allocation/decide_allocation. */
3091 if (!v4l2object->min_buffers)
3092 gst_aml_v4l2_get_driver_min_buffers(v4l2object);
3093
3094 /* Map the buffers */
3095 GST_LOG_OBJECT(v4l2object->dbg_obj, "initiating buffer pool");
3096
3097 if (!(v4l2object->pool = gst_aml_v4l2_buffer_pool_new(v4l2object, caps)))
3098 goto buffer_pool_new_failed;
3099
3100 GST_AML_V4L2_SET_ACTIVE(v4l2object);
3101
3102 return TRUE;
3103
3104 /* ERRORS */
3105buffer_pool_new_failed:
3106{
3107 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3108 (_("Could not map buffers from device '%s'"),
3109 v4l2object->videodev),
3110 ("Failed to create buffer pool: %s", g_strerror(errno)));
3111 return FALSE;
3112}
3113method_not_supported:
3114{
3115 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3116 (_("The driver of device '%s' does not support the IO method %d"),
3117 v4l2object->videodev, mode),
3118 (NULL));
3119 return FALSE;
3120}
3121no_supported_capture_method:
3122{
3123 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3124 (_("The driver of device '%s' does not support any known IO "
3125 "method."),
3126 v4l2object->videodev),
3127 (NULL));
3128 return FALSE;
3129}
3130}
3131
3132static void
3133gst_aml_v4l2_object_set_stride(GstVideoInfo *info, GstVideoAlignment *align,
3134 gint plane, gint stride)
3135{
3136 const GstVideoFormatInfo *finfo = info->finfo;
3137
3138 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3139 {
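 /* For tiled formats the GstVideoInfo stride encodes the tile counts in
 * x and y rather than a byte count */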
3140 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3141
3142 ws = GST_VIDEO_FORMAT_INFO_TILE_WS(finfo);
3143 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3144 tile_height = 1 << hs;
3145
3146 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, plane,
3147 info->height + align->padding_top + align->padding_bottom);
3148 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3149
3150 x_tiles = stride >> ws;
3151 y_tiles = padded_height >> hs;
3152 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles);
3153 }
3154 else
3155 {
3156 info->stride[plane] = stride;
3157 }
3158}
3159
3160static void
3161gst_aml_v4l2_object_extrapolate_info(GstAmlV4l2Object *v4l2object,
3162 GstVideoInfo *info, GstVideoAlignment *align, gint stride)
3163{
3164 const GstVideoFormatInfo *finfo = info->finfo;
3165 gint i, estride, padded_height;
3166 gsize offs = 0;
3167
3168 g_return_if_fail(v4l2object->n_v4l2_planes == 1);
3169
3170 padded_height = info->height + align->padding_top + align->padding_bottom;
3171
3172 for (i = 0; i < finfo->n_planes; i++)
3173 {
3174 estride = gst_aml_v4l2_object_extrapolate_stride(finfo, i, stride);
3175
3176 gst_aml_v4l2_object_set_stride(info, align, i, estride);
3177
3178 info->offset[i] = offs;
3179 offs += estride *
3180 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, padded_height);
3181
3182 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3183 "Extrapolated for plane %d with base stride %d: "
3184 "stride %d, offset %" G_GSIZE_FORMAT,
3185 i, stride, info->stride[i],
3186 info->offset[i]);
3187 }
3188
 3189 /* Update the image size according to the amount of data we are going to
 3190 * read/write. This works around bugs in drivers where the sizeimage provided
 3191 * by TRY/S_FMT represents the buffer length (maximum size) rather than the expected
 3192 * bytesused (buffer size). */
3193 if (offs < info->size)
3194 info->size = offs;
3195}
3196
3197static void
3198gst_aml_v4l2_object_save_format(GstAmlV4l2Object *v4l2object,
3199 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3200 GstVideoInfo *info, GstVideoAlignment *align)
3201{
3202 const GstVideoFormatInfo *finfo = info->finfo;
3203 gboolean standard_stride = TRUE;
3204 gint stride, pstride, padded_width, padded_height, i;
3205
3206 if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_ENCODED)
3207 {
3208 v4l2object->n_v4l2_planes = 1;
3209 info->size = format->fmt.pix.sizeimage;
3210 goto store_info;
3211 }
3212
3213 /* adjust right padding */
3214 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3215 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3216 else
3217 stride = format->fmt.pix.bytesperline;
3218
3219 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE(finfo, 0);
3220 if (pstride)
3221 {
3222 padded_width = stride / pstride;
3223 }
3224 else
3225 {
3226 /* pstride can be 0 for complex formats */
3227 GST_WARNING_OBJECT(v4l2object->element,
3228 "format %s has a pstride of 0, cannot compute padded with",
3229 gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(info)));
3230 padded_width = stride;
3231 }
3232
3233 if (padded_width < format->fmt.pix.width)
3234 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3235 "Driver bug detected, stride (%d) is too small for the width (%d)",
3236 padded_width, format->fmt.pix.width);
3237
3238 align->padding_right = padded_width - info->width - align->padding_left;
3239
3240 /* adjust bottom padding */
3241 padded_height = format->fmt.pix.height;
3242
3243 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3244 {
3245 guint hs, tile_height;
3246
3247 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3248 tile_height = 1 << hs;
3249
3250 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3251 }
3252
3253 align->padding_bottom = padded_height - info->height - align->padding_top;
3254
3255 /* setup the strides and offset */
3256 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3257 {
3258 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3259
3260 /* figure out the frame layout */
3261 v4l2object->n_v4l2_planes = MAX(1, pix_mp->num_planes);
3262 info->size = 0;
3263 for (i = 0; i < v4l2object->n_v4l2_planes; i++)
3264 {
3265 stride = pix_mp->plane_fmt[i].bytesperline;
3266
3267 if (info->stride[i] != stride)
3268 standard_stride = FALSE;
3269
3270 gst_aml_v4l2_object_set_stride(info, align, i, stride);
3271 info->offset[i] = info->size;
3272 info->size += pix_mp->plane_fmt[i].sizeimage;
3273 }
3274
3275 /* Extrapolate stride if planar format are being set in 1 v4l2 plane */
3276 if (v4l2object->n_v4l2_planes < finfo->n_planes)
3277 {
3278 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3279 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3280 }
3281 }
3282 else
3283 {
3284 /* only one plane in non-MPLANE mode */
3285 v4l2object->n_v4l2_planes = 1;
3286 info->size = format->fmt.pix.sizeimage;
3287 stride = format->fmt.pix.bytesperline;
3288
3289 if (info->stride[0] != stride)
3290 standard_stride = FALSE;
3291
3292 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3293 }
3294
3295 /* adjust the offset to take into account left and top */
3296 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3297 {
3298 if ((align->padding_left + align->padding_top) > 0)
3299 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3300 "Left and top padding is not permitted for tiled formats");
3301 }
3302 else
3303 {
3304 for (i = 0; i < finfo->n_planes; i++)
3305 {
3306 gint vedge, hedge;
3307
 3308 /* FIXME we assume one plane per component, as this is true for all
 3309 * formats we currently support. */
3310
3311 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, i, align->padding_left);
3312 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, align->padding_top);
3313
3314 info->offset[i] += (vedge * info->stride[i]) +
3315 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE(info, i));
3316 }
3317 }
3318
3319store_info:
3320 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3321 info->size);
3322
3323 /* to avoid copies we need video meta if there is padding */
3324 v4l2object->need_video_meta =
3325 ((align->padding_top + align->padding_left + align->padding_right +
3326 align->padding_bottom) != 0);
3327
3328 /* ... or if stride is non "standard" */
3329 if (!standard_stride)
3330 v4l2object->need_video_meta = TRUE;
3331
3332 /* ... or also video meta if we use multiple, non-contiguous, planes */
3333 if (v4l2object->n_v4l2_planes > 1)
3334 v4l2object->need_video_meta = TRUE;
3335
3336 v4l2object->info = *info;
3337 v4l2object->align = *align;
3338 v4l2object->format = *format;
3339 v4l2object->fmtdesc = fmtdesc;
3340
3341 /* if we have a framerate pre-calculate duration */
3342 if (info->fps_n > 0 && info->fps_d > 0)
3343 {
3344 v4l2object->duration = gst_util_uint64_scale_int(GST_SECOND, info->fps_d,
3345 info->fps_n);
3346 }
3347 else
3348 {
3349 v4l2object->duration = GST_CLOCK_TIME_NONE;
3350 }
3351}
3352
3353gint gst_aml_v4l2_object_extrapolate_stride(const GstVideoFormatInfo *finfo,
3354 gint plane, gint stride)
3355{
3356 gint estride;
3357
3358 switch (finfo->format)
3359 {
3360 case GST_VIDEO_FORMAT_NV12:
3361 case GST_VIDEO_FORMAT_NV12_64Z32:
3362 case GST_VIDEO_FORMAT_NV21:
3363 case GST_VIDEO_FORMAT_NV16:
3364 case GST_VIDEO_FORMAT_NV61:
3365 case GST_VIDEO_FORMAT_NV24:
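 /* Semi-planar (NVxx) chroma planes interleave U and V, so their stride
 * is twice the scaled plane width */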
3366 estride = (plane == 0 ? 1 : 2) *
3367 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3368 break;
3369 default:
3370 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3371 break;
3372 }
3373
3374 return estride;
3375}
3376
3377static gboolean
3378gst_aml_v4l2_video_colorimetry_matches(const GstVideoColorimetry *cinfo,
3379 const gchar *color)
3380{
3381 GstVideoColorimetry ci;
3382 static const GstVideoColorimetry ci_likely_jpeg = {
3383 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3384 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN};
3385 static const GstVideoColorimetry ci_jpeg = {
3386 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3387 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709};
3388
3389 if (!gst_video_colorimetry_from_string(&ci, color))
3390 return FALSE;
3391
3392 if (gst_video_colorimetry_is_equal(&ci, cinfo))
3393 return TRUE;
3394
3395 /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
3396 if (gst_video_colorimetry_is_equal(&ci, &ci_likely_jpeg) && gst_video_colorimetry_is_equal(cinfo, &ci_jpeg))
3397 return TRUE;
3398
3399 return FALSE;
3400}
3401
3402static void
fei.deng7c3d67f2022-11-09 11:06:05 +08003403set_amlogic_vdec_parm(GstAmlV4l2Object *v4l2object, struct v4l2_streamparm *streamparm, GstCaps *caps, guint32 pixFormat)
xuesong.jiangae1548e2022-05-06 16:38:46 +08003404{
3405 struct aml_dec_params *decParm = (struct aml_dec_params *)streamparm->parm.raw_data;
3406 const char *env;
zengliang.lic9f869d2023-02-15 08:32:32 +00003407 struct v4l2_ext_control control;
3408 struct v4l2_ext_controls ctrls;
3409 gboolean use_ext_config = FALSE;
3410 int major = 0,minor = 0;
3411 struct utsname info;
xuesong.jiangae1548e2022-05-06 16:38:46 +08003412
xuesong.jiang22a9b112023-05-24 09:01:59 +00003413 decParm->cfg.data[0] = 0;
3414 decParm->cfg.data[1] = 0;
3415 decParm->cfg.data[2] = 0;
3416 decParm->cfg.data[3] = 0;
3417 decParm->cfg.data[4] = 0;
3418
xuesong.jiangae1548e2022-05-06 16:38:46 +08003419 decParm->cfg.metadata_config_flag = 1 << 13;
3420
3421 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3422 {
fei.dengccc89632022-07-15 19:10:17 +08003423 /* Setting bit 12 to 1 makes v4l2 output a 0 PTS for the
 3424 * second field of an interlaced frame */
fei.denga6ae3282022-07-15 19:50:30 +08003425 //decParm->cfg.metadata_config_flag |= (1 << 12);
fei.deng7c3d67f2022-11-09 11:06:05 +08003426 decParm->parms_status = V4L2_CONFIG_PARM_DECODE_CFGINFO;
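 /* Per-codec default double-write mode: VDEC_DW_NO_AFBC for MPEG/H.264,
 * VDEC_DW_AFBC_AUTO_1_4 for HEVC/VP9/AV1 */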
3427 switch (pixFormat)
3428 {
3429 default:
3430 case V4L2_PIX_FMT_MPEG:
3431 case V4L2_PIX_FMT_H264:
3432 decParm->cfg.double_write_mode= VDEC_DW_NO_AFBC;
3433 break;
3434 case V4L2_PIX_FMT_HEVC:
3435 case V4L2_PIX_FMT_VP9:
3436 case V4L2_PIX_FMT_AV1:
zengliang.lib1725ae2023-03-08 03:20:21 +00003437 decParm->cfg.double_write_mode= VDEC_DW_AFBC_AUTO_1_4;
fei.deng7c3d67f2022-11-09 11:06:05 +08003438 break;
3439 }
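 /* The V4L2_SET_AMLOGIC_DW_MODE environment variable can override the
 * double-write mode; only the values listed below are accepted */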
xuesong.jiangae1548e2022-05-06 16:38:46 +08003440 env = getenv("V4L2_SET_AMLOGIC_DW_MODE");
3441 if (env)
3442 {
3443 int dwMode = atoi(env);
3444 switch (dwMode)
3445 {
3446 case 0:
3447 case 1:
3448 case 2:
3449 case 3:
3450 case 4:
3451 case 16:
zengliang.lib1725ae2023-03-08 03:20:21 +00003452 case 256:
3453 case 512:
xuesong.jiangae1548e2022-05-06 16:38:46 +08003454 decParm->cfg.double_write_mode = dwMode;
3455 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3456 break;
3457 }
3458 }
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08003459 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "cfg dw mode to %d", decParm->cfg.double_write_mode);
3460
3461 // decParm->cfg.double_write_mode = 0x03;
3462 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3463
3464 decParm->cfg.ref_buf_margin = GST_AML_V4L2_DEFAULT_CAP_BUF_MARGIN;
xuesong.jiangae1548e2022-05-06 16:38:46 +08003465
zengliang.lic9f869d2023-02-15 08:32:32 +00003466 if (uname(&info) || sscanf(info.release, "%d.%d", &major, &minor) <= 0)
xuesong.jiangae1548e2022-05-06 16:38:46 +08003467 {
zengliang.lic9f869d2023-02-15 08:32:32 +00003468 GST_DEBUG("get linux version failed");
xuesong.jiangae1548e2022-05-06 16:38:46 +08003469 }
zengliang.lic9f869d2023-02-15 08:32:32 +00003470 GST_DEBUG("linux kernel version %d.%d", major, minor);
3471
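 /* Kernels 5.15 and newer take the decoder parameters through the
 * AML_V4L2_DEC_PARMS_CONFIG extended control; older kernels use VIDIOC_S_PARM */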
3472 use_ext_config = ((major == 5 && minor >= 15) || major >= 6) ? TRUE: FALSE;
3473
3474 if (use_ext_config)
3475 {
3476 memset(&ctrls, 0, sizeof(ctrls));
3477 memset(&control, 0, sizeof(control));
3478 control.id = AML_V4L2_DEC_PARMS_CONFIG;
3479 control.ptr = decParm;
3480 control.size = sizeof(struct aml_dec_params);
3481 ctrls.count = 1;
3482 ctrls.controls = &control;
3483 if (v4l2object->ioctl( v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls ) <0)
3484 {
3485 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3486 }
3487 else
3488 {
3489 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set dwMode to %d, margin to %d", decParm->cfg.double_write_mode, decParm->cfg.ref_buf_margin);
3490 }
3491 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003492 else
3493 {
zengliang.lic9f869d2023-02-15 08:32:32 +00003494 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_PARM, streamparm) < 0)
3495 {
3496 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3497 }
3498 else
3499 {
3500 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Set dwMode to %d, margin to %d", decParm->cfg.double_write_mode, decParm->cfg.ref_buf_margin);
3501 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003502 }
xuesong.jiange1a19662022-06-21 20:30:22 +08003503
3504 GstStructure *structure= gst_caps_get_structure(caps, 0);
3505 if (structure == NULL)
3506 {
3507 return;
3508 }
3509 if ( gst_structure_has_field(structure, "colorimetry") )
3510 {
3511 const char *colorimetry= gst_structure_get_string(structure,"colorimetry");
3512 GstVideoColorimetry vci = {0};
3513 if ( colorimetry && gst_video_colorimetry_from_string( &vci, colorimetry ))
3514 {
3515 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_HDRINFO;
3516 decParm->hdr.signal_type= (1<<29); /* present flag */
3517 /*set default value, this is to keep up with driver hdr info synchronization*/
3518 decParm->hdr.signal_type |= (5<<26) | (1<<24);
3519
3520 gint hdrColorimetry[4] = {0};
3521 hdrColorimetry[0]= (int)vci.range;
3522 hdrColorimetry[1]= (int)vci.matrix;
3523 hdrColorimetry[2]= (int)vci.transfer;
3524 hdrColorimetry[3]= (int)vci.primaries;
3525 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "colorimetry: [%d,%d,%d,%d]",
3526 hdrColorimetry[0],
3527 hdrColorimetry[1],
3528 hdrColorimetry[2],
3529 hdrColorimetry[3] );
3530 /* range */
3531 switch ( hdrColorimetry[0] )
3532 {
3533 case 1:
3534 case 2:
3535 decParm->hdr.signal_type |= ((hdrColorimetry[0] % 2)<<25);
3536 break;
3537 default:
3538 break;
3539 }
3540 /* matrix coefficient */
3541 switch ( hdrColorimetry[1] )
3542 {
3543 case 1: /* RGB */
3544 decParm->hdr.signal_type |= 0;
3545 break;
3546 case 2: /* FCC */
3547 decParm->hdr.signal_type |= 4;
3548 break;
3549 case 3: /* BT709 */
3550 decParm->hdr.signal_type |= 1;
3551 break;
3552 case 4: /* BT601 */
3553 decParm->hdr.signal_type |= 3;
3554 break;
3555 case 5: /* SMPTE240M */
3556 decParm->hdr.signal_type |= 7;
3557 break;
3558 case 6: /* BT2020 */
3559 decParm->hdr.signal_type |= 9;
3560 break;
3561 default: /* unknown */
3562 decParm->hdr.signal_type |= 2;
3563 break;
3564 }
3565 /* transfer function */
3566 switch ( hdrColorimetry[2] )
3567 {
3568 case 5: /* BT709 */
3569 decParm->hdr.signal_type |= (1<<8);
3570 break;
3571 case 6: /* SMPTE240M */
3572 decParm->hdr.signal_type |= (7<<8);
3573 break;
3574 case 9: /* LOG100 */
3575 decParm->hdr.signal_type |= (9<<8);
3576 break;
3577 case 10: /* LOG316 */
3578 decParm->hdr.signal_type |= (10<<8);
3579 break;
3580 case 12: /* BT2020_12 */
3581 decParm->hdr.signal_type |= (15<<8);
3582 break;
3583 case 11: /* BT2020_10 */
3584 decParm->hdr.signal_type |= (14<<8);
3585 break;
3586 case 13: /* SMPTE2084 */
3587 decParm->hdr.signal_type |= (16<<8);
3588 break;
3589 case 14: /* ARIB_STD_B67 */
3590 decParm->hdr.signal_type |= (18<<8);
3591 break;
3592 #if ((GST_VERSION_MAJOR == 1) && (GST_VERSION_MINOR >= 18))
3593 case 16: /* BT601 */
3594 decParm->hdr.signal_type |= (3<<8);
3595 break;
3596 #endif
3597 case 1: /* GAMMA10 */
3598 case 2: /* GAMMA18 */
3599 case 3: /* GAMMA20 */
3600 case 4: /* GAMMA22 */
3601 case 7: /* SRGB */
3602 case 8: /* GAMMA28 */
3603 case 15: /* ADOBERGB */
3604 default:
3605 break;
3606 }
3607 /* primaries */
3608 switch ( hdrColorimetry[3] )
3609 {
3610 case 1: /* BT709 */
3611 decParm->hdr.signal_type |= ((1<<24)|(1<<16));
3612 break;
3613 case 2: /* BT470M */
3614 decParm->hdr.signal_type |= ((1<<24)|(4<<16));
3615 break;
3616 case 3: /* BT470BG */
3617 decParm->hdr.signal_type |= ((1<<24)|(5<<16));
3618 break;
3619 case 4: /* SMPTE170M */
3620 decParm->hdr.signal_type |= ((1<<24)|(6<<16));
3621 break;
3622 case 5: /* SMPTE240M */
3623 decParm->hdr.signal_type |= ((1<<24)|(7<<16));
3624 break;
3625 case 6: /* FILM */
3626 decParm->hdr.signal_type |= ((1<<24)|(8<<16));
3627 break;
3628 case 7: /* BT2020 */
3629 decParm->hdr.signal_type |= ((1<<24)|(9<<16));
3630 break;
3631 case 8: /* ADOBERGB */
3632 default:
3633 break;
3634 }
3635 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR signal_type %X", decParm->hdr.signal_type);
3636 }
3637
3638 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "got caps %" GST_PTR_FORMAT, caps);
3639 GstStructure *st = gst_caps_get_structure(caps, 0);
3640 GstCapsFeatures *features = gst_caps_get_features(caps, 0);
3641
3642 if (gst_structure_has_field(st, "colorimetry"))
3643 {
3644 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "have colorimetry");
3645 }
3646
3647 if (st && features)
3648 {
3649 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "trace in remove colorimetry");
3650 gst_structure_remove_field(st, "colorimetry");
3651 gst_caps_features_remove(features, "colorimetry");
3652 }
3653 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove colorimetry %" GST_PTR_FORMAT, caps);
3654 }
3655
3656 if ( gst_structure_has_field(structure, "mastering-display-metadata") )
3657 {
3658 const char *masteringDisplay= gst_structure_get_string(structure,"mastering-display-metadata");
3659 float hdrMasteringDisplay[10];
3660 if ( masteringDisplay && sscanf( masteringDisplay, "%f:%f:%f:%f:%f:%f:%f:%f:%f:%f",
3661 &hdrMasteringDisplay[0],
3662 &hdrMasteringDisplay[1],
3663 &hdrMasteringDisplay[2],
3664 &hdrMasteringDisplay[3],
3665 &hdrMasteringDisplay[4],
3666 &hdrMasteringDisplay[5],
3667 &hdrMasteringDisplay[6],
3668 &hdrMasteringDisplay[7],
3669 &hdrMasteringDisplay[8],
3670 &hdrMasteringDisplay[9] ) == 10 )
3671 {
3672 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "mastering display [%f,%f,%f,%f,%f,%f,%f,%f,%f,%f]",
3673 hdrMasteringDisplay[0],
3674 hdrMasteringDisplay[1],
3675 hdrMasteringDisplay[2],
3676 hdrMasteringDisplay[3],
3677 hdrMasteringDisplay[4],
3678 hdrMasteringDisplay[5],
3679 hdrMasteringDisplay[6],
3680 hdrMasteringDisplay[7],
3681 hdrMasteringDisplay[8],
3682 hdrMasteringDisplay[9] );
3683
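                    /* scale the normalized chromaticity floats from caps by 50000
                     * (the usual 0.00002-unit HDR metadata convention) before
                     * handing them to the driver */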
3684 decParm->hdr.color_parms.present_flag= 1;
3685 decParm->hdr.color_parms.primaries[2][0]= (uint32_t)(hdrMasteringDisplay[0]*50000); /* R.x */
3686 decParm->hdr.color_parms.primaries[2][1]= (uint32_t)(hdrMasteringDisplay[1]*50000); /* R.y */
3687 decParm->hdr.color_parms.primaries[0][0]= (uint32_t)(hdrMasteringDisplay[2]*50000); /* G.x */
3688 decParm->hdr.color_parms.primaries[0][1]= (uint32_t)(hdrMasteringDisplay[3]*50000); /* G.y */
3689 decParm->hdr.color_parms.primaries[1][0]= (uint32_t)(hdrMasteringDisplay[4]*50000); /* B.x */
3690 decParm->hdr.color_parms.primaries[1][1]= (uint32_t)(hdrMasteringDisplay[5]*50000); /* B.y */
3691 decParm->hdr.color_parms.white_point[0]= (uint32_t)(hdrMasteringDisplay[6]*50000);
3692 decParm->hdr.color_parms.white_point[1]= (uint32_t)(hdrMasteringDisplay[7]*50000);
3693 decParm->hdr.color_parms.luminance[0]= (uint32_t)(hdrMasteringDisplay[8]);
3694 decParm->hdr.color_parms.luminance[1]= (uint32_t)(hdrMasteringDisplay[9]);
3695 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: primaries %X %X %X %X %X %X",
3696 decParm->hdr.color_parms.primaries[2][0],
3697 decParm->hdr.color_parms.primaries[2][1],
3698 decParm->hdr.color_parms.primaries[0][0],
3699 decParm->hdr.color_parms.primaries[0][1],
3700 decParm->hdr.color_parms.primaries[1][0],
3701 decParm->hdr.color_parms.primaries[1][1] );
3702 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: white point: %X %X",
3703 decParm->hdr.color_parms.white_point[0],
3704 decParm->hdr.color_parms.white_point[1] );
3705 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "HDR mastering: luminance: %X %X",
3706 decParm->hdr.color_parms.luminance[0],
3707 decParm->hdr.color_parms.luminance[1] );
3708 }
3709
3710 GstStructure *st = gst_caps_get_structure(caps, 0);
3711 GstCapsFeatures * features = gst_caps_get_features(caps, 0);
3712 if (st && features)
3713 {
3714 gst_structure_remove_fields(st, "mastering-display-metadata", NULL);
3715 gst_caps_features_remove(features, "mastering-display-metadata");
3716 }
3717 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "caps after remove mastering-display-metadata %" GST_PTR_FORMAT, caps);
3718 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08003719 }
3720}
3721
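/* Query the decoder's current double-write (dw) mode through VIDIOC_G_PARM,
 * trying the OUTPUT queue first and then OUTPUT_MPLANE; returns the mode on
 * success or -1 when neither queue answers. */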
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08003722static gint gst_aml_v4l2_object_get_dw_mode(GstAmlV4l2Object *v4l2object)
3723{
3724 struct v4l2_streamparm streamparm;
3725 struct aml_dec_params *decParm = (struct aml_dec_params *)(&streamparm.parm.raw_data);
3726 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3727
3728 streamparm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
3729 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_PARM, &streamparm) >= 0)
3730 {
3731 GST_DEBUG_OBJECT(v4l2object, "get dw mode:%d in type V4L2_BUF_TYPE_VIDEO_OUTPUT", decParm->cfg.double_write_mode);
3732 return decParm->cfg.double_write_mode;
3733 }
3734
3735 streamparm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
3736 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_PARM, &streamparm) >= 0)
3737 {
3738 GST_DEBUG_OBJECT(v4l2object, "get dw mode:%d in type V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE", decParm->cfg.double_write_mode);
3739 return decParm->cfg.double_write_mode;
3740 }
3741
3742 GST_ERROR_OBJECT(v4l2object, "can't get dw mode in type V4L2_BUF_TYPE_VIDEO_OUTPUT or V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ret -1");
3743 return -1;
3744}
3745
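/* Negotiate the format described by caps with the driver: check the supported
 * frame size, push the Amlogic decoder parameters (dw mode, buffer margin,
 * HDR info), map the GStreamer colorimetry to V4L2, issue TRY_FMT or S_FMT
 * depending on try_only, read back the framerate and set up the buffer pool. */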
xuesong.jiangae1548e2022-05-06 16:38:46 +08003746static gboolean
3747gst_aml_v4l2_object_set_format_full(GstAmlV4l2Object *v4l2object, GstCaps *caps,
3748 gboolean try_only, GstAmlV4l2Error *error)
3749{
3750 gint fd = v4l2object->video_fd;
3751 struct v4l2_format format;
3752 struct v4l2_streamparm streamparm;
3753 enum v4l2_field field;
3754 guint32 pixelformat;
3755 struct v4l2_fmtdesc *fmtdesc;
3756 GstVideoInfo info;
3757 GstVideoAlignment align;
3758 gint width, height, fps_n, fps_d;
3759 gint n_v4l_planes;
3760 gint i = 0;
3761 gboolean is_mplane;
3762 enum v4l2_colorspace colorspace = 0;
3763 enum v4l2_quantization range = 0;
3764 enum v4l2_ycbcr_encoding matrix = 0;
3765 enum v4l2_xfer_func transfer = 0;
3766 GstStructure *s;
3767 gboolean disable_colorimetry = FALSE;
3768
3769 g_return_val_if_fail(!v4l2object->skip_try_fmt_probes ||
3770 gst_caps_is_writable(caps),
3771 FALSE);
3772
3773 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3774 if (!try_only)
3775 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3776
xuesong.jiangae1548e2022-05-06 16:38:46 +08003777 is_mplane = V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type);
3778
3779 gst_video_info_init(&info);
3780 gst_video_alignment_reset(&align);
3781
3782 if (!gst_aml_v4l2_object_get_caps_info(v4l2object, caps, &fmtdesc, &info))
3783 goto invalid_caps;
3784
3785 pixelformat = fmtdesc->pixelformat;
3786 width = GST_VIDEO_INFO_WIDTH(&info);
3787 height = GST_VIDEO_INFO_HEIGHT(&info);
3788 fps_n = GST_VIDEO_INFO_FPS_N(&info);
3789 fps_d = GST_VIDEO_INFO_FPS_D(&info);
3790
hanghang.luo9edfc7d2023-05-17 07:01:05 +00003791 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Check image size");
3792 struct v4l2_frmsizeenum size;
3793 memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
3794 size.index = 0;
3795 size.pixel_format = pixelformat;
3796 if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
3797 return FALSE;
3798 if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
3799 {
3800 guint32 maxw, maxh;
3801 maxw = MIN (size.stepwise.max_width, G_MAXINT);
3802 maxh = MIN (size.stepwise.max_height, G_MAXINT);
3803 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "image from caps w_h:%d_%d", width, height);
3804 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "v4l2 support max w_h:%d_%d", maxw, maxh);
3805 if (width*height > maxw*maxh)
3806 return FALSE;
3807 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Check image size ok");
3808 }
3809
fei.deng7c3d67f2022-11-09 11:06:05 +08003810 //set amlogic params here, because we need the pix format to set the dw mode
3811 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3812 streamparm.type = v4l2object->type;
3813 set_amlogic_vdec_parm(v4l2object, &streamparm, caps, pixelformat);
3814
xuesong.jiangae1548e2022-05-06 16:38:46 +08003815 /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
3816 * or if contiguous is preferred */
3817 n_v4l_planes = GST_VIDEO_INFO_N_PLANES(&info);
3818 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3819 n_v4l_planes = 1;
3820
3821 if (GST_VIDEO_INFO_IS_INTERLACED(&info))
3822 {
3823 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "interlaced video");
3824 /* ideally we would differentiate between types of interlaced video
3825 * but there is not sufficient information in the caps..
3826 */
3827 field = V4L2_FIELD_INTERLACED;
3828 }
3829 else
3830 {
3831 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "progressive video");
3832 field = V4L2_FIELD_NONE;
3833 }
3834
3835 /* We first pick the main colorspace from the primaries */
3836 switch (info.colorimetry.primaries)
3837 {
3838 case GST_VIDEO_COLOR_PRIMARIES_BT709:
3839 /* There are two colorspaces using these primaries, use the range to
3840 * differentiate */
3841 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3842 colorspace = V4L2_COLORSPACE_REC709;
3843 else
3844 colorspace = V4L2_COLORSPACE_SRGB;
3845 break;
3846 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3847 colorspace = V4L2_COLORSPACE_BT2020;
3848 break;
3849 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3850 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3851 break;
3852 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3853 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3854 break;
3855 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3856 colorspace = V4L2_COLORSPACE_SMPTE170M;
3857 break;
3858 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3859 colorspace = V4L2_COLORSPACE_SMPTE240M;
3860 break;
3861
3862 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3863 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3864 /* We don't know, we will guess */
3865 break;
3866
3867 default:
3868 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3869 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3870 break;
3871 }
3872
3873 switch (info.colorimetry.range)
3874 {
3875 case GST_VIDEO_COLOR_RANGE_0_255:
3876 range = V4L2_QUANTIZATION_FULL_RANGE;
3877 break;
3878 case GST_VIDEO_COLOR_RANGE_16_235:
3879 range = V4L2_QUANTIZATION_LIM_RANGE;
3880 break;
3881 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3882 /* We let the driver pick a default one */
3883 break;
3884 default:
3885 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3886 "Unknown colorimetry range %d", info.colorimetry.range);
3887 break;
3888 }
3889
3890 switch (info.colorimetry.matrix)
3891 {
3892 case GST_VIDEO_COLOR_MATRIX_RGB:
3893 /* Unspecified, leave to default */
3894 break;
3895 /* FCC is about the same as BT601 with less digit */
3896 case GST_VIDEO_COLOR_MATRIX_FCC:
3897 case GST_VIDEO_COLOR_MATRIX_BT601:
3898 matrix = V4L2_YCBCR_ENC_601;
3899 break;
3900 case GST_VIDEO_COLOR_MATRIX_BT709:
3901 matrix = V4L2_YCBCR_ENC_709;
3902 break;
3903 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3904 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3905 break;
3906 case GST_VIDEO_COLOR_MATRIX_BT2020:
3907 matrix = V4L2_YCBCR_ENC_BT2020;
3908 break;
3909 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3910 /* We let the driver pick a default one */
3911 break;
3912 default:
3913 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3914 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3915 break;
3916 }
3917
3918 switch (info.colorimetry.transfer)
3919 {
3920 case GST_VIDEO_TRANSFER_GAMMA18:
3921 case GST_VIDEO_TRANSFER_GAMMA20:
3922 case GST_VIDEO_TRANSFER_GAMMA22:
3923 case GST_VIDEO_TRANSFER_GAMMA28:
3924 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3925 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3926 /* fallthrough */
3927 case GST_VIDEO_TRANSFER_GAMMA10:
3928 transfer = V4L2_XFER_FUNC_NONE;
3929 break;
3930 case GST_VIDEO_TRANSFER_BT2020_12:
3931 case GST_VIDEO_TRANSFER_BT709:
3932 transfer = V4L2_XFER_FUNC_709;
3933 break;
3934 case GST_VIDEO_TRANSFER_SMPTE240M:
3935 transfer = V4L2_XFER_FUNC_SMPTE240M;
3936 break;
3937 case GST_VIDEO_TRANSFER_SRGB:
3938 transfer = V4L2_XFER_FUNC_SRGB;
3939 break;
3940 case GST_VIDEO_TRANSFER_LOG100:
3941 case GST_VIDEO_TRANSFER_LOG316:
3942 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3943 "LOG 100, 316 transfer functions not supported");
3944 /* FIXME No known sensible default, maybe AdobeRGB ? */
3945 break;
3946 case GST_VIDEO_TRANSFER_UNKNOWN:
3947 /* We let the driver pick a default one */
3948 break;
3949 default:
3950 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3951 "Unknown colorimetry transfer %d", info.colorimetry.transfer);
3952 break;
3953 }
3954
3955 if (colorspace == 0)
3956 {
3957 /* Try to guess colorspace according to pixelformat and size */
3958 if (GST_VIDEO_INFO_IS_YUV(&info))
3959 {
3960 if (range == V4L2_QUANTIZATION_FULL_RANGE && matrix == V4L2_YCBCR_ENC_601 && transfer == 0)
3961 {
3962 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3963 * function most likely is JPEG */
3964 colorspace = V4L2_COLORSPACE_JPEG;
3965 transfer = V4L2_XFER_FUNC_SRGB;
3966 }
3967 else
3968 {
3969 /* SD streams likely use SMPTE170M and HD streams REC709 */
3970 if (width <= 720 && height <= 576)
3971 colorspace = V4L2_COLORSPACE_SMPTE170M;
3972 else
3973 colorspace = V4L2_COLORSPACE_REC709;
3974 }
3975 }
3976 else if (GST_VIDEO_INFO_IS_RGB(&info))
3977 {
3978 colorspace = V4L2_COLORSPACE_SRGB;
3979 transfer = V4L2_XFER_FUNC_NONE;
3980 }
3981 }
3982
3983 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format %dx%d, format "
3984 "%" GST_FOURCC_FORMAT " stride: %d",
3985 width, height,
3986 GST_FOURCC_ARGS(pixelformat), GST_VIDEO_INFO_PLANE_STRIDE(&info, 0));
3987
3988 memset(&format, 0x00, sizeof(struct v4l2_format));
3989 format.type = v4l2object->type;
3990
3991 if (is_mplane)
3992 {
3993 format.type = v4l2object->type;
3994 format.fmt.pix_mp.pixelformat = pixelformat;
3995 format.fmt.pix_mp.width = width;
3996 format.fmt.pix_mp.height = height;
3997 format.fmt.pix_mp.field = field;
3998 format.fmt.pix_mp.num_planes = n_v4l_planes;
3999
4000 /* try to ask our preferred stride but it's not a failure if not
4001 * accepted */
4002 for (i = 0; i < n_v4l_planes; i++)
4003 {
4004 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, i);
4005
4006 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
4007 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
4008
4009 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
4010 }
4011
4012 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
4013 {
4014 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
4015 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
4016 else
4017 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
4018 }
4019 }
4020 else
4021 {
4022 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
4023
4024 format.type = v4l2object->type;
4025
4026 format.fmt.pix.width = width;
4027 format.fmt.pix.height = height;
4028 format.fmt.pix.pixelformat = pixelformat;
4029 format.fmt.pix.field = field;
4030
4031 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
4032 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
4033
4034 /* try to ask our preferred stride */
4035 format.fmt.pix.bytesperline = stride;
4036
4037 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
4038 {
4039 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
4040 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
4041 else
4042 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
4043 }
4044 }
4045
4046 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format is %dx%d, format "
4047 "%" GST_FOURCC_FORMAT ", nb planes %d",
4048 format.fmt.pix.width,
4049 format.fmt.pix_mp.height,
4050 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
4051 is_mplane ? format.fmt.pix_mp.num_planes : 1);
4052
4053#ifndef GST_DISABLE_GST_DEBUG
4054 if (is_mplane)
4055 {
4056 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
4057 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
4058 format.fmt.pix_mp.plane_fmt[i].bytesperline);
4059 }
4060 else
4061 {
4062 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
4063 format.fmt.pix.bytesperline);
4064 }
4065#endif
4066
4067 if (is_mplane)
4068 {
4069 format.fmt.pix_mp.colorspace = colorspace;
4070 format.fmt.pix_mp.quantization = range;
4071 format.fmt.pix_mp.ycbcr_enc = matrix;
4072 format.fmt.pix_mp.xfer_func = transfer;
4073 }
4074 else
4075 {
4076 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
4077 format.fmt.pix.colorspace = colorspace;
4078 format.fmt.pix.quantization = range;
4079 format.fmt.pix.ycbcr_enc = matrix;
4080 format.fmt.pix.xfer_func = transfer;
4081 }
4082
4083 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
4084 colorspace, range, matrix, transfer);
4085
4086 if (try_only)
4087 {
4088 if (v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &format) < 0)
4089 goto try_fmt_failed;
4090 }
4091 else
4092 {
4093 if (v4l2object->ioctl(fd, VIDIOC_S_FMT, &format) < 0)
4094 goto set_fmt_failed;
4095 }
4096
4097 if (is_mplane)
4098 {
4099 colorspace = format.fmt.pix_mp.colorspace;
4100 range = format.fmt.pix_mp.quantization;
4101 matrix = format.fmt.pix_mp.ycbcr_enc;
4102 transfer = format.fmt.pix_mp.xfer_func;
4103 }
4104 else
4105 {
4106 colorspace = format.fmt.pix.colorspace;
4107 range = format.fmt.pix.quantization;
4108 matrix = format.fmt.pix.ycbcr_enc;
4109 transfer = format.fmt.pix.xfer_func;
4110 }
4111
4112 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got format of %dx%d, format "
4113 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
4114 format.fmt.pix.width, format.fmt.pix_mp.height,
4115 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
4116 is_mplane ? format.fmt.pix_mp.num_planes : 1,
4117 colorspace, range, matrix, transfer);
4118
4119#ifndef GST_DISABLE_GST_DEBUG
4120 if (is_mplane)
4121 {
4122 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
4123 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
4124 format.fmt.pix_mp.plane_fmt[i].bytesperline,
4125 format.fmt.pix_mp.plane_fmt[i].sizeimage);
4126 }
4127 else
4128 {
4129 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
4130 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
4131 }
4132#endif
4133
4134 if (format.fmt.pix.pixelformat != pixelformat)
4135 goto invalid_pixelformat;
4136
4137 /* Only negotiate size with raw data.
4138 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
4139 * in ASF mode for example, there is also not reason for a driver to
4140 * in ASF mode for example, there is also no reason for a driver to
4141 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED)
4142 {
4143 /* We can crop larger images */
4144 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
4145 goto invalid_dimensions;
4146
4147 /* Note, this will be adjusted if upstream has non-centered cropping. */
4148 align.padding_top = 0;
4149 align.padding_bottom = format.fmt.pix.height - height;
4150 align.padding_left = 0;
4151 align.padding_right = format.fmt.pix.width - width;
4152 }
4153
4154 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
4155 goto invalid_planes;
4156
4157 /* used to check colorimetry and interlace mode fields presence */
4158 s = gst_caps_get_structure(caps, 0);
4159
4160 if (!gst_aml_v4l2_object_get_interlace_mode(format.fmt.pix.field,
4161 &info.interlace_mode))
4162 goto invalid_field;
4163 if (gst_structure_has_field(s, "interlace-mode"))
4164 {
4165 if (format.fmt.pix.field != field)
4166 goto invalid_field;
4167 }
4168
4169 if (gst_aml_v4l2_object_get_colorspace(&format, &info.colorimetry))
4170 {
4171 if (gst_structure_has_field(s, "colorimetry"))
4172 {
xuesong.jiange1a19662022-06-21 20:30:22 +08004173 if (!gst_aml_v4l2_video_colorimetry_matches(&info.colorimetry, gst_structure_get_string(s, "colorimetry")))
4174 {
4175 // goto invalid_colorimetry;
4176 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08004177 }
4178 }
4179 else
4180 {
4181 /* The driver (or libv4l2) is misbehaving, just ignore colorimetry from
4182 * the TRY_FMT */
4183 disable_colorimetry = TRUE;
4184 if (gst_structure_has_field(s, "colorimetry"))
4185 gst_structure_remove_field(s, "colorimetry");
4186 }
4187
4188 /* In case we have skipped the try_fmt probes, we'll need to set the
4189 * colorimetry and interlace-mode back into the caps. */
4190 if (v4l2object->skip_try_fmt_probes)
4191 {
4192 if (!disable_colorimetry && !gst_structure_has_field(s, "colorimetry"))
4193 {
4194 gchar *str = gst_video_colorimetry_to_string(&info.colorimetry);
4195 gst_structure_set(s, "colorimetry", G_TYPE_STRING, str, NULL);
4196 g_free(str);
4197 }
4198
4199 if (!gst_structure_has_field(s, "interlace-mode"))
4200 gst_structure_set(s, "interlace-mode", G_TYPE_STRING,
4201 gst_video_interlace_mode_to_string(info.interlace_mode), NULL);
4202 }
4203
4204 if (try_only) /* good enough for trying only */
4205 return TRUE;
4206
4207 if (GST_VIDEO_INFO_HAS_ALPHA(&info))
4208 {
4209 struct v4l2_control ctl = {
4210 0,
4211 };
4212 ctl.id = V4L2_CID_ALPHA_COMPONENT;
4213 ctl.value = 0xff;
4214
4215 if (v4l2object->ioctl(fd, VIDIOC_S_CTRL, &ctl) < 0)
4216 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4217 "Failed to set alpha component value");
4218 }
4219
4220 /* Is there a reason we require the caller to always specify a framerate? */
4221 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
4222 fps_d);
4223
4224 if (v4l2object->ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
4225 goto get_parm_failed;
4226
4227 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
4228 {
4229 GST_VIDEO_INFO_FPS_N(&info) =
4230 streamparm.parm.capture.timeperframe.denominator;
4231 GST_VIDEO_INFO_FPS_D(&info) =
4232 streamparm.parm.capture.timeperframe.numerator;
4233
4234 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got capture framerate: %u/%u",
4235 streamparm.parm.capture.timeperframe.denominator,
4236 streamparm.parm.capture.timeperframe.numerator);
4237
4238 /* We used to skip frame rate setup if the camera was already setup
4239 * with the requested frame rate. This breaks some cameras though,
4240 * causing them to not output data (several models of Thinkpad cameras
4241 * have this problem at least).
4242 * So, don't skip. */
4243 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
4244 fps_n, fps_d);
4245 /* We want to change the frame rate, so check whether we can. Some cheap USB
4246 * cameras don't have the capability */
4247 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4248 {
4249 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4250 "Not setting capture framerate (not supported)");
4251 goto done;
4252 }
4253
4254 /* Note: V4L2 wants the frame interval, we have the frame rate */
4255 streamparm.parm.capture.timeperframe.numerator = fps_d;
4256 streamparm.parm.capture.timeperframe.denominator = fps_n;
4257
zengliang.lic9f869d2023-02-15 08:32:32 +00004258 /* Amlogic sets parameters to the decoder and only supports delivery of private structures */
4259 //some cheap USB cams won't accept any change
4260 //if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4261 //goto set_parm_failed;
xuesong.jiangae1548e2022-05-06 16:38:46 +08004262
4263 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
4264 streamparm.parm.capture.timeperframe.denominator > 0)
4265 {
4266 /* get new values */
4267 fps_d = streamparm.parm.capture.timeperframe.numerator;
4268 fps_n = streamparm.parm.capture.timeperframe.denominator;
4269
4270 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set capture framerate to %u/%u",
4271 fps_n, fps_d);
4272 }
4273 else
4274 {
4275 /* fix v4l2 capture driver to provide framerate values */
4276 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4277 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
4278 }
4279
4280 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4281 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4282 }
4283 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
4284 {
4285 GST_VIDEO_INFO_FPS_N(&info) =
4286 streamparm.parm.output.timeperframe.denominator;
4287 GST_VIDEO_INFO_FPS_D(&info) =
4288 streamparm.parm.output.timeperframe.numerator;
4289
4290 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got output framerate: %u/%u",
4291 streamparm.parm.output.timeperframe.denominator,
4292 streamparm.parm.output.timeperframe.numerator);
4293
4294 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting output framerate to %u/%u",
4295 fps_n, fps_d);
4296 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0)
4297 {
4298 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4299 "Not setting output framerate (not supported)");
4300 goto done;
4301 }
4302
4303 /* Note: V4L2 wants the frame interval, we have the frame rate */
4304 streamparm.parm.output.timeperframe.numerator = fps_d;
4305 streamparm.parm.output.timeperframe.denominator = fps_n;
4306
zengliang.lic9f869d2023-02-15 08:32:32 +00004307 /* Amlogic sets parameters to the decoder and only supports delivery of private structures */
4308 //if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
4309 //goto set_parm_failed;
xuesong.jiangae1548e2022-05-06 16:38:46 +08004310
4311 if (streamparm.parm.output.timeperframe.numerator > 0 &&
4312 streamparm.parm.output.timeperframe.denominator > 0)
4313 {
4314 /* get new values */
4315 fps_d = streamparm.parm.output.timeperframe.numerator;
4316 fps_n = streamparm.parm.output.timeperframe.denominator;
4317
4318 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set output framerate to %u/%u",
4319 fps_n, fps_d);
4320 }
4321 else
4322 {
4323 /* fix v4l2 output driver to provide framerate values */
4324 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4325 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
4326 }
4327
4328 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
4329 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
4330 }
4331
4332done:
4333 /* add boolean return, so we can fail on drivers bugs */
4334 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &format, &info, &align);
4335
4336 /* now configure the pool */
4337 if (!gst_aml_v4l2_object_setup_pool(v4l2object, caps))
4338 goto pool_failed;
4339
4340 return TRUE;
4341
4342 /* ERRORS */
4343invalid_caps:
4344{
4345 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
4346 caps);
4347
4348 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4349 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
4350 return FALSE;
4351}
4352try_fmt_failed:
4353{
4354 if (errno == EINVAL)
4355 {
4356 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4357 (_("Device '%s' has no supported format"), v4l2object->videodev),
4358 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4359 GST_FOURCC_ARGS(pixelformat), width, height,
4360 g_strerror(errno)));
4361 }
4362 else
4363 {
4364 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4365 (_("Device '%s' failed during initialization"),
4366 v4l2object->videodev),
4367 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4368 GST_FOURCC_ARGS(pixelformat), width, height,
4369 g_strerror(errno)));
4370 }
4371 return FALSE;
4372}
4373set_fmt_failed:
4374{
4375 if (errno == EBUSY)
4376 {
4377 GST_AML_V4L2_ERROR(error, RESOURCE, BUSY,
4378 (_("Device '%s' is busy"), v4l2object->videodev),
4379 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4380 GST_FOURCC_ARGS(pixelformat), width, height,
4381 g_strerror(errno)));
4382 }
4383 else if (errno == EINVAL)
4384 {
4385 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4386 (_("Device '%s' has no supported format"), v4l2object->videodev),
4387 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4388 GST_FOURCC_ARGS(pixelformat), width, height,
4389 g_strerror(errno)));
4390 }
4391 else
4392 {
4393 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
4394 (_("Device '%s' failed during initialization"),
4395 v4l2object->videodev),
4396 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4397 GST_FOURCC_ARGS(pixelformat), width, height,
4398 g_strerror(errno)));
4399 }
4400 return FALSE;
4401}
4402invalid_dimensions:
4403{
4404 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4405 (_("Device '%s' cannot capture at %dx%d"),
4406 v4l2object->videodev, width, height),
4407 ("Tried to capture at %dx%d, but device returned size %dx%d",
4408 width, height, format.fmt.pix.width, format.fmt.pix.height));
4409 return FALSE;
4410}
4411invalid_pixelformat:
4412{
4413 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4414 (_("Device '%s' cannot capture in the specified format"),
4415 v4l2object->videodev),
4416 ("Tried to capture in %" GST_FOURCC_FORMAT
4417 ", but device returned format"
4418 " %" GST_FOURCC_FORMAT,
4419 GST_FOURCC_ARGS(pixelformat),
4420 GST_FOURCC_ARGS(format.fmt.pix.pixelformat)));
4421 return FALSE;
4422}
4423invalid_planes:
4424{
4425 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4426 (_("Device '%s' does not support non-contiguous planes"),
4427 v4l2object->videodev),
4428 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
4429 return FALSE;
4430}
4431invalid_field:
4432{
4433 enum v4l2_field wanted_field;
4434
4435 if (is_mplane)
4436 wanted_field = format.fmt.pix_mp.field;
4437 else
4438 wanted_field = format.fmt.pix.field;
4439
4440 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4441 (_("Device '%s' does not support %s interlacing"),
4442 v4l2object->videodev,
4443 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
4444 ("Device wants %s interlacing",
4445 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4446 return FALSE;
4447}
hanghang.luo3128f102022-08-18 10:36:19 +08004448#ifdef DELETE_FOR_LGE
xuesong.jiangae1548e2022-05-06 16:38:46 +08004449invalid_colorimetry:
4450{
4451 gchar *wanted_colorimetry;
4452
4453 wanted_colorimetry = gst_video_colorimetry_to_string(&info.colorimetry);
4454
4455 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4456 (_("Device '%s' does not support %s colorimetry"),
4457 v4l2object->videodev, gst_structure_get_string(s, "colorimetry")),
4458 ("Device wants %s colorimetry", wanted_colorimetry));
4459
4460 g_free(wanted_colorimetry);
4461 return FALSE;
4462}
hanghang.luo3128f102022-08-18 10:36:19 +08004463#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08004464get_parm_failed:
4465{
4466 /* it's possible that this call is not supported */
4467 if (errno != EINVAL && errno != ENOTTY)
4468 {
4469 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4470 (_("Could not get parameters on device '%s'"),
4471 v4l2object->videodev),
4472 GST_ERROR_SYSTEM);
4473 }
4474 goto done;
4475}
4476set_parm_failed:
4477{
4478 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4479 (_("Video device did not accept new frame rate setting.")),
4480 GST_ERROR_SYSTEM);
4481 goto done;
4482}
4483pool_failed:
4484{
4485 /* setup_pool already send the error */
4486 return FALSE;
4487}
4488}
4489
4490gboolean
4491gst_aml_v4l2_object_set_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4492 GstAmlV4l2Error *error)
4493{
4494 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4495 caps);
4496 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, FALSE, error);
4497}
4498
4499gboolean
4500gst_aml_v4l2_object_try_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4501 GstAmlV4l2Error *error)
4502{
4503 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4504 caps);
4505 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, TRUE, error);
4506}
4507
4508GstFlowReturn
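/* Wait until the device becomes readable. EBUSY means the poll was flushed
 * (GST_FLOW_FLUSHING), EAGAIN/EINTR are retried, and ENXIO disables polling
 * for the rest of the session. */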
4509gst_aml_v4l2_object_poll(GstAmlV4l2Object *v4l2object)
4510{
4511 gint ret;
4512
4513 if (!v4l2object->can_poll_device)
4514 goto done;
4515
4516 GST_LOG_OBJECT(v4l2object, "polling device");
4517
4518again:
4519 ret = gst_poll_wait(v4l2object->poll, GST_CLOCK_TIME_NONE);
4520 if (G_UNLIKELY(ret < 0))
4521 {
4522 switch (errno)
4523 {
4524 case EBUSY:
4525 goto stopped;
4526 case EAGAIN:
4527 case EINTR:
4528 goto again;
4529 case ENXIO:
4530 GST_WARNING_OBJECT(v4l2object,
4531 "v4l2 device doesn't support polling. Disabling"
4532 " using libv4l2 in this case may cause deadlocks");
4533 v4l2object->can_poll_device = FALSE;
4534 goto done;
4535 default:
4536 goto select_error;
4537 }
4538 }
4539
4540done:
4541 return GST_FLOW_OK;
4542
4543 /* ERRORS */
4544stopped:
4545{
4546 GST_DEBUG_OBJECT(v4l2object, "stop called");
4547 return GST_FLOW_FLUSHING;
4548}
4549select_error:
4550{
4551 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ, (NULL),
4552 ("poll error %d: %s (%d)", ret, g_strerror(errno), errno));
4553 return GST_FLOW_ERROR;
4554}
4555}
4556
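/* Dequeue one pending V4L2 event: SOURCE_CHANGE maps to
 * GST_AML_V4L2_FLOW_SOURCE_CHANGE, EOS to GST_AML_V4L2_FLOW_LAST_BUFFER,
 * anything else is ignored. */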
4557GstFlowReturn
4558gst_aml_v4l2_object_dqevent(GstAmlV4l2Object *v4l2object)
4559{
4560 GstFlowReturn res;
4561 struct v4l2_event evt;
4562
4563 if ((res = gst_aml_v4l2_object_poll(v4l2object)) != GST_FLOW_OK)
4564 goto poll_failed;
4565
4566 memset(&evt, 0x00, sizeof(struct v4l2_event));
4567 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DQEVENT, &evt) < 0)
4568 goto dqevent_failed;
4569
4570 switch (evt.type)
4571 {
4572 case V4L2_EVENT_SOURCE_CHANGE:
4573 return GST_AML_V4L2_FLOW_SOURCE_CHANGE;
4574 break;
4575 case V4L2_EVENT_EOS:
4576 return GST_AML_V4L2_FLOW_LAST_BUFFER;
4577 break;
4578 default:
4579 break;
4580 }
4581
4582 return GST_FLOW_OK;
4583
4584 /* ERRORS */
4585poll_failed:
4586{
4587 GST_DEBUG_OBJECT(v4l2object, "poll error %s", gst_flow_get_name(res));
4588 return res;
4589}
4590dqevent_failed:
4591{
4592 return GST_FLOW_ERROR;
4593}
4594}
4595
4596/**
4597 * gst_aml_v4l2_object_acquire_format:
4598 * @v4l2object: the object
4599 * @info: a GstVideoInfo to be filled
4600 *
4601 * Acquire the format chosen by the driver. This is useful in decoder or encoder elements where
4602 * the output format is chosen by the HW.
4603 *
4604 * Returns: %TRUE on success, %FALSE on failure.
4605 */
4606gboolean
4607gst_aml_v4l2_object_acquire_format(GstAmlV4l2Object *v4l2object, GstVideoInfo *info)
4608{
4609 struct v4l2_fmtdesc *fmtdesc;
4610 struct v4l2_format fmt;
4611 struct v4l2_crop crop;
4612 struct v4l2_selection sel;
4613 struct v4l2_rect *r = NULL;
4614 GstVideoFormat format;
4615 guint width, height;
4616 GstVideoAlignment align;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004617 gint dw_mode;
xuesong.jiangae1548e2022-05-06 16:38:46 +08004618
4619 gst_video_info_init(info);
4620 gst_video_alignment_reset(&align);
4621
4622 memset(&fmt, 0x00, sizeof(struct v4l2_format));
4623 fmt.type = v4l2object->type;
fei.denge9458472023-04-18 02:05:48 +00004624 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "fmt.type:%d", fmt.type);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004625 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4626 goto get_fmt_failed;
4627
4628 fmtdesc = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object,
4629 fmt.fmt.pix.pixelformat);
4630 if (fmtdesc == NULL)
4631 goto unsupported_format;
4632
4633 /* No need to care about mplane, the four first params are the same */
4634 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fmt.fmt.pix.pixelformat);
4635
4636 /* fails if we do no translate the fmt.pix.pixelformat to GstVideoFormat */
4637 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4638 goto unsupported_format;
4639
4640 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4641 goto invalid_dimensions;
4642
4643 width = fmt.fmt.pix.width;
4644 height = fmt.fmt.pix.height;
xuesong.jiangae1548e2022-05-06 16:38:46 +08004645 /* Use the default compose rectangle */
4646 memset(&sel, 0, sizeof(struct v4l2_selection));
4647 sel.type = v4l2object->type;
4648 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4649 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
4650 {
4651 r = &sel.r;
4652 }
4653 else
4654 {
4655 /* For ancient kernels, fall back to G_CROP */
4656 memset(&crop, 0, sizeof(struct v4l2_crop));
4657 crop.type = v4l2object->type;
4658 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4659 r = &crop.c;
4660 }
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004661
4662 dw_mode = gst_aml_v4l2_object_get_dw_mode(v4l2object);
4663 if (r && (dw_mode >= 0 && dw_mode != 16))
xuesong.jiangae1548e2022-05-06 16:38:46 +08004664 {
4665 align.padding_left = r->left;
4666 align.padding_top = r->top;
4667 align.padding_right = width - r->width - r->left;
4668 align.padding_bottom = height - r->height - r->top;
4669 width = r->width;
4670 height = r->height;
xuesong.jiang282ca572023-05-05 09:03:32 +00004671 width = (width / 2) * 2; /* align for dw */
4672 height = (height / 2) * 2; /* align for dw */
xuesong.jiangae1548e2022-05-06 16:38:46 +08004673 }
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004674 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "final w:%d, h:%d", width, height);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004675
4676 gst_video_info_set_format(info, format, width, height);
4677
4678 switch (fmt.fmt.pix.field)
4679 {
4680 case V4L2_FIELD_ANY:
4681 case V4L2_FIELD_NONE:
4682 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4683 break;
4684 case V4L2_FIELD_INTERLACED:
4685 case V4L2_FIELD_INTERLACED_TB:
4686 case V4L2_FIELD_INTERLACED_BT:
4687 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4688 break;
4689 default:
4690 goto unsupported_field;
4691 }
4692
4693 gst_aml_v4l2_object_get_colorspace(&fmt, &info->colorimetry);
4694
4695 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &fmt, info, &align);
4696
4697 /* Shall we setup the pool ? */
4698
4699 return TRUE;
4700
4701get_fmt_failed:
4702{
4703 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4704 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4705 return FALSE;
4706}
4707invalid_dimensions:
4708{
4709 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4710 (_("Video device returned invalid dimensions.")),
4711 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4712 fmt.fmt.pix.height));
4713 return FALSE;
4714}
4715unsupported_field:
4716{
4717 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4718 (_("Video device uses an unsupported interlacing method.")),
4719 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4720 return FALSE;
4721}
4722unsupported_format:
4723{
4724 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4725 (_("Video device uses an unsupported pixel format.")),
4726 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4727 GST_FOURCC_ARGS(fmt.fmt.pix.pixelformat)));
4728 return FALSE;
4729}
4730}
4731
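/* Program the crop rectangle derived from the saved alignment, preferring
 * VIDIOC_S_SELECTION and falling back to the legacy VIDIOC_S_CROP/G_CROP
 * pair when the selection API is not available. */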
4732gboolean
4733gst_aml_v4l2_object_set_crop(GstAmlV4l2Object *obj)
4734{
4735 struct v4l2_selection sel = {0};
4736 struct v4l2_crop crop = {0};
4737
4738 sel.type = obj->type;
4739 sel.target = V4L2_SEL_TGT_CROP;
4740 sel.flags = 0;
4741 sel.r.left = obj->align.padding_left;
4742 sel.r.top = obj->align.padding_top;
4743 sel.r.width = obj->info.width;
4744 sel.r.height = obj->info.height;
4745
4746 crop.type = obj->type;
4747 crop.c = sel.r;
4748
4749 if (obj->align.padding_left + obj->align.padding_top +
4750 obj->align.padding_right + obj->align.padding_bottom ==
4751 0)
4752 {
4753 GST_DEBUG_OBJECT(obj->dbg_obj, "no cropping needed");
4754 return TRUE;
4755 }
4756
4757 GST_DEBUG_OBJECT(obj->dbg_obj,
4758 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4759 crop.c.width, crop.c.height);
4760
4761 if (obj->ioctl(obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0)
4762 {
4763 if (errno != ENOTTY)
4764 {
4765 GST_WARNING_OBJECT(obj->dbg_obj,
4766 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4767 g_strerror(errno));
4768 return FALSE;
4769 }
4770 else
4771 {
4772 if (obj->ioctl(obj->video_fd, VIDIOC_S_CROP, &crop) < 0)
4773 {
4774 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_S_CROP failed");
4775 return FALSE;
4776 }
4777
4778 if (obj->ioctl(obj->video_fd, VIDIOC_G_CROP, &crop) < 0)
4779 {
4780 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_G_CROP failed");
4781 return FALSE;
4782 }
4783
4784 sel.r = crop.c;
4785 }
4786 }
4787
4788 GST_DEBUG_OBJECT(obj->dbg_obj,
4789 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4790 crop.c.width, crop.c.height);
4791
4792 return TRUE;
4793}
4794
4795gboolean
4796gst_aml_v4l2_object_caps_equal(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4797{
4798 GstStructure *config;
4799 GstCaps *oldcaps;
4800 gboolean ret;
4801
4802 if (!v4l2object->pool)
4803 return FALSE;
4804
4805 config = gst_buffer_pool_get_config(v4l2object->pool);
4806 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4807
4808 ret = oldcaps && gst_caps_is_equal(caps, oldcaps);
4809
4810 gst_structure_free(config);
4811
4812 return ret;
4813}
4814
4815gboolean
4816gst_aml_v4l2_object_caps_is_subset(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4817{
4818 GstStructure *config;
4819 GstCaps *oldcaps;
4820 gboolean ret;
4821
4822 if (!v4l2object->pool)
4823 return FALSE;
4824
4825 config = gst_buffer_pool_get_config(v4l2object->pool);
4826 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4827
4828 ret = oldcaps && gst_caps_is_subset(oldcaps, caps);
4829
4830 gst_structure_free(config);
4831
4832 return ret;
4833}
4834
4835GstCaps *
4836gst_aml_v4l2_object_get_current_caps(GstAmlV4l2Object *v4l2object)
4837{
4838 GstStructure *config;
4839 GstCaps *oldcaps;
4840
4841 if (!v4l2object->pool)
4842 return NULL;
4843
4844 config = gst_buffer_pool_get_config(v4l2object->pool);
4845 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4846
4847 if (oldcaps)
4848 gst_caps_ref(oldcaps);
4849
4850 gst_structure_free(config);
4851
4852 return oldcaps;
4853}
4854
4855gboolean
4856gst_aml_v4l2_object_unlock(GstAmlV4l2Object *v4l2object)
4857{
4858 gboolean ret = TRUE;
4859
4860 GST_LOG_OBJECT(v4l2object->dbg_obj, "start flushing");
4861
4862 gst_poll_set_flushing(v4l2object->poll, TRUE);
4863
4864 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4865 gst_buffer_pool_set_flushing(v4l2object->pool, TRUE);
4866
4867 return ret;
4868}
4869
4870gboolean
4871gst_aml_v4l2_object_unlock_stop(GstAmlV4l2Object *v4l2object)
4872{
4873 gboolean ret = TRUE;
4874
4875 GST_LOG_OBJECT(v4l2object->dbg_obj, "stop flushing");
4876
4877 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4878 gst_buffer_pool_set_flushing(v4l2object->pool, FALSE);
4879
4880 gst_poll_set_flushing(v4l2object->poll, FALSE);
4881
4882 return ret;
4883}
4884
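/* Stop streaming and tear down the pool. During a resolution switch the
 * downstream drm buffer pool is kept referenced (old_other_pool /
 * old_old_other_pool) until its outstanding buffers have been recycled. */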
4885gboolean
4886gst_aml_v4l2_object_stop(GstAmlV4l2Object *v4l2object)
4887{
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004888 GstAmlV4l2BufferPool *bpool = GST_AML_V4L2_BUFFER_POOL(v4l2object->pool);
4889
xuesong.jiangae1548e2022-05-06 16:38:46 +08004890 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "stopping");
4891
4892 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
4893 goto done;
4894 if (!GST_AML_V4L2_IS_ACTIVE(v4l2object))
4895 goto done;
4896
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004897 if (bpool && bpool->other_pool) /* jxsdbg for resolution switch */
4898 {
4899 if (v4l2object->old_other_pool)
4900 {
4901 /* this case indicates the 1st switch did not wait for all old pool bufs to be recycled and a 2nd switch is coming,
4902 so save the 1st old pool */
4903 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "switching occurs during last switching buf recycle flow");
4904 v4l2object->old_old_other_pool = v4l2object->old_other_pool;
4905 }
4906
4907 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "switching flow, ref old drmbufferpool");
4908 v4l2object->old_other_pool = bpool->other_pool;
4909 gst_object_ref(v4l2object->old_other_pool);
4910 }
4911
xuesong.jiangae1548e2022-05-06 16:38:46 +08004912 if (v4l2object->pool)
4913 {
4914 if (!gst_aml_v4l2_buffer_pool_orphan(&v4l2object->pool))
4915 {
4916 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "deactivating pool");
4917 gst_buffer_pool_set_active(v4l2object->pool, FALSE);
4918 gst_object_unref(v4l2object->pool);
4919 }
4920 v4l2object->pool = NULL;
4921 }
4922
4923 GST_AML_V4L2_SET_INACTIVE(v4l2object);
4924
4925done:
4926 return TRUE;
4927}
4928
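/* Enumerate the driver's pixel formats and build the caps they support,
 * appending a DMABUF-feature copy of each entry; in stream mode the
 * "alignment" field is stripped from the result. */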
4929GstCaps *
4930gst_aml_v4l2_object_probe_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4931{
4932 GstCaps *ret;
4933 GSList *walk;
4934 GSList *formats;
4935
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08004936 GST_INFO_OBJECT(v4l2object->dbg_obj, "filter caps: %" GST_PTR_FORMAT, filter);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004937 formats = gst_aml_v4l2_object_get_format_list(v4l2object);
4938
4939 ret = gst_caps_new_empty();
4940
4941 if (v4l2object->keep_aspect && !v4l2object->par)
4942 {
4943 struct v4l2_cropcap cropcap;
4944
4945 memset(&cropcap, 0, sizeof(cropcap));
4946
4947 cropcap.type = v4l2object->type;
4948 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
4949 {
4950 if (errno != ENOTTY)
4951 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4952 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4953 g_strerror(errno));
4954 }
4955 else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator)
4956 {
4957 v4l2object->par = g_new0(GValue, 1);
4958 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
4959 gst_value_set_fraction(v4l2object->par, cropcap.pixelaspect.numerator,
4960 cropcap.pixelaspect.denominator);
4961 }
4962 }
4963
4964 for (walk = formats; walk; walk = walk->next)
4965 {
4966 struct v4l2_fmtdesc *format;
4967 GstStructure *template;
4968 GstCaps *tmp, *tmp2;
4969
4970 format = (struct v4l2_fmtdesc *)walk->data;
4971
4972 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(format->pixelformat);
4973
4974 if (!template)
4975 {
4976 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4977 "unknown format %" GST_FOURCC_FORMAT,
4978 GST_FOURCC_ARGS(format->pixelformat));
4979 continue;
4980 }
4981
4982 /* If we have a filter, check if we need to probe this format or not */
4983 if (filter)
4984 {
4985 GstCaps *format_caps = gst_caps_new_empty();
4986
4987 gst_caps_append_structure(format_caps, gst_structure_copy(template));
xuesong.jiange1a19662022-06-21 20:30:22 +08004988 GST_INFO_OBJECT(v4l2object->dbg_obj, "format_caps: %" GST_PTR_FORMAT, format_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08004989
4990 if (!gst_caps_can_intersect(format_caps, filter))
4991 {
4992 gst_caps_unref(format_caps);
4993 gst_structure_free(template);
4994 continue;
4995 }
4996
4997 gst_caps_unref(format_caps);
4998 }
4999
5000 tmp = gst_aml_v4l2_object_probe_caps_for_format(v4l2object,
5001 format->pixelformat, template);
xuesong.jiange1a19662022-06-21 20:30:22 +08005002 GST_INFO_OBJECT(v4l2object->dbg_obj, "tmp caps: %" GST_PTR_FORMAT, tmp);
xuesong.jiangae1548e2022-05-06 16:38:46 +08005003
5004 if (tmp)
5005 {
5006 tmp2 = gst_caps_copy(tmp);
5007 gst_caps_set_features_simple(tmp2, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
5008 gst_caps_append(ret, tmp);
5009 gst_caps_append(ret, tmp2);
5010 }
5011
5012 gst_structure_free(template);
5013 }
5014
5015 if (filter)
5016 {
5017 GstCaps *tmp;
5018
5019 tmp = ret;
5020 ret = gst_caps_intersect_full(filter, ret, GST_CAPS_INTERSECT_FIRST);
5021 gst_caps_unref(tmp);
5022 }
5023
xuesong.jiang22a9b112023-05-24 09:01:59 +00005024 if (v4l2object->stream_mode)
5025 {
5026 GST_INFO_OBJECT(v4l2object->dbg_obj, "ret caps: %" GST_PTR_FORMAT, ret);
5027 for (guint i = 0; i < gst_caps_get_size(ret); i++)
5028 {
5029 GstStructure *s = gst_caps_get_structure(ret, i);
5030 if (s)
5031 gst_structure_remove_field(s, "alignment");
5032
5033 GST_DEBUG("i:%d, s:%p", i, s);
5034 }
5035 GST_INFO_OBJECT(v4l2object->dbg_obj, "new ret caps: %" GST_PTR_FORMAT, ret);
5036 }
5037
xuesong.jiangae1548e2022-05-06 16:38:46 +08005038 GST_INFO_OBJECT(v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
5039
5040 return ret;
5041}
5042
5043GstCaps *
5044gst_aml_v4l2_object_get_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
5045{
5046 GstCaps *ret;
5047
5048 if (v4l2object->probed_caps == NULL)
5049 v4l2object->probed_caps = gst_aml_v4l2_object_probe_caps(v4l2object, NULL);
5050
5051 if (filter)
5052 {
5053 ret = gst_caps_intersect_full(filter, v4l2object->probed_caps,
5054 GST_CAPS_INTERSECT_FIRST);
5055 }
5056 else
5057 {
5058 ret = gst_caps_ref(v4l2object->probed_caps);
5059 }
5060
5061 return ret;
5062}
5063
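/* Answer the downstream ALLOCATION query on the capture side: choose between
 * our own pool and the downstream one according to the I/O mode, size both
 * pools (driver minimum plus headroom, minus outstanding buffers while a
 * resolution switch is draining), and propagate video meta and alignment. */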
5064gboolean
5065gst_aml_v4l2_object_decide_allocation(GstAmlV4l2Object *obj, GstQuery *query)
5066{
5067 GstCaps *caps;
5068 GstBufferPool *pool = NULL, *other_pool = NULL;
5069 GstStructure *config;
5070 guint size, min, max, own_min = 0;
5071 gboolean update;
5072 gboolean has_video_meta;
5073 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
5074 GstAllocator *allocator = NULL;
5075 GstAllocationParams params = {0};
5076
5077 GST_DEBUG_OBJECT(obj->dbg_obj, "decide allocation");
5078
5079 g_return_val_if_fail(obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
5080 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
5081 FALSE);
5082
5083 gst_query_parse_allocation(query, &caps, NULL);
5084
5085 if (obj->pool == NULL)
5086 {
5087 if (!gst_aml_v4l2_object_setup_pool(obj, caps))
5088 goto pool_failed;
5089 }
5090
5091 if (gst_query_get_n_allocation_params(query) > 0)
5092 gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
5093
5094 if (gst_query_get_n_allocation_pools(query) > 0)
5095 {
5096 gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
5097 update = TRUE;
5098 }
5099 else
5100 {
5101 pool = NULL;
5102 min = max = 0;
5103 size = 0;
5104 update = FALSE;
5105 }
5106
5107 GST_DEBUG_OBJECT(obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool);
5108
5109 has_video_meta =
5110 gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
5111
5112 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
5113
5114 gst_aml_v4l2_get_driver_min_buffers(obj);
5115 /* We can't share our own pool, if it exceed V4L2 capacity */
5116 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
5117 can_share_own_pool = FALSE;
5118
5119 /* select a pool */
5120 switch (obj->mode)
5121 {
5122 case GST_V4L2_IO_RW:
5123 if (pool)
5124 {
5125 /* in READ/WRITE mode, prefer a downstream pool because our own pool
5126 * doesn't help much, we have to write to it as well */
5127 GST_DEBUG_OBJECT(obj->dbg_obj,
5128 "read/write mode: using downstream pool");
5129 /* use the biggest size, when we use our own pool we can't really do any
5130 * other size than what the hardware gives us but for downstream pools
5131 * we can try */
5132 size = MAX(size, obj->info.size);
5133 }
5134 else if (can_share_own_pool)
5135 {
5136 /* no downstream pool, use our own then */
5137 GST_DEBUG_OBJECT(obj->dbg_obj,
5138 "read/write mode: no downstream pool, using our own");
5139 pool = gst_object_ref(obj->pool);
5140 size = obj->info.size;
5141 pushing_from_our_pool = TRUE;
5142 }
5143 break;
5144
5145 case GST_V4L2_IO_USERPTR:
5146 case GST_V4L2_IO_DMABUF_IMPORT:
5147 /* in importing mode, prefer our own pool, and pass the other pool to
5148 * our own, so it can serve itself */
5149 if (pool == NULL)
5150 goto no_downstream_pool;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005151 gst_aml_v4l2_buffer_pool_set_other_pool(GST_AML_V4L2_BUFFER_POOL(obj->pool), pool);
xuesong.jiangae1548e2022-05-06 16:38:46 +08005152 other_pool = pool;
5153 gst_object_unref(pool);
5154 pool = gst_object_ref(obj->pool);
5155 size = obj->info.size;
5156 break;
5157
5158 case GST_V4L2_IO_MMAP:
5159 case GST_V4L2_IO_DMABUF:
5160 /* in streaming mode, prefer our own pool */
5161 /* Check if we can use it ... */
5162 if (can_share_own_pool)
5163 {
5164 if (pool)
5165 gst_object_unref(pool);
5166 pool = gst_object_ref(obj->pool);
5167 size = obj->info.size;
5168 GST_DEBUG_OBJECT(obj->dbg_obj,
5169 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
5170 pushing_from_our_pool = TRUE;
5171 }
5172 else if (pool)
5173 {
5174 GST_DEBUG_OBJECT(obj->dbg_obj,
5175 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
5176 pool);
5177 }
5178 else
5179 {
5180 GST_DEBUG_OBJECT(obj->dbg_obj,
5181 "streaming mode: no usable pool, copying to generic pool");
5182 size = MAX(size, obj->info.size);
5183 }
5184 break;
5185 case GST_V4L2_IO_AUTO:
5186 default:
5187 GST_WARNING_OBJECT(obj->dbg_obj, "unhandled mode");
5188 break;
5189 }
5190
5191 if (size == 0)
5192 goto no_size;
5193
5194 /* If pushing from our own pool, configure it with queried minimum,
5195 * otherwise use the minimum required */
5196 if (pushing_from_our_pool)
5197 {
5198 /* When pushing from our own pool, we need what the downstream one needs, to be able
5199 * to fill the pipeline, the minimum required by the decoder according to the
5200 * driver, and 2 more, so we don't end up with everything downstream or
5201 * held by the decoder. We account 2 buffers for v4l2 so when one is being
5202 * pushed downstream the other one can already be queued for the next
5203 * frame. */
5204 own_min = min + obj->min_buffers + 2;
5205
5206 /* If no allocation parameters were provided, allow for a few more
5207 * buffers and enable copy threshold */
5208 if (!update)
5209 {
5210 own_min += 2;
5211 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
5212 TRUE);
5213 }
5214 else
5215 {
5216 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
5217 FALSE);
5218 }
5219 }
5220 else
5221 {
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005222 min = obj->min_buffers;
5223 max = min;
xuesong.jiangae1548e2022-05-06 16:38:46 +08005224 }
5225
5226 /* Request a bigger max, if one was suggested but it's too small */
5227 if (max != 0)
5228 max = MAX(min, max);
5229
5230 /* First step, configure our own pool */
5231 config = gst_buffer_pool_get_config(obj->pool);
5232
5233 if (obj->need_video_meta || has_video_meta)
5234 {
5235 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5236 gst_buffer_pool_config_add_option(config,
5237 GST_BUFFER_POOL_OPTION_VIDEO_META);
5238 }
5239
5240 gst_buffer_pool_config_set_allocator(config, allocator, &params);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005241 gst_buffer_pool_config_set_params(config, caps, size, min, max);
xuesong.jiangae1548e2022-05-06 16:38:46 +08005242
5243 GST_DEBUG_OBJECT(obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config);
5244
5245 /* Our pool often needs to adjust the value */
5246 if (!gst_buffer_pool_set_config(obj->pool, config))
5247 {
5248 config = gst_buffer_pool_get_config(obj->pool);
5249
5250 GST_DEBUG_OBJECT(obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config);
5251
5252 /* our pool will adjust the maximum buffer, which we are fine with */
5253 if (!gst_buffer_pool_set_config(obj->pool, config))
5254 goto config_failed;
5255 }
5256
5257 /* Now configure the other pool if different */
5258 if (obj->pool != pool)
5259 other_pool = pool;
5260
5261 if (other_pool)
5262 {
5263 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)obj->element;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005264 guint other_min = min;
5265 guint other_max = max;
5266
5267 if (obj->old_other_pool || obj->old_old_other_pool) /* pools retired by a resolution switch */
5268 {
5269 obj->outstanding_buf_num = gst_aml_v4l2_object_get_outstanding_capture_buf_num(obj);
5270 other_min = min - obj->outstanding_buf_num;
5271 other_max = max - obj->outstanding_buf_num;
5272 GST_DEBUG_OBJECT(obj->dbg_obj, "old pool:%p, older pool:%p, outstanding buf num:%d, set min, max to %d,%d",
5273 obj->old_other_pool, obj->old_old_other_pool,
5274 obj->outstanding_buf_num, other_min, other_max);
5275 }
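/* Rationale: after a resolution switch, buffers from the retired pools may
 * still be held downstream, so the replacement pool is configured with
 * correspondingly fewer buffers to keep the total number in flight within
 * the original min/max budget. */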
5276
xuesong.jiangae1548e2022-05-06 16:38:46 +08005277 if (self->is_secure_path)
5278 {
5279 params.flags |= GST_MEMORY_FLAG_LAST << 1; // in the drm allocator, GST_MEMORY_FLAG_LAST << 1 represents GST_MEMORY_FLAG_SECURE
5280 GST_DEBUG_OBJECT(obj->dbg_obj, "set secure flag for drm buffer pool, flags:0x%x", params.flags);
5281 }
5282 config = gst_buffer_pool_get_config(other_pool);
5283 gst_buffer_pool_config_set_allocator(config, allocator, &params);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005284 gst_buffer_pool_config_set_params (config, caps, size, other_min, other_max);
xuesong.jiangae1548e2022-05-06 16:38:46 +08005285 gst_buffer_pool_config_set_video_alignment(config, &obj->align);
5286
5287 GST_DEBUG_OBJECT(obj->dbg_obj, "setting other pool config to %" GST_PTR_FORMAT, config);
5288
5289 /* if downstream supports video metadata, add this to the pool config */
5290 if (has_video_meta)
5291 {
5292 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
5293 gst_buffer_pool_config_add_option(config,
5294 GST_BUFFER_POOL_OPTION_VIDEO_META);
5295 }
5296
5297 if (!gst_buffer_pool_set_config(other_pool, config))
5298 {
5299 config = gst_buffer_pool_get_config(other_pool);
5300
5301 if (!gst_buffer_pool_config_validate_params(config, caps, size, min,
5302 max))
5303 {
5304 gst_structure_free(config);
5305 goto config_failed;
5306 }
5307
5308 if (!gst_buffer_pool_set_config(other_pool, config))
5309 goto config_failed;
5310 }
5311 }
5312
5313 if (pool)
5314 {
5315 /* For simplicity, read back the active configuration so our base
5316 * class gets the right information */
5317 config = gst_buffer_pool_get_config(pool);
5318 gst_buffer_pool_config_get_params(config, NULL, &size, &min, &max);
5319 gst_structure_free(config);
5320 }
5321
5322 if (update)
5323 gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
5324 else
5325 gst_query_add_allocation_pool(query, pool, size, min, max);
5326
5327 if (allocator)
5328 gst_object_unref(allocator);
5329
5330 if (pool)
5331 gst_object_unref(pool);
5332
5333 return TRUE;
5334
5335pool_failed:
5336{
5337 /* setup_pool has already sent the error */
5338 goto cleanup;
5339}
5340config_failed:
5341{
5342 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5343 (_("Failed to configure internal buffer pool.")), (NULL));
5344 goto cleanup;
5345}
5346no_size:
5347{
5348 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5349 (_("Video device did not suggest any buffer size.")), (NULL));
5350 goto cleanup;
5351}
5352cleanup:
5353{
5354 if (allocator)
5355 gst_object_unref(allocator);
5356
5357 if (pool)
5358 gst_object_unref(pool);
5359 return FALSE;
5360}
5361no_downstream_pool:
5362{
5363 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
5364 (_("No downstream pool to import from.")),
5365 ("When importing DMABUF or USERPTR, we need a pool to import from"));
5366 return FALSE;
5367}
5368}
5369
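/* Answer an ALLOCATION query from a peer element: offer this object's own
 * pool (only for MMAP/DMABUF io modes, and only when the query caps match the
 * pool's caps), report the driver's minimum buffer count, and advertise
 * GstVideoMeta support. */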
5370gboolean
5371gst_aml_v4l2_object_propose_allocation(GstAmlV4l2Object *obj, GstQuery *query)
5372{
5373 GstBufferPool *pool;
5374 /* we need at least 2 buffers to operate */
5375 guint size, min, max;
5376 GstCaps *caps;
5377 gboolean need_pool;
5378
5379 /* Set default allocation parameters */
5380 size = obj->info.size;
5381 min = GST_AML_V4L2_MIN_BUFFERS;
5382 max = VIDEO_MAX_FRAME;
5383
5384 gst_query_parse_allocation(query, &caps, &need_pool);
5385
5386 if (caps == NULL)
5387 goto no_caps;
5388
5389 switch (obj->mode)
5390 {
5391 case GST_V4L2_IO_MMAP:
5392 case GST_V4L2_IO_DMABUF:
5393 if ((pool = obj->pool))
5394 gst_object_ref(pool);
5395 break;
5396 default:
5397 pool = NULL;
5398 break;
5399 }
5400
5401 if (pool != NULL)
5402 {
5403 GstCaps *pcaps;
5404 GstStructure *config;
5405
5406 /* we had a pool, check caps */
5407 config = gst_buffer_pool_get_config(pool);
5408 gst_buffer_pool_config_get_params(config, &pcaps, NULL, NULL, NULL);
5409
5410 GST_DEBUG_OBJECT(obj->dbg_obj,
5411 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
5412 if (!gst_caps_is_equal(caps, pcaps))
5413 {
5414 gst_structure_free(config);
5415 gst_object_unref(pool);
5416 goto different_caps;
5417 }
5418 gst_structure_free(config);
5419 }
5420 gst_aml_v4l2_get_driver_min_buffers(obj);
5421
5422 min = MAX(obj->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
5423
5424 gst_query_add_allocation_pool(query, pool, size, min, max);
5425
5426 /* we also support various metadata */
5427 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
5428
5429 if (pool)
5430 gst_object_unref(pool);
5431
5432 return TRUE;
5433
5434 /* ERRORS */
5435no_caps:
5436{
5437 GST_DEBUG_OBJECT(obj->dbg_obj, "no caps specified");
5438 return FALSE;
5439}
5440different_caps:
5441{
5442 /* different caps, we can't use this pool */
5443 GST_DEBUG_OBJECT(obj->dbg_obj, "pool has different caps");
5444 return FALSE;
5445}
5446}
5447
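/* Check whether a buffer produced elsewhere can be imported by the driver
 * (USERPTR or DMABUF_IMPORT io only): strides and offsets must be at least as
 * large as the negotiated format (the driver format is updated when they are
 * larger), the memory layout must match the plane count, and DMABuf import
 * additionally requires DMABuf memory. */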
5448gboolean
5449gst_aml_v4l2_object_try_import(GstAmlV4l2Object *obj, GstBuffer *buffer)
5450{
5451 GstVideoMeta *vmeta;
5452 guint n_mem = gst_buffer_n_memory(buffer);
5453
5454 /* only import if requested */
5455 switch (obj->mode)
5456 {
5457 case GST_V4L2_IO_USERPTR:
5458 case GST_V4L2_IO_DMABUF_IMPORT:
5459 break;
5460 default:
5461 GST_DEBUG_OBJECT(obj->dbg_obj,
5462 "The io-mode does not enable importation");
5463 return FALSE;
5464 }
5465
5466 vmeta = gst_buffer_get_video_meta(buffer);
5467 if (!vmeta && obj->need_video_meta)
5468 {
5469 GST_DEBUG_OBJECT(obj->dbg_obj, "Downstream buffer uses standard "
5470 "stride/offset while the driver does not.");
5471 return FALSE;
5472 }
5473
5474 /* we need matching strides/offsets and size */
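/* Import policy: a stride/offset equal to ours is accepted as-is, a larger
 * one means extra padding and is pushed to the driver with VIDIOC_S_FMT
 * below, and a smaller one cannot be imported. */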
5475 if (vmeta)
5476 {
5477 guint p;
5478 gboolean need_fmt_update = FALSE;
5479
5480 if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES(&obj->info))
5481 {
5482 GST_WARNING_OBJECT(obj->dbg_obj,
5483 "Cannot import buffers with different number planes");
5484 return FALSE;
5485 }
5486
5487 for (p = 0; p < vmeta->n_planes; p++)
5488 {
5489 if (vmeta->stride[p] < obj->info.stride[p])
5490 {
5491 GST_DEBUG_OBJECT(obj->dbg_obj,
5492 "Not importing as remote stride %i is smaller then %i on plane %u",
5493 vmeta->stride[p], obj->info.stride[p], p);
5494 return FALSE;
5495 }
5496 else if (vmeta->stride[p] > obj->info.stride[p])
5497 {
5498 need_fmt_update = TRUE;
5499 }
5500
5501 if (vmeta->offset[p] < obj->info.offset[p])
5502 {
5503 GST_DEBUG_OBJECT(obj->dbg_obj,
5504 "Not importing as offset %" G_GSIZE_FORMAT
5505 " is smaller then %" G_GSIZE_FORMAT " on plane %u",
5506 vmeta->offset[p], obj->info.offset[p], p);
5507 return FALSE;
5508 }
5509 else if (vmeta->offset[p] > obj->info.offset[p])
5510 {
5511 need_fmt_update = TRUE;
5512 }
5513 }
5514
5515 if (need_fmt_update)
5516 {
5517 struct v4l2_format format;
5518 gint wanted_stride[GST_VIDEO_MAX_PLANES] = {
5519 0,
5520 };
5521
5522 format = obj->format;
5523
5524 /* update the current format with the stride we want to import from */
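/* For tiled formats GStreamer encodes the tile counts in the stride value,
 * while V4L2 expects a plain bytesperline, hence the
 * GST_VIDEO_TILE_X_TILES() conversion applied per plane below. */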
5525 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5526 {
5527 guint i;
5528
5529 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted strides:");
5530
5531 for (i = 0; i < obj->n_v4l2_planes; i++)
5532 {
5533 gint stride = vmeta->stride[i];
5534
5535 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5536 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5537
5538 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
5539 wanted_stride[i] = stride;
5540 GST_DEBUG_OBJECT(obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
5541 }
5542 }
5543 else
5544 {
5545 gint stride = vmeta->stride[0];
5546
5547 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted stride: %i", stride);
5548
5549 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5550 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5551
5552 format.fmt.pix.bytesperline = stride;
5553 wanted_stride[0] = stride;
5554 }
5555
5556 if (obj->ioctl(obj->video_fd, VIDIOC_S_FMT, &format) < 0)
5557 {
5558 GST_WARNING_OBJECT(obj->dbg_obj,
5559 "Something went wrong trying to update current format: %s",
5560 g_strerror(errno));
5561 return FALSE;
5562 }
5563
5564 gst_aml_v4l2_object_save_format(obj, obj->fmtdesc, &format, &obj->info,
5565 &obj->align);
5566
5567 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5568 {
5569 guint i;
5570
5571 for (i = 0; i < obj->n_v4l2_planes; i++)
5572 {
5573 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i])
5574 {
5575 GST_DEBUG_OBJECT(obj->dbg_obj,
5576 "[%i] Driver did not accept the new stride (wants %i, got %i)",
5577 i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
5578 wanted_stride[i]);
5579 return FALSE;
5580 }
5581 }
5582 }
5583 else
5584 {
5585 if (format.fmt.pix.bytesperline != wanted_stride[0])
5586 {
5587 GST_DEBUG_OBJECT(obj->dbg_obj,
5588 "Driver did not accept the new stride (wants %i, got %i)",
5589 format.fmt.pix.bytesperline, wanted_stride[0]);
5590 return FALSE;
5591 }
5592 }
5593 }
5594 }
5595
5596 /* we can always import a single-memory buffer, but otherwise we need the
5597 * same number of memory objects. */
5598 if (n_mem != 1 && n_mem != obj->n_v4l2_planes)
5599 {
5600 GST_DEBUG_OBJECT(obj->dbg_obj, "Can only import %i memory blocks, "
5601 "buffer contains %u memory blocks",
5602 obj->n_v4l2_planes, n_mem);
5603 return FALSE;
5604 }
5605
5606 /* For DMABuf import we of course need DMABuf memory */
5607 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT)
5608 {
5609 guint i;
5610
5611 for (i = 0; i < n_mem; i++)
5612 {
5613 GstMemory *mem = gst_buffer_peek_memory(buffer, i);
5614
5615 if (!gst_is_dmabuf_memory(mem))
5616 {
5617 GST_DEBUG_OBJECT(obj->dbg_obj, "Cannot import non-DMABuf memory.");
5618 return FALSE;
5619 }
5620 }
5621 }
5622
5623 /* for the remaining, only the kernel driver can tell */
5624 return TRUE;
5625}
5626
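/* Helper: enable a flag-style Amlogic control. The control is first queried
 * with VIDIOC_QUERYCTRL; if it exists and is not disabled it is set to 1 via
 * VIDIOC_S_CTRL. Returns TRUE on success or when the control is disabled,
 * FALSE when the query or the set fails. */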
xuesong.jiang22a9b112023-05-24 09:01:59 +00005627static gboolean gst_aml_v4l2_set_control(GstAmlV4l2Object *v4l2object, guint ctl)
5628{
5629 int rc;
5630 struct v4l2_queryctrl queryctrl;
5631 struct v4l2_control control;
5632
5633 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)v4l2object->element;
5634 self->is_secure_path = TRUE;
5635
5636 memset(&queryctrl, 0, sizeof(queryctrl));
5637 queryctrl.id = ctl;
5638
5639 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_QUERYCTRL, &queryctrl);
5640 if (rc == 0)
5641 {
5642 if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED))
5643 {
5644 memset(&control, 0, sizeof(control));
5645 control.id = ctl;
5646 control.value = 1;
5647 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_CTRL, &control);
5648 if (rc != 0)
5649 {
5650 GST_ERROR_OBJECT(v4l2object->dbg_obj, "set ctl:0x%x fail rc %d", ctl, rc);
5651 return FALSE;
5652 }
5653 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set ctl:0x%x succ", ctl);
5654 return TRUE;
5655 }
5656 else
5657 {
5658 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "ctl:0x%x is disabled", ctl);
5659 return TRUE;
5660 }
5661 }
5662 else
5663 {
5664 GST_ERROR_OBJECT(v4l2object->dbg_obj, "VIDIOC_QUERYCTRL for 0x%x failed", ctl);
5665 return FALSE;
5666 }
5667}
5668
5669
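/* Enable the decoder's DRM (secure) mode: only needed when the requested io
 * mode is GST_V4L2_IO_DMABUF_IMPORT, which on this platform implies secure
 * memory; a no-op for the other io modes. */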
xuesong.jiangae1548e2022-05-06 16:38:46 +08005670gboolean gst_aml_v4l2_set_drm_mode(GstAmlV4l2Object *v4l2object)
5671{
5672 /* On Amlogic, using GST_V4L2_IO_DMABUF_IMPORT on the output object implies secure memory */
5673 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
5674 {
xuesong.jiangae1548e2022-05-06 16:38:46 +08005675
xuesong.jiang22a9b112023-05-24 09:01:59 +00005676 if (gst_aml_v4l2_set_control(v4l2object, AML_V4L2_SET_DRMMODE))
xuesong.jiangae1548e2022-05-06 16:38:46 +08005677 {
xuesong.jiang22a9b112023-05-24 09:01:59 +00005678 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "AML_V4L2_SET_DRMMODE set succ");
5679 return TRUE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08005680 }
5681 else
5682 {
xuesong.jiang22a9b112023-05-24 09:01:59 +00005683 GST_ERROR_OBJECT(v4l2object->dbg_obj, "AML_V4L2_SET_DRMMODE set fail");
xuesong.jiangae1548e2022-05-06 16:38:46 +08005684 return FALSE;
5685 }
5686 }
5687 else
5688 {
xuesong.jiang22a9b112023-05-24 09:01:59 +00005689 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "req mode is not GST_V4L2_IO_DMABUF_IMPORT, DRM mode does not need to be configured");
5690 return TRUE;
5691 }
5692}
5693
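/* Select the driver's stream input mode when the object was configured for
 * it; otherwise nothing is sent and the default frame mode is kept. */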
5694gboolean gst_aml_v4l2_set_stream_mode(GstAmlV4l2Object *v4l2object)
5695{
5696 if (v4l2object->stream_mode)
5697 {
5698 if (gst_aml_v4l2_set_control(v4l2object, AML_V4L2_SET_STREAM_MODE))
5699 {
5700 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "AML_V4L2_SET_STREAM_MODE set succ");
5701 return TRUE;
5702 }
5703 else
5704 {
5705 GST_ERROR_OBJECT(v4l2object->dbg_obj, "AML_V4L2_SET_STREAM_MODE set fail");
5706 return FALSE;
5707 }
5708 }
5709 else
5710 {
5711 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "req mode is not stream mode, frame mode is configured by default");
xuesong.jiangae1548e2022-05-06 16:38:46 +08005712 return TRUE;
5713 }
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08005714}
5715
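/* Count the capture buffers still outstanding in the pools retired by a
 * resolution switch; a retired pool is released once it has fully drained. */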
5716gint gst_aml_v4l2_object_get_outstanding_capture_buf_num(GstAmlV4l2Object *obj)
5717{
5718 gint ret = 0;
5719 gint count = 0;
5720
5721 if (obj->old_other_pool)
5722 {
5723 count = gst_buffer_pool_get_outstanding_num(obj->old_other_pool);
5724 if (count)
5725 {
5726 ret += count;
5727 }
5728 else
5729 {
5730 gst_object_unref(obj->old_other_pool);
5731 obj->old_other_pool = NULL;
5732 }
5733 }
5734
5735 count = 0;
5736 if (obj->old_old_other_pool)
5737 {
5738 count = gst_buffer_pool_get_outstanding_num(obj->old_old_other_pool);
5739 if (count)
5740 {
5741 ret += count;
5742 }
5743 else
5744 {
5745 gst_object_unref(obj->old_old_other_pool);
5746 obj->old_old_other_pool = NULL;
5747 }
5748 }
5749
5750 return ret;
xuesong.jiangae1548e2022-05-06 16:38:46 +08005751}