1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include <config.h>
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <string.h>
28#include <sys/mman.h>
29#include <sys/ioctl.h>
30
31#ifdef HAVE_GUDEV
32#include <gudev/gudev.h>
33#endif
34
35#include "ext/videodev2.h"
36#include "gstamlv4l2object.h"
37
38#include "gst/gst-i18n-plugin.h"
39
40#include <gst/video/video.h>
41#include <gst/allocators/gstdmabuf.h>
42
43GST_DEBUG_CATEGORY_EXTERN(aml_v4l2_debug);
44#define GST_CAT_DEFAULT aml_v4l2_debug
45
46#define DEFAULT_PROP_DEVICE_NAME NULL
47#define DEFAULT_PROP_DEVICE_FD -1
48#define DEFAULT_PROP_FLAGS 0
49#define DEFAULT_PROP_TV_NORM 0
50#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
51
52#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
53#define DEFAULT_EXTRA_CAPTURE_BUF_SIZE 3
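/* These appear to be tuning knobs (assumption, not stated here): a default
 * allocation size for compressed (OUTPUT queue) buffers and the number of
 * capture buffers requested on top of the driver minimum reported via
 * V4L2_CID_MIN_BUFFERS_FOR_CAPTURE. */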
54
55enum
56{
57 PROP_0,
58 V4L2_STD_OBJECT_PROPS,
59};
60
61/*
62 * common format / caps utilities:
63 */
64typedef enum
65{
66 GST_V4L2_RAW = 1 << 0,
67 GST_V4L2_CODEC = 1 << 1,
68 GST_V4L2_TRANSPORT = 1 << 2,
69 GST_V4L2_NO_PARSE = 1 << 3,
70 GST_V4L2_ALL = 0xffff
71} GstAmlV4L2FormatFlags;
72
73typedef struct
74{
75 guint32 format;
76 gboolean dimensions;
77 GstAmlV4L2FormatFlags flags;
78} GstAmlV4L2FormatDesc;
79
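/* One entry per supported V4L2 pixel format: the fourcc, whether
 * width/height/framerate ranges should be added to its caps (dimensions),
 * and a GstAmlV4L2FormatFlags category used when building the raw-only,
 * codec-only or combined caps in gst_aml_v4l2_object_get_caps_helper() and
 * the templates in gst_aml_v4l2_object_v4l2fourcc_to_structure(). */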
80static const GstAmlV4L2FormatDesc gst_aml_v4l2_formats[] = {
81 /* RGB formats */
82 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
83 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
84 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
85 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
86 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
87 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
88 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
96
97 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
98 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
102 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
103
104 /* Grey formats */
105 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
109 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
110 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
111 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
112 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
113
114 /* Palette formats */
115 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
116
117 /* Chrominance formats */
118 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
119
120 /* Luminance+Chrominance formats */
121 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
122 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
123 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
124 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
125 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
126 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
127 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
128 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
129 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
130 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
131 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
132 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
142
143 /* two planes -- one Y, one Cr + Cb interleaved */
144 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
148 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
149 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
150 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
151 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
153 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
154 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
155 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
156
157 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
158 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
162
163 /* compressed formats */
164 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
165 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
166 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
167 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
168 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
169 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
170 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
171 {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
172 {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
173 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
174 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
175 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
176 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
179 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
180 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
181 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
182 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
183 {V4L2_PIX_FMT_AV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
184
185 /* Vendor-specific formats */
186 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
188 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
189 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
190};
191
192#define GST_AML_V4L2_FORMAT_COUNT (G_N_ELEMENTS(gst_aml_v4l2_formats))
193
194static GSList *gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object);
195
196#define GST_TYPE_AML_V4L2_DEVICE_FLAGS (gst_aml_v4l2_device_get_type())
197static GType
198gst_aml_v4l2_device_get_type(void)
199{
200 static GType v4l2_device_type = 0;
201
202 if (v4l2_device_type == 0)
203 {
204 static const GFlagsValue values[] = {
205 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
206 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
207 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
208
209 {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
210 {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
211
212 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
213 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
214
215 {0, NULL, NULL}};
216
217 v4l2_device_type =
218 g_flags_register_static("GstAmlV4l2DeviceTypeFlags", values);
219 }
220
221 return v4l2_device_type;
222}
223
224GType gst_aml_v4l2_io_mode_get_type(void)
225{
226 static GType v4l2_io_mode = 0;
227
228 if (!v4l2_io_mode)
229 {
230 static const GEnumValue io_modes[] = {
231 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
232 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
233 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
234 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
235 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
236 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
237 "dmabuf-import"},
238
239 {0, NULL, NULL}};
240 v4l2_io_mode = g_enum_register_static("GstAmlV4l2IOMode", io_modes);
241 }
242 return v4l2_io_mode;
243}
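/* Sketch only: the enum above backs the "io-mode", "capture-io-mode" and
 * "output-io-mode" properties installed below, so an application could ask
 * for DMABUF export on the capture side with something like
 *
 *   g_object_set (decoder, "capture-io-mode", GST_V4L2_IO_DMABUF, NULL);
 *
 * where 'decoder' is whatever element embeds this GstAmlV4l2Object (the
 * element name depends on the plugin registration).  The
 * GST_DEFAULT_V4L2_BUF_MODE environment variable handled in
 * gst_aml_v4l2_object_new() offers the same choice without code changes. */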
244
245void gst_aml_v4l2_object_install_properties_helper(GObjectClass *gobject_class,
246 const char *default_device)
247{
248 g_object_class_install_property(gobject_class, PROP_DEVICE,
249 g_param_spec_string("device", "Device", "Device location",
250 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
251 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
252 g_param_spec_string("device-name", "Device name",
253 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
254 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
255 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
256 g_param_spec_int("device-fd", "File descriptor",
257 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
258 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
259 g_object_class_install_property(gobject_class, PROP_FLAGS,
260 g_param_spec_flags("flags", "Flags", "Device type flags",
261 GST_TYPE_AML_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
262 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
263
264 /**
265 * GstV4l2Src:brightness:
266 *
267 * Picture brightness, or more precisely, the black level
268 */
269 g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
270 g_param_spec_int("brightness", "Brightness",
271 "Picture brightness, or more precisely, the black level", G_MININT,
272 G_MAXINT, 0,
273 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
274 /**
275 * GstV4l2Src:contrast:
276 *
277 * Picture contrast or luma gain
278 */
279 g_object_class_install_property(gobject_class, PROP_CONTRAST,
280 g_param_spec_int("contrast", "Contrast",
281 "Picture contrast or luma gain", G_MININT,
282 G_MAXINT, 0,
283 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
284 /**
285 * GstV4l2Src:saturation:
286 *
287 * Picture color saturation or chroma gain
288 */
289 g_object_class_install_property(gobject_class, PROP_SATURATION,
290 g_param_spec_int("saturation", "Saturation",
291 "Picture color saturation or chroma gain", G_MININT,
292 G_MAXINT, 0,
293 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
294 /**
295 * GstV4l2Src:hue:
296 *
297 * Hue or color balance
298 */
299 g_object_class_install_property(gobject_class, PROP_HUE,
300 g_param_spec_int("hue", "Hue",
301 "Hue or color balance", G_MININT,
302 G_MAXINT, 0,
303 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
304
305 /**
306 * GstV4l2Src:io-mode:
307 *
308 * IO Mode
309 */
310 g_object_class_install_property(gobject_class, PROP_IO_MODE,
311 g_param_spec_enum("io-mode", "IO mode",
312 "I/O mode",
313 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
314 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
315
316 /**
317 * GstV4l2Src:extra-controls:
318 *
319 * Additional v4l2 controls for the device. The controls are identified
320 * by the control name (lowercase with '_' for any non-alphanumeric
321 * characters).
322 *
323 * Since: 1.2
324 */
325 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
326 g_param_spec_boxed("extra-controls", "Extra Controls",
327 "Extra v4l2 controls (CIDs) for the device",
328 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
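    /* Illustrative usage, mirroring how upstream v4l2 elements are typically
     * driven (not taken from a caller in this tree): the structure can be
     * built from its serialized form, e.g.
     *
     *   GstStructure *ctrls =
     *       gst_structure_from_string ("controls,brightness=128", NULL);
     *   g_object_set (element, "extra-controls", ctrls, NULL);
     *   gst_structure_free (ctrls);
     *
     * Each field name is the lower-cased control name with non-alphanumeric
     * characters replaced by '_', as described above. */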
329
330 /**
331 * GstV4l2Src:pixel-aspect-ratio:
332 *
333 * The pixel aspect ratio of the device. This overwrites the pixel aspect
334 * ratio queried from the device.
335 *
336 * Since: 1.2
337 */
338 g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
339 g_param_spec_string("pixel-aspect-ratio", "Pixel Aspect Ratio",
340 "Overwrite the pixel aspect ratio of the device", "1/1",
341 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
342
343 /**
344 * GstV4l2Src:force-aspect-ratio:
345 *
346 * When enabled, the pixel aspect ratio queried from the device or set
347 * with the pixel-aspect-ratio property will be enforced.
348 *
349 * Since: 1.2
350 */
351 g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
352 g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
353 "When enabled, the pixel aspect ratio will be enforced", TRUE,
354 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
355}
356
357void gst_aml_v4l2_object_install_m2m_properties_helper(GObjectClass *gobject_class)
358{
359 g_object_class_install_property(gobject_class, PROP_DEVICE,
360 g_param_spec_string("device", "Device", "Device location",
361 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
362
363 g_object_class_install_property(gobject_class, PROP_DEVICE_NAME,
364 g_param_spec_string("device-name", "Device name",
365 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
366 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
367
368 g_object_class_install_property(gobject_class, PROP_DEVICE_FD,
369 g_param_spec_int("device-fd", "File descriptor",
370 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
371 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
372
373 g_object_class_install_property(gobject_class, PROP_OUTPUT_IO_MODE,
374 g_param_spec_enum("output-io-mode", "Output IO mode",
375 "Output side I/O mode (matches sink pad)",
376 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
377 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
378
379 g_object_class_install_property(gobject_class, PROP_CAPTURE_IO_MODE,
380 g_param_spec_enum("capture-io-mode", "Capture IO mode",
381 "Capture I/O mode (matches src pad)",
382 GST_TYPE_AML_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
383 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
384
385 g_object_class_install_property(gobject_class, PROP_EXTRA_CONTROLS,
386 g_param_spec_boxed("extra-controls", "Extra Controls",
387 "Extra v4l2 controls (CIDs) for the device",
388 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
389
390 g_object_class_install_property(gobject_class, PROP_DUMP_FRAME_LOCATION,
391 g_param_spec_string("dump-frame-location", "dump frame location",
392 "Location of the file to write decoder frames", NULL,
393 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
394}
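/* On the m2m (decoder) path the two V4L2 queues are exposed separately:
 * "output-io-mode" selects how encoded buffers are handled on the sink pad
 * (V4L2 OUTPUT queue), while "capture-io-mode" selects how decoded frames
 * are exported on the src pad (V4L2 CAPTURE queue).  A plausible setup,
 * assuming a downstream element that accepts dmabufs, is
 * capture-io-mode=dmabuf with output-io-mode left at its default (auto). */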
395
 396/* Support for 32-bit off_t: this wrapper casts the off_t offset to gint64 */
397#ifdef HAVE_LIBV4L2
398#if SIZEOF_OFF_T < 8
399
400static gpointer
401v4l2_mmap_wrapper(gpointer start, gsize length, gint prot, gint flags, gint fd,
402 off_t offset)
403{
404 return v4l2_mmap(start, length, prot, flags, fd, (gint64)offset);
405}
406
407#define v4l2_mmap v4l2_mmap_wrapper
408
409#endif /* SIZEOF_OFF_T < 8 */
410#endif /* HAVE_LIBV4L2 */
411
412GstAmlV4l2Object *
413gst_aml_v4l2_object_new(GstElement *element,
414 GstObject *debug_object,
415 enum v4l2_buf_type type,
416 const char *default_device,
417 GstAmlV4l2GetInOutFunction get_in_out_func,
418 GstAmlV4l2SetInOutFunction set_in_out_func,
419 GstAmlV4l2UpdateFpsFunction update_fps_func)
420{
421 GstAmlV4l2Object *v4l2object;
422
423 /*
424 * some default values
425 */
426 v4l2object = g_new0(GstAmlV4l2Object, 1);
427
428 if ((V4L2_BUF_TYPE_VIDEO_CAPTURE == type || V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type))
429 {
430 const char *default_mode = getenv("GST_DEFAULT_V4L2_BUF_MODE");
 431 GST_DEBUG("amlmodbuf GST_DEFAULT_V4L2_BUF_MODE:%s", default_mode);
432 if (default_mode)
433 {
434 if (strcmp(default_mode, "DMA_BUF_IMPORT") == 0)
435 v4l2object->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
436 else if (strcmp(default_mode, "DMA_BUF") == 0)
437 v4l2object->req_mode = GST_V4L2_IO_DMABUF;
 438 GST_DEBUG("amlmodbuf set default buf mode:%d", v4l2object->req_mode);
439 }
440 }
441
442 v4l2object->type = type;
443 v4l2object->formats = NULL;
444
445 v4l2object->element = element;
446 v4l2object->dbg_obj = debug_object;
447 v4l2object->get_in_out_func = get_in_out_func;
448 v4l2object->set_in_out_func = set_in_out_func;
449 v4l2object->update_fps_func = update_fps_func;
450
451 v4l2object->video_fd = -1;
452 v4l2object->active = FALSE;
453 v4l2object->videodev = g_strdup(default_device);
454
455 v4l2object->norms = NULL;
456 v4l2object->channels = NULL;
457 v4l2object->colors = NULL;
458
459 v4l2object->keep_aspect = TRUE;
460
461 v4l2object->n_v4l2_planes = 0;
462
463 v4l2object->no_initial_format = FALSE;
464
465 /* We now disable libv4l2 by default, but have an env to enable it. */
466#ifdef HAVE_LIBV4L2
467 if (g_getenv("GST_V4L2_USE_LIBV4L2"))
468 {
469 v4l2object->fd_open = v4l2_fd_open;
470 v4l2object->close = v4l2_close;
471 v4l2object->dup = v4l2_dup;
472 v4l2object->ioctl = v4l2_ioctl;
473 v4l2object->read = v4l2_read;
474 v4l2object->mmap = v4l2_mmap;
475 v4l2object->munmap = v4l2_munmap;
476 }
477 else
478#endif
479 {
480 v4l2object->fd_open = NULL;
481 v4l2object->close = close;
482 v4l2object->dup = dup;
483 v4l2object->ioctl = ioctl;
484 v4l2object->read = read;
485 v4l2object->mmap = mmap;
486 v4l2object->munmap = munmap;
487 }
488 v4l2object->poll = gst_poll_new(TRUE);
489 v4l2object->can_wait_event = FALSE;
490 v4l2object->can_poll_device = TRUE;
491 v4l2object->tvin_port = -1;
492
493 v4l2object->dumpframefile = NULL;
494
495 return v4l2object;
496}
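/* Rough usage sketch (names are illustrative, not taken from an actual
 * caller):
 *
 *   self->v4l2capture = gst_aml_v4l2_object_new (GST_ELEMENT (self),
 *       GST_OBJECT (srcpad), V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
 *       default_device, get_in_out_cb, set_in_out_cb, update_fps_cb);
 *
 * The returned object is not yet opened (video_fd is -1); it only becomes
 * usable once gst_aml_v4l2_object_open() or _open_shared() succeeds. */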
497
498static gboolean gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object);
499
500void gst_aml_v4l2_object_destroy(GstAmlV4l2Object *v4l2object)
501{
502 g_return_if_fail(v4l2object != NULL);
503
504 g_free(v4l2object->videodev);
505
506 g_free(v4l2object->channel);
507
508 if (v4l2object->formats)
509 {
510 gst_aml_v4l2_object_clear_format_list(v4l2object);
511 }
512
513 if (v4l2object->probed_caps)
514 {
515 gst_caps_unref(v4l2object->probed_caps);
516 }
517
518 if (v4l2object->extra_controls)
519 {
520 gst_structure_free(v4l2object->extra_controls);
521 }
522
523 gst_poll_free(v4l2object->poll);
524
525 g_free(v4l2object->dumpframefile);
526
527 g_free(v4l2object);
528}
529
530static gboolean
531gst_aml_v4l2_object_clear_format_list(GstAmlV4l2Object *v4l2object)
532{
533 g_slist_foreach(v4l2object->formats, (GFunc)g_free, NULL);
534 g_slist_free(v4l2object->formats);
535 v4l2object->formats = NULL;
536
537 return TRUE;
538}
539
540static gint
541gst_aml_v4l2_object_prop_to_cid(guint prop_id)
542{
543 gint cid = -1;
544
545 switch (prop_id)
546 {
547 case PROP_BRIGHTNESS:
548 cid = V4L2_CID_BRIGHTNESS;
549 break;
550 case PROP_CONTRAST:
551 cid = V4L2_CID_CONTRAST;
552 break;
553 case PROP_SATURATION:
554 cid = V4L2_CID_SATURATION;
555 break;
556 case PROP_HUE:
557 cid = V4L2_CID_HUE;
558 break;
559 default:
560 GST_WARNING("unmapped property id: %d", prop_id);
561 }
562 return cid;
563}
564
565gboolean
566gst_aml_v4l2_object_set_property_helper(GstAmlV4l2Object *v4l2object,
567 guint prop_id, const GValue *value, GParamSpec *pspec)
568{
569 switch (prop_id)
570 {
571 case PROP_DEVICE:
572 g_free(v4l2object->videodev);
573 v4l2object->videodev = g_value_dup_string(value);
574 break;
575 case PROP_BRIGHTNESS:
576 case PROP_CONTRAST:
577 case PROP_SATURATION:
578 case PROP_HUE:
579 {
580 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
581
582 if (cid != -1)
583 {
584 if (GST_AML_V4L2_IS_OPEN(v4l2object))
585 {
586 gst_aml_v4l2_set_attribute(v4l2object, cid, g_value_get_int(value));
587 }
588 }
589 return TRUE;
590 }
591 break;
592 case PROP_IO_MODE:
593 v4l2object->req_mode = g_value_get_enum(value);
594 break;
595 case PROP_CAPTURE_IO_MODE:
596 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
597 v4l2object->req_mode = g_value_get_enum(value);
598 break;
599 case PROP_OUTPUT_IO_MODE:
600 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
601 v4l2object->req_mode = g_value_get_enum(value);
602 break;
603 case PROP_EXTRA_CONTROLS:
604 {
605 const GstStructure *s = gst_value_get_structure(value);
606
607 if (v4l2object->extra_controls)
608 gst_structure_free(v4l2object->extra_controls);
609
610 v4l2object->extra_controls = s ? gst_structure_copy(s) : NULL;
611 if (GST_AML_V4L2_IS_OPEN(v4l2object))
612 gst_aml_v4l2_set_controls(v4l2object, v4l2object->extra_controls);
613 break;
614 }
615 case PROP_PIXEL_ASPECT_RATIO:
616 if (v4l2object->par)
617 {
618 g_value_unset(v4l2object->par);
619 g_free(v4l2object->par);
620 }
621 v4l2object->par = g_new0(GValue, 1);
622 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
623 if (!g_value_transform(value, v4l2object->par))
624 {
625 g_warning("Could not transform string to aspect ratio");
626 gst_value_set_fraction(v4l2object->par, 1, 1);
627 }
628
629 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set PAR to %d/%d",
630 gst_value_get_fraction_numerator(v4l2object->par),
631 gst_value_get_fraction_denominator(v4l2object->par));
632 break;
633 case PROP_FORCE_ASPECT_RATIO:
634 v4l2object->keep_aspect = g_value_get_boolean(value);
635 break;
636 case PROP_DUMP_FRAME_LOCATION:
637 g_free(v4l2object->dumpframefile);
638 v4l2object->dumpframefile = g_value_dup_string(value);
639 break;
640 default:
641 return FALSE;
642 break;
643 }
644 return TRUE;
645}
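/* Illustrative only: because PROP_PIXEL_ASPECT_RATIO is converted with
 * g_value_transform(), the property is normally set as a fraction-formatted
 * string, e.g.
 *
 *   g_object_set (element, "pixel-aspect-ratio", "16/9", NULL);
 *
 * An unparsable value falls back to 1/1 with a warning (see the
 * PROP_PIXEL_ASPECT_RATIO case above). */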
646
647gboolean
648gst_aml_v4l2_object_get_property_helper(GstAmlV4l2Object *v4l2object,
649 guint prop_id, GValue *value, GParamSpec *pspec)
650{
651 switch (prop_id)
652 {
653 case PROP_DEVICE:
654 g_value_set_string(value, v4l2object->videodev);
655 break;
656 case PROP_DEVICE_NAME:
657 {
658 const guchar *name = NULL;
659
660 if (GST_AML_V4L2_IS_OPEN(v4l2object))
661 name = v4l2object->vcap.card;
662
663 g_value_set_string(value, (gchar *)name);
664 break;
665 }
666 case PROP_DEVICE_FD:
667 {
668 if (GST_AML_V4L2_IS_OPEN(v4l2object))
669 g_value_set_int(value, v4l2object->video_fd);
670 else
671 g_value_set_int(value, DEFAULT_PROP_DEVICE_FD);
672 break;
673 }
674 case PROP_FLAGS:
675 {
676 guint flags = 0;
677
678 if (GST_AML_V4L2_IS_OPEN(v4l2object))
679 {
680 flags |= v4l2object->device_caps &
681 (V4L2_CAP_VIDEO_CAPTURE |
682 V4L2_CAP_VIDEO_OUTPUT |
683 V4L2_CAP_VIDEO_OVERLAY |
684 V4L2_CAP_VBI_CAPTURE |
685 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
686
687 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
688 flags |= V4L2_CAP_VIDEO_CAPTURE;
689
690 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
691 flags |= V4L2_CAP_VIDEO_OUTPUT;
692 }
693 g_value_set_flags(value, flags);
694 break;
695 }
696 case PROP_BRIGHTNESS:
697 case PROP_CONTRAST:
698 case PROP_SATURATION:
699 case PROP_HUE:
700 {
701 gint cid = gst_aml_v4l2_object_prop_to_cid(prop_id);
702
703 if (cid != -1)
704 {
705 if (GST_AML_V4L2_IS_OPEN(v4l2object))
706 {
707 gint v;
708 if (gst_aml_v4l2_get_attribute(v4l2object, cid, &v))
709 {
710 g_value_set_int(value, v);
711 }
712 }
713 }
714 return TRUE;
715 }
716 break;
717 case PROP_IO_MODE:
718 g_value_set_enum(value, v4l2object->req_mode);
719 break;
720 case PROP_CAPTURE_IO_MODE:
721 g_return_val_if_fail(!V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
722 g_value_set_enum(value, v4l2object->req_mode);
723 break;
724 case PROP_OUTPUT_IO_MODE:
725 g_return_val_if_fail(V4L2_TYPE_IS_OUTPUT(v4l2object->type), FALSE);
726 g_value_set_enum(value, v4l2object->req_mode);
727 break;
728 case PROP_EXTRA_CONTROLS:
729 gst_value_set_structure(value, v4l2object->extra_controls);
730 break;
731 case PROP_PIXEL_ASPECT_RATIO:
732 if (v4l2object->par)
733 g_value_transform(v4l2object->par, value);
734 break;
735 case PROP_FORCE_ASPECT_RATIO:
736 g_value_set_boolean(value, v4l2object->keep_aspect);
737 break;
738 case PROP_DUMP_FRAME_LOCATION:
739 g_value_set_string(value, v4l2object->dumpframefile);
740 break;
741 default:
742 return FALSE;
743 break;
744 }
745 return TRUE;
746}
747
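/* V4L2_CID_MIN_BUFFERS_FOR_OUTPUT / _CAPTURE are standard V4L2 controls
 * through which a driver can advertise the minimum number of buffers it
 * needs on the respective queue; when the control is not implemented the
 * code below simply assumes no minimum (0). */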
748static void
749gst_aml_v4l2_get_driver_min_buffers(GstAmlV4l2Object *v4l2object)
750{
751 struct v4l2_control control = {
752 0,
753 };
754
755 g_return_if_fail(GST_AML_V4L2_IS_OPEN(v4l2object));
756
757 if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
758 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
759 else
760 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
761
762 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0)
763 {
764 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
765 "driver requires a minimum of %d buffers", control.value);
766 v4l2object->min_buffers = control.value;
767 }
768 else
769 {
770 v4l2object->min_buffers = 0;
771 }
772}
773
774gboolean
775gst_aml_v4l2_object_open(GstAmlV4l2Object *v4l2object)
776{
777 if (!gst_aml_v4l2_open(v4l2object))
778 return FALSE;
779
780 return TRUE;
781}
782
783gboolean
784gst_aml_v4l2_object_open_shared(GstAmlV4l2Object *v4l2object, GstAmlV4l2Object *other)
785{
786 gboolean ret;
787
788 ret = gst_aml_v4l2_dup(v4l2object, other);
789
790 if (ret && !V4L2_TYPE_IS_OUTPUT(v4l2object->type))
791 {
792 gst_poll_fd_init(&v4l2object->pollfd);
793 v4l2object->pollfd.fd = v4l2object->video_fd;
794 gst_poll_add_fd(v4l2object->poll, &v4l2object->pollfd);
795 /* used for dequeue event */
796 gst_poll_fd_ctl_read(v4l2object->poll, &v4l2object->pollfd, TRUE);
797 gst_poll_fd_ctl_pri(v4l2object->poll, &v4l2object->pollfd, TRUE);
798 }
799
800 return ret;
801}
802
803gboolean
804gst_aml_v4l2_object_close(GstAmlV4l2Object *v4l2object)
805{
806 if (!gst_aml_v4l2_close(v4l2object))
807 return FALSE;
808
809 gst_caps_replace(&v4l2object->probed_caps, NULL);
810
811 /* reset our copy of the device caps */
812 v4l2object->device_caps = 0;
813
814 if (v4l2object->formats)
815 {
816 gst_aml_v4l2_object_clear_format_list(v4l2object);
817 }
818
819 if (v4l2object->par)
820 {
821 g_value_unset(v4l2object->par);
822 g_free(v4l2object->par);
823 v4l2object->par = NULL;
824 }
825
826 if (v4l2object->channel)
827 {
828 g_free(v4l2object->channel);
829 v4l2object->channel = NULL;
830 }
831
832 return TRUE;
833}
834
835static struct v4l2_fmtdesc *
836gst_aml_v4l2_object_get_format_from_fourcc(GstAmlV4l2Object *v4l2object,
837 guint32 fourcc)
838{
839 struct v4l2_fmtdesc *fmt;
840 GSList *walk;
841
842 if (fourcc == 0)
843 return NULL;
844
845 walk = gst_aml_v4l2_object_get_format_list(v4l2object);
846 while (walk)
847 {
848 fmt = (struct v4l2_fmtdesc *)walk->data;
849 if (fmt->pixelformat == fourcc)
850 return fmt;
851 /* special case for jpeg */
852 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
853 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
854 fmt->pixelformat == V4L2_PIX_FMT_PJPG)
855 {
856 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
857 fourcc == V4L2_PIX_FMT_PJPG)
858 {
859 return fmt;
860 }
861 }
862 walk = g_slist_next(walk);
863 }
864
865 return NULL;
866}
867
 868/* completely made-up ranking, the values themselves are meaningless */
 869/* These ranks MUST be X such that X<<15 fits in a signed int - see
870 the comment at the end of gst_aml_v4l2_object_format_get_rank. */
871#define YUV_BASE_RANK 1000
872#define JPEG_BASE_RANK 500
873#define DV_BASE_RANK 200
874#define RGB_BASE_RANK 100
875#define YUV_ODD_BASE_RANK 50
876#define RGB_ODD_BASE_RANK 25
877#define BAYER_BASE_RANK 15
878#define S910_BASE_RANK 10
879#define GREY_BASE_RANK 5
880#define PWC_BASE_RANK 1
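/* Example of the effect of the ranking: a natively supported I420
 * (V4L2_PIX_FMT_YUV420) gets rank (YUV_BASE_RANK + 7) << 15, while the same
 * format flagged V4L2_FMT_FLAG_EMULATED keeps the small unshifted rank, so
 * any format with a non-zero rank that the driver supports natively sorts
 * ahead of the emulated ones in gst_aml_v4l2_object_fill_format_list(). */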
881
882static gint
883gst_aml_v4l2_object_format_get_rank(const struct v4l2_fmtdesc *fmt)
884{
885 guint32 fourcc = fmt->pixelformat;
886 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
887 gint rank = 0;
888
889 switch (fourcc)
890 {
891 case V4L2_PIX_FMT_MJPEG:
892 case V4L2_PIX_FMT_PJPG:
893 rank = JPEG_BASE_RANK;
894 break;
895 case V4L2_PIX_FMT_JPEG:
896 rank = JPEG_BASE_RANK + 1;
897 break;
898 case V4L2_PIX_FMT_MPEG: /* MPEG */
899 rank = JPEG_BASE_RANK + 2;
900 break;
901
902 case V4L2_PIX_FMT_RGB332:
903 case V4L2_PIX_FMT_ARGB555:
904 case V4L2_PIX_FMT_XRGB555:
905 case V4L2_PIX_FMT_RGB555:
906 case V4L2_PIX_FMT_ARGB555X:
907 case V4L2_PIX_FMT_XRGB555X:
908 case V4L2_PIX_FMT_RGB555X:
909 case V4L2_PIX_FMT_BGR666:
910 case V4L2_PIX_FMT_RGB565:
911 case V4L2_PIX_FMT_RGB565X:
912 case V4L2_PIX_FMT_RGB444:
913 case V4L2_PIX_FMT_Y4:
914 case V4L2_PIX_FMT_Y6:
915 case V4L2_PIX_FMT_Y10:
916 case V4L2_PIX_FMT_Y12:
917 case V4L2_PIX_FMT_Y10BPACK:
918 case V4L2_PIX_FMT_YUV555:
919 case V4L2_PIX_FMT_YUV565:
920 case V4L2_PIX_FMT_YUV32:
921 case V4L2_PIX_FMT_NV12MT_16X16:
922 case V4L2_PIX_FMT_NV42:
923 case V4L2_PIX_FMT_H264_MVC:
924 rank = RGB_ODD_BASE_RANK;
925 break;
926
927 case V4L2_PIX_FMT_RGB24:
928 case V4L2_PIX_FMT_BGR24:
929 rank = RGB_BASE_RANK - 1;
930 break;
931
932 case V4L2_PIX_FMT_RGB32:
933 case V4L2_PIX_FMT_BGR32:
934 case V4L2_PIX_FMT_ABGR32:
935 case V4L2_PIX_FMT_XBGR32:
936 case V4L2_PIX_FMT_ARGB32:
937 case V4L2_PIX_FMT_XRGB32:
938 rank = RGB_BASE_RANK;
939 break;
940
941 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
942 rank = GREY_BASE_RANK;
943 break;
944
945 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
946 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
947 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
948 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
949 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
950 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
951 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
952 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
953 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
954 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
955 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
956 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
957 rank = YUV_ODD_BASE_RANK;
958 break;
959
960 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
961 rank = YUV_BASE_RANK + 3;
962 break;
963 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
964 rank = YUV_BASE_RANK + 2;
965 break;
966 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
967 case V4L2_PIX_FMT_YUV420M:
968 rank = YUV_BASE_RANK + 7;
969 break;
970 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
971 rank = YUV_BASE_RANK + 10;
972 break;
973 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
974 rank = YUV_BASE_RANK + 6;
975 break;
976 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
977 rank = YUV_BASE_RANK + 9;
978 break;
979 case V4L2_PIX_FMT_YUV444:
980 rank = YUV_BASE_RANK + 6;
981 break;
982 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
983 rank = YUV_BASE_RANK + 5;
984 break;
985 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
986 rank = YUV_BASE_RANK + 4;
987 break;
988 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
989 rank = YUV_BASE_RANK + 8;
990 break;
991
992 case V4L2_PIX_FMT_DV:
993 rank = DV_BASE_RANK;
994 break;
995
 996 case V4L2_PIX_FMT_WNVA: /* Winnov hw compression */
997 rank = 0;
998 break;
999
1000 case V4L2_PIX_FMT_SBGGR8:
1001 case V4L2_PIX_FMT_SGBRG8:
1002 case V4L2_PIX_FMT_SGRBG8:
1003 case V4L2_PIX_FMT_SRGGB8:
1004 rank = BAYER_BASE_RANK;
1005 break;
1006
1007 case V4L2_PIX_FMT_SN9C10X:
1008 rank = S910_BASE_RANK;
1009 break;
1010
1011 case V4L2_PIX_FMT_PWC1:
1012 rank = PWC_BASE_RANK;
1013 break;
1014 case V4L2_PIX_FMT_PWC2:
1015 rank = PWC_BASE_RANK;
1016 break;
1017
1018 default:
1019 rank = 0;
1020 break;
1021 }
1022
1023 /* All ranks are below 1<<15 so a shift by 15
1024 * will a) make all non-emulated formats larger
1025 * than emulated and b) will not overflow
1026 */
1027 if (!emulated)
1028 rank <<= 15;
1029
1030 return rank;
1031}
1032
1033static gint
1034format_cmp_func(gconstpointer a, gconstpointer b)
1035{
1036 const struct v4l2_fmtdesc *fa = a;
1037 const struct v4l2_fmtdesc *fb = b;
1038
1039 if (fa->pixelformat == fb->pixelformat)
1040 return 0;
1041
1042 return gst_aml_v4l2_object_format_get_rank(fb) -
1043 gst_aml_v4l2_object_format_get_rank(fa);
1044}
1045
1046/******************************************************
1047 * gst_aml_v4l2_object_fill_format_list():
1048 * create list of supported capture formats
1049 * return value: TRUE on success, FALSE on error
1050 ******************************************************/
1051static gboolean
1052gst_aml_v4l2_object_fill_format_list(GstAmlV4l2Object *v4l2object,
1053 enum v4l2_buf_type type)
1054{
1055 gint n;
1056 struct v4l2_fmtdesc *format;
1057
1058 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "getting src format enumerations");
1059
1060 /* format enumeration */
1061 for (n = 0;; n++)
1062 {
1063 format = g_new0(struct v4l2_fmtdesc, 1);
1064
1065 format->index = n;
1066 format->type = type;
1067
1068 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0)
1069 {
1070 if (errno == EINVAL)
1071 {
1072 g_free(format);
1073 break; /* end of enumeration */
1074 }
1075 else
1076 {
1077 goto failed;
1078 }
1079 }
1080
1081 GST_LOG_OBJECT(v4l2object->dbg_obj, "index: %u", format->index);
1082 GST_LOG_OBJECT(v4l2object->dbg_obj, "type: %d", format->type);
1083 GST_LOG_OBJECT(v4l2object->dbg_obj, "flags: %08x", format->flags);
1084 GST_LOG_OBJECT(v4l2object->dbg_obj, "description: '%s'",
1085 format->description);
1086 GST_LOG_OBJECT(v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1087 GST_FOURCC_ARGS(format->pixelformat));
1088
1089 /* sort formats according to our preference; we do this, because caps
1090 * are probed in the order the formats are in the list, and the order of
1091 * formats in the final probed caps matters for things like fixation */
1092 v4l2object->formats = g_slist_insert_sorted(v4l2object->formats, format,
1093 (GCompareFunc)format_cmp_func);
1094 }
1095
1096#ifndef GST_DISABLE_GST_DEBUG
1097 {
1098 GSList *l;
1099
1100 GST_INFO_OBJECT(v4l2object->dbg_obj, "got %d format(s):", n);
1101 for (l = v4l2object->formats; l != NULL; l = l->next)
1102 {
1103 format = l->data;
1104
1105 GST_INFO_OBJECT(v4l2object->dbg_obj,
1106 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS(format->pixelformat),
1107 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1108 }
1109 }
1110#endif
1111
1112 return TRUE;
1113
1114 /* ERRORS */
1115failed:
1116{
1117 g_free(format);
1118
1119 if (v4l2object->element)
1120 return FALSE;
1121
1122 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
1123 (_("Failed to enumerate possible video formats device '%s' can work "
1124 "with"),
1125 v4l2object->videodev),
1126 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1127 n, v4l2object->videodev, errno, g_strerror(errno)));
1128
1129 return FALSE;
1130}
1131}
1132
1133/*
1134 * Get the list of supported capture formats, a list of
1135 * <code>struct v4l2_fmtdesc</code>.
1136 */
1137static GSList *
1138gst_aml_v4l2_object_get_format_list(GstAmlV4l2Object *v4l2object)
1139{
1140 if (!v4l2object->formats)
1141 {
1142
1143 /* check usual way */
1144 gst_aml_v4l2_object_fill_format_list(v4l2object, v4l2object->type);
1145
1146 /* if our driver supports multi-planar
1147 * and if formats are still empty then we can workaround driver bug
1148 * by also looking up formats as if our device was not supporting
1149 * multiplanar */
1150 if (!v4l2object->formats)
1151 {
1152 switch (v4l2object->type)
1153 {
1154 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1155 gst_aml_v4l2_object_fill_format_list(v4l2object,
1156 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1157 break;
1158
1159 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1160 gst_aml_v4l2_object_fill_format_list(v4l2object,
1161 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1162 break;
1163
1164 default:
1165 break;
1166 }
1167 }
1168 }
1169 return v4l2object->formats;
1170}
1171
1172static GstVideoFormat
1173gst_aml_v4l2_object_v4l2fourcc_to_video_format(guint32 fourcc)
1174{
1175 GstVideoFormat format;
1176
1177 switch (fourcc)
1178 {
1179 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1180 format = GST_VIDEO_FORMAT_GRAY8;
1181 break;
1182 case V4L2_PIX_FMT_Y16:
1183 format = GST_VIDEO_FORMAT_GRAY16_LE;
1184 break;
1185 case V4L2_PIX_FMT_Y16_BE:
1186 format = GST_VIDEO_FORMAT_GRAY16_BE;
1187 break;
1188 case V4L2_PIX_FMT_XRGB555:
1189 case V4L2_PIX_FMT_RGB555:
1190 format = GST_VIDEO_FORMAT_RGB15;
1191 break;
1192 case V4L2_PIX_FMT_XRGB555X:
1193 case V4L2_PIX_FMT_RGB555X:
1194 format = GST_VIDEO_FORMAT_BGR15;
1195 break;
1196 case V4L2_PIX_FMT_RGB565:
1197 format = GST_VIDEO_FORMAT_RGB16;
1198 break;
1199 case V4L2_PIX_FMT_RGB24:
1200 format = GST_VIDEO_FORMAT_RGB;
1201 break;
1202 case V4L2_PIX_FMT_BGR24:
1203 format = GST_VIDEO_FORMAT_BGR;
1204 break;
1205 case V4L2_PIX_FMT_XRGB32:
1206 case V4L2_PIX_FMT_RGB32:
1207 format = GST_VIDEO_FORMAT_xRGB;
1208 break;
1209 case V4L2_PIX_FMT_XBGR32:
1210 case V4L2_PIX_FMT_BGR32:
1211 format = GST_VIDEO_FORMAT_BGRx;
1212 break;
1213 case V4L2_PIX_FMT_ABGR32:
1214 format = GST_VIDEO_FORMAT_BGRA;
1215 break;
1216 case V4L2_PIX_FMT_ARGB32:
1217 format = GST_VIDEO_FORMAT_ARGB;
1218 break;
1219 case V4L2_PIX_FMT_NV12:
1220 case V4L2_PIX_FMT_NV12M:
1221 format = GST_VIDEO_FORMAT_NV12;
1222 break;
1223 case V4L2_PIX_FMT_NV12MT:
1224 format = GST_VIDEO_FORMAT_NV12_64Z32;
1225 break;
1226 case V4L2_PIX_FMT_NV21:
1227 case V4L2_PIX_FMT_NV21M:
1228 format = GST_VIDEO_FORMAT_NV21;
1229 break;
1230 case V4L2_PIX_FMT_YVU410:
1231 format = GST_VIDEO_FORMAT_YVU9;
1232 break;
1233 case V4L2_PIX_FMT_YUV410:
1234 format = GST_VIDEO_FORMAT_YUV9;
1235 break;
1236 case V4L2_PIX_FMT_YUV420:
1237 case V4L2_PIX_FMT_YUV420M:
1238 format = GST_VIDEO_FORMAT_I420;
1239 break;
1240 case V4L2_PIX_FMT_YUYV:
1241 format = GST_VIDEO_FORMAT_YUY2;
1242 break;
1243 case V4L2_PIX_FMT_YVU420:
1244 format = GST_VIDEO_FORMAT_YV12;
1245 break;
1246 case V4L2_PIX_FMT_UYVY:
1247 format = GST_VIDEO_FORMAT_UYVY;
1248 break;
1249 case V4L2_PIX_FMT_YUV411P:
1250 format = GST_VIDEO_FORMAT_Y41B;
1251 break;
1252 case V4L2_PIX_FMT_YUV422P:
1253 format = GST_VIDEO_FORMAT_Y42B;
1254 break;
1255 case V4L2_PIX_FMT_YVYU:
1256 format = GST_VIDEO_FORMAT_YVYU;
1257 break;
1258 case V4L2_PIX_FMT_NV16:
1259 case V4L2_PIX_FMT_NV16M:
1260 format = GST_VIDEO_FORMAT_NV16;
1261 break;
1262 case V4L2_PIX_FMT_NV61:
1263 case V4L2_PIX_FMT_NV61M:
1264 format = GST_VIDEO_FORMAT_NV61;
1265 break;
1266 case V4L2_PIX_FMT_NV24:
1267 format = GST_VIDEO_FORMAT_NV24;
1268 break;
1269 default:
1270 format = GST_VIDEO_FORMAT_UNKNOWN;
1271 break;
1272 }
1273
1274 return format;
1275}
1276
1277static gboolean
1278gst_amL_v4l2_object_v4l2fourcc_is_rgb(guint32 fourcc)
1279{
1280 gboolean ret = FALSE;
1281
1282 switch (fourcc)
1283 {
1284 case V4L2_PIX_FMT_XRGB555:
1285 case V4L2_PIX_FMT_RGB555:
1286 case V4L2_PIX_FMT_XRGB555X:
1287 case V4L2_PIX_FMT_RGB555X:
1288 case V4L2_PIX_FMT_RGB565:
1289 case V4L2_PIX_FMT_RGB24:
1290 case V4L2_PIX_FMT_BGR24:
1291 case V4L2_PIX_FMT_XRGB32:
1292 case V4L2_PIX_FMT_RGB32:
1293 case V4L2_PIX_FMT_XBGR32:
1294 case V4L2_PIX_FMT_BGR32:
1295 case V4L2_PIX_FMT_ABGR32:
1296 case V4L2_PIX_FMT_ARGB32:
1297 case V4L2_PIX_FMT_SBGGR8:
1298 case V4L2_PIX_FMT_SGBRG8:
1299 case V4L2_PIX_FMT_SGRBG8:
1300 case V4L2_PIX_FMT_SRGGB8:
1301 ret = TRUE;
1302 break;
1303 default:
1304 break;
1305 }
1306
1307 return ret;
1308}
1309
1310static GstStructure *
1311gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(guint32 fourcc)
1312{
1313 GstStructure *structure = NULL;
1314
1315 switch (fourcc)
1316 {
1317 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1318 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1319 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1320 structure = gst_structure_new_empty("image/jpeg");
1321 break;
1322 case V4L2_PIX_FMT_MPEG1:
1323 structure = gst_structure_new("video/mpeg",
1324 "mpegversion", G_TYPE_INT, 1, NULL);
1325 break;
1326 case V4L2_PIX_FMT_MPEG2:
1327 structure = gst_structure_new("video/mpeg",
1328 "mpegversion", G_TYPE_INT, 2, NULL);
1329 gst_structure_set(structure, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
1330 GST_DEBUG("aml set mpeg2 systemstream to false");
1331 break;
1332 case V4L2_PIX_FMT_MPEG4:
1333 case V4L2_PIX_FMT_XVID:
1334 structure = gst_structure_new("video/mpeg",
1335 "mpegversion", G_TYPE_INT, 4, "systemstream",
1336 G_TYPE_BOOLEAN, FALSE, NULL);
1337 break;
1338 case V4L2_PIX_FMT_FWHT:
1339 structure = gst_structure_new_empty("video/x-fwht");
1340 break;
1341 case V4L2_PIX_FMT_H263:
1342 structure = gst_structure_new("video/x-h263",
1343 "variant", G_TYPE_STRING, "itu", NULL);
1344 break;
1345 case V4L2_PIX_FMT_H264: /* H.264 */
1346 structure = gst_structure_new("video/x-h264",
1347 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1348 G_TYPE_STRING, "au", NULL);
1349 break;
1350 case V4L2_PIX_FMT_H264_NO_SC:
1351 structure = gst_structure_new("video/x-h264",
1352 "stream-format", G_TYPE_STRING, "avc", "alignment",
1353 G_TYPE_STRING, "au", NULL);
1354 break;
1355 case V4L2_PIX_FMT_HEVC: /* H.265 */
1356 structure = gst_structure_new("video/x-h265",
1357 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1358 G_TYPE_STRING, "au", NULL);
1359 break;
1360 case V4L2_PIX_FMT_VC1_ANNEX_G:
1361 case V4L2_PIX_FMT_VC1_ANNEX_L:
1362 structure = gst_structure_new("video/x-wmv",
1363 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1364 break;
1365 case V4L2_PIX_FMT_VP8:
1366 structure = gst_structure_new_empty("video/x-vp8");
1367 break;
1368 case V4L2_PIX_FMT_VP9:
1369 structure = gst_structure_new_empty("video/x-vp9");
1370 break;
1371 case V4L2_PIX_FMT_AV1:
1372 structure = gst_structure_new_empty("video/x-av1");
1373 break;
1374 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1375 case V4L2_PIX_FMT_Y16:
1376 case V4L2_PIX_FMT_Y16_BE:
1377 case V4L2_PIX_FMT_XRGB555:
1378 case V4L2_PIX_FMT_RGB555:
1379 case V4L2_PIX_FMT_XRGB555X:
1380 case V4L2_PIX_FMT_RGB555X:
1381 case V4L2_PIX_FMT_RGB565:
1382 case V4L2_PIX_FMT_RGB24:
1383 case V4L2_PIX_FMT_BGR24:
1384 case V4L2_PIX_FMT_RGB32:
1385 case V4L2_PIX_FMT_XRGB32:
1386 case V4L2_PIX_FMT_ARGB32:
1387 case V4L2_PIX_FMT_BGR32:
1388 case V4L2_PIX_FMT_XBGR32:
1389 case V4L2_PIX_FMT_ABGR32:
1390 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1391 case V4L2_PIX_FMT_NV12M:
1392 case V4L2_PIX_FMT_NV12MT:
1393 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1394 case V4L2_PIX_FMT_NV21M:
1395 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1396 case V4L2_PIX_FMT_NV16M:
1397 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1398 case V4L2_PIX_FMT_NV61M:
1399 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1400 case V4L2_PIX_FMT_YVU410:
1401 case V4L2_PIX_FMT_YUV410:
1402 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1403 case V4L2_PIX_FMT_YUV420M:
1404 case V4L2_PIX_FMT_YUYV:
1405 case V4L2_PIX_FMT_YVU420:
1406 case V4L2_PIX_FMT_UYVY:
1407 case V4L2_PIX_FMT_YUV422P:
1408 case V4L2_PIX_FMT_YVYU:
1409 case V4L2_PIX_FMT_YUV411P:
1410 {
1411 GstVideoFormat format;
1412 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fourcc);
1413 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1414 structure = gst_structure_new("video/x-raw",
1415 "format", G_TYPE_STRING, gst_video_format_to_string(format), NULL);
1416 break;
1417 }
1418 case V4L2_PIX_FMT_DV:
1419 structure =
1420 gst_structure_new("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1421 NULL);
1422 break;
1423 case V4L2_PIX_FMT_MPEG: /* MPEG */
1424 structure = gst_structure_new("video/mpegts",
1425 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1426 break;
 1427 case V4L2_PIX_FMT_WNVA: /* Winnov hw compression */
1428 break;
1429 case V4L2_PIX_FMT_SBGGR8:
1430 case V4L2_PIX_FMT_SGBRG8:
1431 case V4L2_PIX_FMT_SGRBG8:
1432 case V4L2_PIX_FMT_SRGGB8:
1433 structure = gst_structure_new("video/x-bayer", "format", G_TYPE_STRING,
 1434 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" :
 1435 fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg" :
 1436 fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg" :
 1437 /* fourcc == V4L2_PIX_FMT_SRGGB8 */ "rggb",
 1438 NULL);
1439 break;
1440 case V4L2_PIX_FMT_SN9C10X:
1441 structure = gst_structure_new_empty("video/x-sonix");
1442 break;
1443 case V4L2_PIX_FMT_PWC1:
1444 structure = gst_structure_new_empty("video/x-pwc1");
1445 break;
1446 case V4L2_PIX_FMT_PWC2:
1447 structure = gst_structure_new_empty("video/x-pwc2");
1448 break;
1449 case V4L2_PIX_FMT_RGB332:
1450 case V4L2_PIX_FMT_BGR666:
1451 case V4L2_PIX_FMT_ARGB555X:
1452 case V4L2_PIX_FMT_RGB565X:
1453 case V4L2_PIX_FMT_RGB444:
1454 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1455 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1456 case V4L2_PIX_FMT_Y4:
1457 case V4L2_PIX_FMT_Y6:
1458 case V4L2_PIX_FMT_Y10:
1459 case V4L2_PIX_FMT_Y12:
1460 case V4L2_PIX_FMT_Y10BPACK:
1461 case V4L2_PIX_FMT_YUV444:
1462 case V4L2_PIX_FMT_YUV555:
1463 case V4L2_PIX_FMT_YUV565:
1464 case V4L2_PIX_FMT_Y41P:
1465 case V4L2_PIX_FMT_YUV32:
1466 case V4L2_PIX_FMT_NV12MT_16X16:
1467 case V4L2_PIX_FMT_NV42:
1468 case V4L2_PIX_FMT_H264_MVC:
1469 default:
1470 GST_DEBUG("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1471 fourcc, GST_FOURCC_ARGS(fourcc));
1472 break;
1473 }
1474
1475 return structure;
1476}
1477
1478GstStructure *
1479gst_aml_v4l2_object_v4l2fourcc_to_structure(guint32 fourcc)
1480{
1481 GstStructure *template;
1482 gint i;
1483
1484 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(fourcc);
1485
1486 if (template == NULL)
1487 goto done;
1488
1489 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1490 {
1491 if (gst_aml_v4l2_formats[i].format != fourcc)
1492 continue;
1493
1494 if (gst_aml_v4l2_formats[i].dimensions)
1495 {
1496 gst_structure_set(template,
1497 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1498 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1499 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1500 }
1501 break;
1502 }
1503
1504done:
1505 return template;
1506}
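/* For instance (derived from the tables above, not from running the code):
 * V4L2_PIX_FMT_NV12 has .dimensions = TRUE, so the returned template is
 * roughly
 *
 *   video/x-raw, format=NV12, width=[1, GST_AML_V4L2_MAX_SIZE],
 *       height=[1, GST_AML_V4L2_MAX_SIZE], framerate=[0/1, G_MAXINT/1]
 *
 * whereas V4L2_PIX_FMT_H264 has .dimensions = FALSE and yields only
 * "video/x-h264, stream-format=byte-stream, alignment=au". */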
1507
1508static GstCaps *
1509gst_aml_v4l2_object_get_caps_helper(GstAmlV4L2FormatFlags flags)
1510{
1511 GstStructure *structure;
1512 GstCaps *caps;
1513 guint i;
1514
1515 caps = gst_caps_new_empty();
1516 for (i = 0; i < GST_AML_V4L2_FORMAT_COUNT; i++)
1517 {
1518
1519 if ((gst_aml_v4l2_formats[i].flags & flags) == 0)
1520 continue;
1521
1522 structure =
1523 gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(gst_aml_v4l2_formats[i].format);
1524
1525 if (structure)
1526 {
1527 GstStructure *alt_s = NULL;
1528
1529 if (gst_aml_v4l2_formats[i].dimensions)
1530 {
1531 gst_structure_set(structure,
1532 "width", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1533 "height", GST_TYPE_INT_RANGE, 1, GST_AML_V4L2_MAX_SIZE,
1534 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1535 }
1536
1537 switch (gst_aml_v4l2_formats[i].format)
1538 {
1539 case V4L2_PIX_FMT_RGB32:
1540 alt_s = gst_structure_copy(structure);
1541 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1542 break;
1543 case V4L2_PIX_FMT_BGR32:
1544 alt_s = gst_structure_copy(structure);
1545 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1546 default:
1547 break;
1548 }
1549
1550 gst_caps_append_structure(caps, structure);
1551
1552 if (alt_s)
1553 gst_caps_append_structure(caps, alt_s);
1554 }
1555 }
1556
1557 return gst_caps_simplify(caps);
1558}
1559
1560GstCaps *
1561gst_aml_v4l2_object_get_all_caps(void)
1562{
1563 static GstCaps *caps = NULL;
1564
1565 if (g_once_init_enter(&caps))
1566 {
1567 GstCaps *all_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_ALL);
1568 GST_MINI_OBJECT_FLAG_SET(all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1569 g_once_init_leave(&caps, all_caps);
1570 }
1571
1572 return caps;
1573}
1574
1575GstCaps *
1576gst_aml_v4l2_object_get_raw_caps(void)
1577{
1578 static GstCaps *caps = NULL;
1579
1580 if (g_once_init_enter(&caps))
1581 {
1582 GstCaps *raw_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_RAW);
1583 GST_MINI_OBJECT_FLAG_SET(raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1584 g_once_init_leave(&caps, raw_caps);
1585 }
1586
1587 return caps;
1588}
1589
1590GstCaps *
1591gst_aml_v4l2_object_get_codec_caps(void)
1592{
1593 static GstCaps *caps = NULL;
1594
1595 if (g_once_init_enter(&caps))
1596 {
1597 GstCaps *codec_caps = gst_aml_v4l2_object_get_caps_helper(GST_V4L2_CODEC);
1598 GST_MINI_OBJECT_FLAG_SET(codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1599 g_once_init_leave(&caps, codec_caps);
1600 }
1601
1602 return caps;
1603}
1604
 1605/* collect data for the given caps
 1606 * @caps: given input caps
 1607 * @format: location for the matching v4l2 format descriptor
 1608 * @info: location for the parsed GstVideoInfo; width, height,
 1609 * framerate and the expected frame size are returned through
 1610 * this structure instead of separate out parameters
 1611 */
1612static gboolean
1613gst_aml_v4l2_object_get_caps_info(GstAmlV4l2Object *v4l2object, GstCaps *caps,
1614 struct v4l2_fmtdesc **format, GstVideoInfo *info)
1615{
1616 GstStructure *structure;
1617 guint32 fourcc = 0, fourcc_nc = 0;
1618 const gchar *mimetype;
1619 struct v4l2_fmtdesc *fmt = NULL;
1620
1621 structure = gst_caps_get_structure(caps, 0);
1622
1623 mimetype = gst_structure_get_name(structure);
1624
1625 if (!gst_video_info_from_caps(info, caps))
1626 goto invalid_format;
1627
1628 if (g_str_equal(mimetype, "video/x-raw"))
1629 {
1630 switch (GST_VIDEO_INFO_FORMAT(info))
1631 {
1632 case GST_VIDEO_FORMAT_I420:
1633 fourcc = V4L2_PIX_FMT_YUV420;
1634 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1635 break;
1636 case GST_VIDEO_FORMAT_YUY2:
1637 fourcc = V4L2_PIX_FMT_YUYV;
1638 break;
1639 case GST_VIDEO_FORMAT_UYVY:
1640 fourcc = V4L2_PIX_FMT_UYVY;
1641 break;
1642 case GST_VIDEO_FORMAT_YV12:
1643 fourcc = V4L2_PIX_FMT_YVU420;
1644 break;
1645 case GST_VIDEO_FORMAT_Y41B:
1646 fourcc = V4L2_PIX_FMT_YUV411P;
1647 break;
1648 case GST_VIDEO_FORMAT_Y42B:
1649 fourcc = V4L2_PIX_FMT_YUV422P;
1650 break;
1651 case GST_VIDEO_FORMAT_NV12:
1652 fourcc = V4L2_PIX_FMT_NV12;
1653 fourcc_nc = V4L2_PIX_FMT_NV12M;
1654 break;
1655 case GST_VIDEO_FORMAT_NV12_64Z32:
1656 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1657 break;
1658 case GST_VIDEO_FORMAT_NV21:
1659 fourcc = V4L2_PIX_FMT_NV21;
1660 fourcc_nc = V4L2_PIX_FMT_NV21M;
1661 break;
1662 case GST_VIDEO_FORMAT_NV16:
1663 fourcc = V4L2_PIX_FMT_NV16;
1664 fourcc_nc = V4L2_PIX_FMT_NV16M;
1665 break;
1666 case GST_VIDEO_FORMAT_NV61:
1667 fourcc = V4L2_PIX_FMT_NV61;
1668 fourcc_nc = V4L2_PIX_FMT_NV61M;
1669 break;
1670 case GST_VIDEO_FORMAT_NV24:
1671 fourcc = V4L2_PIX_FMT_NV24;
1672 break;
1673 case GST_VIDEO_FORMAT_YVYU:
1674 fourcc = V4L2_PIX_FMT_YVYU;
1675 break;
1676 case GST_VIDEO_FORMAT_RGB15:
1677 fourcc = V4L2_PIX_FMT_RGB555;
1678 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1679 break;
1680 case GST_VIDEO_FORMAT_RGB16:
1681 fourcc = V4L2_PIX_FMT_RGB565;
1682 break;
1683 case GST_VIDEO_FORMAT_RGB:
1684 fourcc = V4L2_PIX_FMT_RGB24;
1685 break;
1686 case GST_VIDEO_FORMAT_BGR:
1687 fourcc = V4L2_PIX_FMT_BGR24;
1688 break;
1689 case GST_VIDEO_FORMAT_xRGB:
1690 fourcc = V4L2_PIX_FMT_RGB32;
1691 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1692 break;
1693 case GST_VIDEO_FORMAT_ARGB:
1694 fourcc = V4L2_PIX_FMT_RGB32;
1695 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1696 break;
1697 case GST_VIDEO_FORMAT_BGRx:
1698 fourcc = V4L2_PIX_FMT_BGR32;
1699 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1700 break;
1701 case GST_VIDEO_FORMAT_BGRA:
1702 fourcc = V4L2_PIX_FMT_BGR32;
1703 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1704 break;
1705 case GST_VIDEO_FORMAT_GRAY8:
1706 fourcc = V4L2_PIX_FMT_GREY;
1707 break;
1708 case GST_VIDEO_FORMAT_GRAY16_LE:
1709 fourcc = V4L2_PIX_FMT_Y16;
1710 break;
1711 case GST_VIDEO_FORMAT_GRAY16_BE:
1712 fourcc = V4L2_PIX_FMT_Y16_BE;
1713 break;
1714 case GST_VIDEO_FORMAT_BGR15:
1715 fourcc = V4L2_PIX_FMT_RGB555X;
1716 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1717 break;
1718 default:
1719 break;
1720 }
1721 }
1722 else
1723 {
1724 if (g_str_equal(mimetype, "video/mpegts"))
1725 {
1726 fourcc = V4L2_PIX_FMT_MPEG;
1727 }
1728 else if (g_str_equal(mimetype, "video/x-dv"))
1729 {
1730 fourcc = V4L2_PIX_FMT_DV;
1731 }
1732 else if (g_str_equal(mimetype, "image/jpeg"))
1733 {
1734 fourcc = V4L2_PIX_FMT_JPEG;
1735 }
1736 else if (g_str_equal(mimetype, "video/mpeg"))
1737 {
1738 gint version;
1739 if (gst_structure_get_int(structure, "mpegversion", &version))
1740 {
1741 switch (version)
1742 {
1743 case 1:
1744 fourcc = V4L2_PIX_FMT_MPEG1;
1745 break;
1746 case 2:
1747 fourcc = V4L2_PIX_FMT_MPEG2;
1748 break;
1749 case 4:
1750 fourcc = V4L2_PIX_FMT_MPEG4;
1751 fourcc_nc = V4L2_PIX_FMT_XVID;
1752 break;
1753 default:
1754 break;
1755 }
1756 }
1757 }
1758 else if (g_str_equal(mimetype, "video/x-fwht"))
1759 {
1760 fourcc = V4L2_PIX_FMT_FWHT;
1761 }
1762 else if (g_str_equal(mimetype, "video/x-h263"))
1763 {
1764 fourcc = V4L2_PIX_FMT_H263;
1765 }
1766 else if (g_str_equal(mimetype, "video/x-h264"))
1767 {
1768 const gchar *stream_format =
1769 gst_structure_get_string(structure, "stream-format");
 1770 if (stream_format && g_str_equal(stream_format, "avc"))
1771 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1772 else
1773 fourcc = V4L2_PIX_FMT_H264;
1774 }
1775 else if (g_str_equal(mimetype, "video/x-h265"))
1776 {
1777 fourcc = V4L2_PIX_FMT_HEVC;
1778 }
1779 else if (g_str_equal(mimetype, "video/x-vp8"))
1780 {
1781 fourcc = V4L2_PIX_FMT_VP8;
1782 }
1783 else if (g_str_equal(mimetype, "video/x-vp9"))
1784 {
1785 fourcc = V4L2_PIX_FMT_VP9;
1786 }
1787 else if (g_str_equal(mimetype, "video/x-av1"))
1788 {
1789 fourcc = V4L2_PIX_FMT_AV1;
1790 }
1791 else if (g_str_equal(mimetype, "video/x-bayer"))
1792 {
1793 const gchar *format = gst_structure_get_string(structure, "format");
1794 if (format)
1795 {
1796 if (!g_ascii_strcasecmp(format, "bggr"))
1797 fourcc = V4L2_PIX_FMT_SBGGR8;
1798 else if (!g_ascii_strcasecmp(format, "gbrg"))
1799 fourcc = V4L2_PIX_FMT_SGBRG8;
1800 else if (!g_ascii_strcasecmp(format, "grbg"))
1801 fourcc = V4L2_PIX_FMT_SGRBG8;
1802 else if (!g_ascii_strcasecmp(format, "rggb"))
1803 fourcc = V4L2_PIX_FMT_SRGGB8;
1804 }
1805 }
1806 else if (g_str_equal(mimetype, "video/x-sonix"))
1807 {
1808 fourcc = V4L2_PIX_FMT_SN9C10X;
1809 }
1810 else if (g_str_equal(mimetype, "video/x-pwc1"))
1811 {
1812 fourcc = V4L2_PIX_FMT_PWC1;
1813 }
1814 else if (g_str_equal(mimetype, "video/x-pwc2"))
1815 {
1816 fourcc = V4L2_PIX_FMT_PWC2;
1817 }
1818 }
1819
1820 /* Prefer the non-contiguous if supported */
1821 v4l2object->prefered_non_contiguous = TRUE;
1822
1823 if (fourcc_nc)
1824 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc_nc);
1825 else if (fourcc == 0)
1826 goto unhandled_format;
1827
1828 if (fmt == NULL)
1829 {
1830 fmt = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object, fourcc);
1831 v4l2object->prefered_non_contiguous = FALSE;
1832 }
1833
1834 if (fmt == NULL)
1835 goto unsupported_format;
1836
1837 *format = fmt;
1838
1839 return TRUE;
1840
1841 /* ERRORS */
1842invalid_format:
1843{
1844 GST_DEBUG_OBJECT(v4l2object, "invalid format");
1845 return FALSE;
1846}
1847unhandled_format:
1848{
1849 GST_DEBUG_OBJECT(v4l2object, "unhandled format");
1850 return FALSE;
1851}
1852unsupported_format:
1853{
1854 GST_DEBUG_OBJECT(v4l2object, "unsupported format");
1855 return FALSE;
1856}
1857}
1858
1859static gboolean
1860gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
1861 guint32 pixelformat, gint *width, gint *height);
1862
1863static void
1864gst_aml_v4l2_object_add_aspect_ratio(GstAmlV4l2Object *v4l2object, GstStructure *s)
1865{
1866 if (v4l2object->keep_aspect && v4l2object->par)
1867 gst_structure_set_value(s, "pixel-aspect-ratio", v4l2object->par);
1868}
1869
1870/* returns TRUE if the value was changed in place, otherwise FALSE */
1871static gboolean
1872gst_aml_v4l2src_value_simplify(GValue *val)
1873{
1874 /* simplify list of one value to one value */
1875 if (GST_VALUE_HOLDS_LIST(val) && gst_value_list_get_size(val) == 1)
1876 {
1877 const GValue *list_val;
1878 GValue new_val = G_VALUE_INIT;
1879
1880 list_val = gst_value_list_get_value(val, 0);
1881 g_value_init(&new_val, G_VALUE_TYPE(list_val));
1882 g_value_copy(list_val, &new_val);
1883 g_value_unset(val);
1884 *val = new_val;
1885 return TRUE;
1886 }
1887
1888 return FALSE;
1889}
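/* For example, a caps field that arrived as the single-entry list "{ NV12 }"
 * is rewritten in place to the plain string "NV12", so later code can treat
 * the field as a fixed value. */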
1890
1891static gboolean
1892gst_aml_v4l2_object_get_interlace_mode(enum v4l2_field field,
1893 GstVideoInterlaceMode *interlace_mode)
1894{
1895 switch (field)
1896 {
1897 case V4L2_FIELD_ANY:
1898 GST_ERROR("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
1899 /* fallthrough */
1900 case V4L2_FIELD_NONE:
1901 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1902 return TRUE;
1903 case V4L2_FIELD_INTERLACED:
1904 case V4L2_FIELD_INTERLACED_TB:
1905 case V4L2_FIELD_INTERLACED_BT:
1906 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1907 return TRUE;
1908 default:
1909 GST_ERROR("Unknown enum v4l2_field %d", field);
1910 return FALSE;
1911 }
1912}
1913
1914static gboolean
1915gst_aml_v4l2_object_get_colorspace(struct v4l2_format *fmt,
1916 GstVideoColorimetry *cinfo)
1917{
1918 gboolean is_rgb =
1919 gst_amL_v4l2_object_v4l2fourcc_is_rgb(fmt->fmt.pix.pixelformat);
1920 enum v4l2_colorspace colorspace;
1921 enum v4l2_quantization range;
1922 enum v4l2_ycbcr_encoding matrix;
1923 enum v4l2_xfer_func transfer;
1924 gboolean ret = TRUE;
1925
1926 if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
1927 {
1928 colorspace = fmt->fmt.pix_mp.colorspace;
1929 range = fmt->fmt.pix_mp.quantization;
1930 matrix = fmt->fmt.pix_mp.ycbcr_enc;
1931 transfer = fmt->fmt.pix_mp.xfer_func;
1932 }
1933 else
1934 {
1935 colorspace = fmt->fmt.pix.colorspace;
1936 range = fmt->fmt.pix.quantization;
1937 matrix = fmt->fmt.pix.ycbcr_enc;
1938 transfer = fmt->fmt.pix.xfer_func;
1939 }
1940
1941 /* First step, set the defaults for each primaries */
1942 switch (colorspace)
1943 {
1944 case V4L2_COLORSPACE_SMPTE170M:
1945 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1946 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1947 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1948 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
1949 break;
1950 case V4L2_COLORSPACE_REC709:
1951 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1952 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
1953 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1954 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1955 break;
1956 case V4L2_COLORSPACE_SRGB:
1957 case V4L2_COLORSPACE_JPEG:
1958 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
1959 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1960 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
1961 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
1962 break;
1963 case V4L2_COLORSPACE_OPRGB:
1964 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1965 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1966 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
1967 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
1968 break;
1969 case V4L2_COLORSPACE_BT2020:
1970 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1971 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
1972 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
1973 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
1974 break;
1975 case V4L2_COLORSPACE_SMPTE240M:
1976 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1977 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
1978 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
1979 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
1980 break;
1981 case V4L2_COLORSPACE_470_SYSTEM_M:
1982 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1983 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1984 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1985 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
1986 break;
1987 case V4L2_COLORSPACE_470_SYSTEM_BG:
1988 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
1989 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1990 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
1991 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
1992 break;
1993 case V4L2_COLORSPACE_RAW:
1994 /* Explicitly unknown */
1995 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
1996 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
1997 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1998 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1999 break;
2000 default:
2001 GST_DEBUG("Unknown enum v4l2_colorspace %d", colorspace);
2002 ret = FALSE;
2003 break;
2004 }
2005
2006 if (!ret)
2007 goto done;
2008
2009 /* Second step, apply any custom variation */
2010 switch (range)
2011 {
2012 case V4L2_QUANTIZATION_FULL_RANGE:
2013 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2014 break;
2015 case V4L2_QUANTIZATION_LIM_RANGE:
2016 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2017 break;
2018 case V4L2_QUANTIZATION_DEFAULT:
2019 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2020 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2021 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2022 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601 || matrix == V4L2_YCBCR_ENC_XV709 || colorspace == V4L2_COLORSPACE_JPEG)
2023 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2024 else
2025 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2026 break;
2027 default:
2028 GST_WARNING("Unknown enum v4l2_quantization value %d", range);
2029 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2030 break;
2031 }
2032
2033 switch (matrix)
2034 {
2035 case V4L2_YCBCR_ENC_XV601:
2036 case V4L2_YCBCR_ENC_SYCC:
2037 GST_FIXME("XV601 and SYCC not defined, assuming 601");
2038 /* fallthrough */
2039 case V4L2_YCBCR_ENC_601:
2040 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2041 break;
2042 case V4L2_YCBCR_ENC_XV709:
2043 GST_FIXME("XV709 not defined, assuming 709");
2044 /* fallthrough */
2045 case V4L2_YCBCR_ENC_709:
2046 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2047 break;
2048 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2049 GST_FIXME("BT2020 with constant luma is not defined, assuming BT2020");
2050 /* fallthrough */
2051 case V4L2_YCBCR_ENC_BT2020:
2052 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2053 break;
2054 case V4L2_YCBCR_ENC_SMPTE240M:
2055 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2056 break;
2057 case V4L2_YCBCR_ENC_DEFAULT:
2058 /* nothing, just use defaults for colorspace */
2059 break;
2060 default:
2061 GST_WARNING("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2062 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2063 break;
2064 }
2065
2066 /* Set identity matrix for R'G'B' formats to avoid creating
2067 * confusion. This though is cosmetic as it's now properly ignored by
2068 * the video info API and videoconvert. */
2069 if (is_rgb)
2070 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2071
2072 switch (transfer)
2073 {
2074 case V4L2_XFER_FUNC_709:
2075 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2076 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2077 else
2078 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2079 break;
2080 case V4L2_XFER_FUNC_SRGB:
2081 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2082 break;
2083 case V4L2_XFER_FUNC_OPRGB:
2084 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2085 break;
2086 case V4L2_XFER_FUNC_SMPTE240M:
2087 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2088 break;
2089 case V4L2_XFER_FUNC_NONE:
2090 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2091 break;
2092 case V4L2_XFER_FUNC_DEFAULT:
2093 /* nothing, just use defaults for colorspace */
2094 break;
2095 default:
2096 GST_WARNING("Unknown enum v4l2_xfer_func value %d", transfer);
2097 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2098 break;
2099 }
2100
2101done:
2102 return ret;
2103}
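
/* Example (illustrative sketch): a CAPTURE format reporting
 * V4L2_COLORSPACE_REC709 with default quantization/encoding/transfer maps to
 * limited-range BT.709 colorimetry, which serializes to "bt709":
 *
 *   struct v4l2_format fmt;
 *   GstVideoColorimetry cinfo;
 *   memset(&fmt, 0, sizeof(fmt));
 *   fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 *   fmt.fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
 *   if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo)) {
 *     gchar *str = gst_video_colorimetry_to_string(&cinfo); // "bt709"
 *     g_free(str);
 *   }
 */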
2104
2105static int
2106gst_aml_v4l2_object_try_fmt(GstAmlV4l2Object *v4l2object,
2107 struct v4l2_format *try_fmt)
2108{
2109 int fd = v4l2object->video_fd;
2110 struct v4l2_format fmt;
2111 int r;
2112
2113 memcpy(&fmt, try_fmt, sizeof(fmt));
2114 r = v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &fmt);
2115
2116 if (r < 0 && errno == ENOTTY)
2117 {
2118 /* The driver might not implement TRY_FMT, in which case we will try
2119 S_FMT to probe */
2120 if (GST_AML_V4L2_IS_ACTIVE(v4l2object))
2121 goto error;
2122
2123 memcpy(&fmt, try_fmt, sizeof(fmt));
2124 r = v4l2object->ioctl(fd, VIDIOC_S_FMT, &fmt);
2125 }
2126 memcpy(try_fmt, &fmt, sizeof(fmt));
2127
2128 return r;
2129
2130error:
2131 memcpy(try_fmt, &fmt, sizeof(fmt));
2132 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2133 "Unable to try format: %s", g_strerror(errno));
2134 return r;
2135}
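
/* Example (illustrative sketch, hypothetical size): probing whether the
 * device accepts NV12 at 1920x1080 without committing to it; on success the
 * driver writes its adjusted width/height/bytesperline back into fmt:
 *
 *   struct v4l2_format fmt;
 *   memset(&fmt, 0, sizeof(fmt));
 *   fmt.type = v4l2object->type;
 *   fmt.fmt.pix.width = 1920;
 *   fmt.fmt.pix.height = 1080;
 *   fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
 *   fmt.fmt.pix.field = V4L2_FIELD_NONE;
 *   if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
 *     GST_LOG("driver proposes %ux%u", fmt.fmt.pix.width, fmt.fmt.pix.height);
 */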
2136
2137static void
2138gst_aml_v4l2_object_add_interlace_mode(GstAmlV4l2Object *v4l2object,
2139 GstStructure *s, guint32 width, guint32 height, guint32 pixelformat)
2140{
2141 struct v4l2_format fmt;
2142 GValue interlace_formats = {
2143 0,
2144 };
2145 enum v4l2_field formats[] = {V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED};
2146 gsize i;
2147 GstVideoInterlaceMode interlace_mode, prev = -1;
2148
2149 if (!g_str_equal(gst_structure_get_name(s), "video/x-raw"))
2150 return;
2151
2152 if (v4l2object->never_interlaced)
2153 {
2154 gst_structure_set(s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2155 return;
2156 }
2157
2158 g_value_init(&interlace_formats, GST_TYPE_LIST);
2159
2160 /* Try twice - once for NONE, once for INTERLACED. */
2161 for (i = 0; i < G_N_ELEMENTS(formats); i++)
2162 {
2163 memset(&fmt, 0, sizeof(fmt));
2164 fmt.type = v4l2object->type;
2165 fmt.fmt.pix.width = width;
2166 fmt.fmt.pix.height = height;
2167 fmt.fmt.pix.pixelformat = pixelformat;
2168 fmt.fmt.pix.field = formats[i];
2169
2170 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0 &&
2171 gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode) && prev != interlace_mode)
2172 {
2173 GValue interlace_enum = {
2174 0,
2175 };
2176 const gchar *mode_string;
2177 g_value_init(&interlace_enum, G_TYPE_STRING);
2178 mode_string = gst_video_interlace_mode_to_string(interlace_mode);
2179 g_value_set_string(&interlace_enum, mode_string);
2180 gst_value_list_append_and_take_value(&interlace_formats,
2181 &interlace_enum);
2182 prev = interlace_mode;
2183 }
2184 }
2185
2186 if (gst_aml_v4l2src_value_simplify(&interlace_formats) || gst_value_list_get_size(&interlace_formats) > 0)
2187 gst_structure_take_value(s, "interlace-mode", &interlace_formats);
2188 else
2189 GST_WARNING_OBJECT(v4l2object->dbg_obj, "Failed to determine interlace mode");
2190
2191 return;
2192}
2193
2194static void
2195gst_aml_v4l2_object_fill_colorimetry_list(GValue *list,
2196 GstVideoColorimetry *cinfo)
2197{
2198 GValue colorimetry = G_VALUE_INIT;
2199 guint size;
2200 guint i;
2201 gboolean found = FALSE;
2202
2203 g_value_init(&colorimetry, G_TYPE_STRING);
2204 g_value_take_string(&colorimetry, gst_video_colorimetry_to_string(cinfo));
2205
2206 /* only insert if no duplicate */
2207 size = gst_value_list_get_size(list);
2208 for (i = 0; i < size; i++)
2209 {
2210 const GValue *tmp;
2211
2212 tmp = gst_value_list_get_value(list, i);
2213 if (gst_value_compare(&colorimetry, tmp) == GST_VALUE_EQUAL)
2214 {
2215 found = TRUE;
2216 break;
2217 }
2218 }
2219
2220 if (!found)
2221 gst_value_list_append_and_take_value(list, &colorimetry);
2222 else
2223 g_value_unset(&colorimetry);
2224}
2225
2226static void
2227gst_aml_v4l2_object_add_colorspace(GstAmlV4l2Object *v4l2object, GstStructure *s,
2228 guint32 width, guint32 height, guint32 pixelformat)
2229{
2230 struct v4l2_format fmt;
2231 GValue list = G_VALUE_INIT;
2232 GstVideoColorimetry cinfo;
2233 enum v4l2_colorspace req_cspace;
2234
2235 memset(&fmt, 0, sizeof(fmt));
2236 fmt.type = v4l2object->type;
2237 fmt.fmt.pix.width = width;
2238 fmt.fmt.pix.height = height;
2239 fmt.fmt.pix.pixelformat = pixelformat;
2240
2241 g_value_init(&list, GST_TYPE_LIST);
2242
2243 /* step 1: get device default colorspace and insert it first as
2244 * it should be the preferred one */
2245 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2246 {
2247 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2248 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2249 }
2250
2251 /* step 2: probe all colorspaces other than the default
2252 * We don't probe every colorspace, range, matrix and transfer combination to
2253 * avoid ioctl flooding, which could greatly increase initialization time
2254 * with low-speed devices (UVC...) */
2255 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2256 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++)
2257 {
2258 /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2259 if (req_cspace == V4L2_COLORSPACE_BT878)
2260 continue;
2261
2262 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2263 fmt.fmt.pix_mp.colorspace = req_cspace;
2264 else
2265 fmt.fmt.pix.colorspace = req_cspace;
2266
2267 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) == 0)
2268 {
2269 enum v4l2_colorspace colorspace;
2270
2271 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
2272 colorspace = fmt.fmt.pix_mp.colorspace;
2273 else
2274 colorspace = fmt.fmt.pix.colorspace;
2275
2276 if (colorspace == req_cspace)
2277 {
2278 if (gst_aml_v4l2_object_get_colorspace(&fmt, &cinfo))
2279 gst_aml_v4l2_object_fill_colorimetry_list(&list, &cinfo);
2280 }
2281 }
2282 }
2283
2284 if (gst_value_list_get_size(&list) > 0)
2285 gst_structure_take_value(s, "colorimetry", &list);
2286 else
2287 g_value_unset(&list);
2288
2289 return;
2290}
2291
2292/* The frame interval enumeration code first appeared in Linux 2.6.19. */
2293static GstStructure *
2294gst_aml_v4l2_object_probe_caps_for_format_and_size(GstAmlV4l2Object *v4l2object,
2295 guint32 pixelformat,
2296 guint32 width, guint32 height, const GstStructure *template)
2297{
2298 gint fd = v4l2object->video_fd;
2299 struct v4l2_frmivalenum ival;
2300 guint32 num, denom;
2301 GstStructure *s;
2302 GValue rates = {
2303 0,
2304 };
2305
2306 memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
2307 ival.index = 0;
2308 ival.pixel_format = pixelformat;
2309 ival.width = width;
2310 ival.height = height;
2311
2312 GST_LOG_OBJECT(v4l2object->dbg_obj,
2313 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2314 GST_FOURCC_ARGS(pixelformat));
2315
2316 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2317 * fraction to get framerate */
2318 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2319 goto enum_frameintervals_failed;
2320
2321 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
2322 {
2323 GValue rate = {
2324 0,
2325 };
2326
2327 g_value_init(&rates, GST_TYPE_LIST);
2328 g_value_init(&rate, GST_TYPE_FRACTION);
2329
2330 do
2331 {
2332 num = ival.discrete.numerator;
2333 denom = ival.discrete.denominator;
2334
2335 if (num > G_MAXINT || denom > G_MAXINT)
2336 {
2337 /* let us hope we don't get here... */
2338 num >>= 1;
2339 denom >>= 1;
2340 }
2341
2342 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2343 denom, num);
2344
2345 /* swap to get the framerate */
2346 gst_value_set_fraction(&rate, denom, num);
2347 gst_value_list_append_value(&rates, &rate);
2348
2349 ival.index++;
2350 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2351 }
2352 else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
2353 {
2354 GValue min = {
2355 0,
2356 };
2357 GValue step = {
2358 0,
2359 };
2360 GValue max = {
2361 0,
2362 };
2363 gboolean added = FALSE;
2364 guint32 minnum, mindenom;
2365 guint32 maxnum, maxdenom;
2366
2367 g_value_init(&rates, GST_TYPE_LIST);
2368
2369 g_value_init(&min, GST_TYPE_FRACTION);
2370 g_value_init(&step, GST_TYPE_FRACTION);
2371 g_value_init(&max, GST_TYPE_FRACTION);
2372
2373 /* get the min */
2374 minnum = ival.stepwise.min.numerator;
2375 mindenom = ival.stepwise.min.denominator;
2376 if (minnum > G_MAXINT || mindenom > G_MAXINT)
2377 {
2378 minnum >>= 1;
2379 mindenom >>= 1;
2380 }
2381 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2382 minnum, mindenom);
2383 gst_value_set_fraction(&min, minnum, mindenom);
2384
2385 /* get the max */
2386 maxnum = ival.stepwise.max.numerator;
2387 maxdenom = ival.stepwise.max.denominator;
2388 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2389 {
2390 maxnum >>= 1;
2391 maxdenom >>= 1;
2392 }
2393
2394 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2395 maxnum, maxdenom);
2396 gst_value_set_fraction(&max, maxnum, maxdenom);
2397
2398 /* get the step */
2399 num = ival.stepwise.step.numerator;
2400 denom = ival.stepwise.step.denominator;
2401 if (num > G_MAXINT || denom > G_MAXINT)
2402 {
2403 num >>= 1;
2404 denom >>= 1;
2405 }
2406
2407 if (num == 0 || denom == 0)
2408 {
2409 /* in this case we have a wrong fraction or no step, set the step to max
2410 * so that we only add the min value in the loop below */
2411 num = maxnum;
2412 denom = maxdenom;
2413 }
2414
2415 /* since we only have gst_value_fraction_subtract and not add, negate the
2416 * numerator */
2417 GST_LOG_OBJECT(v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2418 num, denom);
2419 gst_value_set_fraction(&step, -num, denom);
2420
2421 while (gst_value_compare(&min, &max) != GST_VALUE_GREATER_THAN)
2422 {
2423 GValue rate = {
2424 0,
2425 };
2426
2427 num = gst_value_get_fraction_numerator(&min);
2428 denom = gst_value_get_fraction_denominator(&min);
2429 GST_LOG_OBJECT(v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2430 denom, num);
2431
2432 /* invert to get the framerate */
2433 g_value_init(&rate, GST_TYPE_FRACTION);
2434 gst_value_set_fraction(&rate, denom, num);
2435 gst_value_list_append_value(&rates, &rate);
2436 added = TRUE;
2437
2438 /* we're actually adding because step was negated above. This is because
2439 * there is no _add function... */
2440 if (!gst_value_fraction_subtract(&min, &min, &step))
2441 {
2442 GST_WARNING_OBJECT(v4l2object->dbg_obj, "could not step fraction!");
2443 break;
2444 }
2445 }
2446 if (!added)
2447 {
2448 /* no range was added, leave the default range from the template */
2449 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2450 "no range added, leaving default");
2451 g_value_unset(&rates);
2452 }
2453 }
2454 else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
2455 {
2456 guint32 maxnum, maxdenom;
2457
2458 g_value_init(&rates, GST_TYPE_FRACTION_RANGE);
2459
2460 num = ival.stepwise.min.numerator;
2461 denom = ival.stepwise.min.denominator;
2462 if (num > G_MAXINT || denom > G_MAXINT)
2463 {
2464 num >>= 1;
2465 denom >>= 1;
2466 }
2467
2468 maxnum = ival.stepwise.max.numerator;
2469 maxdenom = ival.stepwise.max.denominator;
2470 if (maxnum > G_MAXINT || maxdenom > G_MAXINT)
2471 {
2472 maxnum >>= 1;
2473 maxdenom >>= 1;
2474 }
2475
2476 GST_LOG_OBJECT(v4l2object->dbg_obj,
2477 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2478 num);
2479
2480 gst_value_set_fraction_range_full(&rates, maxdenom, maxnum, denom, num);
2481 }
2482 else
2483 {
2484 goto unknown_type;
2485 }
2486
2487return_data:
2488 s = gst_structure_copy(template);
2489 gst_structure_set(s, "width", G_TYPE_INT, (gint)width,
2490 "height", G_TYPE_INT, (gint)height, NULL);
2491
2492 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, s);
2493
2494 if (!v4l2object->skip_try_fmt_probes)
2495 {
2496 gst_aml_v4l2_object_add_interlace_mode(v4l2object, s, width, height,
2497 pixelformat);
2498 gst_aml_v4l2_object_add_colorspace(v4l2object, s, width, height, pixelformat);
2499 }
2500
2501 if (G_IS_VALUE(&rates))
2502 {
2503 gst_aml_v4l2src_value_simplify(&rates);
2504 /* only change the framerate on the template when we have a valid probed new
2505 * value */
2506 gst_structure_take_value(s, "framerate", &rates);
2507 }
2508 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2509 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2510 {
2511 gst_structure_set(s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2512 1, NULL);
2513 }
2514 return s;
2515
2516 /* ERRORS */
2517enum_frameintervals_failed:
2518{
2519 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2520 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2521 GST_FOURCC_ARGS(pixelformat), width, height);
2522 goto return_data;
2523}
2524unknown_type:
2525{
2526 /* I don't see how this is actually an error, we ignore the format then */
2527 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2528 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2529 GST_FOURCC_ARGS(pixelformat), width, height, ival.type);
2530 return NULL;
2531}
2532}
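
/* Example (worked, hypothetical interval range): for a stepwise range of
 * 1/60 .. 1/15 with a step of 1/60, the loop above yields the framerates
 * 60/1, 30/1, 20/1 and 15/1. Stepping is done by subtracting a negated step,
 * since GstFraction only provides a subtract operation:
 *
 *   GValue cur = G_VALUE_INIT, step = G_VALUE_INIT;
 *   g_value_init(&cur, GST_TYPE_FRACTION);
 *   g_value_init(&step, GST_TYPE_FRACTION);
 *   gst_value_set_fraction(&cur, 1, 60);   // current frame interval
 *   gst_value_set_fraction(&step, -1, 60); // negated step
 *   gst_value_fraction_subtract(&cur, &cur, &step); // cur is now 1/30
 */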
2533
2534static gint
2535sort_by_frame_size(GstStructure *s1, GstStructure *s2)
2536{
2537 int w1, h1, w2, h2;
2538
2539 gst_structure_get_int(s1, "width", &w1);
2540 gst_structure_get_int(s1, "height", &h1);
2541 gst_structure_get_int(s2, "width", &w2);
2542 gst_structure_get_int(s2, "height", &h2);
2543
2544 /* I think it's safe to assume that this won't overflow for a while */
2545 return ((w2 * h2) - (w1 * h1));
2546}
2547
2548static void
2549gst_aml_v4l2_object_update_and_append(GstAmlV4l2Object *v4l2object,
2550 guint32 format, GstCaps *caps, GstStructure *s)
2551{
2552 GstStructure *alt_s = NULL;
2553
2554 /* Encoded streams on the output buffer need to be parsed */
2555 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2556 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
2557 {
2558 gint i = 0;
2559
2560 for (; i < GST_AML_V4L2_FORMAT_COUNT; i++)
2561 {
2562 if (format == gst_aml_v4l2_formats[i].format &&
2563 gst_aml_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2564 !(gst_aml_v4l2_formats[i].flags & GST_V4L2_NO_PARSE))
2565 {
2566 gst_structure_set(s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2567 break;
2568 }
2569 }
2570 }
2571
2572 if (v4l2object->has_alpha_component &&
2573 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2574 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE))
2575 {
2576 switch (format)
2577 {
2578 case V4L2_PIX_FMT_RGB32:
2579 alt_s = gst_structure_copy(s);
2580 gst_structure_set(alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2581 break;
2582 case V4L2_PIX_FMT_BGR32:
2583 alt_s = gst_structure_copy(s);
2584 gst_structure_set(alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2585 break;
2586 default:
2587 break;
2588 }
2589 }
2590
2591 gst_caps_append_structure(caps, s);
2592
2593 if (alt_s)
2594 gst_caps_append_structure(caps, alt_s);
2595}
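
/* Example (illustrative, assuming the format table flags it as a parsed
 * codec): for V4L2_PIX_FMT_H264 on the OUTPUT queue the appended structure
 * gains parsed=(boolean)true, so upstream must deliver complete frames,
 * roughly:
 *
 *   video/x-h264, width=(int)1920, height=(int)1080, parsed=(boolean)true
 *
 * For V4L2_PIX_FMT_RGB32 on CAPTURE with the alpha control enabled, an
 * additional "ARGB" variant of the structure is appended as well. */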
2596
2597static GstCaps *
2598gst_aml_v4l2_object_probe_caps_for_format(GstAmlV4l2Object *v4l2object,
2599 guint32 pixelformat, const GstStructure *template)
2600{
2601 GstCaps *ret = gst_caps_new_empty();
2602 GstStructure *tmp;
2603 gint fd = v4l2object->video_fd;
2604 struct v4l2_frmsizeenum size;
2605 GList *results = NULL;
2606 guint32 w, h;
2607
2608 if (pixelformat == GST_MAKE_FOURCC('M', 'P', 'E', 'G'))
2609 {
2610 gst_caps_append_structure(ret, gst_structure_copy(template));
2611 return ret;
2612 }
2613
2614 memset(&size, 0, sizeof(struct v4l2_frmsizeenum));
2615 size.index = 0;
2616 size.pixel_format = pixelformat;
2617
2618 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2619 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2620 GST_FOURCC_ARGS(pixelformat));
2621
2622 if (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2623 goto enum_framesizes_failed;
2624
2625 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE)
2626 {
2627 do
2628 {
2629 GST_LOG_OBJECT(v4l2object->dbg_obj, "got discrete frame size %dx%d",
2630 size.discrete.width, size.discrete.height);
2631
2632 w = MIN(size.discrete.width, G_MAXINT);
2633 h = MIN(size.discrete.height, G_MAXINT);
2634
2635 if (w && h)
2636 {
2637 tmp =
2638 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2639 pixelformat, w, h, template);
2640
2641 if (tmp)
2642 results = g_list_prepend(results, tmp);
2643 }
2644
2645 size.index++;
2646 } while (v4l2object->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2647 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2648 "done iterating discrete frame sizes");
2649 }
2650 else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE)
2651 {
2652 guint32 maxw, maxh, step_w, step_h;
2653
2654 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have stepwise frame sizes:");
2655 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2656 size.stepwise.min_width);
2657 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2658 size.stepwise.min_height);
2659 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2660 size.stepwise.max_width);
2661 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2662 size.stepwise.max_height);
2663 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step width: %d",
2664 size.stepwise.step_width);
2665 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "step height: %d",
2666 size.stepwise.step_height);
2667
2668 w = MAX(size.stepwise.min_width, 1);
2669 h = MAX(size.stepwise.min_height, 1);
2670 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2671 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2672
2673 step_w = MAX(size.stepwise.step_width, 1);
2674 step_h = MAX(size.stepwise.step_height, 1);
2675
2676 /* FIXME: check for sanity and that min/max are multiples of the steps */
2677
2678 /* we only query details for the max width/height since it's likely the
2679 * most restricted if there are any resolution-dependent restrictions */
2680 tmp = gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object,
2681 pixelformat, maxw, maxh, template);
2682
2683 if (tmp)
2684 {
2685 GValue step_range = G_VALUE_INIT;
2686
2687 g_value_init(&step_range, GST_TYPE_INT_RANGE);
2688 gst_value_set_int_range_step(&step_range, w, maxw, step_w);
2689 gst_structure_set_value(tmp, "width", &step_range);
2690
2691 gst_value_set_int_range_step(&step_range, h, maxh, step_h);
2692 gst_structure_take_value(tmp, "height", &step_range);
2693
2694 /* no point using the results list here, since there's only one struct */
2695 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2696 }
2697 }
2698 else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)
2699 {
2700 guint32 maxw, maxh;
2701
2702 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "we have continuous frame sizes:");
2703 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min width: %d",
2704 size.stepwise.min_width);
2705 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2706 size.stepwise.min_height);
2707 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "max width: %d",
2708 size.stepwise.max_width);
2709 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "min height: %d",
2710 size.stepwise.max_height);
2711
2712 w = MAX(size.stepwise.min_width, 1);
2713 h = MAX(size.stepwise.min_height, 1);
2714 maxw = MIN(size.stepwise.max_width, G_MAXINT);
2715 maxh = MIN(size.stepwise.max_height, G_MAXINT);
2716
2717 tmp =
2718 gst_aml_v4l2_object_probe_caps_for_format_and_size(v4l2object, pixelformat,
2719 w, h, template);
2720 if (tmp)
2721 {
2722 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, (gint)w,
2723 (gint)maxw, "height", GST_TYPE_INT_RANGE, (gint)h, (gint)maxh,
2724 NULL);
2725
2726 /* no point using the results list here, since there's only one struct */
2727 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2728 }
2729 }
2730 else
2731 {
2732 goto unknown_type;
2733 }
2734
2735 /* we use an intermediary list to store and then sort the results of the
2736 * probing because we can't make any assumptions about the order in which
2737 * the driver will give us the sizes, but we want the final caps to contain
2738 * the results starting with the highest resolution and having the lowest
2739 * resolution last, since order in caps matters for things like fixation. */
2740 results = g_list_sort(results, (GCompareFunc)sort_by_frame_size);
2741 while (results != NULL)
2742 {
2743 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret,
2744 results->data);
2745 results = g_list_delete_link(results, results);
2746 }
2747
2748 if (gst_caps_is_empty(ret))
2749 goto enum_framesizes_no_results;
2750
2751 return ret;
2752
2753 /* ERRORS */
2754enum_framesizes_failed:
2755{
2756 /* I don't see how this is actually an error */
2757 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2758 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2759 " (%s)",
2760 GST_FOURCC_ARGS(pixelformat), g_strerror(errno));
2761 goto default_frame_sizes;
2762}
2763enum_framesizes_no_results:
2764{
2765 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2766 * question doesn't actually support it yet */
2767 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
2768 "No results for pixelformat %" GST_FOURCC_FORMAT
2769 " enumerating frame sizes, trying fallback",
2770 GST_FOURCC_ARGS(pixelformat));
2771 goto default_frame_sizes;
2772}
2773unknown_type:
2774{
2775 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2776 "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
2777 ": %u",
2778 GST_FOURCC_ARGS(pixelformat), size.type);
2779 goto default_frame_sizes;
2780}
2781
2782default_frame_sizes:
2783{
2784 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2785
2786 /* This code is for Linux < 2.6.19 */
2787 min_w = min_h = 1;
2788 max_w = max_h = GST_AML_V4L2_MAX_SIZE;
2789 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &min_w,
2790 &min_h))
2791 {
2792 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2793 "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2794 }
2795 if (!gst_aml_v4l2_object_get_nearest_size(v4l2object, pixelformat, &max_w,
2796 &max_h))
2797 {
2798 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2799 "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS(pixelformat));
2800 }
2801
2802 tmp = gst_structure_copy(template);
2803 if (fix_num)
2804 {
2805 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2806 fix_denom, NULL);
2807 }
2808 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2809 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
2810 {
2811 /* if norm can't be used, copy the template framerate */
2812 gst_structure_set(tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2813 G_MAXINT, 1, NULL);
2814 }
2815
2816 if (min_w == max_w)
2817 gst_structure_set(tmp, "width", G_TYPE_INT, max_w, NULL);
2818 else
2819 gst_structure_set(tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2820
2821 if (min_h == max_h)
2822 gst_structure_set(tmp, "height", G_TYPE_INT, max_h, NULL);
2823 else
2824 gst_structure_set(tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2825
2826 gst_aml_v4l2_object_add_aspect_ratio(v4l2object, tmp);
2827
2828 if (!v4l2object->skip_try_fmt_probes)
2829 {
2830 /* We could consider setting interlace mode from min and max. */
2831 gst_aml_v4l2_object_add_interlace_mode(v4l2object, tmp, max_w, max_h,
2832 pixelformat);
2833 /* We could also consider checking the colorspace for min, in case it depends
2834 * on the size. But then min and max might not be enough. */
2835 gst_aml_v4l2_object_add_colorspace(v4l2object, tmp, max_w, max_h,
2836 pixelformat);
2837 }
2838
2839 gst_aml_v4l2_object_update_and_append(v4l2object, pixelformat, ret, tmp);
2840 return ret;
2841}
2842}
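
/* Example (illustrative sketch, hypothetical driver limits): a stepwise
 * enumeration of 64x64 .. 1920x1080 with a step of 2 in each direction ends
 * up as one structure with stepped integer ranges, roughly:
 *
 *   video/x-raw, format=(string)NV12, width=(int)[ 64, 1920, 2 ],
 *       height=(int)[ 64, 1080, 2 ], framerate=(fraction)[ 0/1, 2147483647/1 ]
 */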
2843
2844static gboolean
2845gst_aml_v4l2_object_get_nearest_size(GstAmlV4l2Object *v4l2object,
2846 guint32 pixelformat, gint *width, gint *height)
2847{
2848 struct v4l2_format fmt;
2849 gboolean ret = FALSE;
2850 GstVideoInterlaceMode interlace_mode;
2851
2852 g_return_val_if_fail(width != NULL, FALSE);
2853 g_return_val_if_fail(height != NULL, FALSE);
2854
2855 GST_LOG_OBJECT(v4l2object->dbg_obj,
2856 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2857 *width, *height, GST_FOURCC_ARGS(pixelformat));
2858
2859 memset(&fmt, 0, sizeof(struct v4l2_format));
2860
2861 /* get size delimiters */
2862 memset(&fmt, 0, sizeof(fmt));
2863 fmt.type = v4l2object->type;
2864 fmt.fmt.pix.width = *width;
2865 fmt.fmt.pix.height = *height;
2866 fmt.fmt.pix.pixelformat = pixelformat;
2867 fmt.fmt.pix.field = V4L2_FIELD_ANY;
2868
2869 if (gst_aml_v4l2_object_try_fmt(v4l2object, &fmt) < 0)
2870 goto error;
2871
2872 GST_LOG_OBJECT(v4l2object->dbg_obj,
2873 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2874
2875 *width = fmt.fmt.pix.width;
2876 *height = fmt.fmt.pix.height;
2877
2878 if (!gst_aml_v4l2_object_get_interlace_mode(fmt.fmt.pix.field, &interlace_mode))
2879 {
2880 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2881 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2882 GST_FOURCC_ARGS(pixelformat), *width, *height, fmt.fmt.pix.field);
2883 goto error;
2884 }
2885
2886 ret = TRUE;
2887
2888error:
2889 if (!ret)
2890 {
2891 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2892 "Unable to try format: %s", g_strerror(errno));
2893 }
2894
2895 return ret;
2896}
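
/* Example (illustrative sketch, hypothetical limits): asking for an oversized
 * resolution and letting TRY_FMT clamp it is how the fallback path above
 * discovers the largest size the device accepts:
 *
 *   gint w = 32768, h = 32768;
 *   if (gst_aml_v4l2_object_get_nearest_size(v4l2object, V4L2_PIX_FMT_NV12,
 *           &w, &h))
 *     GST_LOG("largest supported size is %dx%d", w, h); // e.g. 1920x1080
 */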
2897
2898static gboolean
2899gst_aml_v4l2_object_is_dmabuf_supported(GstAmlV4l2Object *v4l2object)
2900{
2901 gboolean ret = TRUE;
2902 struct v4l2_exportbuffer expbuf = {
2903 .type = v4l2object->type,
2904 .index = -1,
2905 .plane = -1,
2906 .flags = O_CLOEXEC | O_RDWR,
2907 };
2908
2909 if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED)
2910 {
2911 GST_WARNING_OBJECT(v4l2object->dbg_obj,
2912 "libv4l2 converter detected, disabling DMABuf");
2913 ret = FALSE;
2914 }
2915
2916 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
2917 v4l2object->ioctl(v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
2918 if (errno == ENOTTY)
2919 ret = FALSE;
2920
2921 return ret;
2922}
2923
2924static gboolean
2925gst_aml_v4l2_object_setup_pool(GstAmlV4l2Object *v4l2object, GstCaps *caps)
2926{
2927 GstAmlV4l2IOMode mode;
2928
2929 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "initializing the %s system",
2930 V4L2_TYPE_IS_OUTPUT(v4l2object->type) ? "output" : "capture");
2931
2932 GST_AML_V4L2_CHECK_OPEN(v4l2object);
2933 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
2934
2935 /* find transport */
2936 mode = v4l2object->req_mode;
2937
2938 if (v4l2object->device_caps & V4L2_CAP_READWRITE)
2939 {
2940 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2941 mode = GST_V4L2_IO_RW;
2942 }
2943 else if (v4l2object->req_mode == GST_V4L2_IO_RW)
2944 goto method_not_supported;
2945
2946 if (v4l2object->device_caps & V4L2_CAP_STREAMING)
2947 {
2948 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2949 {
2950 if (!V4L2_TYPE_IS_OUTPUT(v4l2object->type) &&
2951 gst_aml_v4l2_object_is_dmabuf_supported(v4l2object))
2952 {
2953 mode = GST_V4L2_IO_DMABUF;
2954 }
2955 else
2956 {
2957 mode = GST_V4L2_IO_MMAP;
2958 }
2959 }
2960 }
2961 else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
2962 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
2963 goto method_not_supported;
2964
2965 /* if still no transport selected, error out */
2966 if (mode == GST_V4L2_IO_AUTO)
2967 goto no_supported_capture_method;
2968
2969 GST_INFO_OBJECT(v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
2970 v4l2object->mode = mode;
2971
2972 /* If min_buffers is not set, the driver either does not support the control or
2973 it has not been asked yet via propose_allocation/decide_allocation. */
2974 if (!v4l2object->min_buffers)
2975 gst_aml_v4l2_get_driver_min_buffers(v4l2object);
2976
2977 /* Map the buffers */
2978 GST_LOG_OBJECT(v4l2object->dbg_obj, "initiating buffer pool");
2979
2980 if (!(v4l2object->pool = gst_aml_v4l2_buffer_pool_new(v4l2object, caps)))
2981 goto buffer_pool_new_failed;
2982
2983 GST_AML_V4L2_SET_ACTIVE(v4l2object);
2984
2985 return TRUE;
2986
2987 /* ERRORS */
2988buffer_pool_new_failed:
2989{
2990 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
2991 (_("Could not map buffers from device '%s'"),
2992 v4l2object->videodev),
2993 ("Failed to create buffer pool: %s", g_strerror(errno)));
2994 return FALSE;
2995}
2996method_not_supported:
2997{
2998 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
2999 (_("The driver of device '%s' does not support the IO method %d"),
3000 v4l2object->videodev, mode),
3001 (NULL));
3002 return FALSE;
3003}
3004no_supported_capture_method:
3005{
3006 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ,
3007 (_("The driver of device '%s' does not support any known IO "
3008 "method."),
3009 v4l2object->videodev),
3010 (NULL));
3011 return FALSE;
3012}
3013}
3014
3015static void
3016gst_aml_v4l2_object_set_stride(GstVideoInfo *info, GstVideoAlignment *align,
3017 gint plane, gint stride)
3018{
3019 const GstVideoFormatInfo *finfo = info->finfo;
3020
3021 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3022 {
3023 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3024
3025 ws = GST_VIDEO_FORMAT_INFO_TILE_WS(finfo);
3026 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3027 tile_height = 1 << hs;
3028
3029 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, plane,
3030 info->height + align->padding_top + align->padding_bottom);
3031 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3032
3033 x_tiles = stride >> ws;
3034 y_tiles = padded_height >> hs;
3035 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles);
3036 }
3037 else
3038 {
3039 info->stride[plane] = stride;
3040 }
3041}
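
/* Example (worked, hypothetical tile geometry): for a tiled format with
 * 64x32 tiles (ws = 6, hs = 5), a 1280-byte stride and a padded height of
 * 736 lines give x_tiles = 1280 >> 6 = 20 and y_tiles = 736 >> 5 = 23, so
 * the stored value is GST_VIDEO_TILE_MAKE_STRIDE(20, 23) rather than a
 * plain byte stride. */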
3042
3043static void
3044gst_aml_v4l2_object_extrapolate_info(GstAmlV4l2Object *v4l2object,
3045 GstVideoInfo *info, GstVideoAlignment *align, gint stride)
3046{
3047 const GstVideoFormatInfo *finfo = info->finfo;
3048 gint i, estride, padded_height;
3049 gsize offs = 0;
3050
3051 g_return_if_fail(v4l2object->n_v4l2_planes == 1);
3052
3053 padded_height = info->height + align->padding_top + align->padding_bottom;
3054
3055 for (i = 0; i < finfo->n_planes; i++)
3056 {
3057 estride = gst_aml_v4l2_object_extrapolate_stride(finfo, i, stride);
3058
3059 gst_aml_v4l2_object_set_stride(info, align, i, estride);
3060
3061 info->offset[i] = offs;
3062 offs += estride *
3063 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, padded_height);
3064
3065 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3066 "Extrapolated for plane %d with base stride %d: "
3067 "stride %d, offset %" G_GSIZE_FORMAT,
3068 i, stride, info->stride[i],
3069 info->offset[i]);
3070 }
3071
3072 /* Update the image size according to the amount of data we are going to
3073 * read/write. This works around bugs in drivers where the sizeimage provided
3074 * by TRY/S_FMT represents the buffer length (maximum size) rather than the
3075 * expected bytesused (buffer size). */
3076 if (offs < info->size)
3077 info->size = offs;
3078}
3079
3080static void
3081gst_aml_v4l2_object_save_format(GstAmlV4l2Object *v4l2object,
3082 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3083 GstVideoInfo *info, GstVideoAlignment *align)
3084{
3085 const GstVideoFormatInfo *finfo = info->finfo;
3086 gboolean standard_stride = TRUE;
3087 gint stride, pstride, padded_width, padded_height, i;
3088
3089 if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_ENCODED)
3090 {
3091 v4l2object->n_v4l2_planes = 1;
3092 info->size = format->fmt.pix.sizeimage;
3093 goto store_info;
3094 }
3095
3096 /* adjust right padding */
3097 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3098 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3099 else
3100 stride = format->fmt.pix.bytesperline;
3101
3102 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE(finfo, 0);
3103 if (pstride)
3104 {
3105 padded_width = stride / pstride;
3106 }
3107 else
3108 {
3109 /* pstride can be 0 for complex formats */
3110 GST_WARNING_OBJECT(v4l2object->element,
3111 "format %s has a pstride of 0, cannot compute padded with",
3112 gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(info)));
3113 padded_width = stride;
3114 }
3115
3116 if (padded_width < format->fmt.pix.width)
3117 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3118 "Driver bug detected, stride (%d) is too small for the width (%d)",
3119 padded_width, format->fmt.pix.width);
3120
3121 align->padding_right = padded_width - info->width - align->padding_left;
3122
3123 /* adjust bottom padding */
3124 padded_height = format->fmt.pix.height;
3125
3126 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3127 {
3128 guint hs, tile_height;
3129
3130 hs = GST_VIDEO_FORMAT_INFO_TILE_HS(finfo);
3131 tile_height = 1 << hs;
3132
3133 padded_height = GST_ROUND_UP_N(padded_height, tile_height);
3134 }
3135
3136 align->padding_bottom = padded_height - info->height - align->padding_top;
3137
3138 /* setup the strides and offset */
3139 if (V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type))
3140 {
3141 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3142
3143 /* figure out the frame layout */
3144 v4l2object->n_v4l2_planes = MAX(1, pix_mp->num_planes);
3145 info->size = 0;
3146 for (i = 0; i < v4l2object->n_v4l2_planes; i++)
3147 {
3148 stride = pix_mp->plane_fmt[i].bytesperline;
3149
3150 if (info->stride[i] != stride)
3151 standard_stride = FALSE;
3152
3153 gst_aml_v4l2_object_set_stride(info, align, i, stride);
3154 info->offset[i] = info->size;
3155 info->size += pix_mp->plane_fmt[i].sizeimage;
3156 }
3157
3158 /* Extrapolate strides if a planar format is being set in one v4l2 plane */
3159 if (v4l2object->n_v4l2_planes < finfo->n_planes)
3160 {
3161 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3162 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3163 }
3164 }
3165 else
3166 {
3167 /* only one plane in non-MPLANE mode */
3168 v4l2object->n_v4l2_planes = 1;
3169 info->size = format->fmt.pix.sizeimage;
3170 stride = format->fmt.pix.bytesperline;
3171
3172 if (info->stride[0] != stride)
3173 standard_stride = FALSE;
3174
3175 gst_aml_v4l2_object_extrapolate_info(v4l2object, info, align, stride);
3176 }
3177
3178 /* adjust the offset to take into account left and top */
3179 if (GST_VIDEO_FORMAT_INFO_IS_TILED(finfo))
3180 {
3181 if ((align->padding_left + align->padding_top) > 0)
3182 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3183 "Left and top padding is not permitted for tiled formats");
3184 }
3185 else
3186 {
3187 for (i = 0; i < finfo->n_planes; i++)
3188 {
3189 gint vedge, hedge;
3190
3191 /* FIXME we assume plane == component, as this is true for all formats
3192 * we currently support. */
3193
3194 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, i, align->padding_left);
3195 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(finfo, i, align->padding_top);
3196
3197 info->offset[i] += (vedge * info->stride[i]) +
3198 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE(info, i));
3199 }
3200 }
3201
3202store_info:
3203 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3204 info->size);
3205
3206 /* to avoid copies we need video meta if there is padding */
3207 v4l2object->need_video_meta =
3208 ((align->padding_top + align->padding_left + align->padding_right +
3209 align->padding_bottom) != 0);
3210
3211 /* ... or if stride is non "standard" */
3212 if (!standard_stride)
3213 v4l2object->need_video_meta = TRUE;
3214
3215 /* ... or also video meta if we use multiple, non-contiguous, planes */
3216 if (v4l2object->n_v4l2_planes > 1)
3217 v4l2object->need_video_meta = TRUE;
3218
3219 v4l2object->info = *info;
3220 v4l2object->align = *align;
3221 v4l2object->format = *format;
3222 v4l2object->fmtdesc = fmtdesc;
3223
3224 /* if we have a framerate pre-calculate duration */
3225 if (info->fps_n > 0 && info->fps_d > 0)
3226 {
3227 v4l2object->duration = gst_util_uint64_scale_int(GST_SECOND, info->fps_d,
3228 info->fps_n);
3229 }
3230 else
3231 {
3232 v4l2object->duration = GST_CLOCK_TIME_NONE;
3233 }
3234}
3235
3236gint gst_aml_v4l2_object_extrapolate_stride(const GstVideoFormatInfo *finfo,
3237 gint plane, gint stride)
3238{
3239 gint estride;
3240
3241 switch (finfo->format)
3242 {
3243 case GST_VIDEO_FORMAT_NV12:
3244 case GST_VIDEO_FORMAT_NV12_64Z32:
3245 case GST_VIDEO_FORMAT_NV21:
3246 case GST_VIDEO_FORMAT_NV16:
3247 case GST_VIDEO_FORMAT_NV61:
3248 case GST_VIDEO_FORMAT_NV24:
3249 estride = (plane == 0 ? 1 : 2) *
3250 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3251 break;
3252 default:
3253 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, plane, stride);
3254 break;
3255 }
3256
3257 return estride;
3258}
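
/* Example (worked): for NV12 with a 1280-byte luma stride, plane 0 keeps
 * 1280 and plane 1 (interleaved CbCr at half horizontal resolution) also
 * extrapolates to 2 * (1280 / 2) = 1280 bytes; with info being a GstVideoInfo
 * set up for NV12 (hypothetical):
 *
 *   gst_aml_v4l2_object_extrapolate_stride(info.finfo, 1, 1280); // 1280
 */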
3259
3260static gboolean
3261gst_aml_v4l2_video_colorimetry_matches(const GstVideoColorimetry *cinfo,
3262 const gchar *color)
3263{
3264 GstVideoColorimetry ci;
3265 static const GstVideoColorimetry ci_likely_jpeg = {
3266 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3267 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN};
3268 static const GstVideoColorimetry ci_jpeg = {
3269 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3270 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709};
3271
3272 if (!gst_video_colorimetry_from_string(&ci, color))
3273 return FALSE;
3274
3275 if (gst_video_colorimetry_is_equal(&ci, cinfo))
3276 return TRUE;
3277
3278 /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
3279 if (gst_video_colorimetry_is_equal(&ci, &ci_likely_jpeg) && gst_video_colorimetry_is_equal(cinfo, &ci_jpeg))
3280 return TRUE;
3281
3282 return FALSE;
3283}
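
/* Example (illustrative): jpegdec typically reports colorimetry "1:4:0:0"
 * (full range, BT.601, unknown transfer and primaries); if the device
 * expects the canonical JPEG colorimetry (full range, BT.601, sRGB, BT.709
 * primaries), the two are still considered a match:
 *
 *   gst_aml_v4l2_video_colorimetry_matches(&device_cinfo, "1:4:0:0"); // TRUE
 *
 * where device_cinfo holds the device-reported JPEG colorimetry. */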
3284
3285static void
3286set_amlogic_vdec_parm(GstAmlV4l2Object *v4l2object, struct v4l2_streamparm *streamparm)
3287{
3288 struct aml_dec_params *decParm = (struct aml_dec_params *)streamparm->parm.raw_data;
3289 const char *env;
3290
3291 decParm->cfg.metadata_config_flag = 1 << 13;
3292
3293 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3294 {
3295 env = getenv("V4L2_SET_AMLOGIC_DW_MODE");
3296 if (env)
3297 {
3298 int dwMode = atoi(env);
3299 switch (dwMode)
3300 {
3301 case 0:
3302 case 1:
3303 case 2:
3304 case 3:
3305 case 4:
3306 case 16:
3307 decParm->cfg.double_write_mode = dwMode;
3308 decParm->parms_status |= V4L2_CONFIG_PARM_DECODE_CFGINFO;
3309 break;
3310 }
3311 }
3312
3313 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_PARM, streamparm) < 0)
3314 {
3315 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "set vdec parm fail");
3316 }
3317 else
3318 {
3319 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Set dwMode to %d", decParm->cfg.double_write_mode);
3320 }
3321 }
3322}
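
/* Example (illustrative, application side): the double-write mode can be
 * forced through the environment before the decoder configures its OUTPUT
 * queue; setenv() is plain POSIX and only affects the current process:
 *
 *   setenv("V4L2_SET_AMLOGIC_DW_MODE", "16", 1);
 */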
3323
3324static gboolean
3325gst_aml_v4l2_object_set_format_full(GstAmlV4l2Object *v4l2object, GstCaps *caps,
3326 gboolean try_only, GstAmlV4l2Error *error)
3327{
3328 gint fd = v4l2object->video_fd;
3329 struct v4l2_format format;
3330 struct v4l2_streamparm streamparm;
3331 enum v4l2_field field;
3332 guint32 pixelformat;
3333 struct v4l2_fmtdesc *fmtdesc;
3334 GstVideoInfo info;
3335 GstVideoAlignment align;
3336 gint width, height, fps_n, fps_d;
3337 gint n_v4l_planes;
3338 gint i = 0;
3339 gboolean is_mplane;
3340 enum v4l2_colorspace colorspace = 0;
3341 enum v4l2_quantization range = 0;
3342 enum v4l2_ycbcr_encoding matrix = 0;
3343 enum v4l2_xfer_func transfer = 0;
3344 GstStructure *s;
3345 gboolean disable_colorimetry = FALSE;
3346
3347 g_return_val_if_fail(!v4l2object->skip_try_fmt_probes ||
3348 gst_caps_is_writable(caps),
3349 FALSE);
3350
3351 GST_AML_V4L2_CHECK_OPEN(v4l2object);
3352 if (!try_only)
3353 GST_AML_V4L2_CHECK_NOT_ACTIVE(v4l2object);
3354
3355 memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
3356 streamparm.type = v4l2object->type;
3357 set_amlogic_vdec_parm(v4l2object, &streamparm);
3358
3359 is_mplane = V4L2_TYPE_IS_MULTIPLANAR(v4l2object->type);
3360
3361 gst_video_info_init(&info);
3362 gst_video_alignment_reset(&align);
3363
3364 if (!gst_aml_v4l2_object_get_caps_info(v4l2object, caps, &fmtdesc, &info))
3365 goto invalid_caps;
3366
3367 pixelformat = fmtdesc->pixelformat;
3368 width = GST_VIDEO_INFO_WIDTH(&info);
3369 height = GST_VIDEO_INFO_HEIGHT(&info);
3370 fps_n = GST_VIDEO_INFO_FPS_N(&info);
3371 fps_d = GST_VIDEO_INFO_FPS_D(&info);
3372
3373 /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
3374 * or if contiguous is preferred */
3375 n_v4l_planes = GST_VIDEO_INFO_N_PLANES(&info);
3376 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3377 n_v4l_planes = 1;
3378
3379 if (GST_VIDEO_INFO_IS_INTERLACED(&info))
3380 {
3381 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "interlaced video");
3382 /* ideally we would differentiate between types of interlaced video
3383 * but there is not sufficient information in the caps..
3384 */
3385 field = V4L2_FIELD_INTERLACED;
3386 }
3387 else
3388 {
3389 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "progressive video");
3390 field = V4L2_FIELD_NONE;
3391 }
3392
3393 /* We first pick the main colorspace from the primaries */
3394 switch (info.colorimetry.primaries)
3395 {
3396 case GST_VIDEO_COLOR_PRIMARIES_BT709:
3397 /* There are two colorspaces using these primaries; use the range to
3398 * differentiate */
3399 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3400 colorspace = V4L2_COLORSPACE_REC709;
3401 else
3402 colorspace = V4L2_COLORSPACE_SRGB;
3403 break;
3404 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3405 colorspace = V4L2_COLORSPACE_BT2020;
3406 break;
3407 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3408 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3409 break;
3410 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3411 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3412 break;
3413 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3414 colorspace = V4L2_COLORSPACE_SMPTE170M;
3415 break;
3416 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3417 colorspace = V4L2_COLORSPACE_SMPTE240M;
3418 break;
3419
3420 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3421 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3422 /* We don't know, we will guess */
3423 break;
3424
3425 default:
3426 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3427 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3428 break;
3429 }
3430
3431 switch (info.colorimetry.range)
3432 {
3433 case GST_VIDEO_COLOR_RANGE_0_255:
3434 range = V4L2_QUANTIZATION_FULL_RANGE;
3435 break;
3436 case GST_VIDEO_COLOR_RANGE_16_235:
3437 range = V4L2_QUANTIZATION_LIM_RANGE;
3438 break;
3439 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3440 /* We let the driver pick a default one */
3441 break;
3442 default:
3443 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3444 "Unknown colorimetry range %d", info.colorimetry.range);
3445 break;
3446 }
3447
3448 switch (info.colorimetry.matrix)
3449 {
3450 case GST_VIDEO_COLOR_MATRIX_RGB:
3451 /* Unspecified, leave to default */
3452 break;
3453 /* FCC is about the same as BT601 with less digit */
3454 case GST_VIDEO_COLOR_MATRIX_FCC:
3455 case GST_VIDEO_COLOR_MATRIX_BT601:
3456 matrix = V4L2_YCBCR_ENC_601;
3457 break;
3458 case GST_VIDEO_COLOR_MATRIX_BT709:
3459 matrix = V4L2_YCBCR_ENC_709;
3460 break;
3461 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3462 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3463 break;
3464 case GST_VIDEO_COLOR_MATRIX_BT2020:
3465 matrix = V4L2_YCBCR_ENC_BT2020;
3466 break;
3467 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3468 /* We let the driver pick a default one */
3469 break;
3470 default:
3471 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3472 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3473 break;
3474 }
3475
3476 switch (info.colorimetry.transfer)
3477 {
3478 case GST_VIDEO_TRANSFER_GAMMA18:
3479 case GST_VIDEO_TRANSFER_GAMMA20:
3480 case GST_VIDEO_TRANSFER_GAMMA22:
3481 case GST_VIDEO_TRANSFER_GAMMA28:
3482 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3483 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3484 /* fallthrough */
3485 case GST_VIDEO_TRANSFER_GAMMA10:
3486 transfer = V4L2_XFER_FUNC_NONE;
3487 break;
3488 case GST_VIDEO_TRANSFER_BT2020_12:
3489 case GST_VIDEO_TRANSFER_BT709:
3490 transfer = V4L2_XFER_FUNC_709;
3491 break;
3492 case GST_VIDEO_TRANSFER_SMPTE240M:
3493 transfer = V4L2_XFER_FUNC_SMPTE240M;
3494 break;
3495 case GST_VIDEO_TRANSFER_SRGB:
3496 transfer = V4L2_XFER_FUNC_SRGB;
3497 break;
3498 case GST_VIDEO_TRANSFER_LOG100:
3499 case GST_VIDEO_TRANSFER_LOG316:
3500 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3501 "LOG 100, 316 transfer functions not supported");
3502 /* FIXME No known sensible default, maybe AdobeRGB ? */
3503 break;
3504 case GST_VIDEO_TRANSFER_UNKNOWN:
3505 /* We let the driver pick a default one */
3506 break;
3507 default:
3508 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3509 "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
3510 break;
3511 }
3512
3513 if (colorspace == 0)
3514 {
3515 /* Try to guess colorspace according to pixelformat and size */
3516 if (GST_VIDEO_INFO_IS_YUV(&info))
3517 {
3518 if (range == V4L2_QUANTIZATION_FULL_RANGE && matrix == V4L2_YCBCR_ENC_601 && transfer == 0)
3519 {
3520 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3521 * function most likely is JPEG */
3522 colorspace = V4L2_COLORSPACE_JPEG;
3523 transfer = V4L2_XFER_FUNC_SRGB;
3524 }
3525 else
3526 {
3527 /* SD streams likely use SMPTE170M and HD streams REC709 */
3528 if (width <= 720 && height <= 576)
3529 colorspace = V4L2_COLORSPACE_SMPTE170M;
3530 else
3531 colorspace = V4L2_COLORSPACE_REC709;
3532 }
3533 }
3534 else if (GST_VIDEO_INFO_IS_RGB(&info))
3535 {
3536 colorspace = V4L2_COLORSPACE_SRGB;
3537 transfer = V4L2_XFER_FUNC_NONE;
3538 }
3539 }
3540
3541 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format %dx%d, format "
3542 "%" GST_FOURCC_FORMAT " stride: %d",
3543 width, height,
3544 GST_FOURCC_ARGS(pixelformat), GST_VIDEO_INFO_PLANE_STRIDE(&info, 0));
3545
3546 memset(&format, 0x00, sizeof(struct v4l2_format));
3547 format.type = v4l2object->type;
3548
3549 if (is_mplane)
3550 {
3551 format.type = v4l2object->type;
3552 format.fmt.pix_mp.pixelformat = pixelformat;
3553 format.fmt.pix_mp.width = width;
3554 format.fmt.pix_mp.height = height;
3555 format.fmt.pix_mp.field = field;
3556 format.fmt.pix_mp.num_planes = n_v4l_planes;
3557
3558 /* try to ask for our preferred stride, but it's not a failure if it is
3559 * not accepted */
3560 for (i = 0; i < n_v4l_planes; i++)
3561 {
3562 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, i);
3563
3564 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3565 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3566
3567 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
3568 }
3569
3570 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3571 {
3572 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3573 format.fmt.pix_mp.plane_fmt[0].sizeimage = 1;
3574 else
3575 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3576 }
3577 }
3578 else
3579 {
3580 gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
3581
3582 format.type = v4l2object->type;
3583
3584 format.fmt.pix.width = width;
3585 format.fmt.pix.height = height;
3586 format.fmt.pix.pixelformat = pixelformat;
3587 format.fmt.pix.field = field;
3588
3589 if (GST_VIDEO_FORMAT_INFO_IS_TILED(info.finfo))
3590 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(info.finfo);
3591
3592 /* try to ask for our preferred stride */
3593 format.fmt.pix.bytesperline = stride;
3594
3595 if (GST_VIDEO_INFO_FORMAT(&info) == GST_VIDEO_FORMAT_ENCODED)
3596 {
3597 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
3598 format.fmt.pix.sizeimage = 1;
3599 else
3600 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
3601 }
3602 }
3603
3604 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired format is %dx%d, format "
3605 "%" GST_FOURCC_FORMAT ", nb planes %d",
3606 format.fmt.pix.width,
3607 format.fmt.pix_mp.height,
3608 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3609 is_mplane ? format.fmt.pix_mp.num_planes : 1);
3610
3611#ifndef GST_DISABLE_GST_DEBUG
3612 if (is_mplane)
3613 {
3614 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3615 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3616 format.fmt.pix_mp.plane_fmt[i].bytesperline);
3617 }
3618 else
3619 {
3620 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d",
3621 format.fmt.pix.bytesperline);
3622 }
3623#endif
3624
3625 if (is_mplane)
3626 {
3627 format.fmt.pix_mp.colorspace = colorspace;
3628 format.fmt.pix_mp.quantization = range;
3629 format.fmt.pix_mp.ycbcr_enc = matrix;
3630 format.fmt.pix_mp.xfer_func = transfer;
3631 }
3632 else
3633 {
3634 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
3635 format.fmt.pix.colorspace = colorspace;
3636 format.fmt.pix.quantization = range;
3637 format.fmt.pix.ycbcr_enc = matrix;
3638 format.fmt.pix.xfer_func = transfer;
3639 }
3640
3641 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
3642 colorspace, range, matrix, transfer);
3643
3644 if (try_only)
3645 {
3646 if (v4l2object->ioctl(fd, VIDIOC_TRY_FMT, &format) < 0)
3647 goto try_fmt_failed;
3648 }
3649 else
3650 {
3651 if (v4l2object->ioctl(fd, VIDIOC_S_FMT, &format) < 0)
3652 goto set_fmt_failed;
3653 }
3654
3655 if (is_mplane)
3656 {
3657 colorspace = format.fmt.pix_mp.colorspace;
3658 range = format.fmt.pix_mp.quantization;
3659 matrix = format.fmt.pix_mp.ycbcr_enc;
3660 transfer = format.fmt.pix_mp.xfer_func;
3661 }
3662 else
3663 {
3664 colorspace = format.fmt.pix.colorspace;
3665 range = format.fmt.pix.quantization;
3666 matrix = format.fmt.pix.ycbcr_enc;
3667 transfer = format.fmt.pix.xfer_func;
3668 }
3669
3670 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got format of %dx%d, format "
3671 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
3672 format.fmt.pix.width, format.fmt.pix_mp.height,
3673 GST_FOURCC_ARGS(format.fmt.pix.pixelformat),
3674 is_mplane ? format.fmt.pix_mp.num_planes : 1,
3675 colorspace, range, matrix, transfer);
3676
3677#ifndef GST_DISABLE_GST_DEBUG
3678 if (is_mplane)
3679 {
3680 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3681 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3682 format.fmt.pix_mp.plane_fmt[i].bytesperline,
3683 format.fmt.pix_mp.plane_fmt[i].sizeimage);
3684 }
3685 else
3686 {
3687 GST_DEBUG_OBJECT(v4l2object->dbg_obj, " stride %d, sizeimage %d",
3688 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
3689 }
3690#endif
3691
3692 if (format.fmt.pix.pixelformat != pixelformat)
3693 goto invalid_pixelformat;
3694
3695 /* Only negotiate size with raw data.
3696 * For some codecs the dimensions are *not* in the bitstream (IIRC VC1
3697 * in ASF mode, for example), and there is also no reason for a driver to
3698 * change the size. */
3699 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED)
3700 {
3701 /* We can crop larger images */
3702 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
3703 goto invalid_dimensions;
3704
3705 /* Note, this will be adjusted if upstream has non-centered cropping. */
3706 align.padding_top = 0;
3707 align.padding_bottom = format.fmt.pix.height - height;
3708 align.padding_left = 0;
3709 align.padding_right = format.fmt.pix.width - width;
3710 }
3711
3712 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
3713 goto invalid_planes;
3714
3715 /* used to check for the presence of the colorimetry and interlace-mode fields */
3716 s = gst_caps_get_structure(caps, 0);
3717
3718 if (!gst_aml_v4l2_object_get_interlace_mode(format.fmt.pix.field,
3719 &info.interlace_mode))
3720 goto invalid_field;
3721 if (gst_structure_has_field(s, "interlace-mode"))
3722 {
3723 if (format.fmt.pix.field != field)
3724 goto invalid_field;
3725 }
3726
3727 if (gst_aml_v4l2_object_get_colorspace(&format, &info.colorimetry))
3728 {
3729 if (gst_structure_has_field(s, "colorimetry"))
3730 {
3731 if (!gst_aml_v4l2_video_colorimetry_matches(&info.colorimetry,
3732 gst_structure_get_string(s, "colorimetry")))
3733 goto invalid_colorimetry;
3734 }
3735 }
3736 else
3737 {
3738 /* The driver (or libv4l2) is misbehaving; just ignore the colorimetry from
3739 * the TRY_FMT */
3740 disable_colorimetry = TRUE;
3741 if (gst_structure_has_field(s, "colorimetry"))
3742 gst_structure_remove_field(s, "colorimetry");
3743 }
3744
3745 /* In case we have skipped the try_fmt probes, we'll need to set the
3746 * colorimetry and interlace-mode back into the caps. */
3747 if (v4l2object->skip_try_fmt_probes)
3748 {
3749 if (!disable_colorimetry && !gst_structure_has_field(s, "colorimetry"))
3750 {
3751 gchar *str = gst_video_colorimetry_to_string(&info.colorimetry);
3752 gst_structure_set(s, "colorimetry", G_TYPE_STRING, str, NULL);
3753 g_free(str);
3754 }
3755
3756 if (!gst_structure_has_field(s, "interlace-mode"))
3757 gst_structure_set(s, "interlace-mode", G_TYPE_STRING,
3758 gst_video_interlace_mode_to_string(info.interlace_mode), NULL);
3759 }
3760
3761 if (try_only) /* good enough for trying only */
3762 return TRUE;
3763
3764 if (GST_VIDEO_INFO_HAS_ALPHA(&info))
3765 {
3766 struct v4l2_control ctl = {
3767 0,
3768 };
3769 ctl.id = V4L2_CID_ALPHA_COMPONENT;
3770 ctl.value = 0xff;
3771
3772 if (v4l2object->ioctl(fd, VIDIOC_S_CTRL, &ctl) < 0)
3773 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3774 "Failed to set alpha component value");
3775 }
3776
3777 /* Is there a reason we require the caller to always specify a framerate? */
3778 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
3779 fps_d);
3780
3781 if (v4l2object->ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
3782 goto get_parm_failed;
3783
3784 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
3785 {
3786 GST_VIDEO_INFO_FPS_N(&info) =
3787 streamparm.parm.capture.timeperframe.denominator;
3788 GST_VIDEO_INFO_FPS_D(&info) =
3789 streamparm.parm.capture.timeperframe.numerator;
3790
3791 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got capture framerate: %u/%u",
3792 streamparm.parm.capture.timeperframe.denominator,
3793 streamparm.parm.capture.timeperframe.numerator);
3794
3795 /* We used to skip frame rate setup if the camera was already set up
3796 * with the requested frame rate. This breaks some cameras though,
3797 * causing them to not output data (several models of Thinkpad cameras
3798 * have this problem at least).
3799 * So, don't skip. */
3800 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
3801 fps_n, fps_d);
3802 /* We want to change the frame rate, so check whether we can. Some cheap USB
3803 * cameras don't have the capability */
3804 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0)
3805 {
3806 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3807 "Not setting capture framerate (not supported)");
3808 goto done;
3809 }
3810
3811 /* Note: V4L2 wants the frame interval, we have the frame rate */
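 /* e.g. a caps framerate of 30000/1001 (~29.97 fps) is programmed as a
 * timeperframe of 1001/30000 */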
3812 streamparm.parm.capture.timeperframe.numerator = fps_d;
3813 streamparm.parm.capture.timeperframe.denominator = fps_n;
3814
3815 /* some cheap USB cams won't accept any change */
3816 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
3817 goto set_parm_failed;
3818
3819 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
3820 streamparm.parm.capture.timeperframe.denominator > 0)
3821 {
3822 /* get new values */
3823 fps_d = streamparm.parm.capture.timeperframe.numerator;
3824 fps_n = streamparm.parm.capture.timeperframe.denominator;
3825
3826 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set capture framerate to %u/%u",
3827 fps_n, fps_d);
3828 }
3829 else
3830 {
3831 /* fix v4l2 capture driver to provide framerate values */
3832 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3833 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
3834 }
3835
3836 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
3837 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
3838 }
3839 else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
3840 {
3841 GST_VIDEO_INFO_FPS_N(&info) =
3842 streamparm.parm.output.timeperframe.denominator;
3843 GST_VIDEO_INFO_FPS_D(&info) =
3844 streamparm.parm.output.timeperframe.numerator;
3845
3846 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Got output framerate: %u/%u",
3847 streamparm.parm.output.timeperframe.denominator,
3848 streamparm.parm.output.timeperframe.numerator);
3849
3850 GST_LOG_OBJECT(v4l2object->dbg_obj, "Setting output framerate to %u/%u",
3851 fps_n, fps_d);
3852 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0)
3853 {
3854 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
3855 "Not setting output framerate (not supported)");
3856 goto done;
3857 }
3858
3859 /* Note: V4L2 wants the frame interval, we have the frame rate */
3860 streamparm.parm.output.timeperframe.numerator = fps_d;
3861 streamparm.parm.output.timeperframe.denominator = fps_n;
3862
3863 if (v4l2object->ioctl(fd, VIDIOC_S_PARM, &streamparm) < 0)
3864 goto set_parm_failed;
3865
3866 if (streamparm.parm.output.timeperframe.numerator > 0 &&
3867 streamparm.parm.output.timeperframe.denominator > 0)
3868 {
3869 /* get new values */
3870 fps_d = streamparm.parm.output.timeperframe.numerator;
3871 fps_n = streamparm.parm.output.timeperframe.denominator;
3872
3873 GST_INFO_OBJECT(v4l2object->dbg_obj, "Set output framerate to %u/%u",
3874 fps_n, fps_d);
3875 }
3876 else
3877 {
3878 /* fix v4l2 output driver to provide framerate values */
3879 GST_WARNING_OBJECT(v4l2object->dbg_obj,
3880 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
3881 }
3882
3883 GST_VIDEO_INFO_FPS_N(&info) = fps_n;
3884 GST_VIDEO_INFO_FPS_D(&info) = fps_d;
3885 }
3886
3887done:
3888 /* add boolean return, so we can fail on driver bugs */
3889 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &format, &info, &align);
3890
3891 /* now configure the pool */
3892 if (!gst_aml_v4l2_object_setup_pool(v4l2object, caps))
3893 goto pool_failed;
3894
3895 return TRUE;
3896
3897 /* ERRORS */
3898invalid_caps:
3899{
3900 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
3901 caps);
3902
3903 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3904 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
3905 return FALSE;
3906}
3907try_fmt_failed:
3908{
3909 if (errno == EINVAL)
3910 {
3911 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3912 (_("Device '%s' has no supported format"), v4l2object->videodev),
3913 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
3914 GST_FOURCC_ARGS(pixelformat), width, height,
3915 g_strerror(errno)));
3916 }
3917 else
3918 {
3919 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
3920 (_("Device '%s' failed during initialization"),
3921 v4l2object->videodev),
3922 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
3923 GST_FOURCC_ARGS(pixelformat), width, height,
3924 g_strerror(errno)));
3925 }
3926 return FALSE;
3927}
3928set_fmt_failed:
3929{
3930 if (errno == EBUSY)
3931 {
3932 GST_AML_V4L2_ERROR(error, RESOURCE, BUSY,
3933 (_("Device '%s' is busy"), v4l2object->videodev),
3934 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
3935 GST_FOURCC_ARGS(pixelformat), width, height,
3936 g_strerror(errno)));
3937 }
3938 else if (errno == EINVAL)
3939 {
3940 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3941 (_("Device '%s' has no supported format"), v4l2object->videodev),
3942 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
3943 GST_FOURCC_ARGS(pixelformat), width, height,
3944 g_strerror(errno)));
3945 }
3946 else
3947 {
3948 GST_AML_V4L2_ERROR(error, RESOURCE, FAILED,
3949 (_("Device '%s' failed during initialization"),
3950 v4l2object->videodev),
3951 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
3952 GST_FOURCC_ARGS(pixelformat), width, height,
3953 g_strerror(errno)));
3954 }
3955 return FALSE;
3956}
3957invalid_dimensions:
3958{
3959 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3960 (_("Device '%s' cannot capture at %dx%d"),
3961 v4l2object->videodev, width, height),
3962 ("Tried to capture at %dx%d, but device returned size %dx%d",
3963 width, height, format.fmt.pix.width, format.fmt.pix.height));
3964 return FALSE;
3965}
3966invalid_pixelformat:
3967{
3968 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3969 (_("Device '%s' cannot capture in the specified format"),
3970 v4l2object->videodev),
3971 ("Tried to capture in %" GST_FOURCC_FORMAT
3972 ", but device returned format"
3973 " %" GST_FOURCC_FORMAT,
3974 GST_FOURCC_ARGS(pixelformat),
3975 GST_FOURCC_ARGS(format.fmt.pix.pixelformat)));
3976 return FALSE;
3977}
3978invalid_planes:
3979{
3980 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3981 (_("Device '%s' does not support non-contiguous planes"),
3982 v4l2object->videodev),
3983 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
3984 return FALSE;
3985}
3986invalid_field:
3987{
3988 enum v4l2_field wanted_field;
3989
3990 if (is_mplane)
3991 wanted_field = format.fmt.pix_mp.field;
3992 else
3993 wanted_field = format.fmt.pix.field;
3994
3995 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
3996 (_("Device '%s' does not support %s interlacing"),
3997 v4l2object->videodev,
3998 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
3999 ("Device wants %s interlacing",
4000 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4001 return FALSE;
4002}
4003invalid_colorimetry:
4004{
4005 gchar *wanted_colorimetry;
4006
4007 wanted_colorimetry = gst_video_colorimetry_to_string(&info.colorimetry);
4008
4009 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4010 (_("Device '%s' does not support %s colorimetry"),
4011 v4l2object->videodev, gst_structure_get_string(s, "colorimetry")),
4012 ("Device wants %s colorimetry", wanted_colorimetry));
4013
4014 g_free(wanted_colorimetry);
4015 return FALSE;
4016}
4017get_parm_failed:
4018{
4019 /* it's possible that this call is not supported */
4020 if (errno != EINVAL && errno != ENOTTY)
4021 {
4022 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4023 (_("Could not get parameters on device '%s'"),
4024 v4l2object->videodev),
4025 GST_ERROR_SYSTEM);
4026 }
4027 goto done;
4028}
4029set_parm_failed:
4030{
4031 GST_AML_V4L2_ERROR(error, RESOURCE, SETTINGS,
4032 (_("Video device did not accept new frame rate setting.")),
4033 GST_ERROR_SYSTEM);
4034 goto done;
4035}
4036pool_failed:
4037{
4038 /* setup_pool already sent the error */
4039 return FALSE;
4040}
4041}
4042
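/* gst_aml_v4l2_object_set_format() and gst_aml_v4l2_object_try_format() below
 * are thin wrappers around gst_aml_v4l2_object_set_format_full(): the former
 * applies the caps with VIDIOC_S_FMT, the latter only validates them with
 * VIDIOC_TRY_FMT (try_only = TRUE). */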
4043gboolean
4044gst_aml_v4l2_object_set_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4045 GstAmlV4l2Error *error)
4046{
4047 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4048 caps);
4049 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, FALSE, error);
4050}
4051
4052gboolean
4053gst_aml_v4l2_object_try_format(GstAmlV4l2Object *v4l2object, GstCaps *caps,
4054 GstAmlV4l2Error *error)
4055{
4056 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4057 caps);
4058 return gst_aml_v4l2_object_set_format_full(v4l2object, caps, TRUE, error);
4059}
4060
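/* Block until the device reports activity. Returns GST_FLOW_FLUSHING when the
 * poll object is set flushing, GST_FLOW_ERROR on unexpected poll failures, and
 * permanently disables polling (can_poll_device = FALSE) when the driver
 * returns ENXIO. */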
4061GstFlowReturn
4062gst_aml_v4l2_object_poll(GstAmlV4l2Object *v4l2object)
4063{
4064 gint ret;
4065
4066 if (!v4l2object->can_poll_device)
4067 goto done;
4068
4069 GST_LOG_OBJECT(v4l2object, "polling device");
4070
4071again:
4072 ret = gst_poll_wait(v4l2object->poll, GST_CLOCK_TIME_NONE);
4073 if (G_UNLIKELY(ret < 0))
4074 {
4075 switch (errno)
4076 {
4077 case EBUSY:
4078 goto stopped;
4079 case EAGAIN:
4080 case EINTR:
4081 goto again;
4082 case ENXIO:
4083 GST_WARNING_OBJECT(v4l2object,
4084 "v4l2 device doesn't support polling. Disabling"
4085 " using libv4l2 in this case may cause deadlocks");
4086 v4l2object->can_poll_device = FALSE;
4087 goto done;
4088 default:
4089 goto select_error;
4090 }
4091 }
4092
4093done:
4094 return GST_FLOW_OK;
4095
4096 /* ERRORS */
4097stopped:
4098{
4099 GST_DEBUG_OBJECT(v4l2object, "stop called");
4100 return GST_FLOW_FLUSHING;
4101}
4102select_error:
4103{
4104 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, READ, (NULL),
4105 ("poll error %d: %s (%d)", ret, g_strerror(errno), errno));
4106 return GST_FLOW_ERROR;
4107}
4108}
4109
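/* Poll for and dequeue one pending V4L2 event. V4L2_EVENT_SOURCE_CHANGE and
 * V4L2_EVENT_EOS are translated into the custom GST_AML_V4L2_FLOW_SOURCE_CHANGE
 * and GST_AML_V4L2_FLOW_LAST_BUFFER returns; any other event yields
 * GST_FLOW_OK. */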
4110GstFlowReturn
4111gst_aml_v4l2_object_dqevent(GstAmlV4l2Object *v4l2object)
4112{
4113 GstFlowReturn res;
4114 struct v4l2_event evt;
4115
4116 if ((res = gst_aml_v4l2_object_poll(v4l2object)) != GST_FLOW_OK)
4117 goto poll_failed;
4118
4119 memset(&evt, 0x00, sizeof(struct v4l2_event));
4120 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DQEVENT, &evt) < 0)
4121 goto dqevent_failed;
4122
4123 switch (evt.type)
4124 {
4125 case V4L2_EVENT_SOURCE_CHANGE:
4126 return GST_AML_V4L2_FLOW_SOURCE_CHANGE;
4127 break;
4128 case V4L2_EVENT_EOS:
4129 return GST_AML_V4L2_FLOW_LAST_BUFFER;
4130 break;
4131 default:
4132 break;
4133 }
4134
4135 return GST_FLOW_OK;
4136
4137 /* ERRORS */
4138poll_failed:
4139{
4140 GST_DEBUG_OBJECT(v4l2object, "poll error %s", gst_flow_get_name(res));
4141 return res;
4142}
4143dqevent_failed:
4144{
4145 return GST_FLOW_ERROR;
4146}
4147}
4148
4149/**
4150 * gst_aml_v4l2_object_acquire_format:
4151 * @v4l2object: the object
4152 * @info: a #GstVideoInfo to be filled
4153 *
4154 * Acquire the format chosen by the driver. This is useful in decoder or encoder elements where
4155 * the output format is chosen by the HW.
4156 *
4157 * Returns: %TRUE on success, %FALSE on failure.
4158 */
4159gboolean
4160gst_aml_v4l2_object_acquire_format(GstAmlV4l2Object *v4l2object, GstVideoInfo *info)
4161{
4162 struct v4l2_fmtdesc *fmtdesc;
4163 struct v4l2_format fmt;
4164 struct v4l2_crop crop;
4165 struct v4l2_selection sel;
4166 struct v4l2_rect *r = NULL;
4167 GstVideoFormat format;
4168 guint width, height;
4169 GstVideoAlignment align;
4170
4171 gst_video_info_init(info);
4172 gst_video_alignment_reset(&align);
4173
4174 memset(&fmt, 0x00, sizeof(struct v4l2_format));
4175 fmt.type = v4l2object->type;
4176 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4177 goto get_fmt_failed;
4178
4179 fmtdesc = gst_aml_v4l2_object_get_format_from_fourcc(v4l2object,
4180 fmt.fmt.pix.pixelformat);
4181 if (fmtdesc == NULL)
4182 goto unsupported_format;
4183
4184 /* No need to care about mplane, the first four params are the same */
4185 format = gst_aml_v4l2_object_v4l2fourcc_to_video_format(fmt.fmt.pix.pixelformat);
4186
4187 /* fails if we cannot translate the fmt.pix.pixelformat to a GstVideoFormat */
4188 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4189 goto unsupported_format;
4190
4191 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4192 goto invalid_dimensions;
4193
4194 width = fmt.fmt.pix.width;
4195 height = fmt.fmt.pix.height;
4196
4197 /* Use the default compose rectangle */
4198 memset(&sel, 0, sizeof(struct v4l2_selection));
4199 sel.type = v4l2object->type;
4200 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4201 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
4202 {
4203 r = &sel.r;
4204 }
4205 else
4206 {
4207 /* For ancient kernels, fall back to G_CROP */
4208 memset(&crop, 0, sizeof(struct v4l2_crop));
4209 crop.type = v4l2object->type;
4210 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4211 r = &crop.c;
4212 }
4213 if (r)
4214 {
4215 align.padding_left = r->left;
4216 align.padding_top = r->top;
4217 align.padding_right = width - r->width - r->left;
4218 align.padding_bottom = height - r->height - r->top;
4219 width = r->width;
4220 height = r->height;
4221 }
4222
4223 gst_video_info_set_format(info, format, width, height);
4224
4225 switch (fmt.fmt.pix.field)
4226 {
4227 case V4L2_FIELD_ANY:
4228 case V4L2_FIELD_NONE:
4229 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4230 break;
4231 case V4L2_FIELD_INTERLACED:
4232 case V4L2_FIELD_INTERLACED_TB:
4233 case V4L2_FIELD_INTERLACED_BT:
4234 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4235 break;
4236 default:
4237 goto unsupported_field;
4238 }
4239
4240 gst_aml_v4l2_object_get_colorspace(&fmt, &info->colorimetry);
4241
4242 gst_aml_v4l2_object_save_format(v4l2object, fmtdesc, &fmt, info, &align);
4243
4244 /* Shall we set up the pool? */
4245
4246 return TRUE;
4247
4248get_fmt_failed:
4249{
4250 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4251 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4252 return FALSE;
4253}
4254invalid_dimensions:
4255{
4256 GST_ELEMENT_WARNING(v4l2object->element, RESOURCE, SETTINGS,
4257 (_("Video device returned invalid dimensions.")),
4258 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4259 fmt.fmt.pix.height));
4260 return FALSE;
4261}
4262unsupported_field:
4263{
4264 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4265 (_("Video device uses an unsupported interlacing method.")),
4266 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4267 return FALSE;
4268}
4269unsupported_format:
4270{
4271 GST_ELEMENT_ERROR(v4l2object->element, RESOURCE, SETTINGS,
4272 (_("Video device uses an unsupported pixel format.")),
4273 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4274 GST_FOURCC_ARGS(fmt.fmt.pix.pixelformat)));
4275 return FALSE;
4276}
4277}
4278
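/* Program the crop rectangle derived from the negotiated padding into the
 * driver, preferring VIDIOC_S_SELECTION and falling back to
 * VIDIOC_S_CROP/VIDIOC_G_CROP on drivers that return ENOTTY. Returns TRUE
 * immediately when no padding is configured. */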
4279gboolean
4280gst_aml_v4l2_object_set_crop(GstAmlV4l2Object *obj)
4281{
4282 struct v4l2_selection sel = {0};
4283 struct v4l2_crop crop = {0};
4284
4285 sel.type = obj->type;
4286 sel.target = V4L2_SEL_TGT_CROP;
4287 sel.flags = 0;
4288 sel.r.left = obj->align.padding_left;
4289 sel.r.top = obj->align.padding_top;
4290 sel.r.width = obj->info.width;
4291 sel.r.height = obj->info.height;
4292
4293 crop.type = obj->type;
4294 crop.c = sel.r;
4295
4296 if (obj->align.padding_left + obj->align.padding_top +
4297 obj->align.padding_right + obj->align.padding_bottom ==
4298 0)
4299 {
4300 GST_DEBUG_OBJECT(obj->dbg_obj, "no cropping needed");
4301 return TRUE;
4302 }
4303
4304 GST_DEBUG_OBJECT(obj->dbg_obj,
4305 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4306 crop.c.width, crop.c.height);
4307
4308 if (obj->ioctl(obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0)
4309 {
4310 if (errno != ENOTTY)
4311 {
4312 GST_WARNING_OBJECT(obj->dbg_obj,
4313 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4314 g_strerror(errno));
4315 return FALSE;
4316 }
4317 else
4318 {
4319 if (obj->ioctl(obj->video_fd, VIDIOC_S_CROP, &crop) < 0)
4320 {
4321 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_S_CROP failed");
4322 return FALSE;
4323 }
4324
4325 if (obj->ioctl(obj->video_fd, VIDIOC_G_CROP, &crop) < 0)
4326 {
4327 GST_WARNING_OBJECT(obj->dbg_obj, "VIDIOC_G_CROP failed");
4328 return FALSE;
4329 }
4330
4331 sel.r = crop.c;
4332 }
4333 }
4334
4335 GST_DEBUG_OBJECT(obj->dbg_obj,
4336 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4337 crop.c.width, crop.c.height);
4338
4339 return TRUE;
4340}
4341
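/* The next helpers compare @caps against the caps currently configured on the
 * buffer pool; they return FALSE (or NULL) when no pool has been created yet. */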
4342gboolean
4343gst_aml_v4l2_object_caps_equal(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4344{
4345 GstStructure *config;
4346 GstCaps *oldcaps;
4347 gboolean ret;
4348
4349 if (!v4l2object->pool)
4350 return FALSE;
4351
4352 config = gst_buffer_pool_get_config(v4l2object->pool);
4353 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4354
4355 ret = oldcaps && gst_caps_is_equal(caps, oldcaps);
4356
4357 gst_structure_free(config);
4358
4359 return ret;
4360}
4361
4362gboolean
4363gst_aml_v4l2_object_caps_is_subset(GstAmlV4l2Object *v4l2object, GstCaps *caps)
4364{
4365 GstStructure *config;
4366 GstCaps *oldcaps;
4367 gboolean ret;
4368
4369 if (!v4l2object->pool)
4370 return FALSE;
4371
4372 config = gst_buffer_pool_get_config(v4l2object->pool);
4373 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4374
4375 ret = oldcaps && gst_caps_is_subset(oldcaps, caps);
4376
4377 gst_structure_free(config);
4378
4379 return ret;
4380}
4381
4382GstCaps *
4383gst_aml_v4l2_object_get_current_caps(GstAmlV4l2Object *v4l2object)
4384{
4385 GstStructure *config;
4386 GstCaps *oldcaps;
4387
4388 if (!v4l2object->pool)
4389 return NULL;
4390
4391 config = gst_buffer_pool_get_config(v4l2object->pool);
4392 gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
4393
4394 if (oldcaps)
4395 gst_caps_ref(oldcaps);
4396
4397 gst_structure_free(config);
4398
4399 return oldcaps;
4400}
4401
4402gboolean
4403gst_aml_v4l2_object_unlock(GstAmlV4l2Object *v4l2object)
4404{
4405 gboolean ret = TRUE;
4406
4407 GST_LOG_OBJECT(v4l2object->dbg_obj, "start flushing");
4408
4409 gst_poll_set_flushing(v4l2object->poll, TRUE);
4410
4411 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4412 gst_buffer_pool_set_flushing(v4l2object->pool, TRUE);
4413
4414 return ret;
4415}
4416
4417gboolean
4418gst_aml_v4l2_object_unlock_stop(GstAmlV4l2Object *v4l2object)
4419{
4420 gboolean ret = TRUE;
4421
4422 GST_LOG_OBJECT(v4l2object->dbg_obj, "stop flushing");
4423
4424 if (v4l2object->pool && gst_buffer_pool_is_active(v4l2object->pool))
4425 gst_buffer_pool_set_flushing(v4l2object->pool, FALSE);
4426
4427 gst_poll_set_flushing(v4l2object->poll, FALSE);
4428
4429 return ret;
4430}
4431
4432gboolean
4433gst_aml_v4l2_object_stop(GstAmlV4l2Object *v4l2object)
4434{
4435 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "stopping");
4436
4437 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
4438 goto done;
4439 if (!GST_AML_V4L2_IS_ACTIVE(v4l2object))
4440 goto done;
4441
4442 if (v4l2object->pool)
4443 {
4444 if (!gst_aml_v4l2_buffer_pool_orphan(&v4l2object->pool))
4445 {
4446 GST_DEBUG_OBJECT(v4l2object->dbg_obj, "deactivating pool");
4447 gst_buffer_pool_set_active(v4l2object->pool, FALSE);
4448 gst_object_unref(v4l2object->pool);
4449 }
4450 v4l2object->pool = NULL;
4451 }
4452
4453 GST_AML_V4L2_SET_INACTIVE(v4l2object);
4454
4455done:
4456 return TRUE;
4457}
4458
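/* Enumerate every pixel format supported by the device and build the matching
 * caps. Each probed structure is appended twice: once for system memory and
 * once with the DMABuf caps feature. The optional @filter is used both to skip
 * probing of non-intersecting formats and to intersect the final result. */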
4459GstCaps *
4460gst_aml_v4l2_object_probe_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4461{
4462 GstCaps *ret;
4463 GSList *walk;
4464 GSList *formats;
4465
4466 formats = gst_aml_v4l2_object_get_format_list(v4l2object);
4467
4468 ret = gst_caps_new_empty();
4469
4470 if (v4l2object->keep_aspect && !v4l2object->par)
4471 {
4472 struct v4l2_cropcap cropcap;
4473
4474 memset(&cropcap, 0, sizeof(cropcap));
4475
4476 cropcap.type = v4l2object->type;
4477 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
4478 {
4479 if (errno != ENOTTY)
4480 GST_WARNING_OBJECT(v4l2object->dbg_obj,
4481 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4482 g_strerror(errno));
4483 }
4484 else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator)
4485 {
4486 v4l2object->par = g_new0(GValue, 1);
4487 g_value_init(v4l2object->par, GST_TYPE_FRACTION);
4488 gst_value_set_fraction(v4l2object->par, cropcap.pixelaspect.numerator,
4489 cropcap.pixelaspect.denominator);
4490 }
4491 }
4492
4493 for (walk = formats; walk; walk = walk->next)
4494 {
4495 struct v4l2_fmtdesc *format;
4496 GstStructure *template;
4497 GstCaps *tmp, *tmp2;
4498
4499 format = (struct v4l2_fmtdesc *)walk->data;
4500
4501 template = gst_aml_v4l2_object_v4l2fourcc_to_bare_struct(format->pixelformat);
4502
4503 if (!template)
4504 {
4505 GST_DEBUG_OBJECT(v4l2object->dbg_obj,
4506 "unknown format %" GST_FOURCC_FORMAT,
4507 GST_FOURCC_ARGS(format->pixelformat));
4508 continue;
4509 }
4510
4511 /* If we have a filter, check if we need to probe this format or not */
4512 if (filter)
4513 {
4514 GstCaps *format_caps = gst_caps_new_empty();
4515
4516 gst_caps_append_structure(format_caps, gst_structure_copy(template));
4517
4518 if (!gst_caps_can_intersect(format_caps, filter))
4519 {
4520 gst_caps_unref(format_caps);
4521 gst_structure_free(template);
4522 continue;
4523 }
4524
4525 gst_caps_unref(format_caps);
4526 }
4527
4528 tmp = gst_aml_v4l2_object_probe_caps_for_format(v4l2object,
4529 format->pixelformat, template);
4530
4531 if (tmp)
4532 {
4533 tmp2 = gst_caps_copy(tmp);
4534 gst_caps_set_features_simple(tmp2, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
4535 gst_caps_append(ret, tmp);
4536 gst_caps_append(ret, tmp2);
4537 }
4538
4539 gst_structure_free(template);
4540 }
4541
4542 if (filter)
4543 {
4544 GstCaps *tmp;
4545
4546 tmp = ret;
4547 ret = gst_caps_intersect_full(filter, ret, GST_CAPS_INTERSECT_FIRST);
4548 gst_caps_unref(tmp);
4549 }
4550
4551 GST_INFO_OBJECT(v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
4552
4553 return ret;
4554}
4555
4556GstCaps *
4557gst_aml_v4l2_object_get_caps(GstAmlV4l2Object *v4l2object, GstCaps *filter)
4558{
4559 GstCaps *ret;
4560
4561 if (v4l2object->probed_caps == NULL)
4562 v4l2object->probed_caps = gst_aml_v4l2_object_probe_caps(v4l2object, NULL);
4563
4564 if (filter)
4565 {
4566 ret = gst_caps_intersect_full(filter, v4l2object->probed_caps,
4567 GST_CAPS_INTERSECT_FIRST);
4568 }
4569 else
4570 {
4571 ret = gst_caps_ref(v4l2object->probed_caps);
4572 }
4573
4574 return ret;
4575}
4576
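/* Answer a downstream ALLOCATION query on the capture side: choose between our
 * own V4L2 pool and a downstream pool depending on the io-mode, work out the
 * buffer counts, configure the selected pool(s) and write the result back into
 * @query. */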
4577gboolean
4578gst_aml_v4l2_object_decide_allocation(GstAmlV4l2Object *obj, GstQuery *query)
4579{
4580 GstCaps *caps;
4581 GstBufferPool *pool = NULL, *other_pool = NULL;
4582 GstStructure *config;
4583 guint size, min, max, own_min = 0;
4584 gboolean update;
4585 gboolean has_video_meta;
4586 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
4587 GstAllocator *allocator = NULL;
4588 GstAllocationParams params = {0};
4589
4590 GST_DEBUG_OBJECT(obj->dbg_obj, "decide allocation");
4591
4592 g_return_val_if_fail(obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
4593 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
4594 FALSE);
4595
4596 gst_query_parse_allocation(query, &caps, NULL);
4597
4598 if (obj->pool == NULL)
4599 {
4600 if (!gst_aml_v4l2_object_setup_pool(obj, caps))
4601 goto pool_failed;
4602 }
4603
4604 if (gst_query_get_n_allocation_params(query) > 0)
4605 gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
4606
4607 if (gst_query_get_n_allocation_pools(query) > 0)
4608 {
4609 gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
4610 update = TRUE;
4611 }
4612 else
4613 {
4614 pool = NULL;
4615 min = max = 0;
4616 size = 0;
4617 update = FALSE;
4618 }
4619
4620 GST_DEBUG_OBJECT(obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool);
4621
4622 has_video_meta =
4623 gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
4624
4625 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
4626
4627 gst_aml_v4l2_get_driver_min_buffers(obj);
4628 /* We can't share our own pool if it would exceed the V4L2 capacity */
4629 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
4630 can_share_own_pool = FALSE;
4631
4632 /* select a pool */
4633 switch (obj->mode)
4634 {
4635 case GST_V4L2_IO_RW:
4636 if (pool)
4637 {
4638 /* in READ/WRITE mode, prefer a downstream pool because our own pool
4639 * doesn't help much; we would have to write into it as well */
4640 GST_DEBUG_OBJECT(obj->dbg_obj,
4641 "read/write mode: using downstream pool");
4642 /* use the biggest size; when we use our own pool we can't really use any
4643 * size other than what the hardware gives us, but for downstream pools
4644 * we can try */
4645 size = MAX(size, obj->info.size);
4646 }
4647 else if (can_share_own_pool)
4648 {
4649 /* no downstream pool, use our own then */
4650 GST_DEBUG_OBJECT(obj->dbg_obj,
4651 "read/write mode: no downstream pool, using our own");
4652 pool = gst_object_ref(obj->pool);
4653 size = obj->info.size;
4654 pushing_from_our_pool = TRUE;
4655 }
4656 break;
4657
4658 case GST_V4L2_IO_USERPTR:
4659 case GST_V4L2_IO_DMABUF_IMPORT:
4660 /* in importing mode, prefer our own pool, and pass the other pool to
4661 * our own, so it can serve itself */
4662 if (pool == NULL)
4663 goto no_downstream_pool;
4664 gst_aml_v4l2_buffer_pool_set_other_pool(GST_AML_V4L2_BUFFER_POOL(obj->pool),
4665 pool);
4666 other_pool = pool;
4667 gst_object_unref(pool);
4668 pool = gst_object_ref(obj->pool);
4669 size = obj->info.size;
4670 break;
4671
4672 case GST_V4L2_IO_MMAP:
4673 case GST_V4L2_IO_DMABUF:
4674 /* in streaming mode, prefer our own pool */
4675 /* Check if we can use it ... */
4676 if (can_share_own_pool)
4677 {
4678 if (pool)
4679 gst_object_unref(pool);
4680 pool = gst_object_ref(obj->pool);
4681 size = obj->info.size;
4682 GST_DEBUG_OBJECT(obj->dbg_obj,
4683 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
4684 pushing_from_our_pool = TRUE;
4685 }
4686 else if (pool)
4687 {
4688 GST_DEBUG_OBJECT(obj->dbg_obj,
4689 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
4690 pool);
4691 }
4692 else
4693 {
4694 GST_DEBUG_OBJECT(obj->dbg_obj,
4695 "streaming mode: no usable pool, copying to generic pool");
4696 size = MAX(size, obj->info.size);
4697 }
4698 break;
4699 case GST_V4L2_IO_AUTO:
4700 default:
4701 GST_WARNING_OBJECT(obj->dbg_obj, "unhandled mode");
4702 break;
4703 }
4704
4705 if (size == 0)
4706 goto no_size;
4707
4708 /* If pushing from our own pool, configure it with queried minimum,
4709 * otherwise use the minimum required */
4710 if (pushing_from_our_pool)
4711 {
4712 /* When pushing from our own pool, we need what the downstream pool asked
4713 * for (to be able to fill the pipeline), plus the minimum the driver
4714 * requires for decoding, plus 2 more, so we don't end up with everything
4715 * downstream or held by the decoder. We account 2 buffers for v4l2 so that
4716 * while one is being pushed downstream the other one can already be queued
4717 * for the next frame. */
4718 own_min = min + obj->min_buffers + 2;
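 /* e.g. downstream asked for min = 4 and the driver reports 6 required
 * buffers: own_min = 4 + 6 + 2 = 12 */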
4719
4720 /* If no allocation parameters were provided, allow for a few more
4721 * buffers and enable the copy threshold */
4722 if (!update)
4723 {
4724 own_min += 2;
4725 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4726 TRUE);
4727 }
4728 else
4729 {
4730 gst_aml_v4l2_buffer_pool_copy_at_threshold(GST_AML_V4L2_BUFFER_POOL(pool),
4731 FALSE);
4732 }
4733 }
4734 else
4735 {
4736 /* In this case we'll have to configure two buffer pools. For our buffer
4737 * pool, we'll need what the driver requires, and one more, so we can dequeue */
4738 own_min = obj->min_buffers + 1;
4739 own_min = MAX(own_min, GST_AML_V4L2_MIN_BUFFERS);
4740
4741 /* for the downstream pool, we keep what downstream wants, though ensure
4742 * at least a minimum if downstream didn't suggest anything (we are
4743 * expecting the base class to create a default one for the context) */
4744 min = MAX(min, GST_AML_V4L2_MIN_BUFFERS);
4745
4746 /* To import we need the other pool to hold at least own_min */
4747 if (obj->pool == pool)
4748 min += own_min;
4749 }
4750
4751 /* Request a bigger max, if one was suggested but it's too small */
4752 if (max != 0)
4753 max = MAX(min, max);
4754
4755 /* First step, configure our own pool */
4756 config = gst_buffer_pool_get_config(obj->pool);
4757
4758 if (obj->need_video_meta || has_video_meta)
4759 {
4760 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
4761 gst_buffer_pool_config_add_option(config,
4762 GST_BUFFER_POOL_OPTION_VIDEO_META);
4763 }
4764
4765 gst_buffer_pool_config_set_allocator(config, allocator, &params);
4766 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
4767
4768 GST_DEBUG_OBJECT(obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config);
4769
4770 /* Our pool often needs to adjust the value */
4771 if (!gst_buffer_pool_set_config(obj->pool, config))
4772 {
4773 config = gst_buffer_pool_get_config(obj->pool);
4774
4775 GST_DEBUG_OBJECT(obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config);
4776
4777 /* our pool will adjust the maximum buffer count, which we are fine with */
4778 if (!gst_buffer_pool_set_config(obj->pool, config))
4779 goto config_failed;
4780 }
4781
4782 /* Now configure the other pool if different */
4783 if (obj->pool != pool)
4784 other_pool = pool;
4785
4786 if (other_pool)
4787 {
4788 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)obj->element;
4789 if (self->is_secure_path)
4790 {
4791 params.flags |= GST_MEMORY_FLAG_LAST << 1; // in the DRM allocator, GST_MEMORY_FLAG_LAST << 1 represents GST_MEMORY_FLAG_SECURE
4792 GST_DEBUG_OBJECT(obj, "set secure flag for drmbufferpool flag:0x%x", params.flags);
4793 }
4794 config = gst_buffer_pool_get_config(other_pool);
4795 gst_buffer_pool_config_set_allocator(config, allocator, &params);
4796 gst_buffer_pool_config_set_params(config, caps, size, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE, own_min + DEFAULT_EXTRA_CAPTURE_BUF_SIZE);
4797 gst_buffer_pool_config_set_video_alignment(config, &obj->align);
4798
4799 GST_DEBUG_OBJECT(obj->dbg_obj, "setting other pool config to %" GST_PTR_FORMAT, config);
4800
4801 /* if downstream supports video metadata, add this to the pool config */
4802 if (has_video_meta)
4803 {
4804 GST_DEBUG_OBJECT(obj->dbg_obj, "activate Video Meta");
4805 gst_buffer_pool_config_add_option(config,
4806 GST_BUFFER_POOL_OPTION_VIDEO_META);
4807 }
4808
4809 if (!gst_buffer_pool_set_config(other_pool, config))
4810 {
4811 config = gst_buffer_pool_get_config(other_pool);
4812
4813 if (!gst_buffer_pool_config_validate_params(config, caps, size, min,
4814 max))
4815 {
4816 gst_structure_free(config);
4817 goto config_failed;
4818 }
4819
4820 if (!gst_buffer_pool_set_config(other_pool, config))
4821 goto config_failed;
4822 }
4823 }
4824
4825 if (pool)
4826 {
4827 /* For simplicity, simply read back the active configuration, so our base
4828 * class gets the right information */
4829 config = gst_buffer_pool_get_config(pool);
4830 gst_buffer_pool_config_get_params(config, NULL, &size, &min, &max);
4831 gst_structure_free(config);
4832 }
4833
4834 if (update)
4835 gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
4836 else
4837 gst_query_add_allocation_pool(query, pool, size, min, max);
4838
4839 if (allocator)
4840 gst_object_unref(allocator);
4841
4842 if (pool)
4843 gst_object_unref(pool);
4844
4845 return TRUE;
4846
4847pool_failed:
4848{
4849 /* setup_pool already sent the error */
4850 goto cleanup;
4851}
4852config_failed:
4853{
4854 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
4855 (_("Failed to configure internal buffer pool.")), (NULL));
4856 goto cleanup;
4857}
4858no_size:
4859{
4860 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
4861 (_("Video device did not suggest any buffer size.")), (NULL));
4862 goto cleanup;
4863}
4864cleanup:
4865{
4866 if (allocator)
4867 gst_object_unref(allocator);
4868
4869 if (pool)
4870 gst_object_unref(pool);
4871 return FALSE;
4872}
4873no_downstream_pool:
4874{
4875 GST_ELEMENT_ERROR(obj->element, RESOURCE, SETTINGS,
4876 (_("No downstream pool to import from.")),
4877 ("When importing DMABUF or USERPTR, we need a pool to import from"));
4878 return FALSE;
4879}
4880}
4881
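/* Answer an upstream ALLOCATION query: offer our own pool (only in MMAP/DMABUF
 * modes) with the driver's minimum buffer count and advertise GstVideoMeta
 * support. */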
4882gboolean
4883gst_aml_v4l2_object_propose_allocation(GstAmlV4l2Object *obj, GstQuery *query)
4884{
4885 GstBufferPool *pool;
4886 /* we need at least 2 buffers to operate */
4887 guint size, min, max;
4888 GstCaps *caps;
4889 gboolean need_pool;
4890
4891 /* Set default allocation parameters */
4892 size = obj->info.size;
4893 min = GST_AML_V4L2_MIN_BUFFERS;
4894 max = VIDEO_MAX_FRAME;
4895
4896 gst_query_parse_allocation(query, &caps, &need_pool);
4897
4898 if (caps == NULL)
4899 goto no_caps;
4900
4901 switch (obj->mode)
4902 {
4903 case GST_V4L2_IO_MMAP:
4904 case GST_V4L2_IO_DMABUF:
4905 if ((pool = obj->pool))
4906 gst_object_ref(pool);
4907 break;
4908 default:
4909 pool = NULL;
4910 break;
4911 }
4912
4913 if (pool != NULL)
4914 {
4915 GstCaps *pcaps;
4916 GstStructure *config;
4917
4918 /* we had a pool, check caps */
4919 config = gst_buffer_pool_get_config(pool);
4920 gst_buffer_pool_config_get_params(config, &pcaps, NULL, NULL, NULL);
4921
4922 GST_DEBUG_OBJECT(obj->dbg_obj,
4923 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
4924 if (!gst_caps_is_equal(caps, pcaps))
4925 {
4926 gst_structure_free(config);
4927 gst_object_unref(pool);
4928 goto different_caps;
4929 }
4930 gst_structure_free(config);
4931 }
4932 gst_aml_v4l2_get_driver_min_buffers(obj);
4933
4934 min = MAX(obj->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
4935
4936 gst_query_add_allocation_pool(query, pool, size, min, max);
4937
4938 /* we also support various metadata */
4939 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
4940
4941 if (pool)
4942 gst_object_unref(pool);
4943
4944 return TRUE;
4945
4946 /* ERRORS */
4947no_caps:
4948{
4949 GST_DEBUG_OBJECT(obj->dbg_obj, "no caps specified");
4950 return FALSE;
4951}
4952different_caps:
4953{
4954 /* different caps, we can't use this pool */
4955 GST_DEBUG_OBJECT(obj->dbg_obj, "pool has different caps");
4956 return FALSE;
4957}
4958}
4959
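/* Check whether a buffer allocated elsewhere can be imported in USERPTR or
 * DMABUF_IMPORT mode: the video meta must describe strides/offsets at least as
 * large as ours (larger ones trigger a VIDIOC_S_FMT update), the number of
 * memories must match the plane layout, and DMABuf import additionally
 * requires dmabuf memory. */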
4960gboolean
4961gst_aml_v4l2_object_try_import(GstAmlV4l2Object *obj, GstBuffer *buffer)
4962{
4963 GstVideoMeta *vmeta;
4964 guint n_mem = gst_buffer_n_memory(buffer);
4965
4966 /* only import if requested */
4967 switch (obj->mode)
4968 {
4969 case GST_V4L2_IO_USERPTR:
4970 case GST_V4L2_IO_DMABUF_IMPORT:
4971 break;
4972 default:
4973 GST_DEBUG_OBJECT(obj->dbg_obj,
4974 "The io-mode does not enable importation");
4975 return FALSE;
4976 }
4977
4978 vmeta = gst_buffer_get_video_meta(buffer);
4979 if (!vmeta && obj->need_video_meta)
4980 {
4981 GST_DEBUG_OBJECT(obj->dbg_obj, "Downstream buffer uses standard "
4982 "stride/offset while the driver does not.");
4983 return FALSE;
4984 }
4985
4986 /* we need matching strides/offsets and size */
4987 if (vmeta)
4988 {
4989 guint p;
4990 gboolean need_fmt_update = FALSE;
4991
4992 if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES(&obj->info))
4993 {
4994 GST_WARNING_OBJECT(obj->dbg_obj,
4995 "Cannot import buffers with different number planes");
4996 return FALSE;
4997 }
4998
4999 for (p = 0; p < vmeta->n_planes; p++)
5000 {
5001 if (vmeta->stride[p] < obj->info.stride[p])
5002 {
5003 GST_DEBUG_OBJECT(obj->dbg_obj,
5004 "Not importing as remote stride %i is smaller then %i on plane %u",
5005 vmeta->stride[p], obj->info.stride[p], p);
5006 return FALSE;
5007 }
5008 else if (vmeta->stride[p] > obj->info.stride[p])
5009 {
5010 need_fmt_update = TRUE;
5011 }
5012
5013 if (vmeta->offset[p] < obj->info.offset[p])
5014 {
5015 GST_DEBUG_OBJECT(obj->dbg_obj,
5016 "Not importing as offset %" G_GSIZE_FORMAT
5017 " is smaller then %" G_GSIZE_FORMAT " on plane %u",
5018 vmeta->offset[p], obj->info.offset[p], p);
5019 return FALSE;
5020 }
5021 else if (vmeta->offset[p] > obj->info.offset[p])
5022 {
5023 need_fmt_update = TRUE;
5024 }
5025 }
5026
5027 if (need_fmt_update)
5028 {
5029 struct v4l2_format format;
5030 gint wanted_stride[GST_VIDEO_MAX_PLANES] = {
5031 0,
5032 };
5033
5034 format = obj->format;
5035
5036 /* update the current format with the stride we want to import from */
5037 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5038 {
5039 guint i;
5040
5041 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted strides:");
5042
5043 for (i = 0; i < obj->n_v4l2_planes; i++)
5044 {
5045 gint stride = vmeta->stride[i];
5046
5047 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5048 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5049
5050 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
5051 wanted_stride[i] = stride;
5052 GST_DEBUG_OBJECT(obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
5053 }
5054 }
5055 else
5056 {
5057 gint stride = vmeta->stride[0];
5058
5059 GST_DEBUG_OBJECT(obj->dbg_obj, "Wanted stride: %i", stride);
5060
5061 if (GST_VIDEO_FORMAT_INFO_IS_TILED(obj->info.finfo))
5062 stride = GST_VIDEO_TILE_X_TILES(stride) << GST_VIDEO_FORMAT_INFO_TILE_WS(obj->info.finfo);
5063
5064 format.fmt.pix.bytesperline = stride;
5065 wanted_stride[0] = stride;
5066 }
5067
5068 if (obj->ioctl(obj->video_fd, VIDIOC_S_FMT, &format) < 0)
5069 {
5070 GST_WARNING_OBJECT(obj->dbg_obj,
5071 "Something went wrong trying to update current format: %s",
5072 g_strerror(errno));
5073 return FALSE;
5074 }
5075
5076 gst_aml_v4l2_object_save_format(obj, obj->fmtdesc, &format, &obj->info,
5077 &obj->align);
5078
5079 if (V4L2_TYPE_IS_MULTIPLANAR(obj->type))
5080 {
5081 guint i;
5082
5083 for (i = 0; i < obj->n_v4l2_planes; i++)
5084 {
5085 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i])
5086 {
5087 GST_DEBUG_OBJECT(obj->dbg_obj,
5088 "[%i] Driver did not accept the new stride (wants %i, got %i)",
5089 i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
5090 wanted_stride[i]);
5091 return FALSE;
5092 }
5093 }
5094 }
5095 else
5096 {
5097 if (format.fmt.pix.bytesperline != wanted_stride[0])
5098 {
5099 GST_DEBUG_OBJECT(obj->dbg_obj,
5100 "Driver did not accept the new stride (wants %i, got %i)",
5101 format.fmt.pix.bytesperline, wanted_stride[0]);
5102 return FALSE;
5103 }
5104 }
5105 }
5106 }
5107
5108 /* we can always import a single-memory buffer, but otherwise we need the
5109 * same number of memory objects. */
5110 if (n_mem != 1 && n_mem != obj->n_v4l2_planes)
5111 {
5112 GST_DEBUG_OBJECT(obj->dbg_obj, "Can only import %i memory, "
5113 "buffers contains %u memory",
5114 obj->n_v4l2_planes, n_mem);
5115 return FALSE;
5116 }
5117
5118 /* For DMABuf importation we need DMABuf of course */
5119 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT)
5120 {
5121 guint i;
5122
5123 for (i = 0; i < n_mem; i++)
5124 {
5125 GstMemory *mem = gst_buffer_peek_memory(buffer, i);
5126
5127 if (!gst_is_dmabuf_memory(mem))
5128 {
5129 GST_DEBUG_OBJECT(obj->dbg_obj, "Cannot import non-DMABuf memory.");
5130 return FALSE;
5131 }
5132 }
5133 }
5134
5135 /* for the remaining, only the kernel driver can tell */
5136 return TRUE;
5137}
5138
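/* Amlogic-specific: when the output object imports DMABufs, mark the decoder's
 * secure path, query the vendor AML_V4L2_SET_DRMMODE control and, unless the
 * control is disabled, set it to 1 so the decoder runs on the secure (DRM)
 * video path. */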
5139gboolean gst_aml_v4l2_set_drm_mode(GstAmlV4l2Object *v4l2object)
5140{
5141 /* On Amlogic, using GST_V4L2_IO_DMABUF_IMPORT on the output object implies secure memory */
5142 if (v4l2object->req_mode == GST_V4L2_IO_DMABUF_IMPORT)
5143 {
5144 int rc;
5145 struct v4l2_queryctrl queryctrl;
5146 struct v4l2_control control;
5147
5148 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)v4l2object->element;
5149 self->is_secure_path = TRUE;
5150
5151#define V4L2_CID_USER_AMLOGIC_BASE (V4L2_CID_USER_BASE + 0x1100)
5152#define AML_V4L2_SET_DRMMODE (V4L2_CID_USER_AMLOGIC_BASE + 0)
5153 memset(&queryctrl, 0, sizeof(queryctrl));
5154 queryctrl.id = AML_V4L2_SET_DRMMODE;
5155
5156 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_QUERYCTRL, &queryctrl);
5157 if (rc == 0)
5158 {
5159 if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED))
5160 {
5161 memset(&control, 0, sizeof(control));
5162 control.id = AML_V4L2_SET_DRMMODE;
5163 control.value = 1;
5164 rc = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_CTRL, &control);
5165 if (rc != 0)
5166 {
5167 GST_ERROR_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE failed: rc %d", rc);
5168 return FALSE;
5169 }
5170 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE set successfully");
5171 return TRUE;
5172 }
5173 else
5174 {
5175 GST_DEBUG_OBJECT(v4l2object, "AML_V4L2_SET_DRMMODE is disabled");
5176 return TRUE;
5177 }
5178 }
5179 else
5180 {
5181 GST_ERROR_OBJECT(v4l2object, "VIDIOC_QUERYCTRL for AML_V4L2_SET_DRMMODE failed");
5182 return FALSE;
5183 }
5184 }
5185 else
5186 {
5187 GST_DEBUG_OBJECT(v4l2object, "req mode is not GST_V4L2_IO_DMABUF_IMPORT, DRM mode does not need to be configured");
5188 return TRUE;
5189 }
5190}