/* GStreamer
 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
40#ifdef GST_VIDEO_DECODER_STREAM_LOCK
41#undef GST_VIDEO_DECODER_STREAM_LOCK
42#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
43 { \
44 GST_DEBUG("aml v4l2 dec locking"); \
45 g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
46 GST_DEBUG("aml v4l2 dec locked"); \
47 }
48#endif
49
50#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
51#undef GST_VIDEO_DECODER_STREAM_UNLOCK
52#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
53 { \
54 g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
55 GST_DEBUG("aml v4l2 dec unlocked"); \
56 }
57#endif
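
/* Per-codec data handed to the subclass initializer when a decoder element is
 * registered: the device path, the pad template caps and the element
 * metadata. */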
58typedef struct
59{
60 gchar *device;
61 GstCaps *sink_caps;
62 GstCaps *src_caps;
63 const gchar *longname;
64 const gchar *description;
65} GstAmlV4l2VideoDecCData;
66
67enum
68{
69 PROP_0,
70 V4L2_STD_OBJECT_PROPS
71};
72
73#define gst_aml_v4l2_video_dec_parent_class parent_class
74G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
75 GST_TYPE_VIDEO_DECODER);
76
77static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
78
79static void
80gst_aml_v4l2_video_dec_set_property(GObject *object,
81 guint prop_id, const GValue *value, GParamSpec *pspec)
82{
83 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
84
85 switch (prop_id)
86 {
87 case PROP_CAPTURE_IO_MODE:
88 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
89 prop_id, value, pspec))
90 {
91 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
92 }
93 break;
94 case PROP_DUMP_FRAME_LOCATION:
95 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
96 prop_id, value, pspec))
97 {
98 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
99 }
100 break;
101
102 /* By default, only set on output */
103 default:
104 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
105 prop_id, value, pspec))
106 {
107 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
108 }
109 break;
110 }
111}
112
113static void
114gst_aml_v4l2_video_dec_get_property(GObject *object,
115 guint prop_id, GValue *value, GParamSpec *pspec)
116{
117 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
118
119 switch (prop_id)
120 {
121 case PROP_CAPTURE_IO_MODE:
122 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
123 prop_id, value, pspec))
124 {
125 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
126 }
127 break;
128
129 /* By default read from output */
130 default:
131 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
132 prop_id, value, pspec))
133 {
134 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
135 }
136 break;
137 }
138}
139
140static gboolean
141gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
142{
143 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
144 GstCaps *codec_caps;
145
146 GST_DEBUG_OBJECT(self, "Opening");
147
148 if (!gst_aml_v4l2_object_open(self->v4l2output))
149 goto failure;
150
151 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
152 goto failure;
153
154 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
155 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
156 codec_caps);
157 gst_caps_unref(codec_caps);
158
159 if (gst_caps_is_empty(self->probed_sinkcaps))
160 goto no_encoded_format;
161
162 return TRUE;
163
164no_encoded_format:
165 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
166 (_("Decoder on device %s has no supported input format"),
167 self->v4l2output->videodev),
168 (NULL));
169 goto failure;
170
171failure:
172 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
173 gst_aml_v4l2_object_close(self->v4l2output);
174
175 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
176 gst_aml_v4l2_object_close(self->v4l2capture);
177
178 gst_caps_replace(&self->probed_srccaps, NULL);
179 gst_caps_replace(&self->probed_sinkcaps, NULL);
180
181 return FALSE;
182}
183
184static gboolean
185gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
186{
187 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
188
189 GST_DEBUG_OBJECT(self, "Closing");
190
191 gst_aml_v4l2_object_close(self->v4l2output);
192 gst_aml_v4l2_object_close(self->v4l2capture);
193 gst_caps_replace(&self->probed_srccaps, NULL);
194 gst_caps_replace(&self->probed_sinkcaps, NULL);
195
196 return TRUE;
197}
198
199static gboolean
200gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
201{
202 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
203
204 GST_DEBUG_OBJECT(self, "Starting");
205
206 gst_aml_v4l2_object_unlock(self->v4l2output);
207 g_atomic_int_set(&self->active, TRUE);
208 self->output_flow = GST_FLOW_OK;
209
210 return TRUE;
211}
212
213static gboolean
214gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
215{
216 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
217
218 GST_DEBUG_OBJECT(self, "Stopping");
219
220 gst_aml_v4l2_object_unlock(self->v4l2output);
221 gst_aml_v4l2_object_unlock(self->v4l2capture);
222
223 /* Wait for capture thread to stop */
224 gst_pad_stop_task(decoder->srcpad);
225
226 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
227 self->output_flow = GST_FLOW_OK;
228 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
229
230 /* Should have been flushed already */
231 g_assert(g_atomic_int_get(&self->active) == FALSE);
232
233 gst_aml_v4l2_object_stop(self->v4l2output);
234 gst_aml_v4l2_object_stop(self->v4l2capture);
235
236 if (self->input_state)
237 {
238 gst_video_codec_state_unref(self->input_state);
239 self->input_state = NULL;
240 }
241
242 GST_DEBUG_OBJECT(self, "Stopped");
243
244 return TRUE;
245}
246
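/* Configure the V4L2 OUTPUT (encoded) side from the upstream caps. When the
 * caps change, the previous stream is drained first and the capture side is
 * stopped; the old capture buffers are reclaimed through an allocation query
 * when they cannot be orphaned. DMABUF import is requested when upstream
 * advertises dmabuf memory. */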
247static gboolean
248gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
249 GstVideoCodecState *state)
250{
251 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
252 gboolean ret = TRUE;
253 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
254 GstCaps *caps;
255
256 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
257 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
258
259 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
260 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
261
262 if (self->input_state)
263 {
264 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
265 {
266 GST_DEBUG_OBJECT(self, "Compatible caps");
267 goto done;
268 }
269 gst_video_codec_state_unref(self->input_state);
270 self->input_state = NULL;
271
272 gst_aml_v4l2_video_dec_finish(decoder);
273 gst_aml_v4l2_object_stop(self->v4l2output);
274
        /* The renegotiation flow doesn't blend with the base class flow. To
         * properly stop the capture pool, if the buffers can't be orphaned, we
         * need to reclaim our buffers, which will happen through the
         * allocation query. The allocation query is triggered by
         * gst_video_decoder_negotiate() which requires the output caps to be
         * set, but we can't know this information as we rely on the decoder,
         * which requires the capture queue to be stopped.
         *
         * To work around this issue, we simply run an allocation query with
         * the old negotiated caps in order to drain/reclaim our buffers. That
         * avoids the complexity and should not have much impact on performance
         * since the following allocation query will happen on a drained
         * pipeline and won't block. */
288 if (self->v4l2capture->pool &&
289 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
290 {
291 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
292 if (caps)
293 {
294 GstQuery *query = gst_query_new_allocation(caps, FALSE);
295 gst_pad_peer_query(decoder->srcpad, query);
296 gst_query_unref(query);
297 gst_caps_unref(caps);
298 }
299 }
300
301 gst_aml_v4l2_object_stop(self->v4l2capture);
302 self->output_flow = GST_FLOW_OK;
303 }
304
305 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
306 {
        GST_ERROR_OBJECT(self, "failed to configure output drm mode");
308 goto done;
309 }
310
311 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
312
313 gst_caps_replace(&self->probed_srccaps, NULL);
314 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
315 gst_aml_v4l2_object_get_raw_caps());
316
317 if (gst_caps_is_empty(self->probed_srccaps))
318 goto no_raw_format;
319
320 caps = gst_caps_copy(self->probed_srccaps);
321 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
322 gst_caps_append(self->probed_srccaps, caps);
323 if (ret)
324 self->input_state = gst_video_codec_state_ref(state);
325 else
326 gst_aml_v4l2_error(self, &error);
327
328done:
329 return ret;
330
331no_raw_format:
332 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
333 (_("Decoder on device %s has no supported output format"),
334 self->v4l2output->videodev),
335 (NULL));
    return FALSE;
337}
338
339static gboolean
340gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
341{
342 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
343
344 GST_DEBUG_OBJECT(self, "Flushed");
345
    /* Ensure the processing thread has stopped for the reverse playback
     * discont case */
348 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
349 {
350 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
351
352 gst_aml_v4l2_object_unlock(self->v4l2output);
353 gst_aml_v4l2_object_unlock(self->v4l2capture);
354 gst_pad_stop_task(decoder->srcpad);
355 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
356 }
357
358 self->output_flow = GST_FLOW_OK;
359
360 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
361 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
362
363 if (self->v4l2output->pool)
364 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
365
    /* gst_aml_v4l2_buffer_pool_flush() calls streamon on the capture pool and
     * must be called after gst_aml_v4l2_object_unlock_stop() has stopped
     * flushing the buffer pool. */
369 if (self->v4l2capture->pool)
370 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
371
372 return TRUE;
373}
374
375static gboolean
376gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
377{
378 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
379
    /* We don't allow renegotiation without carefully disabling the pool */
381 if (self->v4l2capture->pool &&
382 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
383 return TRUE;
384
385 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
386}
387
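/* Thin wrapper around VIDIOC_DECODER_CMD. Returns FALSE when the device is
 * not open or the ioctl fails; ENOTTY is only logged at INFO level since the
 * command may simply not be implemented by the driver. */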
388static gboolean
389gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
390{
391 struct v4l2_decoder_cmd dcmd = {
392 0,
393 };
394
395 GST_DEBUG_OBJECT(v4l2object->element,
396 "sending v4l2 decoder command %u with flags %u", cmd, flags);
397
398 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
399 return FALSE;
400
401 dcmd.cmd = cmd;
402 dcmd.flags = flags;
403 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
404 goto dcmd_failed;
405
406 return TRUE;
407
408dcmd_failed:
409 if (errno == ENOTTY)
410 {
411 GST_INFO_OBJECT(v4l2object->element,
412 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
413 cmd, flags, v4l2object->videodev, g_strerror(errno));
414 }
415 else
416 {
417 GST_ERROR_OBJECT(v4l2object->element,
418 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
419 cmd, flags, v4l2object->videodev, g_strerror(errno));
420 }
421 return FALSE;
422}
423
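/* Drain the decoder. Preferably send V4L2_DEC_CMD_STOP and wait for the
 * capture task to park itself; otherwise keep feeding empty buffers to the
 * output pool until it reports FLUSHING. */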
424static GstFlowReturn
425gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
426{
427 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
428 GstFlowReturn ret = GST_FLOW_OK;
429 GstBuffer *buffer;
430
431 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
432 goto done;
433
434 GST_DEBUG_OBJECT(self, "Finishing decoding");
435
436 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
437
438 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
439 {
440 GstTask *task = decoder->srcpad->task;
441
442 /* If the decoder stop command succeeded, just wait until processing is
443 * finished */
444 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
445 GST_OBJECT_LOCK(task);
446 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
447 GST_TASK_WAIT(task);
448 GST_OBJECT_UNLOCK(task);
449 ret = GST_FLOW_FLUSHING;
450 }
451 else
452 {
        /* otherwise keep queuing empty buffers until the processing thread has
         * stopped; _pool_process() will return FLUSHING when that happens */
455 while (ret == GST_FLOW_OK)
456 {
457 buffer = gst_buffer_new();
458 ret =
459 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
460 gst_buffer_unref(buffer);
461 }
462 }
463
    /* and ensure the processing thread has stopped in case another error
     * occurred. */
466 gst_aml_v4l2_object_unlock(self->v4l2capture);
467 gst_pad_stop_task(decoder->srcpad);
468 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
469
470 if (ret == GST_FLOW_FLUSHING)
471 ret = self->output_flow;
472
473 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
474
    /* TODO Shall we clean up any reffed frames to work around broken decoders? */
476
477done:
478 return ret;
479}
480
481static GstFlowReturn
482gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
483{
484 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
485
486 GST_DEBUG_OBJECT(self, "Draining...");
487 gst_aml_v4l2_video_dec_finish(decoder);
488 gst_aml_v4l2_video_dec_flush(decoder);
489
490 return GST_FLOW_OK;
491}
492
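/* Return a reference to the pending frame with the lowest PTS. The capture
 * loop uses it to pair each decoded buffer with an input frame in
 * presentation order. */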
493static GstVideoCodecFrame *
494gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
495{
496 GstVideoCodecFrame *frame = NULL;
497 GList *frames, *l;
498 gint count = 0;
499
500 frames = gst_video_decoder_get_frames(decoder);
501
502 for (l = frames; l != NULL; l = l->next)
503 {
504 GstVideoCodecFrame *f = l->data;
505
506 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
507 frame = f;
508
509 count++;
510 }
511
512 if (frame)
513 {
514 GST_LOG_OBJECT(decoder,
515 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
516 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
517 gst_video_codec_frame_ref(frame);
518 }
519
520 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
521
522 return frame;
523}
524
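/* gst_caps_map_in_place() callback that replaces the driver's padded (coded)
 * width/height with the visible size, so that negotiation with downstream is
 * done on the visible resolution. */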
525static gboolean
526gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
527 GstStructure *structure, gpointer user_data)
528{
529 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
530 GstVideoAlignment *align = &self->v4l2capture->align;
531 GstVideoInfo *info = &self->v4l2capture->info;
532 int width, height;
533
534 if (!gst_structure_get_int(structure, "width", &width))
535 return TRUE;
536
537 if (!gst_structure_get_int(structure, "height", &height))
538 return TRUE;
539
540 if (align->padding_left != 0 || align->padding_top != 0 ||
541 height != info->height + align->padding_bottom)
542 return TRUE;
543
544 if (height == info->height + align->padding_bottom)
545 {
        /* Some drivers may round up width to the padded width */
547 if (width == info->width + align->padding_right)
548 gst_structure_set(structure,
549 "width", G_TYPE_INT, width - align->padding_right,
550 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
551 /* Some drivers may keep visible width and only round up bytesperline */
552 else if (width == info->width)
553 gst_structure_set(structure,
554 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
555 }
556
557 return TRUE;
558}
559
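/* Capture (srcpad) task. On the first run it waits for the V4L2 source-change
 * event, acquires the format chosen by the driver, negotiates the output caps
 * and activates the capture pool. It then dequeues decoded buffers and
 * finishes the oldest pending frame with each of them. */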
560static void
561gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
562{
563 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
564 GstAmlV4l2BufferPool *v4l2_pool;
565 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
566 GstBufferPool *pool;
567 GstVideoCodecFrame *frame;
568 GstBuffer *buffer = NULL;
569 GstFlowReturn ret;
570
571 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
572 {
573 GstVideoInfo info;
574 GstVideoCodecState *output_state;
575 GstCaps *acquired_caps, *available_caps, *caps, *filter;
576 GstStructure *st;
577
        GST_DEBUG_OBJECT(self, "waiting for source change event");
        /* Wait until a SOURCE_CHANGE event is received to get the right video format */
580 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
581 {
582 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
583 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
584 {
585 GST_DEBUG_OBJECT(self, "Received source change event");
586 break;
587 }
588 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
589 {
590 GST_DEBUG_OBJECT(self, "Received eos event");
591 goto beach;
592 }
593 else if (ret != GST_FLOW_OK)
594 {
595 GST_ERROR_OBJECT(self, "dqevent error");
596 goto beach;
597 }
598 }
599 self->v4l2capture->need_wait_event = FALSE;
600
601 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
602 goto not_negotiated;
603
        /* Create caps from the acquired format and remove the format-specific fields */
605 acquired_caps = gst_video_info_to_caps(&info);
606 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
607 st = gst_caps_get_structure(acquired_caps, 0);
608 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site",
609 NULL);
610
611 /* Probe currently available pixel formats */
612 available_caps = gst_caps_copy(self->probed_srccaps);
613 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
614
615 /* Replace coded size with visible size, we want to negotiate visible size
616 * with downstream, not coded size. */
617 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
618
619 filter = gst_caps_intersect_full(available_caps, acquired_caps,
620 GST_CAPS_INTERSECT_FIRST);
621 caps = gst_caps_copy(filter);
622 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
623 gst_caps_append(filter, caps);
624
625 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
626 gst_caps_unref(acquired_caps);
627 gst_caps_unref(available_caps);
628 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
629 gst_caps_unref(filter);
630
631 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
632 if (gst_caps_is_empty(caps))
633 {
634 gst_caps_unref(caps);
635 goto not_negotiated;
636 }
637
638 /* Fixate pixel format */
639 caps = gst_caps_fixate(caps);
640
641 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
642
643 /* Try to set negotiated format, on success replace acquired format */
644 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
645 gst_video_info_from_caps(&info, caps);
646 else
647 gst_aml_v4l2_clear_error(&error);
648 gst_caps_unref(caps);
649
650 output_state = gst_video_decoder_set_output_state(decoder,
651 info.finfo->format, info.width, info.height, self->input_state);
652
653 /* Copy the rest of the information, there might be more in the future */
654 output_state->info.interlace_mode = info.interlace_mode;
655 gst_video_codec_state_unref(output_state);
656
657 if (!gst_video_decoder_negotiate(decoder))
658 {
659 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
660 goto flushing;
661 else
662 goto not_negotiated;
663 }
664
665 /* Ensure our internal pool is activated */
666 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
667 TRUE))
668 goto activate_failed;
669 }
670
671 GST_LOG_OBJECT(decoder, "Allocate output buffer");
672
673 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
674
675 self->output_flow = GST_FLOW_OK;
676 do
677 {
        /* We cannot use the base class allocate helper since it takes the
         * internal stream lock. We know that the acquire may need to poll until
         * more frames come in, and holding this lock would prevent that.
         */
682 pool = gst_video_decoder_get_buffer_pool(decoder);
683
684 /* Pool may be NULL if we started going to READY state */
685 if (pool == NULL)
686 {
687 ret = GST_FLOW_FLUSHING;
688 goto beach;
689 }
690
691 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
692 g_object_unref(pool);
693
694 if (ret != GST_FLOW_OK)
695 goto beach;
696
697 GST_LOG_OBJECT(decoder, "Process output buffer");
698 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
699 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
700 {
701 gst_aml_v4l2_object_stop(self->v4l2capture);
702 return;
703 }
704
705 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
706
707 if (ret != GST_FLOW_OK)
708 goto beach;
709
710 frame = gst_aml_v4l2_video_dec_get_oldest_frame(decoder);
711
712 if (frame)
713 {
714 frame->output_buffer = buffer;
715 buffer = NULL;
716 ret = gst_video_decoder_finish_frame(decoder, frame);
717
718 if (ret != GST_FLOW_OK)
719 goto beach;
720 }
721 else
722 {
723 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
724 gst_buffer_unref(buffer);
725 }
726
727 return;
728 /* ERRORS */
729not_negotiated:
730{
731 GST_ERROR_OBJECT(self, "not negotiated");
732 ret = GST_FLOW_NOT_NEGOTIATED;
733 goto beach;
734}
735activate_failed:
736{
737 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
738 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
739 (_("Failed to allocate required memory.")),
740 ("Buffer pool activation failed"));
741 ret = GST_FLOW_ERROR;
742 goto beach;
743}
744flushing:
745{
746 ret = GST_FLOW_FLUSHING;
747 goto beach;
748}
749beach:
750 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
751 gst_flow_get_name(ret));
752
753 gst_buffer_replace(&buffer, NULL);
754 self->output_flow = ret;
755 gst_aml_v4l2_object_unlock(self->v4l2output);
756 gst_pad_pause_task(decoder->srcpad);
757}
758
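/* Queue one encoded frame on the V4L2 OUTPUT side. The first frame also pushes
 * the codec_data (or the frame itself in byte-stream mode), activates the
 * output pool and starts the capture task. */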
759static GstFlowReturn
760gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
761 GstVideoCodecFrame *frame)
762{
763 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
764 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
765 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
766 GstFlowReturn ret = GST_FLOW_OK;
767 gboolean processed = FALSE;
768 GstBuffer *tmp;
769 GstTaskState task_state;
770 GstCaps *caps;
771
772 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
773
774 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
775 goto flushing;
776
777 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
778 {
779 if (!self->input_state)
780 goto not_negotiated;
781 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
782 &error))
783 goto not_negotiated;
784 }
785
786 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
787 {
788 GstBuffer *codec_data;
789
790 GST_DEBUG_OBJECT(self, "Sending header");
791
792 codec_data = self->input_state->codec_data;
793
        /* We are running in byte-stream mode, so we don't know the headers, but
         * we need to send something, otherwise the decoder will refuse to
         * initialize.
         */
798 if (codec_data)
799 {
800 gst_buffer_ref(codec_data);
801 }
802 else
803 {
804 codec_data = gst_buffer_ref(frame->input_buffer);
805 processed = TRUE;
806 }
807
808 /* Ensure input internal pool is active */
809 if (!gst_buffer_pool_is_active(pool))
810 {
811 GstStructure *config = gst_buffer_pool_get_config(pool);
812 guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
813 guint max = VIDEO_MAX_FRAME;
814
815 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
816 // self->v4l2output->info.size, min, max);
817 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
818
819 /* There is no reason to refuse this config */
820 if (!gst_buffer_pool_set_config(pool, config))
821 goto activate_failed;
822
823 if (!gst_buffer_pool_set_active(pool, TRUE))
824 goto activate_failed;
825 }
826
827 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
828 ret =
829 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
830 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
831
832 gst_buffer_unref(codec_data);
833
        /* For decoders G_FMT returns the coded size, G_SELECTION returns the
         * visible size in the compose rectangle.
         * gst_aml_v4l2_object_acquire_format() checks both and returns the
         * visible size as width/height and the coded size as padding. */
838 }
839
840 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
841 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
842 {
843 /* It's possible that the processing thread stopped due to an error */
844 if (self->output_flow != GST_FLOW_OK &&
845 self->output_flow != GST_FLOW_FLUSHING)
846 {
847 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
848 ret = self->output_flow;
849 goto drop;
850 }
851
852 GST_DEBUG_OBJECT(self, "Starting decoding thread");
853
        /* Start the processing task; when it quits, the task will disable input
         * processing to unlock the input if draining, or to prevent a potential
         * block */
856 self->output_flow = GST_FLOW_FLUSHING;
857 if (!gst_pad_start_task(decoder->srcpad,
858 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
859 goto start_task_failed;
860 }
861
862 if (!processed)
863 {
864 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
865 ret =
866 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
867 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
868
869 if (ret == GST_FLOW_FLUSHING)
870 {
871 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
872 GST_TASK_STARTED)
873 ret = self->output_flow;
874 goto drop;
875 }
876 else if (ret != GST_FLOW_OK)
877 {
878 goto process_failed;
879 }
880 }
881
    /* No need to keep the input around */
883 tmp = frame->input_buffer;
884 frame->input_buffer = gst_buffer_new();
885 gst_buffer_copy_into(frame->input_buffer, tmp,
886 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
887 GST_BUFFER_COPY_META,
888 0, 0);
889 gst_buffer_unref(tmp);
890
891 gst_video_codec_frame_unref(frame);
892 return ret;
893
894 /* ERRORS */
895not_negotiated:
896{
897 GST_ERROR_OBJECT(self, "not negotiated");
898 ret = GST_FLOW_NOT_NEGOTIATED;
899 gst_aml_v4l2_error(self, &error);
900 goto drop;
901}
902activate_failed:
903{
904 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
905 (_("Failed to allocate required memory.")),
906 ("Buffer pool activation failed"));
907 ret = GST_FLOW_ERROR;
908 goto drop;
909}
910flushing:
911{
912 ret = GST_FLOW_FLUSHING;
913 goto drop;
914}
915
916start_task_failed:
917{
918 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
919 (_("Failed to start decoding thread.")), (NULL));
920 ret = GST_FLOW_ERROR;
921 goto drop;
922}
923process_failed:
924{
925 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
926 (_("Failed to process frame.")),
927 ("Maybe be due to not enough memory or failing driver"));
928 ret = GST_FLOW_ERROR;
929 goto drop;
930}
931drop:
932{
933 gst_video_decoder_drop_frame(decoder, frame);
934 return ret;
935}
936}
937
938static gboolean
939gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
940 GstQuery *query)
941{
942 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
943 GstClockTime latency;
944 gboolean ret = FALSE;
945
946 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
947 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
948
949 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
950 {
951 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
        GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT ")", GST_TIME_ARGS(latency),
                         self->v4l2capture->min_buffers, self->v4l2capture->duration);
954 gst_video_decoder_set_latency(decoder, latency, latency);
955 }
956 else
957 {
958 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
959 }
960
961 return ret;
962}
963
964static gboolean
965gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
966{
967 gboolean ret = TRUE;
968 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
969
970 switch (GST_QUERY_TYPE(query))
971 {
972 case GST_QUERY_CAPS:
973 {
974 GstCaps *filter, *result = NULL;
975 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
976
977 gst_query_parse_caps(query, &filter);
978
979 if (self->probed_srccaps)
980 result = gst_caps_ref(self->probed_srccaps);
981 else
982 result = gst_pad_get_pad_template_caps(pad);
983
984 if (filter)
985 {
986 GstCaps *tmp = result;
987 result =
988 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
989 gst_caps_unref(tmp);
990 }
991
992 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
993
994 gst_query_set_caps_result(query, result);
995 gst_caps_unref(result);
996 break;
997 }
998
999 default:
1000 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1001 break;
1002 }
1003
1004 return ret;
1005}
1006
1007static GstCaps *
1008gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1009{
1010 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1011 GstCaps *result;
1012
1013 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1014 filter);
1015
1016 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1017
1018 return result;
1019}
1020
1021static gboolean
1022gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1023{
1024 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1025 gboolean ret;
1026 GstEventType type = GST_EVENT_TYPE(event);
1027
1028 switch (type)
1029 {
1030 case GST_EVENT_FLUSH_START:
1031 GST_DEBUG_OBJECT(self, "flush start");
1032 gst_aml_v4l2_object_unlock(self->v4l2output);
1033 gst_aml_v4l2_object_unlock(self->v4l2capture);
1034 break;
1035 default:
1036 break;
1037 }
1038
1039 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1040
1041 switch (type)
1042 {
1043 case GST_EVENT_FLUSH_START:
1044 /* The processing thread should stop now, wait for it */
1045 gst_pad_stop_task(decoder->srcpad);
1046 GST_DEBUG_OBJECT(self, "flush start done");
1047 break;
1048 default:
1049 break;
1050 }
1051
1052 return ret;
1053}
1054
1055static GstStateChangeReturn
1056gst_aml_v4l2_video_dec_change_state(GstElement *element,
1057 GstStateChange transition)
1058{
1059 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1060 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1061
1062 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1063 {
1064 g_atomic_int_set(&self->active, FALSE);
1065 gst_aml_v4l2_object_unlock(self->v4l2output);
1066 gst_aml_v4l2_object_unlock(self->v4l2capture);
1067 gst_pad_stop_task(decoder->srcpad);
1068 }
1069
1070 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1071}
1072
1073static void
1074gst_aml_v4l2_video_dec_dispose(GObject *object)
1075{
1076 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1077
1078 gst_caps_replace(&self->probed_sinkcaps, NULL);
1079 gst_caps_replace(&self->probed_srccaps, NULL);
1080
1081 G_OBJECT_CLASS(parent_class)->dispose(object);
1082}
1083
1084static void
1085gst_aml_v4l2_video_dec_finalize(GObject *object)
1086{
1087 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1088
1089 gst_aml_v4l2_object_destroy(self->v4l2capture);
1090 gst_aml_v4l2_object_destroy(self->v4l2output);
1091
1092 G_OBJECT_CLASS(parent_class)->finalize(object);
1093}
1094
1095static void
1096gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1097{
    /* V4L2 objects are created in subinstance_init */
1099 self->is_secure_path = FALSE;
1100}
1101
1102static void
1103gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1104{
1105 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1106 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1107 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1108
1109 gst_video_decoder_set_packetized(decoder, TRUE);
1110
1111 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1112 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1113 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1114 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1115 self->v4l2output->no_initial_format = TRUE;
1116 self->v4l2output->keep_aspect = FALSE;
1117
1118 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1119 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1120 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1121 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1122 self->v4l2capture->need_wait_event = TRUE;
1123}
1124
1125static void
1126gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1127{
1128 GstElementClass *element_class;
1129 GObjectClass *gobject_class;
1130 GstVideoDecoderClass *video_decoder_class;
1131
1132 parent_class = g_type_class_peek_parent(klass);
1133
1134 element_class = (GstElementClass *)klass;
1135 gobject_class = (GObjectClass *)klass;
1136 video_decoder_class = (GstVideoDecoderClass *)klass;
1137
1138 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1139 "AML V4L2 Video Decoder");
1140
1141 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1142 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1143 gobject_class->set_property =
1144 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1145 gobject_class->get_property =
1146 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1147
1148 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1149 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1150 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1151 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1152 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1153 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1154 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1155 video_decoder_class->set_format =
1156 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1157 video_decoder_class->negotiate =
1158 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1159 video_decoder_class->decide_allocation =
1160 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1161 /* FIXME propose_allocation or not ? */
1162 video_decoder_class->handle_frame =
1163 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1164 video_decoder_class->getcaps =
1165 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1166 video_decoder_class->src_query =
1167 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1168 video_decoder_class->sink_event =
1169 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1170
1171 element_class->change_state =
1172 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1173
1174 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
1175}
1176
1177static void
1178gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1179{
1180 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1181 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1182 GstAmlV4l2VideoDecCData *cdata = data;
1183
1184 klass->default_device = cdata->device;
1185
    /* Note: gst_pad_template_new() takes the floating ref from the caps */
1187 gst_element_class_add_pad_template(element_class,
1188 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1189 cdata->sink_caps));
1190 gst_element_class_add_pad_template(element_class,
1191 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1192 cdata->src_caps));
1193
1194 gst_element_class_set_metadata(element_class, cdata->longname,
1195 "Codec/Decoder/Video/Hardware", cdata->description,
1196 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1197
1198 gst_caps_unref(cdata->sink_caps);
1199 gst_caps_unref(cdata->src_caps);
1200 g_free(cdata);
1201}
1202
1203/* Probing functions */
1204gboolean
1205gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1206{
1207 gboolean ret = FALSE;
1208
1209 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1210 ret = TRUE;
1211
1212 return ret;
1213}
1214
1215static gchar *
1216gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1217 const gchar *basename)
1218{
1219 gchar *codec_name = NULL;
1220 gchar *type_name = NULL;
1221 gboolean got_value = FALSE;
1222
1223#define SET_META(codec) \
1224 G_STMT_START \
1225 { \
1226 cdata->longname = "AML V4L2 " codec " Decoder"; \
1227 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1228 codec_name = g_ascii_strdown(codec, -1); \
1229 } \
1230 G_STMT_END
1231
1232 if (gst_structure_has_name(s, "image/jpeg"))
1233 {
1234 SET_META("JPEG");
1235 }
1236 else if (gst_structure_has_name(s, "video/mpeg"))
1237 {
1238 gint mpegversion = 0;
1239 gint *list = NULL;
1240 got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1241 if (FALSE == got_value)
1242 {
1243 got_value = gst_structure_get_list(s, "mpegversion", &list);
1244 if (TRUE == got_value && (1 == *list || 2 == *list))
1245 {
1246 SET_META("MPEG2");
1247 }
1248 else
1249 {
1250 SET_META("MPEG4");
1251 }
1252 }
1253 else
1254 {
1255 SET_META("MPEG4");
1256 }
1257 }
1258 else if (gst_structure_has_name(s, "video/x-h263"))
1259 {
1260 SET_META("H263");
1261 }
1262 else if (gst_structure_has_name(s, "video/x-fwht"))
1263 {
1264 SET_META("FWHT");
1265 }
1266 else if (gst_structure_has_name(s, "video/x-h264"))
1267 {
1268 SET_META("H264");
1269 }
1270 else if (gst_structure_has_name(s, "video/x-h265"))
1271 {
1272 SET_META("H265");
1273 }
1274 else if (gst_structure_has_name(s, "video/x-wmv"))
1275 {
1276 SET_META("VC1");
1277 }
1278 else if (gst_structure_has_name(s, "video/x-vp8"))
1279 {
1280 SET_META("VP8");
1281 }
1282 else if (gst_structure_has_name(s, "video/x-vp9"))
1283 {
1284 SET_META("VP9");
1285 }
1286 else if (gst_structure_has_name(s, "video/x-av1"))
1287 {
1288 SET_META("AV1");
1289 }
1290 else if (gst_structure_has_name(s, "video/x-bayer"))
1291 {
1292 SET_META("BAYER");
1293 }
1294 else if (gst_structure_has_name(s, "video/x-sonix"))
1295 {
1296 SET_META("SONIX");
1297 }
1298 else if (gst_structure_has_name(s, "video/x-pwc1"))
1299 {
1300 SET_META("PWC1");
1301 }
1302 else if (gst_structure_has_name(s, "video/x-pwc2"))
1303 {
1304 SET_META("PWC2");
1305 }
1306 else
1307 {
        /* This code should be kept in sync with the exposed CODEC types of
         * formats from gstamlv4l2object.c. This warning will only occur in
         * case we forget to also add a format here. */
1311 gchar *s_str = gst_structure_to_string(s);
1312 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1313 "bug, please report at https://bugs.gnome.org",
1314 s_str);
1315 g_free(s_str);
1316 }
1317
1318 if (codec_name)
1319 {
1320 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1321 if (g_type_from_name(type_name) != 0)
1322 {
1323 g_free(type_name);
1324 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1325 }
1326
1327 g_free(codec_name);
1328 }
1329
1330 return type_name;
1331#undef SET_META
1332}
1333
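/* Register one decoder element per structure found in sink_caps, each backed
 * by a codec-specific subclass of GstAmlV4l2VideoDec bound to device_path. */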
1334void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1335 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1336{
1337 gint i;
1338
1339 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1340 {
1341 GstAmlV4l2VideoDecCData *cdata;
1342 GstStructure *s;
1343 GTypeQuery type_query;
1344 GTypeInfo type_info = {
1345 0,
1346 };
1347 GType type, subtype;
1348 gchar *type_name;
1349
1350 s = gst_caps_get_structure(sink_caps, i);
1351
1352 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1353 cdata->device = g_strdup(device_path);
1354 cdata->sink_caps = gst_caps_new_empty();
1355 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1356 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1357 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1358 cdata->src_caps = gst_caps_copy(src_caps);
1359 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1360 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1361 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1362
1363 /* Skip over if we hit an unmapped type */
1364 if (!type_name)
1365 {
1366 g_free(cdata);
1367 continue;
1368 }
1369
1370 type = gst_aml_v4l2_video_dec_get_type();
1371 g_type_query(type, &type_query);
1372 memset(&type_info, 0, sizeof(type_info));
1373 type_info.class_size = type_query.class_size;
1374 type_info.instance_size = type_query.instance_size;
1375 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1376 type_info.class_data = cdata;
1377 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1378
1379 subtype = g_type_register_static(type, type_name, &type_info, 0);
1380 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1381 subtype))
            GST_WARNING("Failed to register element '%s'", type_name);
1383
1384 g_free(type_name);
1385 }
1386}
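
/* Illustrative sketch only (kept out of the build): how a plugin_init() might
 * call gst_aml_v4l2_video_dec_register(). The device path and the hard-coded
 * caps below are placeholders; a real plugin would probe the device and pass
 * the caps reported by the driver. The registered element is named after the
 * codec, e.g. "amlv4l2h264dec" for an H.264 sink structure, and could then be
 * used in a pipeline such as "... ! h264parse ! amlv4l2h264dec ! ..." (the
 * surrounding elements are assumptions). */
#if 0
static gboolean
plugin_init(GstPlugin *plugin)
{
    /* Placeholder caps and device path, for illustration only */
    GstCaps *sink_caps = gst_caps_from_string("video/x-h264");
    GstCaps *src_caps = gst_caps_from_string("video/x-raw");

    gst_aml_v4l2_video_dec_register(plugin, "video26", "/dev/video26",
                                    sink_caps, src_caps);

    /* The register call copies what it needs, so drop our references */
    gst_caps_unref(sink_caps);
    gst_caps_unref(src_caps);
    return TRUE;
}
#endif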