/* GStreamer
 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>

#include "gstamlv4l2object.h"
#include "gstamlv4l2videodec.h"

#include <string.h>
#include <gst/gst-i18n-plugin.h>
#include <gst/allocators/gstdmabuf.h>

GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug

#ifdef GST_VIDEO_DECODER_STREAM_LOCK
#undef GST_VIDEO_DECODER_STREAM_LOCK
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
    { \
        GST_INFO("aml v4l2 dec locking"); \
        g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
        GST_INFO("aml v4l2 dec locked"); \
    }
#endif

#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
#undef GST_VIDEO_DECODER_STREAM_UNLOCK
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
    { \
        GST_INFO("aml v4l2 dec unlocking"); \
        g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
        GST_INFO("aml v4l2 dec unlocked"); \
    }
#endif

#ifndef ABSDIFF
#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
#endif

#if GST_IMPORT_LGE_PROP
typedef struct _GstAmlResourceInfo
{
    gchar *coretype;
    gint videoport;
    gint audioport;
    gint maxwidth;
    gint maxheight;
    gint mixerport;
} GstAmlResourceInfo;

struct _GstAmlV4l2VideoDecLgeCtxt
{
    GstAmlResourceInfo res_info;
    guint64 dec_size;
    guint64 undec_size;
    gchar *app_type;
    gboolean clip_mode;
};
#endif

typedef struct
{
    gchar *device;
    GstCaps *sink_caps;
    GstCaps *src_caps;
    const gchar *longname;
    const gchar *description;
} GstAmlV4l2VideoDecCData;

enum
{
    PROP_0,
    V4L2_STD_OBJECT_PROPS,
#if GST_IMPORT_LGE_PROP
    LGE_RESOURCE_INFO,
    LGE_DECODE_SIZE,
    LGE_UNDECODE_SIZE,
    LGE_APP_TYPE,
    LGE_CLIP_MODE
#endif
};

#define gst_aml_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
                       GST_TYPE_VIDEO_DECODER);

static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
#endif

static void
gst_aml_v4l2_video_dec_set_property(GObject *object,
                                    guint prop_id, const GValue *value, GParamSpec *pspec)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    switch (prop_id)
    {
    case PROP_CAPTURE_IO_MODE:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    case PROP_DUMP_FRAME_LOCATION:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
#if GST_IMPORT_LGE_PROP
    case LGE_RESOURCE_INFO:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set res info");
        GstStructure *r_info = g_value_get_object(value);
        if (r_info)
        {
            if (gst_structure_has_field(r_info, "coretype"))
            {
                if (self->lge_ctxt->res_info.coretype)
                    g_free(self->lge_ctxt->res_info.coretype);
                self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
            }
            if (gst_structure_has_field(r_info, "videoport"))
                gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
            if (gst_structure_has_field(r_info, "audioport"))
                gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
            if (gst_structure_has_field(r_info, "maxwidth"))
                gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
            if (gst_structure_has_field(r_info, "maxheight"))
                gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
            if (gst_structure_has_field(r_info, "mixerport"))
                gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
        }
        break;
    }
    case LGE_APP_TYPE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set app type");
        if (self->lge_ctxt->app_type)
            g_free(self->lge_ctxt->app_type);
        self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
        break;
    }
    case LGE_CLIP_MODE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
        /* clip_mode is a gboolean; read the boolean value directly */
        self->lge_ctxt->clip_mode = g_value_get_boolean(value);
        break;
    }
#endif
    /* By default, only set on output */
    default:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    }
}

static void
gst_aml_v4l2_video_dec_get_property(GObject *object,
                                    guint prop_id, GValue *value, GParamSpec *pspec)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    switch (prop_id)
    {
    case PROP_CAPTURE_IO_MODE:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;

#if GST_IMPORT_LGE_PROP
    case LGE_DECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
        self->lge_ctxt->dec_size = -1;
        /* the property is a guint64, use the matching GValue setter */
        g_value_set_uint64(value, self->lge_ctxt->dec_size);
        break;
    }
    case LGE_UNDECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
        self->lge_ctxt->undec_size = -1;
        g_value_set_uint64(value, self->lge_ctxt->undec_size);
        break;
    }
#endif

    /* By default read from output */
    default:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    }
}

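/* Open the bitstream (OUTPUT) side of the V4L2 m2m decoder, share the same
 * device fd with the raw (CAPTURE) side, then probe which compressed formats
 * the device accepts so the sink pad caps can be narrowed accordingly. */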
static gboolean
gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *codec_caps;

    GST_DEBUG_OBJECT(self, "Opening");

    if (!gst_aml_v4l2_object_open(self->v4l2output))
        goto failure;

    if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
        goto failure;

    codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
    self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
                                                           codec_caps);
    gst_caps_unref(codec_caps);

    if (gst_caps_is_empty(self->probed_sinkcaps))
        goto no_encoded_format;

    return TRUE;

no_encoded_format:
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Decoder on device %s has no supported input format"),
                       self->v4l2output->videodev),
                      (NULL));
    goto failure;

failure:
    if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
        gst_aml_v4l2_object_close(self->v4l2output);

    if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
        gst_aml_v4l2_object_close(self->v4l2capture);

    gst_caps_replace(&self->probed_srccaps, NULL);
    gst_caps_replace(&self->probed_sinkcaps, NULL);

    return FALSE;
}

static gboolean
gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Closing");

    gst_aml_v4l2_object_close(self->v4l2output);
    gst_aml_v4l2_object_close(self->v4l2capture);
    gst_caps_replace(&self->probed_srccaps, NULL);
    gst_caps_replace(&self->probed_sinkcaps, NULL);

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Starting");

    gst_aml_v4l2_object_unlock(self->v4l2output);
    g_atomic_int_set(&self->active, TRUE);
    self->output_flow = GST_FLOW_OK;

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Stopping");

    gst_aml_v4l2_object_unlock(self->v4l2output);
    gst_aml_v4l2_object_unlock(self->v4l2capture);

    /* Wait for capture thread to stop */
    gst_pad_stop_task(decoder->srcpad);

    GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    self->output_flow = GST_FLOW_OK;
    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    /* Should have been flushed already */
    g_assert(g_atomic_int_get(&self->active) == FALSE);

    gst_aml_v4l2_object_stop(self->v4l2output);
    gst_aml_v4l2_object_stop(self->v4l2capture);

    if (self->input_state)
    {
        gst_video_codec_state_unref(self->input_state);
        self->input_state = NULL;
    }

    GST_DEBUG_OBJECT(self, "Stopped");

    return TRUE;
}

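/* Negotiated sink caps land here: select DMABUF import when upstream offers
 * dmabuf memory, configure the OUTPUT (bitstream) queue with the new caps,
 * and re-probe the raw formats the CAPTURE side can produce. A renegotiation
 * first drains the decoder and reclaims the capture buffers. */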
static gboolean
gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
                                  GstVideoCodecState *state)
{
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    gboolean ret = TRUE;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *caps;

    GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
    GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);

    if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
        self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;

    if (self->input_state)
    {
        if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
        {
            GST_DEBUG_OBJECT(self, "Compatible caps");
            goto done;
        }
        gst_video_codec_state_unref(self->input_state);
        self->input_state = NULL;

        gst_aml_v4l2_video_dec_finish(decoder);
        gst_aml_v4l2_object_stop(self->v4l2output);

        /* The renegotiation flow doesn't blend with the base class flow. To
         * properly stop the capture pool, if the buffers can't be orphaned, we
         * need to reclaim our buffers, which will happen through the allocation
         * query. The allocation query is triggered by gst_video_decoder_negotiate()
         * which requires the output caps to be set, but we can't know this
         * information as we rely on the decoder, which requires the capture queue
         * to be stopped.
         *
         * To workaround this issue, we simply run an allocation query with the
         * old negotiated caps in order to drain/reclaim our buffers. That breaks
         * the complexity and should not have much impact in performance since the
         * following allocation query will happen on a drained pipeline and won't
         * block. */
        if (self->v4l2capture->pool &&
            !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
        {
            GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
            if (caps)
            {
                GstQuery *query = gst_query_new_allocation(caps, FALSE);
                gst_pad_peer_query(decoder->srcpad, query);
                gst_query_unref(query);
                gst_caps_unref(caps);
            }
        }

        gst_aml_v4l2_object_stop(self->v4l2capture);
        self->output_flow = GST_FLOW_OK;
    }

    if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
    {
        GST_ERROR_OBJECT(self, "config output drm mode error");
        goto done;
    }

    ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);

    gst_caps_replace(&self->probed_srccaps, NULL);
    self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
                                                          gst_aml_v4l2_object_get_raw_caps());

    if (gst_caps_is_empty(self->probed_srccaps))
        goto no_raw_format;

    caps = gst_caps_copy(self->probed_srccaps);
    gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
    gst_caps_append(self->probed_srccaps, caps);
    if (ret)
        self->input_state = gst_video_codec_state_ref(state);
    else
        gst_aml_v4l2_error(self, &error);

done:
    return ret;

no_raw_format:
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Decoder on device %s has no supported output format"),
                       self->v4l2output->videodev),
                      (NULL));
    /* this vmethod returns a gboolean, not a GstFlowReturn */
    return FALSE;
}

static gboolean
gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Flushed");

    /* Ensure the processing thread has stopped for the reverse playback
     * discont case */
    if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
    {
        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        gst_pad_stop_task(decoder->srcpad);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    }

    self->output_flow = GST_FLOW_OK;

    gst_aml_v4l2_object_unlock_stop(self->v4l2output);
    gst_aml_v4l2_object_unlock_stop(self->v4l2capture);

    if (self->v4l2output->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);

    /* gst_aml_v4l2_buffer_pool_flush() calls streamon on the capture pool and
     * must be called after gst_aml_v4l2_object_unlock_stop() stopped flushing
     * the buffer pool. */
    if (self->v4l2capture->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    if (TRUE == self->v4l2output->is_svp)
    {
        GstStructure *s;
        GstEvent *event;

        s = gst_structure_new_empty ("IS_SVP");
        event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
        GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
        gst_pad_push_event (decoder->srcpad, event);
        GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
    }

    /* We don't allow renegotiation without carefully disabling the pool */
    if (self->v4l2capture->pool &&
        gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
        return TRUE;

    return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
}

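/* Thin wrapper around the VIDIOC_DECODER_CMD ioctl. Callers in this file use
 * it to request a drain, e.g.:
 *
 *     gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0);
 *
 * ENOTTY simply means the driver does not implement decoder commands, so it
 * is logged at INFO level and the caller falls back to draining manually. */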
static gboolean
gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
{
    struct v4l2_decoder_cmd dcmd = {
        0,
    };

    GST_DEBUG_OBJECT(v4l2object->element,
                     "sending v4l2 decoder command %u with flags %u", cmd, flags);

    if (!GST_AML_V4L2_IS_OPEN(v4l2object))
        return FALSE;

    dcmd.cmd = cmd;
    dcmd.flags = flags;
    if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
        goto dcmd_failed;

    return TRUE;

dcmd_failed:
    if (errno == ENOTTY)
    {
        GST_INFO_OBJECT(v4l2object->element,
                        "Failed to send decoder command %u with flags %u for '%s'. (%s)",
                        cmd, flags, v4l2object->videodev, g_strerror(errno));
    }
    else
    {
        GST_ERROR_OBJECT(v4l2object->element,
                         "Failed to send decoder command %u with flags %u for '%s'. (%s)",
                         cmd, flags, v4l2object->videodev, g_strerror(errno));
    }
    return FALSE;
}

static GstFlowReturn
gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstFlowReturn ret = GST_FLOW_OK;
    GstBuffer *buffer;

    if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
        goto done;

    GST_DEBUG_OBJECT(self, "Finishing decoding");

    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
    {
        GstTask *task = decoder->srcpad->task;

        /* If the decoder stop command succeeded, just wait until processing is
         * finished */
        GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
        GST_OBJECT_LOCK(task);
        while (GST_TASK_STATE(task) == GST_TASK_STARTED)
            GST_TASK_WAIT(task);
        GST_OBJECT_UNLOCK(task);
        ret = GST_FLOW_FLUSHING;
    }
    else
    {
        /* otherwise keep queuing empty buffers until the processing thread has
         * stopped, _pool_process() will return FLUSHING when that happened */
        while (ret == GST_FLOW_OK)
        {
            buffer = gst_buffer_new();
            ret =
                gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
            gst_buffer_unref(buffer);
        }
    }

    /* and ensure the processing thread has stopped in case another error
     * occurred. */
    gst_aml_v4l2_object_unlock(self->v4l2capture);
    gst_pad_stop_task(decoder->srcpad);
    GST_VIDEO_DECODER_STREAM_LOCK(decoder);

    if (ret == GST_FLOW_FLUSHING)
        ret = self->output_flow;

    GST_DEBUG_OBJECT(decoder, "Done draining buffers");

    /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */

done:
    return ret;
}

static GstFlowReturn
gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Draining...");
    gst_aml_v4l2_video_dec_finish(decoder);
    gst_aml_v4l2_video_dec_flush(decoder);

    return GST_FLOW_OK;
}

static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
{
    GstVideoCodecFrame *frame = NULL;
    GList *frames, *l;
    gint count = 0;

    frames = gst_video_decoder_get_frames(decoder);

    for (l = frames; l != NULL; l = l->next)
    {
        GstVideoCodecFrame *f = l->data;

        if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
            frame = f;

        count++;
    }

    if (frame)
    {
        GST_LOG_OBJECT(decoder,
                       "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
                       frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
        gst_video_codec_frame_ref(frame);
    }

    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);

    return frame;
}

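/* Pick the pending GstVideoCodecFrame that matches a decoded buffer: prefer
 * an exact PTS match (within a 10 ns tolerance via ABSDIFF), and fall back
 * to the frame with the oldest valid PTS when the decoder reordered or
 * retimed its output. */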
static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
{
    GstVideoCodecFrame *frame = NULL;
    GList *frames, *l;
    gint count = 0;

    frames = gst_video_decoder_get_frames(decoder);

    for (l = frames; l != NULL; l = l->next)
    {
        GstVideoCodecFrame *f = l->data;

        if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 10) {
            frame = f;
            break;
        } else {
            if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
                frame = f;
        }

        count++;
    }

    if (frame)
    {
        GST_LOG_OBJECT(decoder,
                       "frame is %d %" GST_TIME_FORMAT " and %d frames left",
                       frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
        gst_video_codec_frame_ref(frame);
    }

    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);

    return frame;
}

static gboolean
gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
                                  GstStructure *structure, gpointer user_data)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
    GstVideoAlignment *align = &self->v4l2capture->align;
    GstVideoInfo *info = &self->v4l2capture->info;
    int width, height;

    if (!gst_structure_get_int(structure, "width", &width))
        return TRUE;

    if (!gst_structure_get_int(structure, "height", &height))
        return TRUE;

    if (align->padding_left != 0 || align->padding_top != 0 ||
        height != info->height + align->padding_bottom)
        return TRUE;

    if (height == info->height + align->padding_bottom)
    {
        /* Some drivers may round up width to the padded width */
        if (width == info->width + align->padding_right)
            gst_structure_set(structure,
                              "width", G_TYPE_INT, width - align->padding_right,
                              "height", G_TYPE_INT, height - align->padding_bottom, NULL);
        /* Some drivers may keep visible width and only round up bytesperline */
        else if (width == info->width)
            gst_structure_set(structure,
                              "height", G_TYPE_INT, height - align->padding_bottom, NULL);
    }

    return TRUE;
}

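/* Dequeue and discard one pending V4L2 event. The capture loop uses this to
 * drop a stale SOURCE_CHANGE event after the previous sequence ended with a
 * last-buffer indication. */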
static void
gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
{
    struct v4l2_event evt;
    gint ret;

    memset (&evt, 0x00, sizeof (struct v4l2_event));
    ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
    if (ret < 0)
    {
        GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
        return;
    }

    switch (evt.type)
    {
    case V4L2_EVENT_SOURCE_CHANGE:
        GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
        break;
    case V4L2_EVENT_EOS:
        GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
        break;
    default:
        break;
    }

    return;
}

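/* Capture (srcpad) streaming task. On the first iteration it waits for the
 * driver's SOURCE_CHANGE event, acquires the negotiated format, builds the
 * output caps (with and without the DMABUF caps feature) and activates the
 * capture pool. After that it keeps dequeuing decoded buffers, matches each
 * one to a pending codec frame by PTS and pushes it downstream with
 * gst_video_decoder_finish_frame(). */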
static void
gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstAmlV4l2BufferPool *v4l2_pool;
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    GstBufferPool *pool;
    GstVideoCodecFrame *frame;
    GstBuffer *buffer = NULL;
    GstFlowReturn ret;

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
    {
        GstVideoInfo info;
        GstVideoCodecState *output_state;
        GstCaps *acquired_caps, *available_caps, *caps, *filter;
        GstStructure *st;

        GST_DEBUG_OBJECT(self, "waiting for source change event");
        /* Wait until the SOURCE_CHANGE event is received to get the right video format */
        while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
        {
            ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
            if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
            {
                GST_DEBUG_OBJECT(self, "Received source change event");
                break;
            }
            else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
            {
                GST_DEBUG_OBJECT(self, "Received eos event");
                goto beach;
            }
            else if (ret != GST_FLOW_OK)
            {
                GST_ERROR_OBJECT(self, "dqevent error");
                goto beach;
            }
        }
        self->v4l2capture->need_wait_event = FALSE;

        if (TRUE == self->v4l2output->is_svp)
        {
            GstPad *peer;
            GstStructure *s;
            GstEvent *event;

            peer = gst_pad_get_peer (decoder->srcpad);
            if (peer)
            {
                s = gst_structure_new_empty ("IS_SVP");
                event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
                gst_pad_send_event (peer, event);
                GST_DEBUG_OBJECT(self, "Send SVP Event");
                gst_object_unref (peer);
            }
        }

        if (self->v4l2capture->need_drop_event)
        {
            // drop V4L2_EVENT_SOURCE_CHANGE
            gst_v4l2_drop_event(self->v4l2capture);
            self->v4l2capture->need_drop_event = FALSE;
        }

        if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
            goto not_negotiated;

        /* Create caps from the acquired format, remove the format field */
        acquired_caps = gst_video_info_to_caps(&info);
        GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
        st = gst_caps_get_structure(acquired_caps, 0);
        gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site",
                                    NULL);

        /* Probe currently available pixel formats */
        available_caps = gst_caps_copy(self->probed_srccaps);
        GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);

        /* Replace coded size with visible size, we want to negotiate visible size
         * with downstream, not coded size. */
        gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);

        filter = gst_caps_intersect_full(available_caps, acquired_caps,
                                         GST_CAPS_INTERSECT_FIRST);
        caps = gst_caps_copy(filter);
        gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        gst_caps_append(filter, caps);

        GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
        gst_caps_unref(acquired_caps);
        gst_caps_unref(available_caps);
        caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
        gst_caps_unref(filter);

        GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
        if (gst_caps_is_empty(caps))
        {
            gst_caps_unref(caps);
            goto not_negotiated;
        }

        /* Fixate pixel format */
        caps = gst_caps_fixate(caps);

        GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);

        /* Try to set negotiated format, on success replace acquired format */
        if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
            gst_video_info_from_caps(&info, caps);
        else
            gst_aml_v4l2_clear_error(&error);
        gst_caps_unref(caps);

        output_state = gst_video_decoder_set_output_state(decoder,
                                                          info.finfo->format, info.width, info.height, self->input_state);

        /* Copy the rest of the information, there might be more in the future */
        output_state->info.interlace_mode = info.interlace_mode;
        gst_video_codec_state_unref(output_state);

        if (!gst_video_decoder_negotiate(decoder))
        {
            if (GST_PAD_IS_FLUSHING(decoder->srcpad))
                goto flushing;
            else
                goto not_negotiated;
        }

        /* Ensure our internal pool is activated */
        if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
                                        TRUE))
            goto activate_failed;
    }

    GST_LOG_OBJECT(decoder, "Allocate output buffer");

    v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);

    self->output_flow = GST_FLOW_OK;
    do
    {
        /* We cannot use the base class allocate helper since it takes the internal
         * stream lock. We know that the acquire may need to poll until more frames
         * come in and holding this lock would prevent that.
         */
        pool = gst_video_decoder_get_buffer_pool(decoder);

        /* Pool may be NULL if we started going to READY state */
        if (pool == NULL)
        {
            GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
            ret = GST_FLOW_FLUSHING;
            goto beach;
        }

        ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
        //calculate a new pts for interlace stream
        if (ret == GST_FLOW_OK &&
            self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
        {
            //if buffer pts is valid, reduce 1/2 duration
            if (GST_BUFFER_DURATION_IS_VALID(buffer))
            {
                GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer) / 2;
            }
            GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
            //reset pts
            if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
            {
                double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
                GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
            }
        }

        g_object_unref(pool);

        if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER) {
            GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
            self->v4l2capture->need_drop_event = TRUE;
            goto beach;
        }

        if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
        {
            GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
            gst_aml_v4l2_object_stop(self->v4l2capture);
            return;
        }

        if (ret != GST_FLOW_OK) {
            GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d", ret);
            goto beach;
        }

        GST_LOG_OBJECT(decoder, "Process output buffer");
        ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
        if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
        {
            gst_aml_v4l2_object_stop(self->v4l2capture);
            return;
        }

    } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);

    if (ret != GST_FLOW_OK)
        goto beach;

    frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
    if (frame)
    {
        if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
        {
            double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
            GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
        }
        self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
        frame->output_buffer = buffer;
        frame->pts = GST_BUFFER_TIMESTAMP(buffer);
        frame->duration = GST_BUFFER_DURATION(buffer);
        buffer = NULL;
        ret = gst_video_decoder_finish_frame(decoder, frame);

        if (ret != GST_FLOW_OK)
            goto beach;
    }
    else
    {
        GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
        gst_buffer_unref(buffer);
    }

    return;
    /* ERRORS */
not_negotiated:
{
    GST_ERROR_OBJECT(self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto beach;
}
activate_failed:
{
    GST_ERROR_OBJECT(self, "Buffer pool activation failed");
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Failed to allocate required memory.")),
                      ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto beach;
}
flushing:
{
    ret = GST_FLOW_FLUSHING;
    goto beach;
}
beach:
    GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
                     gst_flow_get_name(ret));

    gst_buffer_replace(&buffer, NULL);
    self->output_flow = ret;
    gst_aml_v4l2_object_unlock(self->v4l2output);
    gst_pad_pause_task(decoder->srcpad);
}

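/* Sink-pad path: lazily configures the OUTPUT queue from the negotiated input
 * state, pushes the codec_data (or the first buffer in byte-stream mode) as
 * the stream header, starts the capture task if needed, and then queues every
 * encoded frame into the V4L2 output buffer pool. */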
static GstFlowReturn
gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
                                    GstVideoCodecFrame *frame)
{
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
    GstFlowReturn ret = GST_FLOW_OK;
    gboolean processed = FALSE;
    GstBuffer *tmp;
    GstTaskState task_state;
    GstCaps *caps;

    GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);

    if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
        goto flushing;

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
    {
        if (!self->input_state)
            goto not_negotiated;
        if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
                                            &error))
            goto not_negotiated;
    }

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
    {
        GstBuffer *codec_data;
        GstCapsFeatures *features = NULL;

        features = gst_caps_get_features(self->input_state->caps, 0);
        if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
        {
            GST_DEBUG_OBJECT(self, "Is SVP");
            self->v4l2output->is_svp = TRUE;
        }

        GST_DEBUG_OBJECT(self, "Sending header");

        codec_data = self->input_state->codec_data;

        /* We are running in byte-stream mode, so we don't know the headers, but
         * we need to send something, otherwise the decoder will refuse to
         * initialize.
         */
        if (codec_data)
        {
            gst_buffer_ref(codec_data);
        }
        else
        {
            codec_data = gst_buffer_ref(frame->input_buffer);
            processed = TRUE;
        }

        /* Ensure input internal pool is active */
        if (!gst_buffer_pool_is_active(pool))
        {
            GstStructure *config = gst_buffer_pool_get_config(pool);
            guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
            guint max = VIDEO_MAX_FRAME;

            // gst_buffer_pool_config_set_params (config, self->input_state->caps,
            //     self->v4l2output->info.size, min, max);
            gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);

            /* There is no reason to refuse this config */
            if (!gst_buffer_pool_set_config(pool, config))
                goto activate_failed;

            if (!gst_buffer_pool_set_active(pool, TRUE))
                goto activate_failed;
        }

        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
        ret =
            gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);

        gst_buffer_unref(codec_data);

        /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
         * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
         * and returns the visible size as width/height and the coded size as
         * padding. */
    }

    task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
    if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
    {
        /* It's possible that the processing thread stopped due to an error */
        if (self->output_flow != GST_FLOW_OK &&
            self->output_flow != GST_FLOW_FLUSHING)
        {
            GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
            ret = self->output_flow;
            goto drop;
        }

        GST_DEBUG_OBJECT(self, "Starting decoding thread");

        /* Start the processing task, when it quits, the task will disable input
         * processing to unlock input if draining, or prevent potential block */
        self->output_flow = GST_FLOW_FLUSHING;
        if (!gst_pad_start_task(decoder->srcpad,
                                (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
            goto start_task_failed;
    }

    if (!processed)
    {
        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
        ret =
            gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);

        if (ret == GST_FLOW_FLUSHING)
        {
            if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
                GST_TASK_STARTED)
                ret = self->output_flow;
            goto drop;
        }
        else if (ret != GST_FLOW_OK)
        {
            goto process_failed;
        }
    }

    /* No need to keep input around */
    tmp = frame->input_buffer;
    frame->input_buffer = gst_buffer_new();
    gst_buffer_copy_into(frame->input_buffer, tmp,
                         GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
                             GST_BUFFER_COPY_META,
                         0, 0);
    gst_buffer_unref(tmp);

    gst_video_codec_frame_unref(frame);
    return ret;

    /* ERRORS */
not_negotiated:
{
    GST_ERROR_OBJECT(self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_aml_v4l2_error(self, &error);
    goto drop;
}
activate_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Failed to allocate required memory.")),
                      ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
}
flushing:
{
    ret = GST_FLOW_FLUSHING;
    goto drop;
}

start_task_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
                      (_("Failed to start decoding thread.")), (NULL));
    ret = GST_FLOW_ERROR;
    goto drop;
}
process_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
                      (_("Failed to process frame.")),
                      ("Maybe due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
}
drop:
{
    gst_video_decoder_drop_frame(decoder, frame);
    return ret;
}
}

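/* After downstream allocation is decided, report latency as
 * min_buffers * per-frame duration of the capture queue. Illustrative only:
 * with 4 buffers at 25 fps (40 ms per frame) this would advertise 160 ms of
 * latency; the real numbers come from the driver at runtime. */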
static gboolean
gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
                                         GstQuery *query)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstClockTime latency;
    gboolean ret = FALSE;

    if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
        ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);

    if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
    {
        latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
        GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
                         self->v4l2capture->min_buffers, self->v4l2capture->duration);
        gst_video_decoder_set_latency(decoder, latency, latency);
    }
    else
    {
        GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
    }

    return ret;
}

static gboolean
gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
{
    gboolean ret = TRUE;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    switch (GST_QUERY_TYPE(query))
    {
    case GST_QUERY_CAPS:
    {
        GstCaps *filter, *result = NULL;
        GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);

        gst_query_parse_caps(query, &filter);

        if (self->probed_srccaps)
            result = gst_caps_ref(self->probed_srccaps);
        else
            result = gst_pad_get_pad_template_caps(pad);

        if (filter)
        {
            GstCaps *tmp = result;
            result =
                gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
            gst_caps_unref(tmp);
        }

        GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);

        gst_query_set_caps_result(query, result);
        gst_caps_unref(result);
        break;
    }

    default:
        ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
        break;
    }

    return ret;
}

static GstCaps *
gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *result;

    result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
                                             filter);

    GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);

    return result;
}

static gboolean
gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    gboolean ret;
    GstEventType type = GST_EVENT_TYPE(event);

    switch (type)
    {
    case GST_EVENT_FLUSH_START:
        GST_DEBUG_OBJECT(self, "flush start");
        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        break;
    default:
        break;
    }

    ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);

    switch (type)
    {
    case GST_EVENT_FLUSH_START:
        /* The processing thread should stop now, wait for it */
        gst_pad_stop_task(decoder->srcpad);
        GST_DEBUG_OBJECT(self, "flush start done");
        break;
    default:
        break;
    }

    return ret;
}

static GstStateChangeReturn
gst_aml_v4l2_video_dec_change_state(GstElement *element,
                                    GstStateChange transition)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
    GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);

    if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    {
        g_atomic_int_set(&self->active, FALSE);
        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        gst_pad_stop_task(decoder->srcpad);
    }

    return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
}

static void
gst_aml_v4l2_video_dec_dispose(GObject *object)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    gst_caps_replace(&self->probed_sinkcaps, NULL);
    gst_caps_replace(&self->probed_srccaps, NULL);

    G_OBJECT_CLASS(parent_class)->dispose(object);
}

static void
gst_aml_v4l2_video_dec_finalize(GObject *object)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    gst_aml_v4l2_object_destroy(self->v4l2capture);
    gst_aml_v4l2_object_destroy(self->v4l2output);

#if GST_IMPORT_LGE_PROP
    if (self->lge_ctxt)
    {
        if (self->lge_ctxt->app_type)
            g_free(self->lge_ctxt->app_type);
        if (self->lge_ctxt->res_info.coretype)
            g_free(self->lge_ctxt->res_info.coretype);
        free(self->lge_ctxt);
    }

#endif

    G_OBJECT_CLASS(parent_class)->finalize(object);
}

static void
gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
{
    /* V4L2 objects are created in subinstance_init */
    self->is_secure_path = FALSE;
#if GST_IMPORT_LGE_PROP
    self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
    memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
#endif
}

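/* Per-subclass instance init: create the two GstAmlV4l2Object helpers, the
 * OUTPUT object bound to the sink pad (compressed input) and the CAPTURE
 * object bound to the src pad (decoded output), both using the subclass'
 * default device node. */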
static void
gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
    GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);

    gst_video_decoder_set_packetized(decoder, TRUE);

    self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                               GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
                                               V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
                                               gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
    self->v4l2output->no_initial_format = TRUE;
    self->v4l2output->keep_aspect = FALSE;
    self->v4l2output->is_svp = FALSE;

    self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                                GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
                                                V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
                                                gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
    self->v4l2capture->need_wait_event = TRUE;
    self->v4l2capture->need_drop_event = FALSE;
}

static void
gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
{
    GstElementClass *element_class;
    GObjectClass *gobject_class;
    GstVideoDecoderClass *video_decoder_class;

    parent_class = g_type_class_peek_parent(klass);

    element_class = (GstElementClass *)klass;
    gobject_class = (GObjectClass *)klass;
    video_decoder_class = (GstVideoDecoderClass *)klass;

    GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
                            "AML V4L2 Video Decoder");

    gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
    gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
    gobject_class->set_property =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
    gobject_class->get_property =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);

    video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
    video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
    video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
    video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
    video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
    video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
    video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
    video_decoder_class->set_format =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
    video_decoder_class->negotiate =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
    video_decoder_class->decide_allocation =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
    /* FIXME propose_allocation or not ? */
    video_decoder_class->handle_frame =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
    video_decoder_class->getcaps =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
    video_decoder_class->src_query =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
    video_decoder_class->sink_event =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);

    element_class->change_state =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);

    gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
#if GST_IMPORT_LGE_PROP
    gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
#endif
}

static void
gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
    GstAmlV4l2VideoDecCData *cdata = data;

    klass->default_device = cdata->device;

    /* Note: gst_pad_template_new() takes the floating ref from the caps */
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
                                                            cdata->sink_caps));
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
                                                            cdata->src_caps));

    gst_element_class_set_metadata(element_class, cdata->longname,
                                   "Codec/Decoder/Video/Hardware", cdata->description,
                                   "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");

    gst_caps_unref(cdata->sink_caps);
    gst_caps_unref(cdata->src_caps);
    g_free(cdata);
}

/* Probing functions */
gboolean
gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
{
    gboolean ret = FALSE;

    if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
        ret = TRUE;

    return ret;
}

static gchar *
gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
                                    const gchar *basename)
{
    gchar *codec_name = NULL;
    gchar *type_name = NULL;
    gboolean got_value = FALSE;

#define SET_META(codec) \
    G_STMT_START \
    { \
        cdata->longname = "AML V4L2 " codec " Decoder"; \
        cdata->description = "Decodes " codec " streams via V4L2 API"; \
        codec_name = g_ascii_strdown(codec, -1); \
    } \
    G_STMT_END

    if (gst_structure_has_name(s, "image/jpeg"))
    {
        SET_META("JPEG");
    }
    else if (gst_structure_has_name(s, "video/mpeg"))
    {
        gint mpegversion = 0;
        gint *list = NULL;
        got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
        if (FALSE == got_value)
        {
            got_value = gst_structure_get_list(s, "mpegversion", &list);
            if (TRUE == got_value && (1 == *list || 2 == *list))
            {
                SET_META("MPEG2");
            }
            else
            {
                SET_META("MPEG4");
            }
        }
        else
        {
            SET_META("MPEG4");
        }
    }
    else if (gst_structure_has_name(s, "video/x-h263"))
    {
        SET_META("H263");
    }
    else if (gst_structure_has_name(s, "video/x-fwht"))
    {
        SET_META("FWHT");
    }
    else if (gst_structure_has_name(s, "video/x-h264"))
    {
        SET_META("H264");
    }
    else if (gst_structure_has_name(s, "video/x-h265"))
    {
        SET_META("H265");
    }
    else if (gst_structure_has_name(s, "video/x-wmv"))
    {
        SET_META("VC1");
    }
    else if (gst_structure_has_name(s, "video/x-vp8"))
    {
        SET_META("VP8");
    }
    else if (gst_structure_has_name(s, "video/x-vp9"))
    {
        SET_META("VP9");
    }
    else if (gst_structure_has_name(s, "video/x-av1"))
    {
        SET_META("AV1");
    }
    else if (gst_structure_has_name(s, "video/x-bayer"))
    {
        SET_META("BAYER");
    }
    else if (gst_structure_has_name(s, "video/x-sonix"))
    {
        SET_META("SONIX");
    }
    else if (gst_structure_has_name(s, "video/x-pwc1"))
    {
        SET_META("PWC1");
    }
    else if (gst_structure_has_name(s, "video/x-pwc2"))
    {
        SET_META("PWC2");
    }
    else
    {
        /* This code should be kept in sync with the exposed CODEC type of format
         * from gstamlv4l2object.c. This warning will only occur in case we forget
         * to also add a format here. */
        gchar *s_str = gst_structure_to_string(s);
        g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
                  "bug, please report at https://bugs.gnome.org",
                  s_str);
        g_free(s_str);
    }

    if (codec_name)
    {
        type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
        if (g_type_from_name(type_name) != 0)
        {
            g_free(type_name);
            type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
        }

        g_free(codec_name);
    }

    return type_name;
#undef SET_META
}

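/* Registration helper called from the plugin's probing code: for every codec
 * structure found in the device's sink caps it builds a cdata blob (device
 * path, sink/src caps with and without the DMABUF feature) and registers one
 * element subclass per codec, e.g. "amlv4l2h264dec" for video/x-h264. */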
void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
                                     const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
{
    gint i;

    for (i = 0; i < gst_caps_get_size(sink_caps); i++)
    {
        GstAmlV4l2VideoDecCData *cdata;
        GstStructure *s;
        GTypeQuery type_query;
        GTypeInfo type_info = {
            0,
        };
        GType type, subtype;
        gchar *type_name;

        s = gst_caps_get_structure(sink_caps, i);

        cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
        cdata->device = g_strdup(device_path);
        cdata->sink_caps = gst_caps_new_empty();
        gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
        gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
        gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        cdata->src_caps = gst_caps_copy(src_caps);
        gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
        type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);

        /* Skip over if we hit an unmapped type */
        if (!type_name)
        {
            g_free(cdata);
            continue;
        }

        type = gst_aml_v4l2_video_dec_get_type();
        g_type_query(type, &type_query);
        memset(&type_info, 0, sizeof(type_info));
        type_info.class_size = type_query.class_size;
        type_info.instance_size = type_query.instance_size;
        type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
        type_info.class_data = cdata;
        type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;

        subtype = g_type_register_static(type, type_name, &type_info, 0);
        if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
                                  subtype))
            GST_WARNING("Failed to register plugin '%s'", type_name);

        g_free(type_name);
    }
}

#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
{
    g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
                                    g_param_spec_object("resource-info", "resource-info",
                                                        "After acquisition of H/W resources is completed, allocated resource information must be delivered to the decoder and the sink",
                                                        GST_TYPE_STRUCTURE,
                                                        G_PARAM_READABLE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
                                    g_param_spec_uint64("decoded-size", "decoded-size",
                                                        "Total amount of video ES decoded by this element since the pipeline was constructed or last flushed, in bytes.",
                                                        0, G_MAXUINT64,
                                                        0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
                                    g_param_spec_uint64("undecoded-size", "undecoded-size",
                                                        "Total amount of ES data queued to this element but not yet decoded, in bytes.",
                                                        0, G_MAXUINT64,
                                                        0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_APP_TYPE,
                                    g_param_spec_string("app-type", "app-type",
                                                        "Set application type.",
                                                        "default_app",
                                                        G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
                                    g_param_spec_boolean("clip-mode", "clip-mode",
                                                         "When seeking, content plays faster for a while in order to skip frames.",
                                                         FALSE,
                                                         G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
#endif