xuesong.jiangae1548e2022-05-06 16:38:46 +08001/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
33#include <string.h>
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
40#ifdef GST_VIDEO_DECODER_STREAM_LOCK
41#undef GST_VIDEO_DECODER_STREAM_LOCK
42#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
43 { \
fei.denge9458472023-04-18 02:05:48 +000044 GST_TRACE("aml v4l2 dec locking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080045 g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000046 GST_TRACE("aml v4l2 dec locked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080047 }
48#endif
49
50#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
51#undef GST_VIDEO_DECODER_STREAM_UNLOCK
52#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
53 { \
fei.denge9458472023-04-18 02:05:48 +000054 GST_TRACE("aml v4l2 dec unlocking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080055 g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000056 GST_TRACE("aml v4l2 dec unlocked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080057 }
58#endif
xuesong.jiang61ea8012022-05-12 15:38:17 +080059
hanghang.luo36df2852022-08-24 15:02:27 +080060#ifndef ABSDIFF
61#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
62#endif
63
xuesong.jiang61ea8012022-05-12 15:38:17 +080064#if GST_IMPORT_LGE_PROP
65typedef struct _GstAmlResourceInfo
66{
67 gchar *coretype;
68 gint videoport;
69 gint audioport;
70 gint maxwidth;
71 gint maxheight;
72 gint mixerport;
73} GstAmlResourceInfo;
74
75struct _GstAmlV4l2VideoDecLgeCtxt
76{
77 GstAmlResourceInfo res_info;
78 guint64 dec_size;
79 guint64 undec_size;
80 gchar *app_type;
81 gboolean clip_mode;
82};
83#endif
84
xuesong.jiangae1548e2022-05-06 16:38:46 +080085typedef struct
86{
87 gchar *device;
88 GstCaps *sink_caps;
89 GstCaps *src_caps;
90 const gchar *longname;
91 const gchar *description;
92} GstAmlV4l2VideoDecCData;
93
94enum
95{
96 PROP_0,
xuesong.jiang61ea8012022-05-12 15:38:17 +080097 V4L2_STD_OBJECT_PROPS,
98#if GST_IMPORT_LGE_PROP
99 LGE_RESOURCE_INFO,
100 LGE_DECODE_SIZE,
101 LGE_UNDECODE_SIZE,
102 LGE_APP_TYPE,
103 LGE_CLIP_MODE
104#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800105};
106
107#define gst_aml_v4l2_video_dec_parent_class parent_class
108G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
109 GST_TYPE_VIDEO_DECODER);
110
111static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
xuesong.jiang61ea8012022-05-12 15:38:17 +0800112#if GST_IMPORT_LGE_PROP
113static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
114#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800115
116static void
117gst_aml_v4l2_video_dec_set_property(GObject *object,
118 guint prop_id, const GValue *value, GParamSpec *pspec)
119{
120 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
121
122 switch (prop_id)
123 {
124 case PROP_CAPTURE_IO_MODE:
125 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
126 prop_id, value, pspec))
127 {
128 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
129 }
130 break;
131 case PROP_DUMP_FRAME_LOCATION:
132 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
133 prop_id, value, pspec))
134 {
135 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
136 }
137 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800138#if GST_IMPORT_LGE_PROP
139 case LGE_RESOURCE_INFO:
140 {
141 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
 142 GstStructure *r_info = g_value_get_boxed(value);
143 if (r_info)
144 {
145 if (gst_structure_has_field(r_info, "coretype"))
146 {
147 if (self->lge_ctxt->res_info.coretype)
148 g_free(self->lge_ctxt->res_info.coretype);
149 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
150 }
151 if (gst_structure_has_field(r_info, "videoport"))
152 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
153 if (gst_structure_has_field(r_info, "audioport"))
154 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
155 if (gst_structure_has_field(r_info, "maxwidth"))
156 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
157 if (gst_structure_has_field(r_info, "maxheight"))
158 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
159 if (gst_structure_has_field(r_info, "mixerport"))
160 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
161 }
162 break;
163 }
164 case LGE_APP_TYPE:
165 {
166 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
167 if (self->lge_ctxt->app_type)
168 g_free(self->lge_ctxt->app_type);
169 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
170 break;
171 }
172 case LGE_CLIP_MODE:
173 {
174 GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
 175 self->lge_ctxt->clip_mode = g_value_get_boolean(value);
176 break;
177 }
178#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800179 /* By default, only set on output */
180 default:
181 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
182 prop_id, value, pspec))
183 {
184 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
185 }
186 break;
187 }
188}
189
190static void
191gst_aml_v4l2_video_dec_get_property(GObject *object,
192 guint prop_id, GValue *value, GParamSpec *pspec)
193{
194 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
195
196 switch (prop_id)
197 {
198 case PROP_CAPTURE_IO_MODE:
199 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
200 prop_id, value, pspec))
201 {
202 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
203 }
204 break;
205
xuesong.jiang61ea8012022-05-12 15:38:17 +0800206#if GST_IMPORT_LGE_PROP
207 case LGE_DECODE_SIZE:
208 {
209 GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
210 self->lge_ctxt->dec_size = -1;
 211 g_value_set_uint64(value, self->lge_ctxt->dec_size);
212 break;
213 }
214 case LGE_UNDECODE_SIZE:
215 {
216 GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
217 self->lge_ctxt->undec_size = -1;
 218 g_value_set_uint64(value, self->lge_ctxt->undec_size);
219 break;
220 }
221#endif
222
xuesong.jiangae1548e2022-05-06 16:38:46 +0800223 /* By default read from output */
224 default:
225 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
226 prop_id, value, pspec))
227 {
228 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
229 }
230 break;
231 }
232}
233
234static gboolean
235gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
236{
237 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
238 GstCaps *codec_caps;
239
240 GST_DEBUG_OBJECT(self, "Opening");
241
242 if (!gst_aml_v4l2_object_open(self->v4l2output))
243 goto failure;
244
245 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
246 goto failure;
247
248 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
249 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
250 codec_caps);
251 gst_caps_unref(codec_caps);
252
253 if (gst_caps_is_empty(self->probed_sinkcaps))
254 goto no_encoded_format;
255
256 return TRUE;
257
258no_encoded_format:
259 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
260 (_("Decoder on device %s has no supported input format"),
261 self->v4l2output->videodev),
262 (NULL));
263 goto failure;
264
265failure:
266 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
267 gst_aml_v4l2_object_close(self->v4l2output);
268
269 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
270 gst_aml_v4l2_object_close(self->v4l2capture);
271
272 gst_caps_replace(&self->probed_srccaps, NULL);
273 gst_caps_replace(&self->probed_sinkcaps, NULL);
274
275 return FALSE;
276}
277
278static gboolean
279gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
280{
281 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
282
283 GST_DEBUG_OBJECT(self, "Closing");
284
285 gst_aml_v4l2_object_close(self->v4l2output);
286 gst_aml_v4l2_object_close(self->v4l2capture);
287 gst_caps_replace(&self->probed_srccaps, NULL);
288 gst_caps_replace(&self->probed_sinkcaps, NULL);
289
290 return TRUE;
291}
292
293static gboolean
294gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
295{
296 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
297
298 GST_DEBUG_OBJECT(self, "Starting");
299
300 gst_aml_v4l2_object_unlock(self->v4l2output);
301 g_atomic_int_set(&self->active, TRUE);
302 self->output_flow = GST_FLOW_OK;
303
304 return TRUE;
305}
306
307static gboolean
308gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
309{
310 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
311
312 GST_DEBUG_OBJECT(self, "Stopping");
313
314 gst_aml_v4l2_object_unlock(self->v4l2output);
315 gst_aml_v4l2_object_unlock(self->v4l2capture);
316
317 /* Wait for capture thread to stop */
318 gst_pad_stop_task(decoder->srcpad);
319
320 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
321 self->output_flow = GST_FLOW_OK;
322 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
323
324 /* Should have been flushed already */
325 g_assert(g_atomic_int_get(&self->active) == FALSE);
326
327 gst_aml_v4l2_object_stop(self->v4l2output);
328 gst_aml_v4l2_object_stop(self->v4l2capture);
329
330 if (self->input_state)
331 {
332 gst_video_codec_state_unref(self->input_state);
333 self->input_state = NULL;
334 }
335
336 GST_DEBUG_OBJECT(self, "Stopped");
337
338 return TRUE;
339}
340
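/* Sink-side format negotiation: configure the V4L2 output (encoded) queue for the
 * new input caps. On a caps change, drain the decoder and stop both queues first,
 * then re-probe the raw formats the capture side can produce. */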
341static gboolean
342gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
343 GstVideoCodecState *state)
344{
345 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
346 gboolean ret = TRUE;
347 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
348 GstCaps *caps;
349
350 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
351 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
352
353 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
354 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
355
356 if (self->input_state)
357 {
358 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
359 {
360 GST_DEBUG_OBJECT(self, "Compatible caps");
361 goto done;
362 }
363 gst_video_codec_state_unref(self->input_state);
364 self->input_state = NULL;
365
366 gst_aml_v4l2_video_dec_finish(decoder);
367 gst_aml_v4l2_object_stop(self->v4l2output);
368
 369 /* The renegotiation flow doesn't blend with the base class flow. To properly
 370 * stop the capture pool, if the buffers can't be orphaned, we need to
 371 * reclaim our buffers, which will happen through the allocation query.
 372 * The allocation query is triggered by gst_video_decoder_negotiate() which
 373 * requires the output caps to be set, but we can't know this information
 374 * as we rely on the decoder, which requires the capture queue to be
 375 * stopped.
 376 *
 377 * To work around this issue, we simply run an allocation query with the
 378 * old negotiated caps in order to drain/reclaim our buffers. That sidesteps
 379 * the complexity and should not have much impact on performance since the
 380 * following allocation query will happen on a drained pipeline and won't
 381 * block. */
382 if (self->v4l2capture->pool &&
383 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
384 {
385 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
386 if (caps)
387 {
388 GstQuery *query = gst_query_new_allocation(caps, FALSE);
389 gst_pad_peer_query(decoder->srcpad, query);
390 gst_query_unref(query);
391 gst_caps_unref(caps);
392 }
393 }
394
395 gst_aml_v4l2_object_stop(self->v4l2capture);
396 self->output_flow = GST_FLOW_OK;
397 }
398
399 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
400 {
401 GST_ERROR_OBJECT(self, "config output drm mode error");
402 goto done;
403 }
404
xuesong.jiang22a9b112023-05-24 09:01:59 +0000405 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
406 {
407 GST_ERROR_OBJECT(self, "config output stream mode error");
408 goto done;
409 }
410
xuesong.jiangae1548e2022-05-06 16:38:46 +0800411 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
412
413 gst_caps_replace(&self->probed_srccaps, NULL);
414 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
415 gst_aml_v4l2_object_get_raw_caps());
416
417 if (gst_caps_is_empty(self->probed_srccaps))
418 goto no_raw_format;
419
420 caps = gst_caps_copy(self->probed_srccaps);
421 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
422 gst_caps_append(self->probed_srccaps, caps);
423 if (ret)
424 self->input_state = gst_video_codec_state_ref(state);
425 else
426 gst_aml_v4l2_error(self, &error);
427
428done:
429 return ret;
430
431no_raw_format:
432 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
433 (_("Decoder on device %s has no supported output format"),
434 self->v4l2output->videodev),
435 (NULL));
 436 return FALSE;
437}
438
439static gboolean
440gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
441{
442 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
443
444 GST_DEBUG_OBJECT(self, "Flushed");
445
446 /* Ensure the processing thread has stopped for the reverse playback
 447 * discont case */
448 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
449 {
450 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
451
452 gst_aml_v4l2_object_unlock(self->v4l2output);
453 gst_aml_v4l2_object_unlock(self->v4l2capture);
454 gst_pad_stop_task(decoder->srcpad);
455 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
456 }
457
458 self->output_flow = GST_FLOW_OK;
459
460 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
461 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
462
463 if (self->v4l2output->pool)
464 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
465
 466 /* gst_aml_v4l2_buffer_pool_flush() calls streamon on the capture pool and must be
 467 * called after gst_aml_v4l2_object_unlock_stop() has stopped flushing the buffer
468 * pool. */
469 if (self->v4l2capture->pool)
470 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
471
472 return TRUE;
473}
474
475static gboolean
476gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
477{
478 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
479
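    /* For secure (SVP) streams, push a custom sticky IS_SVP event downstream so
     * that later elements can prepare for the secure video path before caps
     * negotiation. */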
xuesong.jiang681d3602022-06-24 21:23:35 +0800480 if (TRUE == self->v4l2output->is_svp)
481 {
482 GstStructure *s;
483 GstEvent *event;
484
485 s = gst_structure_new_empty ("IS_SVP");
486 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
487 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
488 gst_pad_push_event (decoder->srcpad, event);
489 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
490 }
491
xuesong.jiangae1548e2022-05-06 16:38:46 +0800492 /* We don't allow renegotiation without carefully disabling the pool */
493 if (self->v4l2capture->pool &&
494 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
495 return TRUE;
496
497 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
498}
499
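/* Thin wrapper around VIDIOC_DECODER_CMD; returns FALSE if the device is not
 * open or the ioctl fails (ENOTTY simply means the command is unsupported). */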
500static gboolean
501gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
502{
503 struct v4l2_decoder_cmd dcmd = {
504 0,
505 };
506
507 GST_DEBUG_OBJECT(v4l2object->element,
508 "sending v4l2 decoder command %u with flags %u", cmd, flags);
509
510 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
511 return FALSE;
512
513 dcmd.cmd = cmd;
514 dcmd.flags = flags;
515 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
516 goto dcmd_failed;
517
518 return TRUE;
519
520dcmd_failed:
521 if (errno == ENOTTY)
522 {
523 GST_INFO_OBJECT(v4l2object->element,
524 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
525 cmd, flags, v4l2object->videodev, g_strerror(errno));
526 }
527 else
528 {
529 GST_ERROR_OBJECT(v4l2object->element,
530 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
531 cmd, flags, v4l2object->videodev, g_strerror(errno));
532 }
533 return FALSE;
534}
535
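/* Drain the decoder: prefer V4L2_DEC_CMD_STOP and wait for the capture task to
 * finish; otherwise keep queuing empty output buffers until processing stops,
 * then stop the capture task. */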
536static GstFlowReturn
537gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
538{
539 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
540 GstFlowReturn ret = GST_FLOW_OK;
541 GstBuffer *buffer;
542
543 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
544 goto done;
545
546 GST_DEBUG_OBJECT(self, "Finishing decoding");
547
548 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
549
550 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
551 {
552 GstTask *task = decoder->srcpad->task;
553
554 /* If the decoder stop command succeeded, just wait until processing is
555 * finished */
556 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
557 GST_OBJECT_LOCK(task);
558 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
559 GST_TASK_WAIT(task);
560 GST_OBJECT_UNLOCK(task);
561 ret = GST_FLOW_FLUSHING;
562 }
563 else
564 {
565 /* otherwise keep queuing empty buffers until the processing thread has
566 * stopped, _pool_process() will return FLUSHING when that happened */
567 while (ret == GST_FLOW_OK)
568 {
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800569 GST_DEBUG_OBJECT(self, "queue empty output buf");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800570 buffer = gst_buffer_new();
571 ret =
572 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
573 gst_buffer_unref(buffer);
574 }
575 }
576
577 /* and ensure the processing thread has stopped in case another error
 578 * occurred. */
579 gst_aml_v4l2_object_unlock(self->v4l2capture);
580 gst_pad_stop_task(decoder->srcpad);
581 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
582
583 if (ret == GST_FLOW_FLUSHING)
584 ret = self->output_flow;
585
586 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
587
 588 /* TODO Shall we clean up any ref'd frame to work around broken decoders? */
589
590done:
591 return ret;
592}
593
594static GstFlowReturn
595gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
596{
597 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
598
599 GST_DEBUG_OBJECT(self, "Draining...");
600 gst_aml_v4l2_video_dec_finish(decoder);
601 gst_aml_v4l2_video_dec_flush(decoder);
602
603 return GST_FLOW_OK;
604}
605
606static GstVideoCodecFrame *
607gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
608{
609 GstVideoCodecFrame *frame = NULL;
610 GList *frames, *l;
611 gint count = 0;
612
613 frames = gst_video_decoder_get_frames(decoder);
614
615 for (l = frames; l != NULL; l = l->next)
616 {
617 GstVideoCodecFrame *f = l->data;
618
619 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
620 frame = f;
621
622 count++;
623 }
624
625 if (frame)
626 {
627 GST_LOG_OBJECT(decoder,
628 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
629 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
630 gst_video_codec_frame_ref(frame);
631 }
632
633 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
634
635 return frame;
636}
637
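/* Frame mode: pick the pending frame whose pts matches the decoded buffer's pts
 * (within 1us), falling back to the lowest-pts (oldest) pending frame when no
 * match is found. */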
fei.dengbee20862022-06-14 14:59:48 +0800638static GstVideoCodecFrame *
xuesong.jiange24aef92023-06-16 06:39:10 +0000639gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
fei.dengbee20862022-06-14 14:59:48 +0800640{
641 GstVideoCodecFrame *frame = NULL;
642 GList *frames, *l;
643 gint count = 0;
644
xuesong.jiange24aef92023-06-16 06:39:10 +0000645 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
646
fei.dengbee20862022-06-14 14:59:48 +0800647 frames = gst_video_decoder_get_frames(decoder);
648
649 for (l = frames; l != NULL; l = l->next)
650 {
651 GstVideoCodecFrame *f = l->data;
fei.denge9458472023-04-18 02:05:48 +0000652
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800653 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
fei.dengbee20862022-06-14 14:59:48 +0800654 frame = f;
fei.dengbee20862022-06-14 14:59:48 +0800655 } else {
656 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
657 frame = f;
658 }
659
660 count++;
661 }
662
663 if (frame)
664 {
665 GST_LOG_OBJECT(decoder,
666 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
667 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
668 gst_video_codec_frame_ref(frame);
669 }
670
671 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
672
xuesong.jiange24aef92023-06-16 06:39:10 +0000673 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
fei.dengbee20862022-06-14 14:59:48 +0800674 return frame;
675}
676
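/* Stream mode: input is not framed one-to-one, so walk the pending frames,
 * release every frame older than the decoded buffer's pts and return the one
 * that matches (within 1us). */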
xuesong.jiange24aef92023-06-16 06:39:10 +0000677static GstVideoCodecFrame *
678gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
679{
680 GstVideoCodecFrame *frame = NULL;
681 GList *frames, *l;
682 gint count = 0;
683
684 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
685
686 frames = gst_video_decoder_get_frames(decoder);
687 guint frames_len = 0;
688 frames_len = g_list_length(frames);
689 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
690
 691 frame = frames ? frames->data : NULL;
692
693 for (l = frames; l != NULL; l = l->next)
694 {
695 GstVideoCodecFrame *f = l->data;
696
697 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
698 {
699 /* found the right frame */
700 frame = f;
701 break;
702 }
703 else if(GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
704 {
 705 GST_LOG_OBJECT(decoder,
 706 "stream mode drop frame %d %" GST_TIME_FORMAT,
 707 f->system_frame_number, GST_TIME_ARGS(f->pts));
708
709 gst_video_codec_frame_ref(f);
710 // gst_video_decoder_drop_frame(decoder, f);
711 gst_video_decoder_release_frame(decoder, f);
712 }
713 else
714 {
 715 GST_LOG_OBJECT (decoder, "frame %d pts is newer than target, keep current candidate", f->system_frame_number);
716 }
717 }
718
719 if (frame)
720 {
721 guint l_len = 0;
722 l = gst_video_decoder_get_frames(decoder);
723 l_len = g_list_length(l);
724 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
725
726 GST_LOG_OBJECT(decoder,
727 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
728 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
729 gst_video_codec_frame_ref(frame);
730 }
731
732 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
733
734 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
735 return frame;
736}
737
738static GstVideoCodecFrame *
739gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
740{
741 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
742 if (self->v4l2output->stream_mode)
743 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
744 else
745 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
746}
747
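/* Caps map function: strip the driver's padding from the probed caps so that the
 * visible width/height is what gets negotiated with downstream. */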
xuesong.jiangae1548e2022-05-06 16:38:46 +0800748static gboolean
749gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
750 GstStructure *structure, gpointer user_data)
751{
752 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
753 GstVideoAlignment *align = &self->v4l2capture->align;
754 GstVideoInfo *info = &self->v4l2capture->info;
755 int width, height;
756
757 if (!gst_structure_get_int(structure, "width", &width))
758 return TRUE;
759
760 if (!gst_structure_get_int(structure, "height", &height))
761 return TRUE;
762
763 if (align->padding_left != 0 || align->padding_top != 0 ||
764 height != info->height + align->padding_bottom)
765 return TRUE;
766
767 if (height == info->height + align->padding_bottom)
768 {
 769 /* Some drivers may round up width to the padded width */
770 if (width == info->width + align->padding_right)
771 gst_structure_set(structure,
772 "width", G_TYPE_INT, width - align->padding_right,
773 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
774 /* Some drivers may keep visible width and only round up bytesperline */
775 else if (width == info->width)
776 gst_structure_set(structure,
777 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
778 }
779
780 return TRUE;
781}
782
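/* Dequeue and discard one pending V4L2 event (used to swallow a stale
 * SOURCE_CHANGE or EOS event). */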
783static void
sheng.liubcf036c2022-06-21 15:55:42 +0800784gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
sheng.liub56bbc52022-06-21 11:02:33 +0800785{
786 struct v4l2_event evt;
787 gint ret;
788
789 memset (&evt, 0x00, sizeof (struct v4l2_event));
790 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
791 if (ret < 0)
792 {
793 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
794 return;
795 }
796
797 switch (evt.type)
798 {
799 case V4L2_EVENT_SOURCE_CHANGE:
800 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
801 break;
802 case V4L2_EVENT_EOS:
803 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
804 break;
805 default:
806 break;
807 }
808
809 return;
810}
811
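/* Capture (srcpad) task: waits for the initial source-change event, negotiates
 * the output format, then keeps dequeuing decoded buffers, pairing each one with
 * its pending GstVideoCodecFrame and pushing it downstream. */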
812static void
xuesong.jiangae1548e2022-05-06 16:38:46 +0800813gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
814{
815 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
816 GstAmlV4l2BufferPool *v4l2_pool;
817 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
818 GstBufferPool *pool;
819 GstVideoCodecFrame *frame;
820 GstBuffer *buffer = NULL;
821 GstFlowReturn ret;
822
823 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
824 {
825 GstVideoInfo info;
826 GstVideoCodecState *output_state;
xuesong.jiang282ca572023-05-05 09:03:32 +0000827 GstCaps *acquired_caps, *available_caps, *caps, *filter;
xuesong.jiangae1548e2022-05-06 16:38:46 +0800828 GstStructure *st;
829
830 GST_DEBUG_OBJECT(self, "waitting source change event");
831 /* Wait until received SOURCE_CHANGE event to get right video format */
832 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
833 {
834 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
835 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
836 {
837 GST_DEBUG_OBJECT(self, "Received source change event");
838 break;
839 }
840 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
841 {
842 GST_DEBUG_OBJECT(self, "Received eos event");
843 goto beach;
844 }
845 else if (ret != GST_FLOW_OK)
846 {
847 GST_ERROR_OBJECT(self, "dqevent error");
848 goto beach;
849 }
850 }
851 self->v4l2capture->need_wait_event = FALSE;
852
sheng.liu0c77f6c2022-06-17 21:33:20 +0800853 if (TRUE == self->v4l2output->is_svp)
854 {
855 GstPad *peer;
856 GstStructure *s;
857 GstEvent *event;
858
859 peer = gst_pad_get_peer (decoder->srcpad);
860 if (peer)
861 {
862 s = gst_structure_new_empty ("IS_SVP");
863 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
864 gst_pad_send_event (peer, event);
865 GST_DEBUG_OBJECT(self, "Send SVP Event");
866 gst_object_unref (peer);
867 }
868 }
869
sheng.liub56bbc52022-06-21 11:02:33 +0800870 if (self->v4l2capture->need_drop_event)
871 {
872 // drop V4L2_EVENT_SOURCE_CHANGE
873 gst_v4l2_drop_event(self->v4l2capture);
874 self->v4l2capture->need_drop_event = FALSE;
875 }
876
xuesong.jiangae1548e2022-05-06 16:38:46 +0800877 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
878 goto not_negotiated;
879
880 /* Create caps from the acquired format, remove the format field */
881 acquired_caps = gst_video_info_to_caps(&info);
882 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
883 st = gst_caps_get_structure(acquired_caps, 0);
xuesong.jiang282ca572023-05-05 09:03:32 +0000884 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
885
886 /* Probe currently available pixel formats */
887 available_caps = gst_caps_copy(self->probed_srccaps);
888 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
889
 890 /* Replace the coded size with the visible size; we want to negotiate the
 891 * visible size with downstream, not the coded size. */
892 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
893
894 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800895 caps = gst_caps_copy(filter);
896 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
897 gst_caps_append(filter, caps);
898
899 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
900 gst_caps_unref(acquired_caps);
xuesong.jiang282ca572023-05-05 09:03:32 +0000901 gst_caps_unref(available_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800902 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
903 gst_caps_unref(filter);
904
905 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
906 if (gst_caps_is_empty(caps))
907 {
908 gst_caps_unref(caps);
909 goto not_negotiated;
910 }
911
912 /* Fixate pixel format */
913 caps = gst_caps_fixate(caps);
914
915 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
916
917 /* Try to set negotiated format, on success replace acquired format */
918 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
919 gst_video_info_from_caps(&info, caps);
920 else
921 gst_aml_v4l2_clear_error(&error);
922 gst_caps_unref(caps);
923
924 output_state = gst_video_decoder_set_output_state(decoder,
925 info.finfo->format, info.width, info.height, self->input_state);
926
927 /* Copy the rest of the information, there might be more in the future */
928 output_state->info.interlace_mode = info.interlace_mode;
929 gst_video_codec_state_unref(output_state);
930
931 if (!gst_video_decoder_negotiate(decoder))
932 {
933 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
934 goto flushing;
935 else
936 goto not_negotiated;
937 }
938
939 /* Ensure our internal pool is activated */
940 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
941 TRUE))
942 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800943
944 g_mutex_lock(&self->res_chg_lock);
945 GST_LOG_OBJECT(decoder, "signal resolution changed");
946 self->is_res_chg = FALSE;
947 g_cond_signal(&self->res_chg_cond);
948 g_mutex_unlock(&self->res_chg_lock);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800949 }
950
951 GST_LOG_OBJECT(decoder, "Allocate output buffer");
952
953 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
954
955 self->output_flow = GST_FLOW_OK;
956 do
957 {
 958 /* We cannot use the base class allocate helper since it takes the internal
 959 * stream lock. We know that the acquire may need to poll until more frames
 960 * come in and holding this lock would prevent that.
961 */
962 pool = gst_video_decoder_get_buffer_pool(decoder);
963
964 /* Pool may be NULL if we started going to READY state */
965 if (pool == NULL)
966 {
fei.dengbee20862022-06-14 14:59:48 +0800967 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800968 ret = GST_FLOW_FLUSHING;
969 goto beach;
970 }
971
972 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
fei.dengccc89632022-07-15 19:10:17 +0800973 // interlaced (interleaved) stream: adjust the duration and calculate a new pts
974 if (ret == GST_FLOW_OK &&
975 self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
976 {
 977 // if the buffer duration is valid, halve it for the per-field output
978 if (GST_BUFFER_DURATION_IS_VALID(buffer))
979 {
980 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
981 }
982 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
983 //reset pts
fei.denga6ae3282022-07-15 19:50:30 +0800984 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
fei.dengccc89632022-07-15 19:10:17 +0800985 {
986 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
987 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
988 }
989 }
990
xuesong.jiangae1548e2022-05-06 16:38:46 +0800991 g_object_unref(pool);
992
sheng.liu4e01a472022-06-21 15:38:25 +0800993 if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER) {
sheng.liubcf036c2022-06-21 15:55:42 +0800994 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
sheng.liub56bbc52022-06-21 11:02:33 +0800995 self->v4l2capture->need_drop_event = TRUE;
996 goto beach;
997 }
998
sheng.liu8d18ed22022-05-26 17:28:15 +0800999 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1000 {
1001 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001002
1003 g_mutex_lock (&self->res_chg_lock);
1004 self->is_res_chg = TRUE;
1005 g_mutex_unlock (&self->res_chg_lock);
1006
sheng.liu8d18ed22022-05-26 17:28:15 +08001007 gst_aml_v4l2_object_stop(self->v4l2capture);
1008 return;
1009 }
1010
fei.dengbee20862022-06-14 14:59:48 +08001011 if (ret != GST_FLOW_OK) {
1012 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001013 goto beach;
fei.dengbee20862022-06-14 14:59:48 +08001014 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001015
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001016 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001017 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
1018 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1019 {
1020 gst_aml_v4l2_object_stop(self->v4l2capture);
1021 return;
1022 }
1023
1024 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
1025
1026 if (ret != GST_FLOW_OK)
1027 goto beach;
1028
fei.dengbee20862022-06-14 14:59:48 +08001029 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001030 if (frame)
1031 {
zengliang.li32cb11e2022-11-24 12:10:26 +08001032 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1033 {
zengliang.li92ff6822023-06-06 07:12:52 +00001034 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1035 {
1036 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1037 {
1038 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1039 }
1040 else
1041 {
 1042 GST_WARNING_OBJECT (decoder, "no pts/dts baseline available to calculate pts");
1043 goto beach;
1044 }
1045 }
1046 else
1047 {
1048 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1049 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1050 }
zengliang.li32cb11e2022-11-24 12:10:26 +08001051 }
fei.dengccc89632022-07-15 19:10:17 +08001052 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001053 frame->output_buffer = buffer;
fei.dengccc89632022-07-15 19:10:17 +08001054 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1055 frame->duration = GST_BUFFER_DURATION(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001056 buffer = NULL;
1057 ret = gst_video_decoder_finish_frame(decoder, frame);
1058
1059 if (ret != GST_FLOW_OK)
1060 goto beach;
1061 }
1062 else
1063 {
1064 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1065 gst_buffer_unref(buffer);
1066 }
1067
1068 return;
1069 /* ERRORS */
1070not_negotiated:
1071{
1072 GST_ERROR_OBJECT(self, "not negotiated");
1073 ret = GST_FLOW_NOT_NEGOTIATED;
1074 goto beach;
1075}
1076activate_failed:
1077{
1078 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1079 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1080 (_("Failed to allocate required memory.")),
1081 ("Buffer pool activation failed"));
1082 ret = GST_FLOW_ERROR;
1083 goto beach;
1084}
1085flushing:
1086{
1087 ret = GST_FLOW_FLUSHING;
1088 goto beach;
1089}
1090beach:
1091 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1092 gst_flow_get_name(ret));
1093
1094 gst_buffer_replace(&buffer, NULL);
1095 self->output_flow = ret;
1096 gst_aml_v4l2_object_unlock(self->v4l2output);
1097 gst_pad_pause_task(decoder->srcpad);
1098}
1099
1100static GstFlowReturn
1101gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1102 GstVideoCodecFrame *frame)
1103{
1104 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1105 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1106 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1107 GstFlowReturn ret = GST_FLOW_OK;
1108 gboolean processed = FALSE;
1109 GstBuffer *tmp;
1110 GstTaskState task_state;
1111 GstCaps *caps;
1112
1113 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1114
1115 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1116 goto flushing;
1117
1118 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1119 {
1120 if (!self->input_state)
1121 goto not_negotiated;
1122 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1123 &error))
1124 goto not_negotiated;
1125 }
1126
1127 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1128 {
1129 GstBuffer *codec_data;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001130 GstCapsFeatures *features = NULL;
1131
1132 features = gst_caps_get_features(self->input_state->caps, 0);
1133 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1134 {
1135 GST_DEBUG_OBJECT(self, "Is SVP");
1136 self->v4l2output->is_svp = TRUE;
1137 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001138
1139 GST_DEBUG_OBJECT(self, "Sending header");
1140
1141 codec_data = self->input_state->codec_data;
1142
1143 /* We are running in byte-stream mode, so we don't know the headers, but
1144 * we need to send something, otherwise the decoder will refuse to
 1145 * initialize.
1146 */
1147 if (codec_data)
1148 {
1149 gst_buffer_ref(codec_data);
1150 }
1151 else
1152 {
1153 codec_data = gst_buffer_ref(frame->input_buffer);
1154 processed = TRUE;
1155 }
1156
1157 /* Ensure input internal pool is active */
1158 if (!gst_buffer_pool_is_active(pool))
1159 {
1160 GstStructure *config = gst_buffer_pool_get_config(pool);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001161 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1162 // guint max = VIDEO_MAX_FRAME;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001163 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1164 // self->v4l2output->info.size, min, max);
1165 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1166
1167 /* There is no reason to refuse this config */
1168 if (!gst_buffer_pool_set_config(pool, config))
1169 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001170 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001171
1172 if (!gst_buffer_pool_set_active(pool, TRUE))
1173 goto activate_failed;
1174 }
1175
1176 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1177 ret =
1178 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
1179 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1180
1181 gst_buffer_unref(codec_data);
1182
1183 /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
1184 * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
 1185 * and returns the visible size as width/height and the coded size as
1186 * padding. */
1187 }
1188
1189 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1190 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1191 {
1192 /* It's possible that the processing thread stopped due to an error */
1193 if (self->output_flow != GST_FLOW_OK &&
1194 self->output_flow != GST_FLOW_FLUSHING)
1195 {
1196 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1197 ret = self->output_flow;
1198 goto drop;
1199 }
1200
1201 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1202
 1203 /* Start the processing task; when it quits, the task will disable input
 1204 * processing to unlock the input if draining, or to prevent a potential deadlock */
1205 self->output_flow = GST_FLOW_FLUSHING;
1206 if (!gst_pad_start_task(decoder->srcpad,
1207 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1208 goto start_task_failed;
1209 }
1210
1211 if (!processed)
1212 {
1213 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1214 ret =
1215 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1216 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1217
1218 if (ret == GST_FLOW_FLUSHING)
1219 {
1220 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1221 GST_TASK_STARTED)
1222 ret = self->output_flow;
1223 goto drop;
1224 }
1225 else if (ret != GST_FLOW_OK)
1226 {
1227 goto process_failed;
1228 }
1229 }
1230
 1231 /* No need to keep the input around */
1232 tmp = frame->input_buffer;
1233 frame->input_buffer = gst_buffer_new();
1234 gst_buffer_copy_into(frame->input_buffer, tmp,
1235 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1236 GST_BUFFER_COPY_META,
1237 0, 0);
1238 gst_buffer_unref(tmp);
1239
1240 gst_video_codec_frame_unref(frame);
1241 return ret;
1242
1243 /* ERRORS */
1244not_negotiated:
1245{
1246 GST_ERROR_OBJECT(self, "not negotiated");
1247 ret = GST_FLOW_NOT_NEGOTIATED;
1248 gst_aml_v4l2_error(self, &error);
1249 goto drop;
1250}
1251activate_failed:
1252{
1253 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1254 (_("Failed to allocate required memory.")),
1255 ("Buffer pool activation failed"));
1256 ret = GST_FLOW_ERROR;
1257 goto drop;
1258}
1259flushing:
1260{
1261 ret = GST_FLOW_FLUSHING;
1262 goto drop;
1263}
1264
1265start_task_failed:
1266{
1267 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1268 (_("Failed to start decoding thread.")), (NULL));
1269 ret = GST_FLOW_ERROR;
1270 goto drop;
1271}
1272process_failed:
1273{
1274 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1275 (_("Failed to process frame.")),
1276 ("Maybe be due to not enough memory or failing driver"));
1277 ret = GST_FLOW_ERROR;
1278 goto drop;
1279}
1280drop:
1281{
1282 gst_video_decoder_drop_frame(decoder, frame);
1283 return ret;
1284}
1285}
1286
1287static gboolean
1288gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1289 GstQuery *query)
1290{
1291 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1292 GstClockTime latency;
1293 gboolean ret = FALSE;
1294
1295 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1296 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1297
1298 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1299 {
1300 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1301 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1302 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1303 gst_video_decoder_set_latency(decoder, latency, latency);
1304 }
1305 else
1306 {
1307 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1308 }
1309
1310 return ret;
1311}
1312
1313static gboolean
1314gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1315{
1316 gboolean ret = TRUE;
1317 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1318
1319 switch (GST_QUERY_TYPE(query))
1320 {
1321 case GST_QUERY_CAPS:
1322 {
1323 GstCaps *filter, *result = NULL;
1324 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1325
1326 gst_query_parse_caps(query, &filter);
1327
1328 if (self->probed_srccaps)
1329 result = gst_caps_ref(self->probed_srccaps);
1330 else
1331 result = gst_pad_get_pad_template_caps(pad);
1332
1333 if (filter)
1334 {
1335 GstCaps *tmp = result;
1336 result =
1337 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1338 gst_caps_unref(tmp);
1339 }
1340
1341 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1342
1343 gst_query_set_caps_result(query, result);
1344 gst_caps_unref(result);
1345 break;
1346 }
1347
1348 default:
1349 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1350 break;
1351 }
1352
1353 return ret;
1354}
1355
1356static GstCaps *
1357gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1358{
1359 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1360 GstCaps *result;
1361
1362 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1363 filter);
1364
1365 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1366
1367 return result;
1368}
1369
1370static gboolean
1371gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1372{
1373 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1374 gboolean ret;
1375 GstEventType type = GST_EVENT_TYPE(event);
1376
1377 switch (type)
1378 {
1379 case GST_EVENT_FLUSH_START:
1380 GST_DEBUG_OBJECT(self, "flush start");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001381
1382 g_mutex_lock (&self->res_chg_lock);
1383 while (self->is_res_chg)
1384 {
1385 GST_LOG_OBJECT(decoder, "wait resolution change finish");
1386 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1387 }
1388 g_mutex_unlock (&self->res_chg_lock);
1389
zengliang.li92ff6822023-06-06 07:12:52 +00001390 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001391 gst_aml_v4l2_object_unlock(self->v4l2output);
1392 gst_aml_v4l2_object_unlock(self->v4l2capture);
1393 break;
1394 default:
1395 break;
1396 }
1397
1398 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1399
1400 switch (type)
1401 {
1402 case GST_EVENT_FLUSH_START:
1403 /* The processing thread should stop now, wait for it */
1404 gst_pad_stop_task(decoder->srcpad);
1405 GST_DEBUG_OBJECT(self, "flush start done");
1406 break;
1407 default:
1408 break;
1409 }
1410
1411 return ret;
1412}
1413
1414static GstStateChangeReturn
1415gst_aml_v4l2_video_dec_change_state(GstElement *element,
1416 GstStateChange transition)
1417{
1418 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1419 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1420
1421 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1422 {
1423 g_atomic_int_set(&self->active, FALSE);
1424 gst_aml_v4l2_object_unlock(self->v4l2output);
1425 gst_aml_v4l2_object_unlock(self->v4l2capture);
1426 gst_pad_stop_task(decoder->srcpad);
1427 }
1428
1429 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1430}
1431
1432static void
1433gst_aml_v4l2_video_dec_dispose(GObject *object)
1434{
1435 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1436
1437 gst_caps_replace(&self->probed_sinkcaps, NULL);
1438 gst_caps_replace(&self->probed_srccaps, NULL);
1439
1440 G_OBJECT_CLASS(parent_class)->dispose(object);
1441}
1442
1443static void
1444gst_aml_v4l2_video_dec_finalize(GObject *object)
1445{
1446 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1447
1448 gst_aml_v4l2_object_destroy(self->v4l2capture);
1449 gst_aml_v4l2_object_destroy(self->v4l2output);
1450
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001451 g_mutex_clear(&self->res_chg_lock);
1452 g_cond_clear(&self->res_chg_cond);
1453
xuesong.jiang61ea8012022-05-12 15:38:17 +08001454#if GST_IMPORT_LGE_PROP
1455 if (self->lge_ctxt)
1456 {
1457 if (self->lge_ctxt->app_type)
1458 g_free(self->lge_ctxt->app_type);
1459 if (self->lge_ctxt->res_info.coretype)
1460 g_free(self->lge_ctxt->res_info.coretype);
1461 free(self->lge_ctxt);
1462 }
1463
1464#endif
1465
xuesong.jiangae1548e2022-05-06 16:38:46 +08001466 G_OBJECT_CLASS(parent_class)->finalize(object);
1467}
1468
1469static void
1470gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1471{
1472 /* V4L2 object are created in subinstance_init */
zengliang.li92ff6822023-06-06 07:12:52 +00001473 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001474 self->is_secure_path = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001475 self->is_res_chg = FALSE;
1476 g_mutex_init(&self->res_chg_lock);
1477 g_cond_init(&self->res_chg_cond);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001478#if GST_IMPORT_LGE_PROP
1479 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1480 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1481#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001482}
1483
1484static void
1485gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1486{
1487 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1488 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1489 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1490
1491 gst_video_decoder_set_packetized(decoder, TRUE);
1492
1493 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1494 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1495 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1496 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1497 self->v4l2output->no_initial_format = TRUE;
1498 self->v4l2output->keep_aspect = FALSE;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001499 self->v4l2output->is_svp = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001500
1501 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1502 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1503 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1504 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1505 self->v4l2capture->need_wait_event = TRUE;
sheng.liub56bbc52022-06-21 11:02:33 +08001506 self->v4l2capture->need_drop_event = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001507}
1508
1509static void
1510gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1511{
1512 GstElementClass *element_class;
1513 GObjectClass *gobject_class;
1514 GstVideoDecoderClass *video_decoder_class;
1515
1516 parent_class = g_type_class_peek_parent(klass);
1517
1518 element_class = (GstElementClass *)klass;
1519 gobject_class = (GObjectClass *)klass;
1520 video_decoder_class = (GstVideoDecoderClass *)klass;
1521
1522 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1523 "AML V4L2 Video Decoder");
1524
1525 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1526 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1527 gobject_class->set_property =
1528 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1529 gobject_class->get_property =
1530 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1531
1532 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1533 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1534 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1535 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1536 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1537 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1538 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1539 video_decoder_class->set_format =
1540 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1541 video_decoder_class->negotiate =
1542 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1543 video_decoder_class->decide_allocation =
1544 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1545 /* FIXME propose_allocation or not ? */
1546 video_decoder_class->handle_frame =
1547 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1548 video_decoder_class->getcaps =
1549 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1550 video_decoder_class->src_query =
1551 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1552 video_decoder_class->sink_event =
1553 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1554
1555 element_class->change_state =
1556 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1557
1558 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001559#if GST_IMPORT_LGE_PROP
1560 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1561#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001562}
1563
1564static void
1565gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1566{
1567 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1568 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1569 GstAmlV4l2VideoDecCData *cdata = data;
1570
1571 klass->default_device = cdata->device;
1572
 1573 /* Note: gst_pad_template_new() takes the floating ref from the caps */
1574 gst_element_class_add_pad_template(element_class,
1575 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1576 cdata->sink_caps));
1577 gst_element_class_add_pad_template(element_class,
1578 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1579 cdata->src_caps));
1580
1581 gst_element_class_set_metadata(element_class, cdata->longname,
1582 "Codec/Decoder/Video/Hardware", cdata->description,
1583 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1584
1585 gst_caps_unref(cdata->sink_caps);
1586 gst_caps_unref(cdata->src_caps);
1587 g_free(cdata);
1588}
1589
1590/* Probing functions */
1591gboolean
1592gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1593{
1594 gboolean ret = FALSE;
1595
1596 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1597 ret = TRUE;
1598
1599 return ret;
1600}
1601
1602static gchar *
1603gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1604 const gchar *basename)
1605{
1606 gchar *codec_name = NULL;
1607 gchar *type_name = NULL;
1608 gboolean got_value = FALSE;
1609
1610#define SET_META(codec) \
1611 G_STMT_START \
1612 { \
1613 cdata->longname = "AML V4L2 " codec " Decoder"; \
1614 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1615 codec_name = g_ascii_strdown(codec, -1); \
1616 } \
1617 G_STMT_END
1618
1619 if (gst_structure_has_name(s, "image/jpeg"))
1620 {
1621 SET_META("JPEG");
1622 }
1623 else if (gst_structure_has_name(s, "video/mpeg"))
1624 {
1625 gint mpegversion = 0;
1626 gint *list = NULL;
1627 got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1628 if (FALSE == got_value)
1629 {
1630 got_value = gst_structure_get_list(s, "mpegversion", &list);
1631 if (TRUE == got_value && (1 == *list || 2 == *list))
1632 {
1633 SET_META("MPEG2");
1634 }
1635 else
1636 {
1637 SET_META("MPEG4");
1638 }
1639 }
1640 else
1641 {
1642 SET_META("MPEG4");
1643 }
1644 }
1645 else if (gst_structure_has_name(s, "video/x-h263"))
1646 {
1647 SET_META("H263");
1648 }
1649 else if (gst_structure_has_name(s, "video/x-fwht"))
1650 {
1651 SET_META("FWHT");
1652 }
1653 else if (gst_structure_has_name(s, "video/x-h264"))
1654 {
1655 SET_META("H264");
1656 }
1657 else if (gst_structure_has_name(s, "video/x-h265"))
1658 {
1659 SET_META("H265");
1660 }
1661 else if (gst_structure_has_name(s, "video/x-wmv"))
1662 {
1663 SET_META("VC1");
1664 }
1665 else if (gst_structure_has_name(s, "video/x-vp8"))
1666 {
1667 SET_META("VP8");
1668 }
1669 else if (gst_structure_has_name(s, "video/x-vp9"))
1670 {
1671 SET_META("VP9");
1672 }
1673 else if (gst_structure_has_name(s, "video/x-av1"))
1674 {
1675 SET_META("AV1");
1676 }
1677 else if (gst_structure_has_name(s, "video/x-bayer"))
1678 {
1679 SET_META("BAYER");
1680 }
1681 else if (gst_structure_has_name(s, "video/x-sonix"))
1682 {
1683 SET_META("SONIX");
1684 }
1685 else if (gst_structure_has_name(s, "video/x-pwc1"))
1686 {
1687 SET_META("PWC1");
1688 }
1689 else if (gst_structure_has_name(s, "video/x-pwc2"))
1690 {
1691 SET_META("PWC2");
1692 }
1693 else
1694 {
 1695 /* This code should be kept in sync with the exposed CODEC type of format
 1696 * from gstamlv4l2object.c. This warning will only occur in case we forget
1697 * to also add a format here. */
1698 gchar *s_str = gst_structure_to_string(s);
1699 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1700 "bug, please report at https://bugs.gnome.org",
1701 s_str);
1702 g_free(s_str);
1703 }
1704
1705 if (codec_name)
1706 {
1707 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1708 if (g_type_from_name(type_name) != 0)
1709 {
1710 g_free(type_name);
1711 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1712 }
1713
1714 g_free(codec_name);
1715 }
1716
1717 return type_name;
1718#undef SET_META
1719}
1720
1721void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1722 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1723{
1724 gint i;
1725
1726 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1727 {
1728 GstAmlV4l2VideoDecCData *cdata;
1729 GstStructure *s;
1730 GTypeQuery type_query;
1731 GTypeInfo type_info = {
1732 0,
1733 };
1734 GType type, subtype;
1735 gchar *type_name;
1736
1737 s = gst_caps_get_structure(sink_caps, i);
1738
1739 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1740 cdata->device = g_strdup(device_path);
1741 cdata->sink_caps = gst_caps_new_empty();
1742 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1743 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1744 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1745 cdata->src_caps = gst_caps_copy(src_caps);
1746 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1747 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1748 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1749
1750 /* Skip over if we hit an unmapped type */
1751 if (!type_name)
1752 {
1753 g_free(cdata);
1754 continue;
1755 }
1756
1757 type = gst_aml_v4l2_video_dec_get_type();
1758 g_type_query(type, &type_query);
1759 memset(&type_info, 0, sizeof(type_info));
1760 type_info.class_size = type_query.class_size;
1761 type_info.instance_size = type_query.instance_size;
1762 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1763 type_info.class_data = cdata;
1764 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1765
1766 subtype = g_type_register_static(type, type_name, &type_info, 0);
1767 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1768 subtype))
1769 GST_WARNING("Failed to register plugin '%s'", type_name);
1770
1771 g_free(type_name);
1772 }
1773}
xuesong.jiang61ea8012022-05-12 15:38:17 +08001774
1775#if GST_IMPORT_LGE_PROP
1776static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
1777{
1778 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
1779 g_param_spec_object("resource-info", "resource-info",
1780 "After acquisition of H/W resources is completed, allocated resource information must be delivered to the decoder and the sink",
1781 GST_TYPE_STRUCTURE,
1782 G_PARAM_READABLE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
1783
1784 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
1785 g_param_spec_uint64("decoded-size", "decoded-size",
1786 "The total amount of decoder element's decoded video es after constructing pipeline or flushing pipeline update unit is byte.",
1787 0, G_MAXUINT64,
1788 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1789
1790 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
1791 g_param_spec_uint64("undecoded-size", "undecoded-size",
1792 "video decoder element's total undecoded data update unit is byte.",
1793 0, G_MAXUINT64,
1794 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1795
1796 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
1797 g_param_spec_string("app-type", "app-type",
1798 "set application type.",
1799 "default_app",
1800 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1801
1802 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
1803 g_param_spec_boolean("clip-mode", "clip-mode",
1804 "When seeking, Content is moving faster for a while to skip frames.",
1805 FALSE,
1806 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1807}
1808#endif