/* GStreamer
 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>

#include "gstamlv4l2object.h"
#include "gstamlv4l2videodec.h"

#include <gst/gst-i18n-plugin.h>
#include <gst/allocators/gstdmabuf.h>

GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug

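/* Re-define the video decoder stream lock/unlock macros so that every
 * acquisition and release of the stream lock is traced in the debug log. */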
#ifdef GST_VIDEO_DECODER_STREAM_LOCK
#undef GST_VIDEO_DECODER_STREAM_LOCK
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder)                          \
    {                                                                   \
        GST_TRACE("aml v4l2 dec locking");                              \
        g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock);     \
        GST_TRACE("aml v4l2 dec locked");                               \
    }
#endif

#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
#undef GST_VIDEO_DECODER_STREAM_UNLOCK
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder)                        \
    {                                                                   \
        GST_TRACE("aml v4l2 dec unlocking");                            \
        g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock);   \
        GST_TRACE("aml v4l2 dec unlocked");                             \
    }
#endif

#define GST_AML_V4L2_CC_IMPORT_QUARK gst_aml_v4l2_buffer_pool_cc_import_quark ()

#ifndef ABSDIFF
#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
#endif

#if GST_IMPORT_LGE_PROP
typedef struct _GstAmlResourceInfo
{
    gchar *coretype;
    gint videoport;
    gint audioport;
    gint maxwidth;
    gint maxheight;
    gint mixerport;
} GstAmlResourceInfo;

struct _GstAmlV4l2VideoDecLgeCtxt
{
    GstAmlResourceInfo res_info;
    guint64 dec_size;
    guint64 undec_size;
    gchar *app_type;
    gboolean clip_mode;
};
#endif

typedef struct
{
    gchar *device;
    GstCaps *sink_caps;
    GstCaps *src_caps;
    const gchar *longname;
    const gchar *description;
} GstAmlV4l2VideoDecCData;

enum
{
    PROP_0,
    V4L2_STD_OBJECT_PROPS,
#if GST_IMPORT_LGE_PROP
    LGE_RESOURCE_INFO,
    LGE_DECODE_SIZE,
    LGE_UNDECODE_SIZE,
    LGE_APP_TYPE,
    LGE_CLIP_MODE
#endif
};

enum
{
    SIGNAL_DECODED_PTS,
    MAX_SIGNAL
};

static guint g_signals[MAX_SIGNAL] = {0};
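
/* The "decoded-pts" signal is emitted from the capture loop with the PTS of
 * every decoded buffer that is pushed downstream (see
 * gst_aml_v4l2_video_dec_loop()). Minimal usage sketch, assuming the signal
 * is registered with a single guint64 PTS argument (the registration lives in
 * class_init, outside this excerpt):
 *
 *   static void
 *   on_decoded_pts(GstElement *dec, guint64 pts, gpointer user_data)
 *   {
 *       g_print("decoded pts: %" G_GUINT64_FORMAT "\n", pts);
 *   }
 *
 *   g_signal_connect(decoder, "decoded-pts", G_CALLBACK(on_decoded_pts), NULL);
 */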

#define gst_aml_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
                       GST_TYPE_VIDEO_DECODER);

static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
#endif

126static void
127gst_aml_v4l2_video_dec_set_property(GObject *object,
128 guint prop_id, const GValue *value, GParamSpec *pspec)
129{
130 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
131
132 switch (prop_id)
133 {
    case PROP_CAPTURE_IO_MODE:
    case PROP_DUMP_FRAME_LOCATION:
    case PROP_CC_DATA:
137 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
138 prop_id, value, pspec))
139 {
140 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
141 }
142 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800143#if GST_IMPORT_LGE_PROP
144 case LGE_RESOURCE_INFO:
145 {
146 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
147 GstStructure *r_info = g_value_get_object(value);
148 if (r_info)
149 {
150 if (gst_structure_has_field(r_info, "coretype"))
151 {
152 if (self->lge_ctxt->res_info.coretype)
153 g_free(self->lge_ctxt->res_info.coretype);
154 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
155 }
156 if (gst_structure_has_field(r_info, "videoport"))
157 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
158 if (gst_structure_has_field(r_info, "audioport"))
159 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
160 if (gst_structure_has_field(r_info, "maxwidth"))
161 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
162 if (gst_structure_has_field(r_info, "maxheight"))
163 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
164 if (gst_structure_has_field(r_info, "mixerport"))
165 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
166 }
167 break;
168 }
169 case LGE_APP_TYPE:
170 {
171 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
172 if (self->lge_ctxt->app_type)
173 g_free(self->lge_ctxt->app_type);
174 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
175 break;
176 }
    case LGE_CLIP_MODE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
        /* clip_mode is a gboolean, so store the boolean value directly
         * instead of g_strdup()-ing it */
        self->lge_ctxt->clip_mode = g_value_get_boolean(value);
        break;
    }
183#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800184 /* By default, only set on output */
185 default:
186 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
187 prop_id, value, pspec))
188 {
189 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
190 }
191 break;
192 }
193}
194
195static void
196gst_aml_v4l2_video_dec_get_property(GObject *object,
197 guint prop_id, GValue *value, GParamSpec *pspec)
198{
199 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
200
201 switch (prop_id)
202 {
    case PROP_CAPTURE_IO_MODE:
    case PROP_CC_DATA:
    case PROP_DECODING_ERROR_FRAMES:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
207 prop_id, value, pspec))
208 {
209 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
210 }
211 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800212#if GST_IMPORT_LGE_PROP
213 case LGE_DECODE_SIZE:
214 {
215 GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
216 self->lge_ctxt->dec_size = -1;
217 g_value_set_int(value, self->lge_ctxt->dec_size);
218 break;
219 }
220 case LGE_UNDECODE_SIZE:
221 {
222 GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
223 self->lge_ctxt->undec_size = -1;
224 g_value_set_int(value, self->lge_ctxt->undec_size);
225 break;
226 }
227#endif
228
xuesong.jiangae1548e2022-05-06 16:38:46 +0800229 /* By default read from output */
230 default:
231 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
232 prop_id, value, pspec))
233 {
234 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
235 }
236 break;
237 }
238}
239
240static gboolean
241gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
242{
243 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
244 GstCaps *codec_caps;
245
246 GST_DEBUG_OBJECT(self, "Opening");
247
248 if (!gst_aml_v4l2_object_open(self->v4l2output))
249 goto failure;
250
251 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
252 goto failure;
253
254 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
255 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
256 codec_caps);
257 gst_caps_unref(codec_caps);
258
259 if (gst_caps_is_empty(self->probed_sinkcaps))
260 goto no_encoded_format;
261
262 return TRUE;
263
264no_encoded_format:
265 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
266 (_("Decoder on device %s has no supported input format"),
267 self->v4l2output->videodev),
268 (NULL));
269 goto failure;
270
271failure:
272 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
273 gst_aml_v4l2_object_close(self->v4l2output);
274
275 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
276 gst_aml_v4l2_object_close(self->v4l2capture);
277
278 gst_caps_replace(&self->probed_srccaps, NULL);
279 gst_caps_replace(&self->probed_sinkcaps, NULL);
280
281 return FALSE;
282}
283
284static gboolean
285gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
286{
287 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
288
289 GST_DEBUG_OBJECT(self, "Closing");
290
291 gst_aml_v4l2_object_close(self->v4l2output);
292 gst_aml_v4l2_object_close(self->v4l2capture);
293 gst_caps_replace(&self->probed_srccaps, NULL);
294 gst_caps_replace(&self->probed_sinkcaps, NULL);
295
296 return TRUE;
297}
298
299static gboolean
300gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
301{
302 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
303
304 GST_DEBUG_OBJECT(self, "Starting");
305
306 gst_aml_v4l2_object_unlock(self->v4l2output);
307 g_atomic_int_set(&self->active, TRUE);
308 self->output_flow = GST_FLOW_OK;
309
310 return TRUE;
311}
312
313static gboolean
314gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
315{
316 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
317
318 GST_DEBUG_OBJECT(self, "Stopping");
319
320 gst_aml_v4l2_object_unlock(self->v4l2output);
321 gst_aml_v4l2_object_unlock(self->v4l2capture);
322
323 /* Wait for capture thread to stop */
324 gst_pad_stop_task(decoder->srcpad);
325
326 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
327 self->output_flow = GST_FLOW_OK;
328 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
329
330 /* Should have been flushed already */
331 g_assert(g_atomic_int_get(&self->active) == FALSE);
332
333 gst_aml_v4l2_object_stop(self->v4l2output);
334 gst_aml_v4l2_object_stop(self->v4l2capture);
335
336 if (self->input_state)
337 {
338 gst_video_codec_state_unref(self->input_state);
339 self->input_state = NULL;
340 }
341
342 GST_DEBUG_OBJECT(self, "Stopped");
343
344 return TRUE;
345}
346
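/* Return TRUE when the new caps carry a different codec (a different caps
 * structure name) than the current input state, or when no input state has
 * been set yet. */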
static gboolean
gst_aml_v4l2_video_dec_codec_chg(GstVideoDecoder *decoder,
                                 GstVideoCodecState *state)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstStructure *s_old = NULL;
    GstStructure *s_new = NULL;

    // first play, the format must be set
    if (!self->input_state)
        return TRUE;

    if (self->input_state->caps)
        s_old = gst_caps_get_structure(self->input_state->caps, 0);
    if (state->caps)
        s_new = gst_caps_get_structure(state->caps, 0);

    if (s_new && s_old && strcmp(gst_structure_get_name(s_new), gst_structure_get_name(s_old)))
        return TRUE;
    return FALSE;
}
368
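/* Return TRUE on the first configuration (no input state yet) or when the
 * width/height in the new caps differ from the current input state. */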
369static gboolean
370gst_aml_v4l2_video_dec_res_chg(GstVideoDecoder *decoder,
371 GstVideoCodecState *state)
372{
373 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
374 gboolean ret = FALSE;
375 gint width_new = -1,height_new = -1,width_old = -1,height_old = -1;
376 GstStructure *s_old = NULL;
377 GstStructure *s_new = NULL;
378
    // first play, the format must be set
380 if (!self->input_state)
381 {
382 ret = TRUE;
383 goto done;
384 }
385
386 if (self->input_state->caps)
387 s_old = gst_caps_get_structure(self->input_state->caps,0);
388 if (state->caps)
389 s_new = gst_caps_get_structure(state->caps,0);
390
391 if (s_new && gst_structure_has_field(s_new,"width") && gst_structure_has_field(s_new,"height"))
392 {
393 gst_structure_get_int(s_new,"width",&width_new);
394 gst_structure_get_int(s_new,"height",&height_new);
395 }
396 if (s_old && gst_structure_has_field(s_old,"width") && gst_structure_has_field(s_old,"height"))
397 {
398 gst_structure_get_int(s_old,"width",&width_old);
399 gst_structure_get_int(s_old,"height",&height_old);
400 }
401
402 if (width_new != width_old || height_new != height_old)
403 ret = TRUE;
404
405done:
406 GST_DEBUG_OBJECT(self, "ret is %d",ret);
407 return ret;
408}
409
410static gboolean
xuesong.jiangae1548e2022-05-06 16:38:46 +0800411gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
412 GstVideoCodecState *state)
413{
414 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
415 gboolean ret = TRUE;
416 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
417 GstCaps *caps;
418
419 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
hanghang.luo8e1225b2023-10-10 08:54:28 +0000420 if (self->input_state)
421 {
422 if (gst_aml_v4l2_video_dec_res_chg(decoder,state) || gst_aml_v4l2_video_dec_codec_chg(decoder,state))
423 GST_DEBUG_OBJECT(self, "resolution or codec changed");
424 else
425 goto done;
426 }
427
xuesong.jiangae1548e2022-05-06 16:38:46 +0800428 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
hanghang.luoc54208e2023-09-22 02:43:54 +0000429 GstStructure *s = gst_caps_get_structure(state->caps,0);
430 if (s && gst_structure_has_field(s,"format"))
431 {
432 if (!strcmp("XVID",gst_structure_get_string(s,"format")))
433 {
            GST_DEBUG_OBJECT(self, "This is a DIVX stream, which is not supported");
435 ret = FALSE;
436 goto done;
437 }
438 }
xuesong.jiangae1548e2022-05-06 16:38:46 +0800439
440 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
441 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
442
443 if (self->input_state)
444 {
445 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
446 {
447 GST_DEBUG_OBJECT(self, "Compatible caps");
448 goto done;
449 }
450 gst_video_codec_state_unref(self->input_state);
451 self->input_state = NULL;
452
453 gst_aml_v4l2_video_dec_finish(decoder);
454 gst_aml_v4l2_object_stop(self->v4l2output);
455
        /* The renegotiation flow doesn't blend with the base class flow. To properly
         * stop the capture pool, if the buffers can't be orphaned, we need to
         * reclaim our buffers, which will happen through the allocation query.
         * The allocation query is triggered by gst_video_decoder_negotiate() which
         * requires the output caps to be set, but we can't know this information
         * as we rely on the decoder, which requires the capture queue to be
         * stopped.
         *
         * To work around this issue, we simply run an allocation query with the
         * old negotiated caps in order to drain/reclaim our buffers. That removes
         * the complexity and should not have much impact on performance since the
         * following allocation query will happen on a drained pipeline and won't
         * block. */
469 if (self->v4l2capture->pool &&
470 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
471 {
472 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
473 if (caps)
474 {
475 GstQuery *query = gst_query_new_allocation(caps, FALSE);
476 gst_pad_peer_query(decoder->srcpad, query);
477 gst_query_unref(query);
478 gst_caps_unref(caps);
479 }
480 }
481
482 gst_aml_v4l2_object_stop(self->v4l2capture);
483 self->output_flow = GST_FLOW_OK;
484 }
485
486 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
487 {
488 GST_ERROR_OBJECT(self, "config output drm mode error");
489 goto done;
490 }
491
xuesong.jiang22a9b112023-05-24 09:01:59 +0000492 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
493 {
494 GST_ERROR_OBJECT(self, "config output stream mode error");
495 goto done;
496 }
497
xuesong.jiangae1548e2022-05-06 16:38:46 +0800498 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
499
500 gst_caps_replace(&self->probed_srccaps, NULL);
501 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
502 gst_aml_v4l2_object_get_raw_caps());
503
504 if (gst_caps_is_empty(self->probed_srccaps))
505 goto no_raw_format;
506
507 caps = gst_caps_copy(self->probed_srccaps);
508 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
509 gst_caps_append(self->probed_srccaps, caps);
510 if (ret)
511 self->input_state = gst_video_codec_state_ref(state);
512 else
513 gst_aml_v4l2_error(self, &error);
514
515done:
516 return ret;
517
518no_raw_format:
519 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
520 (_("Decoder on device %s has no supported output format"),
521 self->v4l2output->videodev),
522 (NULL));
    return FALSE; /* this function returns a gboolean, not a GstFlowReturn */
524}
525
526static gboolean
527gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
528{
529 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
530
531 GST_DEBUG_OBJECT(self, "Flushed");
532
    /* Ensure the processing thread has stopped for the reverse playback
     * discont case */
535 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
536 {
537 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
538
539 gst_aml_v4l2_object_unlock(self->v4l2output);
540 gst_aml_v4l2_object_unlock(self->v4l2capture);
541 gst_pad_stop_task(decoder->srcpad);
542 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
543 }
544
545 self->output_flow = GST_FLOW_OK;
546
547 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
548 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
549
550 if (self->v4l2output->pool)
551 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
552
553 /* gst_aml_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
554 * called after gst_aml_v4l2_object_unlock_stop() stopped flushing the buffer
555 * pool. */
556 if (self->v4l2capture->pool)
557 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
558
559 return TRUE;
560}
561
562static gboolean
563gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
564{
565 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
566
xuesong.jiang681d3602022-06-24 21:23:35 +0800567 if (TRUE == self->v4l2output->is_svp)
568 {
569 GstStructure *s;
570 GstEvent *event;
571
572 s = gst_structure_new_empty ("IS_SVP");
573 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
574 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
575 gst_pad_push_event (decoder->srcpad, event);
576 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
577 }
578
    /* We don't allow renegotiation without carefully disabling the pool */
580 if (self->v4l2capture->pool &&
581 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
582 return TRUE;
583
584 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
585}
586
587static gboolean
588gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
589{
590 struct v4l2_decoder_cmd dcmd = {
591 0,
592 };
593
594 GST_DEBUG_OBJECT(v4l2object->element,
595 "sending v4l2 decoder command %u with flags %u", cmd, flags);
596
597 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
598 return FALSE;
599
600 dcmd.cmd = cmd;
601 dcmd.flags = flags;
602 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
603 goto dcmd_failed;
604
605 return TRUE;
606
607dcmd_failed:
608 if (errno == ENOTTY)
609 {
610 GST_INFO_OBJECT(v4l2object->element,
611 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
612 cmd, flags, v4l2object->videodev, g_strerror(errno));
613 }
614 else
615 {
616 GST_ERROR_OBJECT(v4l2object->element,
617 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
618 cmd, flags, v4l2object->videodev, g_strerror(errno));
619 }
620 return FALSE;
621}
622
623static GstFlowReturn
624gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
625{
626 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
627 GstFlowReturn ret = GST_FLOW_OK;
628 GstBuffer *buffer;
629
630 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
631 goto done;
632
633 GST_DEBUG_OBJECT(self, "Finishing decoding");
634
635 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
636
637 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
638 {
639 GstTask *task = decoder->srcpad->task;
640
641 /* If the decoder stop command succeeded, just wait until processing is
642 * finished */
643 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
644 GST_OBJECT_LOCK(task);
645 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
646 GST_TASK_WAIT(task);
647 GST_OBJECT_UNLOCK(task);
648 ret = GST_FLOW_FLUSHING;
649 }
650 else
651 {
        /* otherwise keep queuing empty buffers until the processing thread has
         * stopped; _pool_process() will return FLUSHING once that happens */
654 while (ret == GST_FLOW_OK)
655 {
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800656 GST_DEBUG_OBJECT(self, "queue empty output buf");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800657 buffer = gst_buffer_new();
658 ret =
659 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
660 gst_buffer_unref(buffer);
661 }
662 }
663
    /* and ensure the processing thread has stopped in case another error
     * occurred. */
666 gst_aml_v4l2_object_unlock(self->v4l2capture);
667 gst_pad_stop_task(decoder->srcpad);
668 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
669
670 if (ret == GST_FLOW_FLUSHING)
671 ret = self->output_flow;
672
673 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
674
675 /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */
676
677done:
678 return ret;
679}
680
681static GstFlowReturn
682gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
683{
684 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
685
686 GST_DEBUG_OBJECT(self, "Draining...");
687 gst_aml_v4l2_video_dec_finish(decoder);
688 gst_aml_v4l2_video_dec_flush(decoder);
689
690 return GST_FLOW_OK;
691}
692
693static GstVideoCodecFrame *
694gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
695{
696 GstVideoCodecFrame *frame = NULL;
697 GList *frames, *l;
698 gint count = 0;
699
700 frames = gst_video_decoder_get_frames(decoder);
701
702 for (l = frames; l != NULL; l = l->next)
703 {
704 GstVideoCodecFrame *f = l->data;
705
706 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
707 frame = f;
708
709 count++;
710 }
711
712 if (frame)
713 {
714 GST_LOG_OBJECT(decoder,
715 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
716 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
717 gst_video_codec_frame_ref(frame);
718 }
719
720 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
721
722 return frame;
723}
724
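/* Frame-mode lookup: find the pending GstVideoCodecFrame whose PTS matches
 * the decoded buffer's PTS within 1000 ns; if none matches, fall back to a
 * pending frame that has no valid PTS. */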
fei.dengbee20862022-06-14 14:59:48 +0800725static GstVideoCodecFrame *
xuesong.jiange24aef92023-06-16 06:39:10 +0000726gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
fei.dengbee20862022-06-14 14:59:48 +0800727{
728 GstVideoCodecFrame *frame = NULL;
729 GList *frames, *l;
730 gint count = 0;
731
xuesong.jiange24aef92023-06-16 06:39:10 +0000732 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
733
fei.dengbee20862022-06-14 14:59:48 +0800734 frames = gst_video_decoder_get_frames(decoder);
735
736 for (l = frames; l != NULL; l = l->next)
737 {
738 GstVideoCodecFrame *f = l->data;
fei.denge9458472023-04-18 02:05:48 +0000739
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800740 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
fei.dengbee20862022-06-14 14:59:48 +0800741 frame = f;
fei.dengbee20862022-06-14 14:59:48 +0800742 }
fei.dengbee20862022-06-14 14:59:48 +0800743 count++;
744 }
745
zengliang.liddee2da2023-07-14 07:27:05 +0000746 if (!frame)
747 {
748 for (l = frames; l != NULL; l = l->next)
749 {
750 GstVideoCodecFrame *f = l->data;
751 if (!GST_CLOCK_TIME_IS_VALID(f->pts))
752 {
753 frame = f;
754 }
755 GST_DEBUG("The pts of the expected output frame is invalid");
756 }
757 }
758
fei.dengbee20862022-06-14 14:59:48 +0800759 if (frame)
760 {
761 GST_LOG_OBJECT(decoder,
762 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
763 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
764 gst_video_codec_frame_ref(frame);
765 }
766
767 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
768
xuesong.jiange24aef92023-06-16 06:39:10 +0000769 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
fei.dengbee20862022-06-14 14:59:48 +0800770 return frame;
771}
772
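/* Stream-mode lookup: input buffers do not map 1:1 to output frames here, so
 * walk the pending frames, release those whose PTS is older than the decoded
 * buffer's PTS, and return the frame that matches within 1000 ns (or the
 * first pending frame when nothing matches). */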
xuesong.jiange24aef92023-06-16 06:39:10 +0000773static GstVideoCodecFrame *
774gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
775{
776 GstVideoCodecFrame *frame = NULL;
777 GList *frames, *l;
778 gint count = 0;
779
780 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
781
782 frames = gst_video_decoder_get_frames(decoder);
783 guint frames_len = 0;
784 frames_len = g_list_length(frames);
785 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
786
787 frame = frames->data;
788
789 for (l = frames; l != NULL; l = l->next)
790 {
791 GstVideoCodecFrame *f = l->data;
792
793 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
794 {
795 /* found the right frame */
796 frame = f;
797 break;
798 }
        else if (GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
        {
            GST_LOG_OBJECT(decoder,
                           "stream mode drop frame %d %" GST_TIME_FORMAT,
                           f->system_frame_number, GST_TIME_ARGS(f->pts));

            gst_video_codec_frame_ref(f);
            // gst_video_decoder_drop_frame(decoder, f);
            gst_video_decoder_release_frame(decoder, f);
        }
809 else
810 {
811 GST_LOG_OBJECT (decoder, "dbg");
812 }
813 }
814
815 if (frame)
816 {
817 guint l_len = 0;
818 l = gst_video_decoder_get_frames(decoder);
819 l_len = g_list_length(l);
820 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
821
822 GST_LOG_OBJECT(decoder,
823 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
824 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
825 gst_video_codec_frame_ref(frame);
826 }
827
828 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
829
830 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
831 return frame;
832}
833
834static GstVideoCodecFrame *
835gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
836{
837 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
838 if (self->v4l2output->stream_mode)
839 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
840 else
841 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
842}
843
xuesong.jiangae1548e2022-05-06 16:38:46 +0800844static gboolean
845gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
846 GstStructure *structure, gpointer user_data)
847{
848 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
849 GstVideoAlignment *align = &self->v4l2capture->align;
850 GstVideoInfo *info = &self->v4l2capture->info;
851 int width, height;
852
853 if (!gst_structure_get_int(structure, "width", &width))
854 return TRUE;
855
856 if (!gst_structure_get_int(structure, "height", &height))
857 return TRUE;
858
859 if (align->padding_left != 0 || align->padding_top != 0 ||
860 height != info->height + align->padding_bottom)
861 return TRUE;
862
863 if (height == info->height + align->padding_bottom)
864 {
        /* Some drivers may round up width to the padded width */
866 if (width == info->width + align->padding_right)
867 gst_structure_set(structure,
868 "width", G_TYPE_INT, width - align->padding_right,
869 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
870 /* Some drivers may keep visible width and only round up bytesperline */
871 else if (width == info->width)
872 gst_structure_set(structure,
873 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
874 }
875
876 return TRUE;
877}
878
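/* Dequeue and discard a single pending V4L2 event; used to swallow a stale
 * SOURCE_CHANGE or EOS event left on the capture queue. */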
879static void
sheng.liubcf036c2022-06-21 15:55:42 +0800880gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
sheng.liub56bbc52022-06-21 11:02:33 +0800881{
882 struct v4l2_event evt;
883 gint ret;
884
885 memset (&evt, 0x00, sizeof (struct v4l2_event));
886 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
887 if (ret < 0)
888 {
889 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
890 return;
891 }
892
893 switch (evt.type)
894 {
895 case V4L2_EVENT_SOURCE_CHANGE:
896 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
897 break;
898 case V4L2_EVENT_EOS:
899 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
900 break;
901 default:
902 break;
903 }
904
905 return;
906}
907
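/* Build the downstream output state from the acquired capture format: query
 * the V4L2 compose rectangle (VIDIOC_G_SELECTION) for the visible size,
 * rounded down to even values, and publish it as width/height next to the
 * coded_width/coded_height taken from the driver-reported format. */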
908static void
hanghang.luo2eec4892023-07-18 06:44:42 +0000909gst_aml_v4l2_video_dec_set_output_status(GstVideoDecoder *decoder,GstVideoInfo info)
910{
911 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
912 GstVideoCodecState *output_state;
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000913 struct v4l2_selection sel;
914 struct v4l2_rect *r = NULL;
915 GstStructure *s;
916 gint width = 0;
917 gint height = 0;
hanghang.luo2eec4892023-07-18 06:44:42 +0000918 output_state = gst_video_decoder_set_output_state(decoder,
919 info.finfo->format, info.width, info.height, self->input_state);
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000920 memset(&sel, 0, sizeof(struct v4l2_selection));
921 sel.type = self->v4l2capture->type;
922 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
923 if (self->v4l2capture->ioctl(self->v4l2capture->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
924 {
925 r = &sel.r;
926 width = (r->width/2)*2;
927 height = (r->height/2)*2;
928 GST_DEBUG_OBJECT(self, "w:%d h:%d ",width,height);
929 }
930 else
        GST_DEBUG_OBJECT(self, "ioctl error");
hanghang.luo2eec4892023-07-18 06:44:42 +0000932 if (output_state)
933 {
934 output_state->info.interlace_mode = info.interlace_mode;
935 output_state->allocation_caps =gst_video_info_to_caps(&info);
hanghang.luo2eec4892023-07-18 06:44:42 +0000936 output_state->caps =gst_video_info_to_caps(&info);
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000937 s = gst_caps_get_structure(output_state->caps, 0);
938 if (s)
939 {
940 gst_structure_set(s,"coded_width",G_TYPE_INT,info.width,NULL);
941 gst_structure_set(s,"coded_height",G_TYPE_INT,info.height,NULL);
942 gst_structure_set(s,"width",G_TYPE_INT,width,NULL);
943 gst_structure_set(s,"height",G_TYPE_INT,height,NULL);
944 GST_DEBUG_OBJECT(self, "output_state->caps: %" GST_PTR_FORMAT, output_state->caps);
945 gst_video_codec_state_unref(output_state);
946 }
hanghang.luo2eec4892023-07-18 06:44:42 +0000947 }
948}
949
zengliang.lidcd41462024-06-19 16:05:12 +0800950static GQuark
951gst_aml_v4l2_buffer_pool_cc_import_quark (void)
952{
953 static GQuark quark = 0;
954
955 if (quark == 0)
956 quark = g_quark_from_string ("GstAmlV4l2BufferPoolCcUsePtrData");
957
958 return quark;
959}
960
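/* Walk the buffer pool's closed-caption list and attach the CC buffer whose
 * timestamp equals the frame's output buffer timestamp as qdata on that
 * output buffer (keyed by GST_AML_V4L2_CC_IMPORT_QUARK). Returns TRUE on a
 * match. */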
961static gboolean
962foreach_cc_buffer_list_match_pts_func(GList *list , GstVideoCodecFrame *frame)
963{
964 GList *l;
965 if (g_list_length (list) > 0)
966 {
967 for (l = list; l != NULL; l = l->next)
968 {
969 GstBuffer *cc_buffer = l->data;
970 if (GST_BUFFER_TIMESTAMP (frame->output_buffer) == GST_BUFFER_TIMESTAMP (cc_buffer))
971 {
972 gst_mini_object_set_qdata (GST_MINI_OBJECT (frame->output_buffer), GST_AML_V4L2_CC_IMPORT_QUARK,
973 gst_buffer_ref(cc_buffer), (GDestroyNotify) gst_buffer_unref);
974 #if 0
975 //Debug code:dump cc data
976 GstMapInfo gst_map;
977 gst_buffer_map(cc_buffer,&gst_map,GST_MAP_READ);
978 int fd=open("/data/test/cc1.data",O_RDWR |O_CREAT|O_APPEND,0777);
979 if (gst_map.size>0)
980 write(fd,gst_map.data,gst_map.size);
981 close(fd);
982 gst_buffer_unmap(cc_buffer,&gst_map);
983 #endif
984 GST_DEBUG("match success");
985 return TRUE;
986 }
987 else
988 {
989 GST_DEBUG("match fail");
990 }
991 }
992 GST_WARNING("no match frame in the bufferlist");
993 }
994 else
995 {
996 GST_WARNING("list is null,can not foreach");
997 }
998 return FALSE;
999}
1000
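/* Capture (srcpad) streaming task: on first activation wait for the V4L2
 * SOURCE_CHANGE event, acquire and negotiate the capture format and activate
 * the capture pool; then repeatedly dequeue decoded buffers, match them to
 * pending codec frames by PTS and push them downstream. The task pauses
 * itself on flush, EOS or error. */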
hanghang.luo2eec4892023-07-18 06:44:42 +00001001static void
xuesong.jiangae1548e2022-05-06 16:38:46 +08001002gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
1003{
1004 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1005 GstAmlV4l2BufferPool *v4l2_pool;
1006 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1007 GstBufferPool *pool;
1008 GstVideoCodecFrame *frame;
1009 GstBuffer *buffer = NULL;
1010 GstFlowReturn ret;
1011
1012 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1013 {
1014 GstVideoInfo info;
xuesong.jiang282ca572023-05-05 09:03:32 +00001015 GstCaps *acquired_caps, *available_caps, *caps, *filter;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001016 GstStructure *st;
        GST_DEBUG_OBJECT(self, "waiting for source change event");
1018 /* Wait until received SOURCE_CHANGE event to get right video format */
1019 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
1020 {
1021 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
1022 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1023 {
                // keep the flush-start event blocked until the capture buffer pool is activated
                self->is_res_chg = TRUE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001026 GST_DEBUG_OBJECT(self, "Received source change event");
1027 break;
1028 }
1029 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
1030 {
1031 GST_DEBUG_OBJECT(self, "Received eos event");
1032 goto beach;
1033 }
1034 else if (ret != GST_FLOW_OK)
1035 {
1036 GST_ERROR_OBJECT(self, "dqevent error");
1037 goto beach;
1038 }
1039 }
1040 self->v4l2capture->need_wait_event = FALSE;
1041
sheng.liu0c77f6c2022-06-17 21:33:20 +08001042 if (TRUE == self->v4l2output->is_svp)
1043 {
1044 GstPad *peer;
1045 GstStructure *s;
1046 GstEvent *event;
1047
1048 peer = gst_pad_get_peer (decoder->srcpad);
1049 if (peer)
1050 {
hanghang.luo70f07ef2023-07-13 02:23:06 +00001051 s = gst_structure_new_empty ("IS_SVP");
1052 if (s)
1053 {
1054 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
1055 gst_pad_send_event (peer, event);
1056 GST_DEBUG_OBJECT(self, "Send SVP Event");
1057 }
1058 gst_object_unref (peer);
sheng.liu0c77f6c2022-06-17 21:33:20 +08001059 }
1060 }
1061
sheng.liub56bbc52022-06-21 11:02:33 +08001062 if (self->v4l2capture->need_drop_event)
1063 {
1064 // drop V4L2_EVENT_SOURCE_CHANGE
1065 gst_v4l2_drop_event(self->v4l2capture);
1066 self->v4l2capture->need_drop_event = FALSE;
1067 }
1068
xuesong.jiangae1548e2022-05-06 16:38:46 +08001069 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
1070 goto not_negotiated;
hanghang.luo8ec04e92023-10-09 08:14:24 +00001071
xuesong.jiangae1548e2022-05-06 16:38:46 +08001072 /* Create caps from the acquired format, remove the format field */
1073 acquired_caps = gst_video_info_to_caps(&info);
1074 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
1075 st = gst_caps_get_structure(acquired_caps, 0);
xuesong.jiang282ca572023-05-05 09:03:32 +00001076 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
1077
1078 /* Probe currently available pixel formats */
1079 available_caps = gst_caps_copy(self->probed_srccaps);
1080 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
1081
1082 /* Replace coded size with visible size, we want to negotiate visible size
1083 * with downstream, not coded size. */
1084 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
1085
1086 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001087 caps = gst_caps_copy(filter);
1088 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1089 gst_caps_append(filter, caps);
1090
1091 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
1092 gst_caps_unref(acquired_caps);
xuesong.jiang282ca572023-05-05 09:03:32 +00001093 gst_caps_unref(available_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001094 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
1095 gst_caps_unref(filter);
1096
1097 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
1098 if (gst_caps_is_empty(caps))
1099 {
1100 gst_caps_unref(caps);
1101 goto not_negotiated;
1102 }
1103
1104 /* Fixate pixel format */
1105 caps = gst_caps_fixate(caps);
1106
1107 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
1108
1109 /* Try to set negotiated format, on success replace acquired format */
1110 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
1111 gst_video_info_from_caps(&info, caps);
1112 else
1113 gst_aml_v4l2_clear_error(&error);
1114 gst_caps_unref(caps);
hanghang.luo2eec4892023-07-18 06:44:42 +00001115 gst_aml_v4l2_video_dec_set_output_status(decoder,info);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001116 if (!gst_video_decoder_negotiate(decoder))
1117 {
1118 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
1119 goto flushing;
1120 else
1121 goto not_negotiated;
1122 }
1123
1124 /* Ensure our internal pool is activated */
1125 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
1126 TRUE))
1127 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001128
1129 g_mutex_lock(&self->res_chg_lock);
1130 GST_LOG_OBJECT(decoder, "signal resolution changed");
1131 self->is_res_chg = FALSE;
1132 g_cond_signal(&self->res_chg_cond);
1133 g_mutex_unlock(&self->res_chg_lock);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001134 }
1135
1136 GST_LOG_OBJECT(decoder, "Allocate output buffer");
1137
1138 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
1139
1140 self->output_flow = GST_FLOW_OK;
1141 do
1142 {
        /* We cannot use the base class allocate helper since it takes the internal
         * stream lock. We know that the acquire may need to poll until more frames
         * come in and holding this lock would prevent that.
         */
1147 pool = gst_video_decoder_get_buffer_pool(decoder);
1148
1149 /* Pool may be NULL if we started going to READY state */
1150 if (pool == NULL)
1151 {
fei.dengbee20862022-06-14 14:59:48 +08001152 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
xuesong.jiangae1548e2022-05-06 16:38:46 +08001153 ret = GST_FLOW_FLUSHING;
1154 goto beach;
1155 }
1156
1157 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
zengliang.lidcd41462024-06-19 16:05:12 +08001158
fei.dengccc89632022-07-15 19:10:17 +08001159 //calculate a new pts for interlace stream
hanghang.luo8ec04e92023-10-09 08:14:24 +00001160 if (ret == GST_FLOW_OK && self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
fei.dengccc89632022-07-15 19:10:17 +08001161 {
            //if buffer duration is valid, halve it
1163 if (GST_BUFFER_DURATION_IS_VALID(buffer))
1164 {
1165 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
1166 }
1167 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
1168 //reset pts
hanghang.luo6a5bdff2024-04-15 06:29:43 +00001169 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL)
fei.dengccc89632022-07-15 19:10:17 +08001170 {
1171 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
1172 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1173 }
1174 }
1175
xuesong.jiangae1548e2022-05-06 16:38:46 +08001176 g_object_unref(pool);
1177
fei.deng9a5cd6e2023-06-30 12:09:18 +00001178 if (ret == GST_FLOW_OK && GST_BUFFER_FLAG_IS_SET(buffer,GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY)) {
fei.deng990965a2023-11-15 07:03:15 +00001179 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
1180 self->v4l2capture->need_drop_event = TRUE;
1181 gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
1182 if (self->is_res_chg) {
1183 //we must release last buffer
1184 gst_buffer_unref(buffer);
1185 //if resolution changed event received,we should set need_drop_event to false
1186 self->v4l2capture->need_drop_event = FALSE;
1187 gst_aml_v4l2_object_stop(self->v4l2capture);
1188 //unblock flush start event
1189 g_mutex_lock(&self->res_chg_lock);
1190 self->is_res_chg = FALSE;
1191 g_cond_signal(&self->res_chg_cond);
1192 g_mutex_unlock(&self->res_chg_lock);
1193 return;
1194 } else {
1195 goto beach;
1196 }
sheng.liub56bbc52022-06-21 11:02:33 +08001197 }
1198
zengliang.lidcd41462024-06-19 16:05:12 +08001199 if (ret == GST_AML_V4L2_FLOW_CC_DATA)
1200 {
1201 GST_DEBUG_OBJECT(decoder, "continue get cc data ");
1202 continue;
1203 }
1204
sheng.liu8d18ed22022-05-26 17:28:15 +08001205 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1206 {
1207 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001208
1209 g_mutex_lock (&self->res_chg_lock);
1210 self->is_res_chg = TRUE;
1211 g_mutex_unlock (&self->res_chg_lock);
sheng.liu8d18ed22022-05-26 17:28:15 +08001212 return;
1213 }
fei.dengaf682762024-06-24 19:06:03 +08001214 //decoding error happened
1215 if (ret == GST_AML_V4L2_FLOW_DECODING_ERROR)
1216 {
1217 continue;
1218 }
sheng.liu8d18ed22022-05-26 17:28:15 +08001219
fei.dengbee20862022-06-14 14:59:48 +08001220 if (ret != GST_FLOW_OK) {
1221 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001222 goto beach;
fei.dengbee20862022-06-14 14:59:48 +08001223 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001224
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001225 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001226 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
xuesong.jiang406ee302023-06-28 03:45:22 +00001227
1228 GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
1229 g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));
1230
xuesong.jiangae1548e2022-05-06 16:38:46 +08001231 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1232 {
1233 gst_aml_v4l2_object_stop(self->v4l2capture);
1234 return;
1235 }
1236
fei.dengaf682762024-06-24 19:06:03 +08001237 } while ((ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER) ||
1238 (ret == GST_AML_V4L2_FLOW_CC_DATA) ||
1239 (ret == GST_AML_V4L2_FLOW_DECODING_ERROR));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001240
1241 if (ret != GST_FLOW_OK)
1242 goto beach;
1243
fei.dengbee20862022-06-14 14:59:48 +08001244 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001245 if (frame)
1246 {
zengliang.li32cb11e2022-11-24 12:10:26 +08001247 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1248 {
zengliang.li92ff6822023-06-06 07:12:52 +00001249 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1250 {
1251 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1252 {
1253 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1254 }
1255 else
1256 {
                    GST_WARNING_OBJECT(decoder, "sorry, we have no baseline to calculate pts");
1258 goto beach;
1259 }
1260 }
1261 else
1262 {
1263 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1264 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1265 }
zengliang.li32cb11e2022-11-24 12:10:26 +08001266 }
fei.dengccc89632022-07-15 19:10:17 +08001267 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001268 frame->output_buffer = buffer;
fei.dengccc89632022-07-15 19:10:17 +08001269 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1270 frame->duration = GST_BUFFER_DURATION(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001271 buffer = NULL;
zengliang.lidcd41462024-06-19 16:05:12 +08001272
1273 GST_DEBUG_OBJECT (decoder,"enable_cc_data:%d",self->v4l2capture->enable_cc_data);
1274 if (self->v4l2capture->enable_cc_data)
1275 {
1276 if (foreach_cc_buffer_list_match_pts_func(v4l2_pool->cc_buffer_list, frame))
1277 {
1278 GST_DEBUG("cc buffer and frame bind success");
1279 GstBuffer *cc_buffer = gst_mini_object_get_qdata (GST_MINI_OBJECT (frame->output_buffer),
1280 GST_AML_V4L2_CC_IMPORT_QUARK);
1281 #if 0
1282 //Debug code:dump cc data
1283 GstMapInfo gst_map;
1284 gst_buffer_map(cc_buffer,&gst_map,GST_MAP_READ);
1285 int fd=open("/data/test/cc2.data",O_RDWR |O_CREAT|O_APPEND,0777);
1286 if (gst_map.size>0)
1287 write(fd,gst_map.data,gst_map.size);
1288 close(fd);
1289 gst_buffer_unmap(cc_buffer,&gst_map);
1290 #endif
1291 v4l2_pool->cc_buffer_list = g_list_remove(v4l2_pool->cc_buffer_list,cc_buffer);
1292 gst_buffer_unref(cc_buffer);
1293 }
1294 else
1295 {
1296 GST_WARNING("bufferlist is empty or no match frame in the bufferlist");
1297 }
1298 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001299 ret = gst_video_decoder_finish_frame(decoder, frame);
1300
1301 if (ret != GST_FLOW_OK)
1302 goto beach;
1303 }
1304 else
1305 {
1306 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1307 gst_buffer_unref(buffer);
1308 }
1309
1310 return;
1311 /* ERRORS */
1312not_negotiated:
1313{
1314 GST_ERROR_OBJECT(self, "not negotiated");
1315 ret = GST_FLOW_NOT_NEGOTIATED;
1316 goto beach;
1317}
1318activate_failed:
1319{
1320 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1321 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1322 (_("Failed to allocate required memory.")),
1323 ("Buffer pool activation failed"));
1324 ret = GST_FLOW_ERROR;
1325 goto beach;
1326}
1327flushing:
1328{
1329 ret = GST_FLOW_FLUSHING;
1330 goto beach;
1331}
1332beach:
1333 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1334 gst_flow_get_name(ret));
fei.deng2a06e042023-10-10 03:09:45 +00001335 if (self->is_res_chg) {
1336 //unblock flush start event
1337 g_mutex_lock(&self->res_chg_lock);
1338 self->is_res_chg = FALSE;
1339 g_cond_signal(&self->res_chg_cond);
1340 g_mutex_unlock(&self->res_chg_lock);
1341 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001342 gst_buffer_replace(&buffer, NULL);
1343 self->output_flow = ret;
1344 gst_aml_v4l2_object_unlock(self->v4l2output);
1345 gst_pad_pause_task(decoder->srcpad);
1346}
1347
1348static GstFlowReturn
1349gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1350 GstVideoCodecFrame *frame)
1351{
1352 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1353 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1354 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1355 GstFlowReturn ret = GST_FLOW_OK;
1356 gboolean processed = FALSE;
1357 GstBuffer *tmp;
1358 GstTaskState task_state;
1359 GstCaps *caps;
1360
1361 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1362
1363 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1364 goto flushing;
1365
1366 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1367 {
1368 if (!self->input_state)
1369 goto not_negotiated;
1370 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1371 &error))
1372 goto not_negotiated;
1373 }
1374
1375 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1376 {
1377 GstBuffer *codec_data;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001378 GstCapsFeatures *features = NULL;
1379
1380 features = gst_caps_get_features(self->input_state->caps, 0);
1381 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1382 {
1383 GST_DEBUG_OBJECT(self, "Is SVP");
1384 self->v4l2output->is_svp = TRUE;
1385 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001386
1387 GST_DEBUG_OBJECT(self, "Sending header");
1388
1389 codec_data = self->input_state->codec_data;
1390
        /* We are running in byte-stream mode, so we don't know the headers, but
         * we need to send something, otherwise the decoder will refuse to
         * initialize.
         */
1395 if (codec_data)
1396 {
1397 gst_buffer_ref(codec_data);
1398 }
1399 else
1400 {
1401 codec_data = gst_buffer_ref(frame->input_buffer);
1402 processed = TRUE;
1403 }
1404
1405 /* Ensure input internal pool is active */
1406 if (!gst_buffer_pool_is_active(pool))
1407 {
1408 GstStructure *config = gst_buffer_pool_get_config(pool);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001409 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1410 // guint max = VIDEO_MAX_FRAME;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001411 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1412 // self->v4l2output->info.size, min, max);
1413 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1414
1415 /* There is no reason to refuse this config */
1416 if (!gst_buffer_pool_set_config(pool, config))
1417 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001418 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001419
1420 if (!gst_buffer_pool_set_active(pool, TRUE))
1421 goto activate_failed;
1422 }
1423
1424 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1425 ret =
1426 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
hanghang.luoc54208e2023-09-22 02:43:54 +00001427 self->codec_data_inject = TRUE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001428 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1429
1430 gst_buffer_unref(codec_data);
1431
        /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
         * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
         * and returns the visible size as width/height and the coded size as
         * padding. */
1436 }
1437
1438 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1439 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1440 {
1441 /* It's possible that the processing thread stopped due to an error */
1442 if (self->output_flow != GST_FLOW_OK &&
1443 self->output_flow != GST_FLOW_FLUSHING)
1444 {
1445 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1446 ret = self->output_flow;
1447 goto drop;
1448 }
1449
1450 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1451
1452 /* Start the processing task, when it quits, the task will disable input
1453 * processing to unlock input if draining, or prevent potential block */
1454 self->output_flow = GST_FLOW_FLUSHING;
1455 if (!gst_pad_start_task(decoder->srcpad,
1456 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1457 goto start_task_failed;
1458 }
1459
1460 if (!processed)
1461 {
1462 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
hanghang.luoc54208e2023-09-22 02:43:54 +00001463 if (!self->codec_data_inject && self->input_state->codec_data)
1464 {
1465 ret = gst_aml_v4l2_buffer_pool_process
1466 (GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &self->input_state->codec_data);
1467 self->codec_data_inject = TRUE;
1468 if (ret != GST_FLOW_OK)
1469 goto send_codec_failed;
1470 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001471 ret =
1472 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1473 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1474
1475 if (ret == GST_FLOW_FLUSHING)
1476 {
1477 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1478 GST_TASK_STARTED)
1479 ret = self->output_flow;
1480 goto drop;
1481 }
1482 else if (ret != GST_FLOW_OK)
1483 {
1484 goto process_failed;
1485 }
1486 }
1487
    /* No need to keep input around */
1489 tmp = frame->input_buffer;
1490 frame->input_buffer = gst_buffer_new();
1491 gst_buffer_copy_into(frame->input_buffer, tmp,
1492 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1493 GST_BUFFER_COPY_META,
1494 0, 0);
1495 gst_buffer_unref(tmp);
1496
1497 gst_video_codec_frame_unref(frame);
1498 return ret;
1499
1500 /* ERRORS */
hanghang.luoc54208e2023-09-22 02:43:54 +00001501send_codec_failed:
    GST_ERROR_OBJECT(self, "send codec_data failed, ret is %d", ret);
1503 goto drop;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001504not_negotiated:
1505{
1506 GST_ERROR_OBJECT(self, "not negotiated");
1507 ret = GST_FLOW_NOT_NEGOTIATED;
1508 gst_aml_v4l2_error(self, &error);
1509 goto drop;
1510}
1511activate_failed:
1512{
1513 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1514 (_("Failed to allocate required memory.")),
1515 ("Buffer pool activation failed"));
1516 ret = GST_FLOW_ERROR;
1517 goto drop;
1518}
1519flushing:
1520{
1521 ret = GST_FLOW_FLUSHING;
1522 goto drop;
1523}
1524
1525start_task_failed:
1526{
1527 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1528 (_("Failed to start decoding thread.")), (NULL));
1529 ret = GST_FLOW_ERROR;
1530 goto drop;
1531}
1532process_failed:
1533{
1534 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1535 (_("Failed to process frame.")),
1536 ("Maybe be due to not enough memory or failing driver"));
1537 ret = GST_FLOW_ERROR;
1538 goto drop;
1539}
1540drop:
1541{
1542 gst_video_decoder_drop_frame(decoder, frame);
1543 return ret;
1544}
1545}
1546
1547static gboolean
1548gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1549 GstQuery *query)
1550{
1551 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1552 GstClockTime latency;
1553 gboolean ret = FALSE;
1554
1555 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1556 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1557
1558 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1559 {
1560 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1561 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1562 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1563 gst_video_decoder_set_latency(decoder, latency, latency);
1564 }
1565 else
1566 {
1567 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1568 }
1569
1570 return ret;
1571}
1572
1573static gboolean
1574gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1575{
1576 gboolean ret = TRUE;
1577 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1578
1579 switch (GST_QUERY_TYPE(query))
1580 {
1581 case GST_QUERY_CAPS:
1582 {
1583 GstCaps *filter, *result = NULL;
1584 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1585
1586 gst_query_parse_caps(query, &filter);
1587
1588 if (self->probed_srccaps)
1589 result = gst_caps_ref(self->probed_srccaps);
1590 else
1591 result = gst_pad_get_pad_template_caps(pad);
1592
1593 if (filter)
1594 {
1595 GstCaps *tmp = result;
1596 result =
1597 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1598 gst_caps_unref(tmp);
1599 }
1600
1601 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1602
1603 gst_query_set_caps_result(query, result);
1604 gst_caps_unref(result);
1605 break;
1606 }
1607
1608 default:
1609 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1610 break;
1611 }
1612
1613 return ret;
1614}
1615
1616static GstCaps *
1617gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1618{
1619 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1620 GstCaps *result;
1621
1622 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1623 filter);
1624
1625 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1626
1627 return result;
1628}
1629
1630static gboolean
1631gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1632{
1633 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1634 gboolean ret;
1635 GstEventType type = GST_EVENT_TYPE(event);
1636
1637 switch (type)
1638 {
xuesong.jiang406ee302023-06-28 03:45:22 +00001639 case GST_EVENT_STREAM_START:
1640 {
1641 GstStructure *s;
1642 GstEvent *custom_event;
1643 GST_DEBUG_OBJECT(self, "new private event");
1644 s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
1645 custom_event = gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, s);
1646 GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", custom_event);
1647 gst_pad_push_event(decoder->sinkpad, custom_event);
1648 GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", custom_event);
1649 break;
1650 }
zengliang.li51f54b62023-10-10 12:14:49 +00001651 case GST_EVENT_CAPS:
1652 {
1653 GstCaps *caps;
1654 GstStructure *structure;
sheng.liu641aa422023-12-26 07:05:59 +00001655 gint num, denom;
zengliang.li51f54b62023-10-10 12:14:49 +00001656
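/* Derive stream-mode (parsed or not), framerate and pixel-aspect-ratio hints from the negotiated sink caps. */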
1657 gst_event_parse_caps (event, &caps);
1658 structure = gst_caps_get_structure(caps, 0);
1659 if ( gst_structure_has_field(structure, "parsed") )
1660 {
1661 gboolean parsed = TRUE;
1662 if ( gst_structure_get_boolean( structure, "parsed", &parsed ) )
1663 {
1664 self->v4l2output->stream_mode = !parsed;
1665 GST_DEBUG("frame parsed:%d, set stream_mode to %d", parsed, self->v4l2output->stream_mode);
1666 }
1667 }
sheng.liu641aa422023-12-26 07:05:59 +00001668
sheng.liudb26f7d2024-01-22 11:24:23 +00001669 if ( gst_structure_get_fraction( structure, "framerate", &num, &denom ) )
1670 {
1671 if (denom == 0) denom = 1;
1672
1673 if (self->v4l2capture->fps)
1674 {
1675 g_value_unset(self->v4l2capture->fps);
1676 g_free(self->v4l2capture->fps);
1677 }
1678
1679 self->v4l2capture->fps = g_new0(GValue, 1);
1680 g_value_init(self->v4l2capture->fps, GST_TYPE_FRACTION);
1681 gst_value_set_fraction(self->v4l2capture->fps, num, denom);
1682
1683 GST_DEBUG_OBJECT(self, "get framerate ratio %d:%d", num, denom);
1684 }
1685
sheng.liu641aa422023-12-26 07:05:59 +00001686 if (( gst_structure_get_fraction( structure, "pixel-aspect-ratio", &num, &denom ) ) &&
1687 ( !self->v4l2capture->have_set_par ) )
1688 {
1689 if ( (num <= 0) || (denom <= 0) )
1690 {
1691 num = denom = 1;
1692 }
1693
1694 if ( self->v4l2capture->par )
1695 {
1696 g_value_unset(self->v4l2capture->par);
1697 g_free(self->v4l2capture->par);
1698 }
1699
1700 self->v4l2capture->par = g_new0(GValue, 1);
1701 g_value_init(self->v4l2capture->par, GST_TYPE_FRACTION);
1702 gst_value_set_fraction(self->v4l2capture->par, num, denom);
1703 GST_DEBUG_OBJECT(self, "get pixel aspect ratio %d:%d", num, denom);
1704 }
zengliang.li51f54b62023-10-10 12:14:49 +00001705 break;
1706 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001707 case GST_EVENT_FLUSH_START:
1708 GST_DEBUG_OBJECT(self, "flush start");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001709
1710 g_mutex_lock (&self->res_chg_lock);
1711 while (self->is_res_chg)
1712 {
1713 GST_LOG_OBJECT(decoder, "waiting for resolution change to finish");
1714 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1715 }
1716 g_mutex_unlock (&self->res_chg_lock);
1717
zengliang.li92ff6822023-06-06 07:12:52 +00001718 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001719 gst_aml_v4l2_object_unlock(self->v4l2output);
1720 gst_aml_v4l2_object_unlock(self->v4l2capture);
1721 break;
1722 default:
1723 break;
1724 }
1725
1726 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1727
1728 switch (type)
1729 {
1730 case GST_EVENT_FLUSH_START:
1731 /* The processing thread should stop now, wait for it */
1732 gst_pad_stop_task(decoder->srcpad);
hanghang.luoc54208e2023-09-22 02:43:54 +00001733 self->codec_data_inject = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001734 GST_DEBUG_OBJECT(self, "flush start done");
1735 break;
1736 default:
1737 break;
1738 }
1739
1740 return ret;
1741}
1742
1743static GstStateChangeReturn
1744gst_aml_v4l2_video_dec_change_state(GstElement *element,
1745 GstStateChange transition)
1746{
1747 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1748 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1749
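/* On PAUSED->READY, mark the decoder inactive and unblock both V4L2 queues so the capture task can exit before chaining up. */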
1750 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1751 {
1752 g_atomic_int_set(&self->active, FALSE);
1753 gst_aml_v4l2_object_unlock(self->v4l2output);
1754 gst_aml_v4l2_object_unlock(self->v4l2capture);
1755 gst_pad_stop_task(decoder->srcpad);
1756 }
1757
1758 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1759}
1760
1761static void
1762gst_aml_v4l2_video_dec_dispose(GObject *object)
1763{
1764 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1765
1766 gst_caps_replace(&self->probed_sinkcaps, NULL);
1767 gst_caps_replace(&self->probed_srccaps, NULL);
1768
1769 G_OBJECT_CLASS(parent_class)->dispose(object);
1770}
1771
1772static void
1773gst_aml_v4l2_video_dec_finalize(GObject *object)
1774{
1775 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1776
1777 gst_aml_v4l2_object_destroy(self->v4l2capture);
1778 gst_aml_v4l2_object_destroy(self->v4l2output);
1779
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001780 g_mutex_clear(&self->res_chg_lock);
1781 g_cond_clear(&self->res_chg_cond);
1782
xuesong.jiang61ea8012022-05-12 15:38:17 +08001783#if GST_IMPORT_LGE_PROP
1784 if (self->lge_ctxt)
1785 {
1786 if (self->lge_ctxt->app_type)
1787 g_free(self->lge_ctxt->app_type);
1788 if (self->lge_ctxt->res_info.coretype)
1789 g_free(self->lge_ctxt->res_info.coretype);
1790 free(self->lge_ctxt);
1791 }
1792
1793#endif
1794
xuesong.jiangae1548e2022-05-06 16:38:46 +08001795 G_OBJECT_CLASS(parent_class)->finalize(object);
1796}
1797
1798static void
1799gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1800{
1801 /* V4L2 objects are created in subinstance_init */
zengliang.li92ff6822023-06-06 07:12:52 +00001802 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001803 self->is_secure_path = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001804 self->is_res_chg = FALSE;
hanghang.luoc54208e2023-09-22 02:43:54 +00001805 self->codec_data_inject = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001806 g_mutex_init(&self->res_chg_lock);
1807 g_cond_init(&self->res_chg_cond);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001808#if GST_IMPORT_LGE_PROP
1809 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1810 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1811#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001812}
1813
1814static void
1815gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1816{
1817 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1818 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1819 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1820
1821 gst_video_decoder_set_packetized(decoder, TRUE);
1822
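/* The V4L2 OUTPUT queue (sink pad side) consumes the encoded bitstream, while the CAPTURE queue (src pad side) produces decoded frames. */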
1823 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1824 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1825 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1826 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1827 self->v4l2output->no_initial_format = TRUE;
1828 self->v4l2output->keep_aspect = FALSE;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001829 self->v4l2output->is_svp = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001830
1831 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1832 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1833 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1834 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1835 self->v4l2capture->need_wait_event = TRUE;
sheng.liub56bbc52022-06-21 11:02:33 +08001836 self->v4l2capture->need_drop_event = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001837}
1838
1839static void
1840gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1841{
1842 GstElementClass *element_class;
1843 GObjectClass *gobject_class;
1844 GstVideoDecoderClass *video_decoder_class;
1845
1846 parent_class = g_type_class_peek_parent(klass);
1847
1848 element_class = (GstElementClass *)klass;
1849 gobject_class = (GObjectClass *)klass;
1850 video_decoder_class = (GstVideoDecoderClass *)klass;
1851
1852 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1853 "AML V4L2 Video Decoder");
1854
1855 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1856 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1857 gobject_class->set_property =
1858 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1859 gobject_class->get_property =
1860 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1861
1862 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1863 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1864 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1865 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1866 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1867 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1868 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1869 video_decoder_class->set_format =
1870 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1871 video_decoder_class->negotiate =
1872 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1873 video_decoder_class->decide_allocation =
1874 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1875 /* FIXME propose_allocation or not ? */
1876 video_decoder_class->handle_frame =
1877 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1878 video_decoder_class->getcaps =
1879 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1880 video_decoder_class->src_query =
1881 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1882 video_decoder_class->sink_event =
1883 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1884
1885 element_class->change_state =
1886 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1887
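/* "decoded-pts" signal: takes a single guint64 PTS argument; the element pointer and signal name are advertised upstream via the "private_signal" custom event sent on GST_EVENT_STREAM_START. */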
xuesong.jiang406ee302023-06-28 03:45:22 +00001888 g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
1889 G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
1890 G_SIGNAL_RUN_LAST,
1891 0, /* class offset */
1892 NULL, /* accumulator */
1893 NULL, /* accu data */
1894 g_cclosure_marshal_generic,
1895 G_TYPE_NONE,
1896 1,
1897 G_TYPE_UINT64);
1898
xuesong.jiangae1548e2022-05-06 16:38:46 +08001899 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001900#if GST_IMPORT_LGE_PROP
1901 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1902#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001903}
1904
1905static void
1906gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1907{
1908 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1909 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1910 GstAmlV4l2VideoDecCData *cdata = data;
1911
1912 klass->default_device = cdata->device;
1913
1914 /* Note: gst_pad_template_new() takes the floating ref from the caps */
1915 gst_element_class_add_pad_template(element_class,
1916 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1917 cdata->sink_caps));
1918 gst_element_class_add_pad_template(element_class,
1919 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1920 cdata->src_caps));
1921
1922 gst_element_class_set_metadata(element_class, cdata->longname,
1923 "Codec/Decoder/Video/Hardware", cdata->description,
1924 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1925
1926 gst_caps_unref(cdata->sink_caps);
1927 gst_caps_unref(cdata->src_caps);
1928 g_free(cdata);
1929}
1930
1931/* Probing functions */
1932gboolean
1933gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1934{
1935 gboolean ret = FALSE;
1936
1937 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1938 ret = TRUE;
1939
1940 return ret;
1941}
1942
1943static gchar *
1944gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1945 const gchar *basename)
1946{
1947 gchar *codec_name = NULL;
1948 gchar *type_name = NULL;
1949 gboolean got_value = FALSE;
1950
1951#define SET_META(codec) \
1952 G_STMT_START \
1953 { \
1954 cdata->longname = "AML V4L2 " codec " Decoder"; \
1955 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1956 codec_name = g_ascii_strdown(codec, -1); \
1957 } \
1958 G_STMT_END
1959
1960 if (gst_structure_has_name(s, "image/jpeg"))
1961 {
1962 SET_META("JPEG");
1963 }
1964 else if (gst_structure_has_name(s, "video/mpeg"))
1965 {
1966 gint mpegversion = 0;
1967 gint *list = NULL;
1968 got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1969 if (FALSE == got_value)
1970 {
1971 got_value = gst_structure_get_list(s, "mpegversion", &list);
1972 if (TRUE == got_value && (1 == *list || 2 == *list))
1973 {
1974 SET_META("MPEG2");
1975 }
1976 else
1977 {
1978 SET_META("MPEG4");
1979 }
1980 }
1981 else
1982 {
1983 SET_META("MPEG4");
1984 }
1985 }
1986 else if (gst_structure_has_name(s, "video/x-h263"))
1987 {
1988 SET_META("H263");
1989 }
1990 else if (gst_structure_has_name(s, "video/x-fwht"))
1991 {
1992 SET_META("FWHT");
1993 }
1994 else if (gst_structure_has_name(s, "video/x-h264"))
1995 {
1996 SET_META("H264");
1997 }
1998 else if (gst_structure_has_name(s, "video/x-h265"))
1999 {
2000 SET_META("H265");
2001 }
2002 else if (gst_structure_has_name(s, "video/x-wmv"))
2003 {
2004 SET_META("VC1");
2005 }
2006 else if (gst_structure_has_name(s, "video/x-vp8"))
2007 {
2008 SET_META("VP8");
2009 }
2010 else if (gst_structure_has_name(s, "video/x-vp9"))
2011 {
2012 SET_META("VP9");
2013 }
2014 else if (gst_structure_has_name(s, "video/x-av1"))
2015 {
2016 SET_META("AV1");
2017 }
zengliang.li51f54b62023-10-10 12:14:49 +00002018 else if (gst_structure_has_name(s, "video/x-avs"))
2019 {
2020 SET_META("AVS");
2021 }
2022 else if (gst_structure_has_name(s, "video/x-avs2"))
2023 {
2024 SET_META("AVS2");
2025 }
2026 else if (gst_structure_has_name(s, "video/x-avs3"))
2027 {
2028 SET_META("AVS3");
2029 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08002030 else if (gst_structure_has_name(s, "video/x-bayer"))
2031 {
2032 SET_META("BAYER");
2033 }
2034 else if (gst_structure_has_name(s, "video/x-sonix"))
2035 {
2036 SET_META("SONIX");
2037 }
2038 else if (gst_structure_has_name(s, "video/x-pwc1"))
2039 {
2040 SET_META("PWC1");
2041 }
2042 else if (gst_structure_has_name(s, "video/x-pwc2"))
2043 {
2044 SET_META("PWC2");
2045 }
2046 else
2047 {
2048 /* This code should be kept in sync with the exposed CODEC type of format
2049 * from gstamlv4l2object.c. This warning will only occur in case we forget
2050 * to also add a format here. */
2051 gchar *s_str = gst_structure_to_string(s);
2052 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
2053 "bug, please report at https://bugs.gnome.org",
2054 s_str);
2055 g_free(s_str);
2056 }
2057
2058 if (codec_name)
2059 {
2060 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
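/* If the generic name is already registered (another device exposes the same codec), fall back to a name qualified with the device basename. */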
2061 if (g_type_from_name(type_name) != 0)
2062 {
2063 g_free(type_name);
2064 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
2065 }
2066
2067 g_free(codec_name);
2068 }
2069
2070 return type_name;
2071#undef SET_META
2072}
2073
2074void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
2075 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
2076{
2077 gint i;
2078
2079 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
2080 {
2081 GstAmlV4l2VideoDecCData *cdata;
2082 GstStructure *s;
2083 GTypeQuery type_query;
2084 GTypeInfo type_info = {
2085 0,
2086 };
2087 GType type, subtype;
2088 gchar *type_name;
2089
2090 s = gst_caps_get_structure(sink_caps, i);
2091
2092 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
2093 cdata->device = g_strdup(device_path);
2094 cdata->sink_caps = gst_caps_new_empty();
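/* Append the structure twice: gst_caps_set_features() below tags the first copy with DMABuf caps features, while the second copy keeps default system memory, so both are advertised on the sink pad. */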
2095 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
2096 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
2097 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
2098 cdata->src_caps = gst_caps_copy(src_caps);
2099 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
2100 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
2101 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
2102
2103 /* Skip over if we hit an unmapped type */
2104 if (!type_name)
2105 {
2106 g_free(cdata);
2107 continue;
2108 }
2109
2110 type = gst_aml_v4l2_video_dec_get_type();
2111 g_type_query(type, &type_query);
2112 memset(&type_info, 0, sizeof(type_info));
2113 type_info.class_size = type_query.class_size;
2114 type_info.instance_size = type_query.instance_size;
2115 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
2116 type_info.class_data = cdata;
2117 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
2118
2119 subtype = g_type_register_static(type, type_name, &type_info, 0);
2120 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
2121 subtype))
2122 GST_WARNING("Failed to register element '%s'", type_name);
2123
2124 g_free(type_name);
2125 }
2126}
xuesong.jiang61ea8012022-05-12 15:38:17 +08002127
2128#if GST_IMPORT_LGE_PROP
2129static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
2130{
2131 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
2132 g_param_spec_boxed("resource-info", "resource-info",
2133 "Resource information allocated after H/W resource acquisition completes; it must be delivered to the decoder and the sink",
2134 GST_TYPE_STRUCTURE,
2135 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
2136
2137 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
2138 g_param_spec_uint64("decoded-size", "decoded-size",
2139 "Total amount of video ES the decoder element has decoded since the pipeline was constructed or last flushed, in bytes.",
2140 0, G_MAXUINT64,
2141 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
2142
2143 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
2144 g_param_spec_uint64("undecoded-size", "undecoded-size",
2145 "Total amount of data queued to the video decoder element that has not yet been decoded, in bytes.",
2146 0, G_MAXUINT64,
2147 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
2148
2149 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
2150 g_param_spec_string("app-type", "app-type",
2151 "Set the application type.",
2152 "default_app",
2153 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
2154
2155 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
2156 g_param_spec_boolean("clip-mode", "clip-mode",
2157 "When seeking, content plays faster for a while so that frames can be skipped.",
2158 FALSE,
2159 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
2160}
2161#endif