1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
33#include <string.h>
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
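/* The base-class stream lock/unlock macros are redefined below only to add
 * trace logging around the recursive mutex, which makes lock ordering easier
 * to follow in the decoder logs. */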
40#ifdef GST_VIDEO_DECODER_STREAM_LOCK
41#undef GST_VIDEO_DECODER_STREAM_LOCK
42#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
43 { \
44 GST_TRACE("aml v4l2 dec locking"); \
45 g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
46 GST_TRACE("aml v4l2 dec locked"); \
47 }
48#endif
49
50#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
51#undef GST_VIDEO_DECODER_STREAM_UNLOCK
52#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
53 { \
54 GST_TRACE("aml v4l2 dec unlocking"); \
55 g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
56 GST_TRACE("aml v4l2 dec unlocked"); \
57 }
58#endif
59
60#ifndef ABSDIFF
61#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
62#endif
63
64#if GST_IMPORT_LGE_PROP
65typedef struct _GstAmlResourceInfo
66{
67 gchar *coretype;
68 gint videoport;
69 gint audioport;
70 gint maxwidth;
71 gint maxheight;
72 gint mixerport;
73} GstAmlResourceInfo;
74
75struct _GstAmlV4l2VideoDecLgeCtxt
76{
77 GstAmlResourceInfo res_info;
78 guint64 dec_size;
79 guint64 undec_size;
80 gchar *app_type;
81 gboolean clip_mode;
82};
83#endif
84
85typedef struct
86{
87 gchar *device;
88 GstCaps *sink_caps;
89 GstCaps *src_caps;
90 const gchar *longname;
91 const gchar *description;
92} GstAmlV4l2VideoDecCData;
93
94enum
95{
96 PROP_0,
97 V4L2_STD_OBJECT_PROPS,
98#if GST_IMPORT_LGE_PROP
99 LGE_RESOURCE_INFO,
100 LGE_DECODE_SIZE,
101 LGE_UNDECODE_SIZE,
102 LGE_APP_TYPE,
103 LGE_CLIP_MODE
104#endif
105};
106
107enum
108{
109 SIGNAL_DECODED_PTS,
110 MAX_SIGNAL
111};
112
113static guint g_signals[MAX_SIGNAL]= {0};
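/* "decoded-pts" is emitted from the capture loop with the PTS of every buffer
 * pushed downstream; on GST_EVENT_STREAM_START the element also advertises the
 * signal upstream through a custom "private_signal" event. */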
114
115#define gst_aml_v4l2_video_dec_parent_class parent_class
116G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
117 GST_TYPE_VIDEO_DECODER);
118
119static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
120#if GST_IMPORT_LGE_PROP
121static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
122#endif
123
124static void
125gst_aml_v4l2_video_dec_set_property(GObject *object,
126 guint prop_id, const GValue *value, GParamSpec *pspec)
127{
128 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
129
130 switch (prop_id)
131 {
132 case PROP_CAPTURE_IO_MODE:
133 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
134 prop_id, value, pspec))
135 {
136 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
137 }
138 break;
139 case PROP_DUMP_FRAME_LOCATION:
140 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
141 prop_id, value, pspec))
142 {
143 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
144 }
145 break;
146#if GST_IMPORT_LGE_PROP
147 case LGE_RESOURCE_INFO:
148 {
149 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
150 GstStructure *r_info = g_value_get_object(value);
151 if (r_info)
152 {
153 if (gst_structure_has_field(r_info, "coretype"))
154 {
155 if (self->lge_ctxt->res_info.coretype)
156 g_free(self->lge_ctxt->res_info.coretype);
157 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
158 }
159 if (gst_structure_has_field(r_info, "videoport"))
160 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
161 if (gst_structure_has_field(r_info, "audioport"))
162 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
163 if (gst_structure_has_field(r_info, "maxwidth"))
164 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
165 if (gst_structure_has_field(r_info, "maxheight"))
166 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
167 if (gst_structure_has_field(r_info, "mixerport"))
168 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
169 }
170 break;
171 }
172 case LGE_APP_TYPE:
173 {
174 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
175 if (self->lge_ctxt->app_type)
176 g_free(self->lge_ctxt->app_type);
177 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
178 break;
179 }
180 case LGE_CLIP_MODE:
181 {
182 GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
183 self->lge_ctxt->clip_mode = g_value_get_boolean(value);
184 break;
185 }
186#endif
187 /* By default, only set on output */
188 default:
189 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
190 prop_id, value, pspec))
191 {
192 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
193 }
194 break;
195 }
196}
197
198static void
199gst_aml_v4l2_video_dec_get_property(GObject *object,
200 guint prop_id, GValue *value, GParamSpec *pspec)
201{
202 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
203
204 switch (prop_id)
205 {
206 case PROP_CAPTURE_IO_MODE:
207 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
208 prop_id, value, pspec))
209 {
210 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
211 }
212 break;
213
214#if GST_IMPORT_LGE_PROP
215 case LGE_DECODE_SIZE:
216 {
217 GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
218 self->lge_ctxt->dec_size = -1;
219 g_value_set_int(value, self->lge_ctxt->dec_size);
220 break;
221 }
222 case LGE_UNDECODE_SIZE:
223 {
224 GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
225 self->lge_ctxt->undec_size = -1;
226 g_value_set_int(value, self->lge_ctxt->undec_size);
227 break;
228 }
229#endif
230
231 /* By default read from output */
232 default:
233 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
234 prop_id, value, pspec))
235 {
236 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
237 }
238 break;
239 }
240}
241
242static gboolean
243gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
244{
245 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
246 GstCaps *codec_caps;
247
248 GST_DEBUG_OBJECT(self, "Opening");
249
250 if (!gst_aml_v4l2_object_open(self->v4l2output))
251 goto failure;
252
253 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
254 goto failure;
255
256 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
257 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
258 codec_caps);
259 gst_caps_unref(codec_caps);
260
261 if (gst_caps_is_empty(self->probed_sinkcaps))
262 goto no_encoded_format;
263
264 return TRUE;
265
266no_encoded_format:
267 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
268 (_("Decoder on device %s has no supported input format"),
269 self->v4l2output->videodev),
270 (NULL));
271 goto failure;
272
273failure:
274 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
275 gst_aml_v4l2_object_close(self->v4l2output);
276
277 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
278 gst_aml_v4l2_object_close(self->v4l2capture);
279
280 gst_caps_replace(&self->probed_srccaps, NULL);
281 gst_caps_replace(&self->probed_sinkcaps, NULL);
282
283 return FALSE;
284}
285
286static gboolean
287gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
288{
289 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
290
291 GST_DEBUG_OBJECT(self, "Closing");
292
293 gst_aml_v4l2_object_close(self->v4l2output);
294 gst_aml_v4l2_object_close(self->v4l2capture);
295 gst_caps_replace(&self->probed_srccaps, NULL);
296 gst_caps_replace(&self->probed_sinkcaps, NULL);
297
298 return TRUE;
299}
300
301static gboolean
302gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
303{
304 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
305
306 GST_DEBUG_OBJECT(self, "Starting");
307
308 gst_aml_v4l2_object_unlock(self->v4l2output);
309 g_atomic_int_set(&self->active, TRUE);
310 self->output_flow = GST_FLOW_OK;
311
312 return TRUE;
313}
314
315static gboolean
316gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
317{
318 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
319
320 GST_DEBUG_OBJECT(self, "Stopping");
321
322 gst_aml_v4l2_object_unlock(self->v4l2output);
323 gst_aml_v4l2_object_unlock(self->v4l2capture);
324
325 /* Wait for capture thread to stop */
326 gst_pad_stop_task(decoder->srcpad);
327
328 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
329 self->output_flow = GST_FLOW_OK;
330 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
331
332 /* Should have been flushed already */
333 g_assert(g_atomic_int_get(&self->active) == FALSE);
334
335 gst_aml_v4l2_object_stop(self->v4l2output);
336 gst_aml_v4l2_object_stop(self->v4l2capture);
337
338 if (self->input_state)
339 {
340 gst_video_codec_state_unref(self->input_state);
341 self->input_state = NULL;
342 }
343
344 GST_DEBUG_OBJECT(self, "Stopped");
345
346 return TRUE;
347}
348
349static gboolean
350gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
351 GstVideoCodecState *state)
352{
353 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
354 gboolean ret = TRUE;
355 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
356 GstCaps *caps;
357
358 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
359 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
360
361 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
362 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
363
364 if (self->input_state)
365 {
366 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
367 {
368 GST_DEBUG_OBJECT(self, "Compatible caps");
369 goto done;
370 }
371 gst_video_codec_state_unref(self->input_state);
372 self->input_state = NULL;
373
374 gst_aml_v4l2_video_dec_finish(decoder);
375 gst_aml_v4l2_object_stop(self->v4l2output);
376
377 /* The renegotiation flow doesn't blend with the base class flow. To properly
378 * stop the capture pool, if the buffers can't be orphaned, we need to
379 * reclaim our buffers, which will happen through the allocation query.
380 * The allocation query is triggered by gst_video_decoder_negotiate() which
381 * requires the output caps to be set, but we can't know this information
382 * as we rely on the decoder, which requires the capture queue to be
383 * stopped.
384 *
385 * To work around this issue, we simply run an allocation query with the
386 * old negotiated caps in order to drain/reclaim our buffers. That avoids
387 * the complexity and should not have much impact on performance since the
388 * following allocation query will happen on a drained pipeline and won't
389 * block. */
390 if (self->v4l2capture->pool &&
391 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
392 {
393 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
394 if (caps)
395 {
396 GstQuery *query = gst_query_new_allocation(caps, FALSE);
397 gst_pad_peer_query(decoder->srcpad, query);
398 gst_query_unref(query);
399 gst_caps_unref(caps);
400 }
401 }
402
403 gst_aml_v4l2_object_stop(self->v4l2capture);
404 self->output_flow = GST_FLOW_OK;
405 }
406
407 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
408 {
409 GST_ERROR_OBJECT(self, "config output drm mode error");
410 goto done;
411 }
412
413 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
414 {
415 GST_ERROR_OBJECT(self, "config output stream mode error");
416 goto done;
417 }
418
419 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
420
421 gst_caps_replace(&self->probed_srccaps, NULL);
422 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
423 gst_aml_v4l2_object_get_raw_caps());
424
425 if (gst_caps_is_empty(self->probed_srccaps))
426 goto no_raw_format;
427
428 caps = gst_caps_copy(self->probed_srccaps);
429 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
430 gst_caps_append(self->probed_srccaps, caps);
431 if (ret)
432 self->input_state = gst_video_codec_state_ref(state);
433 else
434 gst_aml_v4l2_error(self, &error);
435
436done:
437 return ret;
438
439no_raw_format:
440 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
441 (_("Decoder on device %s has no supported output format"),
442 self->v4l2output->videodev),
443 (NULL));
444 return FALSE;
445}
446
447static gboolean
448gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
449{
450 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
451
452 GST_DEBUG_OBJECT(self, "Flushed");
453
454 /* Ensure the processing thread has stopped for the reverse playback
455 * discont case */
456 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
457 {
458 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
459
460 gst_aml_v4l2_object_unlock(self->v4l2output);
461 gst_aml_v4l2_object_unlock(self->v4l2capture);
462 gst_pad_stop_task(decoder->srcpad);
463 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
464 }
465
466 self->output_flow = GST_FLOW_OK;
467
468 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
469 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
470
471 if (self->v4l2output->pool)
472 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
473
474 /* gst_aml_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
475 * called after gst_aml_v4l2_object_unlock_stop() stopped flushing the buffer
476 * pool. */
477 if (self->v4l2capture->pool)
478 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
479
480 return TRUE;
481}
482
483static gboolean
484gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
485{
486 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
487
488 if (TRUE == self->v4l2output->is_svp)
489 {
490 GstStructure *s;
491 GstEvent *event;
492
493 s = gst_structure_new_empty ("IS_SVP");
494 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
495 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
496 gst_pad_push_event (decoder->srcpad, event);
497 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
498 }
499
500 /* We don't allow renegotiation without carefully disabling the pool */
501 if (self->v4l2capture->pool &&
502 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
503 return TRUE;
504
505 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
506}
507
508static gboolean
509gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
510{
511 struct v4l2_decoder_cmd dcmd = {
512 0,
513 };
514
515 GST_DEBUG_OBJECT(v4l2object->element,
516 "sending v4l2 decoder command %u with flags %u", cmd, flags);
517
518 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
519 return FALSE;
520
521 dcmd.cmd = cmd;
522 dcmd.flags = flags;
523 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
524 goto dcmd_failed;
525
526 return TRUE;
527
528dcmd_failed:
529 if (errno == ENOTTY)
530 {
531 GST_INFO_OBJECT(v4l2object->element,
532 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
533 cmd, flags, v4l2object->videodev, g_strerror(errno));
534 }
535 else
536 {
537 GST_ERROR_OBJECT(v4l2object->element,
538 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
539 cmd, flags, v4l2object->videodev, g_strerror(errno));
540 }
541 return FALSE;
542}
543
544static GstFlowReturn
545gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
546{
547 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
548 GstFlowReturn ret = GST_FLOW_OK;
549 GstBuffer *buffer;
550
551 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
552 goto done;
553
554 GST_DEBUG_OBJECT(self, "Finishing decoding");
555
556 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
557
558 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
559 {
560 GstTask *task = decoder->srcpad->task;
561
562 /* If the decoder stop command succeeded, just wait until processing is
563 * finished */
564 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
565 GST_OBJECT_LOCK(task);
566 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
567 GST_TASK_WAIT(task);
568 GST_OBJECT_UNLOCK(task);
569 ret = GST_FLOW_FLUSHING;
570 }
571 else
572 {
573 /* otherwise keep queuing empty buffers until the processing thread has
574 * stopped, _pool_process() will return FLUSHING when that happened */
575 while (ret == GST_FLOW_OK)
576 {
577 GST_DEBUG_OBJECT(self, "queue empty output buf");
578 buffer = gst_buffer_new();
579 ret =
580 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
581 gst_buffer_unref(buffer);
582 }
583 }
584
585 /* and ensure the processing thread has stopped in case another error
586 * occurred. */
587 gst_aml_v4l2_object_unlock(self->v4l2capture);
588 gst_pad_stop_task(decoder->srcpad);
589 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
590
591 if (ret == GST_FLOW_FLUSHING)
592 ret = self->output_flow;
593
594 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
595
596 /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */
597
598done:
599 return ret;
600}
601
602static GstFlowReturn
603gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
604{
605 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
606
607 GST_DEBUG_OBJECT(self, "Draining...");
608 gst_aml_v4l2_video_dec_finish(decoder);
609 gst_aml_v4l2_video_dec_flush(decoder);
610
611 return GST_FLOW_OK;
612}
613
614static GstVideoCodecFrame *
615gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
616{
617 GstVideoCodecFrame *frame = NULL;
618 GList *frames, *l;
619 gint count = 0;
620
621 frames = gst_video_decoder_get_frames(decoder);
622
623 for (l = frames; l != NULL; l = l->next)
624 {
625 GstVideoCodecFrame *f = l->data;
626
627 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
628 frame = f;
629
630 count++;
631 }
632
633 if (frame)
634 {
635 GST_LOG_OBJECT(decoder,
636 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
637 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
638 gst_video_codec_frame_ref(frame);
639 }
640
641 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
642
643 return frame;
644}
645
646static GstVideoCodecFrame *
647gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
648{
649 GstVideoCodecFrame *frame = NULL;
650 GList *frames, *l;
651 gint count = 0;
652
653 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
654
655 frames = gst_video_decoder_get_frames(decoder);
656
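 /* Match the decoded buffer against the pending frame whose PTS lies within
 * 1000 ns of the buffer timestamp; if several qualify, the last one wins. */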
657 for (l = frames; l != NULL; l = l->next)
658 {
659 GstVideoCodecFrame *f = l->data;
660
661 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
662 frame = f;
663 }
664 count++;
665 }
666
667 if (!frame)
668 {
669 for (l = frames; l != NULL; l = l->next)
670 {
671 GstVideoCodecFrame *f = l->data;
672 if (!GST_CLOCK_TIME_IS_VALID(f->pts))
673 {
674 frame = f;
675 }
676 GST_DEBUG("The pts of the expected output frame is invalid");
677 }
678 }
679
680 if (frame)
681 {
682 GST_LOG_OBJECT(decoder,
683 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
684 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
685 gst_video_codec_frame_ref(frame);
686 }
687
688 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
689
690 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
691 return frame;
692}
693
694static GstVideoCodecFrame *
695gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
696{
697 GstVideoCodecFrame *frame = NULL;
698 GList *frames, *l;
699 gint count = 0;
700
701 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
702
703 frames = gst_video_decoder_get_frames(decoder);
704 guint frames_len = 0;
705 frames_len = g_list_length(frames);
706 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
707
708 frame = frames->data;
709
710 for (l = frames; l != NULL; l = l->next)
711 {
712 GstVideoCodecFrame *f = l->data;
713
714 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
715 {
716 /* found the right frame */
717 frame = f;
718 break;
719 }
720 else if(GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
721 {
722 GST_LOG_OBJECT(decoder,
723 "stream mode drop frame %d %" GST_TIME_FORMAT,
724 frame->system_frame_number, GST_TIME_ARGS(frame->pts));
725
726 gst_video_codec_frame_ref(f);
727 // gst_video_decoder_drop_frame(decoder, f);
728 gst_video_decoder_release_frame(decoder, f);
729 }
730 else
731 {
732 GST_LOG_OBJECT (decoder, "dbg");
733 }
734 }
735
736 if (frame)
737 {
738 guint l_len = 0;
739 l = gst_video_decoder_get_frames(decoder);
740 l_len = g_list_length(l);
741 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
742
743 GST_LOG_OBJECT(decoder,
744 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
745 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
746 gst_video_codec_frame_ref(frame);
747 }
748
749 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
750
751 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
752 return frame;
753}
754
755static GstVideoCodecFrame *
756gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
757{
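 /* In stream mode the driver is fed an unframed byte stream, so pending frames
 * older than the reported PTS are released while searching; in frame mode a
 * direct PTS match against the frame list is enough. */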
758 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
759 if (self->v4l2output->stream_mode)
760 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
761 else
762 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
763}
764
765static gboolean
766gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
767 GstStructure *structure, gpointer user_data)
768{
769 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
770 GstVideoAlignment *align = &self->v4l2capture->align;
771 GstVideoInfo *info = &self->v4l2capture->info;
772 int width, height;
773
774 if (!gst_structure_get_int(structure, "width", &width))
775 return TRUE;
776
777 if (!gst_structure_get_int(structure, "height", &height))
778 return TRUE;
779
780 if (align->padding_left != 0 || align->padding_top != 0 ||
781 height != info->height + align->padding_bottom)
782 return TRUE;
783
784 if (height == info->height + align->padding_bottom)
785 {
786 /* Some drivers may round up width to the padded width */
787 if (width == info->width + align->padding_right)
788 gst_structure_set(structure,
789 "width", G_TYPE_INT, width - align->padding_right,
790 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
791 /* Some drivers may keep visible width and only round up bytesperline */
792 else if (width == info->width)
793 gst_structure_set(structure,
794 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
795 }
796
797 return TRUE;
798}
799
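/* Dequeue and discard one pending V4L2 event from the capture queue; used to
 * swallow the stale SOURCE_CHANGE event left queued after the last buffer has
 * been processed. */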
800static void
801gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
802{
803 struct v4l2_event evt;
804 gint ret;
805
806 memset (&evt, 0x00, sizeof (struct v4l2_event));
807 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
808 if (ret < 0)
809 {
810 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
811 return;
812 }
813
814 switch (evt.type)
815 {
816 case V4L2_EVENT_SOURCE_CHANGE:
817 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
818 break;
819 case V4L2_EVENT_EOS:
820 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
821 break;
822 default:
823 break;
824 }
825
826 return;
827}
828
829static void
830gst_aml_v4l2_video_dec_set_fence(GstVideoDecoder *decoder, GstVideoInfo *info)
831{
832 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
833 GstStructure *s;
834 GstEvent *event;
835
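 /* Tell downstream (via the custom "video_fence" event) how many decoded
 * buffers are available for fencing: interlaced streams use the default
 * capture buffer margin, progressive streams the negotiated minimum minus
 * two. */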
836 guint fence_num = (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) ?
837 GST_AML_V4L2_DEFAULT_CAP_BUF_MARGIN : self->v4l2capture->min_buffers-2;
838 s = gst_structure_new ("video_fence","fence_num",G_TYPE_UINT,fence_num,NULL);
839 if (s)
840 {
841 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
842 GST_DEBUG_OBJECT(self,"Send video_fence Event: %"GST_PTR_FORMAT,event);
843 gst_pad_push_event (decoder->srcpad, event);
844 }
845}
846
847static void
848gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
849{
850 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
851 GstAmlV4l2BufferPool *v4l2_pool;
852 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
853 GstBufferPool *pool;
854 GstVideoCodecFrame *frame;
855 GstBuffer *buffer = NULL;
856 GstFlowReturn ret;
857
858 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
859 {
860 GstVideoInfo info;
861 GstVideoCodecState *output_state;
862 GstCaps *acquired_caps, *available_caps, *caps, *filter;
863 GstStructure *st;
864
865 GST_DEBUG_OBJECT(self, "waiting for source change event");
866 /* Wait until the SOURCE_CHANGE event is received to get the right video format */
867 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
868 {
869 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
870 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
871 {
872 GST_DEBUG_OBJECT(self, "Received source change event");
873 break;
874 }
875 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
876 {
877 GST_DEBUG_OBJECT(self, "Received eos event");
878 goto beach;
879 }
880 else if (ret != GST_FLOW_OK)
881 {
882 GST_ERROR_OBJECT(self, "dqevent error");
883 goto beach;
884 }
885 }
886 self->v4l2capture->need_wait_event = FALSE;
887
888 if (TRUE == self->v4l2output->is_svp)
889 {
890 GstPad *peer;
891 GstStructure *s;
892 GstEvent *event;
893
894 peer = gst_pad_get_peer (decoder->srcpad);
895 if (peer)
896 {
897 s = gst_structure_new_empty ("IS_SVP");
898 if (s)
899 {
900 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
901 gst_pad_send_event (peer, event);
902 GST_DEBUG_OBJECT(self, "Send SVP Event");
903 }
904 gst_object_unref (peer);
905 }
906 }
907
908 if (self->v4l2capture->need_drop_event)
909 {
910 // drop V4L2_EVENT_SOURCE_CHANGE
911 gst_v4l2_drop_event(self->v4l2capture);
912 self->v4l2capture->need_drop_event = FALSE;
913 }
914
915 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
916 goto not_negotiated;
917
918 /* Create caps from the acquired format, remove the format field */
919 acquired_caps = gst_video_info_to_caps(&info);
920 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
921 st = gst_caps_get_structure(acquired_caps, 0);
922 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
923
924 /* Probe currently available pixel formats */
925 available_caps = gst_caps_copy(self->probed_srccaps);
926 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
927
928 /* Replace coded size with visible size, we want to negotiate visible size
929 * with downstream, not coded size. */
930 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
931
932 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
933 caps = gst_caps_copy(filter);
934 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
935 gst_caps_append(filter, caps);
936
937 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
938 gst_caps_unref(acquired_caps);
939 gst_caps_unref(available_caps);
940 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
941 gst_caps_unref(filter);
942
943 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
944 if (gst_caps_is_empty(caps))
945 {
946 gst_caps_unref(caps);
947 goto not_negotiated;
948 }
949
950 /* Fixate pixel format */
951 caps = gst_caps_fixate(caps);
952
953 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
954
955 /* Try to set negotiated format, on success replace acquired format */
956 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
957 gst_video_info_from_caps(&info, caps);
958 else
959 gst_aml_v4l2_clear_error(&error);
960 gst_caps_unref(caps);
961 gst_aml_v4l2_video_dec_set_fence(decoder, &info);
962 output_state = gst_video_decoder_set_output_state(decoder,
963 info.finfo->format, info.width, info.height, self->input_state);
964
965 /* Copy the rest of the information, there might be more in the future */
966 output_state->info.interlace_mode = info.interlace_mode;
967 gst_video_codec_state_unref(output_state);
968
969 if (!gst_video_decoder_negotiate(decoder))
970 {
971 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
972 goto flushing;
973 else
974 goto not_negotiated;
975 }
976
977 /* Ensure our internal pool is activated */
978 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
979 TRUE))
980 goto activate_failed;
981
982 g_mutex_lock(&self->res_chg_lock);
983 GST_LOG_OBJECT(decoder, "signal resolution changed");
984 self->is_res_chg = FALSE;
985 g_cond_signal(&self->res_chg_cond);
986 g_mutex_unlock(&self->res_chg_lock);
987 }
988
989 GST_LOG_OBJECT(decoder, "Allocate output buffer");
990
991 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
992
993 self->output_flow = GST_FLOW_OK;
994 do
995 {
996 /* We cannot use the base class allocate helper since it takes the internal
997 * stream lock. We know that the acquire may need to poll until more frames
998 * come in, and holding this lock would prevent that.
999 */
1000 pool = gst_video_decoder_get_buffer_pool(decoder);
1001
1002 /* Pool may be NULL if we started going to READY state */
1003 if (pool == NULL)
1004 {
1005 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
1006 ret = GST_FLOW_FLUSHING;
1007 goto beach;
1008 }
1009
1010 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
1011 //calculate a new pts for interlace stream
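 // One field is output per buffer here, so the buffer duration is halved and,
 // when the driver repeats or zeroes the timestamp, the next PTS is synthesized
 // from the doubled field rate (2 * fps).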
1012 if (ret == GST_FLOW_OK &&
1013 self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
1014 {
1015 //if buffer duration is valid, reduce it by 1/2
1016 if (GST_BUFFER_DURATION_IS_VALID(buffer))
1017 {
1018 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
1019 }
1020 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
1021 //reset pts
1022 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
1023 {
1024 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
1025 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1026 }
1027 }
1028
1029 g_object_unref(pool);
1030
1031 if (ret == GST_FLOW_OK && GST_BUFFER_FLAG_IS_SET(buffer, GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY)) {
1032 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
1033 self->v4l2capture->need_drop_event = TRUE;
1034 gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
1035 goto beach;
1036 }
1037
1038 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1039 {
1040 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
1041
1042 g_mutex_lock (&self->res_chg_lock);
1043 self->is_res_chg = TRUE;
1044 g_mutex_unlock (&self->res_chg_lock);
1045
1046 gst_aml_v4l2_object_stop(self->v4l2capture);
1047 return;
1048 }
1049
1050 if (ret != GST_FLOW_OK) {
1051 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
1052 goto beach;
1053 }
1054
1055 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
1056 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
1057
1058 GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
1059 g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));
1060
1061 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1062 {
1063 gst_aml_v4l2_object_stop(self->v4l2capture);
1064 return;
1065 }
1066
1067 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
1068
1069 if (ret != GST_FLOW_OK)
1070 goto beach;
1071
1072 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
1073 if (frame)
1074 {
1075 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1076 {
1077 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1078 {
1079 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1080 {
1081 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1082 }
1083 else
1084 {
1085 GST_WARNING_OBJECT (decoder, "sorry, we have no baseline to calculate pts");
1086 goto beach;
1087 }
1088 }
1089 else
1090 {
1091 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1092 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1093 }
1094 }
1095 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
1096 frame->output_buffer = buffer;
1097 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1098 frame->duration = GST_BUFFER_DURATION(buffer);
1099 buffer = NULL;
1100 ret = gst_video_decoder_finish_frame(decoder, frame);
1101
1102 if (ret != GST_FLOW_OK)
1103 goto beach;
1104 }
1105 else
1106 {
1107 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1108 gst_buffer_unref(buffer);
1109 }
1110
1111 return;
1112 /* ERRORS */
1113not_negotiated:
1114{
1115 GST_ERROR_OBJECT(self, "not negotiated");
1116 ret = GST_FLOW_NOT_NEGOTIATED;
1117 goto beach;
1118}
1119activate_failed:
1120{
1121 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1122 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1123 (_("Failed to allocate required memory.")),
1124 ("Buffer pool activation failed"));
1125 ret = GST_FLOW_ERROR;
1126 goto beach;
1127}
1128flushing:
1129{
1130 ret = GST_FLOW_FLUSHING;
1131 goto beach;
1132}
1133beach:
1134 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1135 gst_flow_get_name(ret));
1136
1137 gst_buffer_replace(&buffer, NULL);
1138 self->output_flow = ret;
1139 gst_aml_v4l2_object_unlock(self->v4l2output);
1140 gst_pad_pause_task(decoder->srcpad);
1141}
1142
1143static GstFlowReturn
1144gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1145 GstVideoCodecFrame *frame)
1146{
1147 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1148 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1149 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1150 GstFlowReturn ret = GST_FLOW_OK;
1151 gboolean processed = FALSE;
1152 GstBuffer *tmp;
1153 GstTaskState task_state;
1154 GstCaps *caps;
1155
1156 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1157
1158 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1159 goto flushing;
1160
1161 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1162 {
1163 if (!self->input_state)
1164 goto not_negotiated;
1165 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1166 &error))
1167 goto not_negotiated;
1168 }
1169
1170 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1171 {
1172 GstBuffer *codec_data;
1173 GstCapsFeatures *features = NULL;
1174
1175 features = gst_caps_get_features(self->input_state->caps, 0);
1176 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1177 {
1178 GST_DEBUG_OBJECT(self, "Is SVP");
1179 self->v4l2output->is_svp = TRUE;
1180 }
1181
1182 GST_DEBUG_OBJECT(self, "Sending header");
1183
1184 codec_data = self->input_state->codec_data;
1185
1186 /* We are running in byte-stream mode, so we don't know the headers, but
1187 * we need to send something, otherwise the decoder will refuse to
1188 * initialize.
1189 */
1190 if (codec_data)
1191 {
1192 gst_buffer_ref(codec_data);
1193 }
1194 else
1195 {
1196 codec_data = gst_buffer_ref(frame->input_buffer);
1197 processed = TRUE;
1198 }
1199
1200 /* Ensure input internal pool is active */
1201 if (!gst_buffer_pool_is_active(pool))
1202 {
1203 GstStructure *config = gst_buffer_pool_get_config(pool);
1204 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1205 // guint max = VIDEO_MAX_FRAME;
1206 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1207 // self->v4l2output->info.size, min, max);
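 /* min and max are both set to the driver's minimum so the output pool stays
 * at exactly the size the decoder negotiated. */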
1208 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1209
1210 /* There is no reason to refuse this config */
1211 if (!gst_buffer_pool_set_config(pool, config))
1212 goto activate_failed;
1213 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
1214
1215 if (!gst_buffer_pool_set_active(pool, TRUE))
1216 goto activate_failed;
1217 }
1218
1219 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1220 ret =
1221 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
1222 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1223
1224 gst_buffer_unref(codec_data);
1225
1226 /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
1227 * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
1228 * and returns the visible size as width/height and the coded size as
1229 * padding. */
1230 }
1231
1232 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1233 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1234 {
1235 /* It's possible that the processing thread stopped due to an error */
1236 if (self->output_flow != GST_FLOW_OK &&
1237 self->output_flow != GST_FLOW_FLUSHING)
1238 {
1239 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1240 ret = self->output_flow;
1241 goto drop;
1242 }
1243
1244 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1245
1246 /* Start the processing task, when it quits, the task will disable input
1247 * processing to unlock input if draining, or prevent potential block */
1248 self->output_flow = GST_FLOW_FLUSHING;
1249 if (!gst_pad_start_task(decoder->srcpad,
1250 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1251 goto start_task_failed;
1252 }
1253
1254 if (!processed)
1255 {
1256 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1257 ret =
1258 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1259 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1260
1261 if (ret == GST_FLOW_FLUSHING)
1262 {
1263 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1264 GST_TASK_STARTED)
1265 ret = self->output_flow;
1266 goto drop;
1267 }
1268 else if (ret != GST_FLOW_OK)
1269 {
1270 goto process_failed;
1271 }
1272 }
1273
1274 /* No need to keep the input around */
1275 tmp = frame->input_buffer;
1276 frame->input_buffer = gst_buffer_new();
1277 gst_buffer_copy_into(frame->input_buffer, tmp,
1278 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1279 GST_BUFFER_COPY_META,
1280 0, 0);
1281 gst_buffer_unref(tmp);
1282
1283 gst_video_codec_frame_unref(frame);
1284 return ret;
1285
1286 /* ERRORS */
1287not_negotiated:
1288{
1289 GST_ERROR_OBJECT(self, "not negotiated");
1290 ret = GST_FLOW_NOT_NEGOTIATED;
1291 gst_aml_v4l2_error(self, &error);
1292 goto drop;
1293}
1294activate_failed:
1295{
1296 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1297 (_("Failed to allocate required memory.")),
1298 ("Buffer pool activation failed"));
1299 ret = GST_FLOW_ERROR;
1300 goto drop;
1301}
1302flushing:
1303{
1304 ret = GST_FLOW_FLUSHING;
1305 goto drop;
1306}
1307
1308start_task_failed:
1309{
1310 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1311 (_("Failed to start decoding thread.")), (NULL));
1312 ret = GST_FLOW_ERROR;
1313 goto drop;
1314}
1315process_failed:
1316{
1317 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1318 (_("Failed to process frame.")),
1319 ("Maybe be due to not enough memory or failing driver"));
1320 ret = GST_FLOW_ERROR;
1321 goto drop;
1322}
1323drop:
1324{
1325 gst_video_decoder_drop_frame(decoder, frame);
1326 return ret;
1327}
1328}
1329
1330static gboolean
1331gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1332 GstQuery *query)
1333{
1334 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1335 GstClockTime latency;
1336 gboolean ret = FALSE;
1337
1338 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1339 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1340
1341 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1342 {
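 /* Report latency as the capture queue depth times the frame duration, i.e.
 * the worst case number of frames the decoder may hold before output. */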
1343 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1344 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1345 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1346 gst_video_decoder_set_latency(decoder, latency, latency);
1347 }
1348 else
1349 {
1350 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1351 }
1352
1353 return ret;
1354}
1355
1356static gboolean
1357gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1358{
1359 gboolean ret = TRUE;
1360 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1361
1362 switch (GST_QUERY_TYPE(query))
1363 {
1364 case GST_QUERY_CAPS:
1365 {
1366 GstCaps *filter, *result = NULL;
1367 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1368
1369 gst_query_parse_caps(query, &filter);
1370
1371 if (self->probed_srccaps)
1372 result = gst_caps_ref(self->probed_srccaps);
1373 else
1374 result = gst_pad_get_pad_template_caps(pad);
1375
1376 if (filter)
1377 {
1378 GstCaps *tmp = result;
1379 result =
1380 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1381 gst_caps_unref(tmp);
1382 }
1383
1384 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1385
1386 gst_query_set_caps_result(query, result);
1387 gst_caps_unref(result);
1388 break;
1389 }
1390
1391 default:
1392 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1393 break;
1394 }
1395
1396 return ret;
1397}
1398
1399static GstCaps *
1400gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1401{
1402 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1403 GstCaps *result;
1404
1405 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1406 filter);
1407
1408 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1409
1410 return result;
1411}
1412
1413static gboolean
1414gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1415{
1416 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1417 gboolean ret;
1418 GstEventType type = GST_EVENT_TYPE(event);
1419
1420 switch (type)
1421 {
1422 case GST_EVENT_STREAM_START:
1423 {
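 /* Advertise the "decoded-pts" signal upstream: the custom event carries this
 * element's pointer and the signal name so interested upstream elements can
 * connect to it. */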
1424 GstStructure *s;
1425 GstEvent *event;
1426 GST_DEBUG_OBJECT(self, "new private event");
1427 s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
1428 event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
1429 GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", event);
1430 gst_pad_push_event (decoder->sinkpad, event);
1431 GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", event);
1432 break;
1433 }
1434 case GST_EVENT_FLUSH_START:
1435 GST_DEBUG_OBJECT(self, "flush start");
1436
1437 g_mutex_lock (&self->res_chg_lock);
1438 while (self->is_res_chg)
1439 {
1440 GST_LOG_OBJECT(decoder, "wait resolution change finish");
1441 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1442 }
1443 g_mutex_unlock (&self->res_chg_lock);
1444
1445 self->last_out_pts = GST_CLOCK_TIME_NONE;
1446 gst_aml_v4l2_object_unlock(self->v4l2output);
1447 gst_aml_v4l2_object_unlock(self->v4l2capture);
1448 break;
1449 default:
1450 break;
1451 }
1452
1453 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1454
1455 switch (type)
1456 {
1457 case GST_EVENT_FLUSH_START:
1458 /* The processing thread should stop now, wait for it */
1459 gst_pad_stop_task(decoder->srcpad);
1460 GST_DEBUG_OBJECT(self, "flush start done");
1461 break;
1462 default:
1463 break;
1464 }
1465
1466 return ret;
1467}
1468
1469static GstStateChangeReturn
1470gst_aml_v4l2_video_dec_change_state(GstElement *element,
1471 GstStateChange transition)
1472{
1473 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1474 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1475
1476 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1477 {
1478 g_atomic_int_set(&self->active, FALSE);
1479 gst_aml_v4l2_object_unlock(self->v4l2output);
1480 gst_aml_v4l2_object_unlock(self->v4l2capture);
1481 gst_pad_stop_task(decoder->srcpad);
1482 }
1483
1484 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1485}
1486
1487static void
1488gst_aml_v4l2_video_dec_dispose(GObject *object)
1489{
1490 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1491
1492 gst_caps_replace(&self->probed_sinkcaps, NULL);
1493 gst_caps_replace(&self->probed_srccaps, NULL);
1494
1495 G_OBJECT_CLASS(parent_class)->dispose(object);
1496}
1497
1498static void
1499gst_aml_v4l2_video_dec_finalize(GObject *object)
1500{
1501 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1502
1503 gst_aml_v4l2_object_destroy(self->v4l2capture);
1504 gst_aml_v4l2_object_destroy(self->v4l2output);
1505
1506 g_mutex_clear(&self->res_chg_lock);
1507 g_cond_clear(&self->res_chg_cond);
1508
1509#if GST_IMPORT_LGE_PROP
1510 if (self->lge_ctxt)
1511 {
1512 if (self->lge_ctxt->app_type)
1513 g_free(self->lge_ctxt->app_type);
1514 if (self->lge_ctxt->res_info.coretype)
1515 g_free(self->lge_ctxt->res_info.coretype);
1516 free(self->lge_ctxt);
1517 }
1518
1519#endif
1520
1521 G_OBJECT_CLASS(parent_class)->finalize(object);
1522}
1523
1524static void
1525gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1526{
1527 /* V4L2 object are created in subinstance_init */
1528 self->last_out_pts = GST_CLOCK_TIME_NONE;
1529 self->is_secure_path = FALSE;
1530 self->is_res_chg = FALSE;
1531 g_mutex_init(&self->res_chg_lock);
1532 g_cond_init(&self->res_chg_cond);
1533#if GST_IMPORT_LGE_PROP
1534 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1535 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1536#endif
1537}
1538
1539static void
1540gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1541{
1542 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1543 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1544 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1545
1546 gst_video_decoder_set_packetized(decoder, TRUE);
1547
1548 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1549 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1550 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1551 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1552 self->v4l2output->no_initial_format = TRUE;
1553 self->v4l2output->keep_aspect = FALSE;
1554 self->v4l2output->is_svp = FALSE;
1555
1556 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1557 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1558 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1559 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1560 self->v4l2capture->need_wait_event = TRUE;
1561 self->v4l2capture->need_drop_event = FALSE;
1562}
1563
1564static void
1565gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1566{
1567 GstElementClass *element_class;
1568 GObjectClass *gobject_class;
1569 GstVideoDecoderClass *video_decoder_class;
1570
1571 parent_class = g_type_class_peek_parent(klass);
1572
1573 element_class = (GstElementClass *)klass;
1574 gobject_class = (GObjectClass *)klass;
1575 video_decoder_class = (GstVideoDecoderClass *)klass;
1576
1577 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1578 "AML V4L2 Video Decoder");
1579
1580 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1581 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1582 gobject_class->set_property =
1583 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1584 gobject_class->get_property =
1585 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1586
1587 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1588 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1589 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1590 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1591 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1592 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1593 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1594 video_decoder_class->set_format =
1595 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1596 video_decoder_class->negotiate =
1597 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1598 video_decoder_class->decide_allocation =
1599 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1600 /* FIXME propose_allocation or not ? */
1601 video_decoder_class->handle_frame =
1602 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1603 video_decoder_class->getcaps =
1604 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1605 video_decoder_class->src_query =
1606 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1607 video_decoder_class->sink_event =
1608 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1609
1610 element_class->change_state =
1611 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1612
1613 g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
1614 G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
1615 G_SIGNAL_RUN_LAST,
1616 0, /* class offset */
1617 NULL, /* accumulator */
1618 NULL, /* accu data */
1619 g_cclosure_marshal_generic,
1620 G_TYPE_NONE,
1621 1,
1622 G_TYPE_UINT64);
1623
1624 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
1625#if GST_IMPORT_LGE_PROP
1626 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1627#endif
1628}

static void
gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
    GstAmlV4l2VideoDecCData *cdata = data;

    klass->default_device = cdata->device;

    /* Note: gst_pad_template_new() takes the floating ref from the caps */
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
                                                            cdata->sink_caps));
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
                                                            cdata->src_caps));

    gst_element_class_set_metadata(element_class, cdata->longname,
                                   "Codec/Decoder/Video/Hardware", cdata->description,
                                   "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");

    gst_caps_unref(cdata->sink_caps);
    gst_caps_unref(cdata->src_caps);
    g_free(cdata);
}

/* Probing functions */
gboolean
gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
{
    gboolean ret = FALSE;

    if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) &&
        gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
        ret = TRUE;

    return ret;
}
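
/* For example, with sink caps "video/x-h264" (a subset of the compressed
 * codec caps) and src caps "video/x-raw, format=NV12" (a subset of the raw
 * caps) this returns TRUE; the actual caps depend on what the V4L2 device
 * reports during probing. */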

static gchar *
gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
                                    const gchar *basename)
{
    gchar *codec_name = NULL;
    gchar *type_name = NULL;
    gboolean got_value = FALSE;

#define SET_META(codec)                                                    \
    G_STMT_START                                                           \
    {                                                                      \
        cdata->longname = "AML V4L2 " codec " Decoder";                    \
        cdata->description = "Decodes " codec " streams via V4L2 API";     \
        codec_name = g_ascii_strdown(codec, -1);                           \
    }                                                                      \
    G_STMT_END
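
/* For instance, SET_META("H264") expands to:
 *   cdata->longname    = "AML V4L2 H264 Decoder";
 *   cdata->description = "Decodes H264 streams via V4L2 API";
 *   codec_name         = g_ascii_strdown("H264", -1);   // "h264"
 */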

    if (gst_structure_has_name(s, "image/jpeg"))
    {
        SET_META("JPEG");
    }
    else if (gst_structure_has_name(s, "video/mpeg"))
    {
        gint mpegversion = 0;
        gint list_version = 0;
        const GValue *list = NULL;

        got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
        if (FALSE == got_value)
        {
            /* "mpegversion" may be a GST_TYPE_LIST in the template caps,
             * e.g. { 1, 2 } for MPEG2-capable devices; inspect its first
             * entry instead of reading it as a plain int. */
            list = gst_structure_get_value(s, "mpegversion");
            if (list != NULL && GST_VALUE_HOLDS_LIST(list) &&
                gst_value_list_get_size(list) > 0)
                list_version = g_value_get_int(gst_value_list_get_value(list, 0));

            if (1 == list_version || 2 == list_version)
            {
                SET_META("MPEG2");
            }
            else
            {
                SET_META("MPEG4");
            }
        }
        else
        {
            SET_META("MPEG4");
        }
    }
    else if (gst_structure_has_name(s, "video/x-h263"))
    {
        SET_META("H263");
    }
    else if (gst_structure_has_name(s, "video/x-fwht"))
    {
        SET_META("FWHT");
    }
    else if (gst_structure_has_name(s, "video/x-h264"))
    {
        SET_META("H264");
    }
    else if (gst_structure_has_name(s, "video/x-h265"))
    {
        SET_META("H265");
    }
    else if (gst_structure_has_name(s, "video/x-wmv"))
    {
        SET_META("VC1");
    }
    else if (gst_structure_has_name(s, "video/x-vp8"))
    {
        SET_META("VP8");
    }
    else if (gst_structure_has_name(s, "video/x-vp9"))
    {
        SET_META("VP9");
    }
    else if (gst_structure_has_name(s, "video/x-av1"))
    {
        SET_META("AV1");
    }
    else if (gst_structure_has_name(s, "video/x-bayer"))
    {
        SET_META("BAYER");
    }
    else if (gst_structure_has_name(s, "video/x-sonix"))
    {
        SET_META("SONIX");
    }
    else if (gst_structure_has_name(s, "video/x-pwc1"))
    {
        SET_META("PWC1");
    }
    else if (gst_structure_has_name(s, "video/x-pwc2"))
    {
        SET_META("PWC2");
    }
    else
    {
        /* This code should be kept in sync with the exposed CODEC type of
         * format from gstamlv4l2object.c. This warning will only occur in
         * case we forget to also add a format here. */
        gchar *s_str = gst_structure_to_string(s);
        g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
                  "bug, please report at https://bugs.gnome.org",
                  s_str);
        g_free(s_str);
    }

    if (codec_name)
    {
        type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
        if (g_type_from_name(type_name) != 0)
        {
            g_free(type_name);
            type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
        }

        g_free(codec_name);
    }

    return type_name;
#undef SET_META
}
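
/* Example: a "video/x-h264" sink structure yields the longname
 * "AML V4L2 H264 Decoder" and the type name "amlv4l2h264dec"; if that GType
 * is already registered (e.g. for another device), the device basename is
 * folded in, producing "amlv4l2<basename>h264dec". */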

void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
                                     const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
{
    gint i;

    for (i = 0; i < gst_caps_get_size(sink_caps); i++)
    {
        GstAmlV4l2VideoDecCData *cdata;
        GstStructure *s;
        GTypeQuery type_query;
        GTypeInfo type_info = {
            0,
        };
        GType type, subtype;
        gchar *type_name;

        s = gst_caps_get_structure(sink_caps, i);

        cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
        cdata->device = g_strdup(device_path);
        cdata->sink_caps = gst_caps_new_empty();
        gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
        gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
        gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        cdata->src_caps = gst_caps_copy(src_caps);
        gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
        type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);

        /* Skip over if we hit an unmapped type */
        if (!type_name)
        {
            g_free(cdata);
            continue;
        }

        type = gst_aml_v4l2_video_dec_get_type();
        g_type_query(type, &type_query);
        memset(&type_info, 0, sizeof(type_info));
        type_info.class_size = type_query.class_size;
        type_info.instance_size = type_query.instance_size;
        type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
        type_info.class_data = cdata;
        type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;

        subtype = g_type_register_static(type, type_name, &type_info, 0);
        if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
                                  subtype))
            GST_WARNING("Failed to register plugin '%s'", type_name);

        g_free(type_name);
    }
}
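
/* Registration sketch (assumes caps-probing code elsewhere in the plugin):
 * a plugin_init() would typically call the registration helper once per
 * decoder device, passing the caps probed from the device's OUTPUT
 * (compressed) and CAPTURE (raw) queues. The device path and caps below are
 * placeholders.
 *
 *   static gboolean
 *   plugin_init(GstPlugin *plugin)
 *   {
 *       GstCaps *sink_caps = ...;   // probed compressed formats
 *       GstCaps *src_caps = ...;    // probed raw formats
 *
 *       gst_aml_v4l2_video_dec_register(plugin, "video26", "/dev/video26",
 *                                       sink_caps, src_caps);
 *       return TRUE;
 *   }
 */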

#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
{
    g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
                                    g_param_spec_boxed("resource-info", "resource-info",
                                                       "Once H/W resource acquisition has completed, the allocated resource information must be delivered to the decoder and the sink",
                                                       GST_TYPE_STRUCTURE,
                                                       G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
                                    g_param_spec_uint64("decoded-size", "decoded-size",
                                                        "Total amount of video ES decoded by this element since the pipeline was constructed or last flushed, in bytes.",
                                                        0, G_MAXUINT64,
                                                        0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
                                    g_param_spec_uint64("undecoded-size", "undecoded-size",
                                                        "Total amount of video ES queued in this element but not yet decoded, in bytes.",
                                                        0, G_MAXUINT64,
                                                        0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_APP_TYPE,
                                    g_param_spec_string("app-type", "app-type",
                                                        "Set the application type.",
                                                        "default_app",
                                                        G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
                                    g_param_spec_boolean("clip-mode", "clip-mode",
                                                         "When seeking, content plays faster for a while so that frames can be skipped.",
                                                         FALSE,
                                                         G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
#endif
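
/* Usage sketch for the LGE properties above (only compiled when
 * GST_IMPORT_LGE_PROP is enabled); the property names match the
 * g_param_spec definitions, while the element variable is a placeholder:
 *
 *   guint64 decoded = 0;
 *
 *   g_object_set(decoder, "app-type", "default_app", NULL);
 *   g_object_get(decoder, "decoded-size", &decoded, NULL);
 */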