blob: 139c7b706d7db24d8453675c3f2722e41699464d [file] [log] [blame]
xuesong.jiangae1548e2022-05-06 16:38:46 +08001/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
33#include <string.h>
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
40#ifdef GST_VIDEO_DECODER_STREAM_LOCK
41#undef GST_VIDEO_DECODER_STREAM_LOCK
42#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
43 { \
fei.denge9458472023-04-18 02:05:48 +000044 GST_TRACE("aml v4l2 dec locking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080045 g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000046 GST_TRACE("aml v4l2 dec locked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080047 }
48#endif
49
50#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
51#undef GST_VIDEO_DECODER_STREAM_UNLOCK
52#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
53 { \
fei.denge9458472023-04-18 02:05:48 +000054 GST_TRACE("aml v4l2 dec unlocking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080055 g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000056 GST_TRACE("aml v4l2 dec unlocked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080057 }
58#endif
xuesong.jiang61ea8012022-05-12 15:38:17 +080059
hanghang.luo36df2852022-08-24 15:02:27 +080060#ifndef ABSDIFF
61#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
62#endif
63
xuesong.jiang61ea8012022-05-12 15:38:17 +080064#if GST_IMPORT_LGE_PROP
65typedef struct _GstAmlResourceInfo
66{
67 gchar *coretype;
68 gint videoport;
69 gint audioport;
70 gint maxwidth;
71 gint maxheight;
72 gint mixerport;
73} GstAmlResourceInfo;
74
75struct _GstAmlV4l2VideoDecLgeCtxt
76{
77 GstAmlResourceInfo res_info;
78 guint64 dec_size;
79 guint64 undec_size;
80 gchar *app_type;
81 gboolean clip_mode;
82};
83#endif
84
xuesong.jiangae1548e2022-05-06 16:38:46 +080085typedef struct
86{
87 gchar *device;
88 GstCaps *sink_caps;
89 GstCaps *src_caps;
90 const gchar *longname;
91 const gchar *description;
92} GstAmlV4l2VideoDecCData;
93
94enum
95{
96 PROP_0,
xuesong.jiang61ea8012022-05-12 15:38:17 +080097 V4L2_STD_OBJECT_PROPS,
98#if GST_IMPORT_LGE_PROP
99 LGE_RESOURCE_INFO,
100 LGE_DECODE_SIZE,
101 LGE_UNDECODE_SIZE,
102 LGE_APP_TYPE,
103 LGE_CLIP_MODE
104#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800105};
106
xuesong.jiang406ee302023-06-28 03:45:22 +0000107enum
108{
109 SIGNAL_DECODED_PTS,
110 MAX_SIGNAL
111};
112
113static guint g_signals[MAX_SIGNAL]= {0};
114
xuesong.jiangae1548e2022-05-06 16:38:46 +0800115#define gst_aml_v4l2_video_dec_parent_class parent_class
116G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
117 GST_TYPE_VIDEO_DECODER);
118
119static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
xuesong.jiang61ea8012022-05-12 15:38:17 +0800120#if GST_IMPORT_LGE_PROP
121static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
122#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800123
124static void
125gst_aml_v4l2_video_dec_set_property(GObject *object,
126 guint prop_id, const GValue *value, GParamSpec *pspec)
127{
128 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
129
130 switch (prop_id)
131 {
132 case PROP_CAPTURE_IO_MODE:
133 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
134 prop_id, value, pspec))
135 {
136 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
137 }
138 break;
139 case PROP_DUMP_FRAME_LOCATION:
140 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
141 prop_id, value, pspec))
142 {
143 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
144 }
145 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800146#if GST_IMPORT_LGE_PROP
147 case LGE_RESOURCE_INFO:
148 {
149 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
150 GstStructure *r_info = g_value_get_object(value);
151 if (r_info)
152 {
153 if (gst_structure_has_field(r_info, "coretype"))
154 {
155 if (self->lge_ctxt->res_info.coretype)
156 g_free(self->lge_ctxt->res_info.coretype);
157 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
158 }
159 if (gst_structure_has_field(r_info, "videoport"))
160 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
161 if (gst_structure_has_field(r_info, "audioport"))
162 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
163 if (gst_structure_has_field(r_info, "maxwidth"))
164 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
165 if (gst_structure_has_field(r_info, "maxheight"))
166 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
167 if (gst_structure_has_field(r_info, "mixerport"))
168 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
169 }
170 break;
171 }
172 case LGE_APP_TYPE:
173 {
174 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
175 if (self->lge_ctxt->app_type)
176 g_free(self->lge_ctxt->app_type);
177 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
178 break;
179 }
180 case LGE_CLIP_MODE:
181 {
182 GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
183 self->lge_ctxt->clip_mode = g_strdup(g_value_get_boolean(value));
184 break;
185 }
186#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800187 /* By default, only set on output */
188 default:
189 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
190 prop_id, value, pspec))
191 {
192 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
193 }
194 break;
195 }
196}
197
198static void
199gst_aml_v4l2_video_dec_get_property(GObject *object,
200 guint prop_id, GValue *value, GParamSpec *pspec)
201{
202 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
203
204 switch (prop_id)
205 {
206 case PROP_CAPTURE_IO_MODE:
207 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
208 prop_id, value, pspec))
209 {
210 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
211 }
212 break;
213
xuesong.jiang61ea8012022-05-12 15:38:17 +0800214#if GST_IMPORT_LGE_PROP
215 case LGE_DECODE_SIZE:
216 {
217 GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
218 self->lge_ctxt->dec_size = -1;
219 g_value_set_int(value, self->lge_ctxt->dec_size);
220 break;
221 }
222 case LGE_UNDECODE_SIZE:
223 {
224 GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
225 self->lge_ctxt->undec_size = -1;
226 g_value_set_int(value, self->lge_ctxt->undec_size);
227 break;
228 }
229#endif
230
xuesong.jiangae1548e2022-05-06 16:38:46 +0800231 /* By default read from output */
232 default:
233 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
234 prop_id, value, pspec))
235 {
236 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
237 }
238 break;
239 }
240}
241
242static gboolean
243gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
244{
245 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
246 GstCaps *codec_caps;
247
248 GST_DEBUG_OBJECT(self, "Opening");
249
250 if (!gst_aml_v4l2_object_open(self->v4l2output))
251 goto failure;
252
253 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
254 goto failure;
255
256 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
257 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
258 codec_caps);
259 gst_caps_unref(codec_caps);
260
261 if (gst_caps_is_empty(self->probed_sinkcaps))
262 goto no_encoded_format;
263
264 return TRUE;
265
266no_encoded_format:
267 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
268 (_("Decoder on device %s has no supported input format"),
269 self->v4l2output->videodev),
270 (NULL));
271 goto failure;
272
273failure:
274 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
275 gst_aml_v4l2_object_close(self->v4l2output);
276
277 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
278 gst_aml_v4l2_object_close(self->v4l2capture);
279
280 gst_caps_replace(&self->probed_srccaps, NULL);
281 gst_caps_replace(&self->probed_sinkcaps, NULL);
282
283 return FALSE;
284}
285
286static gboolean
287gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
288{
289 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
290
291 GST_DEBUG_OBJECT(self, "Closing");
292
293 gst_aml_v4l2_object_close(self->v4l2output);
294 gst_aml_v4l2_object_close(self->v4l2capture);
295 gst_caps_replace(&self->probed_srccaps, NULL);
296 gst_caps_replace(&self->probed_sinkcaps, NULL);
297
298 return TRUE;
299}
300
301static gboolean
302gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
303{
304 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
305
306 GST_DEBUG_OBJECT(self, "Starting");
307
308 gst_aml_v4l2_object_unlock(self->v4l2output);
309 g_atomic_int_set(&self->active, TRUE);
310 self->output_flow = GST_FLOW_OK;
311
312 return TRUE;
313}
314
315static gboolean
316gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
317{
318 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
319
320 GST_DEBUG_OBJECT(self, "Stopping");
321
322 gst_aml_v4l2_object_unlock(self->v4l2output);
323 gst_aml_v4l2_object_unlock(self->v4l2capture);
324
325 /* Wait for capture thread to stop */
326 gst_pad_stop_task(decoder->srcpad);
327
328 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
329 self->output_flow = GST_FLOW_OK;
330 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
331
332 /* Should have been flushed already */
333 g_assert(g_atomic_int_get(&self->active) == FALSE);
334
335 gst_aml_v4l2_object_stop(self->v4l2output);
336 gst_aml_v4l2_object_stop(self->v4l2capture);
337
338 if (self->input_state)
339 {
340 gst_video_codec_state_unref(self->input_state);
341 self->input_state = NULL;
342 }
343
344 GST_DEBUG_OBJECT(self, "Stopped");
345
346 return TRUE;
347}
348
349static gboolean
350gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
351 GstVideoCodecState *state)
352{
353 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
354 gboolean ret = TRUE;
355 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
356 GstCaps *caps;
357
358 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
359 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
360
361 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
362 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
363
364 if (self->input_state)
365 {
366 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
367 {
368 GST_DEBUG_OBJECT(self, "Compatible caps");
369 goto done;
370 }
371 gst_video_codec_state_unref(self->input_state);
372 self->input_state = NULL;
373
374 gst_aml_v4l2_video_dec_finish(decoder);
375 gst_aml_v4l2_object_stop(self->v4l2output);
376
377 /* The renegotiation flow don't blend with the base class flow. To properly
378 * stop the capture pool, if the buffers can't be orphaned, we need to
379 * reclaim our buffers, which will happend through the allocation query.
380 * The allocation query is triggered by gst_video_decoder_negotiate() which
381 * requires the output caps to be set, but we can't know this information
382 * as we rely on the decoder, which requires the capture queue to be
383 * stopped.
384 *
385 * To workaround this issue, we simply run an allocation query with the
386 * old negotiated caps in order to drain/reclaim our buffers. That breaks
387 * the complexity and should not have much impact in performance since the
388 * following allocation query will happen on a drained pipeline and won't
389 * block. */
390 if (self->v4l2capture->pool &&
391 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
392 {
393 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
394 if (caps)
395 {
396 GstQuery *query = gst_query_new_allocation(caps, FALSE);
397 gst_pad_peer_query(decoder->srcpad, query);
398 gst_query_unref(query);
399 gst_caps_unref(caps);
400 }
401 }
402
403 gst_aml_v4l2_object_stop(self->v4l2capture);
404 self->output_flow = GST_FLOW_OK;
405 }
406
407 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
408 {
409 GST_ERROR_OBJECT(self, "config output drm mode error");
410 goto done;
411 }
412
xuesong.jiang22a9b112023-05-24 09:01:59 +0000413 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
414 {
415 GST_ERROR_OBJECT(self, "config output stream mode error");
416 goto done;
417 }
418
xuesong.jiangae1548e2022-05-06 16:38:46 +0800419 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
420
421 gst_caps_replace(&self->probed_srccaps, NULL);
422 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
423 gst_aml_v4l2_object_get_raw_caps());
424
425 if (gst_caps_is_empty(self->probed_srccaps))
426 goto no_raw_format;
427
428 caps = gst_caps_copy(self->probed_srccaps);
429 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
430 gst_caps_append(self->probed_srccaps, caps);
431 if (ret)
432 self->input_state = gst_video_codec_state_ref(state);
433 else
434 gst_aml_v4l2_error(self, &error);
435
436done:
437 return ret;
438
439no_raw_format:
440 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
441 (_("Decoder on device %s has no supported output format"),
442 self->v4l2output->videodev),
443 (NULL));
444 return GST_FLOW_ERROR;
445}
446
447static gboolean
448gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
449{
450 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
451
452 GST_DEBUG_OBJECT(self, "Flushed");
453
454 /* Ensure the processing thread has stopped for the reverse playback
455 * discount case */
456 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
457 {
458 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
459
460 gst_aml_v4l2_object_unlock(self->v4l2output);
461 gst_aml_v4l2_object_unlock(self->v4l2capture);
462 gst_pad_stop_task(decoder->srcpad);
463 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
464 }
465
466 self->output_flow = GST_FLOW_OK;
467
468 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
469 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
470
471 if (self->v4l2output->pool)
472 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
473
474 /* gst_aml_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
475 * called after gst_aml_v4l2_object_unlock_stop() stopped flushing the buffer
476 * pool. */
477 if (self->v4l2capture->pool)
478 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
479
480 return TRUE;
481}
482
483static gboolean
484gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
485{
486 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
487
xuesong.jiang681d3602022-06-24 21:23:35 +0800488 if (TRUE == self->v4l2output->is_svp)
489 {
490 GstStructure *s;
491 GstEvent *event;
492
493 s = gst_structure_new_empty ("IS_SVP");
494 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
495 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
496 gst_pad_push_event (decoder->srcpad, event);
497 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
498 }
499
xuesong.jiangae1548e2022-05-06 16:38:46 +0800500 /* We don't allow renegotiation without carefull disabling the pool */
501 if (self->v4l2capture->pool &&
502 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
503 return TRUE;
504
505 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
506}
507
508static gboolean
509gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
510{
511 struct v4l2_decoder_cmd dcmd = {
512 0,
513 };
514
515 GST_DEBUG_OBJECT(v4l2object->element,
516 "sending v4l2 decoder command %u with flags %u", cmd, flags);
517
518 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
519 return FALSE;
520
521 dcmd.cmd = cmd;
522 dcmd.flags = flags;
523 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
524 goto dcmd_failed;
525
526 return TRUE;
527
528dcmd_failed:
529 if (errno == ENOTTY)
530 {
531 GST_INFO_OBJECT(v4l2object->element,
532 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
533 cmd, flags, v4l2object->videodev, g_strerror(errno));
534 }
535 else
536 {
537 GST_ERROR_OBJECT(v4l2object->element,
538 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
539 cmd, flags, v4l2object->videodev, g_strerror(errno));
540 }
541 return FALSE;
542}
543
544static GstFlowReturn
545gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
546{
547 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
548 GstFlowReturn ret = GST_FLOW_OK;
549 GstBuffer *buffer;
550
551 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
552 goto done;
553
554 GST_DEBUG_OBJECT(self, "Finishing decoding");
555
556 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
557
558 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
559 {
560 GstTask *task = decoder->srcpad->task;
561
562 /* If the decoder stop command succeeded, just wait until processing is
563 * finished */
564 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
565 GST_OBJECT_LOCK(task);
566 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
567 GST_TASK_WAIT(task);
568 GST_OBJECT_UNLOCK(task);
569 ret = GST_FLOW_FLUSHING;
570 }
571 else
572 {
573 /* otherwise keep queuing empty buffers until the processing thread has
574 * stopped, _pool_process() will return FLUSHING when that happened */
575 while (ret == GST_FLOW_OK)
576 {
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800577 GST_DEBUG_OBJECT(self, "queue empty output buf");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800578 buffer = gst_buffer_new();
579 ret =
580 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
581 gst_buffer_unref(buffer);
582 }
583 }
584
585 /* and ensure the processing thread has stopped in case another error
586 * occured. */
587 gst_aml_v4l2_object_unlock(self->v4l2capture);
588 gst_pad_stop_task(decoder->srcpad);
589 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
590
591 if (ret == GST_FLOW_FLUSHING)
592 ret = self->output_flow;
593
594 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
595
596 /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */
597
598done:
599 return ret;
600}
601
602static GstFlowReturn
603gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
604{
605 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
606
607 GST_DEBUG_OBJECT(self, "Draining...");
608 gst_aml_v4l2_video_dec_finish(decoder);
609 gst_aml_v4l2_video_dec_flush(decoder);
610
611 return GST_FLOW_OK;
612}
613
614static GstVideoCodecFrame *
615gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
616{
617 GstVideoCodecFrame *frame = NULL;
618 GList *frames, *l;
619 gint count = 0;
620
621 frames = gst_video_decoder_get_frames(decoder);
622
623 for (l = frames; l != NULL; l = l->next)
624 {
625 GstVideoCodecFrame *f = l->data;
626
627 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
628 frame = f;
629
630 count++;
631 }
632
633 if (frame)
634 {
635 GST_LOG_OBJECT(decoder,
636 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
637 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
638 gst_video_codec_frame_ref(frame);
639 }
640
641 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
642
643 return frame;
644}
645
fei.dengbee20862022-06-14 14:59:48 +0800646static GstVideoCodecFrame *
xuesong.jiange24aef92023-06-16 06:39:10 +0000647gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
fei.dengbee20862022-06-14 14:59:48 +0800648{
649 GstVideoCodecFrame *frame = NULL;
650 GList *frames, *l;
651 gint count = 0;
652
xuesong.jiange24aef92023-06-16 06:39:10 +0000653 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
654
fei.dengbee20862022-06-14 14:59:48 +0800655 frames = gst_video_decoder_get_frames(decoder);
656
657 for (l = frames; l != NULL; l = l->next)
658 {
659 GstVideoCodecFrame *f = l->data;
fei.denge9458472023-04-18 02:05:48 +0000660
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800661 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
fei.dengbee20862022-06-14 14:59:48 +0800662 frame = f;
fei.dengbee20862022-06-14 14:59:48 +0800663 } else {
664 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
665 frame = f;
666 }
667
668 count++;
669 }
670
671 if (frame)
672 {
673 GST_LOG_OBJECT(decoder,
674 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
675 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
676 gst_video_codec_frame_ref(frame);
677 }
678
679 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
680
xuesong.jiange24aef92023-06-16 06:39:10 +0000681 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
fei.dengbee20862022-06-14 14:59:48 +0800682 return frame;
683}
684
xuesong.jiange24aef92023-06-16 06:39:10 +0000685static GstVideoCodecFrame *
686gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
687{
688 GstVideoCodecFrame *frame = NULL;
689 GList *frames, *l;
690 gint count = 0;
691
692 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
693
694 frames = gst_video_decoder_get_frames(decoder);
695 guint frames_len = 0;
696 frames_len = g_list_length(frames);
697 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
698
699 frame = frames->data;
700
701 for (l = frames; l != NULL; l = l->next)
702 {
703 GstVideoCodecFrame *f = l->data;
704
705 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
706 {
707 /* found the right frame */
708 frame = f;
709 break;
710 }
711 else if(GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
712 {
713 GST_LOG_OBJECT(decoder,
714 "stream mode drop frame %d %" GST_TIME_FORMAT,
715 frame->system_frame_number, GST_TIME_ARGS(frame->pts));
716
717 gst_video_codec_frame_ref(f);
718 // gst_video_decoder_drop_frame(decoder, f);
719 gst_video_decoder_release_frame(decoder, f);
720 }
721 else
722 {
723 GST_LOG_OBJECT (decoder, "dbg");
724 }
725 }
726
727 if (frame)
728 {
729 guint l_len = 0;
730 l = gst_video_decoder_get_frames(decoder);
731 l_len = g_list_length(l);
732 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
733
734 GST_LOG_OBJECT(decoder,
735 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
736 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
737 gst_video_codec_frame_ref(frame);
738 }
739
740 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
741
742 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
743 return frame;
744}
745
746static GstVideoCodecFrame *
747gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
748{
749 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
750 if (self->v4l2output->stream_mode)
751 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
752 else
753 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
754}
755
xuesong.jiangae1548e2022-05-06 16:38:46 +0800756static gboolean
757gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
758 GstStructure *structure, gpointer user_data)
759{
760 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
761 GstVideoAlignment *align = &self->v4l2capture->align;
762 GstVideoInfo *info = &self->v4l2capture->info;
763 int width, height;
764
765 if (!gst_structure_get_int(structure, "width", &width))
766 return TRUE;
767
768 if (!gst_structure_get_int(structure, "height", &height))
769 return TRUE;
770
771 if (align->padding_left != 0 || align->padding_top != 0 ||
772 height != info->height + align->padding_bottom)
773 return TRUE;
774
775 if (height == info->height + align->padding_bottom)
776 {
777 /* Some drivers may round up width to the padded with */
778 if (width == info->width + align->padding_right)
779 gst_structure_set(structure,
780 "width", G_TYPE_INT, width - align->padding_right,
781 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
782 /* Some drivers may keep visible width and only round up bytesperline */
783 else if (width == info->width)
784 gst_structure_set(structure,
785 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
786 }
787
788 return TRUE;
789}
790
791static void
sheng.liubcf036c2022-06-21 15:55:42 +0800792gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
sheng.liub56bbc52022-06-21 11:02:33 +0800793{
794 struct v4l2_event evt;
795 gint ret;
796
797 memset (&evt, 0x00, sizeof (struct v4l2_event));
798 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
799 if (ret < 0)
800 {
801 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
802 return;
803 }
804
805 switch (evt.type)
806 {
807 case V4L2_EVENT_SOURCE_CHANGE:
808 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
809 break;
810 case V4L2_EVENT_EOS:
811 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
812 break;
813 default:
814 break;
815 }
816
817 return;
818}
819
820static void
hanghang.luo419c4a92023-07-14 07:36:07 +0000821gst_aml_v4l2_video_dec_set_fence(GstVideoDecoder *decoder)
hanghang.luo70f07ef2023-07-13 02:23:06 +0000822{
823 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
824 GstStructure *s;
825 GstEvent *event;
826
hanghang.luo419c4a92023-07-14 07:36:07 +0000827 guint fence_num = self->v4l2capture->min_buffers-2;
hanghang.luo70f07ef2023-07-13 02:23:06 +0000828 s = gst_structure_new ("video_fence","fence_num",G_TYPE_UINT,fence_num,NULL);
829 if (s)
830 {
831 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
832 GST_DEBUG_OBJECT(self,"Send video_fence Event: %"GST_PTR_FORMAT,event);
833 gst_pad_push_event (decoder->srcpad, event);
834 }
835}
836
837static void
hanghang.luo2eec4892023-07-18 06:44:42 +0000838gst_aml_v4l2_video_dec_set_output_status(GstVideoDecoder *decoder,GstVideoInfo info)
839{
840 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
841 GstVideoCodecState *output_state;
842 output_state = gst_video_decoder_set_output_state(decoder,
843 info.finfo->format, info.width, info.height, self->input_state);
844 if (output_state)
845 {
846 output_state->info.interlace_mode = info.interlace_mode;
847 output_state->allocation_caps =gst_video_info_to_caps(&info);
848 info.width = self->v4l2output->info.width;
849 info.height = self->v4l2output->info.height;
850 output_state->caps =gst_video_info_to_caps(&info);
851 gst_video_codec_state_unref(output_state);
852 }
853}
854
855static void
xuesong.jiangae1548e2022-05-06 16:38:46 +0800856gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
857{
858 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
859 GstAmlV4l2BufferPool *v4l2_pool;
860 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
861 GstBufferPool *pool;
862 GstVideoCodecFrame *frame;
863 GstBuffer *buffer = NULL;
864 GstFlowReturn ret;
865
866 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
867 {
868 GstVideoInfo info;
xuesong.jiang282ca572023-05-05 09:03:32 +0000869 GstCaps *acquired_caps, *available_caps, *caps, *filter;
xuesong.jiangae1548e2022-05-06 16:38:46 +0800870 GstStructure *st;
871
872 GST_DEBUG_OBJECT(self, "waitting source change event");
873 /* Wait until received SOURCE_CHANGE event to get right video format */
874 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
875 {
876 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
877 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
878 {
879 GST_DEBUG_OBJECT(self, "Received source change event");
880 break;
881 }
882 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
883 {
884 GST_DEBUG_OBJECT(self, "Received eos event");
885 goto beach;
886 }
887 else if (ret != GST_FLOW_OK)
888 {
889 GST_ERROR_OBJECT(self, "dqevent error");
890 goto beach;
891 }
892 }
893 self->v4l2capture->need_wait_event = FALSE;
894
sheng.liu0c77f6c2022-06-17 21:33:20 +0800895 if (TRUE == self->v4l2output->is_svp)
896 {
897 GstPad *peer;
898 GstStructure *s;
899 GstEvent *event;
900
901 peer = gst_pad_get_peer (decoder->srcpad);
902 if (peer)
903 {
hanghang.luo70f07ef2023-07-13 02:23:06 +0000904 s = gst_structure_new_empty ("IS_SVP");
905 if (s)
906 {
907 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
908 gst_pad_send_event (peer, event);
909 GST_DEBUG_OBJECT(self, "Send SVP Event");
910 }
911 gst_object_unref (peer);
sheng.liu0c77f6c2022-06-17 21:33:20 +0800912 }
913 }
914
sheng.liub56bbc52022-06-21 11:02:33 +0800915 if (self->v4l2capture->need_drop_event)
916 {
917 // drop V4L2_EVENT_SOURCE_CHANGE
918 gst_v4l2_drop_event(self->v4l2capture);
919 self->v4l2capture->need_drop_event = FALSE;
920 }
921
xuesong.jiangae1548e2022-05-06 16:38:46 +0800922 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
923 goto not_negotiated;
hanghang.luo419c4a92023-07-14 07:36:07 +0000924 if (info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
925 {
926 GST_DEBUG_OBJECT(self,"change interlace to progressive");
927 info.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
928 self->is_interlace = TRUE;
929 }
xuesong.jiangae1548e2022-05-06 16:38:46 +0800930 /* Create caps from the acquired format, remove the format field */
931 acquired_caps = gst_video_info_to_caps(&info);
932 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
933 st = gst_caps_get_structure(acquired_caps, 0);
xuesong.jiang282ca572023-05-05 09:03:32 +0000934 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
935
936 /* Probe currently available pixel formats */
937 available_caps = gst_caps_copy(self->probed_srccaps);
938 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
939
940 /* Replace coded size with visible size, we want to negotiate visible size
941 * with downstream, not coded size. */
942 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
943
944 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800945 caps = gst_caps_copy(filter);
946 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
947 gst_caps_append(filter, caps);
948
949 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
950 gst_caps_unref(acquired_caps);
xuesong.jiang282ca572023-05-05 09:03:32 +0000951 gst_caps_unref(available_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800952 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
953 gst_caps_unref(filter);
954
955 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
956 if (gst_caps_is_empty(caps))
957 {
958 gst_caps_unref(caps);
959 goto not_negotiated;
960 }
961
962 /* Fixate pixel format */
963 caps = gst_caps_fixate(caps);
964
965 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
966
967 /* Try to set negotiated format, on success replace acquired format */
968 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
969 gst_video_info_from_caps(&info, caps);
970 else
971 gst_aml_v4l2_clear_error(&error);
972 gst_caps_unref(caps);
hanghang.luo419c4a92023-07-14 07:36:07 +0000973 gst_aml_v4l2_video_dec_set_fence(decoder);
hanghang.luo2eec4892023-07-18 06:44:42 +0000974 gst_aml_v4l2_video_dec_set_output_status(decoder,info);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800975 if (!gst_video_decoder_negotiate(decoder))
976 {
977 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
978 goto flushing;
979 else
980 goto not_negotiated;
981 }
982
983 /* Ensure our internal pool is activated */
984 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
985 TRUE))
986 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800987
988 g_mutex_lock(&self->res_chg_lock);
989 GST_LOG_OBJECT(decoder, "signal resolution changed");
990 self->is_res_chg = FALSE;
991 g_cond_signal(&self->res_chg_cond);
992 g_mutex_unlock(&self->res_chg_lock);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800993 }
994
995 GST_LOG_OBJECT(decoder, "Allocate output buffer");
996
997 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
998
999 self->output_flow = GST_FLOW_OK;
1000 do
1001 {
1002 /* We cannot use the base class allotate helper since it taking the internal
1003 * stream lock. we know that the acquire may need to poll until more frames
1004 * comes in and holding this lock would prevent that.
1005 */
1006 pool = gst_video_decoder_get_buffer_pool(decoder);
1007
1008 /* Pool may be NULL if we started going to READY state */
1009 if (pool == NULL)
1010 {
fei.dengbee20862022-06-14 14:59:48 +08001011 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
xuesong.jiangae1548e2022-05-06 16:38:46 +08001012 ret = GST_FLOW_FLUSHING;
1013 goto beach;
1014 }
1015
1016 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
fei.dengccc89632022-07-15 19:10:17 +08001017 //calculate a new pts for interlace stream
hanghang.luo419c4a92023-07-14 07:36:07 +00001018 if (ret == GST_FLOW_OK && self->is_interlace)
fei.dengccc89632022-07-15 19:10:17 +08001019 {
1020 //if buffer pts is valid, reduce 1/2 duration
1021 if (GST_BUFFER_DURATION_IS_VALID(buffer))
1022 {
1023 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
1024 }
1025 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
1026 //reset pts
fei.denga6ae3282022-07-15 19:50:30 +08001027 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
fei.dengccc89632022-07-15 19:10:17 +08001028 {
1029 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
1030 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1031 }
1032 }
1033
xuesong.jiangae1548e2022-05-06 16:38:46 +08001034 g_object_unref(pool);
1035
fei.deng9a5cd6e2023-06-30 12:09:18 +00001036 if (ret == GST_FLOW_OK && GST_BUFFER_FLAG_IS_SET(buffer,GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY)) {
sheng.liubcf036c2022-06-21 15:55:42 +08001037 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
sheng.liub56bbc52022-06-21 11:02:33 +08001038 self->v4l2capture->need_drop_event = TRUE;
fei.deng594df4b2023-06-26 07:03:29 +00001039 gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
sheng.liub56bbc52022-06-21 11:02:33 +08001040 goto beach;
1041 }
1042
sheng.liu8d18ed22022-05-26 17:28:15 +08001043 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1044 {
1045 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001046
1047 g_mutex_lock (&self->res_chg_lock);
1048 self->is_res_chg = TRUE;
1049 g_mutex_unlock (&self->res_chg_lock);
1050
sheng.liu8d18ed22022-05-26 17:28:15 +08001051 gst_aml_v4l2_object_stop(self->v4l2capture);
1052 return;
1053 }
1054
fei.dengbee20862022-06-14 14:59:48 +08001055 if (ret != GST_FLOW_OK) {
1056 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001057 goto beach;
fei.dengbee20862022-06-14 14:59:48 +08001058 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001059
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001060 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001061 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
xuesong.jiang406ee302023-06-28 03:45:22 +00001062
1063 GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
1064 g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));
1065
xuesong.jiangae1548e2022-05-06 16:38:46 +08001066 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1067 {
1068 gst_aml_v4l2_object_stop(self->v4l2capture);
1069 return;
1070 }
1071
1072 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
1073
1074 if (ret != GST_FLOW_OK)
1075 goto beach;
1076
fei.dengbee20862022-06-14 14:59:48 +08001077 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001078 if (frame)
1079 {
zengliang.li32cb11e2022-11-24 12:10:26 +08001080 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1081 {
zengliang.li92ff6822023-06-06 07:12:52 +00001082 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1083 {
1084 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1085 {
1086 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1087 }
1088 else
1089 {
1090 GST_WARNING_OBJECT (decoder,"sorry,we have no baseline to calculate pts");
1091 goto beach;
1092 }
1093 }
1094 else
1095 {
1096 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1097 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1098 }
zengliang.li32cb11e2022-11-24 12:10:26 +08001099 }
fei.dengccc89632022-07-15 19:10:17 +08001100 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001101 frame->output_buffer = buffer;
fei.dengccc89632022-07-15 19:10:17 +08001102 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1103 frame->duration = GST_BUFFER_DURATION(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001104 buffer = NULL;
1105 ret = gst_video_decoder_finish_frame(decoder, frame);
1106
1107 if (ret != GST_FLOW_OK)
1108 goto beach;
1109 }
1110 else
1111 {
1112 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1113 gst_buffer_unref(buffer);
1114 }
1115
1116 return;
1117 /* ERRORS */
1118not_negotiated:
1119{
1120 GST_ERROR_OBJECT(self, "not negotiated");
1121 ret = GST_FLOW_NOT_NEGOTIATED;
1122 goto beach;
1123}
1124activate_failed:
1125{
1126 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1127 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1128 (_("Failed to allocate required memory.")),
1129 ("Buffer pool activation failed"));
1130 ret = GST_FLOW_ERROR;
1131 goto beach;
1132}
1133flushing:
1134{
1135 ret = GST_FLOW_FLUSHING;
1136 goto beach;
1137}
1138beach:
1139 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1140 gst_flow_get_name(ret));
1141
1142 gst_buffer_replace(&buffer, NULL);
1143 self->output_flow = ret;
1144 gst_aml_v4l2_object_unlock(self->v4l2output);
1145 gst_pad_pause_task(decoder->srcpad);
1146}
1147
1148static GstFlowReturn
1149gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1150 GstVideoCodecFrame *frame)
1151{
1152 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1153 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1154 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1155 GstFlowReturn ret = GST_FLOW_OK;
1156 gboolean processed = FALSE;
1157 GstBuffer *tmp;
1158 GstTaskState task_state;
1159 GstCaps *caps;
1160
1161 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1162
1163 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1164 goto flushing;
1165
1166 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1167 {
1168 if (!self->input_state)
1169 goto not_negotiated;
1170 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1171 &error))
1172 goto not_negotiated;
1173 }
1174
1175 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1176 {
1177 GstBuffer *codec_data;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001178 GstCapsFeatures *features = NULL;
1179
1180 features = gst_caps_get_features(self->input_state->caps, 0);
1181 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1182 {
1183 GST_DEBUG_OBJECT(self, "Is SVP");
1184 self->v4l2output->is_svp = TRUE;
1185 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001186
1187 GST_DEBUG_OBJECT(self, "Sending header");
1188
1189 codec_data = self->input_state->codec_data;
1190
1191 /* We are running in byte-stream mode, so we don't know the headers, but
1192 * we need to send something, otherwise the decoder will refuse to
1193 * intialize.
1194 */
1195 if (codec_data)
1196 {
1197 gst_buffer_ref(codec_data);
1198 }
1199 else
1200 {
1201 codec_data = gst_buffer_ref(frame->input_buffer);
1202 processed = TRUE;
1203 }
1204
1205 /* Ensure input internal pool is active */
1206 if (!gst_buffer_pool_is_active(pool))
1207 {
1208 GstStructure *config = gst_buffer_pool_get_config(pool);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001209 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1210 // guint max = VIDEO_MAX_FRAME;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001211 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1212 // self->v4l2output->info.size, min, max);
1213 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1214
1215 /* There is no reason to refuse this config */
1216 if (!gst_buffer_pool_set_config(pool, config))
1217 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001218 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001219
1220 if (!gst_buffer_pool_set_active(pool, TRUE))
1221 goto activate_failed;
1222 }
1223
1224 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1225 ret =
1226 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
1227 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1228
1229 gst_buffer_unref(codec_data);
1230
1231 /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
1232 * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
1233 * and returns the visible size as with/height and the coded size as
1234 * padding. */
1235 }
1236
1237 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1238 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1239 {
1240 /* It's possible that the processing thread stopped due to an error */
1241 if (self->output_flow != GST_FLOW_OK &&
1242 self->output_flow != GST_FLOW_FLUSHING)
1243 {
1244 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1245 ret = self->output_flow;
1246 goto drop;
1247 }
1248
1249 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1250
1251 /* Start the processing task, when it quits, the task will disable input
1252 * processing to unlock input if draining, or prevent potential block */
1253 self->output_flow = GST_FLOW_FLUSHING;
1254 if (!gst_pad_start_task(decoder->srcpad,
1255 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1256 goto start_task_failed;
1257 }
1258
1259 if (!processed)
1260 {
1261 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1262 ret =
1263 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1264 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1265
1266 if (ret == GST_FLOW_FLUSHING)
1267 {
1268 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1269 GST_TASK_STARTED)
1270 ret = self->output_flow;
1271 goto drop;
1272 }
1273 else if (ret != GST_FLOW_OK)
1274 {
1275 goto process_failed;
1276 }
1277 }
1278
1279 /* No need to keep input arround */
1280 tmp = frame->input_buffer;
1281 frame->input_buffer = gst_buffer_new();
1282 gst_buffer_copy_into(frame->input_buffer, tmp,
1283 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1284 GST_BUFFER_COPY_META,
1285 0, 0);
1286 gst_buffer_unref(tmp);
1287
1288 gst_video_codec_frame_unref(frame);
1289 return ret;
1290
1291 /* ERRORS */
1292not_negotiated:
1293{
1294 GST_ERROR_OBJECT(self, "not negotiated");
1295 ret = GST_FLOW_NOT_NEGOTIATED;
1296 gst_aml_v4l2_error(self, &error);
1297 goto drop;
1298}
1299activate_failed:
1300{
1301 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1302 (_("Failed to allocate required memory.")),
1303 ("Buffer pool activation failed"));
1304 ret = GST_FLOW_ERROR;
1305 goto drop;
1306}
1307flushing:
1308{
1309 ret = GST_FLOW_FLUSHING;
1310 goto drop;
1311}
1312
1313start_task_failed:
1314{
1315 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1316 (_("Failed to start decoding thread.")), (NULL));
1317 ret = GST_FLOW_ERROR;
1318 goto drop;
1319}
1320process_failed:
1321{
1322 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1323 (_("Failed to process frame.")),
1324 ("Maybe be due to not enough memory or failing driver"));
1325 ret = GST_FLOW_ERROR;
1326 goto drop;
1327}
1328drop:
1329{
1330 gst_video_decoder_drop_frame(decoder, frame);
1331 return ret;
1332}
1333}
1334
1335static gboolean
1336gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1337 GstQuery *query)
1338{
1339 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1340 GstClockTime latency;
1341 gboolean ret = FALSE;
1342
1343 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1344 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1345
1346 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1347 {
1348 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1349 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1350 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1351 gst_video_decoder_set_latency(decoder, latency, latency);
1352 }
1353 else
1354 {
1355 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1356 }
1357
1358 return ret;
1359}
1360
1361static gboolean
1362gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1363{
1364 gboolean ret = TRUE;
1365 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1366
1367 switch (GST_QUERY_TYPE(query))
1368 {
1369 case GST_QUERY_CAPS:
1370 {
1371 GstCaps *filter, *result = NULL;
1372 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1373
1374 gst_query_parse_caps(query, &filter);
1375
1376 if (self->probed_srccaps)
1377 result = gst_caps_ref(self->probed_srccaps);
1378 else
1379 result = gst_pad_get_pad_template_caps(pad);
1380
1381 if (filter)
1382 {
1383 GstCaps *tmp = result;
1384 result =
1385 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1386 gst_caps_unref(tmp);
1387 }
1388
1389 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1390
1391 gst_query_set_caps_result(query, result);
1392 gst_caps_unref(result);
1393 break;
1394 }
1395
1396 default:
1397 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1398 break;
1399 }
1400
1401 return ret;
1402}
1403
1404static GstCaps *
1405gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1406{
1407 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1408 GstCaps *result;
1409
1410 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1411 filter);
1412
1413 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1414
1415 return result;
1416}
1417
1418static gboolean
1419gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1420{
1421 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1422 gboolean ret;
1423 GstEventType type = GST_EVENT_TYPE(event);
1424
1425 switch (type)
1426 {
xuesong.jiang406ee302023-06-28 03:45:22 +00001427 case GST_EVENT_STREAM_START:
1428 {
1429 GstStructure *s;
1430 GstEvent *event;
1431 GST_DEBUG_OBJECT(self, "new private event");
1432 s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
1433 event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
1434 GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", event);
1435 gst_pad_push_event (decoder->sinkpad, event);
1436 GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", event);
1437 break;
1438 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001439 case GST_EVENT_FLUSH_START:
1440 GST_DEBUG_OBJECT(self, "flush start");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001441
1442 g_mutex_lock (&self->res_chg_lock);
1443 while (self->is_res_chg)
1444 {
1445 GST_LOG_OBJECT(decoder, "wait resolution change finish");
1446 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1447 }
1448 g_mutex_unlock (&self->res_chg_lock);
1449
zengliang.li92ff6822023-06-06 07:12:52 +00001450 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001451 gst_aml_v4l2_object_unlock(self->v4l2output);
1452 gst_aml_v4l2_object_unlock(self->v4l2capture);
1453 break;
1454 default:
1455 break;
1456 }
1457
1458 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1459
1460 switch (type)
1461 {
1462 case GST_EVENT_FLUSH_START:
1463 /* The processing thread should stop now, wait for it */
1464 gst_pad_stop_task(decoder->srcpad);
1465 GST_DEBUG_OBJECT(self, "flush start done");
1466 break;
1467 default:
1468 break;
1469 }
1470
1471 return ret;
1472}
1473
1474static GstStateChangeReturn
1475gst_aml_v4l2_video_dec_change_state(GstElement *element,
1476 GstStateChange transition)
1477{
1478 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1479 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1480
1481 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1482 {
1483 g_atomic_int_set(&self->active, FALSE);
1484 gst_aml_v4l2_object_unlock(self->v4l2output);
1485 gst_aml_v4l2_object_unlock(self->v4l2capture);
1486 gst_pad_stop_task(decoder->srcpad);
1487 }
1488
1489 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1490}
1491
1492static void
1493gst_aml_v4l2_video_dec_dispose(GObject *object)
1494{
1495 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1496
1497 gst_caps_replace(&self->probed_sinkcaps, NULL);
1498 gst_caps_replace(&self->probed_srccaps, NULL);
1499
1500 G_OBJECT_CLASS(parent_class)->dispose(object);
1501}
1502
1503static void
1504gst_aml_v4l2_video_dec_finalize(GObject *object)
1505{
1506 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1507
1508 gst_aml_v4l2_object_destroy(self->v4l2capture);
1509 gst_aml_v4l2_object_destroy(self->v4l2output);
1510
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001511 g_mutex_clear(&self->res_chg_lock);
1512 g_cond_clear(&self->res_chg_cond);
1513
xuesong.jiang61ea8012022-05-12 15:38:17 +08001514#if GST_IMPORT_LGE_PROP
1515 if (self->lge_ctxt)
1516 {
1517 if (self->lge_ctxt->app_type)
1518 g_free(self->lge_ctxt->app_type);
1519 if (self->lge_ctxt->res_info.coretype)
1520 g_free(self->lge_ctxt->res_info.coretype);
1521 free(self->lge_ctxt);
1522 }
1523
1524#endif
1525
xuesong.jiangae1548e2022-05-06 16:38:46 +08001526 G_OBJECT_CLASS(parent_class)->finalize(object);
1527}
1528
1529static void
1530gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1531{
1532 /* V4L2 object are created in subinstance_init */
zengliang.li92ff6822023-06-06 07:12:52 +00001533 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001534 self->is_secure_path = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001535 self->is_res_chg = FALSE;
hanghang.luo419c4a92023-07-14 07:36:07 +00001536 self->is_interlace = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001537 g_mutex_init(&self->res_chg_lock);
1538 g_cond_init(&self->res_chg_cond);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001539#if GST_IMPORT_LGE_PROP
1540 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1541 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1542#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001543}
1544
1545static void
1546gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1547{
1548 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1549 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1550 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1551
1552 gst_video_decoder_set_packetized(decoder, TRUE);
1553
1554 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1555 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1556 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1557 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1558 self->v4l2output->no_initial_format = TRUE;
1559 self->v4l2output->keep_aspect = FALSE;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001560 self->v4l2output->is_svp = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001561
1562 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1563 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1564 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1565 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1566 self->v4l2capture->need_wait_event = TRUE;
sheng.liub56bbc52022-06-21 11:02:33 +08001567 self->v4l2capture->need_drop_event = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001568}
1569
1570static void
1571gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1572{
1573 GstElementClass *element_class;
1574 GObjectClass *gobject_class;
1575 GstVideoDecoderClass *video_decoder_class;
1576
1577 parent_class = g_type_class_peek_parent(klass);
1578
1579 element_class = (GstElementClass *)klass;
1580 gobject_class = (GObjectClass *)klass;
1581 video_decoder_class = (GstVideoDecoderClass *)klass;
1582
1583 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1584 "AML V4L2 Video Decoder");
1585
1586 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1587 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1588 gobject_class->set_property =
1589 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1590 gobject_class->get_property =
1591 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1592
1593 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1594 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1595 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1596 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1597 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1598 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1599 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1600 video_decoder_class->set_format =
1601 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1602 video_decoder_class->negotiate =
1603 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1604 video_decoder_class->decide_allocation =
1605 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1606 /* FIXME propose_allocation or not ? */
1607 video_decoder_class->handle_frame =
1608 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1609 video_decoder_class->getcaps =
1610 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1611 video_decoder_class->src_query =
1612 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1613 video_decoder_class->sink_event =
1614 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1615
1616 element_class->change_state =
1617 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1618
xuesong.jiang406ee302023-06-28 03:45:22 +00001619 g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
1620 G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
1621 G_SIGNAL_RUN_LAST,
1622 0, /* class offset */
1623 NULL, /* accumulator */
1624 NULL, /* accu data */
1625 g_cclosure_marshal_generic,
1626 G_TYPE_NONE,
1627 1,
1628 G_TYPE_UINT64);
1629
xuesong.jiangae1548e2022-05-06 16:38:46 +08001630 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001631#if GST_IMPORT_LGE_PROP
1632 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1633#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001634}
1635
1636static void
1637gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1638{
1639 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1640 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1641 GstAmlV4l2VideoDecCData *cdata = data;
1642
1643 klass->default_device = cdata->device;
1644
1645 /* Note: gst_pad_template_new() take the floating ref from the caps */
1646 gst_element_class_add_pad_template(element_class,
1647 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1648 cdata->sink_caps));
1649 gst_element_class_add_pad_template(element_class,
1650 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1651 cdata->src_caps));
1652
1653 gst_element_class_set_metadata(element_class, cdata->longname,
1654 "Codec/Decoder/Video/Hardware", cdata->description,
1655 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1656
1657 gst_caps_unref(cdata->sink_caps);
1658 gst_caps_unref(cdata->src_caps);
1659 g_free(cdata);
1660}
1661
1662/* Probing functions */
1663gboolean
1664gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1665{
1666 gboolean ret = FALSE;
1667
1668 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1669 ret = TRUE;
1670
1671 return ret;
1672}
1673
1674static gchar *
1675gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1676 const gchar *basename)
1677{
1678 gchar *codec_name = NULL;
1679 gchar *type_name = NULL;
1680 gboolean got_value = FALSE;
1681
1682#define SET_META(codec) \
1683 G_STMT_START \
1684 { \
1685 cdata->longname = "AML V4L2 " codec " Decoder"; \
1686 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1687 codec_name = g_ascii_strdown(codec, -1); \
1688 } \
1689 G_STMT_END
1690
1691 if (gst_structure_has_name(s, "image/jpeg"))
1692 {
1693 SET_META("JPEG");
1694 }
1695 else if (gst_structure_has_name(s, "video/mpeg"))
1696 {
1697 gint mpegversion = 0;
1698 gint *list = NULL;
1699 got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1700 if (FALSE == got_value)
1701 {
1702 got_value = gst_structure_get_list(s, "mpegversion", &list);
1703 if (TRUE == got_value && (1 == *list || 2 == *list))
1704 {
1705 SET_META("MPEG2");
1706 }
1707 else
1708 {
1709 SET_META("MPEG4");
1710 }
1711 }
1712 else
1713 {
1714 SET_META("MPEG4");
1715 }
1716 }
1717 else if (gst_structure_has_name(s, "video/x-h263"))
1718 {
1719 SET_META("H263");
1720 }
1721 else if (gst_structure_has_name(s, "video/x-fwht"))
1722 {
1723 SET_META("FWHT");
1724 }
1725 else if (gst_structure_has_name(s, "video/x-h264"))
1726 {
1727 SET_META("H264");
1728 }
1729 else if (gst_structure_has_name(s, "video/x-h265"))
1730 {
1731 SET_META("H265");
1732 }
1733 else if (gst_structure_has_name(s, "video/x-wmv"))
1734 {
1735 SET_META("VC1");
1736 }
1737 else if (gst_structure_has_name(s, "video/x-vp8"))
1738 {
1739 SET_META("VP8");
1740 }
1741 else if (gst_structure_has_name(s, "video/x-vp9"))
1742 {
1743 SET_META("VP9");
1744 }
1745 else if (gst_structure_has_name(s, "video/x-av1"))
1746 {
1747 SET_META("AV1");
1748 }
1749 else if (gst_structure_has_name(s, "video/x-bayer"))
1750 {
1751 SET_META("BAYER");
1752 }
1753 else if (gst_structure_has_name(s, "video/x-sonix"))
1754 {
1755 SET_META("SONIX");
1756 }
1757 else if (gst_structure_has_name(s, "video/x-pwc1"))
1758 {
1759 SET_META("PWC1");
1760 }
1761 else if (gst_structure_has_name(s, "video/x-pwc2"))
1762 {
1763 SET_META("PWC2");
1764 }
1765 else
1766 {
1767 /* This code should be kept on sync with the exposed CODEC type of format
1768 * from gstamlv4l2object.c. This warning will only occure in case we forget
1769 * to also add a format here. */
1770 gchar *s_str = gst_structure_to_string(s);
1771 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1772 "bug, please report at https://bugs.gnome.org",
1773 s_str);
1774 g_free(s_str);
1775 }
1776
1777 if (codec_name)
1778 {
1779 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1780 if (g_type_from_name(type_name) != 0)
1781 {
1782 g_free(type_name);
1783 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1784 }
1785
1786 g_free(codec_name);
1787 }
1788
1789 return type_name;
1790#undef SET_META
1791}
1792
1793void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1794 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1795{
1796 gint i;
1797
1798 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1799 {
1800 GstAmlV4l2VideoDecCData *cdata;
1801 GstStructure *s;
1802 GTypeQuery type_query;
1803 GTypeInfo type_info = {
1804 0,
1805 };
1806 GType type, subtype;
1807 gchar *type_name;
1808
1809 s = gst_caps_get_structure(sink_caps, i);
1810
1811 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1812 cdata->device = g_strdup(device_path);
1813 cdata->sink_caps = gst_caps_new_empty();
1814 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1815 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1816 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1817 cdata->src_caps = gst_caps_copy(src_caps);
1818 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1819 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1820 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1821
1822 /* Skip over if we hit an unmapped type */
1823 if (!type_name)
1824 {
1825 g_free(cdata);
1826 continue;
1827 }
1828
1829 type = gst_aml_v4l2_video_dec_get_type();
1830 g_type_query(type, &type_query);
1831 memset(&type_info, 0, sizeof(type_info));
1832 type_info.class_size = type_query.class_size;
1833 type_info.instance_size = type_query.instance_size;
1834 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1835 type_info.class_data = cdata;
1836 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1837
1838 subtype = g_type_register_static(type, type_name, &type_info, 0);
1839 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1840 subtype))
1841 GST_WARNING("Failed to register plugin '%s'", type_name);
1842
1843 g_free(type_name);
1844 }
1845}
xuesong.jiang61ea8012022-05-12 15:38:17 +08001846
1847#if GST_IMPORT_LGE_PROP
1848static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
1849{
1850 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
1851 g_param_spec_object("resource-info", "resource-info",
1852 "After acquisition of H/W resources is completed, allocated resource information must be delivered to the decoder and the sink",
1853 GST_TYPE_STRUCTURE,
1854 G_PARAM_READABLE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
1855
1856 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
1857 g_param_spec_uint64("decoded-size", "decoded-size",
1858 "The total amount of decoder element's decoded video es after constructing pipeline or flushing pipeline update unit is byte.",
1859 0, G_MAXUINT64,
1860 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1861
1862 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
1863 g_param_spec_uint64("undecoded-size", "undecoded-size",
1864 "video decoder element's total undecoded data update unit is byte.",
1865 0, G_MAXUINT64,
1866 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1867
1868 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
1869 g_param_spec_string("app-type", "app-type",
1870 "set application type.",
1871 "default_app",
1872 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1873
1874 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
1875 g_param_spec_boolean("clip-mode", "clip-mode",
1876 "When seeking, Content is moving faster for a while to skip frames.",
1877 FALSE,
1878 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1879}
1880#endif