/* GStreamer
 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>

#include "gstamlv4l2object.h"
#include "gstamlv4l2videodec.h"

#include <string.h>
#include <gst/gst-i18n-plugin.h>
#include <gst/allocators/gstdmabuf.h>

GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug

#ifdef GST_VIDEO_DECODER_STREAM_LOCK
#undef GST_VIDEO_DECODER_STREAM_LOCK
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder)                          \
    {                                                                   \
        GST_TRACE("aml v4l2 dec locking");                              \
        g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock);     \
        GST_TRACE("aml v4l2 dec locked");                               \
    }
#endif

#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
#undef GST_VIDEO_DECODER_STREAM_UNLOCK
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder)                        \
    {                                                                   \
        GST_TRACE("aml v4l2 dec unlocking");                            \
        g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock);   \
        GST_TRACE("aml v4l2 dec unlocked");                             \
    }
#endif
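/* The two redefinitions above only wrap the base-class recursive stream lock
 * with GST_TRACE logging so that lock/unlock ordering can be followed in
 * debug logs; the locking behaviour itself is unchanged. */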

#ifndef ABSDIFF
#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
#endif
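/* ABSDIFF is used below to match dequeued capture buffers back to their
 * pending GstVideoCodecFrame by comparing PTS values against a small
 * tolerance (1000 ns) in gst_aml_v4l2_video_dec_get_right_frame_*(). */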

#if GST_IMPORT_LGE_PROP
typedef struct _GstAmlResourceInfo
{
    gchar *coretype;
    gint videoport;
    gint audioport;
    gint maxwidth;
    gint maxheight;
    gint mixerport;
} GstAmlResourceInfo;

struct _GstAmlV4l2VideoDecLgeCtxt
{
    GstAmlResourceInfo res_info;
    guint64 dec_size;
    guint64 undec_size;
    gchar *app_type;
    gboolean clip_mode;
};
#endif

typedef struct
{
    gchar *device;
    GstCaps *sink_caps;
    GstCaps *src_caps;
    const gchar *longname;
    const gchar *description;
} GstAmlV4l2VideoDecCData;

enum
{
    PROP_0,
    V4L2_STD_OBJECT_PROPS,
#if GST_IMPORT_LGE_PROP
    LGE_RESOURCE_INFO,
    LGE_DECODE_SIZE,
    LGE_UNDECODE_SIZE,
    LGE_APP_TYPE,
    LGE_CLIP_MODE
#endif
};

enum
{
    SIGNAL_DECODED_PTS,
    MAX_SIGNAL
};

static guint g_signals[MAX_SIGNAL] = {0};
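/* Illustrative sketch (not part of this element's code): an application that
 * wants the per-buffer PTS notification can connect to the "decoded-pts"
 * signal registered in class_init, e.g.
 *
 *   static void on_decoded_pts(GstElement *dec, guint64 pts, gpointer udata)
 *   {
 *       g_print("decoded pts: %" G_GUINT64_FORMAT "\n", pts);
 *   }
 *   ...
 *   g_signal_connect(decoder_element, "decoded-pts",
 *                    G_CALLBACK(on_decoded_pts), NULL);
 *
 * The names on_decoded_pts, decoder_element and udata are placeholders. */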

#define gst_aml_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
                       GST_TYPE_VIDEO_DECODER);

static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
#endif

static void
gst_aml_v4l2_video_dec_set_property(GObject *object,
                                    guint prop_id, const GValue *value, GParamSpec *pspec)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    switch (prop_id)
    {
    case PROP_CAPTURE_IO_MODE:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    case PROP_DUMP_FRAME_LOCATION:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
#if GST_IMPORT_LGE_PROP
    case LGE_RESOURCE_INFO:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set res info");
        GstStructure *r_info = g_value_get_object(value);
        if (r_info)
        {
            if (gst_structure_has_field(r_info, "coretype"))
            {
                if (self->lge_ctxt->res_info.coretype)
                    g_free(self->lge_ctxt->res_info.coretype);
                self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
            }
            if (gst_structure_has_field(r_info, "videoport"))
                gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
            if (gst_structure_has_field(r_info, "audioport"))
                gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
            if (gst_structure_has_field(r_info, "maxwidth"))
                gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
            if (gst_structure_has_field(r_info, "maxheight"))
                gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
            if (gst_structure_has_field(r_info, "mixerport"))
                gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
        }
        break;
    }
    case LGE_APP_TYPE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set app type");
        if (self->lge_ctxt->app_type)
            g_free(self->lge_ctxt->app_type);
        self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
        break;
    }
    case LGE_CLIP_MODE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
        /* clip_mode is a gboolean, so store the boolean value directly */
        self->lge_ctxt->clip_mode = g_value_get_boolean(value);
        break;
    }
#endif
    /* By default, only set on output */
    default:
        if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    }
}

static void
gst_aml_v4l2_video_dec_get_property(GObject *object,
                                    guint prop_id, GValue *value, GParamSpec *pspec)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    switch (prop_id)
    {
    case PROP_CAPTURE_IO_MODE:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;

#if GST_IMPORT_LGE_PROP
    case LGE_DECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
        self->lge_ctxt->dec_size = -1;
        g_value_set_int(value, self->lge_ctxt->dec_size);
        break;
    }
    case LGE_UNDECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
        self->lge_ctxt->undec_size = -1;
        g_value_set_int(value, self->lge_ctxt->undec_size);
        break;
    }
#endif

    /* By default read from output */
    default:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    }
}

static gboolean
gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *codec_caps;

    GST_DEBUG_OBJECT(self, "Opening");

    if (!gst_aml_v4l2_object_open(self->v4l2output))
        goto failure;

    if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
        goto failure;

    codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
    self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
                                                           codec_caps);
    gst_caps_unref(codec_caps);

    if (gst_caps_is_empty(self->probed_sinkcaps))
        goto no_encoded_format;

    return TRUE;

no_encoded_format:
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Decoder on device %s has no supported input format"),
                       self->v4l2output->videodev),
                      (NULL));
    goto failure;

failure:
    if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
        gst_aml_v4l2_object_close(self->v4l2output);

    if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
        gst_aml_v4l2_object_close(self->v4l2capture);

    gst_caps_replace(&self->probed_srccaps, NULL);
    gst_caps_replace(&self->probed_sinkcaps, NULL);

    return FALSE;
}

static gboolean
gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Closing");

    gst_aml_v4l2_object_close(self->v4l2output);
    gst_aml_v4l2_object_close(self->v4l2capture);
    gst_caps_replace(&self->probed_srccaps, NULL);
    gst_caps_replace(&self->probed_sinkcaps, NULL);

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Starting");

    gst_aml_v4l2_object_unlock(self->v4l2output);
    g_atomic_int_set(&self->active, TRUE);
    self->output_flow = GST_FLOW_OK;

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Stopping");

    gst_aml_v4l2_object_unlock(self->v4l2output);
    gst_aml_v4l2_object_unlock(self->v4l2capture);

    /* Wait for capture thread to stop */
    gst_pad_stop_task(decoder->srcpad);

    GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    self->output_flow = GST_FLOW_OK;
    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    /* Should have been flushed already */
    g_assert(g_atomic_int_get(&self->active) == FALSE);

    gst_aml_v4l2_object_stop(self->v4l2output);
    gst_aml_v4l2_object_stop(self->v4l2capture);

    if (self->input_state)
    {
        gst_video_codec_state_unref(self->input_state);
        self->input_state = NULL;
    }

    GST_DEBUG_OBJECT(self, "Stopped");

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
                                  GstVideoCodecState *state)
{
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    gboolean ret = TRUE;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *caps;

    GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
    GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);

    if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
        self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;

    if (self->input_state)
    {
        if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
        {
            GST_DEBUG_OBJECT(self, "Compatible caps");
            goto done;
        }
        gst_video_codec_state_unref(self->input_state);
        self->input_state = NULL;

        gst_aml_v4l2_video_dec_finish(decoder);
        gst_aml_v4l2_object_stop(self->v4l2output);

        /* The renegotiation flow doesn't blend with the base class flow. To
         * properly stop the capture pool, if the buffers can't be orphaned, we
         * need to reclaim our buffers, which will happen through the allocation
         * query. The allocation query is triggered by
         * gst_video_decoder_negotiate(), which requires the output caps to be
         * set, but we can't know this information as we rely on the decoder,
         * which requires the capture queue to be stopped.
         *
         * To work around this issue, we simply run an allocation query with the
         * old negotiated caps in order to drain/reclaim our buffers. That breaks
         * the complexity and should not have much impact on performance, since
         * the following allocation query will happen on a drained pipeline and
         * won't block. */
        if (self->v4l2capture->pool &&
            !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
        {
            GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
            if (caps)
            {
                GstQuery *query = gst_query_new_allocation(caps, FALSE);
                gst_pad_peer_query(decoder->srcpad, query);
                gst_query_unref(query);
                gst_caps_unref(caps);
            }
        }

        gst_aml_v4l2_object_stop(self->v4l2capture);
        self->output_flow = GST_FLOW_OK;
    }

    if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
    {
        GST_ERROR_OBJECT(self, "config output drm mode error");
        goto done;
    }

    if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
    {
        GST_ERROR_OBJECT(self, "config output stream mode error");
        goto done;
    }

    ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);

    gst_caps_replace(&self->probed_srccaps, NULL);
    self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
                                                          gst_aml_v4l2_object_get_raw_caps());

    if (gst_caps_is_empty(self->probed_srccaps))
        goto no_raw_format;

    caps = gst_caps_copy(self->probed_srccaps);
    gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
    gst_caps_append(self->probed_srccaps, caps);
    if (ret)
        self->input_state = gst_video_codec_state_ref(state);
    else
        gst_aml_v4l2_error(self, &error);

done:
    return ret;

no_raw_format:
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Decoder on device %s has no supported output format"),
                       self->v4l2output->videodev),
                      (NULL));
    return FALSE;
}

static gboolean
gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Flushed");

    /* Ensure the processing thread has stopped for the reverse playback
     * discont case */
    if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
    {
        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        gst_pad_stop_task(decoder->srcpad);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    }

    self->output_flow = GST_FLOW_OK;

    gst_aml_v4l2_object_unlock_stop(self->v4l2output);
    gst_aml_v4l2_object_unlock_stop(self->v4l2capture);

    if (self->v4l2output->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);

    /* gst_aml_v4l2_buffer_pool_flush() calls streamon on the capture pool and
     * must be called after gst_aml_v4l2_object_unlock_stop() has stopped
     * flushing the buffer pool. */
    if (self->v4l2capture->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);

    return TRUE;
}

static gboolean
gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    if (TRUE == self->v4l2output->is_svp)
    {
        GstStructure *s;
        GstEvent *event;

        s = gst_structure_new_empty ("IS_SVP");
        event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
        GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
        gst_pad_push_event (decoder->srcpad, event);
        GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
    }

    /* We don't allow renegotiation without carefully disabling the pool */
    if (self->v4l2capture->pool &&
        gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
        return TRUE;

    return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
}

static gboolean
gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
{
    struct v4l2_decoder_cmd dcmd = {
        0,
    };

    GST_DEBUG_OBJECT(v4l2object->element,
                     "sending v4l2 decoder command %u with flags %u", cmd, flags);

    if (!GST_AML_V4L2_IS_OPEN(v4l2object))
        return FALSE;

    dcmd.cmd = cmd;
    dcmd.flags = flags;
    if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
        goto dcmd_failed;

    return TRUE;

dcmd_failed:
    if (errno == ENOTTY)
    {
        GST_INFO_OBJECT(v4l2object->element,
                        "Failed to send decoder command %u with flags %u for '%s'. (%s)",
                        cmd, flags, v4l2object->videodev, g_strerror(errno));
    }
    else
    {
        GST_ERROR_OBJECT(v4l2object->element,
                         "Failed to send decoder command %u with flags %u for '%s'. (%s)",
                         cmd, flags, v4l2object->videodev, g_strerror(errno));
    }
    return FALSE;
}
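/* gst_aml_v4l2_video_dec_finish() below uses this helper with
 * V4L2_DEC_CMD_STOP and flags 0 to ask the driver to drain; when the ioctl is
 * not supported (ENOTTY) the caller falls back to queueing empty output
 * buffers until the processing thread stops. */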

static GstFlowReturn
gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstFlowReturn ret = GST_FLOW_OK;
    GstBuffer *buffer;

    if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
        goto done;

    GST_DEBUG_OBJECT(self, "Finishing decoding");

    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
    {
        GstTask *task = decoder->srcpad->task;

        /* If the decoder stop command succeeded, just wait until processing is
         * finished */
        GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
        GST_OBJECT_LOCK(task);
        while (GST_TASK_STATE(task) == GST_TASK_STARTED)
            GST_TASK_WAIT(task);
        GST_OBJECT_UNLOCK(task);
        ret = GST_FLOW_FLUSHING;
    }
    else
    {
        /* otherwise keep queuing empty buffers until the processing thread has
         * stopped, _pool_process() will return FLUSHING when that happened */
        while (ret == GST_FLOW_OK)
        {
            GST_DEBUG_OBJECT(self, "queue empty output buf");
            buffer = gst_buffer_new();
            ret =
                gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
            gst_buffer_unref(buffer);
        }
    }

    /* and ensure the processing thread has stopped in case another error
     * occurred. */
    gst_aml_v4l2_object_unlock(self->v4l2capture);
    gst_pad_stop_task(decoder->srcpad);
    GST_VIDEO_DECODER_STREAM_LOCK(decoder);

    if (ret == GST_FLOW_FLUSHING)
        ret = self->output_flow;

    GST_DEBUG_OBJECT(decoder, "Done draining buffers");

    /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */

done:
    return ret;
}

static GstFlowReturn
gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Draining...");
    gst_aml_v4l2_video_dec_finish(decoder);
    gst_aml_v4l2_video_dec_flush(decoder);

    return GST_FLOW_OK;
}

static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
{
    GstVideoCodecFrame *frame = NULL;
    GList *frames, *l;
    gint count = 0;

    frames = gst_video_decoder_get_frames(decoder);

    for (l = frames; l != NULL; l = l->next)
    {
        GstVideoCodecFrame *f = l->data;

        if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
            frame = f;

        count++;
    }

    if (frame)
    {
        GST_LOG_OBJECT(decoder,
                       "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
                       frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
        gst_video_codec_frame_ref(frame);
    }

    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);

    return frame;
}

static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
{
    GstVideoCodecFrame *frame = NULL;
    GList *frames, *l;
    gint count = 0;

    GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));

    frames = gst_video_decoder_get_frames(decoder);

    for (l = frames; l != NULL; l = l->next)
    {
        GstVideoCodecFrame *f = l->data;

        if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000) {
            frame = f;
        } else {
            if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
                frame = f;
        }

        count++;
    }

    if (frame)
    {
        GST_LOG_OBJECT(decoder,
                       "frame is %d %" GST_TIME_FORMAT " and %d frames left",
                       frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
        gst_video_codec_frame_ref(frame);
    }

    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);

    GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
    return frame;
}

static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
{
    GstVideoCodecFrame *frame = NULL;
    GList *frames, *l;

    GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));

    frames = gst_video_decoder_get_frames(decoder);
    guint frames_len = 0;
    frames_len = g_list_length(frames);
    GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);

    /* guard against an empty frame list before dereferencing it */
    if (frames == NULL)
    {
        GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
        return NULL;
    }

    frame = frames->data;

    for (l = frames; l != NULL; l = l->next)
    {
        GstVideoCodecFrame *f = l->data;

        if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
        {
            /* found the right frame */
            frame = f;
            break;
        }
        else if (GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
        {
            GST_LOG_OBJECT(decoder,
                           "stream mode drop frame %d %" GST_TIME_FORMAT,
                           f->system_frame_number, GST_TIME_ARGS(f->pts));

            gst_video_codec_frame_ref(f);
            // gst_video_decoder_drop_frame(decoder, f);
            gst_video_decoder_release_frame(decoder, f);
        }
        else
        {
            GST_LOG_OBJECT (decoder, "dbg");
        }
    }

    if (frame)
    {
        guint l_len = 0;
        l = gst_video_decoder_get_frames(decoder);
        l_len = g_list_length(l);
        g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);

        GST_LOG_OBJECT(decoder,
                       "frame is %d %" GST_TIME_FORMAT " and %d frames left",
                       frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
        gst_video_codec_frame_ref(frame);
    }

    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);

    GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
    return frame;
}

static GstVideoCodecFrame *
gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
{
    GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
    if (self->v4l2output->stream_mode)
        return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
    else
        return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
}

static gboolean
gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
                                  GstStructure *structure, gpointer user_data)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
    GstVideoAlignment *align = &self->v4l2capture->align;
    GstVideoInfo *info = &self->v4l2capture->info;
    int width, height;

    if (!gst_structure_get_int(structure, "width", &width))
        return TRUE;

    if (!gst_structure_get_int(structure, "height", &height))
        return TRUE;

    if (align->padding_left != 0 || align->padding_top != 0 ||
        height != info->height + align->padding_bottom)
        return TRUE;

    if (height == info->height + align->padding_bottom)
    {
        /* Some drivers may round up width to the padded width */
        if (width == info->width + align->padding_right)
            gst_structure_set(structure,
                              "width", G_TYPE_INT, width - align->padding_right,
                              "height", G_TYPE_INT, height - align->padding_bottom, NULL);
        /* Some drivers may keep visible width and only round up bytesperline */
        else if (width == info->width)
            gst_structure_set(structure,
                              "height", G_TYPE_INT, height - align->padding_bottom, NULL);
    }

    return TRUE;
}

static void
gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
{
    struct v4l2_event evt;
    gint ret;

    memset (&evt, 0x00, sizeof (struct v4l2_event));
    ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
    if (ret < 0)
    {
        GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
        return;
    }

    switch (evt.type)
    {
    case V4L2_EVENT_SOURCE_CHANGE:
        GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
        break;
    case V4L2_EVENT_EOS:
        GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
        break;
    default:
        break;
    }

    return;
}

static void
gst_aml_v4l2_video_dec_set_fence(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstStructure *s;
    GstEvent *event;

    guint fence_num = self->v4l2capture->min_buffers - 2;
    s = gst_structure_new ("video_fence", "fence_num", G_TYPE_UINT, fence_num, NULL);
    if (s)
    {
        event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
        GST_DEBUG_OBJECT(self, "Send video_fence Event: %" GST_PTR_FORMAT, event);
        gst_pad_push_event (decoder->srcpad, event);
    }
}
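/* Illustrative sketch (assumption, not a documented contract): a downstream
 * element interested in the custom "video_fence" event above could read the
 * fence count from its structure roughly as
 *
 *   const GstStructure *st = gst_event_get_structure (event);
 *   guint fence_num = 0;
 *   if (st && gst_structure_has_name (st, "video_fence"))
 *       gst_structure_get_uint (st, "fence_num", &fence_num);
 */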

static void
gst_aml_v4l2_video_dec_set_output_status(GstVideoDecoder *decoder, GstVideoInfo info)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstVideoCodecState *output_state;
    struct v4l2_selection sel;
    struct v4l2_rect *r = NULL;
    GstStructure *s;
    gint width = 0;
    gint height = 0;
    output_state = gst_video_decoder_set_output_state(decoder,
                                                      info.finfo->format, info.width, info.height, self->input_state);
    memset(&sel, 0, sizeof(struct v4l2_selection));
    sel.type = self->v4l2capture->type;
    sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
    if (self->v4l2capture->ioctl(self->v4l2capture->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
    {
        r = &sel.r;
        width = (r->width / 2) * 2;
        height = (r->height / 2) * 2;
        GST_DEBUG_OBJECT(self, "w:%d h:%d ", width, height);
    }
    else
        GST_DEBUG_OBJECT(self, "ioctl error");
    if (output_state)
    {
        output_state->info.interlace_mode = info.interlace_mode;
        output_state->allocation_caps = gst_video_info_to_caps(&info);
        output_state->caps = gst_video_info_to_caps(&info);
        s = gst_caps_get_structure(output_state->caps, 0);
        if (s)
        {
            gst_structure_set(s, "coded_width", G_TYPE_INT, info.width, NULL);
            gst_structure_set(s, "coded_height", G_TYPE_INT, info.height, NULL);
            gst_structure_set(s, "width", G_TYPE_INT, width, NULL);
            gst_structure_set(s, "height", G_TYPE_INT, height, NULL);
            GST_DEBUG_OBJECT(self, "output_state->caps: %" GST_PTR_FORMAT, output_state->caps);
            gst_video_codec_state_unref(output_state);
        }
    }
}
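/* Note on the caps produced above: the coded (aligned) dimensions from the
 * acquired format are published as "coded_width"/"coded_height", while
 * "width"/"height" carry the visible size taken from the V4L2 compose
 * rectangle, rounded down to even values. */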

static void
gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstAmlV4l2BufferPool *v4l2_pool;
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    GstBufferPool *pool;
    GstVideoCodecFrame *frame;
    GstBuffer *buffer = NULL;
    GstFlowReturn ret;

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
    {
        GstVideoInfo info;
        GstCaps *acquired_caps, *available_caps, *caps, *filter;
        GstStructure *st;

        GST_DEBUG_OBJECT(self, "waiting for source change event");
        /* Wait until the SOURCE_CHANGE event is received to get the right video format */
        while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
        {
            ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
            if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
            {
                GST_DEBUG_OBJECT(self, "Received source change event");
                break;
            }
            else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
            {
                GST_DEBUG_OBJECT(self, "Received eos event");
                goto beach;
            }
            else if (ret != GST_FLOW_OK)
            {
                GST_ERROR_OBJECT(self, "dqevent error");
                goto beach;
            }
        }
        self->v4l2capture->need_wait_event = FALSE;

        if (TRUE == self->v4l2output->is_svp)
        {
            GstPad *peer;
            GstStructure *s;
            GstEvent *event;

            peer = gst_pad_get_peer (decoder->srcpad);
            if (peer)
            {
                s = gst_structure_new_empty ("IS_SVP");
                if (s)
                {
                    event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
                    gst_pad_send_event (peer, event);
                    GST_DEBUG_OBJECT(self, "Send SVP Event");
                }
                gst_object_unref (peer);
            }
        }

        if (self->v4l2capture->need_drop_event)
        {
            // drop V4L2_EVENT_SOURCE_CHANGE
            gst_v4l2_drop_event(self->v4l2capture);
            self->v4l2capture->need_drop_event = FALSE;
        }

        if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
            goto not_negotiated;
        if (info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
        {
            GST_DEBUG_OBJECT(self, "change interlace to progressive");
            info.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
            self->is_interlace = TRUE;
        }
        /* Create caps from the acquired format, remove the format field */
        acquired_caps = gst_video_info_to_caps(&info);
        GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
        st = gst_caps_get_structure(acquired_caps, 0);
        gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);

        /* Probe currently available pixel formats */
        available_caps = gst_caps_copy(self->probed_srccaps);
        GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);

        /* Replace coded size with visible size, we want to negotiate visible size
         * with downstream, not coded size. */
        gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);

        filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
        caps = gst_caps_copy(filter);
        gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
        gst_caps_append(filter, caps);

        GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
        gst_caps_unref(acquired_caps);
        gst_caps_unref(available_caps);
        caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
        gst_caps_unref(filter);

        GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
        if (gst_caps_is_empty(caps))
        {
            gst_caps_unref(caps);
            goto not_negotiated;
        }

        /* Fixate pixel format */
        caps = gst_caps_fixate(caps);

        GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);

        /* Try to set negotiated format, on success replace acquired format */
        if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
            gst_video_info_from_caps(&info, caps);
        else
            gst_aml_v4l2_clear_error(&error);
        gst_caps_unref(caps);
        gst_aml_v4l2_video_dec_set_fence(decoder);
        gst_aml_v4l2_video_dec_set_output_status(decoder, info);
        if (!gst_video_decoder_negotiate(decoder))
        {
            if (GST_PAD_IS_FLUSHING(decoder->srcpad))
                goto flushing;
            else
                goto not_negotiated;
        }

        /* Ensure our internal pool is activated */
        if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
                                        TRUE))
            goto activate_failed;

        g_mutex_lock(&self->res_chg_lock);
        GST_LOG_OBJECT(decoder, "signal resolution changed");
        self->is_res_chg = FALSE;
        g_cond_signal(&self->res_chg_cond);
        g_mutex_unlock(&self->res_chg_lock);
    }

    GST_LOG_OBJECT(decoder, "Allocate output buffer");

    v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);

    self->output_flow = GST_FLOW_OK;
    do
    {
        /* We cannot use the base class allocate helper since it is taking the
         * internal stream lock. We know that the acquire may need to poll until
         * more frames come in and holding this lock would prevent that.
         */
        pool = gst_video_decoder_get_buffer_pool(decoder);

        /* Pool may be NULL if we started going to READY state */
        if (pool == NULL)
        {
            GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
            ret = GST_FLOW_FLUSHING;
            goto beach;
        }

        ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
        // calculate a new pts for interlace stream
        if (ret == GST_FLOW_OK && self->is_interlace)
        {
            // if buffer pts is valid, reduce 1/2 duration
            if (GST_BUFFER_DURATION_IS_VALID(buffer))
            {
                GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer) / 2;
            }
            GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
            // reset pts
            if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
            {
                double rate = ((double)self->input_state->info.fps_n / (double)self->input_state->info.fps_d) * 2;
                GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL / rate;
            }
        }

        g_object_unref(pool);

        if (ret == GST_FLOW_OK && GST_BUFFER_FLAG_IS_SET(buffer, GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY)) {
            GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
            self->v4l2capture->need_drop_event = TRUE;
            gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
            goto beach;
        }

        if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
        {
            GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");

            g_mutex_lock (&self->res_chg_lock);
            self->is_res_chg = TRUE;
            g_mutex_unlock (&self->res_chg_lock);

            gst_aml_v4l2_object_stop(self->v4l2capture);
            return;
        }

        if (ret != GST_FLOW_OK) {
            GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d", ret);
            goto beach;
        }

        GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
        ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);

        GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
        g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));

        if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
        {
            gst_aml_v4l2_object_stop(self->v4l2capture);
            return;
        }

    } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);

    if (ret != GST_FLOW_OK)
        goto beach;

    frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
    if (frame)
    {
        if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
        {
            if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
            {
                if (GST_CLOCK_TIME_IS_VALID(frame->dts))
                {
                    GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
                }
                else
                {
                    GST_WARNING_OBJECT (decoder, "sorry, we have no baseline to calculate pts");
                    goto beach;
                }
            }
            else
            {
                double rate = ((double)self->input_state->info.fps_n / (double)self->input_state->info.fps_d);
                GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL / rate;
            }
        }
        self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
        frame->output_buffer = buffer;
        frame->pts = GST_BUFFER_TIMESTAMP(buffer);
        frame->duration = GST_BUFFER_DURATION(buffer);
        buffer = NULL;
        ret = gst_video_decoder_finish_frame(decoder, frame);

        if (ret != GST_FLOW_OK)
            goto beach;
    }
    else
    {
        GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
        gst_buffer_unref(buffer);
    }

    return;
    /* ERRORS */
not_negotiated:
{
    GST_ERROR_OBJECT(self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto beach;
}
activate_failed:
{
    GST_ERROR_OBJECT(self, "Buffer pool activation failed");
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Failed to allocate required memory.")),
                      ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto beach;
}
flushing:
{
    ret = GST_FLOW_FLUSHING;
    goto beach;
}
beach:
    GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
                     gst_flow_get_name(ret));

    gst_buffer_replace(&buffer, NULL);
    self->output_flow = ret;
    gst_aml_v4l2_object_unlock(self->v4l2output);
    gst_pad_pause_task(decoder->srcpad);
}

static GstFlowReturn
gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
                                    GstVideoCodecFrame *frame)
{
    GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
    GstFlowReturn ret = GST_FLOW_OK;
    gboolean processed = FALSE;
    GstBuffer *tmp;
    GstTaskState task_state;
    GstCaps *caps;

    GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);

    if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
        goto flushing;

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
    {
        if (!self->input_state)
            goto not_negotiated;
        if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
                                            &error))
            goto not_negotiated;
    }

    if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
    {
        GstBuffer *codec_data;
        GstCapsFeatures *features = NULL;

        features = gst_caps_get_features(self->input_state->caps, 0);
        if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
        {
            GST_DEBUG_OBJECT(self, "Is SVP");
            self->v4l2output->is_svp = TRUE;
        }

        GST_DEBUG_OBJECT(self, "Sending header");

        codec_data = self->input_state->codec_data;

        /* We are running in byte-stream mode, so we don't know the headers, but
         * we need to send something, otherwise the decoder will refuse to
         * initialize.
         */
        if (codec_data)
        {
            gst_buffer_ref(codec_data);
        }
        else
        {
            codec_data = gst_buffer_ref(frame->input_buffer);
            processed = TRUE;
        }

        /* Ensure input internal pool is active */
        if (!gst_buffer_pool_is_active(pool))
        {
            GstStructure *config = gst_buffer_pool_get_config(pool);
            // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
            // guint max = VIDEO_MAX_FRAME;
            // gst_buffer_pool_config_set_params (config, self->input_state->caps,
            //     self->v4l2output->info.size, min, max);
            gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);

            /* There is no reason to refuse this config */
            if (!gst_buffer_pool_set_config(pool, config))
                goto activate_failed;
            GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);

            if (!gst_buffer_pool_set_active(pool, TRUE))
                goto activate_failed;
        }

        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
        ret =
            gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);

        gst_buffer_unref(codec_data);

        /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
         * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks
         * both and returns the visible size as width/height and the coded size
         * as padding. */
    }

    task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
    if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
    {
        /* It's possible that the processing thread stopped due to an error */
        if (self->output_flow != GST_FLOW_OK &&
            self->output_flow != GST_FLOW_FLUSHING)
        {
            GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
            ret = self->output_flow;
            goto drop;
        }

        GST_DEBUG_OBJECT(self, "Starting decoding thread");

        /* Start the processing task, when it quits, the task will disable input
         * processing to unlock input if draining, or prevent potential block */
        self->output_flow = GST_FLOW_FLUSHING;
        if (!gst_pad_start_task(decoder->srcpad,
                                (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
            goto start_task_failed;
    }

    if (!processed)
    {
        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
        ret =
            gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);

        if (ret == GST_FLOW_FLUSHING)
        {
            if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
                GST_TASK_STARTED)
                ret = self->output_flow;
            goto drop;
        }
        else if (ret != GST_FLOW_OK)
        {
            goto process_failed;
        }
    }

    /* No need to keep input around */
    tmp = frame->input_buffer;
    frame->input_buffer = gst_buffer_new();
    gst_buffer_copy_into(frame->input_buffer, tmp,
                         GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
                             GST_BUFFER_COPY_META,
                         0, 0);
    gst_buffer_unref(tmp);

    gst_video_codec_frame_unref(frame);
    return ret;

    /* ERRORS */
not_negotiated:
{
    GST_ERROR_OBJECT(self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_aml_v4l2_error(self, &error);
    goto drop;
}
activate_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
                      (_("Failed to allocate required memory.")),
                      ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
}
flushing:
{
    ret = GST_FLOW_FLUSHING;
    goto drop;
}

start_task_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
                      (_("Failed to start decoding thread.")), (NULL));
    ret = GST_FLOW_ERROR;
    goto drop;
}
process_failed:
{
    GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
                      (_("Failed to process frame.")),
                      ("May be due to not enough memory or a failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
}
drop:
{
    gst_video_decoder_drop_frame(decoder, frame);
    return ret;
}
}

static gboolean
gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
                                         GstQuery *query)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstClockTime latency;
    gboolean ret = FALSE;

    if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
        ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);

    if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
    {
        latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
        GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
                         self->v4l2capture->min_buffers, self->v4l2capture->duration);
        gst_video_decoder_set_latency(decoder, latency, latency);
    }
    else
    {
        GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
    }

    return ret;
}

static gboolean
gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
{
    gboolean ret = TRUE;
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    switch (GST_QUERY_TYPE(query))
    {
    case GST_QUERY_CAPS:
    {
        GstCaps *filter, *result = NULL;
        GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);

        gst_query_parse_caps(query, &filter);

        if (self->probed_srccaps)
            result = gst_caps_ref(self->probed_srccaps);
        else
            result = gst_pad_get_pad_template_caps(pad);

        if (filter)
        {
            GstCaps *tmp = result;
            result =
                gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
            gst_caps_unref(tmp);
        }

        GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);

        gst_query_set_caps_result(query, result);
        gst_caps_unref(result);
        break;
    }

    default:
        ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
        break;
    }

    return ret;
}

static GstCaps *
gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstCaps *result;

    result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
                                             filter);

    GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);

    return result;
}

static gboolean
gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    gboolean ret;
    GstEventType type = GST_EVENT_TYPE(event);

    switch (type)
    {
    case GST_EVENT_STREAM_START:
    {
        GstStructure *s;
        GstEvent *event;
        GST_DEBUG_OBJECT(self, "new private event");
        s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
        event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
        GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", event);
        gst_pad_push_event (decoder->sinkpad, event);
        GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", event);
        break;
    }
    case GST_EVENT_FLUSH_START:
        GST_DEBUG_OBJECT(self, "flush start");

        g_mutex_lock (&self->res_chg_lock);
        while (self->is_res_chg)
        {
            GST_LOG_OBJECT(decoder, "wait resolution change finish");
            g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
        }
        g_mutex_unlock (&self->res_chg_lock);

        self->last_out_pts = GST_CLOCK_TIME_NONE;
        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        break;
    default:
        break;
    }

    ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);

    switch (type)
    {
    case GST_EVENT_FLUSH_START:
        /* The processing thread should stop now, wait for it */
        gst_pad_stop_task(decoder->srcpad);
        GST_DEBUG_OBJECT(self, "flush start done");
        break;
    default:
        break;
    }

    return ret;
}
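/* Illustrative sketch (assumption, not a documented contract): an upstream
 * element that handles the custom "private_signal" event pushed above could
 * retrieve the advertised object and signal name roughly as
 *
 *   const GstStructure *st = gst_event_get_structure (event);
 *   gpointer obj = NULL;
 *   const gchar *sig = NULL;
 *   if (st && gst_structure_has_name (st, "private_signal")) {
 *       gst_structure_get (st, "obj_ptr", G_TYPE_POINTER, &obj, NULL);
 *       sig = gst_structure_get_string (st, "sig_name");
 *   }
 */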

static GstStateChangeReturn
gst_aml_v4l2_video_dec_change_state(GstElement *element,
                                    GstStateChange transition)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
    GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);

    if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    {
        g_atomic_int_set(&self->active, FALSE);
        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        gst_pad_stop_task(decoder->srcpad);
    }

    return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
}

static void
gst_aml_v4l2_video_dec_dispose(GObject *object)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    gst_caps_replace(&self->probed_sinkcaps, NULL);
    gst_caps_replace(&self->probed_srccaps, NULL);

    G_OBJECT_CLASS(parent_class)->dispose(object);
}

static void
gst_aml_v4l2_video_dec_finalize(GObject *object)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    gst_aml_v4l2_object_destroy(self->v4l2capture);
    gst_aml_v4l2_object_destroy(self->v4l2output);

    g_mutex_clear(&self->res_chg_lock);
    g_cond_clear(&self->res_chg_cond);

#if GST_IMPORT_LGE_PROP
    if (self->lge_ctxt)
    {
        if (self->lge_ctxt->app_type)
            g_free(self->lge_ctxt->app_type);
        if (self->lge_ctxt->res_info.coretype)
            g_free(self->lge_ctxt->res_info.coretype);
        free(self->lge_ctxt);
    }

#endif

    G_OBJECT_CLASS(parent_class)->finalize(object);
}

static void
gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
{
    /* V4L2 objects are created in subinstance_init */
    self->last_out_pts = GST_CLOCK_TIME_NONE;
    self->is_secure_path = FALSE;
    self->is_res_chg = FALSE;
    self->is_interlace = FALSE;
    g_mutex_init(&self->res_chg_lock);
    g_cond_init(&self->res_chg_cond);
#if GST_IMPORT_LGE_PROP
    self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
    memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
#endif
}

static void
gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
    GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);

    gst_video_decoder_set_packetized(decoder, TRUE);

    self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                               GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
                                               V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
                                               gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
    self->v4l2output->no_initial_format = TRUE;
    self->v4l2output->keep_aspect = FALSE;
    self->v4l2output->is_svp = FALSE;

    self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                                GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
                                                V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
                                                gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
    self->v4l2capture->need_wait_event = TRUE;
    self->v4l2capture->need_drop_event = FALSE;
}
1593
1594static void
1595gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1596{
1597 GstElementClass *element_class;
1598 GObjectClass *gobject_class;
1599 GstVideoDecoderClass *video_decoder_class;
1600
1601 parent_class = g_type_class_peek_parent(klass);
1602
1603 element_class = (GstElementClass *)klass;
1604 gobject_class = (GObjectClass *)klass;
1605 video_decoder_class = (GstVideoDecoderClass *)klass;
1606
1607 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1608 "AML V4L2 Video Decoder");
1609
1610 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1611 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1612 gobject_class->set_property =
1613 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1614 gobject_class->get_property =
1615 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1616
1617 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1618 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1619 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1620 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1621 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1622 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1623 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1624 video_decoder_class->set_format =
1625 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1626 video_decoder_class->negotiate =
1627 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1628 video_decoder_class->decide_allocation =
1629 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1630 /* FIXME propose_allocation or not ? */
1631 video_decoder_class->handle_frame =
1632 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1633 video_decoder_class->getcaps =
1634 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1635 video_decoder_class->src_query =
1636 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1637 video_decoder_class->sink_event =
1638 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1639
1640 element_class->change_state =
1641 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1642
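    /* "decoded-pts" is emitted with the presentation timestamp (guint64) of a
     * frame the decoder has finished decoding. A minimal application-side
     * connection sketch (hypothetical callback and variable names):
     *
     *   static void on_decoded_pts(GstElement *dec, guint64 pts, gpointer user_data)
     *   {
     *       g_print("decoded pts: %" G_GUINT64_FORMAT "\n", pts);
     *   }
     *   ...
     *   g_signal_connect(dec, "decoded-pts", G_CALLBACK(on_decoded_pts), NULL);
     */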
xuesong.jiang406ee302023-06-28 03:45:22 +00001643 g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
1644 G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
1645 G_SIGNAL_RUN_LAST,
1646 0, /* class offset */
1647 NULL, /* accumulator */
1648 NULL, /* accu data */
1649 g_cclosure_marshal_generic,
1650 G_TYPE_NONE,
1651 1,
1652 G_TYPE_UINT64);
1653
xuesong.jiangae1548e2022-05-06 16:38:46 +08001654 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001655#if GST_IMPORT_LGE_PROP
1656 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1657#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001658}
1659
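/* Class init run for every per-codec subclass: the class_data (cdata) carries
 * the probed device path and the codec-specific sink/src caps, which become
 * the element's pad templates and metadata. */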
1660static void
1661gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1662{
1663 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1664 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1665 GstAmlV4l2VideoDecCData *cdata = data;
1666
1667 klass->default_device = cdata->device;
1668
    /* Note: gst_pad_template_new() takes the floating ref from the caps */
1670 gst_element_class_add_pad_template(element_class,
1671 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1672 cdata->sink_caps));
1673 gst_element_class_add_pad_template(element_class,
1674 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1675 cdata->src_caps));
1676
1677 gst_element_class_set_metadata(element_class, cdata->longname,
1678 "Codec/Decoder/Video/Hardware", cdata->description,
1679 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1680
1681 gst_caps_unref(cdata->sink_caps);
1682 gst_caps_unref(cdata->src_caps);
1683 g_free(cdata);
1684}
1685
1686/* Probing functions */
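/* Returns TRUE when the device's sink caps are all compressed (codec) formats
 * and its src caps are all raw formats, i.e. the device behaves as a decoder. */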
1687gboolean
1688gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1689{
1690 gboolean ret = FALSE;
1691
1692 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1693 ret = TRUE;
1694
1695 return ret;
1696}
1697
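/* Derives the element long name, description and GType name from one caps
 * structure, e.g. "video/x-h264" yields "AML V4L2 H264 Decoder" and the type
 * name "amlv4l2h264dec" (or "amlv4l2<basename>h264dec" if that name already
 * exists). */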
1698static gchar *
1699gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1700 const gchar *basename)
1701{
1702 gchar *codec_name = NULL;
1703 gchar *type_name = NULL;
1704 gboolean got_value = FALSE;
1705
1706#define SET_META(codec) \
1707 G_STMT_START \
1708 { \
1709 cdata->longname = "AML V4L2 " codec " Decoder"; \
1710 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1711 codec_name = g_ascii_strdown(codec, -1); \
1712 } \
1713 G_STMT_END
1714
1715 if (gst_structure_has_name(s, "image/jpeg"))
1716 {
1717 SET_META("JPEG");
1718 }
1719 else if (gst_structure_has_name(s, "video/mpeg"))
1720 {
1721 gint mpegversion = 0;
1722 gint *list = NULL;
        got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
        if (FALSE == got_value)
        {
            /* mpegversion can also be a list in the template caps,
             * e.g. { 1, 2 }; fall back to its first entry */
            got_value = gst_structure_get_list(s, "mpegversion", &list);
            if (TRUE == got_value && (1 == *list || 2 == *list))
            {
                SET_META("MPEG2");
            }
            else
            {
                SET_META("MPEG4");
            }
        }
        else if (1 == mpegversion || 2 == mpegversion)
        {
            SET_META("MPEG2");
        }
        else
        {
            SET_META("MPEG4");
        }
1740 }
1741 else if (gst_structure_has_name(s, "video/x-h263"))
1742 {
1743 SET_META("H263");
1744 }
1745 else if (gst_structure_has_name(s, "video/x-fwht"))
1746 {
1747 SET_META("FWHT");
1748 }
1749 else if (gst_structure_has_name(s, "video/x-h264"))
1750 {
1751 SET_META("H264");
1752 }
1753 else if (gst_structure_has_name(s, "video/x-h265"))
1754 {
1755 SET_META("H265");
1756 }
1757 else if (gst_structure_has_name(s, "video/x-wmv"))
1758 {
1759 SET_META("VC1");
1760 }
1761 else if (gst_structure_has_name(s, "video/x-vp8"))
1762 {
1763 SET_META("VP8");
1764 }
1765 else if (gst_structure_has_name(s, "video/x-vp9"))
1766 {
1767 SET_META("VP9");
1768 }
1769 else if (gst_structure_has_name(s, "video/x-av1"))
1770 {
1771 SET_META("AV1");
1772 }
1773 else if (gst_structure_has_name(s, "video/x-bayer"))
1774 {
1775 SET_META("BAYER");
1776 }
1777 else if (gst_structure_has_name(s, "video/x-sonix"))
1778 {
1779 SET_META("SONIX");
1780 }
1781 else if (gst_structure_has_name(s, "video/x-pwc1"))
1782 {
1783 SET_META("PWC1");
1784 }
1785 else if (gst_structure_has_name(s, "video/x-pwc2"))
1786 {
1787 SET_META("PWC2");
1788 }
1789 else
1790 {
        /* This code should be kept in sync with the CODEC formats exposed by
         * gstamlv4l2object.c. This warning will only occur if we forget to
         * also add a format here. */
1794 gchar *s_str = gst_structure_to_string(s);
1795 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1796 "bug, please report at https://bugs.gnome.org",
1797 s_str);
1798 g_free(s_str);
1799 }
1800
1801 if (codec_name)
1802 {
1803 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1804 if (g_type_from_name(type_name) != 0)
1805 {
1806 g_free(type_name);
1807 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1808 }
1809
1810 g_free(codec_name);
1811 }
1812
1813 return type_name;
1814#undef SET_META
1815}
1816
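/* Registers one decoder element (a dynamic subtype of GstAmlV4l2VideoDec) for
 * every codec structure found in sink_caps. A minimal call sketch as the
 * plugin's probing code might use it (the device path, basename and caps
 * variables here are hypothetical; the real caller passes what it probed from
 * the actual /dev/video node):
 *
 *   if (gst_aml_v4l2_is_video_dec(probed_sink_caps, probed_src_caps))
 *       gst_aml_v4l2_video_dec_register(plugin, "video", "/dev/video26",
 *                                       probed_sink_caps, probed_src_caps);
 */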
1817void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1818 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1819{
1820 gint i;
1821
1822 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1823 {
1824 GstAmlV4l2VideoDecCData *cdata;
1825 GstStructure *s;
1826 GTypeQuery type_query;
1827 GTypeInfo type_info = {
1828 0,
1829 };
1830 GType type, subtype;
1831 gchar *type_name;
1832
1833 s = gst_caps_get_structure(sink_caps, i);
1834
1835 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1836 cdata->device = g_strdup(device_path);
1837 cdata->sink_caps = gst_caps_new_empty();
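        /* Both pads advertise two variants of their caps: one carrying the
         * memory:DMABuf caps feature (set on the first copy below) and one
         * using plain system memory. */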
1838 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1839 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1840 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1841 cdata->src_caps = gst_caps_copy(src_caps);
1842 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1843 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1844 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1845
1846 /* Skip over if we hit an unmapped type */
1847 if (!type_name)
1848 {
1849 g_free(cdata);
1850 continue;
1851 }
1852
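        /* Dynamically register a per-codec subtype of GstAmlV4l2VideoDec; the
         * cdata set as class_data is consumed by _subclass_init() above. */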
1853 type = gst_aml_v4l2_video_dec_get_type();
1854 g_type_query(type, &type_query);
1855 memset(&type_info, 0, sizeof(type_info));
1856 type_info.class_size = type_query.class_size;
1857 type_info.instance_size = type_query.instance_size;
1858 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1859 type_info.class_data = cdata;
1860 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1861
1862 subtype = g_type_register_static(type, type_name, &type_info, 0);
1863 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1864 subtype))
1865 GST_WARNING("Failed to register plugin '%s'", type_name);
1866
1867 g_free(type_name);
1868 }
1869}
xuesong.jiang61ea8012022-05-12 15:38:17 +08001870
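/* Installs the LGE platform-specific properties used to hand over resource
 * information and to query decoded/undecoded data statistics. */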
1871#if GST_IMPORT_LGE_PROP
1872static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
1873{
1874 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
                                    /* GST_TYPE_STRUCTURE is a boxed type, and the property has to be
                                     * writable so the resource info can be handed to the decoder */
                                    g_param_spec_boxed("resource-info", "resource-info",
                                                       "Allocated H/W resource information that must be delivered to the decoder and the sink after resource acquisition completes",
                                                       GST_TYPE_STRUCTURE,
                                                       G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1879
1880 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
1881 g_param_spec_uint64("decoded-size", "decoded-size",
                                                        "Total amount of video ES the decoder element has decoded since the pipeline was constructed or last flushed, in bytes",
1883 0, G_MAXUINT64,
1884 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1885
1886 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
1887 g_param_spec_uint64("undecoded-size", "undecoded-size",
                                                        "Total amount of data queued in the video decoder element that has not been decoded yet, in bytes",
1889 0, G_MAXUINT64,
1890 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1891
1892 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
1893 g_param_spec_string("app-type", "app-type",
                                                        "Set the application type",
1895 "default_app",
1896 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1897
1898 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
1899 g_param_spec_boolean("clip-mode", "clip-mode",
                                                         "When seeking, content is played faster for a while by skipping frames",
1901 FALSE,
1902 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1903}
1904#endif