blob: 671a1d52fc434ade9d4e9e6856db8da17bf03c8a [file] [log] [blame]
xuesong.jiangae1548e2022-05-06 16:38:46 +08001/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
33#include <string.h>
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
/* Re-define the base class' stream lock/unlock macros so that every lock
 * transition is traced; helps diagnose deadlocks between the capture task
 * and the streaming thread. */
#ifdef GST_VIDEO_DECODER_STREAM_LOCK
#undef GST_VIDEO_DECODER_STREAM_LOCK
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder)                        \
    {                                                                 \
        GST_TRACE("aml v4l2 dec locking");                            \
        g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock);   \
        GST_TRACE("aml v4l2 dec locked");                             \
    }
#endif

#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
#undef GST_VIDEO_DECODER_STREAM_UNLOCK
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder)                      \
    {                                                                 \
        GST_TRACE("aml v4l2 dec unlocking");                          \
        g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
        GST_TRACE("aml v4l2 dec unlocked");                           \
    }
#endif

/* Absolute difference that is safe for unsigned operands (never
 * subtracts the larger value from the smaller, so no wrap-around). */
#ifndef ABSDIFF
#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
#endif
63
#if GST_IMPORT_LGE_PROP
/* Resource description pushed down by the LGE platform layer:
 * port assignments and the maximum decodable resolution. */
typedef struct _GstAmlResourceInfo
{
    gchar *coretype; /* decoder core name; owned by this struct (g_free'd) */
    gint videoport;
    gint audioport;
    gint maxwidth;
    gint maxheight;
    gint mixerport;
} GstAmlResourceInfo;

/* Per-instance context holding the LGE-specific property state. */
struct _GstAmlV4l2VideoDecLgeCtxt
{
    GstAmlResourceInfo res_info;
    guint64 dec_size;   /* decoded byte count reported to the upper layer */
    guint64 undec_size; /* undecoded byte count reported to the upper layer */
    gchar *app_type;    /* owned string set via the LGE_APP_TYPE property */
    gboolean clip_mode;
};
#endif
84
/* Per-subclass data captured at type-registration time: the probed
 * device node and the caps used to build the pad templates. */
typedef struct
{
    gchar *device;
    GstCaps *sink_caps;
    GstCaps *src_caps;
    const gchar *longname;
    const gchar *description;
} GstAmlV4l2VideoDecCData;
93
/* Property IDs: the shared V4L2 object properties, plus the LGE
 * platform extensions when enabled at build time. */
enum
{
    PROP_0,
    V4L2_STD_OBJECT_PROPS,
#if GST_IMPORT_LGE_PROP
    LGE_RESOURCE_INFO,
    LGE_DECODE_SIZE,
    LGE_UNDECODE_SIZE,
    LGE_APP_TYPE,
    LGE_CLIP_MODE
#endif
};
106
/* Signals emitted by this element; SIGNAL_DECODED_PTS announces each
 * decoded timestamp to listeners. */
enum
{
    SIGNAL_DECODED_PTS,
    MAX_SIGNAL
};

/* Signal IDs, filled in at class-init time. */
static guint g_signals[MAX_SIGNAL] = {0};
114
/* GObject boilerplate: abstract base type; concrete subclasses are
 * registered per probed /dev/video node. */
#define gst_aml_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
                       GST_TYPE_VIDEO_DECODER);

/* Forward declarations for helpers used before their definitions. */
static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
#if GST_IMPORT_LGE_PROP
static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800123
124static void
125gst_aml_v4l2_video_dec_set_property(GObject *object,
126 guint prop_id, const GValue *value, GParamSpec *pspec)
127{
128 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
129
130 switch (prop_id)
131 {
132 case PROP_CAPTURE_IO_MODE:
133 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
134 prop_id, value, pspec))
135 {
136 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
137 }
138 break;
139 case PROP_DUMP_FRAME_LOCATION:
140 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
141 prop_id, value, pspec))
142 {
143 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
144 }
145 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800146#if GST_IMPORT_LGE_PROP
147 case LGE_RESOURCE_INFO:
148 {
149 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
150 GstStructure *r_info = g_value_get_object(value);
151 if (r_info)
152 {
153 if (gst_structure_has_field(r_info, "coretype"))
154 {
155 if (self->lge_ctxt->res_info.coretype)
156 g_free(self->lge_ctxt->res_info.coretype);
157 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
158 }
159 if (gst_structure_has_field(r_info, "videoport"))
160 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
161 if (gst_structure_has_field(r_info, "audioport"))
162 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
163 if (gst_structure_has_field(r_info, "maxwidth"))
164 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
165 if (gst_structure_has_field(r_info, "maxheight"))
166 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
167 if (gst_structure_has_field(r_info, "mixerport"))
168 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
169 }
170 break;
171 }
172 case LGE_APP_TYPE:
173 {
174 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
175 if (self->lge_ctxt->app_type)
176 g_free(self->lge_ctxt->app_type);
177 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
178 break;
179 }
180 case LGE_CLIP_MODE:
181 {
182 GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
183 self->lge_ctxt->clip_mode = g_strdup(g_value_get_boolean(value));
184 break;
185 }
186#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800187 /* By default, only set on output */
188 default:
189 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
190 prop_id, value, pspec))
191 {
192 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
193 }
194 break;
195 }
196}
197
/* GObject::get_property implementation.
 *
 * The capture io-mode is read from the capture V4L2 object, the LGE
 * size counters are answered from the LGE context, and every other
 * property is read from the output (sink-side) V4L2 object.
 */
static void
gst_aml_v4l2_video_dec_get_property(GObject *object,
                                    guint prop_id, GValue *value, GParamSpec *pspec)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    switch (prop_id)
    {
    case PROP_CAPTURE_IO_MODE:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;

#if GST_IMPORT_LGE_PROP
    case LGE_DECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
        /* NOTE(review): -1 looks like a "not implemented" placeholder;
         * dec_size is a guint64 pushed into a gint GValue — confirm the
         * consumer expects this narrowing. */
        self->lge_ctxt->dec_size = -1;
        g_value_set_int(value, self->lge_ctxt->dec_size);
        break;
    }
    case LGE_UNDECODE_SIZE:
    {
        GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
        /* NOTE(review): same placeholder pattern as LGE_DECODE_SIZE. */
        self->lge_ctxt->undec_size = -1;
        g_value_set_int(value, self->lge_ctxt->undec_size);
        break;
    }
#endif

    /* By default read from output */
    default:
        if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
                                                     prop_id, value, pspec))
        {
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        }
        break;
    }
}
241
242static gboolean
243gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
244{
245 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
246 GstCaps *codec_caps;
247
248 GST_DEBUG_OBJECT(self, "Opening");
249
250 if (!gst_aml_v4l2_object_open(self->v4l2output))
251 goto failure;
252
253 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
254 goto failure;
255
256 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
257 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
258 codec_caps);
259 gst_caps_unref(codec_caps);
260
261 if (gst_caps_is_empty(self->probed_sinkcaps))
262 goto no_encoded_format;
263
264 return TRUE;
265
266no_encoded_format:
267 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
268 (_("Decoder on device %s has no supported input format"),
269 self->v4l2output->videodev),
270 (NULL));
271 goto failure;
272
273failure:
274 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
275 gst_aml_v4l2_object_close(self->v4l2output);
276
277 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
278 gst_aml_v4l2_object_close(self->v4l2capture);
279
280 gst_caps_replace(&self->probed_srccaps, NULL);
281 gst_caps_replace(&self->probed_sinkcaps, NULL);
282
283 return FALSE;
284}
285
286static gboolean
287gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
288{
289 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
290
291 GST_DEBUG_OBJECT(self, "Closing");
292
293 gst_aml_v4l2_object_close(self->v4l2output);
294 gst_aml_v4l2_object_close(self->v4l2capture);
295 gst_caps_replace(&self->probed_srccaps, NULL);
296 gst_caps_replace(&self->probed_sinkcaps, NULL);
297
298 return TRUE;
299}
300
301static gboolean
302gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
303{
304 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
305
306 GST_DEBUG_OBJECT(self, "Starting");
307
308 gst_aml_v4l2_object_unlock(self->v4l2output);
309 g_atomic_int_set(&self->active, TRUE);
310 self->output_flow = GST_FLOW_OK;
311
312 return TRUE;
313}
314
/* GstVideoDecoder::stop vfunc.
 *
 * Unlocks both queues so any blocked ioctl returns, stops the capture
 * task on srcpad, resets the task flow state under the stream lock,
 * then tears down both V4L2 queues and the cached input state.
 */
static gboolean
gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Stopping");

    /* Wake up anything blocked on the output/capture queues first */
    gst_aml_v4l2_object_unlock(self->v4l2output);
    gst_aml_v4l2_object_unlock(self->v4l2capture);

    /* Wait for capture thread to stop */
    gst_pad_stop_task(decoder->srcpad);

    GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    self->output_flow = GST_FLOW_OK;
    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    /* Should have been flushed already */
    g_assert(g_atomic_int_get(&self->active) == FALSE);

    gst_aml_v4l2_object_stop(self->v4l2output);
    gst_aml_v4l2_object_stop(self->v4l2capture);

    if (self->input_state)
    {
        gst_video_codec_state_unref(self->input_state);
        self->input_state = NULL;
    }

    GST_DEBUG_OBJECT(self, "Stopped");

    return TRUE;
}
348
349static gboolean
350gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
351 GstVideoCodecState *state)
352{
353 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
354 gboolean ret = TRUE;
355 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
356 GstCaps *caps;
357
358 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
359 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
360
361 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
362 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
363
364 if (self->input_state)
365 {
366 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
367 {
368 GST_DEBUG_OBJECT(self, "Compatible caps");
369 goto done;
370 }
371 gst_video_codec_state_unref(self->input_state);
372 self->input_state = NULL;
373
374 gst_aml_v4l2_video_dec_finish(decoder);
375 gst_aml_v4l2_object_stop(self->v4l2output);
376
377 /* The renegotiation flow don't blend with the base class flow. To properly
378 * stop the capture pool, if the buffers can't be orphaned, we need to
379 * reclaim our buffers, which will happend through the allocation query.
380 * The allocation query is triggered by gst_video_decoder_negotiate() which
381 * requires the output caps to be set, but we can't know this information
382 * as we rely on the decoder, which requires the capture queue to be
383 * stopped.
384 *
385 * To workaround this issue, we simply run an allocation query with the
386 * old negotiated caps in order to drain/reclaim our buffers. That breaks
387 * the complexity and should not have much impact in performance since the
388 * following allocation query will happen on a drained pipeline and won't
389 * block. */
390 if (self->v4l2capture->pool &&
391 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
392 {
393 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
394 if (caps)
395 {
396 GstQuery *query = gst_query_new_allocation(caps, FALSE);
397 gst_pad_peer_query(decoder->srcpad, query);
398 gst_query_unref(query);
399 gst_caps_unref(caps);
400 }
401 }
402
403 gst_aml_v4l2_object_stop(self->v4l2capture);
404 self->output_flow = GST_FLOW_OK;
405 }
406
407 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
408 {
409 GST_ERROR_OBJECT(self, "config output drm mode error");
410 goto done;
411 }
412
xuesong.jiang22a9b112023-05-24 09:01:59 +0000413 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
414 {
415 GST_ERROR_OBJECT(self, "config output stream mode error");
416 goto done;
417 }
418
xuesong.jiangae1548e2022-05-06 16:38:46 +0800419 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
420
421 gst_caps_replace(&self->probed_srccaps, NULL);
422 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
423 gst_aml_v4l2_object_get_raw_caps());
424
425 if (gst_caps_is_empty(self->probed_srccaps))
426 goto no_raw_format;
427
428 caps = gst_caps_copy(self->probed_srccaps);
429 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
430 gst_caps_append(self->probed_srccaps, caps);
431 if (ret)
432 self->input_state = gst_video_codec_state_ref(state);
433 else
434 gst_aml_v4l2_error(self, &error);
435
436done:
437 return ret;
438
439no_raw_format:
440 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
441 (_("Decoder on device %s has no supported output format"),
442 self->v4l2output->videodev),
443 (NULL));
444 return GST_FLOW_ERROR;
445}
446
/* GstVideoDecoder::flush vfunc.
 *
 * Stops the capture task if it is still running (reverse playback can
 * keep it alive), clears the flushing state of both queues, then
 * flushes both buffer pools so decoding restarts from a clean state.
 */
static gboolean
gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);

    GST_DEBUG_OBJECT(self, "Flushed");

    /* Ensure the processing thread has stopped for the reverse playback
     * discount case */
    if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
    {
        /* Drop the stream lock so the capture task can exit, retake after */
        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

        gst_aml_v4l2_object_unlock(self->v4l2output);
        gst_aml_v4l2_object_unlock(self->v4l2capture);
        gst_pad_stop_task(decoder->srcpad);
        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
    }

    self->output_flow = GST_FLOW_OK;

    gst_aml_v4l2_object_unlock_stop(self->v4l2output);
    gst_aml_v4l2_object_unlock_stop(self->v4l2capture);

    if (self->v4l2output->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);

    /* gst_aml_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
     * called after gst_aml_v4l2_object_unlock_stop() stopped flushing the buffer
     * pool. */
    if (self->v4l2capture->pool)
        gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);

    return TRUE;
}
482
483static gboolean
484gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
485{
486 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
487
xuesong.jiang681d3602022-06-24 21:23:35 +0800488 if (TRUE == self->v4l2output->is_svp)
489 {
490 GstStructure *s;
491 GstEvent *event;
492
493 s = gst_structure_new_empty ("IS_SVP");
494 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
495 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
496 gst_pad_push_event (decoder->srcpad, event);
497 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
498 }
499
xuesong.jiangae1548e2022-05-06 16:38:46 +0800500 /* We don't allow renegotiation without carefull disabling the pool */
501 if (self->v4l2capture->pool &&
502 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
503 return TRUE;
504
505 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
506}
507
508static gboolean
509gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
510{
511 struct v4l2_decoder_cmd dcmd = {
512 0,
513 };
514
515 GST_DEBUG_OBJECT(v4l2object->element,
516 "sending v4l2 decoder command %u with flags %u", cmd, flags);
517
518 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
519 return FALSE;
520
521 dcmd.cmd = cmd;
522 dcmd.flags = flags;
523 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
524 goto dcmd_failed;
525
526 return TRUE;
527
528dcmd_failed:
529 if (errno == ENOTTY)
530 {
531 GST_INFO_OBJECT(v4l2object->element,
532 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
533 cmd, flags, v4l2object->videodev, g_strerror(errno));
534 }
535 else
536 {
537 GST_ERROR_OBJECT(v4l2object->element,
538 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
539 cmd, flags, v4l2object->videodev, g_strerror(errno));
540 }
541 return FALSE;
542}
543
/* Drain the decoder: send V4L2_DEC_CMD_STOP (or, as a fallback, queue
 * empty buffers) and wait until the capture task has pushed everything.
 *
 * Called with the stream lock held; the lock is released while waiting
 * and re-taken before returning. When draining ended in FLUSHING, the
 * capture task's recorded flow state is returned instead.
 */
static GstFlowReturn
gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
    GstFlowReturn ret = GST_FLOW_OK;
    GstBuffer *buffer;

    /* Nothing to drain if the capture task never started */
    if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
        goto done;

    GST_DEBUG_OBJECT(self, "Finishing decoding");

    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);

    if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
    {
        GstTask *task = decoder->srcpad->task;

        /* If the decoder stop command succeeded, just wait until processing is
         * finished */
        GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
        GST_OBJECT_LOCK(task);
        while (GST_TASK_STATE(task) == GST_TASK_STARTED)
            GST_TASK_WAIT(task);
        GST_OBJECT_UNLOCK(task);
        ret = GST_FLOW_FLUSHING;
    }
    else
    {
        /* otherwise keep queuing empty buffers until the processing thread has
         * stopped, _pool_process() will return FLUSHING when that happened */
        while (ret == GST_FLOW_OK)
        {
            GST_DEBUG_OBJECT(self, "queue empty output buf");
            buffer = gst_buffer_new();
            ret =
                gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
            gst_buffer_unref(buffer);
        }
    }

    /* and ensure the processing thread has stopped in case another error
     * occured. */
    gst_aml_v4l2_object_unlock(self->v4l2capture);
    gst_pad_stop_task(decoder->srcpad);
    GST_VIDEO_DECODER_STREAM_LOCK(decoder);

    if (ret == GST_FLOW_FLUSHING)
        ret = self->output_flow;

    GST_DEBUG_OBJECT(decoder, "Done draining buffers");

    /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */

done:
    return ret;
}
601
602static GstFlowReturn
603gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
604{
605 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
606
607 GST_DEBUG_OBJECT(self, "Draining...");
608 gst_aml_v4l2_video_dec_finish(decoder);
609 gst_aml_v4l2_video_dec_flush(decoder);
610
611 return GST_FLOW_OK;
612}
613
614static GstVideoCodecFrame *
615gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
616{
617 GstVideoCodecFrame *frame = NULL;
618 GList *frames, *l;
619 gint count = 0;
620
621 frames = gst_video_decoder_get_frames(decoder);
622
623 for (l = frames; l != NULL; l = l->next)
624 {
625 GstVideoCodecFrame *f = l->data;
626
627 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
628 frame = f;
629
630 count++;
631 }
632
633 if (frame)
634 {
635 GST_LOG_OBJECT(decoder,
636 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
637 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
638 gst_video_codec_frame_ref(frame);
639 }
640
641 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
642
643 return frame;
644}
645
fei.dengbee20862022-06-14 14:59:48 +0800646static GstVideoCodecFrame *
xuesong.jiange24aef92023-06-16 06:39:10 +0000647gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
fei.dengbee20862022-06-14 14:59:48 +0800648{
649 GstVideoCodecFrame *frame = NULL;
650 GList *frames, *l;
651 gint count = 0;
652
xuesong.jiange24aef92023-06-16 06:39:10 +0000653 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
654
fei.dengbee20862022-06-14 14:59:48 +0800655 frames = gst_video_decoder_get_frames(decoder);
656
657 for (l = frames; l != NULL; l = l->next)
658 {
659 GstVideoCodecFrame *f = l->data;
fei.denge9458472023-04-18 02:05:48 +0000660
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800661 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
fei.dengbee20862022-06-14 14:59:48 +0800662 frame = f;
fei.dengbee20862022-06-14 14:59:48 +0800663 } else {
664 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
665 frame = f;
666 }
667
668 count++;
669 }
670
671 if (frame)
672 {
673 GST_LOG_OBJECT(decoder,
674 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
675 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
676 gst_video_codec_frame_ref(frame);
677 }
678
679 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
680
xuesong.jiange24aef92023-06-16 06:39:10 +0000681 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
fei.dengbee20862022-06-14 14:59:48 +0800682 return frame;
683}
684
xuesong.jiange24aef92023-06-16 06:39:10 +0000685static GstVideoCodecFrame *
686gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
687{
688 GstVideoCodecFrame *frame = NULL;
689 GList *frames, *l;
690 gint count = 0;
691
692 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
693
694 frames = gst_video_decoder_get_frames(decoder);
695 guint frames_len = 0;
696 frames_len = g_list_length(frames);
697 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
698
699 frame = frames->data;
700
701 for (l = frames; l != NULL; l = l->next)
702 {
703 GstVideoCodecFrame *f = l->data;
704
705 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
706 {
707 /* found the right frame */
708 frame = f;
709 break;
710 }
711 else if(GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
712 {
713 GST_LOG_OBJECT(decoder,
714 "stream mode drop frame %d %" GST_TIME_FORMAT,
715 frame->system_frame_number, GST_TIME_ARGS(frame->pts));
716
717 gst_video_codec_frame_ref(f);
718 // gst_video_decoder_drop_frame(decoder, f);
719 gst_video_decoder_release_frame(decoder, f);
720 }
721 else
722 {
723 GST_LOG_OBJECT (decoder, "dbg");
724 }
725 }
726
727 if (frame)
728 {
729 guint l_len = 0;
730 l = gst_video_decoder_get_frames(decoder);
731 l_len = g_list_length(l);
732 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
733
734 GST_LOG_OBJECT(decoder,
735 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
736 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
737 gst_video_codec_frame_ref(frame);
738 }
739
740 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
741
742 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
743 return frame;
744}
745
746static GstVideoCodecFrame *
747gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
748{
749 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
750 if (self->v4l2output->stream_mode)
751 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
752 else
753 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
754}
755
xuesong.jiangae1548e2022-05-06 16:38:46 +0800756static gboolean
757gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
758 GstStructure *structure, gpointer user_data)
759{
760 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
761 GstVideoAlignment *align = &self->v4l2capture->align;
762 GstVideoInfo *info = &self->v4l2capture->info;
763 int width, height;
764
765 if (!gst_structure_get_int(structure, "width", &width))
766 return TRUE;
767
768 if (!gst_structure_get_int(structure, "height", &height))
769 return TRUE;
770
771 if (align->padding_left != 0 || align->padding_top != 0 ||
772 height != info->height + align->padding_bottom)
773 return TRUE;
774
775 if (height == info->height + align->padding_bottom)
776 {
777 /* Some drivers may round up width to the padded with */
778 if (width == info->width + align->padding_right)
779 gst_structure_set(structure,
780 "width", G_TYPE_INT, width - align->padding_right,
781 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
782 /* Some drivers may keep visible width and only round up bytesperline */
783 else if (width == info->width)
784 gst_structure_set(structure,
785 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
786 }
787
788 return TRUE;
789}
790
791static void
sheng.liubcf036c2022-06-21 15:55:42 +0800792gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
sheng.liub56bbc52022-06-21 11:02:33 +0800793{
794 struct v4l2_event evt;
795 gint ret;
796
797 memset (&evt, 0x00, sizeof (struct v4l2_event));
798 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
799 if (ret < 0)
800 {
801 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
802 return;
803 }
804
805 switch (evt.type)
806 {
807 case V4L2_EVENT_SOURCE_CHANGE:
808 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
809 break;
810 case V4L2_EVENT_EOS:
811 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
812 break;
813 default:
814 break;
815 }
816
817 return;
818}
819
820static void
xuesong.jiangae1548e2022-05-06 16:38:46 +0800821gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
822{
823 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
824 GstAmlV4l2BufferPool *v4l2_pool;
825 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
826 GstBufferPool *pool;
827 GstVideoCodecFrame *frame;
828 GstBuffer *buffer = NULL;
829 GstFlowReturn ret;
830
831 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
832 {
833 GstVideoInfo info;
834 GstVideoCodecState *output_state;
xuesong.jiang282ca572023-05-05 09:03:32 +0000835 GstCaps *acquired_caps, *available_caps, *caps, *filter;
xuesong.jiangae1548e2022-05-06 16:38:46 +0800836 GstStructure *st;
837
838 GST_DEBUG_OBJECT(self, "waitting source change event");
839 /* Wait until received SOURCE_CHANGE event to get right video format */
840 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
841 {
842 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
843 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
844 {
845 GST_DEBUG_OBJECT(self, "Received source change event");
846 break;
847 }
848 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
849 {
850 GST_DEBUG_OBJECT(self, "Received eos event");
851 goto beach;
852 }
853 else if (ret != GST_FLOW_OK)
854 {
855 GST_ERROR_OBJECT(self, "dqevent error");
856 goto beach;
857 }
858 }
859 self->v4l2capture->need_wait_event = FALSE;
860
sheng.liu0c77f6c2022-06-17 21:33:20 +0800861 if (TRUE == self->v4l2output->is_svp)
862 {
863 GstPad *peer;
864 GstStructure *s;
865 GstEvent *event;
866
867 peer = gst_pad_get_peer (decoder->srcpad);
868 if (peer)
869 {
870 s = gst_structure_new_empty ("IS_SVP");
871 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
872 gst_pad_send_event (peer, event);
873 GST_DEBUG_OBJECT(self, "Send SVP Event");
874 gst_object_unref (peer);
875 }
876 }
877
sheng.liub56bbc52022-06-21 11:02:33 +0800878 if (self->v4l2capture->need_drop_event)
879 {
880 // drop V4L2_EVENT_SOURCE_CHANGE
881 gst_v4l2_drop_event(self->v4l2capture);
882 self->v4l2capture->need_drop_event = FALSE;
883 }
884
xuesong.jiangae1548e2022-05-06 16:38:46 +0800885 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
886 goto not_negotiated;
887
888 /* Create caps from the acquired format, remove the format field */
889 acquired_caps = gst_video_info_to_caps(&info);
890 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
891 st = gst_caps_get_structure(acquired_caps, 0);
xuesong.jiang282ca572023-05-05 09:03:32 +0000892 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
893
894 /* Probe currently available pixel formats */
895 available_caps = gst_caps_copy(self->probed_srccaps);
896 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
897
898 /* Replace coded size with visible size, we want to negotiate visible size
899 * with downstream, not coded size. */
900 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
901
902 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800903 caps = gst_caps_copy(filter);
904 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
905 gst_caps_append(filter, caps);
906
907 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
908 gst_caps_unref(acquired_caps);
xuesong.jiang282ca572023-05-05 09:03:32 +0000909 gst_caps_unref(available_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800910 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
911 gst_caps_unref(filter);
912
913 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
914 if (gst_caps_is_empty(caps))
915 {
916 gst_caps_unref(caps);
917 goto not_negotiated;
918 }
919
920 /* Fixate pixel format */
921 caps = gst_caps_fixate(caps);
922
923 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
924
925 /* Try to set negotiated format, on success replace acquired format */
926 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
927 gst_video_info_from_caps(&info, caps);
928 else
929 gst_aml_v4l2_clear_error(&error);
930 gst_caps_unref(caps);
931
932 output_state = gst_video_decoder_set_output_state(decoder,
933 info.finfo->format, info.width, info.height, self->input_state);
934
935 /* Copy the rest of the information, there might be more in the future */
936 output_state->info.interlace_mode = info.interlace_mode;
937 gst_video_codec_state_unref(output_state);
938
939 if (!gst_video_decoder_negotiate(decoder))
940 {
941 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
942 goto flushing;
943 else
944 goto not_negotiated;
945 }
946
947 /* Ensure our internal pool is activated */
948 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
949 TRUE))
950 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800951
952 g_mutex_lock(&self->res_chg_lock);
953 GST_LOG_OBJECT(decoder, "signal resolution changed");
954 self->is_res_chg = FALSE;
955 g_cond_signal(&self->res_chg_cond);
956 g_mutex_unlock(&self->res_chg_lock);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800957 }
958
959 GST_LOG_OBJECT(decoder, "Allocate output buffer");
960
961 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
962
963 self->output_flow = GST_FLOW_OK;
964 do
965 {
966 /* We cannot use the base class allotate helper since it taking the internal
967 * stream lock. we know that the acquire may need to poll until more frames
968 * comes in and holding this lock would prevent that.
969 */
970 pool = gst_video_decoder_get_buffer_pool(decoder);
971
972 /* Pool may be NULL if we started going to READY state */
973 if (pool == NULL)
974 {
fei.dengbee20862022-06-14 14:59:48 +0800975 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800976 ret = GST_FLOW_FLUSHING;
977 goto beach;
978 }
979
980 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
fei.dengccc89632022-07-15 19:10:17 +0800981 //calculate a new pts for interlace stream
982 if (ret == GST_FLOW_OK &&
983 self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
984 {
985 //if buffer pts is valid, reduce 1/2 duration
986 if (GST_BUFFER_DURATION_IS_VALID(buffer))
987 {
988 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
989 }
990 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
991 //reset pts
fei.denga6ae3282022-07-15 19:50:30 +0800992 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
fei.dengccc89632022-07-15 19:10:17 +0800993 {
994 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
995 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
996 }
997 }
998
xuesong.jiangae1548e2022-05-06 16:38:46 +0800999 g_object_unref(pool);
1000
sheng.liu4e01a472022-06-21 15:38:25 +08001001 if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER) {
sheng.liubcf036c2022-06-21 15:55:42 +08001002 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
sheng.liub56bbc52022-06-21 11:02:33 +08001003 self->v4l2capture->need_drop_event = TRUE;
1004 goto beach;
1005 }
1006
sheng.liu8d18ed22022-05-26 17:28:15 +08001007 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1008 {
1009 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001010
1011 g_mutex_lock (&self->res_chg_lock);
1012 self->is_res_chg = TRUE;
1013 g_mutex_unlock (&self->res_chg_lock);
1014
sheng.liu8d18ed22022-05-26 17:28:15 +08001015 gst_aml_v4l2_object_stop(self->v4l2capture);
1016 return;
1017 }
1018
fei.dengbee20862022-06-14 14:59:48 +08001019 if (ret != GST_FLOW_OK) {
1020 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001021 goto beach;
fei.dengbee20862022-06-14 14:59:48 +08001022 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001023
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001024 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001025 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
xuesong.jiang406ee302023-06-28 03:45:22 +00001026
1027 GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
1028 g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));
1029
xuesong.jiangae1548e2022-05-06 16:38:46 +08001030 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1031 {
1032 gst_aml_v4l2_object_stop(self->v4l2capture);
1033 return;
1034 }
1035
1036 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
1037
1038 if (ret != GST_FLOW_OK)
1039 goto beach;
1040
fei.dengbee20862022-06-14 14:59:48 +08001041 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001042 if (frame)
1043 {
zengliang.li32cb11e2022-11-24 12:10:26 +08001044 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1045 {
zengliang.li92ff6822023-06-06 07:12:52 +00001046 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1047 {
1048 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1049 {
1050 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1051 }
1052 else
1053 {
1054 GST_WARNING_OBJECT (decoder,"sorry,we have no baseline to calculate pts");
1055 goto beach;
1056 }
1057 }
1058 else
1059 {
1060 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1061 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1062 }
zengliang.li32cb11e2022-11-24 12:10:26 +08001063 }
fei.dengccc89632022-07-15 19:10:17 +08001064 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001065 frame->output_buffer = buffer;
fei.dengccc89632022-07-15 19:10:17 +08001066 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1067 frame->duration = GST_BUFFER_DURATION(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001068 buffer = NULL;
1069 ret = gst_video_decoder_finish_frame(decoder, frame);
1070
1071 if (ret != GST_FLOW_OK)
1072 goto beach;
1073 }
1074 else
1075 {
1076 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1077 gst_buffer_unref(buffer);
1078 }
1079
1080 return;
1081 /* ERRORS */
1082not_negotiated:
1083{
1084 GST_ERROR_OBJECT(self, "not negotiated");
1085 ret = GST_FLOW_NOT_NEGOTIATED;
1086 goto beach;
1087}
1088activate_failed:
1089{
1090 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1091 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1092 (_("Failed to allocate required memory.")),
1093 ("Buffer pool activation failed"));
1094 ret = GST_FLOW_ERROR;
1095 goto beach;
1096}
1097flushing:
1098{
1099 ret = GST_FLOW_FLUSHING;
1100 goto beach;
1101}
1102beach:
1103 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1104 gst_flow_get_name(ret));
1105
1106 gst_buffer_replace(&buffer, NULL);
1107 self->output_flow = ret;
1108 gst_aml_v4l2_object_unlock(self->v4l2output);
1109 gst_pad_pause_task(decoder->srcpad);
1110}
1111
1112static GstFlowReturn
1113gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1114 GstVideoCodecFrame *frame)
1115{
1116 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1117 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1118 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1119 GstFlowReturn ret = GST_FLOW_OK;
1120 gboolean processed = FALSE;
1121 GstBuffer *tmp;
1122 GstTaskState task_state;
1123 GstCaps *caps;
1124
1125 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1126
1127 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1128 goto flushing;
1129
1130 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1131 {
1132 if (!self->input_state)
1133 goto not_negotiated;
1134 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1135 &error))
1136 goto not_negotiated;
1137 }
1138
1139 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1140 {
1141 GstBuffer *codec_data;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001142 GstCapsFeatures *features = NULL;
1143
1144 features = gst_caps_get_features(self->input_state->caps, 0);
1145 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1146 {
1147 GST_DEBUG_OBJECT(self, "Is SVP");
1148 self->v4l2output->is_svp = TRUE;
1149 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001150
1151 GST_DEBUG_OBJECT(self, "Sending header");
1152
1153 codec_data = self->input_state->codec_data;
1154
1155 /* We are running in byte-stream mode, so we don't know the headers, but
1156 * we need to send something, otherwise the decoder will refuse to
1157 * intialize.
1158 */
1159 if (codec_data)
1160 {
1161 gst_buffer_ref(codec_data);
1162 }
1163 else
1164 {
1165 codec_data = gst_buffer_ref(frame->input_buffer);
1166 processed = TRUE;
1167 }
1168
1169 /* Ensure input internal pool is active */
1170 if (!gst_buffer_pool_is_active(pool))
1171 {
1172 GstStructure *config = gst_buffer_pool_get_config(pool);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001173 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1174 // guint max = VIDEO_MAX_FRAME;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001175 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1176 // self->v4l2output->info.size, min, max);
1177 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1178
1179 /* There is no reason to refuse this config */
1180 if (!gst_buffer_pool_set_config(pool, config))
1181 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001182 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001183
1184 if (!gst_buffer_pool_set_active(pool, TRUE))
1185 goto activate_failed;
1186 }
1187
1188 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1189 ret =
1190 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
1191 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1192
1193 gst_buffer_unref(codec_data);
1194
1195 /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
1196 * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
1197 * and returns the visible size as with/height and the coded size as
1198 * padding. */
1199 }
1200
1201 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1202 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1203 {
1204 /* It's possible that the processing thread stopped due to an error */
1205 if (self->output_flow != GST_FLOW_OK &&
1206 self->output_flow != GST_FLOW_FLUSHING)
1207 {
1208 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1209 ret = self->output_flow;
1210 goto drop;
1211 }
1212
1213 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1214
1215 /* Start the processing task, when it quits, the task will disable input
1216 * processing to unlock input if draining, or prevent potential block */
1217 self->output_flow = GST_FLOW_FLUSHING;
1218 if (!gst_pad_start_task(decoder->srcpad,
1219 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1220 goto start_task_failed;
1221 }
1222
1223 if (!processed)
1224 {
1225 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1226 ret =
1227 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1228 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1229
1230 if (ret == GST_FLOW_FLUSHING)
1231 {
1232 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1233 GST_TASK_STARTED)
1234 ret = self->output_flow;
1235 goto drop;
1236 }
1237 else if (ret != GST_FLOW_OK)
1238 {
1239 goto process_failed;
1240 }
1241 }
1242
1243 /* No need to keep input arround */
1244 tmp = frame->input_buffer;
1245 frame->input_buffer = gst_buffer_new();
1246 gst_buffer_copy_into(frame->input_buffer, tmp,
1247 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1248 GST_BUFFER_COPY_META,
1249 0, 0);
1250 gst_buffer_unref(tmp);
1251
1252 gst_video_codec_frame_unref(frame);
1253 return ret;
1254
1255 /* ERRORS */
1256not_negotiated:
1257{
1258 GST_ERROR_OBJECT(self, "not negotiated");
1259 ret = GST_FLOW_NOT_NEGOTIATED;
1260 gst_aml_v4l2_error(self, &error);
1261 goto drop;
1262}
1263activate_failed:
1264{
1265 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1266 (_("Failed to allocate required memory.")),
1267 ("Buffer pool activation failed"));
1268 ret = GST_FLOW_ERROR;
1269 goto drop;
1270}
1271flushing:
1272{
1273 ret = GST_FLOW_FLUSHING;
1274 goto drop;
1275}
1276
1277start_task_failed:
1278{
1279 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1280 (_("Failed to start decoding thread.")), (NULL));
1281 ret = GST_FLOW_ERROR;
1282 goto drop;
1283}
1284process_failed:
1285{
1286 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1287 (_("Failed to process frame.")),
1288 ("Maybe be due to not enough memory or failing driver"));
1289 ret = GST_FLOW_ERROR;
1290 goto drop;
1291}
1292drop:
1293{
1294 gst_video_decoder_drop_frame(decoder, frame);
1295 return ret;
1296}
1297}
1298
1299static gboolean
1300gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1301 GstQuery *query)
1302{
1303 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1304 GstClockTime latency;
1305 gboolean ret = FALSE;
1306
1307 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1308 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1309
1310 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1311 {
1312 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1313 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1314 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1315 gst_video_decoder_set_latency(decoder, latency, latency);
1316 }
1317 else
1318 {
1319 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1320 }
1321
1322 return ret;
1323}
1324
1325static gboolean
1326gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1327{
1328 gboolean ret = TRUE;
1329 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1330
1331 switch (GST_QUERY_TYPE(query))
1332 {
1333 case GST_QUERY_CAPS:
1334 {
1335 GstCaps *filter, *result = NULL;
1336 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1337
1338 gst_query_parse_caps(query, &filter);
1339
1340 if (self->probed_srccaps)
1341 result = gst_caps_ref(self->probed_srccaps);
1342 else
1343 result = gst_pad_get_pad_template_caps(pad);
1344
1345 if (filter)
1346 {
1347 GstCaps *tmp = result;
1348 result =
1349 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1350 gst_caps_unref(tmp);
1351 }
1352
1353 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1354
1355 gst_query_set_caps_result(query, result);
1356 gst_caps_unref(result);
1357 break;
1358 }
1359
1360 default:
1361 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1362 break;
1363 }
1364
1365 return ret;
1366}
1367
1368static GstCaps *
1369gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1370{
1371 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1372 GstCaps *result;
1373
1374 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1375 filter);
1376
1377 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1378
1379 return result;
1380}
1381
1382static gboolean
1383gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1384{
1385 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1386 gboolean ret;
1387 GstEventType type = GST_EVENT_TYPE(event);
1388
1389 switch (type)
1390 {
xuesong.jiang406ee302023-06-28 03:45:22 +00001391 case GST_EVENT_STREAM_START:
1392 {
1393 GstStructure *s;
1394 GstEvent *event;
1395 GST_DEBUG_OBJECT(self, "new private event");
1396 s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
1397 event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
1398 GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", event);
1399 gst_pad_push_event (decoder->sinkpad, event);
1400 GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", event);
1401 break;
1402 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001403 case GST_EVENT_FLUSH_START:
1404 GST_DEBUG_OBJECT(self, "flush start");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001405
1406 g_mutex_lock (&self->res_chg_lock);
1407 while (self->is_res_chg)
1408 {
1409 GST_LOG_OBJECT(decoder, "wait resolution change finish");
1410 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1411 }
1412 g_mutex_unlock (&self->res_chg_lock);
1413
zengliang.li92ff6822023-06-06 07:12:52 +00001414 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001415 gst_aml_v4l2_object_unlock(self->v4l2output);
1416 gst_aml_v4l2_object_unlock(self->v4l2capture);
1417 break;
1418 default:
1419 break;
1420 }
1421
1422 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1423
1424 switch (type)
1425 {
1426 case GST_EVENT_FLUSH_START:
1427 /* The processing thread should stop now, wait for it */
1428 gst_pad_stop_task(decoder->srcpad);
1429 GST_DEBUG_OBJECT(self, "flush start done");
1430 break;
1431 default:
1432 break;
1433 }
1434
1435 return ret;
1436}
1437
1438static GstStateChangeReturn
1439gst_aml_v4l2_video_dec_change_state(GstElement *element,
1440 GstStateChange transition)
1441{
1442 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1443 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1444
1445 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1446 {
1447 g_atomic_int_set(&self->active, FALSE);
1448 gst_aml_v4l2_object_unlock(self->v4l2output);
1449 gst_aml_v4l2_object_unlock(self->v4l2capture);
1450 gst_pad_stop_task(decoder->srcpad);
1451 }
1452
1453 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1454}
1455
1456static void
1457gst_aml_v4l2_video_dec_dispose(GObject *object)
1458{
1459 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1460
1461 gst_caps_replace(&self->probed_sinkcaps, NULL);
1462 gst_caps_replace(&self->probed_srccaps, NULL);
1463
1464 G_OBJECT_CLASS(parent_class)->dispose(object);
1465}
1466
/* finalize vfunc: tear down everything created in instance init.
 * Destroys both V4L2 objects, clears the resolution-change mutex/cond,
 * and (LGE builds only) releases the LGE context and its owned strings.
 */
static void
gst_aml_v4l2_video_dec_finalize(GObject *object)
{
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);

    gst_aml_v4l2_object_destroy(self->v4l2capture);
    gst_aml_v4l2_object_destroy(self->v4l2output);

    g_mutex_clear(&self->res_chg_lock);
    g_cond_clear(&self->res_chg_cond);

#if GST_IMPORT_LGE_PROP
    /* lge_ctxt was allocated with malloc() in init(), hence free() here;
     * the strings inside were g_strdup()'d, hence g_free(). */
    if (self->lge_ctxt)
    {
        if (self->lge_ctxt->app_type)
            g_free(self->lge_ctxt->app_type);
        if (self->lge_ctxt->res_info.coretype)
            g_free(self->lge_ctxt->res_info.coretype);
        free(self->lge_ctxt);
    }

#endif

    G_OBJECT_CLASS(parent_class)->finalize(object);
}
1492
1493static void
1494gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1495{
1496 /* V4L2 object are created in subinstance_init */
zengliang.li92ff6822023-06-06 07:12:52 +00001497 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001498 self->is_secure_path = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001499 self->is_res_chg = FALSE;
1500 g_mutex_init(&self->res_chg_lock);
1501 g_cond_init(&self->res_chg_cond);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001502#if GST_IMPORT_LGE_PROP
1503 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1504 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1505#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001506}
1507
/* Per-subclass instance init: create the two GstAmlV4l2Object halves of the
 * M2M decoder — OUTPUT (encoded input, bound to the sink pad) and CAPTURE
 * (decoded frames, bound to the src pad) — on the subclass' default device.
 */
static void
gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
    GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);

    /* Upstream parses; we receive whole encoded frames */
    gst_video_decoder_set_packetized(decoder, TRUE);

    self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                               GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
                                               V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
                                               gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
    /* Encoded side: format comes from caps negotiation, not the driver */
    self->v4l2output->no_initial_format = TRUE;
    self->v4l2output->keep_aspect = FALSE;
    self->v4l2output->is_svp = FALSE;

    self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
                                                GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
                                                V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
                                                gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
    /* Capture side waits for the driver's SOURCE_CHANGE event before
     * streaming; no pending event needs dropping initially. */
    self->v4l2capture->need_wait_event = TRUE;
    self->v4l2capture->need_drop_event = FALSE;
}
1532
/* Class init: wire up GObject/GstElement/GstVideoDecoder vfuncs, create the
 * "decoded-pts" signal, and install the M2M (and optional LGE) properties.
 */
static void
gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
{
    GstElementClass *element_class;
    GObjectClass *gobject_class;
    GstVideoDecoderClass *video_decoder_class;

    parent_class = g_type_class_peek_parent(klass);

    element_class = (GstElementClass *)klass;
    gobject_class = (GObjectClass *)klass;
    video_decoder_class = (GstVideoDecoderClass *)klass;

    GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
                            "AML V4L2 Video Decoder");

    gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
    gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
    gobject_class->set_property =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
    gobject_class->get_property =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);

    video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
    video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
    video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
    video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
    video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
    video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
    video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
    video_decoder_class->set_format =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
    video_decoder_class->negotiate =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
    video_decoder_class->decide_allocation =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
    /* FIXME propose_allocation or not ? */
    video_decoder_class->handle_frame =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
    video_decoder_class->getcaps =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
    video_decoder_class->src_query =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
    video_decoder_class->sink_event =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);

    element_class->change_state =
        GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);

    /* "decoded-pts" (guint64): emitted from the decoding loop with the PTS
     * of every buffer pushed downstream. */
    g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
        G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
        G_SIGNAL_RUN_LAST,
        0, /* class offset */
        NULL, /* accumulator */
        NULL, /* accu data */
        g_cclosure_marshal_generic,
        G_TYPE_NONE,
        1,
        G_TYPE_UINT64);

    gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
#if GST_IMPORT_LGE_PROP
    gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
#endif
}
1598
/* Per-codec subclass init: consumes the GstAmlV4l2VideoDecCData prepared by
 * gst_aml_v4l2_video_dec_register() — installs pad templates and element
 * metadata, then releases the cdata.
 */
static void
gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
{
    GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
    GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
    GstAmlV4l2VideoDecCData *cdata = data;

    /* Device path chosen at registration time for this codec */
    klass->default_device = cdata->device;

    /* Note: gst_pad_template_new() take the floating ref from the caps */
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
                                                            cdata->sink_caps));
    gst_element_class_add_pad_template(element_class,
                                       gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
                                                            cdata->src_caps));

    gst_element_class_set_metadata(element_class, cdata->longname,
                                   "Codec/Decoder/Video/Hardware", cdata->description,
                                   "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");

    /* Drop the refs held by cdata (the templates took the floating refs) */
    gst_caps_unref(cdata->sink_caps);
    gst_caps_unref(cdata->src_caps);
    g_free(cdata);
}
1624
1625/* Probing functions */
1626gboolean
1627gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1628{
1629 gboolean ret = FALSE;
1630
1631 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1632 ret = TRUE;
1633
1634 return ret;
1635}
1636
1637static gchar *
1638gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1639 const gchar *basename)
1640{
1641 gchar *codec_name = NULL;
1642 gchar *type_name = NULL;
1643 gboolean got_value = FALSE;
1644
1645#define SET_META(codec) \
1646 G_STMT_START \
1647 { \
1648 cdata->longname = "AML V4L2 " codec " Decoder"; \
1649 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1650 codec_name = g_ascii_strdown(codec, -1); \
1651 } \
1652 G_STMT_END
1653
1654 if (gst_structure_has_name(s, "image/jpeg"))
1655 {
1656 SET_META("JPEG");
1657 }
1658 else if (gst_structure_has_name(s, "video/mpeg"))
1659 {
1660 gint mpegversion = 0;
1661 gint *list = NULL;
1662 got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1663 if (FALSE == got_value)
1664 {
1665 got_value = gst_structure_get_list(s, "mpegversion", &list);
1666 if (TRUE == got_value && (1 == *list || 2 == *list))
1667 {
1668 SET_META("MPEG2");
1669 }
1670 else
1671 {
1672 SET_META("MPEG4");
1673 }
1674 }
1675 else
1676 {
1677 SET_META("MPEG4");
1678 }
1679 }
1680 else if (gst_structure_has_name(s, "video/x-h263"))
1681 {
1682 SET_META("H263");
1683 }
1684 else if (gst_structure_has_name(s, "video/x-fwht"))
1685 {
1686 SET_META("FWHT");
1687 }
1688 else if (gst_structure_has_name(s, "video/x-h264"))
1689 {
1690 SET_META("H264");
1691 }
1692 else if (gst_structure_has_name(s, "video/x-h265"))
1693 {
1694 SET_META("H265");
1695 }
1696 else if (gst_structure_has_name(s, "video/x-wmv"))
1697 {
1698 SET_META("VC1");
1699 }
1700 else if (gst_structure_has_name(s, "video/x-vp8"))
1701 {
1702 SET_META("VP8");
1703 }
1704 else if (gst_structure_has_name(s, "video/x-vp9"))
1705 {
1706 SET_META("VP9");
1707 }
1708 else if (gst_structure_has_name(s, "video/x-av1"))
1709 {
1710 SET_META("AV1");
1711 }
1712 else if (gst_structure_has_name(s, "video/x-bayer"))
1713 {
1714 SET_META("BAYER");
1715 }
1716 else if (gst_structure_has_name(s, "video/x-sonix"))
1717 {
1718 SET_META("SONIX");
1719 }
1720 else if (gst_structure_has_name(s, "video/x-pwc1"))
1721 {
1722 SET_META("PWC1");
1723 }
1724 else if (gst_structure_has_name(s, "video/x-pwc2"))
1725 {
1726 SET_META("PWC2");
1727 }
1728 else
1729 {
1730 /* This code should be kept on sync with the exposed CODEC type of format
1731 * from gstamlv4l2object.c. This warning will only occure in case we forget
1732 * to also add a format here. */
1733 gchar *s_str = gst_structure_to_string(s);
1734 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1735 "bug, please report at https://bugs.gnome.org",
1736 s_str);
1737 g_free(s_str);
1738 }
1739
1740 if (codec_name)
1741 {
1742 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1743 if (g_type_from_name(type_name) != 0)
1744 {
1745 g_free(type_name);
1746 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1747 }
1748
1749 g_free(codec_name);
1750 }
1751
1752 return type_name;
1753#undef SET_META
1754}
1755
1756void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1757 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1758{
1759 gint i;
1760
1761 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1762 {
1763 GstAmlV4l2VideoDecCData *cdata;
1764 GstStructure *s;
1765 GTypeQuery type_query;
1766 GTypeInfo type_info = {
1767 0,
1768 };
1769 GType type, subtype;
1770 gchar *type_name;
1771
1772 s = gst_caps_get_structure(sink_caps, i);
1773
1774 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1775 cdata->device = g_strdup(device_path);
1776 cdata->sink_caps = gst_caps_new_empty();
1777 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1778 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1779 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1780 cdata->src_caps = gst_caps_copy(src_caps);
1781 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1782 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1783 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1784
1785 /* Skip over if we hit an unmapped type */
1786 if (!type_name)
1787 {
1788 g_free(cdata);
1789 continue;
1790 }
1791
1792 type = gst_aml_v4l2_video_dec_get_type();
1793 g_type_query(type, &type_query);
1794 memset(&type_info, 0, sizeof(type_info));
1795 type_info.class_size = type_query.class_size;
1796 type_info.instance_size = type_query.instance_size;
1797 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1798 type_info.class_data = cdata;
1799 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1800
1801 subtype = g_type_register_static(type, type_name, &type_info, 0);
1802 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1803 subtype))
1804 GST_WARNING("Failed to register plugin '%s'", type_name);
1805
1806 g_free(type_name);
1807 }
1808}
xuesong.jiang61ea8012022-05-12 15:38:17 +08001809
1810#if GST_IMPORT_LGE_PROP
1811static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
1812{
1813 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
1814 g_param_spec_object("resource-info", "resource-info",
1815 "After acquisition of H/W resources is completed, allocated resource information must be delivered to the decoder and the sink",
1816 GST_TYPE_STRUCTURE,
1817 G_PARAM_READABLE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
1818
1819 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
1820 g_param_spec_uint64("decoded-size", "decoded-size",
1821 "The total amount of decoder element's decoded video es after constructing pipeline or flushing pipeline update unit is byte.",
1822 0, G_MAXUINT64,
1823 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1824
1825 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
1826 g_param_spec_uint64("undecoded-size", "undecoded-size",
1827 "video decoder element's total undecoded data update unit is byte.",
1828 0, G_MAXUINT64,
1829 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1830
1831 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
1832 g_param_spec_string("app-type", "app-type",
1833 "set application type.",
1834 "default_app",
1835 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1836
1837 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
1838 g_param_spec_boolean("clip-mode", "clip-mode",
1839 "When seeking, Content is moving faster for a while to skip frames.",
1840 FALSE,
1841 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1842}
1843#endif