 1/* GStreamer
2 * Copyright (C) 2022 <xuesong.jiang@amlogic.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
17 * Boston, MA 02110-1335, USA.
18 */
19
20#ifdef HAVE_CONFIG_H
21#include "config.h"
22#endif
23
24#include <sys/stat.h>
25#include <fcntl.h>
26#include <errno.h>
27#include <unistd.h>
28#include <string.h>
29
30#include "gstamlv4l2object.h"
31#include "gstamlv4l2videodec.h"
32
33#include <string.h>
34#include <gst/gst-i18n-plugin.h>
35#include <gst/allocators/gstdmabuf.h>
36
37GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
38#define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
39
40#ifdef GST_VIDEO_DECODER_STREAM_LOCK
41#undef GST_VIDEO_DECODER_STREAM_LOCK
42#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) \
43 { \
fei.denge9458472023-04-18 02:05:48 +000044 GST_TRACE("aml v4l2 dec locking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080045 g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000046 GST_TRACE("aml v4l2 dec locked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080047 }
48#endif
49
50#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
51#undef GST_VIDEO_DECODER_STREAM_UNLOCK
52#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) \
53 { \
fei.denge9458472023-04-18 02:05:48 +000054 GST_TRACE("aml v4l2 dec unlocking"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080055 g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
fei.denge9458472023-04-18 02:05:48 +000056 GST_TRACE("aml v4l2 dec unlocked"); \
xuesong.jiangae1548e2022-05-06 16:38:46 +080057 }
58#endif
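/* The stream-lock macros above are re-defined only to add TRACE logging
 * around the recursive mutex; this appears to be a debugging aid for lock
 * ordering between handle_frame() and the capture (srcpad) task. */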
xuesong.jiang61ea8012022-05-12 15:38:17 +080059
hanghang.luo36df2852022-08-24 15:02:27 +080060#ifndef ABSDIFF
61#define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
62#endif
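/* ABSDIFF is used further down to match a dequeued capture buffer back to
 * the pending GstVideoCodecFrame whose PTS lies within a small tolerance. */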
63
xuesong.jiang61ea8012022-05-12 15:38:17 +080064#if GST_IMPORT_LGE_PROP
65typedef struct _GstAmlResourceInfo
66{
67 gchar *coretype;
68 gint videoport;
69 gint audioport;
70 gint maxwidth;
71 gint maxheight;
72 gint mixerport;
73} GstAmlResourceInfo;
74
75struct _GstAmlV4l2VideoDecLgeCtxt
76{
77 GstAmlResourceInfo res_info;
78 guint64 dec_size;
79 guint64 undec_size;
80 gchar *app_type;
81 gboolean clip_mode;
82};
83#endif
84
xuesong.jiangae1548e2022-05-06 16:38:46 +080085typedef struct
86{
87 gchar *device;
88 GstCaps *sink_caps;
89 GstCaps *src_caps;
90 const gchar *longname;
91 const gchar *description;
92} GstAmlV4l2VideoDecCData;
93
94enum
95{
96 PROP_0,
xuesong.jiang61ea8012022-05-12 15:38:17 +080097 V4L2_STD_OBJECT_PROPS,
98#if GST_IMPORT_LGE_PROP
99 LGE_RESOURCE_INFO,
100 LGE_DECODE_SIZE,
101 LGE_UNDECODE_SIZE,
102 LGE_APP_TYPE,
103 LGE_CLIP_MODE
104#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800105};
106
xuesong.jiang406ee302023-06-28 03:45:22 +0000107enum
108{
109 SIGNAL_DECODED_PTS,
110 MAX_SIGNAL
111};
112
113static guint g_signals[MAX_SIGNAL]= {0};
114
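/* "decoded-pts" (installed in class_init below) is emitted with the PTS of
 * every buffer leaving the capture queue. A minimal usage sketch from
 * application code, assuming `dec` points at this element:
 *
 *   static void on_decoded_pts(GstElement *dec, guint64 pts, gpointer udata)
 *   {
 *       g_print("decoded pts: %" G_GUINT64_FORMAT "\n", pts);
 *   }
 *
 *   g_signal_connect(dec, "decoded-pts", G_CALLBACK(on_decoded_pts), NULL);
 */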
xuesong.jiangae1548e2022-05-06 16:38:46 +0800115#define gst_aml_v4l2_video_dec_parent_class parent_class
116G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
117 GST_TYPE_VIDEO_DECODER);
118
119static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
xuesong.jiang61ea8012022-05-12 15:38:17 +0800120#if GST_IMPORT_LGE_PROP
121static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
122#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800123
124static void
125gst_aml_v4l2_video_dec_set_property(GObject *object,
126 guint prop_id, const GValue *value, GParamSpec *pspec)
127{
128 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
129
130 switch (prop_id)
131 {
132 case PROP_CAPTURE_IO_MODE:
133 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
134 prop_id, value, pspec))
135 {
136 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
137 }
138 break;
139 case PROP_DUMP_FRAME_LOCATION:
140 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2capture,
141 prop_id, value, pspec))
142 {
143 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
144 }
145 break;
xuesong.jiang61ea8012022-05-12 15:38:17 +0800146#if GST_IMPORT_LGE_PROP
147 case LGE_RESOURCE_INFO:
148 {
149 GST_DEBUG_OBJECT(self, "LGE up layer set res info");
150 GstStructure *r_info = g_value_get_object(value);
151 if (r_info)
152 {
153 if (gst_structure_has_field(r_info, "coretype"))
154 {
155 if (self->lge_ctxt->res_info.coretype)
156 g_free(self->lge_ctxt->res_info.coretype);
157 self->lge_ctxt->res_info.coretype = g_strdup(gst_structure_get_string(r_info, "coretype"));
158 }
159 if (gst_structure_has_field(r_info, "videoport"))
160 gst_structure_get_int(r_info, "videoport", &(self->lge_ctxt->res_info.videoport));
161 if (gst_structure_has_field(r_info, "audioport"))
162 gst_structure_get_int(r_info, "audioport", &(self->lge_ctxt->res_info.audioport));
163 if (gst_structure_has_field(r_info, "maxwidth"))
164 gst_structure_get_int(r_info, "maxwidth", &(self->lge_ctxt->res_info.maxwidth));
165 if (gst_structure_has_field(r_info, "maxheight"))
166 gst_structure_get_int(r_info, "maxheight", &(self->lge_ctxt->res_info.maxheight));
167 if (gst_structure_has_field(r_info, "mixerport"))
168 gst_structure_get_int(r_info, "mixerport", &(self->lge_ctxt->res_info.mixerport));
169 }
170 break;
171 }
172 case LGE_APP_TYPE:
173 {
174 GST_DEBUG_OBJECT(self, "LGE up layer set app type");
175 if (self->lge_ctxt->app_type)
176 g_free(self->lge_ctxt->app_type);
177 self->lge_ctxt->app_type = g_strdup(g_value_get_string(value));
178 break;
179 }
180 case LGE_CLIP_MODE:
181 {
182 GST_DEBUG_OBJECT(self, "LGE up layer set clip mode");
 183 self->lge_ctxt->clip_mode = g_value_get_boolean(value);
184 break;
185 }
186#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +0800187 /* By default, only set on output */
188 default:
189 if (!gst_aml_v4l2_object_set_property_helper(self->v4l2output,
190 prop_id, value, pspec))
191 {
192 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
193 }
194 break;
195 }
196}
197
198static void
199gst_aml_v4l2_video_dec_get_property(GObject *object,
200 guint prop_id, GValue *value, GParamSpec *pspec)
201{
202 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
203
204 switch (prop_id)
205 {
206 case PROP_CAPTURE_IO_MODE:
207 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2capture,
208 prop_id, value, pspec))
209 {
210 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
211 }
212 break;
213
xuesong.jiang61ea8012022-05-12 15:38:17 +0800214#if GST_IMPORT_LGE_PROP
215 case LGE_DECODE_SIZE:
216 {
217 GST_DEBUG_OBJECT(self, "LGE up layer get dec size");
218 self->lge_ctxt->dec_size = -1;
219 g_value_set_int(value, self->lge_ctxt->dec_size);
220 break;
221 }
222 case LGE_UNDECODE_SIZE:
223 {
224 GST_DEBUG_OBJECT(self, "LGE up layer get undec size");
225 self->lge_ctxt->undec_size = -1;
226 g_value_set_int(value, self->lge_ctxt->undec_size);
227 break;
228 }
229#endif
230
xuesong.jiangae1548e2022-05-06 16:38:46 +0800231 /* By default read from output */
232 default:
233 if (!gst_aml_v4l2_object_get_property_helper(self->v4l2output,
234 prop_id, value, pspec))
235 {
236 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
237 }
238 break;
239 }
240}
241
242static gboolean
243gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
244{
245 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
246 GstCaps *codec_caps;
247
248 GST_DEBUG_OBJECT(self, "Opening");
249
250 if (!gst_aml_v4l2_object_open(self->v4l2output))
251 goto failure;
252
253 if (!gst_aml_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
254 goto failure;
255
256 codec_caps = gst_pad_get_pad_template_caps(decoder->sinkpad);
257 self->probed_sinkcaps = gst_aml_v4l2_object_probe_caps(self->v4l2output,
258 codec_caps);
259 gst_caps_unref(codec_caps);
260
261 if (gst_caps_is_empty(self->probed_sinkcaps))
262 goto no_encoded_format;
263
264 return TRUE;
265
266no_encoded_format:
267 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
268 (_("Decoder on device %s has no supported input format"),
269 self->v4l2output->videodev),
270 (NULL));
271 goto failure;
272
273failure:
274 if (GST_AML_V4L2_IS_OPEN(self->v4l2output))
275 gst_aml_v4l2_object_close(self->v4l2output);
276
277 if (GST_AML_V4L2_IS_OPEN(self->v4l2capture))
278 gst_aml_v4l2_object_close(self->v4l2capture);
279
280 gst_caps_replace(&self->probed_srccaps, NULL);
281 gst_caps_replace(&self->probed_sinkcaps, NULL);
282
283 return FALSE;
284}
285
286static gboolean
287gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
288{
289 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
290
291 GST_DEBUG_OBJECT(self, "Closing");
292
293 gst_aml_v4l2_object_close(self->v4l2output);
294 gst_aml_v4l2_object_close(self->v4l2capture);
295 gst_caps_replace(&self->probed_srccaps, NULL);
296 gst_caps_replace(&self->probed_sinkcaps, NULL);
297
298 return TRUE;
299}
300
301static gboolean
302gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
303{
304 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
305
306 GST_DEBUG_OBJECT(self, "Starting");
307
308 gst_aml_v4l2_object_unlock(self->v4l2output);
309 g_atomic_int_set(&self->active, TRUE);
310 self->output_flow = GST_FLOW_OK;
311
312 return TRUE;
313}
314
315static gboolean
316gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
317{
318 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
319
320 GST_DEBUG_OBJECT(self, "Stopping");
321
322 gst_aml_v4l2_object_unlock(self->v4l2output);
323 gst_aml_v4l2_object_unlock(self->v4l2capture);
324
325 /* Wait for capture thread to stop */
326 gst_pad_stop_task(decoder->srcpad);
327
328 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
329 self->output_flow = GST_FLOW_OK;
330 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
331
332 /* Should have been flushed already */
333 g_assert(g_atomic_int_get(&self->active) == FALSE);
334
335 gst_aml_v4l2_object_stop(self->v4l2output);
336 gst_aml_v4l2_object_stop(self->v4l2capture);
337
338 if (self->input_state)
339 {
340 gst_video_codec_state_unref(self->input_state);
341 self->input_state = NULL;
342 }
343
344 GST_DEBUG_OBJECT(self, "Stopped");
345
346 return TRUE;
347}
348
349static gboolean
350gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
351 GstVideoCodecState *state)
352{
353 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
354 gboolean ret = TRUE;
355 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
356 GstCaps *caps;
357
358 GST_DEBUG_OBJECT(self, "Setting format: %" GST_PTR_FORMAT, state->caps);
359 GstCapsFeatures *const features = gst_caps_get_features(state->caps, 0);
hanghang.luoc54208e2023-09-22 02:43:54 +0000360 GstStructure *s = gst_caps_get_structure(state->caps,0);
361 if (s && gst_structure_has_field(s,"format"))
362 {
363 if (!strcmp("XVID",gst_structure_get_string(s,"format")))
364 {
 365 GST_DEBUG_OBJECT(self, "This is a DIVX/XVID stream, which is not supported");
366 ret = FALSE;
367 goto done;
368 }
369 }
xuesong.jiangae1548e2022-05-06 16:38:46 +0800370
371 if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
372 self->v4l2output->req_mode = GST_V4L2_IO_DMABUF_IMPORT;
373
374 if (self->input_state)
375 {
376 if (gst_aml_v4l2_object_caps_equal(self->v4l2output, state->caps))
377 {
378 GST_DEBUG_OBJECT(self, "Compatible caps");
379 goto done;
380 }
381 gst_video_codec_state_unref(self->input_state);
382 self->input_state = NULL;
383
384 gst_aml_v4l2_video_dec_finish(decoder);
385 gst_aml_v4l2_object_stop(self->v4l2output);
386
 387 /* The renegotiation flow doesn't blend with the base class flow. To properly
 388 * stop the capture pool, if the buffers can't be orphaned, we need to
 389 * reclaim our buffers, which will happen through the allocation query.
 390 * The allocation query is triggered by gst_video_decoder_negotiate(), which
 391 * requires the output caps to be set, but we can't know this information
 392 * as we rely on the decoder, which requires the capture queue to be
 393 * stopped.
 394 *
 395 * To work around this issue, we simply run an allocation query with the
 396 * old negotiated caps in order to drain/reclaim our buffers. That removes
 397 * the complexity and should not have much impact on performance, since the
 398 * following allocation query will happen on a drained pipeline and won't
 399 * block. */
400 if (self->v4l2capture->pool &&
401 !gst_aml_v4l2_buffer_pool_orphan(&self->v4l2capture->pool))
402 {
403 GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
404 if (caps)
405 {
406 GstQuery *query = gst_query_new_allocation(caps, FALSE);
407 gst_pad_peer_query(decoder->srcpad, query);
408 gst_query_unref(query);
409 gst_caps_unref(caps);
410 }
411 }
412
413 gst_aml_v4l2_object_stop(self->v4l2capture);
414 self->output_flow = GST_FLOW_OK;
415 }
416
417 if ((ret = gst_aml_v4l2_set_drm_mode(self->v4l2output)) == FALSE)
418 {
419 GST_ERROR_OBJECT(self, "config output drm mode error");
420 goto done;
421 }
422
xuesong.jiang22a9b112023-05-24 09:01:59 +0000423 if ((ret = gst_aml_v4l2_set_stream_mode(self->v4l2output)) == FALSE)
424 {
425 GST_ERROR_OBJECT(self, "config output stream mode error");
426 goto done;
427 }
428
xuesong.jiangae1548e2022-05-06 16:38:46 +0800429 ret = gst_aml_v4l2_object_set_format(self->v4l2output, state->caps, &error);
430
431 gst_caps_replace(&self->probed_srccaps, NULL);
432 self->probed_srccaps = gst_aml_v4l2_object_probe_caps(self->v4l2capture,
433 gst_aml_v4l2_object_get_raw_caps());
434
435 if (gst_caps_is_empty(self->probed_srccaps))
436 goto no_raw_format;
437
438 caps = gst_caps_copy(self->probed_srccaps);
439 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
440 gst_caps_append(self->probed_srccaps, caps);
441 if (ret)
442 self->input_state = gst_video_codec_state_ref(state);
443 else
444 gst_aml_v4l2_error(self, &error);
445
446done:
447 return ret;
448
449no_raw_format:
450 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
451 (_("Decoder on device %s has no supported output format"),
452 self->v4l2output->videodev),
453 (NULL));
454 return GST_FLOW_ERROR;
455}
456
457static gboolean
458gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
459{
460 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
461
462 GST_DEBUG_OBJECT(self, "Flushed");
463
 464 /* Ensure the processing thread has stopped for the reverse playback
 465 * discont case */
466 if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
467 {
468 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
469
470 gst_aml_v4l2_object_unlock(self->v4l2output);
471 gst_aml_v4l2_object_unlock(self->v4l2capture);
472 gst_pad_stop_task(decoder->srcpad);
473 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
474 }
475
476 self->output_flow = GST_FLOW_OK;
477
478 gst_aml_v4l2_object_unlock_stop(self->v4l2output);
479 gst_aml_v4l2_object_unlock_stop(self->v4l2capture);
480
481 if (self->v4l2output->pool)
482 gst_aml_v4l2_buffer_pool_flush(self->v4l2output->pool);
483
484 /* gst_aml_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
485 * called after gst_aml_v4l2_object_unlock_stop() stopped flushing the buffer
486 * pool. */
487 if (self->v4l2capture->pool)
488 gst_aml_v4l2_buffer_pool_flush(self->v4l2capture->pool);
489
490 return TRUE;
491}
492
493static gboolean
494gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
495{
496 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
497
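    /* For secure (SVP, presumably "Secure Video Path") streams, push a sticky
     * custom downstream event so that elements after the decoder know the
     * dmabufs they will receive belong to the secure path. */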
xuesong.jiang681d3602022-06-24 21:23:35 +0800498 if (TRUE == self->v4l2output->is_svp)
499 {
500 GstStructure *s;
501 GstEvent *event;
502
503 s = gst_structure_new_empty ("IS_SVP");
504 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s);
505 GST_DEBUG_OBJECT(self, "before Send SVP Event :%p", event);
506 gst_pad_push_event (decoder->srcpad, event);
507 GST_DEBUG_OBJECT(self, "after Send SVP Event :%p", event);
508 }
509
 510 /* We don't allow renegotiation without carefully disabling the pool */
511 if (self->v4l2capture->pool &&
512 gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
513 return TRUE;
514
515 return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
516}
517
518static gboolean
519gst_aml_v4l2_decoder_cmd(GstAmlV4l2Object *v4l2object, guint cmd, guint flags)
520{
521 struct v4l2_decoder_cmd dcmd = {
522 0,
523 };
524
525 GST_DEBUG_OBJECT(v4l2object->element,
526 "sending v4l2 decoder command %u with flags %u", cmd, flags);
527
528 if (!GST_AML_V4L2_IS_OPEN(v4l2object))
529 return FALSE;
530
531 dcmd.cmd = cmd;
532 dcmd.flags = flags;
533 if (v4l2object->ioctl(v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
534 goto dcmd_failed;
535
536 return TRUE;
537
538dcmd_failed:
539 if (errno == ENOTTY)
540 {
541 GST_INFO_OBJECT(v4l2object->element,
542 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
543 cmd, flags, v4l2object->videodev, g_strerror(errno));
544 }
545 else
546 {
547 GST_ERROR_OBJECT(v4l2object->element,
548 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
549 cmd, flags, v4l2object->videodev, g_strerror(errno));
550 }
551 return FALSE;
552}
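/* A successful V4L2_DEC_CMD_STOP asks the driver to drain: the remaining
 * capture buffers are dequeued and the last one is flagged (see the
 * GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY / V4L2_EVENT_EOS handling in the capture
 * loop), which is what lets _finish() below wait for the srcpad task to stop. */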
553
554static GstFlowReturn
555gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
556{
557 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
558 GstFlowReturn ret = GST_FLOW_OK;
559 GstBuffer *buffer;
560
561 if (gst_pad_get_task_state(decoder->srcpad) != GST_TASK_STARTED)
562 goto done;
563
564 GST_DEBUG_OBJECT(self, "Finishing decoding");
565
566 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
567
568 if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
569 {
570 GstTask *task = decoder->srcpad->task;
571
572 /* If the decoder stop command succeeded, just wait until processing is
573 * finished */
574 GST_DEBUG_OBJECT(self, "Waiting for decoder stop");
575 GST_OBJECT_LOCK(task);
576 while (GST_TASK_STATE(task) == GST_TASK_STARTED)
577 GST_TASK_WAIT(task);
578 GST_OBJECT_UNLOCK(task);
579 ret = GST_FLOW_FLUSHING;
580 }
581 else
582 {
583 /* otherwise keep queuing empty buffers until the processing thread has
584 * stopped, _pool_process() will return FLUSHING when that happened */
585 while (ret == GST_FLOW_OK)
586 {
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800587 GST_DEBUG_OBJECT(self, "queue empty output buf");
xuesong.jiangae1548e2022-05-06 16:38:46 +0800588 buffer = gst_buffer_new();
589 ret =
590 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &buffer);
591 gst_buffer_unref(buffer);
592 }
593 }
594
 595 /* and ensure the processing thread has stopped in case another error
 596 * occurred. */
597 gst_aml_v4l2_object_unlock(self->v4l2capture);
598 gst_pad_stop_task(decoder->srcpad);
599 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
600
601 if (ret == GST_FLOW_FLUSHING)
602 ret = self->output_flow;
603
604 GST_DEBUG_OBJECT(decoder, "Done draining buffers");
605
 606 /* TODO Shall we clean up any referenced frame to work around broken decoders? */
607
608done:
609 return ret;
610}
611
612static GstFlowReturn
613gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
614{
615 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
616
617 GST_DEBUG_OBJECT(self, "Draining...");
618 gst_aml_v4l2_video_dec_finish(decoder);
619 gst_aml_v4l2_video_dec_flush(decoder);
620
621 return GST_FLOW_OK;
622}
623
624static GstVideoCodecFrame *
625gst_aml_v4l2_video_dec_get_oldest_frame(GstVideoDecoder *decoder)
626{
627 GstVideoCodecFrame *frame = NULL;
628 GList *frames, *l;
629 gint count = 0;
630
631 frames = gst_video_decoder_get_frames(decoder);
632
633 for (l = frames; l != NULL; l = l->next)
634 {
635 GstVideoCodecFrame *f = l->data;
636
637 if (!frame || (GST_CLOCK_TIME_IS_VALID(frame->pts) && GST_CLOCK_TIME_IS_VALID(f->pts) && (frame->pts > f->pts)))
638 frame = f;
639
640 count++;
641 }
642
643 if (frame)
644 {
645 GST_LOG_OBJECT(decoder,
646 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
647 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
648 gst_video_codec_frame_ref(frame);
649 }
650
651 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
652
653 return frame;
654}
655
fei.dengbee20862022-06-14 14:59:48 +0800656static GstVideoCodecFrame *
xuesong.jiange24aef92023-06-16 06:39:10 +0000657gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
fei.dengbee20862022-06-14 14:59:48 +0800658{
659 GstVideoCodecFrame *frame = NULL;
660 GList *frames, *l;
661 gint count = 0;
662
xuesong.jiange24aef92023-06-16 06:39:10 +0000663 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
664
fei.dengbee20862022-06-14 14:59:48 +0800665 frames = gst_video_decoder_get_frames(decoder);
666
667 for (l = frames; l != NULL; l = l->next)
668 {
669 GstVideoCodecFrame *f = l->data;
fei.denge9458472023-04-18 02:05:48 +0000670
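        /* Match by PTS with a small tolerance (1000 ns); the decoder is assumed
         * to preserve the input PTS on its output buffers. */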
xuesong.jiangc5dac0f2023-02-01 14:42:24 +0800671 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
fei.dengbee20862022-06-14 14:59:48 +0800672 frame = f;
fei.dengbee20862022-06-14 14:59:48 +0800673 }
fei.dengbee20862022-06-14 14:59:48 +0800674 count++;
675 }
676
zengliang.liddee2da2023-07-14 07:27:05 +0000677 if (!frame)
678 {
679 for (l = frames; l != NULL; l = l->next)
680 {
681 GstVideoCodecFrame *f = l->data;
682 if (!GST_CLOCK_TIME_IS_VALID(f->pts))
683 {
684 frame = f;
685 }
686 GST_DEBUG("The pts of the expected output frame is invalid");
687 }
688 }
689
fei.dengbee20862022-06-14 14:59:48 +0800690 if (frame)
691 {
692 GST_LOG_OBJECT(decoder,
693 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
694 frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
695 gst_video_codec_frame_ref(frame);
696 }
697
698 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
699
xuesong.jiange24aef92023-06-16 06:39:10 +0000700 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
fei.dengbee20862022-06-14 14:59:48 +0800701 return frame;
702}
703
xuesong.jiange24aef92023-06-16 06:39:10 +0000704static GstVideoCodecFrame *
705gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
706{
707 GstVideoCodecFrame *frame = NULL;
708 GList *frames, *l;
709 gint count = 0;
710
711 GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
712
713 frames = gst_video_decoder_get_frames(decoder);
714 guint frames_len = 0;
715 frames_len = g_list_length(frames);
716 GST_LOG_OBJECT (decoder, "got frames list len:%d", frames_len);
717
718 frame = frames->data;
719
720 for (l = frames; l != NULL; l = l->next)
721 {
722 GstVideoCodecFrame *f = l->data;
723
724 if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
725 {
726 /* found the right frame */
727 frame = f;
728 break;
729 }
730 else if(GST_CLOCK_TIME_IS_VALID(pts) && (f->pts < pts))
731 {
732 GST_LOG_OBJECT(decoder,
733 "stream mode drop frame %d %" GST_TIME_FORMAT,
734 frame->system_frame_number, GST_TIME_ARGS(frame->pts));
735
736 gst_video_codec_frame_ref(f);
737 // gst_video_decoder_drop_frame(decoder, f);
738 gst_video_decoder_release_frame(decoder, f);
739 }
740 else
741 {
 742 GST_LOG_OBJECT (decoder, "no matching frame yet, continue searching");
743 }
744 }
745
746 if (frame)
747 {
748 guint l_len = 0;
749 l = gst_video_decoder_get_frames(decoder);
750 l_len = g_list_length(l);
751 g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
752
753 GST_LOG_OBJECT(decoder,
754 "frame is %d %" GST_TIME_FORMAT " and %d frames left",
755 frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
756 gst_video_codec_frame_ref(frame);
757 }
758
759 g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
760
761 GST_LOG_OBJECT (decoder, "trace out ret:%p", frame);
762 return frame;
763}
764
765static GstVideoCodecFrame *
766gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
767{
768 GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
769 if (self->v4l2output->stream_mode)
770 return gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(decoder, pts);
771 else
772 return gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(decoder, pts);
773}
774
xuesong.jiangae1548e2022-05-06 16:38:46 +0800775static gboolean
776gst_aml_v4l2_video_remove_padding(GstCapsFeatures *features,
777 GstStructure *structure, gpointer user_data)
778{
779 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(user_data);
780 GstVideoAlignment *align = &self->v4l2capture->align;
781 GstVideoInfo *info = &self->v4l2capture->info;
782 int width, height;
783
784 if (!gst_structure_get_int(structure, "width", &width))
785 return TRUE;
786
787 if (!gst_structure_get_int(structure, "height", &height))
788 return TRUE;
789
790 if (align->padding_left != 0 || align->padding_top != 0 ||
791 height != info->height + align->padding_bottom)
792 return TRUE;
793
794 if (height == info->height + align->padding_bottom)
795 {
 796 /* Some drivers may round up width to the padded width */
797 if (width == info->width + align->padding_right)
798 gst_structure_set(structure,
799 "width", G_TYPE_INT, width - align->padding_right,
800 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
801 /* Some drivers may keep visible width and only round up bytesperline */
802 else if (width == info->width)
803 gst_structure_set(structure,
804 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
805 }
806
807 return TRUE;
808}
809
810static void
sheng.liubcf036c2022-06-21 15:55:42 +0800811gst_v4l2_drop_event (GstAmlV4l2Object * v4l2object)
sheng.liub56bbc52022-06-21 11:02:33 +0800812{
813 struct v4l2_event evt;
814 gint ret;
815
816 memset (&evt, 0x00, sizeof (struct v4l2_event));
817 ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &evt);
818 if (ret < 0)
819 {
820 GST_DEBUG_OBJECT (v4l2object, "dqevent failed");
821 return;
822 }
823
824 switch (evt.type)
825 {
826 case V4L2_EVENT_SOURCE_CHANGE:
827 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_SOURCE_CHANGE");
828 break;
829 case V4L2_EVENT_EOS:
830 GST_DEBUG_OBJECT (v4l2object, "Drop GST_V4L2_FLOW_LAST_BUFFER");
831 break;
832 default:
833 break;
834 }
835
836 return;
837}
838
839static void
hanghang.luo419c4a92023-07-14 07:36:07 +0000840gst_aml_v4l2_video_dec_set_fence(GstVideoDecoder *decoder)
hanghang.luo70f07ef2023-07-13 02:23:06 +0000841{
842 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
843 GstStructure *s;
844 GstEvent *event;
845
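    /* Tell downstream how many decoded buffers it may hold at once; the value
     * min_buffers - 2 appears to reserve two capture buffers for the decoder
     * itself (an assumption based on this expression, not a documented contract). */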
hanghang.luo419c4a92023-07-14 07:36:07 +0000846 guint fence_num = self->v4l2capture->min_buffers-2;
hanghang.luo70f07ef2023-07-13 02:23:06 +0000847 s = gst_structure_new ("video_fence","fence_num",G_TYPE_UINT,fence_num,NULL);
848 if (s)
849 {
850 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
851 GST_DEBUG_OBJECT(self,"Send video_fence Event: %"GST_PTR_FORMAT,event);
852 gst_pad_push_event (decoder->srcpad, event);
853 }
854}
855
856static void
hanghang.luo2eec4892023-07-18 06:44:42 +0000857gst_aml_v4l2_video_dec_set_output_status(GstVideoDecoder *decoder,GstVideoInfo info)
858{
859 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
860 GstVideoCodecState *output_state;
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000861 struct v4l2_selection sel;
862 struct v4l2_rect *r = NULL;
863 GstStructure *s;
864 gint width = 0;
865 gint height = 0;
hanghang.luo2eec4892023-07-18 06:44:42 +0000866 output_state = gst_video_decoder_set_output_state(decoder,
867 info.finfo->format, info.width, info.height, self->input_state);
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000868 memset(&sel, 0, sizeof(struct v4l2_selection));
869 sel.type = self->v4l2capture->type;
870 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
871 if (self->v4l2capture->ioctl(self->v4l2capture->video_fd, VIDIOC_G_SELECTION, &sel) >= 0)
872 {
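        /* Use the compose rectangle as the display size, rounded down to even
         * values, presumably to keep width/height aligned for 4:2:0 chroma. */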
873 r = &sel.r;
874 width = (r->width/2)*2;
875 height = (r->height/2)*2;
876 GST_DEBUG_OBJECT(self, "w:%d h:%d ",width,height);
877 }
878 else
 879 GST_DEBUG_OBJECT(self, "VIDIOC_G_SELECTION ioctl failed");
hanghang.luo2eec4892023-07-18 06:44:42 +0000880 if (output_state)
881 {
882 output_state->info.interlace_mode = info.interlace_mode;
883 output_state->allocation_caps =gst_video_info_to_caps(&info);
hanghang.luo2eec4892023-07-18 06:44:42 +0000884 output_state->caps =gst_video_info_to_caps(&info);
hanghang.luo9b60a3c2023-08-01 16:01:47 +0000885 s = gst_caps_get_structure(output_state->caps, 0);
886 if (s)
887 {
888 gst_structure_set(s,"coded_width",G_TYPE_INT,info.width,NULL);
889 gst_structure_set(s,"coded_height",G_TYPE_INT,info.height,NULL);
890 gst_structure_set(s,"width",G_TYPE_INT,width,NULL);
891 gst_structure_set(s,"height",G_TYPE_INT,height,NULL);
892 GST_DEBUG_OBJECT(self, "output_state->caps: %" GST_PTR_FORMAT, output_state->caps);
893 gst_video_codec_state_unref(output_state);
894 }
hanghang.luo2eec4892023-07-18 06:44:42 +0000895 }
896}
897
898static void
xuesong.jiangae1548e2022-05-06 16:38:46 +0800899gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
900{
901 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
902 GstAmlV4l2BufferPool *v4l2_pool;
903 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
904 GstBufferPool *pool;
905 GstVideoCodecFrame *frame;
906 GstBuffer *buffer = NULL;
907 GstFlowReturn ret;
908
909 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
910 {
911 GstVideoInfo info;
xuesong.jiang282ca572023-05-05 09:03:32 +0000912 GstCaps *acquired_caps, *available_caps, *caps, *filter;
 913 GST_DEBUG_OBJECT(self, "waiting for source change event");
 914 /* Wait until the SOURCE_CHANGE event is received to get the right video format */
915 /* Wait until received SOURCE_CHANGE event to get right video format */
916 while (self->v4l2capture->can_wait_event && self->v4l2capture->need_wait_event)
917 {
918 ret = gst_aml_v4l2_object_dqevent(self->v4l2capture);
919 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
920 {
 921 // keep the flush-start event blocked until the capture buffer pool is activated
922 self->is_res_chg = TRUE;
xuesong.jiangae1548e2022-05-06 16:38:46 +0800923 GST_DEBUG_OBJECT(self, "Received source change event");
924 break;
925 }
926 else if (ret == GST_AML_V4L2_FLOW_LAST_BUFFER)
927 {
928 GST_DEBUG_OBJECT(self, "Received eos event");
929 goto beach;
930 }
931 else if (ret != GST_FLOW_OK)
932 {
933 GST_ERROR_OBJECT(self, "dqevent error");
934 goto beach;
935 }
936 }
937 self->v4l2capture->need_wait_event = FALSE;
938
sheng.liu0c77f6c2022-06-17 21:33:20 +0800939 if (TRUE == self->v4l2output->is_svp)
940 {
941 GstPad *peer;
942 GstStructure *s;
943 GstEvent *event;
944
945 peer = gst_pad_get_peer (decoder->srcpad);
946 if (peer)
947 {
hanghang.luo70f07ef2023-07-13 02:23:06 +0000948 s = gst_structure_new_empty ("IS_SVP");
949 if (s)
950 {
951 event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
952 gst_pad_send_event (peer, event);
953 GST_DEBUG_OBJECT(self, "Send SVP Event");
954 }
955 gst_object_unref (peer);
sheng.liu0c77f6c2022-06-17 21:33:20 +0800956 }
957 }
958
sheng.liub56bbc52022-06-21 11:02:33 +0800959 if (self->v4l2capture->need_drop_event)
960 {
961 // drop V4L2_EVENT_SOURCE_CHANGE
962 gst_v4l2_drop_event(self->v4l2capture);
963 self->v4l2capture->need_drop_event = FALSE;
964 }
965
xuesong.jiangae1548e2022-05-06 16:38:46 +0800966 if (!gst_aml_v4l2_object_acquire_format(self->v4l2capture, &info))
967 goto not_negotiated;
hanghang.luo8ec04e92023-10-09 08:14:24 +0000968
xuesong.jiangae1548e2022-05-06 16:38:46 +0800969 /* Create caps from the acquired format, remove the format field */
970 acquired_caps = gst_video_info_to_caps(&info);
971 GST_DEBUG_OBJECT(self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
972 st = gst_caps_get_structure(acquired_caps, 0);
xuesong.jiang282ca572023-05-05 09:03:32 +0000973 gst_structure_remove_fields(st, "format", "colorimetry", "chroma-site", NULL);
974
975 /* Probe currently available pixel formats */
976 available_caps = gst_caps_copy(self->probed_srccaps);
977 GST_DEBUG_OBJECT(self, "Available caps: %" GST_PTR_FORMAT, available_caps);
978
979 /* Replace coded size with visible size, we want to negotiate visible size
980 * with downstream, not coded size. */
981 gst_caps_map_in_place(available_caps, gst_aml_v4l2_video_remove_padding, self);
982
983 filter = gst_caps_intersect_full(available_caps, acquired_caps, GST_CAPS_INTERSECT_FIRST);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800984 caps = gst_caps_copy(filter);
985 gst_caps_set_features_simple(caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
986 gst_caps_append(filter, caps);
987
988 GST_DEBUG_OBJECT(self, "Filtered caps: %" GST_PTR_FORMAT, filter);
989 gst_caps_unref(acquired_caps);
xuesong.jiang282ca572023-05-05 09:03:32 +0000990 gst_caps_unref(available_caps);
xuesong.jiangae1548e2022-05-06 16:38:46 +0800991 caps = gst_pad_peer_query_caps(decoder->srcpad, filter);
992 gst_caps_unref(filter);
993
994 GST_DEBUG_OBJECT(self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
995 if (gst_caps_is_empty(caps))
996 {
997 gst_caps_unref(caps);
998 goto not_negotiated;
999 }
1000
1001 /* Fixate pixel format */
1002 caps = gst_caps_fixate(caps);
1003
1004 GST_DEBUG_OBJECT(self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
1005
1006 /* Try to set negotiated format, on success replace acquired format */
1007 if (gst_aml_v4l2_object_set_format(self->v4l2capture, caps, &error))
1008 gst_video_info_from_caps(&info, caps);
1009 else
1010 gst_aml_v4l2_clear_error(&error);
1011 gst_caps_unref(caps);
hanghang.luo419c4a92023-07-14 07:36:07 +00001012 gst_aml_v4l2_video_dec_set_fence(decoder);
hanghang.luo2eec4892023-07-18 06:44:42 +00001013 gst_aml_v4l2_video_dec_set_output_status(decoder,info);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001014 if (!gst_video_decoder_negotiate(decoder))
1015 {
1016 if (GST_PAD_IS_FLUSHING(decoder->srcpad))
1017 goto flushing;
1018 else
1019 goto not_negotiated;
1020 }
1021
1022 /* Ensure our internal pool is activated */
1023 if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(self->v4l2capture->pool),
1024 TRUE))
1025 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001026
1027 g_mutex_lock(&self->res_chg_lock);
1028 GST_LOG_OBJECT(decoder, "signal resolution changed");
1029 self->is_res_chg = FALSE;
1030 g_cond_signal(&self->res_chg_cond);
1031 g_mutex_unlock(&self->res_chg_lock);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001032 }
1033
1034 GST_LOG_OBJECT(decoder, "Allocate output buffer");
1035
1036 v4l2_pool = GST_AML_V4L2_BUFFER_POOL(self->v4l2capture->pool);
1037
1038 self->output_flow = GST_FLOW_OK;
1039 do
1040 {
 1041 /* We cannot use the base class allocate helper since it takes the internal
 1042 * stream lock. We know that the acquire may need to poll until more frames
 1043 * come in, and holding this lock would prevent that.
 1044 */
1045 pool = gst_video_decoder_get_buffer_pool(decoder);
1046
1047 /* Pool may be NULL if we started going to READY state */
1048 if (pool == NULL)
1049 {
fei.dengbee20862022-06-14 14:59:48 +08001050 GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
xuesong.jiangae1548e2022-05-06 16:38:46 +08001051 ret = GST_FLOW_FLUSHING;
1052 goto beach;
1053 }
1054
1055 ret = gst_buffer_pool_acquire_buffer(pool, &buffer, NULL);
 1056 // calculate a new pts for an interlaced stream
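        // For interleaved content each output buffer appears to carry one field,
        // so the duration is halved and, when the driver repeats or zeroes the
        // timestamp, the PTS is interpolated as last_out_pts + 1/(2*fps),
        // e.g. roughly 16.7 ms per field for 29.97 fps frames.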
hanghang.luo8ec04e92023-10-09 08:14:24 +00001057 if (ret == GST_FLOW_OK && self->v4l2capture->info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED)
fei.dengccc89632022-07-15 19:10:17 +08001058 {
 1059 // if the buffer duration is valid, halve it
1060 if (GST_BUFFER_DURATION_IS_VALID(buffer))
1061 {
1062 GST_BUFFER_DURATION(buffer) = GST_BUFFER_DURATION(buffer)/2;
1063 }
1064 GST_BUFFER_FLAG_UNSET(buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
1065 //reset pts
fei.denga6ae3282022-07-15 19:50:30 +08001066 if (GST_BUFFER_TIMESTAMP (buffer) == 0LL || self->last_out_pts == GST_BUFFER_TIMESTAMP (buffer))
fei.dengccc89632022-07-15 19:10:17 +08001067 {
1068 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d)*2;
1069 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1070 }
1071 }
1072
xuesong.jiangae1548e2022-05-06 16:38:46 +08001073 g_object_unref(pool);
1074
fei.deng9a5cd6e2023-06-30 12:09:18 +00001075 if (ret == GST_FLOW_OK && GST_BUFFER_FLAG_IS_SET(buffer,GST_AML_V4L2_BUFFER_FLAG_LAST_EMPTY)) {
sheng.liubcf036c2022-06-21 15:55:42 +08001076 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_LAST_BUFFER");
sheng.liub56bbc52022-06-21 11:02:33 +08001077 self->v4l2capture->need_drop_event = TRUE;
fei.deng594df4b2023-06-26 07:03:29 +00001078 gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
sheng.liub56bbc52022-06-21 11:02:33 +08001079 goto beach;
1080 }
1081
sheng.liu8d18ed22022-05-26 17:28:15 +08001082 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1083 {
1084 GST_LOG_OBJECT(decoder, "Get GST_AML_V4L2_FLOW_SOURCE_CHANGE");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001085
1086 g_mutex_lock (&self->res_chg_lock);
1087 self->is_res_chg = TRUE;
1088 g_mutex_unlock (&self->res_chg_lock);
1089
sheng.liu8d18ed22022-05-26 17:28:15 +08001090 gst_aml_v4l2_object_stop(self->v4l2capture);
1091 return;
1092 }
1093
fei.dengbee20862022-06-14 14:59:48 +08001094 if (ret != GST_FLOW_OK) {
1095 GST_WARNING_OBJECT(decoder, "gst_buffer_pool_acquire_buffer goto beach ret:%d",ret);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001096 goto beach;
fei.dengbee20862022-06-14 14:59:48 +08001097 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001098
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001099 GST_LOG_OBJECT(decoder, "Process output buffer (switching flow outstanding num:%d)", self->v4l2capture->outstanding_buf_num);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001100 ret = gst_aml_v4l2_buffer_pool_process(v4l2_pool, &buffer);
xuesong.jiang406ee302023-06-28 03:45:22 +00001101
1102 GST_DEBUG_OBJECT(decoder, "send pts:%lld - %" GST_TIME_FORMAT, GST_BUFFER_PTS(buffer), GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
1103 g_signal_emit (self, g_signals[SIGNAL_DECODED_PTS], 0, GST_BUFFER_PTS(buffer));
1104
xuesong.jiangae1548e2022-05-06 16:38:46 +08001105 if (ret == GST_AML_V4L2_FLOW_SOURCE_CHANGE)
1106 {
1107 gst_aml_v4l2_object_stop(self->v4l2capture);
1108 return;
1109 }
1110
1111 } while (ret == GST_AML_V4L2_FLOW_CORRUPTED_BUFFER);
1112
1113 if (ret != GST_FLOW_OK)
1114 goto beach;
1115
fei.dengbee20862022-06-14 14:59:48 +08001116 frame = gst_aml_v4l2_video_dec_get_right_frame(decoder, GST_BUFFER_TIMESTAMP (buffer));
xuesong.jiangae1548e2022-05-06 16:38:46 +08001117 if (frame)
1118 {
zengliang.li32cb11e2022-11-24 12:10:26 +08001119 if (!GST_CLOCK_TIME_IS_VALID(frame->pts))
1120 {
zengliang.li92ff6822023-06-06 07:12:52 +00001121 if (!GST_CLOCK_TIME_IS_VALID(self->last_out_pts))
1122 {
1123 if (GST_CLOCK_TIME_IS_VALID(frame->dts))
1124 {
1125 GST_BUFFER_TIMESTAMP(buffer) = frame->dts;
1126 }
1127 else
1128 {
 1129 GST_WARNING_OBJECT (decoder, "no baseline available to calculate pts");
1130 goto beach;
1131 }
1132 }
1133 else
1134 {
1135 double rate = ((double)self->input_state->info.fps_n/(double)self->input_state->info.fps_d);
1136 GST_BUFFER_TIMESTAMP(buffer) = self->last_out_pts + 1000000000LL/rate;
1137 }
zengliang.li32cb11e2022-11-24 12:10:26 +08001138 }
fei.dengccc89632022-07-15 19:10:17 +08001139 self->last_out_pts = GST_BUFFER_TIMESTAMP(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001140 frame->output_buffer = buffer;
fei.dengccc89632022-07-15 19:10:17 +08001141 frame->pts = GST_BUFFER_TIMESTAMP(buffer);
1142 frame->duration = GST_BUFFER_DURATION(buffer);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001143 buffer = NULL;
1144 ret = gst_video_decoder_finish_frame(decoder, frame);
1145
1146 if (ret != GST_FLOW_OK)
1147 goto beach;
1148 }
1149 else
1150 {
1151 GST_WARNING_OBJECT(decoder, "Decoder is producing too many buffers");
1152 gst_buffer_unref(buffer);
1153 }
1154
1155 return;
1156 /* ERRORS */
1157not_negotiated:
1158{
1159 GST_ERROR_OBJECT(self, "not negotiated");
1160 ret = GST_FLOW_NOT_NEGOTIATED;
1161 goto beach;
1162}
1163activate_failed:
1164{
1165 GST_ERROR_OBJECT(self, "Buffer pool activation failed");
1166 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1167 (_("Failed to allocate required memory.")),
1168 ("Buffer pool activation failed"));
1169 ret = GST_FLOW_ERROR;
1170 goto beach;
1171}
1172flushing:
1173{
1174 ret = GST_FLOW_FLUSHING;
1175 goto beach;
1176}
1177beach:
1178 GST_DEBUG_OBJECT(decoder, "Leaving output thread: %s",
1179 gst_flow_get_name(ret));
fei.deng2a06e042023-10-10 03:09:45 +00001180 if (self->is_res_chg) {
1181 //unblock flush start event
1182 g_mutex_lock(&self->res_chg_lock);
1183 self->is_res_chg = FALSE;
1184 g_cond_signal(&self->res_chg_cond);
1185 g_mutex_unlock(&self->res_chg_lock);
1186 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001187 gst_buffer_replace(&buffer, NULL);
1188 self->output_flow = ret;
1189 gst_aml_v4l2_object_unlock(self->v4l2output);
1190 gst_pad_pause_task(decoder->srcpad);
1191}
1192
1193static GstFlowReturn
1194gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
1195 GstVideoCodecFrame *frame)
1196{
1197 GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
1198 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1199 GstBufferPool *pool = GST_BUFFER_POOL(self->v4l2output->pool);
1200 GstFlowReturn ret = GST_FLOW_OK;
1201 gboolean processed = FALSE;
1202 GstBuffer *tmp;
1203 GstTaskState task_state;
1204 GstCaps *caps;
1205
1206 GST_DEBUG_OBJECT(self, "Handling frame %d", frame->system_frame_number);
1207
1208 if (G_UNLIKELY(!g_atomic_int_get(&self->active)))
1209 goto flushing;
1210
1211 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2output)))
1212 {
1213 if (!self->input_state)
1214 goto not_negotiated;
1215 if (!gst_aml_v4l2_object_set_format(self->v4l2output, self->input_state->caps,
1216 &error))
1217 goto not_negotiated;
1218 }
1219
1220 if (G_UNLIKELY(!GST_AML_V4L2_IS_ACTIVE(self->v4l2capture)))
1221 {
1222 GstBuffer *codec_data;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001223 GstCapsFeatures *features = NULL;
1224
1225 features = gst_caps_get_features(self->input_state->caps, 0);
1226 if (features && gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_DMABUF))
1227 {
1228 GST_DEBUG_OBJECT(self, "Is SVP");
1229 self->v4l2output->is_svp = TRUE;
1230 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001231
1232 GST_DEBUG_OBJECT(self, "Sending header");
1233
1234 codec_data = self->input_state->codec_data;
1235
1236 /* We are running in byte-stream mode, so we don't know the headers, but
1237 * we need to send something, otherwise the decoder will refuse to
 1238 * initialize.
1239 */
1240 if (codec_data)
1241 {
1242 gst_buffer_ref(codec_data);
1243 }
1244 else
1245 {
1246 codec_data = gst_buffer_ref(frame->input_buffer);
1247 processed = TRUE;
1248 }
1249
1250 /* Ensure input internal pool is active */
1251 if (!gst_buffer_pool_is_active(pool))
1252 {
1253 GstStructure *config = gst_buffer_pool_get_config(pool);
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001254 // guint min = MAX(self->v4l2output->min_buffers, GST_AML_V4L2_MIN_BUFFERS);
1255 // guint max = VIDEO_MAX_FRAME;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001256 // gst_buffer_pool_config_set_params (config, self->input_state->caps,
1257 // self->v4l2output->info.size, min, max);
1258 gst_buffer_pool_config_set_params(config, self->input_state->caps, self->v4l2output->info.size, self->v4l2output->min_buffers, self->v4l2output->min_buffers);
1259
1260 /* There is no reason to refuse this config */
1261 if (!gst_buffer_pool_set_config(pool, config))
1262 goto activate_failed;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001263 GST_DEBUG_OBJECT(self, "setting output pool config to %" GST_PTR_FORMAT, config);
xuesong.jiangae1548e2022-05-06 16:38:46 +08001264
1265 if (!gst_buffer_pool_set_active(pool, TRUE))
1266 goto activate_failed;
1267 }
1268
1269 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
1270 ret =
1271 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
hanghang.luoc54208e2023-09-22 02:43:54 +00001272 self->codec_data_inject = TRUE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001273 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1274
1275 gst_buffer_unref(codec_data);
1276
1277 /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
1278 * in the compose rectangle. gst_aml_v4l2_object_acquire_format() checks both
 1279 * and returns the visible size as width/height and the coded size as
1280 * padding. */
1281 }
1282
1283 task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
1284 if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
1285 {
1286 /* It's possible that the processing thread stopped due to an error */
1287 if (self->output_flow != GST_FLOW_OK &&
1288 self->output_flow != GST_FLOW_FLUSHING)
1289 {
1290 GST_DEBUG_OBJECT(self, "Processing loop stopped with error, leaving");
1291 ret = self->output_flow;
1292 goto drop;
1293 }
1294
1295 GST_DEBUG_OBJECT(self, "Starting decoding thread");
1296
1297 /* Start the processing task, when it quits, the task will disable input
1298 * processing to unlock input if draining, or prevent potential block */
1299 self->output_flow = GST_FLOW_FLUSHING;
1300 if (!gst_pad_start_task(decoder->srcpad,
1301 (GstTaskFunction)gst_aml_v4l2_video_dec_loop, self, NULL))
1302 goto start_task_failed;
1303 }
1304
1305 if (!processed)
1306 {
1307 GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
hanghang.luoc54208e2023-09-22 02:43:54 +00001308 if (!self->codec_data_inject && self->input_state->codec_data)
1309 {
1310 ret = gst_aml_v4l2_buffer_pool_process
1311 (GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &self->input_state->codec_data);
1312 self->codec_data_inject = TRUE;
1313 if (ret != GST_FLOW_OK)
1314 goto send_codec_failed;
1315 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001316 ret =
1317 gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
1318 GST_VIDEO_DECODER_STREAM_LOCK(decoder);
1319
1320 if (ret == GST_FLOW_FLUSHING)
1321 {
1322 if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
1323 GST_TASK_STARTED)
1324 ret = self->output_flow;
1325 goto drop;
1326 }
1327 else if (ret != GST_FLOW_OK)
1328 {
1329 goto process_failed;
1330 }
1331 }
1332
 1333 /* No need to keep the input around */
1334 tmp = frame->input_buffer;
1335 frame->input_buffer = gst_buffer_new();
1336 gst_buffer_copy_into(frame->input_buffer, tmp,
1337 GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
1338 GST_BUFFER_COPY_META,
1339 0, 0);
1340 gst_buffer_unref(tmp);
1341
1342 gst_video_codec_frame_unref(frame);
1343 return ret;
1344
1345 /* ERRORS */
hanghang.luoc54208e2023-09-22 02:43:54 +00001346send_codec_failed:
 1347 GST_ERROR_OBJECT(self, "sending codec_data failed, ret is %d", ret);
1348 goto drop;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001349not_negotiated:
1350{
1351 GST_ERROR_OBJECT(self, "not negotiated");
1352 ret = GST_FLOW_NOT_NEGOTIATED;
1353 gst_aml_v4l2_error(self, &error);
1354 goto drop;
1355}
1356activate_failed:
1357{
1358 GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
1359 (_("Failed to allocate required memory.")),
1360 ("Buffer pool activation failed"));
1361 ret = GST_FLOW_ERROR;
1362 goto drop;
1363}
1364flushing:
1365{
1366 ret = GST_FLOW_FLUSHING;
1367 goto drop;
1368}
1369
1370start_task_failed:
1371{
1372 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1373 (_("Failed to start decoding thread.")), (NULL));
1374 ret = GST_FLOW_ERROR;
1375 goto drop;
1376}
1377process_failed:
1378{
1379 GST_ELEMENT_ERROR(self, RESOURCE, FAILED,
1380 (_("Failed to process frame.")),
1381 ("Maybe be due to not enough memory or failing driver"));
1382 ret = GST_FLOW_ERROR;
1383 goto drop;
1384}
1385drop:
1386{
1387 gst_video_decoder_drop_frame(decoder, frame);
1388 return ret;
1389}
1390}
1391
1392static gboolean
1393gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
1394 GstQuery *query)
1395{
1396 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1397 GstClockTime latency;
1398 gboolean ret = FALSE;
1399
1400 if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
1401 ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
1402
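    /* Report latency as min_buffers * per-frame duration, e.g. 8 buffers at a
     * 40 ms (25 fps) frame duration would report 320 ms (illustrative values). */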
1403 if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
1404 {
1405 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
1406 GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
1407 self->v4l2capture->min_buffers, self->v4l2capture->duration);
1408 gst_video_decoder_set_latency(decoder, latency, latency);
1409 }
1410 else
1411 {
1412 GST_WARNING_OBJECT(self, "Duration invalid, not setting latency");
1413 }
1414
1415 return ret;
1416}
1417
1418static gboolean
1419gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
1420{
1421 gboolean ret = TRUE;
1422 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1423
1424 switch (GST_QUERY_TYPE(query))
1425 {
1426 case GST_QUERY_CAPS:
1427 {
1428 GstCaps *filter, *result = NULL;
1429 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
1430
1431 gst_query_parse_caps(query, &filter);
1432
1433 if (self->probed_srccaps)
1434 result = gst_caps_ref(self->probed_srccaps);
1435 else
1436 result = gst_pad_get_pad_template_caps(pad);
1437
1438 if (filter)
1439 {
1440 GstCaps *tmp = result;
1441 result =
1442 gst_caps_intersect_full(filter, tmp, GST_CAPS_INTERSECT_FIRST);
1443 gst_caps_unref(tmp);
1444 }
1445
1446 GST_DEBUG_OBJECT(self, "Returning src caps %" GST_PTR_FORMAT, result);
1447
1448 gst_query_set_caps_result(query, result);
1449 gst_caps_unref(result);
1450 break;
1451 }
1452
1453 default:
1454 ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
1455 break;
1456 }
1457
1458 return ret;
1459}
1460
1461static GstCaps *
1462gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
1463{
1464 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1465 GstCaps *result;
1466
1467 result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
1468 filter);
1469
1470 GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
1471
1472 return result;
1473}
1474
1475static gboolean
1476gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
1477{
1478 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
1479 gboolean ret;
1480 GstEventType type = GST_EVENT_TYPE(event);
1481
1482 switch (type)
1483 {
xuesong.jiang406ee302023-06-28 03:45:22 +00001484 case GST_EVENT_STREAM_START:
1485 {
1486 GstStructure *s;
1487 GstEvent *event;
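        /* Advertise the "decoded-pts" signal to upstream elements: the custom
         * upstream event carries this element's pointer and the signal name so
         * an interested upstream element can g_signal_connect() to it. */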
1488 GST_DEBUG_OBJECT(self, "new private event");
1489 s = gst_structure_new("private_signal", "obj_ptr", G_TYPE_POINTER, self, "sig_name", G_TYPE_STRING, "decoded-pts", NULL);
1490 event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
1491 GST_DEBUG_OBJECT(self, "before Send private_signal Event :%p", event);
1492 gst_pad_push_event (decoder->sinkpad, event);
1493 GST_DEBUG_OBJECT(self, "after Send private_signal Event :%p", event);
1494 break;
1495 }
xuesong.jiangae1548e2022-05-06 16:38:46 +08001496 case GST_EVENT_FLUSH_START:
1497 GST_DEBUG_OBJECT(self, "flush start");
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001498
1499 g_mutex_lock (&self->res_chg_lock);
1500 while (self->is_res_chg)
1501 {
1502 GST_LOG_OBJECT(decoder, "wait resolution change finish");
1503 g_cond_wait(&self->res_chg_cond, &self->res_chg_lock);
1504 }
1505 g_mutex_unlock (&self->res_chg_lock);
1506
zengliang.li92ff6822023-06-06 07:12:52 +00001507 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001508 gst_aml_v4l2_object_unlock(self->v4l2output);
1509 gst_aml_v4l2_object_unlock(self->v4l2capture);
1510 break;
1511 default:
1512 break;
1513 }
1514
1515 ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
1516
1517 switch (type)
1518 {
1519 case GST_EVENT_FLUSH_START:
1520 /* The processing thread should stop now, wait for it */
1521 gst_pad_stop_task(decoder->srcpad);
hanghang.luoc54208e2023-09-22 02:43:54 +00001522 self->codec_data_inject = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001523 GST_DEBUG_OBJECT(self, "flush start done");
1524 break;
1525 default:
1526 break;
1527 }
1528
1529 return ret;
1530}
1531
1532static GstStateChangeReturn
1533gst_aml_v4l2_video_dec_change_state(GstElement *element,
1534 GstStateChange transition)
1535{
1536 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
1537 GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
1538
1539 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
1540 {
1541 g_atomic_int_set(&self->active, FALSE);
1542 gst_aml_v4l2_object_unlock(self->v4l2output);
1543 gst_aml_v4l2_object_unlock(self->v4l2capture);
1544 gst_pad_stop_task(decoder->srcpad);
1545 }
1546
1547 return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
1548}
1549
1550static void
1551gst_aml_v4l2_video_dec_dispose(GObject *object)
1552{
1553 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1554
1555 gst_caps_replace(&self->probed_sinkcaps, NULL);
1556 gst_caps_replace(&self->probed_srccaps, NULL);
1557
1558 G_OBJECT_CLASS(parent_class)->dispose(object);
1559}
1560
1561static void
1562gst_aml_v4l2_video_dec_finalize(GObject *object)
1563{
1564 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(object);
1565
1566 gst_aml_v4l2_object_destroy(self->v4l2capture);
1567 gst_aml_v4l2_object_destroy(self->v4l2output);
1568
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001569 g_mutex_clear(&self->res_chg_lock);
1570 g_cond_clear(&self->res_chg_cond);
1571
xuesong.jiang61ea8012022-05-12 15:38:17 +08001572#if GST_IMPORT_LGE_PROP
1573 if (self->lge_ctxt)
1574 {
1575 if (self->lge_ctxt->app_type)
1576 g_free(self->lge_ctxt->app_type);
1577 if (self->lge_ctxt->res_info.coretype)
1578 g_free(self->lge_ctxt->res_info.coretype);
1579 free(self->lge_ctxt);
1580 }
1581
1582#endif
1583
xuesong.jiangae1548e2022-05-06 16:38:46 +08001584 G_OBJECT_CLASS(parent_class)->finalize(object);
1585}
1586
1587static void
1588gst_aml_v4l2_video_dec_init(GstAmlV4l2VideoDec *self)
1589{
1590 /* V4L2 object are created in subinstance_init */
zengliang.li92ff6822023-06-06 07:12:52 +00001591 self->last_out_pts = GST_CLOCK_TIME_NONE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001592 self->is_secure_path = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001593 self->is_res_chg = FALSE;
hanghang.luoc54208e2023-09-22 02:43:54 +00001594 self->codec_data_inject = FALSE;
xuesong.jiangc5dac0f2023-02-01 14:42:24 +08001595 g_mutex_init(&self->res_chg_lock);
1596 g_cond_init(&self->res_chg_cond);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001597#if GST_IMPORT_LGE_PROP
1598 self->lge_ctxt = malloc(sizeof(GstAmlV4l2VideoDecLgeCtxt));
1599 memset(self->lge_ctxt, 0, sizeof(GstAmlV4l2VideoDecLgeCtxt));
1600#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001601}
1602
1603static void
1604gst_aml_v4l2_video_dec_subinstance_init(GTypeInstance *instance, gpointer g_class)
1605{
1606 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1607 GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
1608 GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
1609
1610 gst_video_decoder_set_packetized(decoder, TRUE);
1611
1612 self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1613 GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
1614 V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
1615 gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
1616 self->v4l2output->no_initial_format = TRUE;
1617 self->v4l2output->keep_aspect = FALSE;
sheng.liu0c77f6c2022-06-17 21:33:20 +08001618 self->v4l2output->is_svp = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001619
1620 self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
1621 GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
1622 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
1623 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
1624 self->v4l2capture->need_wait_event = TRUE;
sheng.liub56bbc52022-06-21 11:02:33 +08001625 self->v4l2capture->need_drop_event = FALSE;
xuesong.jiangae1548e2022-05-06 16:38:46 +08001626}
1627
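/* Class init: hook up the GObject property handling, the GstVideoDecoder
 * virtual methods, the state-change override and the "decoded-pts" signal. */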
1628static void
1629gst_aml_v4l2_video_dec_class_init(GstAmlV4l2VideoDecClass *klass)
1630{
1631 GstElementClass *element_class;
1632 GObjectClass *gobject_class;
1633 GstVideoDecoderClass *video_decoder_class;
1634
1635 parent_class = g_type_class_peek_parent(klass);
1636
1637 element_class = (GstElementClass *)klass;
1638 gobject_class = (GObjectClass *)klass;
1639 video_decoder_class = (GstVideoDecoderClass *)klass;
1640
1641 GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
1642 "AML V4L2 Video Decoder");
1643
1644 gobject_class->dispose = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_dispose);
1645 gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finalize);
1646 gobject_class->set_property =
1647 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_property);
1648 gobject_class->get_property =
1649 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_get_property);
1650
1651 video_decoder_class->open = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_open);
1652 video_decoder_class->close = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_close);
1653 video_decoder_class->start = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_start);
1654 video_decoder_class->stop = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_stop);
1655 video_decoder_class->finish = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_finish);
1656 video_decoder_class->flush = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_flush);
1657 video_decoder_class->drain = GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_drain);
1658 video_decoder_class->set_format =
1659 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_set_format);
1660 video_decoder_class->negotiate =
1661 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_negotiate);
1662 video_decoder_class->decide_allocation =
1663 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_decide_allocation);
1664 /* FIXME propose_allocation or not ? */
1665 video_decoder_class->handle_frame =
1666 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_handle_frame);
1667 video_decoder_class->getcaps =
1668 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_getcaps);
1669 video_decoder_class->src_query =
1670 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_src_query);
1671 video_decoder_class->sink_event =
1672 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_sink_event);
1673
1674 element_class->change_state =
1675 GST_DEBUG_FUNCPTR(gst_aml_v4l2_video_dec_change_state);
1676
xuesong.jiang406ee302023-06-28 03:45:22 +00001677 g_signals[SIGNAL_DECODED_PTS] = g_signal_new ("decoded-pts",
1678 G_TYPE_FROM_CLASS(GST_ELEMENT_CLASS(klass)),
1679 G_SIGNAL_RUN_LAST,
1680 0, /* class offset */
1681 NULL, /* accumulator */
1682 NULL, /* accu data */
1683 g_cclosure_marshal_generic,
1684 G_TYPE_NONE,
1685 1,
1686 G_TYPE_UINT64);
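    /* Illustrative example (not part of this file): an application can track
     * decode progress by connecting to "decoded-pts"; "on_decoded_pts" below is
     * a hypothetical callback name:
     *
     *   static void on_decoded_pts(GstElement *dec, guint64 pts, gpointer data)
     *   {
     *       g_print("decoded pts: %" G_GUINT64_FORMAT "\n", pts);
     *   }
     *   ...
     *   g_signal_connect(decoder, "decoded-pts", G_CALLBACK(on_decoded_pts), NULL);
     */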
1687
xuesong.jiangae1548e2022-05-06 16:38:46 +08001688 gst_aml_v4l2_object_install_m2m_properties_helper(gobject_class);
xuesong.jiang61ea8012022-05-12 15:38:17 +08001689#if GST_IMPORT_LGE_PROP
1690 gst_aml_v4l2_video_dec_install_lge_properties_helper(gobject_class);
1691#endif
xuesong.jiangae1548e2022-05-06 16:38:46 +08001692}
1693
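/* Per-codec subclass init: consume the probe-time cdata, installing the sink
 * and src pad templates plus the element metadata for this particular codec. */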
1694static void
1695gst_aml_v4l2_video_dec_subclass_init(gpointer g_class, gpointer data)
1696{
1697 GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
1698 GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
1699 GstAmlV4l2VideoDecCData *cdata = data;
1700
1701 klass->default_device = cdata->device;
1702
1703    /* Note: gst_pad_template_new() takes the floating ref from the caps */
1704 gst_element_class_add_pad_template(element_class,
1705 gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1706 cdata->sink_caps));
1707 gst_element_class_add_pad_template(element_class,
1708 gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1709 cdata->src_caps));
1710
1711 gst_element_class_set_metadata(element_class, cdata->longname,
1712 "Codec/Decoder/Video/Hardware", cdata->description,
1713 "Xuesong Jiang <Xuesong.Jiang@amlogic.com>");
1714
1715 gst_caps_unref(cdata->sink_caps);
1716 gst_caps_unref(cdata->src_caps);
1717 g_free(cdata);
1718}
1719
1720/* Probing functions */
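/* Returns TRUE when the device exposes encoded (codec) caps on its sink side
 * and raw video caps on its src side, i.e. it behaves like a decoder. */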
1721gboolean
1722gst_aml_v4l2_is_video_dec(GstCaps *sink_caps, GstCaps *src_caps)
1723{
1724 gboolean ret = FALSE;
1725
1726 if (gst_caps_is_subset(sink_caps, gst_aml_v4l2_object_get_codec_caps()) && gst_caps_is_subset(src_caps, gst_aml_v4l2_object_get_raw_caps()))
1727 ret = TRUE;
1728
1729 return ret;
1730}
1731
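/* Derive the element longname/description from the codec structure name and
 * return a unique type name such as "amlv4l2h264dec" (falling back to
 * "amlv4l2<basename><codec>dec" when that name is already registered). */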
1732static gchar *
1733gst_aml_v4l2_video_dec_set_metadata(GstStructure *s, GstAmlV4l2VideoDecCData *cdata,
1734 const gchar *basename)
1735{
1736 gchar *codec_name = NULL;
1737 gchar *type_name = NULL;
1738 gboolean got_value = FALSE;
1739
1740#define SET_META(codec) \
1741 G_STMT_START \
1742 { \
1743 cdata->longname = "AML V4L2 " codec " Decoder"; \
1744 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1745 codec_name = g_ascii_strdown(codec, -1); \
1746 } \
1747 G_STMT_END
1748
1749 if (gst_structure_has_name(s, "image/jpeg"))
1750 {
1751 SET_META("JPEG");
1752 }
1753    else if (gst_structure_has_name(s, "video/mpeg"))
1754    {
1755        gint mpegversion = 0;
1756        const GValue *versions = NULL;
1757        got_value = gst_structure_get_int(s, "mpegversion", &mpegversion);
1758        if (FALSE == got_value)
1759        {
1760            /* "mpegversion" may also be a list such as (int){ 1, 2 }; use its first entry */
1761            versions = gst_structure_get_value(s, "mpegversion");
1762            if (versions != NULL && GST_VALUE_HOLDS_LIST(versions) && gst_value_list_get_size(versions) > 0)
1763                mpegversion = g_value_get_int(gst_value_list_get_value(versions, 0));
1764        }
1765
1766        if (1 == mpegversion || 2 == mpegversion)
1767        {
1768            SET_META("MPEG2");
1769        }
1770        else
1771        {
1772            SET_META("MPEG4");
1773        }
1774    }
1775 else if (gst_structure_has_name(s, "video/x-h263"))
1776 {
1777 SET_META("H263");
1778 }
1779 else if (gst_structure_has_name(s, "video/x-fwht"))
1780 {
1781 SET_META("FWHT");
1782 }
1783 else if (gst_structure_has_name(s, "video/x-h264"))
1784 {
1785 SET_META("H264");
1786 }
1787 else if (gst_structure_has_name(s, "video/x-h265"))
1788 {
1789 SET_META("H265");
1790 }
1791 else if (gst_structure_has_name(s, "video/x-wmv"))
1792 {
1793 SET_META("VC1");
1794 }
1795 else if (gst_structure_has_name(s, "video/x-vp8"))
1796 {
1797 SET_META("VP8");
1798 }
1799 else if (gst_structure_has_name(s, "video/x-vp9"))
1800 {
1801 SET_META("VP9");
1802 }
1803 else if (gst_structure_has_name(s, "video/x-av1"))
1804 {
1805 SET_META("AV1");
1806 }
1807 else if (gst_structure_has_name(s, "video/x-bayer"))
1808 {
1809 SET_META("BAYER");
1810 }
1811 else if (gst_structure_has_name(s, "video/x-sonix"))
1812 {
1813 SET_META("SONIX");
1814 }
1815 else if (gst_structure_has_name(s, "video/x-pwc1"))
1816 {
1817 SET_META("PWC1");
1818 }
1819 else if (gst_structure_has_name(s, "video/x-pwc2"))
1820 {
1821 SET_META("PWC2");
1822 }
1823 else
1824 {
1825        /* This code should be kept in sync with the CODEC formats exposed by
1826         * gstamlv4l2object.c. This warning only occurs if we forget to also
1827         * add a format here. */
1828 gchar *s_str = gst_structure_to_string(s);
1829 g_warning("Missing fixed name mapping for caps '%s', this is a GStreamer "
1830        g_warning("Missing fixed name mapping for caps '%s'; this is a GStreamer "
1831                  "bug, please report it at https://bugs.gnome.org",
1832 g_free(s_str);
1833 }
1834
1835 if (codec_name)
1836 {
1837 type_name = g_strdup_printf("amlv4l2%sdec", codec_name);
1838 if (g_type_from_name(type_name) != 0)
1839 {
1840 g_free(type_name);
1841 type_name = g_strdup_printf("amlv4l2%s%sdec", basename, codec_name);
1842 }
1843
1844 g_free(codec_name);
1845 }
1846
1847 return type_name;
1848#undef SET_META
1849}
1850
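/* Register one decoder element per codec structure found in the probed sink
 * caps; each generated subclass carries its own device path and caps in cdata. */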
1851void gst_aml_v4l2_video_dec_register(GstPlugin *plugin, const gchar *basename,
1852 const gchar *device_path, GstCaps *sink_caps, GstCaps *src_caps)
1853{
1854 gint i;
1855
1856 for (i = 0; i < gst_caps_get_size(sink_caps); i++)
1857 {
1858 GstAmlV4l2VideoDecCData *cdata;
1859 GstStructure *s;
1860 GTypeQuery type_query;
1861 GTypeInfo type_info = {
1862 0,
1863 };
1864 GType type, subtype;
1865 gchar *type_name;
1866
1867 s = gst_caps_get_structure(sink_caps, i);
1868
1869 cdata = g_new0(GstAmlV4l2VideoDecCData, 1);
1870 cdata->device = g_strdup(device_path);
1871 cdata->sink_caps = gst_caps_new_empty();
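        /* The same structure is appended twice on purpose: the first copy is
         * tagged with the memory:DMABUF caps feature below, while the second
         * copy keeps plain system memory. The src caps get the same dual layout. */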
1872 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1873 gst_caps_append_structure(cdata->sink_caps, gst_structure_copy(s));
1874 gst_caps_set_features(cdata->sink_caps, 0, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1875 cdata->src_caps = gst_caps_copy(src_caps);
1876 gst_caps_set_features_simple(cdata->src_caps, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_DMABUF));
1877 gst_caps_append(cdata->src_caps, gst_caps_copy(src_caps));
1878 type_name = gst_aml_v4l2_video_dec_set_metadata(s, cdata, basename);
1879
1880 /* Skip over if we hit an unmapped type */
1881 if (!type_name)
1882 {
1883 g_free(cdata);
1884 continue;
1885 }
1886
1887 type = gst_aml_v4l2_video_dec_get_type();
1888 g_type_query(type, &type_query);
1889 memset(&type_info, 0, sizeof(type_info));
1890 type_info.class_size = type_query.class_size;
1891 type_info.instance_size = type_query.instance_size;
1892 type_info.class_init = gst_aml_v4l2_video_dec_subclass_init;
1893 type_info.class_data = cdata;
1894 type_info.instance_init = gst_aml_v4l2_video_dec_subinstance_init;
1895
1896 subtype = g_type_register_static(type, type_name, &type_info, 0);
1897 if (!gst_element_register(plugin, type_name, GST_RANK_PRIMARY + 1,
1898 subtype))
1899            GST_WARNING("Failed to register element '%s'", type_name);
1900
1901 g_free(type_name);
1902 }
1903}
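
/* Illustrative example (not part of this file): probing code in the plugin
 * might call the registration entry point like this once a decoder device has
 * been found; the caps variables and device path are hypothetical:
 *
 *   if (gst_aml_v4l2_is_video_dec(probed_sink_caps, probed_src_caps))
 *       gst_aml_v4l2_video_dec_register(plugin, "video", "/dev/video26",
 *           probed_sink_caps, probed_src_caps);
 */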
xuesong.jiang61ea8012022-05-12 15:38:17 +08001904
1905#if GST_IMPORT_LGE_PROP
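/* Install the LGE-specific element properties (resource-info, decoded-size,
 * undecoded-size, app-type, clip-mode) guarded by GST_IMPORT_LGE_PROP. */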
1906static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class)
1907{
1908 g_object_class_install_property(gobject_class, LGE_RESOURCE_INFO,
1909        g_param_spec_boxed("resource-info", "resource-info",
1910            "After acquisition of H/W resources is completed, the allocated resource information must be delivered to the decoder and the sink",
1911            GST_TYPE_STRUCTURE,
1912            G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1913
1914 g_object_class_install_property(gobject_class, LGE_DECODE_SIZE,
1915 g_param_spec_uint64("decoded-size", "decoded-size",
1916            "Total number of bytes of video ES the decoder element has decoded since the pipeline was constructed or last flushed.",
1917 0, G_MAXUINT64,
1918 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1919
1920 g_object_class_install_property(gobject_class, LGE_UNDECODE_SIZE,
1921 g_param_spec_uint64("undecoded-size", "undecoded-size",
1922            "Total number of bytes queued to the video decoder element that have not been decoded yet.",
1923 0, G_MAXUINT64,
1924 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1925
1926 g_object_class_install_property(gobject_class, LGE_APP_TYPE,
1927 g_param_spec_string("app-type", "app-type",
1928            "Set the application type.",
1929 "default_app",
1930 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1931
1932 g_object_class_install_property(gobject_class, LGE_CLIP_MODE,
1933 g_param_spec_boolean("clip-mode", "clip-mode",
1934            "When seeking, content plays faster for a while so that frames can be skipped.",
1935 FALSE,
1936 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
1937}
1938#endif