videodec: CB1 move base class to amlv4l2dec git  [1/1]

PD#SWPL-181702

Problem:
The video decoder base class is not maintained in our amlv4l2dec git.

Solution:
Move the video decoder base class into our amlv4l2dec git and switch this
element to the GstAml-prefixed base class API (GstAmlVideoDecoder,
GstAmlVideoCodecFrame, gst_aml_video_decoder_*).

Verify:
AH212
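
Note: the rename in the diff below is mechanical; every reference to the
upstream GstVideoDecoder base class and its helpers is switched to the
GstAml-prefixed equivalents now hosted in the amlv4l2dec git. A minimal
sketch of what a subclass vfunc looks like after the move, assuming the
relocated base class header exports the same API under the new names
(the my_dec element below is hypothetical, for illustration only):

    /* Hypothetical subclass vfunc: only the type and helper names change,
     * the semantics stay those of the stock GstVideoDecoder base class. */
    static GstFlowReturn
    my_dec_handle_frame(GstAmlVideoDecoder *decoder,
                        GstAmlVideoCodecFrame *frame)
    {
        /* gst_aml_video_decoder_finish_frame() is the relocated counterpart
         * of gst_video_decoder_finish_frame(); it consumes the frame ref. */
        return gst_aml_video_decoder_finish_frame(decoder, frame);
    }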

Signed-off-by: le.han <le.han@amlogic.com>
Change-Id: I719c1160ecd7c125649f9c0d978eb14c769ddb93
diff --git a/src/gstamlv4l2videodec.c b/src/gstamlv4l2videodec.c
index d6243c2..f35590f 100644
--- a/src/gstamlv4l2videodec.c
+++ b/src/gstamlv4l2videodec.c
@@ -37,22 +37,22 @@
 GST_DEBUG_CATEGORY_STATIC(gst_aml_v4l2_video_dec_debug);
 #define GST_CAT_DEFAULT gst_aml_v4l2_video_dec_debug
 
-#ifdef GST_VIDEO_DECODER_STREAM_LOCK
-#undef GST_VIDEO_DECODER_STREAM_LOCK
-#define GST_VIDEO_DECODER_STREAM_LOCK(decoder)                      \
+#ifdef GST_AML_VIDEO_DECODER_STREAM_LOCK
+#undef GST_AML_VIDEO_DECODER_STREAM_LOCK
+#define GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder)                      \
     {                                                               \
         GST_TRACE("aml v4l2 dec locking");                           \
-        g_rec_mutex_lock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
+        g_rec_mutex_lock(&GST_AML_VIDEO_DECODER(decoder)->stream_lock); \
         GST_TRACE("aml v4l2 dec locked");                            \
     }
 #endif
 
-#ifdef GST_VIDEO_DECODER_STREAM_UNLOCK
-#undef GST_VIDEO_DECODER_STREAM_UNLOCK
-#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder)                      \
+#ifdef GST_AML_VIDEO_DECODER_STREAM_UNLOCK
+#undef GST_AML_VIDEO_DECODER_STREAM_UNLOCK
+#define GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder)                      \
     {                                                                 \
         GST_TRACE("aml v4l2 dec unlocking");                           \
-        g_rec_mutex_unlock(&GST_VIDEO_DECODER(decoder)->stream_lock); \
+        g_rec_mutex_unlock(&GST_AML_VIDEO_DECODER(decoder)->stream_lock); \
         GST_TRACE("aml v4l2 dec unlocked");                            \
     }
 #endif
@@ -117,14 +117,14 @@
 
 #define gst_aml_v4l2_video_dec_parent_class parent_class
 G_DEFINE_ABSTRACT_TYPE(GstAmlV4l2VideoDec, gst_aml_v4l2_video_dec,
-                       GST_TYPE_VIDEO_DECODER);
+                       GST_AML_TYPE_VIDEO_DECODER);
 
-static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder);
+static GstFlowReturn gst_aml_v4l2_video_dec_finish(GstAmlVideoDecoder *decoder);
 #if GST_IMPORT_LGE_PROP
 static void gst_aml_v4l2_video_dec_install_lge_properties_helper(GObjectClass *gobject_class);
 #endif
-static GstClockTime calc_output_buffer_pts_func(GstVideoDecoder *decoder);
-static GstClockTime calc_duration_func(GstVideoDecoder *decoder);
+static GstClockTime calc_output_buffer_pts_func(GstAmlVideoDecoder *decoder);
+static GstClockTime calc_duration_func(GstAmlVideoDecoder *decoder);
 
 static void
 gst_aml_v4l2_video_dec_set_property(GObject *object,
@@ -241,7 +241,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_open(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_open(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstCaps *codec_caps;
@@ -285,7 +285,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_close(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_close(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -300,7 +300,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_start(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_start(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -314,7 +314,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_stop(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_stop(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -326,9 +326,9 @@
     /* Wait for capture thread to stop */
     gst_pad_stop_task(decoder->srcpad);
 
-    GST_VIDEO_DECODER_STREAM_LOCK(decoder);
+    GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder);
     self->output_flow = GST_FLOW_OK;
-    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
+    GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder);
 
     /* Should have been flushed already */
     g_assert(g_atomic_int_get(&self->active) == FALSE);
@@ -338,7 +338,7 @@
 
     if (self->input_state)
     {
-        gst_video_codec_state_unref(self->input_state);
+        gst_aml_video_codec_state_unref(self->input_state);
         self->input_state = NULL;
     }
 
@@ -348,8 +348,8 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_codec_chg(GstVideoDecoder *decoder,
-                                  GstVideoCodecState *state)
+gst_aml_v4l2_video_dec_codec_chg(GstAmlVideoDecoder *decoder,
+                                  GstAmlVideoCodecState *state)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstStructure *s_old = NULL;
@@ -370,8 +370,8 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_res_chg(GstVideoDecoder *decoder,
-                                  GstVideoCodecState *state)
+gst_aml_v4l2_video_dec_res_chg(GstAmlVideoDecoder *decoder,
+                                  GstAmlVideoCodecState *state)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     gboolean ret = FALSE;
@@ -411,8 +411,8 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_set_format(GstVideoDecoder *decoder,
-                                  GstVideoCodecState *state)
+gst_aml_v4l2_video_dec_set_format(GstAmlVideoDecoder *decoder,
+                                  GstAmlVideoCodecState *state)
 {
     GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
     gboolean ret = TRUE;
@@ -454,13 +454,13 @@
         gst_aml_v4l2_video_dec_finish(decoder);
         gst_aml_v4l2_object_stop(self->v4l2output);
 
-        gst_video_codec_state_unref(self->input_state);
+        gst_aml_video_codec_state_unref(self->input_state);
         self->input_state = NULL;
 
         /* The renegotiation flow don't blend with the base class flow. To properly
          * stop the capture pool, if the buffers can't be orphaned, we need to
          * reclaim our buffers, which will happend through the allocation query.
-         * The allocation query is triggered by gst_video_decoder_negotiate() which
+         * The allocation query is triggered by gst_aml_video_decoder_negotiate() which
          * requires the output caps to be set, but we can't know this information
          * as we rely on the decoder, which requires the capture queue to be
          * stopped.
@@ -523,7 +523,7 @@
     gst_caps_append(self->probed_srccaps, caps);
 
     if (ret)
-        self->input_state = gst_video_codec_state_ref(state);
+        self->input_state = gst_aml_video_codec_state_ref(state);
     else
         gst_aml_v4l2_error(self, &error);
 
@@ -539,7 +539,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_flush(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_flush(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -549,12 +549,12 @@
      * discount case */
     if (gst_pad_get_task_state(decoder->srcpad) == GST_TASK_STARTED)
     {
-        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder);
 
         gst_aml_v4l2_object_unlock(self->v4l2output);
         gst_aml_v4l2_object_unlock(self->v4l2capture);
         gst_pad_stop_task(decoder->srcpad);
-        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder);
     }
 
     self->output_flow = GST_FLOW_OK;
@@ -575,7 +575,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_negotiate(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_negotiate(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -596,7 +596,7 @@
         gst_buffer_pool_is_active(GST_BUFFER_POOL(self->v4l2capture->pool)))
         return TRUE;
 
-    return GST_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
+    return GST_AML_VIDEO_DECODER_CLASS(parent_class)->negotiate(decoder);
 }
 
 static gboolean
@@ -636,7 +636,7 @@
 }
 
 static GstFlowReturn
-gst_aml_v4l2_video_dec_finish(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_finish(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstFlowReturn ret = GST_FLOW_OK;
@@ -647,7 +647,7 @@
 
     GST_DEBUG_OBJECT(self, "Finishing decoding");
 
-    GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
+    GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder);
 
     if (gst_aml_v4l2_decoder_cmd(self->v4l2output, V4L2_DEC_CMD_STOP, 0))
     {
@@ -680,7 +680,7 @@
      * occured. */
     gst_aml_v4l2_object_unlock(self->v4l2capture);
     gst_pad_stop_task(decoder->srcpad);
-    GST_VIDEO_DECODER_STREAM_LOCK(decoder);
+    GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder);
 
     if (ret == GST_FLOW_FLUSHING)
         ret = self->output_flow;
@@ -697,7 +697,7 @@
 }
 
 static GstFlowReturn
-gst_aml_v4l2_video_dec_drain(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_drain(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
 
@@ -708,20 +708,20 @@
     return GST_FLOW_OK;
 }
 
-static GstVideoCodecFrame *
-gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstVideoDecoder *decoder, GstClockTime pts)
+static GstAmlVideoCodecFrame *
+gst_aml_v4l2_video_dec_get_right_frame_for_frame_mode(GstAmlVideoDecoder *decoder, GstClockTime pts)
 {
-    GstVideoCodecFrame *frame = NULL;
+    GstAmlVideoCodecFrame *frame = NULL;
     GList *frames, *l;
     gint count = 0;
 
     GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
 
-    frames = gst_video_decoder_get_frames(decoder);
+    frames = gst_aml_video_decoder_get_frames(decoder);
 
     for (l = frames; l != NULL; l = l->next)
     {
-        GstVideoCodecFrame *f = l->data;
+        GstAmlVideoCodecFrame *f = l->data;
 
         if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts,pts)) < 1000) {
             frame = f;
@@ -733,7 +733,7 @@
     {
         for (l = frames; l != NULL; l = l->next)
         {
-            GstVideoCodecFrame *f = l->data;
+            GstAmlVideoCodecFrame *f = l->data;
             if (!GST_CLOCK_TIME_IS_VALID(f->pts))
             {
                 frame = f;
@@ -748,23 +748,23 @@
         GST_LOG_OBJECT(decoder,
                        "frame %p is %d %" GST_TIME_FORMAT " and %d frames left",
                        frame, frame->system_frame_number, GST_TIME_ARGS(frame->pts), count - 1);
-        gst_video_codec_frame_ref(frame);
+        gst_aml_video_codec_frame_ref(frame);
     }
 
-    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
+    g_list_free_full(frames, (GDestroyNotify)gst_aml_video_codec_frame_unref);
 
     return frame;
 }
 
-static GstVideoCodecFrame *
-gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstVideoDecoder *decoder, GstClockTime pts)
+static GstAmlVideoCodecFrame *
+gst_aml_v4l2_video_dec_get_right_frame_for_stream_mode(GstAmlVideoDecoder *decoder, GstClockTime pts)
 {
-    GstVideoCodecFrame *frame = NULL;
+    GstAmlVideoCodecFrame *frame = NULL;
     GList *frames, *l;
     guint frames_len = 0;
     GST_LOG_OBJECT (decoder, "trace in with pts: %" GST_TIME_FORMAT, GST_TIME_ARGS(pts));
 
-    if (!(frames = gst_video_decoder_get_frames(decoder)))
+    if (!(frames = gst_aml_video_decoder_get_frames(decoder)))
         goto done;
 
     frames_len = g_list_length(frames);
@@ -772,7 +772,7 @@
 
     for (l = frames; l != NULL; l = l->next)
     {
-        GstVideoCodecFrame *f = l->data;
+        GstAmlVideoCodecFrame *f = l->data;
 
         if (GST_CLOCK_TIME_IS_VALID(pts) && (ABSDIFF(f->pts, pts)) < 1000)
         {
@@ -789,9 +789,9 @@
                 "stream mode drop frame %d %" GST_TIME_FORMAT,
                 f->system_frame_number, GST_TIME_ARGS(f->pts));
 
-            gst_video_codec_frame_ref(f);
-            // gst_video_decoder_drop_frame(decoder, f);
-            gst_video_decoder_release_frame(decoder, f);
+            gst_aml_video_codec_frame_ref(f);
+            // gst_aml_video_decoder_drop_frame(decoder, f);
+            gst_aml_video_decoder_release_frame(decoder, f);
         }
         else
         {
@@ -802,24 +802,24 @@
     if (frame)
     {
         guint l_len = 0;
-        l = gst_video_decoder_get_frames(decoder);
+        l = gst_aml_video_decoder_get_frames(decoder);
         l_len = g_list_length(l);
-        g_list_free_full(l, (GDestroyNotify)gst_video_codec_frame_unref);
+        g_list_free_full(l, (GDestroyNotify)gst_aml_video_codec_frame_unref);
 
         GST_LOG_OBJECT(decoder,
                        "frame %p is %d %" GST_TIME_FORMAT " and %d frames left",
                        frame, frame->system_frame_number, GST_TIME_ARGS(frame->pts), l_len);
-        gst_video_codec_frame_ref(frame);
+        gst_aml_video_codec_frame_ref(frame);
     }
 
-    g_list_free_full(frames, (GDestroyNotify)gst_video_codec_frame_unref);
+    g_list_free_full(frames, (GDestroyNotify)gst_aml_video_codec_frame_unref);
 
 done:
     return frame;
 }
 
-static GstVideoCodecFrame *
-gst_aml_v4l2_video_dec_get_right_frame(GstVideoDecoder *decoder, GstClockTime pts)
+static GstAmlVideoCodecFrame *
+gst_aml_v4l2_video_dec_get_right_frame(GstAmlVideoDecoder *decoder, GstClockTime pts)
 {
     GstAmlV4l2VideoDec *self = (GstAmlV4l2VideoDec *)decoder;
     if (self->v4l2output->stream_mode)
@@ -893,17 +893,17 @@
 }
 
 static void
-gst_aml_v4l2_video_dec_set_output_status(GstVideoDecoder *decoder,GstVideoInfo info)
+gst_aml_v4l2_video_dec_set_output_status(GstAmlVideoDecoder *decoder,GstVideoInfo info)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
-    GstVideoCodecState *output_state;
+    GstAmlVideoCodecState *output_state;
     struct v4l2_selection sel;
     struct v4l2_rect *r = NULL;
     GstStructure *s;
     gint width = 0;
     gint height = 0;
     GST_DEBUG("%d   %d",info.width, info.height);
-    output_state = gst_video_decoder_set_output_state(decoder,
+    output_state = gst_aml_video_decoder_set_output_state(decoder,
                    info.finfo->format, info.width, info.height, self->input_state);
     memset(&sel, 0, sizeof(struct v4l2_selection));
     sel.type = self->v4l2capture->type;
@@ -930,7 +930,7 @@
             gst_structure_set(s,"width",G_TYPE_INT,info.width,NULL);
             gst_structure_set(s,"height",G_TYPE_INT,info.height,NULL);
             GST_DEBUG_OBJECT(self, "output_state->caps: %" GST_PTR_FORMAT, output_state->caps);
-            gst_video_codec_state_unref(output_state);
+            gst_aml_video_codec_state_unref(output_state);
         }
     }
 }
@@ -947,7 +947,7 @@
 }
 
 static gboolean
-foreach_cc_buffer_list_match_pts_func(GList *list , GstVideoCodecFrame *frame)
+foreach_cc_buffer_list_match_pts_func(GList *list , GstAmlVideoCodecFrame *frame)
 {
     GList *l;
     if (g_list_length (list) > 0)
@@ -987,7 +987,7 @@
 }
 
 static GstClockTime
-calc_output_buffer_pts_func(GstVideoDecoder *decoder)
+calc_output_buffer_pts_func(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstClockTime pts = GST_CLOCK_TIME_NONE;
@@ -1007,7 +1007,7 @@
 }
 
 static GstClockTime
-calc_duration_func(GstVideoDecoder *decoder)
+calc_duration_func(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstClockTime duration = GST_CLOCK_TIME_NONE;
@@ -1031,13 +1031,13 @@
 }
 
 static void
-gst_aml_v4l2_video_dec_loop(GstVideoDecoder *decoder)
+gst_aml_v4l2_video_dec_loop(GstAmlVideoDecoder *decoder)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstAmlV4l2BufferPool *v4l2_pool;
     GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
     GstBufferPool *pool;
-    GstVideoCodecFrame *frame;
+    GstAmlVideoCodecFrame *frame;
     GstBuffer *buffer = NULL;
     GstFlowReturn ret;
 
@@ -1145,7 +1145,7 @@
             gst_aml_v4l2_clear_error(&error);
         gst_caps_unref(caps);
         gst_aml_v4l2_video_dec_set_output_status(decoder,info);
-        if (!gst_video_decoder_negotiate(decoder))
+        if (!gst_aml_video_decoder_negotiate(decoder))
         {
             if (GST_PAD_IS_FLUSHING(decoder->srcpad))
                 goto flushing;
@@ -1179,12 +1179,12 @@
          * stream lock. we know that the acquire may need to poll until more frames
          * comes in and holding this lock would prevent that.
          */
-        pool = gst_video_decoder_get_buffer_pool(decoder);
+        pool = gst_aml_video_decoder_get_buffer_pool(decoder);
 
         /* Pool may be NULL if we started going to READY state */
         if (pool == NULL)
         {
-            GST_WARNING_OBJECT(decoder, "gst_video_decoder_get_buffer_pool goto beach");
+            GST_WARNING_OBJECT(decoder, "gst_aml_video_decoder_get_buffer_pool goto beach");
             ret = GST_FLOW_FLUSHING;
             goto beach;
         }
@@ -1315,7 +1315,7 @@
                GST_WARNING("bufferlist is empty or no match frame in the bufferlist");
             }
         }
-        ret = gst_video_decoder_finish_frame(decoder, frame);
+        ret = gst_aml_video_decoder_finish_frame(decoder, frame);
 
         if (ret != GST_FLOW_OK)
             goto beach;
@@ -1366,8 +1366,8 @@
 }
 
 static GstFlowReturn
-gst_aml_v4l2_video_dec_handle_frame(GstVideoDecoder *decoder,
-                                    GstVideoCodecFrame *frame)
+gst_aml_v4l2_video_dec_handle_frame(GstAmlVideoDecoder *decoder,
+                                    GstAmlVideoCodecFrame *frame)
 {
     GstAmlV4l2Error error = GST_AML_V4L2_ERROR_INIT;
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
@@ -1441,11 +1441,11 @@
                 goto activate_failed;
         }
 
-        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder);
         ret =
             gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &codec_data);
         self->codec_data_inject = TRUE;
-        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder);
 
         gst_buffer_unref(codec_data);
 
@@ -1455,7 +1455,7 @@
          * padding. */
     }
 
-    task_state = gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self));
+    task_state = gst_pad_get_task_state(GST_AML_VIDEO_DECODER_SRC_PAD(self));
     if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED)
     {
         /* It's possible that the processing thread stopped due to an error */
@@ -1479,7 +1479,7 @@
 
     if (!processed)
     {
-        GST_VIDEO_DECODER_STREAM_UNLOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_UNLOCK(decoder);
         if (!self->codec_data_inject && self->input_state->codec_data)
         {
             ret = gst_aml_v4l2_buffer_pool_process
@@ -1490,11 +1490,11 @@
         }
         ret =
             gst_aml_v4l2_buffer_pool_process(GST_AML_V4L2_BUFFER_POOL(self->v4l2output->pool), &frame->input_buffer);
-        GST_VIDEO_DECODER_STREAM_LOCK(decoder);
+        GST_AML_VIDEO_DECODER_STREAM_LOCK(decoder);
 
         if (ret == GST_FLOW_FLUSHING)
         {
-            if (gst_pad_get_task_state(GST_VIDEO_DECODER_SRC_PAD(self)) !=
+            if (gst_pad_get_task_state(GST_AML_VIDEO_DECODER_SRC_PAD(self)) !=
                 GST_TASK_STARTED)
                 ret = self->output_flow;
             goto drop;
@@ -1514,7 +1514,7 @@
                          0, 0);
     gst_buffer_unref(tmp);
 
-    gst_video_codec_frame_unref(frame);
+    gst_aml_video_codec_frame_unref(frame);
     return ret;
 
     /* ERRORS */
@@ -1559,13 +1559,13 @@
 }
 drop:
 {
-    gst_video_decoder_drop_frame(decoder, frame);
+    gst_aml_video_decoder_drop_frame(decoder, frame);
     return ret;
 }
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_decide_allocation(GstVideoDecoder *decoder,
+gst_aml_v4l2_video_dec_decide_allocation(GstAmlVideoDecoder *decoder,
                                          GstQuery *query)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
@@ -1573,14 +1573,14 @@
     gboolean ret = FALSE;
 
     if (gst_aml_v4l2_object_decide_allocation(self->v4l2capture, query))
-        ret = GST_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
+        ret = GST_AML_VIDEO_DECODER_CLASS(parent_class)->decide_allocation(decoder, query);
 
     if (GST_CLOCK_TIME_IS_VALID(self->v4l2capture->duration))
     {
         latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
         GST_DEBUG_OBJECT(self, "Setting latency: %" GST_TIME_FORMAT " (%" G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS(latency),
                          self->v4l2capture->min_buffers, self->v4l2capture->duration);
-        gst_video_decoder_set_latency(decoder, latency, latency);
+        gst_aml_video_decoder_set_latency(decoder, latency, latency);
     }
     else
     {
@@ -1591,7 +1591,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_src_query(GstVideoDecoder *decoder, GstQuery *query)
+gst_aml_v4l2_video_dec_src_query(GstAmlVideoDecoder *decoder, GstQuery *query)
 {
     gboolean ret = TRUE;
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
@@ -1601,7 +1601,7 @@
     case GST_QUERY_CAPS:
     {
         GstCaps *filter, *result = NULL;
-        GstPad *pad = GST_VIDEO_DECODER_SRC_PAD(decoder);
+        GstPad *pad = GST_AML_VIDEO_DECODER_SRC_PAD(decoder);
 
         gst_query_parse_caps(query, &filter);
 
@@ -1626,7 +1626,7 @@
     }
 
     default:
-        ret = GST_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
+        ret = GST_AML_VIDEO_DECODER_CLASS(parent_class)->src_query(decoder, query);
         break;
     }
 
@@ -1634,12 +1634,12 @@
 }
 
 static GstCaps *
-gst_aml_v4l2_video_dec_sink_getcaps(GstVideoDecoder *decoder, GstCaps *filter)
+gst_aml_v4l2_video_dec_sink_getcaps(GstAmlVideoDecoder *decoder, GstCaps *filter)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     GstCaps *result;
 
-    result = gst_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
+    result = gst_aml_video_decoder_proxy_getcaps(decoder, self->probed_sinkcaps,
                                              filter);
 
     GST_DEBUG_OBJECT(self, "Returning sink caps %" GST_PTR_FORMAT, result);
@@ -1648,7 +1648,7 @@
 }
 
 static gboolean
-gst_aml_v4l2_video_dec_sink_event(GstVideoDecoder *decoder, GstEvent *event)
+gst_aml_v4l2_video_dec_sink_event(GstAmlVideoDecoder *decoder, GstEvent *event)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(decoder);
     gboolean ret;
@@ -1763,7 +1763,7 @@
         break;
     }
 
-    ret = GST_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
+    ret = GST_AML_VIDEO_DECODER_CLASS(parent_class)->sink_event(decoder, event);
 
     switch (type)
     {
@@ -1785,7 +1785,7 @@
                                     GstStateChange transition)
 {
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(element);
-    GstVideoDecoder *decoder = GST_VIDEO_DECODER(element);
+    GstAmlVideoDecoder *decoder = GST_AML_VIDEO_DECODER(element);
 
     GST_DEBUG_OBJECT(element, "change state from %s to %s",
       gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
@@ -1861,12 +1861,12 @@
 {
     GstAmlV4l2VideoDecClass *klass = GST_AML_V4L2_VIDEO_DEC_CLASS(g_class);
     GstAmlV4l2VideoDec *self = GST_AML_V4L2_VIDEO_DEC(instance);
-    GstVideoDecoder *decoder = GST_VIDEO_DECODER(instance);
+    GstAmlVideoDecoder *decoder = GST_AML_VIDEO_DECODER(instance);
 
-    gst_video_decoder_set_packetized(decoder, TRUE);
+    gst_aml_video_decoder_set_packetized(decoder, TRUE);
 
     self->v4l2output = gst_aml_v4l2_object_new(GST_ELEMENT(self),
-                                               GST_OBJECT(GST_VIDEO_DECODER_SINK_PAD(self)),
+                                               GST_OBJECT(GST_AML_VIDEO_DECODER_SINK_PAD(self)),
                                                V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
                                                gst_aml_v4l2_get_output, gst_aml_v4l2_set_output, NULL);
     self->v4l2output->no_initial_format = TRUE;
@@ -1874,7 +1874,7 @@
     self->v4l2output->is_svp = FALSE;
 
     self->v4l2capture = gst_aml_v4l2_object_new(GST_ELEMENT(self),
-                                                GST_OBJECT(GST_VIDEO_DECODER_SRC_PAD(self)),
+                                                GST_OBJECT(GST_AML_VIDEO_DECODER_SRC_PAD(self)),
                                                 V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
                                                 gst_aml_v4l2_get_input, gst_aml_v4l2_set_input, NULL);
     self->v4l2capture->need_wait_event = TRUE;
@@ -1886,13 +1886,13 @@
 {
     GstElementClass *element_class;
     GObjectClass *gobject_class;
-    GstVideoDecoderClass *video_decoder_class;
+    GstAmlVideoDecoderClass *video_decoder_class;
 
     parent_class = g_type_class_peek_parent(klass);
 
     element_class = (GstElementClass *)klass;
     gobject_class = (GObjectClass *)klass;
-    video_decoder_class = (GstVideoDecoderClass *)klass;
+    video_decoder_class = (GstAmlVideoDecoderClass *)klass;
 
     GST_DEBUG_CATEGORY_INIT(gst_aml_v4l2_video_dec_debug, "amlv4l2videodec", 0,
                             "AML V4L2 Video Decoder");