smp: add lock protection for malloc/free [1/1]

PD#SWPL-167241

Problem:
No SMP protection for malloc/free; concurrent cores
can corrupt the dlmalloc heap.

Solution:
Add a spinlock around dlmalloc's malloc/free paths and
simplify realloc to malloc + copy + free when
CONFIG_ARMV8_MULTIENTRY is enabled.

Verify:
t5m

Change-Id: Id4798be598afa209bde5ed85f48644023ad2c490
Signed-off-by: xia.jin <xia.jin@amlogic.com>
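
Note: the locking pattern this patch applies is sketched below outside
of U-Boot for reference. A minimal sketch, assuming a C11 atomic_flag
as a stand-in for U-Boot's spin_lock_t; guarded_malloc()/guarded_free()
are illustrative names, not code from this patch. The invariant the
patch has to maintain is that every return path out of the allocator
drops the lock:

    #include <stdatomic.h>
    #include <stdlib.h>

    static atomic_flag lock_flag = ATOMIC_FLAG_INIT;

    static void lock(void)
    {
        /* spin until we observe the flag clear and set it */
        while (atomic_flag_test_and_set_explicit(&lock_flag,
                                                 memory_order_acquire))
            ;
    }

    static void unlock(void)
    {
        atomic_flag_clear_explicit(&lock_flag, memory_order_release);
    }

    void *guarded_malloc(size_t bytes)
    {
        void *mem;

        lock();
        mem = malloc(bytes); /* stands in for the dlmalloc body */
        unlock();            /* dlmalloc has many exits; each must unlock */
        return mem;
    }

    void guarded_free(void *mem)
    {
        lock();
        free(mem);
        unlock();
    }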
diff --git a/common/dlmalloc.c b/common/dlmalloc.c
index da0eab3..0100ba5 100644
--- a/common/dlmalloc.c
+++ b/common/dlmalloc.c
@@ -33,6 +33,10 @@
#ifdef CONFIG_AML_UASAN
#include <amlogic/uasan.h>
#endif
+#ifdef CONFIG_ARMV8_MULTIENTRY
+#include <spinlock.h>
+static spin_lock_t malloc_lock; /* serializes malloc/free across cores */
+#endif
DECLARE_GLOBAL_DATA_PTR;
@@ -656,6 +660,9 @@
memset((void *)mem_malloc_start, 0x0, size);
#endif
malloc_bin_reloc();
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_lock_init(&malloc_lock);
+#endif
}
/* field-extraction macros */
@@ -1344,6 +1351,9 @@
if ((long)bytes < 0) return NULL;
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_lock(&malloc_lock);
+#endif
nb = request2size(bytes); /* padded request size; */
/* Check for exact match in a bin */
@@ -1370,6 +1380,9 @@
set_inuse_bit_at_offset(victim, victim_size);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1403,7 +1416,9 @@
set_inuse_bit_at_offset(victim, victim_size);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
-
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1433,6 +1448,9 @@
set_foot(remainder, remainder_size);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1448,6 +1466,9 @@
set_inuse_bit_at_offset(victim, victim_size);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1509,6 +1530,9 @@
set_foot(remainder, remainder_size);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1523,6 +1547,9 @@
unlink(victim, bck, fwd);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1582,8 +1609,15 @@
/* Try to extend */
malloc_extend_top(nb);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE) {
+ spin_unlock(&malloc_lock);
+ return NULL; /* propagate failure */
+ }
+#else
if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
return NULL; /* propagate failure */
+#endif
}
victim = top;
@@ -1592,6 +1626,9 @@
set_head(top, remainder_size | PREV_INUSE);
check_malloced_chunk(victim, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(victim), bytes, SIZE_SZ, false);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
#ifdef CONFIG_AML_UASAN
uasan_alloc(victim, bytes);
return chunk2mem(victim);
@@ -1665,6 +1702,9 @@
}
#endif
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_lock(&malloc_lock);
+#endif
check_inuse_chunk(p);
sz = hd & ~PREV_INUSE;
@@ -1693,6 +1733,9 @@
top = p;
if ((unsigned long)(sz) >= (unsigned long)trim_threshold)
malloc_trim(top_pad);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
return;
}
@@ -1730,12 +1773,12 @@
set_foot(p, sz);
if (!islr)
frontlink(p, sz, idx, bck, fwd);
+#ifdef CONFIG_ARMV8_MULTIENTRY
+ spin_unlock(&malloc_lock);
+#endif
}
-
-
-
/*
Realloc algorithm:
@@ -1771,7 +1814,40 @@
*/
+#ifdef CONFIG_ARMV8_MULTIENTRY
+/*
+ * Multi-entry realloc: implemented as malloc + copy + free so that all
+ * locking stays inside mALLOc()/fREe() and malloc_lock is never held here.
+ * Trade-off: the stock in-place shrink/extend paths are not used.
+ */
+Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
+{
+ void *new;
+ mchunkptr oldp;
+ size_t oldsize;
+
+ if (bytes == 0)
+ return NULL;
+ if (!oldmem)
+ return mALLOc(bytes);
+
+ oldp = mem2chunk(oldmem);
+ oldsize = chunksize(oldp);
+
+ new = mALLOc(bytes);
+ if (!new)
+ return NULL; /* oldmem is left intact on failure */
+
+ /* Copy only the usable payload: chunksize() includes the header. */
+ if (oldsize - SIZE_SZ > bytes)
+ memcpy(new, oldmem, bytes);
+ else
+ memcpy(new, oldmem, oldsize - SIZE_SZ);
+ fREe(oldmem);
+ return new;
+}
+#else
#if __STD_C
Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
#else
@@ -2031,6 +2107,7 @@
return chunk2mem(newp);
#endif
}
+#endif /* CONFIG_ARMV8_MULTIENTRY */
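
Design note: under CONFIG_ARMV8_MULTIENTRY, rEALLOc above is rebuilt as
mALLOc + copy + fREe so that all locking stays inside mALLOc()/fREe()
and realloc never holds malloc_lock across nested calls; the trade-off
is an unconditional copy instead of the stock in-place shrink/extend
paths. A hedged usage sketch of the guarded wrappers from the note
above, hammering the allocator from several threads standing in for
secondary cores (compile together with that sketch; not a U-Boot test):

    #include <pthread.h>
    #include <stddef.h>

    /* from the earlier sketch; illustrative names, not patch code */
    void *guarded_malloc(size_t bytes);
    void guarded_free(void *mem);

    /* each thread plays the role of one core entering the allocator */
    static void *hammer(void *arg)
    {
        (void)arg;
        for (int i = 0; i < 100000; i++)
            guarded_free(guarded_malloc(64));
        return NULL;
    }

    int main(void)
    {
        pthread_t t[4];

        for (int i = 0; i < 4; i++)
            pthread_create(&t[i], NULL, hammer, NULL);
        for (int i = 0; i < 4; i++)
            pthread_join(t[i], NULL);
        return 0;
    }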