[PATCH 11/11] RFC: Hack dlmalloc to poison memory
Simon Glass
sjg at chromium.org
Mon Apr 11 20:35:46 CEST 2022
Hi Andrew,
On Thu, 7 Apr 2022 at 03:41, Andrew Scull <ascull at google.com> wrote:
>
> This is a hugely ugly hack to poison and unpoison memory allocated by
> dlmalloc. It wraps every access dlmalloc makes to the metadata to briefly
> allow it access, taking care not to then poison the parts of the record
> which overlap.
>
> The result is very small redzones between the allocations, which have
> limited value but are still able to spot immediate buffer overruns.
>
> The instrumentation is extremely intrusive and would benefit from
> further intrusions to increase redzone sizes etc.
>
> Signed-off-by: Andrew Scull <ascull at google.com>
> ---
> common/dlmalloc.c | 284 ++++++++++++++++++++++++++++++++++++------
> include/compiler.h | 1 +
> include/linux/types.h | 1 +
> 3 files changed, 245 insertions(+), 41 deletions(-)
This actually seems very useful to me. I wonder if it could be split
into a few patches, for easier review?
Also, what is the mechanism to enable/disable this? See below for one idea.
I've made a few comments anyway.
The changes to include/ need a look. I'm not sure whether we can avoid
them, but if not, they could go in their own patch.
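
On the enable/disable point, one option (just a sketch; MALLOC_ASAN is a
made-up Kconfig name) would be to gate all of this on a symbol so the
wrappers compile away in normal builds:

/* Hypothetical symbol; only instrument when explicitly enabled */
#if CONFIG_IS_ENABLED(MALLOC_ASAN)
#include <sanitizer/asan_interface.h>
#define MALLOC_POISON(p, s)	ASAN_POISON_MEMORY_REGION((p), (s))
#define MALLOC_UNPOISON(p, s)	ASAN_UNPOISON_MEMORY_REGION((p), (s))
#else
#define MALLOC_POISON(p, s)	do { } while (0)
#define MALLOC_UNPOISON(p, s)	do { } while (0)
#endif

That would also keep the hot path free of any overhead when the feature
is off.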
>
> diff --git a/common/dlmalloc.c b/common/dlmalloc.c
> index 11729e8c85..614f004579 100644
> --- a/common/dlmalloc.c
> +++ b/common/dlmalloc.c
> @@ -8,6 +8,8 @@
> * as file malloc-2.6.6.c.
> */
>
> +#define DEBUG
> +
> #include <common.h>
> #include <log.h>
> #include <asm/global_data.h>
> @@ -16,6 +18,8 @@
> #define DEBUG
> #endif
>
> +#include <sanitizer/asan_interface.h>
> +
> #include <malloc.h>
> #include <asm/io.h>
>
> @@ -31,6 +35,17 @@ void malloc_stats();
>
> DECLARE_GLOBAL_DATA_PTR;
>
> +/*
> +#undef ASAN_POISON_MEMORY_REGION
> +#define ASAN_POISON_MEMORY_REGION(p, s) do { \
> + if ((uintptr_t)p == 0x0000150200c0) { \
> + printf("size %lx\n", s); \
> + *(int*)NULL = 9; \
> + } \
> + __asan_poison_memory_region(p, s); \
> +} while (0)
> +*/
> +
> /*
> Emulation of sbrk for WIN32
> All code within the ifdef WIN32 is untested by me.
> @@ -409,12 +424,26 @@ nextchunk-> +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
>
> /* Ptr to next physical malloc_chunk. */
>
> -#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) ))
> +#define _next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) ))
> +#define next_chunk(p) ({ \
> + mchunkptr _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
Where is that defined? (see my note below this macro)
> + mchunkptr _ret = _next_chunk(_ptr); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
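If it is the compiler-provided <sanitizer/asan_interface.h>, then IIRC
the two macros are just thin wrappers, roughly:

#define ASAN_POISON_MEMORY_REGION(addr, size) \
	__asan_poison_memory_region((addr), (size))
#define ASAN_UNPOISON_MEMORY_REGION(addr, size) \
	__asan_unpoison_memory_region((addr), (size))

i.e. they call straight into the ASAN runtime, so presumably a no-op
fallback is needed for builds without sanitizers. Worth double-checking
which header actually gets picked up here.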
>
> /* Ptr to previous physical malloc_chunk */
>
> -#define prev_chunk(p)\
> +#define _prev_chunk(p)\
> ((mchunkptr)( ((char*)(p)) - ((p)->prev_size) ))
> +#define prev_chunk(p) ({ \
> + mchunkptr _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(_ptr, SIZE_SZ); \
How about ASAN_CURE() for brevity? (sketch below this macro)
> + mchunkptr _ret = _prev_chunk(_ptr); \
> + ASAN_POISON_MEMORY_REGION(_ptr, SIZE_SZ); \
ASAN_POISON()?
> + _ret; \
> + })
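i.e. something like this, which would also cut down the line noise in
all of these macros (untested sketch, names are just a suggestion):

/* Briefly allow access to poisoned malloc metadata */
#define ASAN_CURE(p, s)		ASAN_UNPOISON_MEMORY_REGION((p), (s))
/* Hide it again once dlmalloc is done with it */
#define ASAN_POISON(p, s)	ASAN_POISON_MEMORY_REGION((p), (s))

#define prev_chunk(p) ({ \
	mchunkptr _ptr = (p); \
	ASAN_CURE(_ptr, SIZE_SZ); \
	mchunkptr _ret = _prev_chunk(_ptr); \
	ASAN_POISON(_ptr, SIZE_SZ); \
	_ret; \
	})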
>
>
> /* Treat space at ptr + offset as a chunk */
> @@ -430,35 +459,102 @@ nextchunk-> +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
>
> /* extract p's inuse bit */
>
> -#define inuse(p)\
> +#define _inuse(p)\
> ((((mchunkptr)(((char*)(p))+((p)->size & ~PREV_INUSE)))->size) & PREV_INUSE)
> +#define inuse(p) ({ \
> + mchunkptr _p = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + mchunkptr _ptr = ((mchunkptr)(((char*)_p)+(_p->size & ~PREV_INUSE))); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + INTERNAL_SIZE_T _ret = _inuse(_p); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + ASAN_POISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
>
> /* extract inuse bit of previous chunk */
>
> -#define prev_inuse(p) ((p)->size & PREV_INUSE)
> +#define _prev_inuse(p) ((p)->size & PREV_INUSE)
> +#define prev_inuse(p) ({ \
> + mchunkptr _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + INTERNAL_SIZE_T _ret = _prev_inuse(_ptr); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
>
> /* check for mmap()'ed chunk */
>
> -#define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)
> +#define _chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)
> +#define chunk_is_mmapped(p) ({ \
> + mchunkptr _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + INTERNAL_SIZE_T _ret = _chunk_is_mmapped(_ptr); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
>
> /* set/clear chunk as in use without otherwise disturbing */
>
> -#define set_inuse(p)\
> +#define _set_inuse(p)\
> ((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size |= PREV_INUSE
> -
> -#define clear_inuse(p)\
> +#define set_inuse(p, s) ({ \
> + mchunkptr _p = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + mchunkptr _ptr = ((mchunkptr)(((char*)_p)+(_p->size & ~PREV_INUSE))); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _set_inuse(_p, (s)); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + ASAN_POISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + })
> +
> +#define _clear_inuse(p)\
> ((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size &= ~(PREV_INUSE)
> +#define clear_inuse(p, s) ({ \
> + __typeof__(p) _p = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + mchunkptr _ptr = ((mchunkptr)(((char*)_p)+(_p->size & ~PREV_INUSE))); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _clear_inuse(_p, (s)); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + ASAN_POISON_MEMORY_REGION(((char*)_p) + SIZE_SZ, SIZE_SZ); \
> + })
>
> /* check/set/clear inuse bits in known places */
>
> -#define inuse_bit_at_offset(p, s)\
> +#define _inuse_bit_at_offset(p, s)\
> (((mchunkptr)(((char*)(p)) + (s)))->size & PREV_INUSE)
> -
> -#define set_inuse_bit_at_offset(p, s)\
> +#define inuse_bit_at_offset(p, s) ({ \
> + __typeof__(p) _p = (p); \
> + __typeof__(s) _s = (s); \
> + __typeof__(p) _ptr = (mchunkptr)(((char*)_p) + _s); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + INTERNAL_SIZE_T _ret = _inuse_bit_at_offset(_p, _s); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
> +
> +#define _set_inuse_bit_at_offset(p, s)\
> (((mchunkptr)(((char*)(p)) + (s)))->size |= PREV_INUSE)
> -
> -#define clear_inuse_bit_at_offset(p, s)\
> +#define set_inuse_bit_at_offset(p, s) ({ \
> + __typeof__(p) _p = (p); \
> + __typeof__(s) _s = (s); \
> + __typeof__(p) _ptr = (mchunkptr)(((char*)_p) + _s); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _set_inuse_bit_at_offset(_p, _s); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + })
> +
> +#define _clear_inuse_bit_at_offset(p, s)\
> (((mchunkptr)(((char*)(p)) + (s)))->size &= ~(PREV_INUSE))
> +#define clear_inuse_bit_at_offset(p, s) ({ \
> + __typeof__(p) _p = (p); \
> + __typeof__(s) _s = (s); \
> + __typeof__(p) _ptr = (mchunkptr)(((char*)_p) + _s); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _clear_inuse_bit_at_offset(_p, _s); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + })
>
>
>
> @@ -469,19 +565,46 @@ nextchunk-> +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
>
> /* Get size, ignoring use bits */
>
> -#define chunksize(p) ((p)->size & ~(SIZE_BITS))
> +#define _chunksize(p) ((p)->size & ~(SIZE_BITS))
> +#define chunksize(p) ({ \
> + mchunkptr _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + INTERNAL_SIZE_T _ret = _chunksize(_ptr); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _ret; \
> + })
>
> /* Set size at head, without disturbing its use bit */
>
> -#define set_head_size(p, s) ((p)->size = (((p)->size & PREV_INUSE) | (s)))
> +#define _set_head_size(p, s) ((p)->size = (((p)->size & PREV_INUSE) | (s)))
> +#define set_head_size(p, s) ({ \
> + __typeof__(p) _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _set_head_size(_ptr, (s)); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + })
>
> /* Set size/use ignoring previous bits in header */
>
> -#define set_head(p, s) ((p)->size = (s))
> +#define _set_head(p, s) ((p)->size = (s))
> +#define set_head(p, s) ({ \
> + __typeof__(p) _ptr = (p); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + _set_head(_ptr, (s)); \
> + ASAN_POISON_MEMORY_REGION(((char*)_ptr) + SIZE_SZ, SIZE_SZ); \
> + })
>
> /* Set size at footer (only when chunk is not in use) */
>
> -#define set_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_size = (s))
> +#define _set_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_size = (s))
> +#define set_foot(p, s) ({ \
> + __typeof__(p) _p = (p); \
> + __typeof__(s) _s = (s); \
> + __typeof__(p) _ptr = (mchunkptr)(((char*)_p) + _s); \
> + ASAN_UNPOISON_MEMORY_REGION(_ptr, SIZE_SZ); \
> + _set_foot(_p, (s)); \
> + ASAN_POISON_MEMORY_REGION(_ptr, SIZE_SZ); \
> + })
>
>
>
> @@ -604,8 +727,11 @@ void *sbrk(ptrdiff_t increment)
> * if we are giving memory back make sure we clear it out since
> * we set MORECORE_CLEARS to 1
> */
> - if (increment < 0)
> + if (increment < 0) {
> + ASAN_UNPOISON_MEMORY_REGION((void *)new, -increment);
> memset((void *)new, 0, -increment);
> + ASAN_POISON_MEMORY_REGION((void *)new, -increment);
> + }
>
> if ((new < mem_malloc_start) || (new > mem_malloc_end))
> return (void *)MORECORE_FAILURE;
> @@ -630,13 +756,24 @@ void mem_malloc_init(ulong start, ulong size)
> #ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
> memset((void *)mem_malloc_start, 0x0, size);
> #endif
> + ASAN_POISON_MEMORY_REGION((void *)start, size);
> malloc_bin_reloc();
> }
>
> /* field-extraction macros */
>
> -#define first(b) ((b)->fd)
> -#define last(b) ((b)->bk)
> +#define first(b) ({ \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)b) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + void *_ret = (b)->fd; \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)b) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + _ret; \
> + })
> +#define last(b) ({ \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)b) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + void *_ret = (b)->bk; \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)b) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + _ret; \
> + })
>
> /*
> Indexing into bins
> @@ -781,7 +918,7 @@ static void do_check_chunk(mchunkptr p)
> static void do_check_chunk(p) mchunkptr p;
> #endif
> {
> - INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
> + INTERNAL_SIZE_T sz = chunksize(p);
>
> /* No checkable chunk is mmapped */
> assert(!chunk_is_mmapped(p));
> @@ -802,7 +939,7 @@ static void do_check_free_chunk(mchunkptr p)
> static void do_check_free_chunk(p) mchunkptr p;
> #endif
> {
> - INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
> + INTERNAL_SIZE_T sz = chunksize(p);
> mchunkptr next = chunk_at_offset(p, sz);
>
> do_check_chunk(p);
> @@ -816,14 +953,22 @@ static void do_check_free_chunk(p) mchunkptr p;
> assert((sz & MALLOC_ALIGN_MASK) == 0);
> assert(aligned_OK(chunk2mem(p)));
> /* ... matching footer field */
> + ASAN_UNPOISON_MEMORY_REGION(next, SIZE_SZ);
> assert(next->prev_size == sz);
> + ASAN_POISON_MEMORY_REGION(next, SIZE_SZ);
> /* ... and is fully consolidated */
> assert(prev_inuse(p));
> assert (next == top || inuse(next));
>
> /* ... and has minimally sane links */
> + ASAN_UNPOISON_MEMORY_REGION(((char*)p) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> + ASAN_UNPOISON_MEMORY_REGION(((char*)p->fd) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> + ASAN_UNPOISON_MEMORY_REGION(((char*)p->bk) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> assert(p->fd->bk == p);
> assert(p->bk->fd == p);
> + ASAN_POISON_MEMORY_REGION(((char*)p->fd) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> + ASAN_POISON_MEMORY_REGION(((char*)p->bk) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> + ASAN_POISON_MEMORY_REGION(((char*)p) + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> }
> else /* markers are always of size SIZE_SZ */
> assert(sz == SIZE_SZ);
> @@ -867,7 +1012,7 @@ static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
> static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
> #endif
> {
> - INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
> + INTERNAL_SIZE_T sz = chunksize(p);
> long room = sz - s;
>
> do_check_inuse_chunk(p);
> @@ -919,31 +1064,48 @@ static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
>
> #define frontlink(P, S, IDX, BK, FD) \
> { \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> if (S < MAX_SMALLBIN_SIZE) \
> { \
> IDX = smallbin_index(S); \
> mark_binblock(IDX); \
> BK = bin_at(IDX); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> FD = BK->fd; \
> P->bk = BK; \
> P->fd = FD; \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> FD->bk = BK->fd = P; \
> } \
> else \
> { \
> IDX = bin_index(S); \
> BK = bin_at(IDX); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> FD = BK->fd; \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> if (FD == BK) mark_binblock(IDX); \
> else \
> { \
> - while (FD != BK && S < chunksize(FD)) FD = FD->fd; \
> + while (FD != BK && S < chunksize(FD)) { \
> + mchunkptr old = FD; \
> + FD = FD->fd; \
> + ASAN_POISON_MEMORY_REGION(((char*)old) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + } \
> + mchunkptr oldbk = BK; \
> BK = FD->bk; \
> + ASAN_POISON_MEMORY_REGION(((char*)oldbk) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> } \
> P->bk = BK; \
> P->fd = FD; \
> FD->bk = BK->fd = P; \
> } \
> + ASAN_POISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_POISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_POISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> }
>
>
> @@ -951,18 +1113,26 @@ static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
>
> #define unlink(P, BK, FD) \
> { \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> BK = P->bk; \
> FD = P->fd; \
> + ASAN_POISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> FD->bk = BK; \
> BK->fd = FD; \
> + ASAN_POISON_MEMORY_REGION(((char*)BK) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> + ASAN_POISON_MEMORY_REGION(((char*)FD) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> } \
>
> /* Place p as the last remainder */
>
> #define link_last_remainder(P) \
> { \
> + ASAN_UNPOISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> last_remainder->fd = last_remainder->bk = P; \
> P->fd = P->bk = last_remainder; \
> + ASAN_POISON_MEMORY_REGION(((char*)P) + 2*SIZE_SZ, 2*sizeof(uintptr_t)); \
> }
>
> /* Clear the last_remainder bin */
> @@ -1280,6 +1450,14 @@ static void malloc_extend_top(nb) INTERNAL_SIZE_T nb;
>
> */
>
> +#define publish_mem(p) ({ \
This could use a comment (suggestion below the macro).
> + __typeof__(p) _chunk = (p); \
> + Void_t *_ret = chunk2mem(_chunk); \
> + INTERNAL_SIZE_T _size = malloc_usable_size(_ret); \
> + ASAN_UNPOISON_MEMORY_REGION(_ret, _size); \
> + _ret; \
> + })
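Perhaps something along these lines (adjust the wording as needed):

/*
 * Unpoison the payload of chunk @p so the caller may use it, and
 * return the corresponding mem pointer, i.e. chunk2mem() plus the
 * ASAN bookkeeping.
 */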
> +
> #if __STD_C
> Void_t* mALLOc(size_t bytes)
> #else
> @@ -1339,7 +1517,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> unlink(victim, bck, fwd);
> set_inuse_bit_at_offset(victim, victim_size);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
>
> idx += 2; /* Set for bin scan below. We've already scanned 2 bins. */
> @@ -1350,7 +1528,11 @@ Void_t* mALLOc(bytes) size_t bytes;
> idx = bin_index(nb);
> bin = bin_at(idx);
>
> - for (victim = last(bin); victim != bin; victim = victim->bk)
> + for (victim = last(bin); victim != bin;
> + ASAN_UNPOISON_MEMORY_REGION((char*)victim + 2*SIZE_SZ, 2*sizeof(uintptr_t)),
> + victim = victim->bk,
> + ASAN_POISON_MEMORY_REGION((char*)victim + 2*SIZE_SZ, 2*sizeof(uintptr_t))
> + )
> {
> victim_size = chunksize(victim);
> remainder_size = victim_size - nb;
> @@ -1366,7 +1548,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> unlink(victim, bck, fwd);
> set_inuse_bit_at_offset(victim, victim_size);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
> }
>
> @@ -1389,7 +1571,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> set_head(remainder, remainder_size | PREV_INUSE);
> set_foot(remainder, remainder_size);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
>
> clear_last_remainder;
> @@ -1398,7 +1580,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> {
> set_inuse_bit_at_offset(victim, victim_size);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
>
> /* Else place in bin */
> @@ -1439,7 +1621,11 @@ Void_t* mALLOc(bytes) size_t bytes;
> {
> /* Find and use first big enough chunk ... */
>
> - for (victim = last(bin); victim != bin; victim = victim->bk)
> + for (victim = last(bin); victim != bin;
> + ASAN_UNPOISON_MEMORY_REGION((char*)victim + 2*SIZE_SZ, 2*sizeof(uintptr_t)),
> + victim = victim->bk,
> + ASAN_POISON_MEMORY_REGION((char*)victim + 2*SIZE_SZ, 2*sizeof(uintptr_t))
> + )
> {
> victim_size = chunksize(victim);
> remainder_size = victim_size - nb;
> @@ -1453,7 +1639,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> set_head(remainder, remainder_size | PREV_INUSE);
> set_foot(remainder, remainder_size);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
>
> else if (remainder_size >= 0) /* take */
> @@ -1461,7 +1647,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> set_inuse_bit_at_offset(victim, victim_size);
> unlink(victim, bck, fwd);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> + return publish_mem(victim);
> }
>
> }
> @@ -1509,7 +1695,7 @@ Void_t* mALLOc(bytes) size_t bytes;
> /* If big and would otherwise need to extend, try to use mmap instead */
> if ((unsigned long)nb >= (unsigned long)mmap_threshold &&
> (victim = mmap_chunk(nb)))
> - return chunk2mem(victim);
> + return publish_mem(victim);
> #endif
>
> /* Try to extend */
> @@ -1523,7 +1709,8 @@ Void_t* mALLOc(bytes) size_t bytes;
> top = chunk_at_offset(victim, nb);
> set_head(top, remainder_size | PREV_INUSE);
> check_malloced_chunk(victim, nb);
> - return chunk2mem(victim);
> +
> + return publish_mem(victim);
>
> }
>
> @@ -1578,8 +1765,12 @@ void fREe(mem) Void_t* mem;
> if (mem == NULL) /* free(0) has no effect */
> return;
>
> + ASAN_POISON_MEMORY_REGION(mem, malloc_usable_size(mem));
> +
> p = mem2chunk(mem);
> + ASAN_UNPOISON_MEMORY_REGION(((char*)p) + SIZE_SZ, SIZE_SZ);
> hd = p->size;
> + ASAN_POISON_MEMORY_REGION(((char*)p) + SIZE_SZ, SIZE_SZ);
>
> #if HAVE_MMAP
> if (hd & IS_MMAPPED) /* release mmapped memory. */
> @@ -1601,7 +1792,9 @@ void fREe(mem) Void_t* mem;
>
> if (!(hd & PREV_INUSE)) /* consolidate backward */
> {
> + ASAN_UNPOISON_MEMORY_REGION(((char*)p), SIZE_SZ);
> prevsz = p->prev_size;
> + ASAN_POISON_MEMORY_REGION(((char*)p), SIZE_SZ);
> p = chunk_at_offset(p, -((long) prevsz));
> sz += prevsz;
> unlink(p, bck, fwd);
> @@ -1620,20 +1813,25 @@ void fREe(mem) Void_t* mem;
>
> if (!(hd & PREV_INUSE)) /* consolidate backward */
> {
> + ASAN_UNPOISON_MEMORY_REGION(p, SIZE_SZ);
> prevsz = p->prev_size;
> + ASAN_POISON_MEMORY_REGION(p, SIZE_SZ);
> p = chunk_at_offset(p, -((long) prevsz));
> sz += prevsz;
>
> + ASAN_UNPOISON_MEMORY_REGION((char*)p + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> if (p->fd == last_remainder) /* keep as last_remainder */
> islr = 1;
> else
> unlink(p, bck, fwd);
> + ASAN_POISON_MEMORY_REGION((char*)p + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> }
>
> if (!(inuse_bit_at_offset(next, nextsz))) /* consolidate forward */
> {
> sz += nextsz;
>
> + ASAN_UNPOISON_MEMORY_REGION((char*)next + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> if (!islr && next->fd == last_remainder) /* re-insert last_remainder */
> {
> islr = 1;
> @@ -1641,6 +1839,7 @@ void fREe(mem) Void_t* mem;
> }
> else
> unlink(next, bck, fwd);
> + ASAN_POISON_MEMORY_REGION((char*)next + 2*SIZE_SZ, 2*sizeof(uintptr_t));
> }
>
>
> @@ -1747,7 +1946,7 @@ Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
> {
> #if HAVE_MREMAP
> newp = mremap_chunk(oldp, nb);
> - if(newp) return chunk2mem(newp);
> + if(newp) return publish_mem(newp);
> #endif
> /* Note the extra SIZE_SZ overhead. */
> if(oldsize - SIZE_SZ >= nb) return oldmem; /* do nothing */
> @@ -1782,7 +1981,7 @@ Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
> top = chunk_at_offset(oldp, nb);
> set_head(top, (newsize - nb) | PREV_INUSE);
> set_head_size(oldp, nb);
> - return chunk2mem(oldp);
> + return publish_mem(oldp);
> }
> }
>
> @@ -1895,7 +2094,7 @@ Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
> }
>
> check_inuse_chunk(newp);
> - return chunk2mem(newp);
> + return publish_mem(newp);
> }
>
>
> @@ -2006,7 +2205,7 @@ Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
> {
> #if HAVE_MMAP
> if(chunk_is_mmapped(p))
> - return chunk2mem(p); /* nothing more to do */
> + return publish_mem(p); /* nothing more to do */
> #endif
> }
> else /* misaligned */
> @@ -2032,7 +2231,7 @@ Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
> {
> newp->prev_size = p->prev_size + leadsize;
> set_head(newp, newsize|IS_MMAPPED);
> - return chunk2mem(newp);
> + return publish_mem(newp);
> }
> #endif
>
> @@ -2060,7 +2259,7 @@ Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
> }
>
> check_inuse_chunk(p);
> - return chunk2mem(p);
> + return publish_mem(p);
>
> }
>
> @@ -2318,7 +2517,10 @@ static void malloc_update_mallinfo()
> for (i = 1; i < NAV; ++i)
> {
> b = bin_at(i);
> - for (p = last(b); p != b; p = p->bk)
> + for (p = last(b); p != b;
> + ASAN_UNPOISON_MEMORY_REGION((char*)p + 2*SIZE_SZ, 2*sizeof(uintptr_t)),
> + p = p->bk,
> + ASAN_POISON_MEMORY_REGION((char*)p + 2*SIZE_SZ, 2*sizeof(uintptr_t)))
> {
> #ifdef DEBUG
> check_free_chunk(p);
> diff --git a/include/compiler.h b/include/compiler.h
> index ef7b2cb1f7..7f7e82daf7 100644
> --- a/include/compiler.h
> +++ b/include/compiler.h
> @@ -126,6 +126,7 @@ typedef __u32 u32;
>
> /* Type for `void *' pointers. */
> typedef unsigned long int uintptr_t;
> +typedef long int intptr_t;
>
> #include <linux/string.h>
> #include <linux/types.h>
> diff --git a/include/linux/types.h b/include/linux/types.h
> index baa2c491ea..d86d2611c0 100644
> --- a/include/linux/types.h
> +++ b/include/linux/types.h
> @@ -25,6 +25,7 @@ typedef __kernel_uid16_t uid16_t;
> typedef __kernel_gid16_t gid16_t;
>
> typedef unsigned long uintptr_t;
> +typedef long intptr_t;
>
> #ifdef CONFIG_UID16
> /* This is defined by include/asm-{arch}/posix_types.h */
> --
> 2.35.1.1094.g7c7d902a7c-goog
>
Regards,
Simon