From 167cc146688e14fe876c7cbb33de831b5abfae3d Mon Sep 17 00:00:00 2001
From: David Carlier
Date: Sun, 19 Jun 2022 12:06:43 +0100
Subject: [PATCH] __builtin_memcpy/set_inline from clang usage introduction.

no extra calls guaranteed during LLVM IR generation but works only on
constants unlike their non inline counterparts.
---
 include/mimalloc-internal.h | 29 +++++++++++++++++++++++++++++
 src/heap.c                  |  4 ++--
 2 files changed, 31 insertions(+), 2 deletions(-)

diff --git a/include/mimalloc-internal.h b/include/mimalloc-internal.h
index 9fa371082..40692c5ca 100644
--- a/include/mimalloc-internal.h
+++ b/include/mimalloc-internal.h
@@ -168,6 +168,21 @@ bool _mi_page_is_valid(mi_page_t* page);
 #define __has_builtin(x) 0
 #endif
 
+#if __has_builtin(__builtin_memcpy_inline)
+#define _mi_memcpy_inline(x, y, s) __builtin_memcpy_inline(x, y, s)
+#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
+#define _mi_memcpy_inline(x, y, s) do { _Static_assert(__builtin_choose_expr(__builtin_constant_p(s), 1, 0), "`_mi_memcpy_inline` must be a constant integer"); memcpy(x, y, s); } while (0)
+#else
+#define _mi_memcpy_inline(x, y, s) memcpy(x, y, s)
+#endif
+
+#if __has_builtin(__builtin_memset_inline)
+#define _mi_memset_inline(x, y, s) __builtin_memset_inline(x, y, s)
+#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
+#define _mi_memset_inline(x, y, s) do { _Static_assert(__builtin_choose_expr(__builtin_constant_p(s), 1, 0), "`_mi_memset_inline` must be a constant integer"); memset(x, y, s); } while (0)
+#else
+#define _mi_memset_inline(x, y, s) memset(x, y, s)
+#endif
 
 /* -----------------------------------------------------------
   Error codes passed to `_mi_fatal_error`
@@ -975,6 +990,17 @@ static inline void _mi_memzero_aligned(void* dst, size_t n) {
   void* adst = __builtin_assume_aligned(dst, MI_INTPTR_SIZE);
   _mi_memzero(adst, n);
 }
+
+#define _mi_memcpy_inline_aligned(dst, src, n) \
+  mi_assert_internal(((uintptr_t)dst % MI_INTPTR_SIZE == 0) && ((uintptr_t)src % MI_INTPTR_SIZE == 0)); \
+  void* adst = __builtin_assume_aligned(dst, MI_INTPTR_SIZE); \
+  const void* asrc = __builtin_assume_aligned(src, MI_INTPTR_SIZE); \
+  _mi_memcpy_inline(adst, asrc, n)
+
+#define _mi_memzero_inline_aligned(dst, n) \
+  mi_assert_internal((uintptr_t)dst % MI_INTPTR_SIZE == 0); \
+  void* adst = __builtin_assume_aligned(dst, MI_INTPTR_SIZE); \
+  _mi_memzero_inline(adst, n)
 #else
 // Default fallback on `_mi_memcpy`
 static inline void _mi_memcpy_aligned(void* dst, const void* src, size_t n) {
@@ -986,6 +1012,9 @@ static inline void _mi_memzero_aligned(void* dst, size_t n) {
   mi_assert_internal((uintptr_t)dst % MI_INTPTR_SIZE == 0);
   _mi_memzero(dst, n);
 }
+
+#define _mi_memcpy_inline_aligned(dst, src, n) _mi_memcpy_aligned(dst, src, n)
+#define _mi_memzero_inline_aligned(dst, n) _mi_memzero_aligned(dst, n)
 #endif
 
 
diff --git a/src/heap.c b/src/heap.c
index 45cb14c16..64304f1fa 100644
--- a/src/heap.c
+++ b/src/heap.c
@@ -193,7 +193,7 @@ mi_decl_nodiscard mi_heap_t* mi_heap_new(void) {
   mi_heap_t* bheap = mi_heap_get_backing();
   mi_heap_t* heap = mi_heap_malloc_tp(bheap, mi_heap_t); // todo: OS allocate in secure mode?
   if (heap==NULL) return NULL;
-  _mi_memcpy_aligned(heap, &_mi_heap_empty, sizeof(mi_heap_t));
+  _mi_memcpy_inline_aligned(heap, &_mi_heap_empty, sizeof(mi_heap_t));
   heap->tld = bheap->tld;
   heap->thread_id = _mi_thread_id();
   _mi_random_split(&bheap->random, &heap->random);
@@ -220,7 +220,7 @@ static void mi_heap_reset_pages(mi_heap_t* heap) {
   #ifdef MI_MEDIUM_DIRECT
   memset(&heap->pages_free_medium, 0, sizeof(heap->pages_free_medium));
   #endif
-  _mi_memcpy_aligned(&heap->pages, &_mi_heap_empty.pages, sizeof(heap->pages));
+  _mi_memcpy_inline_aligned(&heap->pages, &_mi_heap_empty.pages, sizeof(heap->pages));
   heap->thread_delayed_free = NULL;
   heap->page_count = 0;
 }