Diffstat (limited to 'include/jemalloc/internal/jemalloc_internal.h.in')
-rw-r--r-- | include/jemalloc/internal/jemalloc_internal.h.in | 235
1 file changed, 111 insertions, 124 deletions
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 53c135c2..d24a1fe6 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -1,5 +1,5 @@
 #ifndef JEMALLOC_INTERNAL_H
-#define JEMALLOC_INTERNAL_H
+#define JEMALLOC_INTERNAL_H
 #include <math.h>
 #ifdef _WIN32
 # include <windows.h>
@@ -54,8 +54,7 @@ typedef intptr_t ssize_t;
 #endif
 #include <fcntl.h>

-#define JEMALLOC_NO_DEMANGLE
-#include "../jemalloc@install_suffix@.h"
+#include "jemalloc_internal_defs.h"

 #ifdef JEMALLOC_UTRACE
 #include <sys/ktrace.h>
@@ -66,13 +65,18 @@ typedef intptr_t ssize_t;
 #include <valgrind/memcheck.h>
 #endif

-#include "jemalloc/internal/private_namespace.h"
-
-#ifdef JEMALLOC_CC_SILENCE
-#define UNUSED JEMALLOC_ATTR(unused)
+#define JEMALLOC_NO_DEMANGLE
+#ifdef JEMALLOC_JET
+# define JEMALLOC_N(n) jet_##n
+# include "jemalloc/internal/public_namespace.h"
+# define JEMALLOC_NO_RENAME
+# include "../jemalloc@install_suffix@.h"
+# undef JEMALLOC_NO_RENAME
 #else
-#define UNUSED
+# define JEMALLOC_N(n) @private_namespace@##n
+# include "../jemalloc@install_suffix@.h"
 #endif
+#include "jemalloc/internal/private_namespace.h"

 static const bool config_debug =
 #ifdef JEMALLOC_DEBUG
@@ -221,47 +225,12 @@ static const bool config_ivsalloc =
  * JEMALLOC_H_INLINES : Inline functions.
  */
 /******************************************************************************/
-#define JEMALLOC_H_TYPES
-
-#define ALLOCM_LG_ALIGN_MASK ((int)0x3f)
+#define JEMALLOC_H_TYPES

-#define ZU(z) ((size_t)z)
-#define QU(q) ((uint64_t)q)
+#include "jemalloc/internal/jemalloc_internal_macros.h"

-#ifndef __DECONST
-# define __DECONST(type, var) ((type)(uintptr_t)(const void *)(var))
-#endif
-
-/*
- * JEMALLOC_ALWAYS_INLINE is used within header files for functions that are
- * static inline functions if inlining is enabled, and single-definition
- * library-private functions if inlining is disabled.
- *
- * JEMALLOC_ALWAYS_INLINE_C is for use in .c files, in which case the denoted
- * functions are always static, regardless of whether inlining is enabled.
- */
-#ifdef JEMALLOC_DEBUG
-   /* Disable inlining to make debugging easier. */
-# define JEMALLOC_ALWAYS_INLINE
-# define JEMALLOC_ALWAYS_INLINE_C static
-# define JEMALLOC_INLINE
-# define inline
-#else
-# define JEMALLOC_ENABLE_INLINE
-# ifdef JEMALLOC_HAVE_ATTR
-# define JEMALLOC_ALWAYS_INLINE \
-    static inline JEMALLOC_ATTR(unused) JEMALLOC_ATTR(always_inline)
-# define JEMALLOC_ALWAYS_INLINE_C \
-    static inline JEMALLOC_ATTR(always_inline)
-# else
-# define JEMALLOC_ALWAYS_INLINE static inline
-# define JEMALLOC_ALWAYS_INLINE_C static inline
-# endif
-# define JEMALLOC_INLINE static inline
-# ifdef _MSC_VER
-# define inline _inline
-# endif
-#endif
+#define MALLOCX_LG_ALIGN_MASK ((int)0x3f)
+#define ALLOCM_LG_ALIGN_MASK ((int)0x3f)

 /* Smallest size class to support. */
 #define LG_TINY_MIN 3
@@ -493,7 +462,7 @@ static const bool config_ivsalloc =

 #undef JEMALLOC_H_TYPES
 /******************************************************************************/
-#define JEMALLOC_H_STRUCTS
+#define JEMALLOC_H_STRUCTS

 #include "jemalloc/internal/util.h"
 #include "jemalloc/internal/atomic.h"
@@ -522,14 +491,14 @@ typedef struct {
     uint64_t deallocated;
 } thread_allocated_t;
 /*
- * The JEMALLOC_CONCAT() wrapper is necessary to pass {0, 0} via a cpp macro
+ * The JEMALLOC_ARG_CONCAT() wrapper is necessary to pass {0, 0} via a cpp macro
  * argument.
  */
-#define THREAD_ALLOCATED_INITIALIZER JEMALLOC_CONCAT({0, 0})
+#define THREAD_ALLOCATED_INITIALIZER JEMALLOC_ARG_CONCAT({0, 0})

 #undef JEMALLOC_H_STRUCTS
 /******************************************************************************/
-#define JEMALLOC_H_EXTERNS
+#define JEMALLOC_H_EXTERNS

 extern bool opt_abort;
 extern bool opt_junk;
@@ -589,7 +558,7 @@ void jemalloc_postfork_child(void);

 #undef JEMALLOC_H_EXTERNS
 /******************************************************************************/
-#define JEMALLOC_H_INLINES
+#define JEMALLOC_H_INLINES

 #include "jemalloc/internal/util.h"
 #include "jemalloc/internal/atomic.h"
@@ -764,32 +733,36 @@ choose_arena(arena_t *arena)
 #include "jemalloc/internal/quarantine.h"

 #ifndef JEMALLOC_ENABLE_INLINE
-void *imallocx(size_t size, bool try_tcache, arena_t *arena);
+void *imalloct(size_t size, bool try_tcache, arena_t *arena);
 void *imalloc(size_t size);
-void *icallocx(size_t size, bool try_tcache, arena_t *arena);
+void *icalloct(size_t size, bool try_tcache, arena_t *arena);
 void *icalloc(size_t size);
-void *ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
+void *ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena);
 void *ipalloc(size_t usize, size_t alignment, bool zero);
 size_t isalloc(const void *ptr, bool demote);
 size_t ivsalloc(const void *ptr, bool demote);
 size_t u2rz(size_t usize);
 size_t p2rz(const void *ptr);
-void idallocx(void *ptr, bool try_tcache);
+void idalloct(void *ptr, bool try_tcache);
 void idalloc(void *ptr);
-void iqallocx(void *ptr, bool try_tcache);
+void iqalloct(void *ptr, bool try_tcache);
 void iqalloc(void *ptr);
-void *irallocx(void *ptr, size_t size, size_t extra, size_t alignment,
-    bool zero, bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc,
+void *iralloct_realign(void *ptr, size_t oldsize, size_t size, size_t extra,
+    size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
     arena_t *arena);
+void *iralloct(void *ptr, size_t size, size_t extra, size_t alignment,
+    bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena);
 void *iralloc(void *ptr, size_t size, size_t extra, size_t alignment,
-    bool zero, bool no_move);
+    bool zero);
+bool ixalloc(void *ptr, size_t size, size_t extra, size_t alignment,
+    bool zero);
 malloc_tsd_protos(JEMALLOC_ATTR(unused), thread_allocated, thread_allocated_t)
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE void *
-imallocx(size_t size, bool try_tcache, arena_t *arena)
+imalloct(size_t size, bool try_tcache, arena_t *arena)
 {

     assert(size != 0);
@@ -804,11 +777,11 @@ JEMALLOC_ALWAYS_INLINE void *
 imalloc(size_t size)
 {

-    return (imallocx(size, true, NULL));
+    return (imalloct(size, true, NULL));
 }

 JEMALLOC_ALWAYS_INLINE void *
-icallocx(size_t size, bool try_tcache, arena_t *arena)
+icalloct(size_t size, bool try_tcache, arena_t *arena)
 {

     if (size <= arena_maxclass)
@@ -821,11 +794,11 @@ JEMALLOC_ALWAYS_INLINE void *
 icalloc(size_t size)
 {

-    return (icallocx(size, true, NULL));
+    return (icalloct(size, true, NULL));
 }

 JEMALLOC_ALWAYS_INLINE void *
-ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
+ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena)
 {
     void *ret;
@@ -853,7 +826,7 @@ JEMALLOC_ALWAYS_INLINE void *
 ipalloc(size_t usize, size_t alignment, bool zero)
 {

-    return (ipallocx(usize, alignment, zero, true, NULL));
+    return (ipalloct(usize, alignment, zero, true, NULL));
 }

 /*
@@ -885,7 +858,7 @@ ivsalloc(const void *ptr, bool demote)
 {

     /* Return 0 if ptr is not within a chunk managed by jemalloc. */
-    if (rtree_get(chunks_rtree, (uintptr_t)CHUNK_ADDR2BASE(ptr)) == NULL)
+    if (rtree_get(chunks_rtree, (uintptr_t)CHUNK_ADDR2BASE(ptr)) == 0)
         return (0);

     return (isalloc(ptr, demote));
@@ -914,7 +887,7 @@ p2rz(const void *ptr)
 }

 JEMALLOC_ALWAYS_INLINE void
-idallocx(void *ptr, bool try_tcache)
+idalloct(void *ptr, bool try_tcache)
 {
     arena_chunk_t *chunk;

@@ -931,31 +904,63 @@ JEMALLOC_ALWAYS_INLINE void
 idalloc(void *ptr)
 {

-    idallocx(ptr, true);
+    idalloct(ptr, true);
 }

 JEMALLOC_ALWAYS_INLINE void
-iqallocx(void *ptr, bool try_tcache)
+iqalloct(void *ptr, bool try_tcache)
 {

     if (config_fill && opt_quarantine)
         quarantine(ptr);
     else
-        idallocx(ptr, try_tcache);
+        idalloct(ptr, try_tcache);
 }

 JEMALLOC_ALWAYS_INLINE void
 iqalloc(void *ptr)
 {

-    iqallocx(ptr, true);
+    iqalloct(ptr, true);
 }

 JEMALLOC_ALWAYS_INLINE void *
-irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
-    bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
+iralloct_realign(void *ptr, size_t oldsize, size_t size, size_t extra,
+    size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
+    arena_t *arena)
+{
+    void *p;
+    size_t usize, copysize;
+
+    usize = sa2u(size + extra, alignment);
+    if (usize == 0)
+        return (NULL);
+    p = ipalloct(usize, alignment, zero, try_tcache_alloc, arena);
+    if (p == NULL) {
+        if (extra == 0)
+            return (NULL);
+        /* Try again, without extra this time. */
+        usize = sa2u(size, alignment);
+        if (usize == 0)
+            return (NULL);
+        p = ipalloct(usize, alignment, zero, try_tcache_alloc, arena);
+        if (p == NULL)
+            return (NULL);
+    }
+    /*
+     * Copy at most size bytes (not size+extra), since the caller has no
+     * expectation that the extra bytes will be reliably preserved.
+     */
+    copysize = (size < oldsize) ? size : oldsize;
+    memcpy(p, ptr, copysize);
+    iqalloct(ptr, try_tcache_dalloc);
+    return (p);
+}
+
+JEMALLOC_ALWAYS_INLINE void *
+iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
+    bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
 {
-    void *ret;
     size_t oldsize;

     assert(ptr != NULL);
@@ -965,68 +970,50 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,

     if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
         != 0) {
-        size_t usize, copysize;
-
         /*
          * Existing object alignment is inadequate; allocate new space
          * and copy.
          */
-        if (no_move)
-            return (NULL);
-        usize = sa2u(size + extra, alignment);
-        if (usize == 0)
-            return (NULL);
-        ret = ipallocx(usize, alignment, zero, try_tcache_alloc, arena);
-        if (ret == NULL) {
-            if (extra == 0)
-                return (NULL);
-            /* Try again, without extra this time. */
-            usize = sa2u(size, alignment);
-            if (usize == 0)
-                return (NULL);
-            ret = ipallocx(usize, alignment, zero, try_tcache_alloc,
-                arena);
-            if (ret == NULL)
-                return (NULL);
-        }
-        /*
-         * Copy at most size bytes (not size+extra), since the caller
-         * has no expectation that the extra bytes will be reliably
-         * preserved.
-         */
-        copysize = (size < oldsize) ? size : oldsize;
-        memcpy(ret, ptr, copysize);
-        iqallocx(ptr, try_tcache_dalloc);
-        return (ret);
+        return (iralloct_realign(ptr, oldsize, size, extra, alignment,
+            zero, try_tcache_alloc, try_tcache_dalloc, arena));
     }

-    if (no_move) {
-        if (size <= arena_maxclass) {
-            return (arena_ralloc_no_move(ptr, oldsize, size,
-                extra, zero));
-        } else {
-            return (huge_ralloc_no_move(ptr, oldsize, size,
-                extra));
-        }
+    if (size + extra <= arena_maxclass) {
+        return (arena_ralloc(arena, ptr, oldsize, size, extra,
+            alignment, zero, try_tcache_alloc,
+            try_tcache_dalloc));
     } else {
-        if (size + extra <= arena_maxclass) {
-            return (arena_ralloc(arena, ptr, oldsize, size, extra,
-                alignment, zero, try_tcache_alloc,
-                try_tcache_dalloc));
-        } else {
-            return (huge_ralloc(ptr, oldsize, size, extra,
-                alignment, zero, try_tcache_dalloc));
-        }
+        return (huge_ralloc(ptr, oldsize, size, extra,
+            alignment, zero, try_tcache_dalloc));
     }
 }

 JEMALLOC_ALWAYS_INLINE void *
-iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
-    bool no_move)
+iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero)
+{
+
+    return (iralloct(ptr, size, extra, alignment, zero, true, true, NULL));
+}
+
+JEMALLOC_ALWAYS_INLINE bool
+ixalloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero)
 {
+    size_t oldsize;
+
+    assert(ptr != NULL);
+    assert(size != 0);

-    return (irallocx(ptr, size, extra, alignment, zero, no_move, true, true,
-        NULL));
+    oldsize = isalloc(ptr, config_prof);
+    if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
+        != 0) {
+        /* Existing object alignment is inadequate. */
+        return (true);
+    }
+
+    if (size <= arena_maxclass)
+        return (arena_ralloc_no_move(ptr, oldsize, size, extra, zero));
+    else
+        return (huge_ralloc_no_move(ptr, oldsize, size, extra));
 }

 malloc_tsd_externs(thread_allocated, thread_allocated_t)
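
Note on the last hunks: the diff splits the old irallocx() into iralloct(), which may move the allocation, and ixalloc(), which only attempts an in-place resize, with iralloct_realign() factoring out the allocate-copy-free fallback used when the existing object's alignment is inadequate. The sketch below is an illustrative analogue in plain C only, not jemalloc code; realign_copy() and its parameter names are hypothetical, with posix_memalign(), memcpy(), and free() standing in for ipalloct(), the copy, and iqalloct().

/*
 * Illustrative analogue only (not jemalloc code): the copy-and-free
 * fallback that iralloct_realign() performs when an existing object's
 * alignment is inadequate. realign_copy() is a hypothetical name.
 */
#include <stdlib.h>
#include <string.h>

static void *
realign_copy(void *old_ptr, size_t old_size, size_t new_size, size_t alignment)
{
    void *new_ptr;

    /* alignment must be a power of two and a multiple of sizeof(void *). */
    if (posix_memalign(&new_ptr, alignment, new_size) != 0)
        return (NULL);
    /* Copy at most new_size bytes; anything beyond that is not preserved. */
    memcpy(new_ptr, old_ptr, new_size < old_size ? new_size : old_size);
    free(old_ptr);
    return (new_ptr);
}

As in the hunk, the copy is bounded by the requested size rather than size + extra, since callers cannot rely on the extra bytes surviving a move.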