#ifndef JEMALLOC_INTERNAL_INLINES_C_H
#define JEMALLOC_INTERNAL_INLINES_C_H

#include "jemalloc/internal/jemalloc_internal_types.h"
#include "jemalloc/internal/sz.h"
#include "jemalloc/internal/witness.h"

/*
 * Inline convenience wrappers around the arena_* allocation routines.
 * Trailing letters in the wrapper names encode the extra parameters a
 * variant accepts (z: explicit zero flag, t: tcache_t * argument,
 * m: arena/metadata bookkeeping, i.e. an arena_t * and/or an is_internal
 * flag used for internal-metadata accounting).
 */

/* Return the arena that owns the allocation at ptr. */
JEMALLOC_ALWAYS_INLINE arena_t *
iaalloc(tsdn_t *tsdn, const void *ptr) {
	assert(ptr != NULL);

	return arena_aalloc(tsdn, ptr);
}

/* Return the usable size of the allocation at ptr. */
JEMALLOC_ALWAYS_INLINE size_t
isalloc(tsdn_t *tsdn, const void *ptr) {
	assert(ptr != NULL);

	return arena_salloc(tsdn, ptr);
}

JEMALLOC_ALWAYS_INLINE void *
iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
    bool is_internal, arena_t *arena, bool slow_path) {
	void *ret;

	assert(size != 0);
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena == NULL || arena_is_auto(arena));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
	if (config_stats && is_internal && likely(ret != NULL)) {
		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
	}
	return ret;
}

JEMALLOC_ALWAYS_INLINE void *
ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) {
	return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false,
	    NULL, slow_path);
}

JEMALLOC_ALWAYS_INLINE void *
ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    tcache_t *tcache, bool is_internal, arena_t *arena) {
	void *ret;

	assert(usize != 0);
	assert(usize == sz_sa2u(usize, alignment));
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena == NULL || arena_is_auto(arena));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
	if (config_stats && is_internal && likely(ret != NULL)) {
		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
	}
	return ret;
}

JEMALLOC_ALWAYS_INLINE void *
ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    tcache_t *tcache, arena_t *arena) {
	return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
}

JEMALLOC_ALWAYS_INLINE void *
ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) {
	return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,
	    tcache_get(tsd), false, NULL);
}

/* Size query that tolerates pointers not managed by jemalloc. */
JEMALLOC_ALWAYS_INLINE size_t
ivsalloc(tsdn_t *tsdn, const void *ptr) {
	return arena_vsalloc(tsdn, ptr);
}

JEMALLOC_ALWAYS_INLINE void
idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,
    bool is_internal, bool slow_path) {
	assert(ptr != NULL);
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena_is_auto(iaalloc(tsdn, ptr)));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	if (config_stats && is_internal) {
		arena_internal_sub(iaalloc(tsdn, ptr), isalloc(tsdn, ptr));
	}
	/* Reentrant deallocations must not go through the tcache. */
	if (!is_internal && !tsdn_null(tsdn) &&
	    tsd_reentrancy_level_get(tsdn_tsd(tsdn)) != 0) {
		assert(tcache == NULL);
	}
	arena_dalloc(tsdn, ptr, tcache, alloc_ctx, slow_path);
}

JEMALLOC_ALWAYS_INLINE void
idalloc(tsd_t *tsd, void *ptr) {
	idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true);
}

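/*
 * A minimal usage sketch (illustrative only, kept as a comment so the
 * header stays unchanged when compiled; the size and flag values are
 * assumptions, not taken from any particular caller): internal callers
 * typically pair the tsd_t variants, which derive the tcache from the
 * calling thread and use an automatic arena.
 *
 *	tsd_t *tsd = tsd_fetch();
 *	size_t size = 64;
 *	void *p = ialloc(tsd, size, sz_size2index(size), false, true);
 *	if (p != NULL) {
 *		assert(isalloc(tsd_tsdn(tsd), p) >= size);
 *		idalloc(tsd, p);
 *	}
 */
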
JEMALLOC_ALWAYS_INLINE void
isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    alloc_ctx_t *alloc_ctx, bool slow_path) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	arena_sdalloc(tsdn, ptr, size, tcache, alloc_ctx, slow_path);
}

/*
 * Reallocate by allocating new space and copying, for use when the
 * existing allocation cannot satisfy the requested alignment. Prefers
 * size+extra, falling back to size alone if that allocation fails.
 */
JEMALLOC_ALWAYS_INLINE void *
iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
    arena_t *arena) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	void *p;
	size_t usize, copysize;

	usize = sz_sa2u(size + extra, alignment);
	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
		return NULL;
	}
	p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
	if (p == NULL) {
		if (extra == 0) {
			return NULL;
		}
		/* Try again, without extra this time. */
		usize = sz_sa2u(size, alignment);
		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
			return NULL;
		}
		p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
		if (p == NULL) {
			return NULL;
		}
	}
	/*
	 * Copy at most size bytes (not size+extra), since the caller has no
	 * expectation that the extra bytes will be reliably preserved.
	 */
	copysize = (size < oldsize) ? size : oldsize;
	memcpy(p, ptr, copysize);
	isdalloct(tsdn, ptr, oldsize, tcache, NULL, true);
	return p;
}

JEMALLOC_ALWAYS_INLINE void *
iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero, tcache_t *tcache, arena_t *arena) {
	assert(ptr != NULL);
	assert(size != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
	    != 0) {
		/*
		 * Existing object alignment is inadequate; allocate new space
		 * and copy.
		 */
		return iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
		    zero, tcache, arena);
	}

	return arena_ralloc(tsdn, arena, ptr, oldsize, size, alignment, zero,
	    tcache);
}

JEMALLOC_ALWAYS_INLINE void *
iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero) {
	return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,
	    tcache_get(tsd), NULL);
}

/* Attempt to resize in place; returns true if the resize failed. */
JEMALLOC_ALWAYS_INLINE bool
ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
    size_t alignment, bool zero) {
	assert(ptr != NULL);
	assert(size != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
	    != 0) {
		/* Existing object alignment is inadequate. */
		return true;
	}

	return arena_ralloc_no_move(tsdn, ptr, oldsize, size, extra, zero);
}

#endif /* JEMALLOC_INTERNAL_INLINES_C_H */