1 #ifndef JEMALLOC_INTERNAL_INLINES_C_H
2 #define JEMALLOC_INTERNAL_INLINES_C_H
3 
4 #include "jemalloc/internal/jemalloc_internal_types.h"
5 #include "jemalloc/internal/sz.h"
6 #include "jemalloc/internal/witness.h"
7 
8 JEMALLOC_ALWAYS_INLINE arena_t *
9 iaalloc(tsdn_t *tsdn, const void *ptr) {
10 	assert(ptr != NULL);
11 
12 	return arena_aalloc(tsdn, ptr);
13 }
14 
15 JEMALLOC_ALWAYS_INLINE size_t
16 isalloc(tsdn_t *tsdn, const void *ptr) {
17 	assert(ptr != NULL);
18 
19 	return arena_salloc(tsdn, ptr);
20 }
21 
22 JEMALLOC_ALWAYS_INLINE void *
23 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
24     bool is_internal, arena_t *arena, bool slow_path) {
25 	void *ret;
26 
27 	assert(size != 0);
28 	assert(!is_internal || tcache == NULL);
29 	assert(!is_internal || arena == NULL || arena_is_auto(arena));
30 	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
31 	    WITNESS_RANK_CORE, 0);
32 
33 	ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
34 	if (config_stats && is_internal && likely(ret != NULL)) {
35 		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
36 	}
37 	return ret;
38 }
39 
40 JEMALLOC_ALWAYS_INLINE void *
41 ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) {
42 	return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false,
43 	    NULL, slow_path);
44 }
45 
46 JEMALLOC_ALWAYS_INLINE void *
47 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
48     tcache_t *tcache, bool is_internal, arena_t *arena) {
49 	void *ret;
50 
51 	assert(usize != 0);
52 	assert(usize == sz_sa2u(usize, alignment));
53 	assert(!is_internal || tcache == NULL);
54 	assert(!is_internal || arena == NULL || arena_is_auto(arena));
55 	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
56 	    WITNESS_RANK_CORE, 0);
57 
58 	ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
59 	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
60 	if (config_stats && is_internal && likely(ret != NULL)) {
61 		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
62 	}
63 	return ret;
64 }
65 
66 JEMALLOC_ALWAYS_INLINE void *
67 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
68     tcache_t *tcache, arena_t *arena) {
69 	return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
70 }
71 
72 JEMALLOC_ALWAYS_INLINE void *
73 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) {
74 	return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,
75 	    tcache_get(tsd), false, NULL);
76 }
77 
78 JEMALLOC_ALWAYS_INLINE size_t
79 ivsalloc(tsdn_t *tsdn, const void *ptr) {
80 	return arena_vsalloc(tsdn, ptr);
81 }
82 
/*
 * Core deallocation entry.  alloc_ctx, if non-NULL, carries cached metadata
 * that arena_dalloc() can use instead of a lookup -- presumably size-class
 * info; confirm against arena_dalloc().  is_internal marks jemalloc
 * metadata frees, which mirror the accounting done in iallocztm().
 */
JEMALLOC_ALWAYS_INLINE void
idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,
    bool is_internal, bool slow_path) {
	assert(ptr != NULL);
	/* Internal (metadata) frees never go through a tcache. */
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena_is_auto(iaalloc(tsdn, ptr)));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	if (config_stats && is_internal) {
		/* Reverse the arena_internal_add() done at allocation time. */
		arena_internal_sub(iaalloc(tsdn, ptr), isalloc(tsdn, ptr));
	}
	if (!is_internal && tsd_reentrancy_level_get(tsdn_tsd(tsdn)) != 0) {
		/* Reentrant application frees must bypass the tcache. */
		assert(tcache == NULL);
	}
	arena_dalloc(tsdn, ptr, tcache, alloc_ctx, slow_path);
}
99 
100 JEMALLOC_ALWAYS_INLINE void
101 idalloc(tsd_t *tsd, void *ptr) {
102 	idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true);
103 }
104 
/*
 * Sized deallocation: the caller supplies the allocation size, letting the
 * arena skip a size lookup.  alloc_ctx may carry cached metadata (see
 * idalloctm()).
 */
JEMALLOC_ALWAYS_INLINE void
isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    alloc_ctx_t *alloc_ctx, bool slow_path) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	arena_sdalloc(tsdn, ptr, size, tcache, alloc_ctx, slow_path);
}
112 
/*
 * Reallocate via allocate-copy-free, used when the existing object cannot
 * satisfy the requested alignment in place.  Returns the new object, or
 * NULL on failure (in which case ptr is left untouched and still valid).
 *
 * NOTE(review): size + extra below can wrap; presumably callers bound
 * extra so the sum cannot overflow -- TODO confirm at call sites.
 */
JEMALLOC_ALWAYS_INLINE void *
iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
    arena_t *arena) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	void *p;
	size_t usize, copysize;

	/* First attempt includes the optional extra bytes. */
	usize = sz_sa2u(size + extra, alignment);
	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
		return NULL;
	}
	p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
	if (p == NULL) {
		if (extra == 0) {
			return NULL;
		}
		/* Try again, without extra this time. */
		usize = sz_sa2u(size, alignment);
		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
			return NULL;
		}
		p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
		if (p == NULL) {
			return NULL;
		}
	}
	/*
	 * Copy at most size bytes (not size+extra), since the caller has no
	 * expectation that the extra bytes will be reliably preserved.
	 */
	copysize = (size < oldsize) ? size : oldsize;
	memcpy(p, ptr, copysize);
	/* Free the old object only after the copy has succeeded. */
	isdalloct(tsdn, ptr, oldsize, tcache, NULL, true);
	return p;
}
150 
151 JEMALLOC_ALWAYS_INLINE void *
152 iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
153     bool zero, tcache_t *tcache, arena_t *arena) {
154 	assert(ptr != NULL);
155 	assert(size != 0);
156 	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
157 	    WITNESS_RANK_CORE, 0);
158 
159 	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
160 	    != 0) {
161 		/*
162 		 * Existing object alignment is inadequate; allocate new space
163 		 * and copy.
164 		 */
165 		return iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
166 		    zero, tcache, arena);
167 	}
168 
169 	return arena_ralloc(tsdn, arena, ptr, oldsize, size, alignment, zero,
170 	    tcache);
171 }
172 
173 JEMALLOC_ALWAYS_INLINE void *
174 iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
175     bool zero) {
176 	return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,
177 	    tcache_get(tsd), NULL);
178 }
179 
180 JEMALLOC_ALWAYS_INLINE bool
181 ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
182     size_t alignment, bool zero) {
183 	assert(ptr != NULL);
184 	assert(size != 0);
185 	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
186 	    WITNESS_RANK_CORE, 0);
187 
188 	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
189 	    != 0) {
190 		/* Existing object alignment is inadequate. */
191 		return true;
192 	}
193 
194 	return arena_ralloc_no_move(tsdn, ptr, oldsize, size, extra, zero);
195 }
196 
197 #endif /* JEMALLOC_INTERNAL_INLINES_C_H */
198