#include "test/jemalloc_test.h"

#include "jemalloc/internal/rtree.h"

rtree_node_alloc_t *rtree_node_alloc_orig;
rtree_node_dalloc_t *rtree_node_dalloc_orig;
rtree_leaf_alloc_t *rtree_leaf_alloc_orig;
rtree_leaf_dalloc_t *rtree_leaf_dalloc_orig;

/* Potentially too large to safely place on the stack. */
rtree_t test_rtree;

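/*
 * Intercept rtree node/leaf allocation so that test_rtree's internals come
 * from plain calloc()/free() rather than the allocator under test.  The
 * rtree's init_lock is dropped around calloc(), presumably so that any
 * allocation it triggers cannot deadlock or violate lock ordering while the
 * lock is held.
 */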
static rtree_node_elm_t *
rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
	rtree_node_elm_t *node;

	if (rtree != &test_rtree) {
		return rtree_node_alloc_orig(tsdn, rtree, nelms);
	}

	malloc_mutex_unlock(tsdn, &rtree->init_lock);
	node = (rtree_node_elm_t *)calloc(nelms, sizeof(rtree_node_elm_t));
	assert_ptr_not_null(node, "Unexpected calloc() failure");
	malloc_mutex_lock(tsdn, &rtree->init_lock);

	return node;
}

static void
rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree,
    rtree_node_elm_t *node) {
	if (rtree != &test_rtree) {
		rtree_node_dalloc_orig(tsdn, rtree, node);
		return;
	}

	free(node);
}

static rtree_leaf_elm_t *
rtree_leaf_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
	rtree_leaf_elm_t *leaf;

	if (rtree != &test_rtree) {
		return rtree_leaf_alloc_orig(tsdn, rtree, nelms);
	}

	malloc_mutex_unlock(tsdn, &rtree->init_lock);
	leaf = (rtree_leaf_elm_t *)calloc(nelms, sizeof(rtree_leaf_elm_t));
	assert_ptr_not_null(leaf, "Unexpected calloc() failure");
	malloc_mutex_lock(tsdn, &rtree->init_lock);

	return leaf;
}

static void
rtree_leaf_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree,
    rtree_leaf_elm_t *leaf) {
	if (rtree != &test_rtree) {
		rtree_leaf_dalloc_orig(tsdn, rtree, leaf);
		return;
	}

	free(leaf);
}

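/* Reading from an rtree with no extents written should yield NULL. */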
TEST_BEGIN(test_rtree_read_empty) {
	tsdn_t *tsdn;

	tsdn = tsdn_fetch();

	rtree_t *rtree = &test_rtree;
	rtree_ctx_t rtree_ctx;
	rtree_ctx_data_init(&rtree_ctx);
	assert_false(rtree_new(rtree, false), "Unexpected rtree_new() failure");
	assert_ptr_null(rtree_extent_read(tsdn, rtree, &rtree_ctx, PAGE,
	    false), "rtree_extent_read() should return NULL for empty tree");
	rtree_delete(tsdn, rtree);
}
TEST_END

#undef NTHREADS
#undef NITERS
#undef SEED

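/*
 * Exercise the extremes of the key space: one extent at the lowest usable key
 * (PAGE) and another at the highest possible key (~0).
 */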
TEST_BEGIN(test_rtree_extrema) {
	extent_t extent_a, extent_b;
	extent_init(&extent_a, NULL, NULL, SC_LARGE_MINCLASS, false,
	    sz_size2index(SC_LARGE_MINCLASS), 0,
	    extent_state_active, false, false, true);
	extent_init(&extent_b, NULL, NULL, 0, false, SC_NSIZES, 0,
	    extent_state_active, false, false, true);

	tsdn_t *tsdn = tsdn_fetch();

	rtree_t *rtree = &test_rtree;
	rtree_ctx_t rtree_ctx;
	rtree_ctx_data_init(&rtree_ctx);
	assert_false(rtree_new(rtree, false), "Unexpected rtree_new() failure");

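	/* Write extent_a at the lowest usable key (PAGE) and read it back. */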
	assert_false(rtree_write(tsdn, rtree, &rtree_ctx, PAGE, &extent_a,
	    extent_szind_get(&extent_a), extent_slab_get(&extent_a)),
	    "Unexpected rtree_write() failure");
	rtree_szind_slab_update(tsdn, rtree, &rtree_ctx, PAGE,
	    extent_szind_get(&extent_a), extent_slab_get(&extent_a));
	assert_ptr_eq(rtree_extent_read(tsdn, rtree, &rtree_ctx, PAGE, true),
	    &extent_a,
	    "rtree_extent_read() should return previously set value");

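	/* Write extent_b at the highest possible key (~0) and read it back. */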
	assert_false(rtree_write(tsdn, rtree, &rtree_ctx, ~((uintptr_t)0),
	    &extent_b, extent_szind_get_maybe_invalid(&extent_b),
	    extent_slab_get(&extent_b)), "Unexpected rtree_write() failure");
	assert_ptr_eq(rtree_extent_read(tsdn, rtree, &rtree_ctx,
	    ~((uintptr_t)0), true), &extent_b,
	    "rtree_extent_read() should return previously set value");

	rtree_delete(tsdn, rtree);
}
TEST_END

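/*
 * Keys that differ only in the insignificant low bits (offsets within a page)
 * should map to the same rtree leaf entry.
 */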
TEST_BEGIN(test_rtree_bits) {
	tsdn_t *tsdn = tsdn_fetch();

	uintptr_t keys[] = {PAGE, PAGE + 1,
	    PAGE + (((uintptr_t)1) << LG_PAGE) - 1};

	extent_t extent;
	extent_init(&extent, NULL, NULL, 0, false, SC_NSIZES, 0,
	    extent_state_active, false, false, true);

	rtree_t *rtree = &test_rtree;
	rtree_ctx_t rtree_ctx;
	rtree_ctx_data_init(&rtree_ctx);
	assert_false(rtree_new(rtree, false), "Unexpected rtree_new() failure");

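	/*
	 * All keys fall within the page starting at PAGE, so a write through
	 * any one of them should be visible through all of them.
	 */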
	for (unsigned i = 0; i < sizeof(keys)/sizeof(uintptr_t); i++) {
		assert_false(rtree_write(tsdn, rtree, &rtree_ctx, keys[i],
		    &extent, SC_NSIZES, false),
		    "Unexpected rtree_write() failure");
		for (unsigned j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
			assert_ptr_eq(rtree_extent_read(tsdn, rtree, &rtree_ctx,
			    keys[j], true), &extent,
			    "rtree_extent_read() should return previously set "
			    "value and ignore insignificant key bits; i=%u, "
			    "j=%u, set key=%#"FMTxPTR", get key=%#"FMTxPTR, i,
			    j, keys[i], keys[j]);
		}
		assert_ptr_null(rtree_extent_read(tsdn, rtree, &rtree_ctx,
		    (((uintptr_t)2) << LG_PAGE), false),
		    "Only leftmost rtree leaf should be set; i=%u", i);
		rtree_clear(tsdn, rtree, &rtree_ctx, keys[i]);
	}

	rtree_delete(tsdn, rtree);
}
TEST_END

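/* Write, read back, clear, and re-read a set of randomly generated keys. */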
TEST_BEGIN(test_rtree_random) {
#define NSET 16
#define SEED 42
	sfmt_t *sfmt = init_gen_rand(SEED);
	tsdn_t *tsdn = tsdn_fetch();
	uintptr_t keys[NSET];
	rtree_t *rtree = &test_rtree;
	rtree_ctx_t rtree_ctx;
	rtree_ctx_data_init(&rtree_ctx);

	extent_t extent;
	extent_init(&extent, NULL, NULL, 0, false, SC_NSIZES, 0,
	    extent_state_active, false, false, true);

	assert_false(rtree_new(rtree, false), "Unexpected rtree_new() failure");

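	/* Write an extent at each random key and check immediate read-back. */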
	for (unsigned i = 0; i < NSET; i++) {
		keys[i] = (uintptr_t)gen_rand64(sfmt);
		rtree_leaf_elm_t *elm = rtree_leaf_elm_lookup(tsdn, rtree,
		    &rtree_ctx, keys[i], false, true);
		assert_ptr_not_null(elm,
		    "Unexpected rtree_leaf_elm_lookup() failure");
		rtree_leaf_elm_write(tsdn, rtree, elm, &extent, SC_NSIZES,
		    false);
		assert_ptr_eq(rtree_extent_read(tsdn, rtree, &rtree_ctx,
		    keys[i], true), &extent,
		    "rtree_extent_read() should return previously set value");
	}
	for (unsigned i = 0; i < NSET; i++) {
		assert_ptr_eq(rtree_extent_read(tsdn, rtree, &rtree_ctx,
		    keys[i], true), &extent,
		    "rtree_extent_read() should return previously set value, "
		    "i=%u", i);
	}

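	/* Clear each key; subsequent reads should return NULL. */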
	for (unsigned i = 0; i < NSET; i++) {
		rtree_clear(tsdn, rtree, &rtree_ctx, keys[i]);
		assert_ptr_null(rtree_extent_read(tsdn, rtree, &rtree_ctx,
		    keys[i], true),
		    "rtree_extent_read() should return NULL after rtree_clear()");
	}
	for (unsigned i = 0; i < NSET; i++) {
		assert_ptr_null(rtree_extent_read(tsdn, rtree, &rtree_ctx,
		    keys[i], true),
		    "rtree_extent_read() should return NULL after rtree_clear()");
	}

	rtree_delete(tsdn, rtree);
	fini_gen_rand(sfmt);
#undef NSET
#undef SEED
}
TEST_END

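/* Install the allocation intercepts before running the tests. */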
int
main(void) {
	rtree_node_alloc_orig = rtree_node_alloc;
	rtree_node_alloc = rtree_node_alloc_intercept;
	rtree_node_dalloc_orig = rtree_node_dalloc;
	rtree_node_dalloc = rtree_node_dalloc_intercept;
	rtree_leaf_alloc_orig = rtree_leaf_alloc;
	rtree_leaf_alloc = rtree_leaf_alloc_intercept;
	rtree_leaf_dalloc_orig = rtree_leaf_dalloc;
	rtree_leaf_dalloc = rtree_leaf_dalloc_intercept;

	return test(
	    test_rtree_read_empty,
	    test_rtree_extrema,
	    test_rtree_bits,
	    test_rtree_random);
}