1 //===-- asan_noinst_test.cpp ----------------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file is a part of AddressSanitizer, an address sanity checker.
10 //
11 // This test file should be compiled w/o asan instrumentation.
12 //===----------------------------------------------------------------------===//
13 
14 #include <assert.h>
15 #include <sanitizer/allocator_interface.h>
16 #include <stdio.h>
17 #include <stdlib.h>
18 #include <string.h>  // for memset()
19 
20 #include <algorithm>
21 #include <limits>
22 #include <vector>
23 
24 #include "asan_allocator.h"
25 #include "asan_internal.h"
26 #include "asan_mapping.h"
27 #include "asan_test_utils.h"
28 
29 using namespace __sanitizer;
30 
31 // ATTENTION!
32 // Please don't call intercepted functions (including malloc() and friends)
33 // in this test. The static runtime library is linked explicitly (without
34 // -fsanitize=address), thus the interceptors do not work correctly on OS X.
35 
36 // Make sure __asan_init is called before any test case is run.
37 struct AsanInitCaller {
AsanInitCallerAsanInitCaller38   AsanInitCaller() {
39     __asan_init();
40   }
41 };
42 static AsanInitCaller asan_init_caller;
43 
// Sanity check: death tests must work in this uninstrumented binary.
// exit(1) terminates with a non-zero status, which EXPECT_DEATH accepts.
TEST(AddressSanitizer, InternalSimpleDeathTest) {
  EXPECT_DEATH(exit(1), "");
}
47 
MallocStress(size_t n)48 static void MallocStress(size_t n) {
49   u32 seed = my_rand();
50   BufferedStackTrace stack1;
51   stack1.trace_buffer[0] = 0xa123;
52   stack1.trace_buffer[1] = 0xa456;
53   stack1.size = 2;
54 
55   BufferedStackTrace stack2;
56   stack2.trace_buffer[0] = 0xb123;
57   stack2.trace_buffer[1] = 0xb456;
58   stack2.size = 2;
59 
60   BufferedStackTrace stack3;
61   stack3.trace_buffer[0] = 0xc123;
62   stack3.trace_buffer[1] = 0xc456;
63   stack3.size = 2;
64 
65   std::vector<void *> vec;
66   for (size_t i = 0; i < n; i++) {
67     if ((i % 3) == 0) {
68       if (vec.empty()) continue;
69       size_t idx = my_rand_r(&seed) % vec.size();
70       void *ptr = vec[idx];
71       vec[idx] = vec.back();
72       vec.pop_back();
73       __asan::asan_free(ptr, &stack1, __asan::FROM_MALLOC);
74     } else {
75       size_t size = my_rand_r(&seed) % 1000 + 1;
76       switch ((my_rand_r(&seed) % 128)) {
77         case 0: size += 1024; break;
78         case 1: size += 2048; break;
79         case 2: size += 4096; break;
80       }
81       size_t alignment = 1 << (my_rand_r(&seed) % 10 + 1);
82       char *ptr = (char*)__asan::asan_memalign(alignment, size,
83                                                &stack2, __asan::FROM_MALLOC);
84       EXPECT_EQ(size, __asan::asan_malloc_usable_size(ptr, 0, 0));
85       vec.push_back(ptr);
86       ptr[0] = 0;
87       ptr[size-1] = 0;
88       ptr[size/2] = 0;
89     }
90   }
91   for (size_t i = 0; i < vec.size(); i++)
92     __asan::asan_free(vec[i], &stack3, __asan::FROM_MALLOC);
93 }
94 
95 
// Single-threaded allocator stress; fewer iterations on low-memory configs.
TEST(AddressSanitizer, NoInstMallocTest) {
  MallocStress(ASAN_LOW_MEMORY ? 300000 : 1000000);
}
99 
// Runs MallocStress concurrently from several threads.
TEST(AddressSanitizer, ThreadedMallocStressTest) {
  const int kNumThreads = 4;
  const int kNumIterations = (ASAN_LOW_MEMORY) ? 10000 : 100000;
  // Trampoline with the signature pthread expects.  The previous code cast
  // MallocStress (void(size_t)) directly to void *(*)(void *) and called it
  // through the wrong function-pointer type, which is undefined behavior.
  void *(*worker)(void *) = [](void *arg) -> void * {
    MallocStress(reinterpret_cast<uptr>(arg));
    return nullptr;
  };
  pthread_t t[kNumThreads];
  for (int i = 0; i < kNumThreads; i++) {
    PTHREAD_CREATE(&t[i], 0, worker,
                   reinterpret_cast<void *>(static_cast<uptr>(kNumIterations)));
  }
  for (int i = 0; i < kNumThreads; i++) {
    PTHREAD_JOIN(t[i], 0);
  }
}
112 
PrintShadow(const char * tag,uptr ptr,size_t size)113 static void PrintShadow(const char *tag, uptr ptr, size_t size) {
114   fprintf(stderr, "%s shadow: %lx size % 3ld: ", tag, (long)ptr, (long)size);
115   uptr prev_shadow = 0;
116   for (sptr i = -32; i < (sptr)size + 32; i++) {
117     uptr shadow = __asan::MemToShadow(ptr + i);
118     if (i == 0 || i == (sptr)size)
119       fprintf(stderr, ".");
120     if (shadow != prev_shadow) {
121       prev_shadow = shadow;
122       fprintf(stderr, "%02x", (int)*(u8*)shadow);
123     }
124   }
125   fprintf(stderr, "\n");
126 }
127 
// Manual/debug-only test (DISABLED_): prints shadow for freshly allocated
// ("m") and just-freed ("f") buffers of sizes 1..513.  Note the "f" dump
// intentionally reads shadow of a deleted pointer to show the freed state.
TEST(AddressSanitizer, DISABLED_InternalPrintShadow) {
  for (size_t size = 1; size <= 513; size++) {
    char *ptr = new char[size];
    PrintShadow("m", (uptr)ptr, size);
    delete [] ptr;
    PrintShadow("f", (uptr)ptr, size);
  }
}
136 
// Checks that freed chunks are quarantined: after freeing `p`, keep doing
// same-size malloc/free pairs until the allocator hands back the same
// address.  The quarantine should delay reuse for many iterations
// (>= 10000) yet the address must eventually come back (< max_i).
TEST(AddressSanitizer, QuarantineTest) {
  BufferedStackTrace stack;
  stack.trace_buffer[0] = 0x890;
  stack.size = 1;

  const int size = 1024;
  void *p = __asan::asan_malloc(size, &stack);
  __asan::asan_free(p, &stack, __asan::FROM_MALLOC);
  size_t i;
  size_t max_i = 1 << 30;
  for (i = 0; i < max_i; i++) {
    void *p1 = __asan::asan_malloc(size, &stack);
    __asan::asan_free(p1, &stack, __asan::FROM_MALLOC);
    if (p1 == p) break;
  }
  EXPECT_GE(i, 10000U);
  EXPECT_LT(i, max_i);
}
155 
156 #if !defined(__NetBSD__)
ThreadedQuarantineTestWorker(void * unused)157 void *ThreadedQuarantineTestWorker(void *unused) {
158   (void)unused;
159   u32 seed = my_rand();
160   BufferedStackTrace stack;
161   stack.trace_buffer[0] = 0x890;
162   stack.size = 1;
163 
164   for (size_t i = 0; i < 1000; i++) {
165     void *p = __asan::asan_malloc(1 + (my_rand_r(&seed) % 4000), &stack);
166     __asan::asan_free(p, &stack, __asan::FROM_MALLOC);
167   }
168   return NULL;
169 }
170 
171 // Check that the thread local allocators are flushed when threads are
172 // destroyed.
TEST(AddressSanitizer,ThreadedQuarantineTest)173 TEST(AddressSanitizer, ThreadedQuarantineTest) {
174   // Run the routine once to warm up ASAN internal structures to get more
175   // predictable incremental memory changes.
176   pthread_t t;
177   PTHREAD_CREATE(&t, NULL, ThreadedQuarantineTestWorker, 0);
178   PTHREAD_JOIN(t, 0);
179 
180   const int n_threads = 3000;
181   size_t mmaped1 = __sanitizer_get_heap_size();
182   for (int i = 0; i < n_threads; i++) {
183     pthread_t t;
184     PTHREAD_CREATE(&t, NULL, ThreadedQuarantineTestWorker, 0);
185     PTHREAD_JOIN(t, 0);
186     size_t mmaped2 = __sanitizer_get_heap_size();
187     // Figure out why this much memory is required.
188     EXPECT_LT(mmaped2 - mmaped1, 320U * (1 << 20));
189   }
190 }
191 #endif
192 
ThreadedOneSizeMallocStress(void * unused)193 void *ThreadedOneSizeMallocStress(void *unused) {
194   (void)unused;
195   BufferedStackTrace stack;
196   stack.trace_buffer[0] = 0x890;
197   stack.size = 1;
198   const size_t kNumMallocs = 1000;
199   for (int iter = 0; iter < 1000; iter++) {
200     void *p[kNumMallocs];
201     for (size_t i = 0; i < kNumMallocs; i++) {
202       p[i] = __asan::asan_malloc(32, &stack);
203     }
204     for (size_t i = 0; i < kNumMallocs; i++) {
205       __asan::asan_free(p[i], &stack, __asan::FROM_MALLOC);
206     }
207   }
208   return NULL;
209 }
210 
// Runs the single-size-class stress worker from four concurrent threads.
TEST(AddressSanitizer, ThreadedOneSizeMallocStressTest) {
  const int kNumThreads = 4;
  pthread_t t[kNumThreads];
  for (int i = 0; i < kNumThreads; i++) {
    PTHREAD_CREATE(&t[i], 0, ThreadedOneSizeMallocStress, 0);
  }
  for (int i = 0; i < kNumThreads; i++) {
    PTHREAD_JOIN(t[i], 0);
  }
}
221 
// Check that __asan_region_is_poisoned works for shadow regions: a query
// starting inside the low-shadow, shadow-gap, or high-shadow range must
// report the very first address as poisoned.
TEST(AddressSanitizer, ShadowRegionIsPoisonedTest) {
  using __asan::kHighMemEnd;
  uptr ptr = kLowShadowBeg + 200;
  EXPECT_EQ(ptr, __asan_region_is_poisoned(ptr, 100));
  ptr = kShadowGapBeg + 200;
  EXPECT_EQ(ptr, __asan_region_is_poisoned(ptr, 100));
  ptr = kHighShadowBeg + 200;
  EXPECT_EQ(ptr, __asan_region_is_poisoned(ptr, 100));
}
232 
233 // Test __asan_load1 & friends.
234 typedef void (*CB)(uptr p);
TestLoadStoreCallbacks(CB cb[2][5])235 static void TestLoadStoreCallbacks(CB cb[2][5]) {
236   uptr buggy_ptr;
237 
238   __asan_test_only_reported_buggy_pointer = &buggy_ptr;
239   BufferedStackTrace stack;
240   stack.trace_buffer[0] = 0x890;
241   stack.size = 1;
242 
243   for (uptr len = 16; len <= 32; len++) {
244     char *ptr = (char*) __asan::asan_malloc(len, &stack);
245     uptr p = reinterpret_cast<uptr>(ptr);
246     for (uptr is_write = 0; is_write <= 1; is_write++) {
247       for (uptr size_log = 0; size_log <= 4; size_log++) {
248         uptr size = 1 << size_log;
249         CB call = cb[is_write][size_log];
250         // Iterate only size-aligned offsets.
251         for (uptr offset = 0; offset <= len; offset += size) {
252           buggy_ptr = 0;
253           call(p + offset);
254           if (offset + size <= len)
255             EXPECT_EQ(buggy_ptr, 0U);
256           else
257             EXPECT_EQ(buggy_ptr, p + offset);
258         }
259       }
260     }
261     __asan::asan_free(ptr, &stack, __asan::FROM_MALLOC);
262   }
263   __asan_test_only_reported_buggy_pointer = 0;
264 }
265 
// Exercise the public __asan_loadN/__asan_storeN callbacks (N = 1..16).
TEST(AddressSanitizer, LoadStoreCallbacks) {
  CB cb[2][5] = {{
                     __asan_load1,
                     __asan_load2,
                     __asan_load4,
                     __asan_load8,
                     __asan_load16,
                 },
                 {
                     __asan_store1,
                     __asan_store2,
                     __asan_store4,
                     __asan_store8,
                     __asan_store16,
                 }};
  TestLoadStoreCallbacks(cb);
}
283 
284 #if defined(__x86_64__) && \
285     !(defined(SANITIZER_APPLE) || defined(SANITIZER_WINDOWS))
286 // clang-format off
287 
288 #define CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(s, reg, op)        \
289   void CallAsanMemoryAccessAdd##reg##op##s(uptr address) {      \
290   asm("push  %%" #reg " \n"                                     \
291   "mov   %[x], %%" #reg " \n"                                   \
292   "call  __asan_check_" #op "_add_" #s "_" #reg "\n"            \
293   "pop   %%" #reg " \n"                                         \
294   :                                                             \
295   : [x] "r"(address)                                            \
296       : "r8", "rdi");                                           \
297   }
298 
299 #define TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(reg)            \
300   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(1, reg, load)          \
301   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(1, reg, store)         \
302   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(2, reg, load)          \
303   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(2, reg, store)         \
304   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(4, reg, load)          \
305   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(4, reg, store)         \
306   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(8, reg, load)          \
307   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(8, reg, store)         \
308   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(16, reg, load)         \
309   CALL_ASAN_MEMORY_ACCESS_CALLBACK_ADD(16, reg, store)        \
310                                                               \
311   TEST(AddressSanitizer, LoadStoreCallbacksAddX86##reg) {     \
312     CB cb[2][5] = {{                                          \
313                        CallAsanMemoryAccessAdd##reg##load1,   \
314                        CallAsanMemoryAccessAdd##reg##load2,   \
315                        CallAsanMemoryAccessAdd##reg##load4,   \
316                        CallAsanMemoryAccessAdd##reg##load8,   \
317                        CallAsanMemoryAccessAdd##reg##load16,  \
318                    },                                         \
319                    {                                          \
320                        CallAsanMemoryAccessAdd##reg##store1,  \
321                        CallAsanMemoryAccessAdd##reg##store2,  \
322                        CallAsanMemoryAccessAdd##reg##store4,  \
323                        CallAsanMemoryAccessAdd##reg##store8,  \
324                        CallAsanMemoryAccessAdd##reg##store16, \
325                    }};                                        \
326     TestLoadStoreCallbacks(cb);                               \
327   }
328 
329 // Instantiate all but R10 and R11 callbacks. We are using PLTSafe class with
330 // the intrinsic, which guarantees that the code generation will never emit
331 // R10 or R11 callbacks.
332 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RAX)
333 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBX)
334 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RCX)
335 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDX)
336 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RSI)
337 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDI)
338 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBP)
339 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R8)
340 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R9)
341 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R12)
342 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R13)
343 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R14)
344 TEST_ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R15)
345 
346 // clang-format on
347 #endif
348