//===-- asan_fake_stack.cc ------------------------------------------------===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//

#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

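// kMagic1 is the per-byte shadow magic; kMagic2/4/8 replicate it across 2,
// 4, and 8 bytes so that SetShadow() can fill shadow with word-sized stores.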
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;

// For small size classes, inline PoisonShadow for better performance.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (SHADOW_SCALE == 3 && class_id <= 6) {
    // This code expects SHADOW_SCALE=3.
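    // A frame of (64 << class_id) bytes (assuming kMinStackFrameSizeLog == 6)
    // maps to (8 << class_id) shadow bytes, i.e. (1 << class_id) u64 stores.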
    for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
      shadow[i] = magic;
      // Make sure this does not become memset.
      SanitizerBreakOptimization(nullptr);
    }
  } else {
    // The size class is too big; it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}

FakeStack *FakeStack::Create(uptr stack_size_log) {
  static uptr kMinStackSizeLog = 16;
  static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
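  // Clamp stack_size_log so that each size class region (2^stack_size_log
  // bytes, see asan_fake_stack.h) stays within a sane range.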
  if (stack_size_log < kMinStackSizeLog)
    stack_size_log = kMinStackSizeLog;
  if (stack_size_log > kMaxStackSizeLog)
    stack_size_log = kMaxStackSizeLog;
  uptr size = RequiredSize(stack_size_log);
  FakeStack *res = reinterpret_cast<FakeStack *>(
      flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
                             : MmapOrDie(size, "FakeStack"));
  res->stack_size_log_ = stack_size_log;
  u8 *p = reinterpret_cast<u8 *>(res);
  VReport(1, "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
          "mmapped %zdK, noreserve=%d \n",
          GetCurrentTidOrInvalid(), p,
          p + FakeStack::RequiredSize(stack_size_log), stack_size_log,
          size >> 10, flags()->uar_noreserve);
  return res;
}

void FakeStack::Destroy(int tid) {
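  // Unpoison the entire region first so that memory later mapped at these
  // addresses does not inherit stale poisoning.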
  PoisonAll(0);
  if (Verbosity() >= 2) {
    InternalScopedString str(kNumberOfSizeClasses * 50);
    for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
      str.append("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
                 NumberOfFrames(stack_size_log(), class_id));
    Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
  }
  uptr size = RequiredSize(stack_size_log_);
  FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
  UnmapOrDie(this, size);
}

void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
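    // Stash the address of this frame's flag inside the frame itself so that
    // Deallocate() can clear it without recomputing the position.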
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr; // We are out of fake stack.
}

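// Maps an address inside the fake stack back to the frame that contains it.
// The frames of each size class occupy a contiguous 2^stack_size_log-byte
// region (see asan_fake_stack.h), so both the class_id and the frame index
// can be recovered with shifts.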
uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (((uptr)1) << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  uptr res = base + pos * BytesInSizeClass(class_id);
  *frame_end = res + BytesInSizeClass(class_id);
  *frame_beg = res + sizeof(FakeFrame);
  return res;
}

void FakeStack::HandleNoReturn() {
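  // Just set the flag; the actual collection happens on the next Allocate().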
  needs_gc_ = true;
}

// When a throw, longjmp, or similar non-local transfer of control happens,
// we do not call OnFree() and as a result may leak one or more fake frames.
// The good news is that we are notified about all such events by
// HandleNoReturn(). If we recently had such a no-return event, we need to
// collect the garbage frames. We do it based on their 'real_stack' values:
// everything that is lower than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  uptr collected = 0;
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      if (ff->real_stack < real_stack) {
        flags[i] = 0;
        collected++;
      }
    }
  }
  needs_gc_ = false;
}

void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      uptr begin = reinterpret_cast<uptr>(ff);
      callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    }
  }
}

#if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return nullptr; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA

static FakeStack *GetFakeStack() {
  AsanThread *t = GetCurrentThread();
  if (!t) return nullptr;
  return t->fake_stack();
}

static FakeStack *GetFakeStackFast() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  if (!__asan_option_detect_stack_use_after_return)
    return nullptr;
  return GetFakeStack();
}

ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  FakeStack *fs = GetFakeStackFast();
  if (!fs) return 0;
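  // The address of a local approximates the current stack pointer (the stack
  // is assumed to grow down); GC() later uses it to tell live frames from
  // frames whose owners have already returned.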
  uptr local_stack;
  uptr real_stack = reinterpret_cast<uptr>(&local_stack);
  FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
  if (!ff) return 0;  // Out of fake stack.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}

} // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_##class_id(uptr size) {                              \
    return OnMalloc(class_id, size);                                           \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size) {                                                   \
    OnFree(ptr, class_id, size);                                               \
  }

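// Class id N corresponds to a fake frame of (64 << N) bytes (assuming
// kMinStackFrameSizeLog == 6); the instrumentation picks the entry point
// matching the function's frame size, so classes 0..10 below cover frames
// from 64 bytes up to 64 KiB.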
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }

SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
                                   void **end) {
  FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
  if (!fs) return nullptr;
  uptr frame_beg, frame_end;
  FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
      reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
  if (!frame) return nullptr;
  if (frame->magic != kCurrentStackFrameMagic)
    return nullptr;
  if (beg) *beg = reinterpret_cast<void*>(frame_beg);
  if (end) *end = reinterpret_cast<void*>(frame_end);
  return reinterpret_cast<void*>(frame->real_stack);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __asan_alloca_poison(uptr addr, uptr size) {
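  // Assuming addr is kAllocaRedzoneSize-aligned, the resulting layout is:
  //   [addr - kAllocaRedzoneSize, addr)                left redzone
  //   [addr, addr + size)                              the alloca itself
  //   [addr + size, RightRzAddr)                       partial right redzone
  //   [RightRzAddr, RightRzAddr + kAllocaRedzoneSize)  full right redzone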
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  uptr PartialRzAddr = addr + size;
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  uptr PartialRzAligned = PartialRzAddr & ~(SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __asan_allocas_unpoison(uptr top, uptr bottom) {
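  // Clear the shadow for [top, bottom); each shadow byte covers
  // SHADOW_GRANULARITY application bytes, hence the division below.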
  if ((!top) || (top > bottom)) return;
  REAL(memset)(reinterpret_cast<void*>(MemToShadow(top)), 0,
               (bottom - top) / SHADOW_GRANULARITY);
}
} // extern "C"