//===-- msan_poisoning.cpp --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
//===----------------------------------------------------------------------===//

#include "msan_poisoning.h"

#include "interception/interception.h"
#include "msan_origin.h"
#include "sanitizer_common/sanitizer_common.h"

DECLARE_REAL(void *, memset, void *dest, int c, uptr n)
DECLARE_REAL(void *, memcpy, void *dest, const void *src, uptr n)
DECLARE_REAL(void *, memmove, void *dest, const void *src, uptr n)

namespace __msan {

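// Returns the origin of the first poisoned byte in [addr, addr+size), or 0 if
// the whole range is unpoisoned. Shadow is 1:1 with app memory, and origins
// are stored as one u32 per 4-byte granule, so the origin slot is found by
// rounding the shadow address down to a multiple of 4.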
u32 GetOriginIfPoisoned(uptr addr, uptr size) {
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(addr);
  for (uptr i = 0; i < size; ++i)
    if (s[i]) return *(u32 *)SHADOW_TO_ORIGIN(((uptr)s + i) & ~3UL);
  return 0;
}

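// For every byte whose source shadow (starting at src_shadow) is poisoned,
// writes src_origin into the origin slot of the corresponding destination
// byte at addr.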
void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
                         u32 src_origin) {
  uptr dst_s = MEM_TO_SHADOW(addr);
  uptr src_s = src_shadow;
  uptr src_s_end = src_s + size;

  for (; src_s < src_s_end; ++dst_s, ++src_s)
    if (*(u8 *)src_s) *(u32 *)SHADOW_TO_ORIGIN(dst_s & ~3UL) = src_origin;
}

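// Copies origins for the app range [dst, dst+size) from the origins of
// [src, src+size). The unaligned head and tail granules are handled
// separately; the aligned middle is either memcpy'd or, when origin chaining
// is enabled (track_origins > 1), rewritten slot by slot so that each copied
// origin has the current stack appended to its chain.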
void CopyOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src)) return;

  uptr d = (uptr)dst;
  uptr beg = d & ~3UL;
  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
    beg += 4;
  }

  uptr end = (d + size) & ~3UL;
  // If both ends fall into the same 4-byte slot, we are done.
  if (end < beg) return;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  if (beg < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    // FIXME: factor out to msan_copy_origin_aligned
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s);
      u32 *src_end = (u32 *)MEM_TO_ORIGIN(s + (end - beg));
      u32 *dst = (u32 *)MEM_TO_ORIGIN(beg);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src < src_end; ++src, ++src_s, ++dst) {
        if (!*src_s) continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memcpy)((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s),
                   end - beg);
    }
  }
}

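// Like CopyOrigin, but walks the aligned middle from the highest granule down
// so the transfer stays correct when dst overlaps src from above, analogous
// to memmove's backward copy.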
void ReverseCopyOrigin(const void *dst, const void *src, uptr size,
                       StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src))
    return;

  uptr d = (uptr)dst;
  uptr end = (d + size) & ~3UL;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1)
        o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  uptr beg = d & ~3UL;

  if (beg + 4 < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s + end - beg - 4);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s + end - beg - 4);
      u32 *src_begin = (u32 *)MEM_TO_ORIGIN(s);
      u32 *dst = (u32 *)MEM_TO_ORIGIN(end - 4);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src >= src_begin; --src, --src_s, --dst) {
        if (!*src_s)
          continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memmove)
      ((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s), end - beg - 4);
    }
  }

  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1)
        o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
  }
}

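// memmove-style origin transfer: picks forward or reverse copy depending on
// whether the 4-byte-aligned source and destination ranges overlap. E.g.,
// moving 8 bytes from app address 0x100 to 0x104 makes the first destination
// granule coincide with the second source granule, so origins must be copied
// back to front.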
void MoveOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  // If the destination origin range overlaps the source origin range, move
  // origins by copying them in reverse order; otherwise, copy origins in the
  // normal order.
  uptr src_aligned_beg = reinterpret_cast<uptr>(src) & ~3UL;
  uptr src_aligned_end = (reinterpret_cast<uptr>(src) + size) & ~3UL;
  uptr dst_aligned_beg = reinterpret_cast<uptr>(dst) & ~3UL;
  if (dst_aligned_beg < src_aligned_end && dst_aligned_beg >= src_aligned_beg)
    return ReverseCopyOrigin(dst, src, size, stack);
  return CopyOrigin(dst, src, size, stack);
}

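// memmove counterpart for metadata: moves both the shadow and, if origin
// tracking is enabled, the origins of [src, src+size) to dst.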
void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  if (src == dst) return;
  // MoveOrigin transfers origins by referring to their shadows, so origins
  // must be moved before the shadows are.
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
  REAL(memmove)((void *)MEM_TO_SHADOW((uptr)dst),
                (void *)MEM_TO_SHADOW((uptr)src), size);
}

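// memcpy counterpart for metadata: copies the shadow with memcpy but
// delegates origins to MoveOrigin (see comment below).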
void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  // Because the origin range is slightly larger than the app range, even a
  // non-overlapping memcpy may produce overlapping origin ranges, so use
  // MoveOrigin rather than CopyOrigin.
  REAL(memcpy)((void *)MEM_TO_SHADOW((uptr)dst),
               (void *)MEM_TO_SHADOW((uptr)src), size);
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
}

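// Copies the application bytes together with their shadow and origins.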
void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack) {
  REAL(memcpy)(dst, src, size);
  CopyShadowAndOrigin(dst, src, size, stack);
}

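// Fills the shadow of [ptr, ptr+size) with the given value. Clearing a large
// range (value == 0, size above clear_shadow_mmap_threshold) is done by
// remapping the page-aligned middle with MmapFixedSuperNoReserve instead of
// memset; only the unaligned page edges are memset to 0.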
void SetShadow(const void *ptr, uptr size, u8 value) {
  uptr PageSize = GetPageSizeCached();
  uptr shadow_beg = MEM_TO_SHADOW(ptr);
  uptr shadow_end = shadow_beg + size;
  if (value ||
      shadow_end - shadow_beg < common_flags()->clear_shadow_mmap_threshold) {
    REAL(memset)((void *)shadow_beg, value, shadow_end - shadow_beg);
  } else {
    uptr page_beg = RoundUpTo(shadow_beg, PageSize);
    uptr page_end = RoundDownTo(shadow_end, PageSize);

    if (page_beg >= page_end) {
      REAL(memset)((void *)shadow_beg, 0, shadow_end - shadow_beg);
    } else {
      if (page_beg != shadow_beg) {
        REAL(memset)((void *)shadow_beg, 0, page_beg - shadow_beg);
      }
      if (page_end != shadow_end) {
        REAL(memset)((void *)page_end, 0, shadow_end - page_end);
      }
      if (!MmapFixedSuperNoReserve(page_beg, page_end - page_beg))
        Die();
    }
  }
}

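// Stamps a single origin id over the origin range of [dst, dst+size). E.g.,
// a 5-byte range starting one byte into a 4-byte granule spans two granules,
// so two u32 origin slots get written.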
void SetOrigin(const void *dst, uptr size, u32 origin) {
  // Origin mapping is 4 bytes per 4 bytes of application memory.
  // Here we extend the range such that its left and right bounds are both
  // 4 byte aligned.
  uptr x = MEM_TO_ORIGIN((uptr)dst);
  uptr beg = x & ~3UL;               // align down.
  uptr end = (x + size + 3) & ~3UL;  // align up.
  u64 origin64 = ((u64)origin << 32) | origin;
  // This is like memset, but the value is 32-bit. We unroll by 2 to write
  // 64 bits at once. May want to unroll further to get 128-bit stores.
  if (beg & 7ULL) {
    *(u32 *)beg = origin;
    beg += 4;
  }
  for (uptr addr = beg; addr < (end & ~7UL); addr += 8) *(u64 *)addr = origin64;
  if (end & 7ULL) *(u32 *)(end - 4) = origin;
}

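// Marks [dst, dst+size) as fully uninitialized (shadow 0xff per byte) and,
// when origin tracking is enabled, records a heap origin built from the
// current stack so later reports can show where the poisoning happened.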
void PoisonMemory(const void *dst, uptr size, StackTrace *stack) {
  SetShadow(dst, size, (u8)-1);

  if (__msan_get_track_origins()) {
    Origin o = Origin::CreateHeapOrigin(stack);
    SetOrigin(dst, size, o.raw_id());
  }
}

}  // namespace __msan