1 /* AddressSanitizer, a fast memory error detector.
2    Copyright (C) 2012-2020 Free Software Foundation, Inc.
3    Contributed by Kostya Serebryany <kcc@google.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "stringpool.h"
51 #include "attribs.h"
52 #include "asan.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "output.h"
57 #include "langhooks.h"
58 #include "cfgloop.h"
59 #include "gimple-builder.h"
60 #include "gimple-fold.h"
61 #include "ubsan.h"
62 #include "builtins.h"
63 #include "fnmatch.h"
64 #include "tree-inline.h"
65 #include "tree-ssa.h"
66 
67 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
68    with <2x slowdown on average.
69 
70    The tool consists of two parts:
71    instrumentation module (this file) and a run-time library.
72    The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
74        ShadowAddr = (X >> 3) + Offset
75        ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
76        if (ShadowValue)
77 	 __asan_report_load8(X);
78      For a load of N bytes (N=1, 2 or 4) from address X:
79        ShadowAddr = (X >> 3) + Offset
80        ShadowValue = *(char*)ShadowAddr;
81        if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
83 	   __asan_report_loadN(X);
84    Stores are instrumented similarly, but using __asan_report_storeN functions.
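
   For example, instrumenting a 4-byte store "*(int *) X = VALUE" is
   conceptually equivalent to the following C sketch (the actual check is
   emitted as GIMPLE and later RTL, and Offset is the target-specific value
   returned by targetm.asan_shadow_offset):

     signed char ShadowValue = *(signed char *) ((X >> 3) + Offset);
     if (ShadowValue != 0
	 && ((X & 7) + 4 - 1) >= ShadowValue)
       __asan_report_store4 (X);
     *(int *) X = VALUE;
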
   A call to __asan_init_vN() is inserted into the list of module CTORs.
86    N is the version number of the AddressSanitizer API. The changes between the
87    API versions are listed in libsanitizer/asan/asan_interface_internal.h.
88 
   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
91    provides __asan_report* and __asan_init_vN functions.
92 
93    Read more:
94    http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
95 
96    The current implementation supports detection of out-of-bounds and
97    use-after-free in the heap, on the stack and for global variables.
98 
99    [Protection of stack variables]
100 
101    To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
103    stack grows downward:
104 
105      int
106      foo ()
107      {
108        char a[23] = {0};
109        int b[2] = {0};
110 
111        a[5] = 1;
112        b[1] = 2;
113 
114        return a[5] + b[1];
115      }
116 
117    For this function, the stack protected by asan will be organized as
118    follows, from the top of the stack to the bottom:
119 
120    Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
121 
   Slot 2/ [8 bytes of red zone that, added to the 24 bytes of 'a', make
	   the next slot 32-byte aligned; this one is called the Partial
	   Redzone; this 32-byte alignment is an asan constraint]
125 
126    Slot 3/ [24 bytes for variable 'a']
127 
128    Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
129 
   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
131 
132    Slot 6/ [8 bytes for variable 'b']
133 
134    Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
135 	    'LEFT RedZone']
136 
   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as follows:
139 
140      1/ The first 8 bytes contain a magical asan number that is always
141      0x41B58AB3.
142 
     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the asan run-time library), whose format is
     the following (a worked example follows this list):
146 
147       "<function-name> <space> <num-of-variables-on-the-stack>
148       (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
149       <length-of-var-in-bytes> ){n} "
150 
151 	where '(...){n}' means the content inside the parenthesis occurs 'n'
152 	times, with 'n' being the number of variables on the stack.
153 
154      3/ The following 8 bytes contain the PC of the current function which
155      will be used by the run-time library to print an error message.
156 
157      4/ The following 8 bytes are reserved for internal use by the run-time.
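
     As a worked example (with illustrative offsets), for the foo ()
     function above, which has two variables, 'b' (8 bytes, say at
     offset 32 from the bottom of the frame) and 'a' (23 bytes, say at
     offset 96), the string following the format above would be:

       "foo 2 32 8 96 23 "

     The string actually built by asan_emit_stack_protection later in
     this file additionally appends, for each variable, the length of
     its name followed by the name itself (possibly with a :line
     suffix).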
158 
159    The shadow memory for that stack layout is going to look like this:
160 
     - content of the shadow memory for slot 7 (4 shadow bytes): 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of the shadow memory for slots 6 and 5 (4 shadow bytes):
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of the shadow memory for slot 4 (4 shadow bytes): 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of the shadow memory for slots 3 and 2 (4 shadow bytes):
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes occupied by variable 'a' are addressable.

     - content of the shadow memory for slot 1 (4 shadow bytes): 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.
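
     Reading the shadow of the whole frame from the lowest address
     (slot 7) to the highest (slot 1), shadow byte by shadow byte
     (least significant byte of each 32-bit value above first), gives:

       F1 F1 F1 F1  00 F4 F4 F4  F2 F2 F2 F2  00 00 00 F4  F3 F3 F3 F3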
196 
   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
205 
206    [Protection of global variables]
207 
208    The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.
211 
   So the global variables are laid out so as to insert a red zone between
   them.  The red zones are sized so that each variable starts on a
   32-byte boundary.
215 
216    Then a constructor function is installed so that, for each global
217    variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:
219 
220      struct __asan_global
221      {
222        // Address of the beginning of the global variable.
223        const void *__beg;
224 
225        // Initial size of the global variable.
226        uptr __size;
227 
228        // Size of the global variable + size of the red zone.  This
229        //   size is 32 bytes aligned.
230        uptr __size_with_redzone;
231 
232        // Name of the global variable.
233        const void *__name;
234 
235        // Name of the module where the global variable is declared.
236        const void *__module_name;
237 
238        // 1 if it has dynamic initialization, 0 otherwise.
239        uptr __has_dynamic_init;
240 
241        // A pointer to struct that contains source location, could be NULL.
242        __asan_global_source_location *__location;
243      }
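
   A conceptual sketch of such a constructor (the names here are
   illustrative, not the ones the compiler actually generates):

     static struct __asan_global __asan_globals_array[2] = { ... };

     static void
     module_asan_ctor (void)
     {
       __asan_init ();
       __asan_register_globals (__asan_globals_array, 2);
     }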
244 
245    A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
247 
248 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
249 static bool asan_shadow_offset_computed;
250 static vec<char *> sanitized_sections;
251 static tree last_alloca_addr;
252 
253 /* Set of variable declarations that are going to be guarded by
254    use-after-scope sanitizer.  */
255 
256 hash_set<tree> *asan_handled_variables = NULL;
257 
258 hash_set <tree> *asan_used_labels = NULL;
259 
260 /* Sets shadow offset to value in string VAL.  */
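/* For example, a -fsanitize=kernel-address build may pass
   -fasan-shadow-offset=0xdffffc0000000000; the option's argument is what
   arrives here as VAL and overrides the target's default shadow offset.  */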
261 
262 bool
set_asan_shadow_offset (const char *val)
264 {
265   char *endp;
266 
267   errno = 0;
268 #ifdef HAVE_LONG_LONG
269   asan_shadow_offset_value = strtoull (val, &endp, 0);
270 #else
271   asan_shadow_offset_value = strtoul (val, &endp, 0);
272 #endif
273   if (!(*val != '\0' && *endp == '\0' && errno == 0))
274     return false;
275 
276   asan_shadow_offset_computed = true;
277 
278   return true;
279 }
280 
281 /* Set list of user-defined sections that need to be sanitized.  */
282 
283 void
set_sanitized_sections (const char *sections)
285 {
286   char *pat;
287   unsigned i;
288   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
289     free (pat);
290   sanitized_sections.truncate (0);
291 
292   for (const char *s = sections; *s; )
293     {
294       const char *end;
295       for (end = s; *end && *end != ','; ++end);
296       size_t len = end - s;
297       sanitized_sections.safe_push (xstrndup (s, len));
298       s = *end ? end + 1 : end;
299     }
300 }
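
/* For example, -fsanitize-sections=.mysec*,.other leaves the two glob
   patterns ".mysec*" and ".other" in the sanitized_sections vector;
   section_sanitized_p below matches section names against them with
   fnmatch.  */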
301 
302 bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
304 {
305   return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
306 	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
307 }
308 
309 bool
asan_sanitize_stack_p (void)
311 {
312   return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
313 }
314 
315 bool
asan_sanitize_allocas_p (void)
317 {
318   return (asan_sanitize_stack_p () && param_asan_protect_allocas);
319 }
320 
321 /* Checks whether section SEC should be sanitized.  */
322 
323 static bool
section_sanitized_p (const char *sec)
325 {
326   char *pat;
327   unsigned i;
328   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
329     if (fnmatch (pat, sec, FNM_PERIOD) == 0)
330       return true;
331   return false;
332 }
333 
334 /* Returns Asan shadow offset.  */
335 
336 static unsigned HOST_WIDE_INT
asan_shadow_offset ()
338 {
339   if (!asan_shadow_offset_computed)
340     {
341       asan_shadow_offset_computed = true;
342       asan_shadow_offset_value = targetm.asan_shadow_offset ();
343     }
344   return asan_shadow_offset_value;
345 }
346 
/* Returns whether the Asan shadow offset has been set.  */
348 bool
asan_shadow_offset_set_p ()
350 {
351   return asan_shadow_offset_computed;
352 }
353 
354 alias_set_type asan_shadow_set = -1;
355 
356 /* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
357    alias set is used for all shadow memory accesses.  */
358 static GTY(()) tree shadow_ptr_types[3];
359 
360 /* Decl for __asan_option_detect_stack_use_after_return.  */
361 static GTY(()) tree asan_detect_stack_use_after_return;
362 
363 /* Hashtable support for memory references used by gimple
364    statements.  */
365 
366 /* This type represents a reference to a memory region.  */
367 struct asan_mem_ref
368 {
369   /* The expression of the beginning of the memory region.  */
370   tree start;
371 
372   /* The size of the access.  */
373   HOST_WIDE_INT access_size;
374 };
375 
376 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
377 
378 /* Initializes an instance of asan_mem_ref.  */
379 
380 static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
382 {
383   ref->start = start;
384   ref->access_size = access_size;
385 }
386 
/* Allocates memory for an instance of asan_mem_ref from the
   asan_mem_ref_pool object allocator and initializes it.
389    START is the address of (or the expression pointing to) the
390    beginning of memory reference.  ACCESS_SIZE is the size of the
391    access to the referenced memory.  */
392 
393 static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
395 {
396   asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
397 
398   asan_mem_ref_init (ref, start, access_size);
399   return ref;
400 }
401 
402 /* This builds and returns a pointer to the end of the memory region
403    that starts at START and of length LEN.  */
404 
405 tree
asan_mem_ref_get_end (tree start, tree len)
407 {
408   if (len == NULL_TREE || integer_zerop (len))
409     return start;
410 
411   if (!ptrofftype_p (len))
412     len = convert_to_ptrofftype (len);
413 
414   return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
415 }
416 
417 /*  Return a tree expression that represents the end of the referenced
418     memory region.  Beware that this function can actually build a new
419     tree expression.  */
420 
421 tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
423 {
424   return asan_mem_ref_get_end (ref->start, len);
425 }
426 
427 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
428 {
429   static inline hashval_t hash (const asan_mem_ref *);
430   static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
431 };
432 
433 /* Hash a memory reference.  */
434 
435 inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
437 {
438   return iterative_hash_expr (mem_ref->start, 0);
439 }
440 
/* Compare two memory references.  Only their start addresses are
   compared.  */
443 
444 inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
446 			    const asan_mem_ref *m2)
447 {
448   return operand_equal_p (m1->start, m2->start, 0);
449 }
450 
451 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
452 
453 /* Returns a reference to the hash table containing memory references.
454    This function ensures that the hash table is created.  Note that
455    this hash table is updated by the function
456    update_mem_ref_hash_table.  */
457 
458 static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
460 {
461   if (!asan_mem_ref_ht)
462     asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
463 
464   return asan_mem_ref_ht;
465 }
466 
467 /* Clear all entries from the memory references hash table.  */
468 
469 static void
empty_mem_ref_hash_table ()
471 {
472   if (asan_mem_ref_ht)
473     asan_mem_ref_ht->empty ();
474 }
475 
476 /* Free the memory references hash table.  */
477 
478 static void
free_mem_ref_resources ()
480 {
481   delete asan_mem_ref_ht;
482   asan_mem_ref_ht = NULL;
483 
484   asan_mem_ref_pool.release ();
485 }
486 
487 /* Return true iff the memory reference REF has been instrumented.  */
488 
489 static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
491 {
492   asan_mem_ref r;
493   asan_mem_ref_init (&r, ref, access_size);
494 
495   asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
496   return saved_ref && saved_ref->access_size >= access_size;
497 }
498 
499 /* Return true iff the memory reference REF has been instrumented.  */
500 
501 static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
503 {
504   return has_mem_ref_been_instrumented (ref->start, ref->access_size);
505 }
506 
507 /* Return true iff access to memory region starting at REF and of
508    length LEN has been instrumented.  */
509 
510 static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
512 {
513   HOST_WIDE_INT size_in_bytes
514     = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
515 
516   return size_in_bytes != -1
517     && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
518 }
519 
520 /* Set REF to the memory reference present in a gimple assignment
521    ASSIGNMENT.  Return true upon successful completion, false
522    otherwise.  */
523 
524 static bool
get_mem_ref_of_assignment (const gassign *assignment,
526 			   asan_mem_ref *ref,
527 			   bool *ref_is_store)
528 {
529   gcc_assert (gimple_assign_single_p (assignment));
530 
531   if (gimple_store_p (assignment)
532       && !gimple_clobber_p (assignment))
533     {
534       ref->start = gimple_assign_lhs (assignment);
535       *ref_is_store = true;
536     }
537   else if (gimple_assign_load_p (assignment))
538     {
539       ref->start = gimple_assign_rhs1 (assignment);
540       *ref_is_store = false;
541     }
542   else
543     return false;
544 
545   ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
546   return true;
547 }
548 
549 /* Return address of last allocated dynamic alloca.  */
550 
551 static tree
get_last_alloca_addr ()
553 {
554   if (last_alloca_addr)
555     return last_alloca_addr;
556 
557   last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
558   gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
559   edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
560   gsi_insert_on_edge_immediate (e, g);
561   return last_alloca_addr;
562 }
563 
564 /* Insert __asan_allocas_unpoison (top, bottom) call before
565    __builtin_stack_restore (new_sp) call.
566    The pseudocode of this routine should look like this:
567      top = last_alloca_addr;
568      bot = new_sp;
569      __asan_allocas_unpoison (top, bot);
570      last_alloca_addr = new_sp;
571      __builtin_stack_restore (new_sp);
572    In general, we can't use new_sp as bot parameter because on some
573    architectures SP has non zero offset from dynamic stack area.  Moreover, on
574    some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
575    particular function only after all callees were expanded to rtl.
576    The most noticeable example is PowerPC{,64}, see
577    http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
578    To overcome the issue we use following trick: pass new_sp as a second
579    parameter to __asan_allocas_unpoison and rewrite it during expansion with
580    new_sp + (virtual_dynamic_stack_rtx - sp) later in
581    expand_asan_emit_allocas_unpoison function.  */
582 
583 static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
585 {
586   if (!iter || !asan_sanitize_allocas_p ())
587     return;
588 
589   tree last_alloca = get_last_alloca_addr ();
590   tree restored_stack = gimple_call_arg (call, 0);
591   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
592   gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
593   gsi_insert_before (iter, g, GSI_SAME_STMT);
594   g = gimple_build_assign (last_alloca, restored_stack);
595   gsi_insert_before (iter, g, GSI_SAME_STMT);
596 }
597 
598 /* Deploy and poison redzones around __builtin_alloca call.  To do this, we
599    should replace this call with another one with changed parameters and
600    replace all its uses with new address, so
601        addr = __builtin_alloca (old_size, align);
602    is replaced by
603        left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
604    Following two statements are optimized out if we know that
605    old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. alloca doesn't need partial
606    redzone.
607        misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
608        partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
609        right_redzone_size = ASAN_RED_ZONE_SIZE;
610        additional_size = left_redzone_size + partial_redzone_size +
611                          right_redzone_size;
612        new_size = old_size + additional_size;
613        new_alloca = __builtin_alloca (new_size, max (align, 32))
614        __asan_alloca_poison (new_alloca, old_size)
615        addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
616        last_alloca_addr = new_alloca;
617    ADDITIONAL_SIZE is added to make new memory allocation contain not only
618    requested memory, but also left, partial and right redzones as well as some
619    additional space, required by alignment.  */
620 
621 static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
623 {
624   if (!iter || !asan_sanitize_allocas_p ())
625     return;
626 
627   gassign *g;
628   gcall *gg;
629   const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
630 
631   tree last_alloca = get_last_alloca_addr ();
632   tree callee = gimple_call_fndecl (call);
633   tree old_size = gimple_call_arg (call, 0);
634   tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
635 					 : ptr_type_node;
636   tree partial_size = NULL_TREE;
637   unsigned int align
638     = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
639       ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
640 
641   /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
642      bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
643      manually.  */
644   align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
645 
646   tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
647   tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
648 
649   /* Extract lower bits from old_size.  */
650   wide_int size_nonzero_bits = get_nonzero_bits (old_size);
651   wide_int rz_mask
652     = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
653   wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
654 
655   /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
656      redzone.  Otherwise, compute its size here.  */
657   if (wi::ne_p (old_size_lower_bits, 0))
658     {
659       /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
660          partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
661       g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
662 			       BIT_AND_EXPR, old_size, alloca_rz_mask);
663       gsi_insert_before (iter, g, GSI_SAME_STMT);
664       tree misalign = gimple_assign_lhs (g);
665       g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
666 			       redzone_size, misalign);
667       gsi_insert_before (iter, g, GSI_SAME_STMT);
668       partial_size = gimple_assign_lhs (g);
669     }
670 
671   /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
672   tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
673 							+ ASAN_RED_ZONE_SIZE);
674   /* If alloca has partial redzone, include it to additional_size too.  */
675   if (partial_size)
676     {
677       /* additional_size += partial_size.  */
678       g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
679 			       partial_size, additional_size);
680       gsi_insert_before (iter, g, GSI_SAME_STMT);
681       additional_size = gimple_assign_lhs (g);
682     }
683 
684   /* new_size = old_size + additional_size.  */
685   g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
686 			   additional_size);
687   gsi_insert_before (iter, g, GSI_SAME_STMT);
688   tree new_size = gimple_assign_lhs (g);
689 
690   /* Build new __builtin_alloca call:
691        new_alloca_with_rz = __builtin_alloca (new_size, align).  */
692   tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
693   gg = gimple_build_call (fn, 2, new_size,
694 			  build_int_cst (size_type_node, align));
695   tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
696   gimple_call_set_lhs (gg, new_alloca_with_rz);
697   gsi_insert_before (iter, gg, GSI_SAME_STMT);
698 
699   /* new_alloca = new_alloca_with_rz + align.  */
700   g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
701 			   new_alloca_with_rz,
702 			   build_int_cst (size_type_node,
703 					  align / BITS_PER_UNIT));
704   gsi_insert_before (iter, g, GSI_SAME_STMT);
705   tree new_alloca = gimple_assign_lhs (g);
706 
707   /* Poison newly created alloca redzones:
708       __asan_alloca_poison (new_alloca, old_size).  */
709   fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
710   gg = gimple_build_call (fn, 2, new_alloca, old_size);
711   gsi_insert_before (iter, gg, GSI_SAME_STMT);
712 
713   /* Save new_alloca_with_rz value into last_alloca to use it during
714      allocas unpoisoning.  */
715   g = gimple_build_assign (last_alloca, new_alloca_with_rz);
716   gsi_insert_before (iter, g, GSI_SAME_STMT);
717 
718   /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
719   replace_call_with_value (iter, new_alloca);
720 }
721 
722 /* Return the memory references contained in a gimple statement
723    representing a builtin call that has to do with memory access.  */
724 
725 static bool
726 get_mem_refs_of_builtin_call (gcall *call,
727 			      asan_mem_ref *src0,
728 			      tree *src0_len,
729 			      bool *src0_is_store,
730 			      asan_mem_ref *src1,
731 			      tree *src1_len,
732 			      bool *src1_is_store,
733 			      asan_mem_ref *dst,
734 			      tree *dst_len,
735 			      bool *dst_is_store,
736 			      bool *dest_is_deref,
737 			      bool *intercepted_p,
738 			      gimple_stmt_iterator *iter = NULL)
739 {
740   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
741 
742   tree callee = gimple_call_fndecl (call);
743   tree source0 = NULL_TREE, source1 = NULL_TREE,
744     dest = NULL_TREE, len = NULL_TREE;
745   bool is_store = true, got_reference_p = false;
746   HOST_WIDE_INT access_size = 1;
747 
748   *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
749 
750   switch (DECL_FUNCTION_CODE (callee))
751     {
752       /* (s, s, n) style memops.  */
753     case BUILT_IN_BCMP:
754     case BUILT_IN_MEMCMP:
755       source0 = gimple_call_arg (call, 0);
756       source1 = gimple_call_arg (call, 1);
757       len = gimple_call_arg (call, 2);
758       break;
759 
760       /* (src, dest, n) style memops.  */
761     case BUILT_IN_BCOPY:
762       source0 = gimple_call_arg (call, 0);
763       dest = gimple_call_arg (call, 1);
764       len = gimple_call_arg (call, 2);
765       break;
766 
767       /* (dest, src, n) style memops.  */
768     case BUILT_IN_MEMCPY:
769     case BUILT_IN_MEMCPY_CHK:
770     case BUILT_IN_MEMMOVE:
771     case BUILT_IN_MEMMOVE_CHK:
772     case BUILT_IN_MEMPCPY:
773     case BUILT_IN_MEMPCPY_CHK:
774       dest = gimple_call_arg (call, 0);
775       source0 = gimple_call_arg (call, 1);
776       len = gimple_call_arg (call, 2);
777       break;
778 
779       /* (dest, n) style memops.  */
780     case BUILT_IN_BZERO:
781       dest = gimple_call_arg (call, 0);
782       len = gimple_call_arg (call, 1);
783       break;
784 
      /* (dest, x, n) style memops.  */
786     case BUILT_IN_MEMSET:
787     case BUILT_IN_MEMSET_CHK:
788       dest = gimple_call_arg (call, 0);
789       len = gimple_call_arg (call, 2);
790       break;
791 
792     case BUILT_IN_STRLEN:
793       source0 = gimple_call_arg (call, 0);
794       len = gimple_call_lhs (call);
795       break;
796 
797     case BUILT_IN_STACK_RESTORE:
798       handle_builtin_stack_restore (call, iter);
799       break;
800 
801     CASE_BUILT_IN_ALLOCA:
802       handle_builtin_alloca (call, iter);
803       break;
804     /* And now the __atomic* and __sync builtins.
805        These are handled differently from the classical memory
806        access builtins above.  */
807 
808     case BUILT_IN_ATOMIC_LOAD_1:
809       is_store = false;
810       /* FALLTHRU */
811     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
812     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
813     case BUILT_IN_SYNC_FETCH_AND_OR_1:
814     case BUILT_IN_SYNC_FETCH_AND_AND_1:
815     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
816     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
817     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
818     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
819     case BUILT_IN_SYNC_OR_AND_FETCH_1:
820     case BUILT_IN_SYNC_AND_AND_FETCH_1:
821     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
822     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
823     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
824     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
825     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
826     case BUILT_IN_SYNC_LOCK_RELEASE_1:
827     case BUILT_IN_ATOMIC_EXCHANGE_1:
828     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
829     case BUILT_IN_ATOMIC_STORE_1:
830     case BUILT_IN_ATOMIC_ADD_FETCH_1:
831     case BUILT_IN_ATOMIC_SUB_FETCH_1:
832     case BUILT_IN_ATOMIC_AND_FETCH_1:
833     case BUILT_IN_ATOMIC_NAND_FETCH_1:
834     case BUILT_IN_ATOMIC_XOR_FETCH_1:
835     case BUILT_IN_ATOMIC_OR_FETCH_1:
836     case BUILT_IN_ATOMIC_FETCH_ADD_1:
837     case BUILT_IN_ATOMIC_FETCH_SUB_1:
838     case BUILT_IN_ATOMIC_FETCH_AND_1:
839     case BUILT_IN_ATOMIC_FETCH_NAND_1:
840     case BUILT_IN_ATOMIC_FETCH_XOR_1:
841     case BUILT_IN_ATOMIC_FETCH_OR_1:
842       access_size = 1;
843       goto do_atomic;
844 
845     case BUILT_IN_ATOMIC_LOAD_2:
846       is_store = false;
847       /* FALLTHRU */
848     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
849     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
850     case BUILT_IN_SYNC_FETCH_AND_OR_2:
851     case BUILT_IN_SYNC_FETCH_AND_AND_2:
852     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
853     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
854     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
855     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
856     case BUILT_IN_SYNC_OR_AND_FETCH_2:
857     case BUILT_IN_SYNC_AND_AND_FETCH_2:
858     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
859     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
860     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
861     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
862     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
863     case BUILT_IN_SYNC_LOCK_RELEASE_2:
864     case BUILT_IN_ATOMIC_EXCHANGE_2:
865     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
866     case BUILT_IN_ATOMIC_STORE_2:
867     case BUILT_IN_ATOMIC_ADD_FETCH_2:
868     case BUILT_IN_ATOMIC_SUB_FETCH_2:
869     case BUILT_IN_ATOMIC_AND_FETCH_2:
870     case BUILT_IN_ATOMIC_NAND_FETCH_2:
871     case BUILT_IN_ATOMIC_XOR_FETCH_2:
872     case BUILT_IN_ATOMIC_OR_FETCH_2:
873     case BUILT_IN_ATOMIC_FETCH_ADD_2:
874     case BUILT_IN_ATOMIC_FETCH_SUB_2:
875     case BUILT_IN_ATOMIC_FETCH_AND_2:
876     case BUILT_IN_ATOMIC_FETCH_NAND_2:
877     case BUILT_IN_ATOMIC_FETCH_XOR_2:
878     case BUILT_IN_ATOMIC_FETCH_OR_2:
879       access_size = 2;
880       goto do_atomic;
881 
882     case BUILT_IN_ATOMIC_LOAD_4:
883       is_store = false;
884       /* FALLTHRU */
885     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
886     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
887     case BUILT_IN_SYNC_FETCH_AND_OR_4:
888     case BUILT_IN_SYNC_FETCH_AND_AND_4:
889     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
890     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
891     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
892     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
893     case BUILT_IN_SYNC_OR_AND_FETCH_4:
894     case BUILT_IN_SYNC_AND_AND_FETCH_4:
895     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
896     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
897     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
898     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
899     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
900     case BUILT_IN_SYNC_LOCK_RELEASE_4:
901     case BUILT_IN_ATOMIC_EXCHANGE_4:
902     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
903     case BUILT_IN_ATOMIC_STORE_4:
904     case BUILT_IN_ATOMIC_ADD_FETCH_4:
905     case BUILT_IN_ATOMIC_SUB_FETCH_4:
906     case BUILT_IN_ATOMIC_AND_FETCH_4:
907     case BUILT_IN_ATOMIC_NAND_FETCH_4:
908     case BUILT_IN_ATOMIC_XOR_FETCH_4:
909     case BUILT_IN_ATOMIC_OR_FETCH_4:
910     case BUILT_IN_ATOMIC_FETCH_ADD_4:
911     case BUILT_IN_ATOMIC_FETCH_SUB_4:
912     case BUILT_IN_ATOMIC_FETCH_AND_4:
913     case BUILT_IN_ATOMIC_FETCH_NAND_4:
914     case BUILT_IN_ATOMIC_FETCH_XOR_4:
915     case BUILT_IN_ATOMIC_FETCH_OR_4:
916       access_size = 4;
917       goto do_atomic;
918 
919     case BUILT_IN_ATOMIC_LOAD_8:
920       is_store = false;
921       /* FALLTHRU */
922     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
923     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
924     case BUILT_IN_SYNC_FETCH_AND_OR_8:
925     case BUILT_IN_SYNC_FETCH_AND_AND_8:
926     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
927     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
928     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
929     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
930     case BUILT_IN_SYNC_OR_AND_FETCH_8:
931     case BUILT_IN_SYNC_AND_AND_FETCH_8:
932     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
933     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
934     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
935     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
936     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
937     case BUILT_IN_SYNC_LOCK_RELEASE_8:
938     case BUILT_IN_ATOMIC_EXCHANGE_8:
939     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
940     case BUILT_IN_ATOMIC_STORE_8:
941     case BUILT_IN_ATOMIC_ADD_FETCH_8:
942     case BUILT_IN_ATOMIC_SUB_FETCH_8:
943     case BUILT_IN_ATOMIC_AND_FETCH_8:
944     case BUILT_IN_ATOMIC_NAND_FETCH_8:
945     case BUILT_IN_ATOMIC_XOR_FETCH_8:
946     case BUILT_IN_ATOMIC_OR_FETCH_8:
947     case BUILT_IN_ATOMIC_FETCH_ADD_8:
948     case BUILT_IN_ATOMIC_FETCH_SUB_8:
949     case BUILT_IN_ATOMIC_FETCH_AND_8:
950     case BUILT_IN_ATOMIC_FETCH_NAND_8:
951     case BUILT_IN_ATOMIC_FETCH_XOR_8:
952     case BUILT_IN_ATOMIC_FETCH_OR_8:
953       access_size = 8;
954       goto do_atomic;
955 
956     case BUILT_IN_ATOMIC_LOAD_16:
957       is_store = false;
958       /* FALLTHRU */
959     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
960     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
961     case BUILT_IN_SYNC_FETCH_AND_OR_16:
962     case BUILT_IN_SYNC_FETCH_AND_AND_16:
963     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
964     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
965     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
966     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
967     case BUILT_IN_SYNC_OR_AND_FETCH_16:
968     case BUILT_IN_SYNC_AND_AND_FETCH_16:
969     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
970     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
971     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
972     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
973     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
974     case BUILT_IN_SYNC_LOCK_RELEASE_16:
975     case BUILT_IN_ATOMIC_EXCHANGE_16:
976     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
977     case BUILT_IN_ATOMIC_STORE_16:
978     case BUILT_IN_ATOMIC_ADD_FETCH_16:
979     case BUILT_IN_ATOMIC_SUB_FETCH_16:
980     case BUILT_IN_ATOMIC_AND_FETCH_16:
981     case BUILT_IN_ATOMIC_NAND_FETCH_16:
982     case BUILT_IN_ATOMIC_XOR_FETCH_16:
983     case BUILT_IN_ATOMIC_OR_FETCH_16:
984     case BUILT_IN_ATOMIC_FETCH_ADD_16:
985     case BUILT_IN_ATOMIC_FETCH_SUB_16:
986     case BUILT_IN_ATOMIC_FETCH_AND_16:
987     case BUILT_IN_ATOMIC_FETCH_NAND_16:
988     case BUILT_IN_ATOMIC_FETCH_XOR_16:
989     case BUILT_IN_ATOMIC_FETCH_OR_16:
990       access_size = 16;
991       /* FALLTHRU */
992     do_atomic:
993       {
994 	dest = gimple_call_arg (call, 0);
995 	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
997 	   dereference the address DEST before handing it to
998 	   instrument_derefs.  */
999 	tree type = build_nonstandard_integer_type (access_size
1000 						    * BITS_PER_UNIT, 1);
1001 	dest = build2 (MEM_REF, type, dest,
1002 		       build_int_cst (build_pointer_type (char_type_node), 0));
1003 	break;
1004       }
1005 
1006     default:
1007       /* The other builtins memory access are not instrumented in this
1008 	 function because they either don't have any length parameter,
1009 	 or their length parameter is just a limit.  */
1010       break;
1011     }
1012 
1013   if (len != NULL_TREE)
1014     {
1015       if (source0 != NULL_TREE)
1016 	{
1017 	  src0->start = source0;
1018 	  src0->access_size = access_size;
1019 	  *src0_len = len;
1020 	  *src0_is_store = false;
1021 	}
1022 
1023       if (source1 != NULL_TREE)
1024 	{
1025 	  src1->start = source1;
1026 	  src1->access_size = access_size;
1027 	  *src1_len = len;
1028 	  *src1_is_store = false;
1029 	}
1030 
1031       if (dest != NULL_TREE)
1032 	{
1033 	  dst->start = dest;
1034 	  dst->access_size = access_size;
1035 	  *dst_len = len;
1036 	  *dst_is_store = true;
1037 	}
1038 
1039       got_reference_p = true;
1040     }
1041   else if (dest)
1042     {
1043       dst->start = dest;
1044       dst->access_size = access_size;
1045       *dst_len = NULL_TREE;
1046       *dst_is_store = is_store;
1047       *dest_is_deref = true;
1048       got_reference_p = true;
1049     }
1050 
1051   return got_reference_p;
1052 }
1053 
1054 /* Return true iff a given gimple statement has been instrumented.
1055    Note that the statement is "defined" by the memory references it
1056    contains.  */
1057 
1058 static bool
has_stmt_been_instrumented_p (gimple *stmt)
1060 {
1061   if (gimple_assign_single_p (stmt))
1062     {
1063       bool r_is_store;
1064       asan_mem_ref r;
1065       asan_mem_ref_init (&r, NULL, 1);
1066 
1067       if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1068 				     &r_is_store))
1069 	return has_mem_ref_been_instrumented (&r);
1070     }
1071   else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1072     {
1073       asan_mem_ref src0, src1, dest;
1074       asan_mem_ref_init (&src0, NULL, 1);
1075       asan_mem_ref_init (&src1, NULL, 1);
1076       asan_mem_ref_init (&dest, NULL, 1);
1077 
1078       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1079       bool src0_is_store = false, src1_is_store = false,
1080 	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1081       if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1082 					&src0, &src0_len, &src0_is_store,
1083 					&src1, &src1_len, &src1_is_store,
1084 					&dest, &dest_len, &dest_is_store,
1085 					&dest_is_deref, &intercepted_p))
1086 	{
1087 	  if (src0.start != NULL_TREE
1088 	      && !has_mem_ref_been_instrumented (&src0, src0_len))
1089 	    return false;
1090 
1091 	  if (src1.start != NULL_TREE
1092 	      && !has_mem_ref_been_instrumented (&src1, src1_len))
1093 	    return false;
1094 
1095 	  if (dest.start != NULL_TREE
1096 	      && !has_mem_ref_been_instrumented (&dest, dest_len))
1097 	    return false;
1098 
1099 	  return true;
1100 	}
1101     }
1102   else if (is_gimple_call (stmt) && gimple_store_p (stmt))
1103     {
1104       asan_mem_ref r;
1105       asan_mem_ref_init (&r, NULL, 1);
1106 
1107       r.start = gimple_call_lhs (stmt);
1108       r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1109       return has_mem_ref_been_instrumented (&r);
1110     }
1111 
1112   return false;
1113 }
1114 
1115 /*  Insert a memory reference into the hash table.  */
1116 
1117 static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1119 {
1120   hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1121 
1122   asan_mem_ref r;
1123   asan_mem_ref_init (&r, ref, access_size);
1124 
1125   asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1126   if (*slot == NULL || (*slot)->access_size < access_size)
1127     *slot = asan_mem_ref_new (ref, access_size);
1128 }
1129 
1130 /* Initialize shadow_ptr_types array.  */
1131 
1132 static void
asan_init_shadow_ptr_types (void)
1134 {
1135   asan_shadow_set = new_alias_set ();
1136   tree types[3] = { signed_char_type_node, short_integer_type_node,
1137 		    integer_type_node };
1138 
1139   for (unsigned i = 0; i < 3; i++)
1140     {
1141       shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1142       TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1143       shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1144     }
1145 
1146   initialize_sanitizer_builtins ();
1147 }
1148 
/* Return an ADDR_EXPR of a STRING_CST built from the text of pretty
   printer PP.  */
1150 
1151 static tree
asan_pp_string (pretty_printer *pp)
1153 {
1154   const char *buf = pp_formatted_text (pp);
1155   size_t len = strlen (buf);
1156   tree ret = build_string (len + 1, buf);
1157   TREE_TYPE (ret)
1158     = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1159 			build_index_type (size_int (len)));
1160   TREE_READONLY (ret) = 1;
1161   TREE_STATIC (ret) = 1;
1162   return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1163 }
1164 
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't emit a library call
   here though.  */
1167 
1168 static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1170 {
1171   rtx_insn *insn, *insns, *jump;
1172   rtx_code_label *top_label;
1173   rtx end, addr, tmp;
1174 
1175   gcc_assert ((len & 3) == 0);
1176   start_sequence ();
1177   clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1178   insns = get_insns ();
1179   end_sequence ();
1180   for (insn = insns; insn; insn = NEXT_INSN (insn))
1181     if (CALL_P (insn))
1182       break;
1183   if (insn == NULL_RTX)
1184     {
1185       emit_insn (insns);
1186       return;
1187     }
1188 
1189   top_label = gen_label_rtx ();
1190   addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1191   shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1192   end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1193   emit_label (top_label);
1194 
1195   emit_move_insn (shadow_mem, const0_rtx);
1196   tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1197 			     true, OPTAB_LIB_WIDEN);
1198   if (tmp != addr)
1199     emit_move_insn (addr, tmp);
1200   emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1201   jump = get_last_insn ();
1202   gcc_assert (JUMP_P (jump));
1203   add_reg_br_prob_note (jump,
1204 			profile_probability::guessed_always ()
1205 			   .apply_scale (80, 100));
1206 }
1207 
/* Emit the LASANPC label for the current function.  Its address is
   stored in the ASan stack frame description (see
   asan_emit_stack_protection) so the run-time library can report the
   function's PC.  */

void
asan_function_start (void)
1210 {
1211   section *fnsec = function_section (current_function_decl);
1212   switch_to_section (fnsec);
1213   ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1214 			 current_function_funcdef_no);
1215 }
1216 
1217 /* Return number of shadow bytes that are occupied by a local variable
1218    of SIZE bytes.  */
1219 
1220 static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)
1222 {
1223   /* It must be possible to align stack variables to granularity
1224      of shadow memory.  */
1225   gcc_assert (BITS_PER_UNIT
1226 	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1227 
1228   return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1229 }
1230 
1231 /* Always emit 4 bytes at a time.  */
1232 #define RZ_BUFFER_SIZE 4
1233 
1234 /* ASAN redzone buffer container that handles emission of shadow bytes.  */
1235 class asan_redzone_buffer
1236 {
1237 public:
1238   /* Constructor.  */
  asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1240     m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1241     m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1242   {}
1243 
1244   /* Emit VALUE shadow byte at a given OFFSET.  */
1245   void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1246 
  /* Emit the content of the buffer as RTL.  */
1248   void flush_redzone_payload (void);
1249 
1250 private:
1251   /* Flush if the content of the buffer is full
1252      (equal to RZ_BUFFER_SIZE).  */
1253   void flush_if_full (void);
1254 
1255   /* Memory where we last emitted a redzone payload.  */
1256   rtx m_shadow_mem;
1257 
1258   /* Relative offset where we last emitted a redzone payload.  */
1259   HOST_WIDE_INT m_prev_offset;
1260 
1261   /* Relative original offset.  Used for checking only.  */
1262   HOST_WIDE_INT m_original_offset;
1263 
1264 public:
1265   /* Buffer with redzone payload.  */
1266   auto_vec<unsigned char> m_shadow_bytes;
1267 };
1268 
1269 /* Emit VALUE shadow byte at a given OFFSET.  */
1270 
1271 void
asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1273 					unsigned char value)
1274 {
1275   gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1276   gcc_assert (offset >= m_prev_offset);
1277 
1278   HOST_WIDE_INT off
1279     = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1280   if (off == offset)
1281     {
1282       /* Consecutive shadow memory byte.  */
1283       m_shadow_bytes.safe_push (value);
1284       flush_if_full ();
1285     }
1286   else
1287     {
1288       if (!m_shadow_bytes.is_empty ())
1289 	flush_redzone_payload ();
1290 
1291       /* Maybe start earlier in order to use aligned store.  */
1292       HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1293       if (align)
1294 	{
1295 	  offset -= align;
1296 	  for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1297 	    m_shadow_bytes.safe_push (0);
1298 	}
1299 
1300       /* Adjust m_prev_offset and m_shadow_mem.  */
1301       HOST_WIDE_INT diff = offset - m_prev_offset;
1302       m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1303 				     diff >> ASAN_SHADOW_SHIFT);
1304       m_prev_offset = offset;
1305       m_shadow_bytes.safe_push (value);
1306       flush_if_full ();
1307     }
1308 }
1309 
/* Emit the content of the buffer as RTL.  */
1311 
1312 void
asan_redzone_buffer::flush_redzone_payload (void)
1314 {
1315   gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1316 
1317   if (m_shadow_bytes.is_empty ())
1318     return;
1319 
1320   /* Be sure we always emit to an aligned address.  */
1321   gcc_assert (((m_prev_offset - m_original_offset)
1322 	      & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1323 
1324   /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed.  */
1325   unsigned l = m_shadow_bytes.length ();
1326   for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1327     m_shadow_bytes.safe_push (0);
1328 
1329   if (dump_file && (dump_flags & TDF_DETAILS))
1330     fprintf (dump_file,
1331 	     "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1332 
1333   unsigned HOST_WIDE_INT val = 0;
1334   for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1335     {
1336       unsigned char v
1337 	= m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1338       val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1339       if (dump_file && (dump_flags & TDF_DETAILS))
1340 	fprintf (dump_file, "%02x ", v);
1341     }
1342 
1343   if (dump_file && (dump_flags & TDF_DETAILS))
1344     fprintf (dump_file, "\n");
1345 
1346   rtx c = gen_int_mode (val, SImode);
1347   m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1348   emit_move_insn (m_shadow_mem, c);
1349   m_shadow_bytes.truncate (0);
1350 }
1351 
1352 /* Flush if the content of the buffer is full
1353    (equal to RZ_BUFFER_SIZE).  */
1354 
1355 void
asan_redzone_buffer::flush_if_full (void)
1357 {
1358   if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1359     flush_redzone_payload ();
1360 }
1361 
1362 /* Insert code to protect stack vars.  The prologue sequence should be emitted
1363    directly, epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the offsets in the OFFSETS array are
   expressed.  The OFFSETS array contains pairs of offsets in reverse order,
   always the end offset
1366    of some gap that needs protection followed by starting offset,
1367    and DECLS is an array of representative decls for each var partition.
1368    LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1369    elements long (OFFSETS include gap before the first variable as well
1370    as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1371    register which stack vars DECL_RTLs are based on.  Either BASE should be
1372    assigned to PBASE, when not doing use after return protection, or
1373    corresponding address based on __asan_stack_malloc* return value.  */
1374 
1375 rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1377 			    HOST_WIDE_INT *offsets, tree *decls, int length)
1378 {
1379   rtx shadow_base, shadow_mem, ret, mem, orig_base;
1380   rtx_code_label *lab;
1381   rtx_insn *insns;
1382   char buf[32];
1383   HOST_WIDE_INT base_offset = offsets[length - 1];
1384   HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1385   HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1386   HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1387   int l;
1388   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1389   tree str_cst, decl, id;
1390   int use_after_return_class = -1;
1391 
1392   if (shadow_ptr_types[0] == NULL_TREE)
1393     asan_init_shadow_ptr_types ();
1394 
1395   expanded_location cfun_xloc
1396     = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1397 
1398   /* First of all, prepare the description string.  */
1399   pretty_printer asan_pp;
1400 
1401   pp_decimal_int (&asan_pp, length / 2 - 1);
1402   pp_space (&asan_pp);
1403   for (l = length - 2; l; l -= 2)
1404     {
1405       tree decl = decls[l / 2 - 1];
1406       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1407       pp_space (&asan_pp);
1408       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1409       pp_space (&asan_pp);
1410 
1411       expanded_location xloc
1412 	= expand_location (DECL_SOURCE_LOCATION (decl));
1413       char location[32];
1414 
1415       if (xloc.file == cfun_xloc.file)
1416 	sprintf (location, ":%d", xloc.line);
1417       else
1418 	location[0] = '\0';
1419 
1420       if (DECL_P (decl) && DECL_NAME (decl))
1421 	{
1422 	  unsigned idlen
1423 	    = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1424 	  pp_decimal_int (&asan_pp, idlen);
1425 	  pp_space (&asan_pp);
1426 	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1427 	  pp_string (&asan_pp, location);
1428 	}
1429       else
1430 	pp_string (&asan_pp, "9 <unknown>");
1431 
1432       if (l > 2)
1433 	pp_space (&asan_pp);
1434     }
1435   str_cst = asan_pp_string (&asan_pp);
1436 
1437   /* Emit the prologue sequence.  */
1438   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1439       && param_asan_use_after_return)
1440     {
1441       use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1442       /* __asan_stack_malloc_N guarantees alignment
1443 	 N < 6 ? (64 << N) : 4096 bytes.  */
1444       if (alignb > (use_after_return_class < 6
1445 		    ? (64U << use_after_return_class) : 4096U))
1446 	use_after_return_class = -1;
1447       else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1448 	base_align_bias = ((asan_frame_size + alignb - 1)
1449 			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1450     }
1451 
1452   /* Align base if target is STRICT_ALIGNMENT.  */
1453   if (STRICT_ALIGNMENT)
1454     {
1455       const HOST_WIDE_INT align
1456 	= (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1457       base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1458 			   NULL_RTX, 1, OPTAB_DIRECT);
1459     }
1460 
1461   if (use_after_return_class == -1 && pbase)
1462     emit_move_insn (pbase, base);
1463 
1464   base = expand_binop (Pmode, add_optab, base,
1465 		       gen_int_mode (base_offset - base_align_bias, Pmode),
1466 		       NULL_RTX, 1, OPTAB_DIRECT);
1467   orig_base = NULL_RTX;
1468   if (use_after_return_class != -1)
1469     {
1470       if (asan_detect_stack_use_after_return == NULL_TREE)
1471 	{
1472 	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
1473 	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1474 			     integer_type_node);
1475 	  SET_DECL_ASSEMBLER_NAME (decl, id);
1476 	  TREE_ADDRESSABLE (decl) = 1;
1477 	  DECL_ARTIFICIAL (decl) = 1;
1478 	  DECL_IGNORED_P (decl) = 1;
1479 	  DECL_EXTERNAL (decl) = 1;
1480 	  TREE_STATIC (decl) = 1;
1481 	  TREE_PUBLIC (decl) = 1;
1482 	  TREE_USED (decl) = 1;
1483 	  asan_detect_stack_use_after_return = decl;
1484 	}
1485       orig_base = gen_reg_rtx (Pmode);
1486       emit_move_insn (orig_base, base);
1487       ret = expand_normal (asan_detect_stack_use_after_return);
1488       lab = gen_label_rtx ();
1489       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1490 			       VOIDmode, 0, lab,
1491 			       profile_probability::very_likely ());
1492       snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1493 		use_after_return_class);
1494       ret = init_one_libfunc (buf);
1495       ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1496 				     GEN_INT (asan_frame_size
1497 					      + base_align_bias),
1498 				     TYPE_MODE (pointer_sized_int_node));
1499       /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
1500 	 and NULL otherwise.  Check RET value is NULL here and jump over the
1501 	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
1502       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1503 			       VOIDmode, 0, lab,
			       profile_probability::very_unlikely ());
1505       ret = convert_memory_address (Pmode, ret);
1506       emit_move_insn (base, ret);
1507       emit_label (lab);
1508       emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1509 					   gen_int_mode (base_align_bias
1510 							 - base_offset, Pmode),
1511 					   NULL_RTX, 1, OPTAB_DIRECT));
1512     }
1513   mem = gen_rtx_MEM (ptr_mode, base);
1514   mem = adjust_address (mem, VOIDmode, base_align_bias);
1515   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1516   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1517   emit_move_insn (mem, expand_normal (str_cst));
1518   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1519   ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1520   id = get_identifier (buf);
1521   decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1522 		    VAR_DECL, id, char_type_node);
1523   SET_DECL_ASSEMBLER_NAME (decl, id);
1524   TREE_ADDRESSABLE (decl) = 1;
1525   TREE_READONLY (decl) = 1;
1526   DECL_ARTIFICIAL (decl) = 1;
1527   DECL_IGNORED_P (decl) = 1;
1528   TREE_STATIC (decl) = 1;
1529   TREE_PUBLIC (decl) = 0;
1530   TREE_USED (decl) = 1;
1531   DECL_INITIAL (decl) = decl;
1532   TREE_ASM_WRITTEN (decl) = 1;
1533   TREE_ASM_WRITTEN (id) = 1;
1534   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1535   shadow_base = expand_binop (Pmode, lshr_optab, base,
1536 			      gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1537 			      NULL_RTX, 1, OPTAB_DIRECT);
1538   shadow_base
1539     = plus_constant (Pmode, shadow_base,
1540 		     asan_shadow_offset ()
1541 		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
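  /* At this point shadow_base is roughly (a sketch; ASAN_SHADOW_SHIFT and
     asan_shadow_offset () are target-dependent, the values below are the
     usual x86_64/Linux ones):

       shadow_base = (base >> 3) + 0x7fff8000 + (base_align_bias >> 3)

     i.e. the shadow location describing the start of the padded frame.  */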
1542   gcc_assert (asan_shadow_set != -1
1543 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1544   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1545   set_mem_alias_set (shadow_mem, asan_shadow_set);
1546   if (STRICT_ALIGNMENT)
1547     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1548   prev_offset = base_offset;
1549 
1550   asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
1551   for (l = length; l; l -= 2)
1552     {
1553       if (l == 2)
1554 	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1555       offset = offsets[l - 1];
1556 
1557       bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1558       /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1559 	 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1560 	 In that case we have to emit one extra byte that will describe
1561 	 how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed.  */
1562       if (extra_byte)
1563 	{
1564 	  HOST_WIDE_INT aoff
1565 	    = base_offset + ((offset - base_offset)
1566 			     & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1567 	  rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1568 	  offset = aoff + ASAN_SHADOW_GRANULARITY;
1569 	}
1570 
1571       /* Calculate size of red zone payload.  */
1572       while (offset < offsets[l - 2])
1573 	{
1574 	  rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
1575 	  offset += ASAN_SHADOW_GRANULARITY;
1576 	}
1577 
1578       cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1579     }
1580 
1581   /* As the automatic variables are aligned to
1582      ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1583      flushed here.  */
1584   gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1585 
1586   do_pending_stack_adjust ();
1587 
1588   /* Construct epilogue sequence.  */
1589   start_sequence ();
1590 
1591   lab = NULL;
1592   if (use_after_return_class != -1)
1593     {
1594       rtx_code_label *lab2 = gen_label_rtx ();
1595       char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1596       emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1597 			       VOIDmode, 0, lab2,
1598 			       profile_probability::very_likely ());
1599       shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1600       set_mem_alias_set (shadow_mem, asan_shadow_set);
1601       mem = gen_rtx_MEM (ptr_mode, base);
1602       mem = adjust_address (mem, VOIDmode, base_align_bias);
1603       emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1604       unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1605       if (use_after_return_class < 5
1606 	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1607 				  BITS_PER_UNIT, true))
1608 	{
1609 	  /* Emit:
1610 	       memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
1611 	       **SavedFlagPtr(FakeStack, class_id) = 0
1612 	  */
1613 	  store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1614 			   BITS_PER_UNIT, true, RETURN_BEGIN);
1615 
1616 	  unsigned HOST_WIDE_INT offset
1617 	    = (1 << (use_after_return_class + 6));
1618 	  offset -= GET_MODE_SIZE (ptr_mode);
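	  /* A worked example (assuming a 64-bit target, so ptr_mode is
	     8 bytes wide): for use_after_return_class 0 the fake-stack
	     frame is 1 << 6 == 64 bytes, so the saved flag pointer is
	     loaded from BASE + 56 and the flag byte it points to is
	     cleared below.  */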
1619 	  mem = gen_rtx_MEM (ptr_mode, base);
1620 	  mem = adjust_address (mem, ptr_mode, offset);
1621 	  rtx addr = gen_reg_rtx (ptr_mode);
1622 	  emit_move_insn (addr, mem);
1623 	  addr = convert_memory_address (Pmode, addr);
1624 	  mem = gen_rtx_MEM (QImode, addr);
1625 	  emit_move_insn (mem, const0_rtx);
1626 	}
1627       else if (use_after_return_class >= 5
1628 	       || !set_storage_via_setmem (shadow_mem,
1629 					   GEN_INT (sz),
1630 					   gen_int_mode (c, QImode),
1631 					   BITS_PER_UNIT, BITS_PER_UNIT,
1632 					   -1, sz, sz, sz))
1633 	{
1634 	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1635 		    use_after_return_class);
1636 	  ret = init_one_libfunc (buf);
1637 	  rtx addr = convert_memory_address (ptr_mode, base);
1638 	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1639 	  emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1640 			     GEN_INT (asan_frame_size + base_align_bias),
1641 			     TYPE_MODE (pointer_sized_int_node),
1642 			     orig_addr, ptr_mode);
1643 	}
1644       lab = gen_label_rtx ();
1645       emit_jump (lab);
1646       emit_label (lab2);
1647     }
1648 
1649   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1650   set_mem_alias_set (shadow_mem, asan_shadow_set);
1651 
1652   if (STRICT_ALIGNMENT)
1653     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1654 
1655   prev_offset = base_offset;
1656   last_offset = base_offset;
1657   last_size = 0;
1658   last_size_aligned = 0;
1659   for (l = length; l; l -= 2)
1660     {
1661       offset = base_offset + ((offsets[l - 1] - base_offset)
1662 			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1663       if (last_offset + last_size_aligned < offset)
1664 	{
1665 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1666 				       (last_offset - prev_offset)
1667 				       >> ASAN_SHADOW_SHIFT);
1668 	  prev_offset = last_offset;
1669 	  asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1670 	  last_offset = offset;
1671 	  last_size = 0;
1672 	}
1673       else
1674 	last_size = offset - last_offset;
1675       last_size += base_offset + ((offsets[l - 2] - base_offset)
1676 				  & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1677 		   - offset;
1678 
1679       /* Unpoison shadow memory that corresponds to a variable that is
1680 	 subject to use-after-return sanitization.  */
1681       if (l > 2)
1682 	{
1683 	  decl = decls[l / 2 - 2];
1684 	  if (asan_handled_variables != NULL
1685 	      && asan_handled_variables->contains (decl))
1686 	    {
1687 	      HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1688 	      if (dump_file && (dump_flags & TDF_DETAILS))
1689 		{
1690 		  const char *n = (DECL_NAME (decl)
1691 				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
1692 				   : "<unknown>");
1693 		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1694 			   "%s (%" PRId64 " B)\n", n, size);
1695 		}
1696 
1697 		last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1698 	    }
1699 	}
1700       last_size_aligned
1701 	= ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1702 	   & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1703     }
1704   if (last_size_aligned)
1705     {
1706       shadow_mem = adjust_address (shadow_mem, VOIDmode,
1707 				   (last_offset - prev_offset)
1708 				   >> ASAN_SHADOW_SHIFT);
1709       asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1710     }
1711 
1712   /* Clean up the set of instrumented stack variables.  */
1713   delete asan_handled_variables;
1714   asan_handled_variables = NULL;
1715   delete asan_used_labels;
1716   asan_used_labels = NULL;
1717 
1718   do_pending_stack_adjust ();
1719   if (lab)
1720     emit_label (lab);
1721 
1722   insns = get_insns ();
1723   end_sequence ();
1724   return insns;
1725 }
1726 
1727 /* Emit an __asan_allocas_unpoison (top, bot) call.  BOT is the base of the
1728    region to unpoison; the caller typically passes virtual_stack_dynamic_rtx
1729    as TOP.  If BEFORE is non-NULL, emit before it; else start a new sequence.  */
1730 
1731 rtx_insn *
1732 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1733 {
1734   if (before)
1735     push_to_sequence (before);
1736   else
1737     start_sequence ();
1738   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1739   top = convert_memory_address (ptr_mode, top);
1740   bot = convert_memory_address (ptr_mode, bot);
1741   emit_library_call (ret, LCT_NORMAL, ptr_mode,
1742 		     top, ptr_mode, bot, ptr_mode);
1743 
1744   do_pending_stack_adjust ();
1745   rtx_insn *insns = get_insns ();
1746   end_sequence ();
1747   return insns;
1748 }
1749 
1750 /* Return true if DECL, a global var, might be overridden and therefore
1751    needs a local alias.  */
1752 
1753 static bool
1754 asan_needs_local_alias (tree decl)
1755 {
1756   return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1757 }
1758 
1759 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1760    and therefore doesn't need protection.  */
1761 
1762 static bool
1763 is_odr_indicator (tree decl)
1764 {
1765   return (DECL_ARTIFICIAL (decl)
1766 	  && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1767 }
1768 
1769 /* Return true if DECL is a VAR_DECL that should be protected
1770    by Address Sanitizer, by appending a red zone with protected
1771    shadow memory after it and aligning it to at least
1772    ASAN_RED_ZONE_SIZE bytes.  */
1773 
1774 bool
1775 asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
1776 {
1777   if (!param_asan_globals)
1778     return false;
1779 
1780   rtx rtl, symbol;
1781 
1782   if (TREE_CODE (decl) == STRING_CST)
1783     {
1784       /* Instrument all STRING_CSTs except those created
1785 	 by asan_pp_string here.  */
1786       if (shadow_ptr_types[0] != NULL_TREE
1787 	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1788 	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1789 	return false;
1790       return true;
1791     }
1792   if (!VAR_P (decl)
1793       /* TLS vars aren't statically protectable.  */
1794       || DECL_THREAD_LOCAL_P (decl)
1795       /* Externs will be protected elsewhere.  */
1796       || DECL_EXTERNAL (decl)
1797       /* PR sanitizer/81697: For architectures that use section anchors, the
1798 	 first call to asan_protect_global may occur before DECL_RTL (decl) is
1799 	 set.  We should ignore DECL_RTL_SET_P then, because otherwise the first
1800 	 call to asan_protect_global will return FALSE and the following calls
1801 	 on the same decl after setting DECL_RTL (decl) will return TRUE and
1802 	 we'll end up with an inconsistency at runtime.  */
1803       || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
1804       /* Comdat vars pose an ABI problem, we can't know if
1805 	 the var that is selected by the linker will have
1806 	 padding or not.  */
1807       || DECL_ONE_ONLY (decl)
1808       /* Similarly for common vars.  People can use -fno-common.
1809 	 Note: Linux kernel is built with -fno-common, so we do instrument
1810 	 globals there even if it is C.  */
1811       || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1812       /* Don't protect if using user section, often vars placed
1813 	 into user section from multiple TUs are then assumed
1814 	 to be an array of such vars, putting padding in there
1815 	 breaks this assumption.  */
1816       || (DECL_SECTION_NAME (decl) != NULL
1817 	  && !symtab_node::get (decl)->implicit_section
1818 	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1819       || DECL_SIZE (decl) == 0
1820       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1821       || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1822       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1823       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1824       || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1825       || is_odr_indicator (decl))
1826     return false;
1827 
1828   if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
1829     {
1830 
1831       rtl = DECL_RTL (decl);
1832       if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1833 	return false;
1834       symbol = XEXP (rtl, 0);
1835 
1836       if (CONSTANT_POOL_ADDRESS_P (symbol)
1837 	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1838 	return false;
1839     }
1840 
1841   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1842     return false;
1843 
1844   if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1845     return false;
1846 
1847   return true;
1848 }
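
/* A sketch of the effect of asan_protect_global (assuming the default
   32-byte ASAN_RED_ZONE_SIZE; the declarations below are hypothetical):

     int a = 1;                                   // protected
     static int b = 2;                            // protected
     extern int c;                                // skipped: protected where defined
     __thread int d;                              // skipped: thread-local
     int e __attribute__ ((aligned (128))) = 5;   // skipped: over-aligned
     int f __attribute__ ((section ("s"))) = 6;   // skipped: user section  */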
1849 
1850 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1851    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1852 
1853 static tree
1854 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1855 		   int *nargs)
1856 {
1857   static enum built_in_function report[2][2][6]
1858     = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1859 	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1860 	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1861 	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1862 	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1863 	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1864 	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1865 	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1866 	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1867 	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1868 	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1869 	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1870 	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1871 	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1872 	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1873 	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1874 	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1875 	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1876   if (size_in_bytes == -1)
1877     {
1878       *nargs = 2;
1879       return builtin_decl_implicit (report[recover_p][is_store][5]);
1880     }
1881   *nargs = 1;
1882   int size_log2 = exact_log2 (size_in_bytes);
1883   return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1884 }
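
/* For example (a sketch): calling report_error_func with IS_STORE true,
   RECOVER_P false and SIZE_IN_BYTES 4 returns the decl of
   __asan_report_store4 and sets *NARGS to 1; for a variable-sized access
   (SIZE_IN_BYTES == -1) it returns __asan_report_store_n and sets *NARGS
   to 2.  */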
1885 
1886 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1887    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1888 
1889 static tree
1890 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1891 	    int *nargs)
1892 {
1893   static enum built_in_function check[2][2][6]
1894     = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1895 	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1896 	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1897 	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1898 	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1899 	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1900 	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
1901 	    BUILT_IN_ASAN_LOAD2_NOABORT,
1902 	    BUILT_IN_ASAN_LOAD4_NOABORT,
1903 	    BUILT_IN_ASAN_LOAD8_NOABORT,
1904 	    BUILT_IN_ASAN_LOAD16_NOABORT,
1905 	    BUILT_IN_ASAN_LOADN_NOABORT },
1906 	  { BUILT_IN_ASAN_STORE1_NOABORT,
1907 	    BUILT_IN_ASAN_STORE2_NOABORT,
1908 	    BUILT_IN_ASAN_STORE4_NOABORT,
1909 	    BUILT_IN_ASAN_STORE8_NOABORT,
1910 	    BUILT_IN_ASAN_STORE16_NOABORT,
1911 	    BUILT_IN_ASAN_STOREN_NOABORT } } };
1912   if (size_in_bytes == -1)
1913     {
1914       *nargs = 2;
1915       return builtin_decl_implicit (check[recover_p][is_store][5]);
1916     }
1917   *nargs = 1;
1918   int size_log2 = exact_log2 (size_in_bytes);
1919   return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1920 }
1921 
1922 /* Split the current basic block and create a condition statement
1923    insertion point right before or after the statement pointed to by
1924    ITER.  Return an iterator to the point at which the caller might
1925    safely insert the condition statement.
1926 
1927    THEN_BLOCK must be set to the address of an uninitialized instance
1928    of basic_block.  The function will then set *THEN_BLOCK to the
1929    'then block' of the condition statement to be inserted by the
1930    caller.
1931 
1932    If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1933    *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1934 
1935    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1936    block' of the condition statement to be inserted by the caller.
1937 
1938    Note that *FALLTHROUGH_BLOCK is a new block that contains the
1939    statements starting from *ITER, and *THEN_BLOCK is a new empty
1940    block.
1941 
1942    *ITER is adjusted to always point to the first statement
1943     of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1944     same as what ITER was pointing to prior to calling this function,
1945     if BEFORE_P is true; otherwise, it is its following statement.  */
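
/* Schematically, the resulting control flow looks like this (a sketch; the
   condition itself is inserted by the caller at the returned iterator,
   i.e. at the end of COND_BB):

	 cond_bb --[TRUE]--> then_bb
	    |                   |
	 [FALSE]        [FALLTHRU, only if
	    |          CREATE_THEN_FALLTHRU_EDGE]
	    v                   |
	 fallthru_bb <----------+  */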
1946 
1947 gimple_stmt_iterator
1948 create_cond_insert_point (gimple_stmt_iterator *iter,
1949 			  bool before_p,
1950 			  bool then_more_likely_p,
1951 			  bool create_then_fallthru_edge,
1952 			  basic_block *then_block,
1953 			  basic_block *fallthrough_block)
1954 {
1955   gimple_stmt_iterator gsi = *iter;
1956 
1957   if (!gsi_end_p (gsi) && before_p)
1958     gsi_prev (&gsi);
1959 
1960   basic_block cur_bb = gsi_bb (*iter);
1961 
1962   edge e = split_block (cur_bb, gsi_stmt (gsi));
1963 
1964   /* Get a hold on the 'condition block', the 'then block' and the
1965      'else block'.  */
1966   basic_block cond_bb = e->src;
1967   basic_block fallthru_bb = e->dest;
1968   basic_block then_bb = create_empty_bb (cond_bb);
1969   if (current_loops)
1970     {
1971       add_bb_to_loop (then_bb, cond_bb->loop_father);
1972       loops_state_set (LOOPS_NEED_FIXUP);
1973     }
1974 
1975   /* Set up the newly created 'then block'.  */
1976   e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1977   profile_probability fallthrough_probability
1978     = then_more_likely_p
1979     ? profile_probability::very_unlikely ()
1980     : profile_probability::very_likely ();
1981   e->probability = fallthrough_probability.invert ();
1982   then_bb->count = e->count ();
1983   if (create_then_fallthru_edge)
1984     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1985 
1986   /* Set up the fallthrough basic block.  */
1987   e = find_edge (cond_bb, fallthru_bb);
1988   e->flags = EDGE_FALSE_VALUE;
1989   e->probability = fallthrough_probability;
1990 
1991   /* Update dominance info for the newly created then_bb; note that
1992      fallthru_bb's dominance info has already been updated by
1993      split_block.  */
1994   if (dom_info_available_p (CDI_DOMINATORS))
1995     set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1996 
1997   *then_block = then_bb;
1998   *fallthrough_block = fallthru_bb;
1999   *iter = gsi_start_bb (fallthru_bb);
2000 
2001   return gsi_last_bb (cond_bb);
2002 }
2003 
2004 /* Insert an if condition followed by a 'then block' right before the
2005    statement pointed to by ITER.  The fallthrough block -- which is the
2006    else block of the condition as well as the destination of the
2007    outgoing edge of the 'then block' -- starts with the statement
2008    pointed to by ITER.
2009 
2010    COND is the condition of the if.
2011 
2012    If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2013    'then block' is higher than the probability of the edge to the
2014    fallthrough block.
2015 
2016    Upon completion of the function, *THEN_BB is set to the newly
2017    inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2018    fallthrough block.
2019 
2020    *ITER is adjusted to still point to the same statement it was
2021    pointing to initially.  */
2022 
2023 static void
2024 insert_if_then_before_iter (gcond *cond,
2025 			    gimple_stmt_iterator *iter,
2026 			    bool then_more_likely_p,
2027 			    basic_block *then_bb,
2028 			    basic_block *fallthrough_bb)
2029 {
2030   gimple_stmt_iterator cond_insert_point =
2031     create_cond_insert_point (iter,
2032 			      /*before_p=*/true,
2033 			      then_more_likely_p,
2034 			      /*create_then_fallthru_edge=*/true,
2035 			      then_bb,
2036 			      fallthrough_bb);
2037   gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2038 }
2039 
2040 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2041    If RETURN_ADDRESS is set to true, return the memory location instead
2042    of the value in the shadow memory.  */
2043 
2044 static tree
2045 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2046 			 tree base_addr, tree shadow_ptr_type,
2047 			 bool return_address = false)
2048 {
2049   tree t, uintptr_type = TREE_TYPE (base_addr);
2050   tree shadow_type = TREE_TYPE (shadow_ptr_type);
2051   gimple *g;
2052 
2053   t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2054   g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2055 			   base_addr, t);
2056   gimple_set_location (g, location);
2057   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2058 
2059   t = build_int_cst (uintptr_type, asan_shadow_offset ());
2060   g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2061 			   gimple_assign_lhs (g), t);
2062   gimple_set_location (g, location);
2063   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2064 
2065   g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2066 			   gimple_assign_lhs (g));
2067   gimple_set_location (g, location);
2068   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2069 
2070   if (!return_address)
2071     {
2072       t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2073 		  build_int_cst (shadow_ptr_type, 0));
2074       g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2075       gimple_set_location (g, location);
2076       gsi_insert_after (gsi, g, GSI_NEW_STMT);
2077     }
2078 
2079   return gimple_assign_lhs (g);
2080 }
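
/* The GIMPLE emitted above is roughly (a sketch; the shift and offset are
   target-dependent, shown here with the usual x86_64/Linux values):

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (shadow_ptr_type) _2;
     _4 = *_3;            // omitted when RETURN_ADDRESS is true

   and the SSA name defined by the last statement is returned.  */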
2081 
2082 /* BASE can already be an SSA_NAME; in that case, do not create a
2083    new SSA_NAME for it.  */
2084 
2085 static tree
2086 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2087 		       bool before_p)
2088 {
2089   STRIP_USELESS_TYPE_CONVERSION (base);
2090   if (TREE_CODE (base) == SSA_NAME)
2091     return base;
2092   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2093   gimple_set_location (g, loc);
2094   if (before_p)
2095     gsi_insert_before (iter, g, GSI_SAME_STMT);
2096   else
2097     gsi_insert_after (iter, g, GSI_NEW_STMT);
2098   return gimple_assign_lhs (g);
2099 }
2100 
2101 /* LEN can already have the necessary size and precision;
2102    in that case, do not create a new variable.  */
2103 
2104 tree
2105 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2106 		       bool before_p)
2107 {
2108   if (ptrofftype_p (len))
2109     return len;
2110   gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2111 				  NOP_EXPR, len);
2112   gimple_set_location (g, loc);
2113   if (before_p)
2114     gsi_insert_before (iter, g, GSI_SAME_STMT);
2115   else
2116     gsi_insert_after (iter, g, GSI_NEW_STMT);
2117   return gimple_assign_lhs (g);
2118 }
2119 
2120 /* Instrument the memory access instruction BASE.  Insert new
2121    statements before or after ITER.
2122 
2123    Note that the memory access represented by BASE can be either an
2124    SSA_NAME, or a non-SSA expression.  LOCATION is the source code
2125    location.  IS_STORE is TRUE for a store, FALSE for a load.
2126    BEFORE_P is TRUE for inserting the instrumentation code before
2127    ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
2128    for a scalar memory access and FALSE for memory region access.
2129    IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have a
2130    non-zero length.  ALIGN tells the alignment of the accessed memory object.
2134 
2135    If BEFORE_P is TRUE, *ITER is arranged to still point to the
2136    statement it was pointing to prior to calling this function,
2137    otherwise, it points to the statement logically following it.  */
2138 
2139 static void
2140 build_check_stmt (location_t loc, tree base, tree len,
2141 		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2142 		  bool is_non_zero_len, bool before_p, bool is_store,
2143 		  bool is_scalar_access, unsigned int align = 0)
2144 {
2145   gimple_stmt_iterator gsi = *iter;
2146   gimple *g;
2147 
2148   gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2149 
2150   gsi = *iter;
2151 
2152   base = unshare_expr (base);
2153   base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2154 
2155   if (len)
2156     {
2157       len = unshare_expr (len);
2158       len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2159     }
2160   else
2161     {
2162       gcc_assert (size_in_bytes != -1);
2163       len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2164     }
2165 
2166   if (size_in_bytes > 1)
2167     {
2168       if ((size_in_bytes & (size_in_bytes - 1)) != 0
2169 	  || size_in_bytes > 16)
2170 	is_scalar_access = false;
2171       else if (align && align < size_in_bytes * BITS_PER_UNIT)
2172 	{
2173 	  /* On non-strict-alignment targets, a 16-byte access
2174 	     that is only 8-byte aligned results in a misaligned
2175 	     2-byte load from shadow memory, but it can still be
2176 	     handled with a single read; any other under-aligned
2177 	     access is treated as non-scalar below.  */
2178 	  if (size_in_bytes != 16
2179 	      || STRICT_ALIGNMENT
2180 	      || align < 8 * BITS_PER_UNIT)
2181 	    is_scalar_access = false;
2182 	}
2183     }
2184 
2185   HOST_WIDE_INT flags = 0;
2186   if (is_store)
2187     flags |= ASAN_CHECK_STORE;
2188   if (is_non_zero_len)
2189     flags |= ASAN_CHECK_NON_ZERO_LEN;
2190   if (is_scalar_access)
2191     flags |= ASAN_CHECK_SCALAR_ACCESS;
2192 
2193   g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2194 				  build_int_cst (integer_type_node, flags),
2195 				  base, len,
2196 				  build_int_cst (integer_type_node,
2197 						 align / BITS_PER_UNIT));
2198   gimple_set_location (g, loc);
2199   if (before_p)
2200     gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2201   else
2202     {
2203       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2204       gsi_next (&gsi);
2205       *iter = gsi;
2206     }
2207 }
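
/* For instance (a sketch), instrumenting a 4-byte, 4-byte-aligned scalar
   store through pointer P results in something like

     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
		  | ASAN_CHECK_SCALAR_ACCESS, p_1, 4, 4);
     *p_1 = ...;

   The internal call is expanded into the actual shadow-memory test later,
   during the sanopt pass.  */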
2208 
2209 /* If T represents a memory access, add instrumentation code before ITER.
2210    LOCATION is source code location.
2211    IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
2212 
2213 static void
2214 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2215 		   location_t location, bool is_store)
2216 {
2217   if (is_store && !param_asan_instrument_writes)
2218     return;
2219   if (!is_store && !param_asan_instrument_reads)
2220     return;
2221 
2222   tree type, base;
2223   HOST_WIDE_INT size_in_bytes;
2224   if (location == UNKNOWN_LOCATION)
2225     location = EXPR_LOCATION (t);
2226 
2227   type = TREE_TYPE (t);
2228   switch (TREE_CODE (t))
2229     {
2230     case ARRAY_REF:
2231     case COMPONENT_REF:
2232     case INDIRECT_REF:
2233     case MEM_REF:
2234     case VAR_DECL:
2235     case BIT_FIELD_REF:
2236       break;
2237       /* FALLTHRU */
2238     default:
2239       return;
2240     }
2241 
2242   size_in_bytes = int_size_in_bytes (type);
2243   if (size_in_bytes <= 0)
2244     return;
2245 
2246   poly_int64 bitsize, bitpos;
2247   tree offset;
2248   machine_mode mode;
2249   int unsignedp, reversep, volatilep = 0;
2250   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2251 				    &unsignedp, &reversep, &volatilep);
2252 
2253   if (TREE_CODE (t) == COMPONENT_REF
2254       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2255     {
2256       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2257       instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2258 				       TREE_OPERAND (t, 0), repr,
2259 				       TREE_OPERAND (t, 2)),
2260 			 location, is_store);
2261       return;
2262     }
2263 
2264   if (!multiple_p (bitpos, BITS_PER_UNIT)
2265       || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2266     return;
2267 
2268   if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2269     return;
2270 
2271   poly_int64 decl_size;
2272   if (VAR_P (inner)
2273       && offset == NULL_TREE
2274       && DECL_SIZE (inner)
2275       && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2276       && known_subrange_p (bitpos, bitsize, 0, decl_size))
2277     {
2278       if (DECL_THREAD_LOCAL_P (inner))
2279 	return;
2280       if (!param_asan_globals && is_global_var (inner))
2281         return;
2282       if (!TREE_STATIC (inner))
2283 	{
2284 	  /* Automatic vars in the current function will always
2285 	     be accessible.  */
2286 	  if (decl_function_context (inner) == current_function_decl
2287 	      && (!asan_sanitize_use_after_scope ()
2288 		  || !TREE_ADDRESSABLE (inner)))
2289 	    return;
2290 	}
2291       /* Always instrument external vars; they might be dynamically
2292 	 initialized.  */
2293       else if (!DECL_EXTERNAL (inner))
2294 	{
2295 	  /* Static vars that are known not to be dynamically
2296 	     initialized will always be accessible.  */
2297 	  varpool_node *vnode = varpool_node::get (inner);
2298 	  if (vnode && !vnode->dynamically_initialized)
2299 	    return;
2300 	}
2301     }
2302 
2303   base = build_fold_addr_expr (t);
2304   if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2305     {
2306       unsigned int align = get_object_alignment (t);
2307       build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2308 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2309 			is_store, /*is_scalar_access*/true, align);
2310       update_mem_ref_hash_table (base, size_in_bytes);
2311       update_mem_ref_hash_table (t, size_in_bytes);
2312     }
2313 
2314 }
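
/* For example (a sketch): a 4-byte load through a pointer

     ... = *p_1;

   gets a preceding

     .ASAN_CHECK (ASAN_CHECK_NON_ZERO_LEN | ASAN_CHECK_SCALAR_ACCESS,
		  p_1, 4, <alignment of *p_1 in bytes>);

   whereas accesses that provably stay within an object known to be always
   accessible (e.g. a non-addressable automatic variable of the current
   function) are skipped by the early returns above.  */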
2315 
2316 /*  Insert a memory reference into the hash table if access length
2317     can be determined at compile time.  */
2318 
2319 static void
2320 maybe_update_mem_ref_hash_table (tree base, tree len)
2321 {
2322   if (!POINTER_TYPE_P (TREE_TYPE (base))
2323       || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2324     return;
2325 
2326   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2327 
2328   if (size_in_bytes != -1)
2329     update_mem_ref_hash_table (base, size_in_bytes);
2330 }
2331 
2332 /* Instrument an access to a contiguous memory region that starts at
2333    the address pointed to by BASE, over a length of LEN (expressed in
2334    the sizeof (*BASE) bytes).  ITER points to the instruction before
2335    which the instrumentation instructions must be inserted.  LOCATION
2336    is the source location that the instrumentation instructions must
2337    have.  If IS_STORE is true, then the memory access is a store;
2338    otherwise, it's a load.  */
2339 
2340 static void
2341 instrument_mem_region_access (tree base, tree len,
2342 			      gimple_stmt_iterator *iter,
2343 			      location_t location, bool is_store)
2344 {
2345   if (!POINTER_TYPE_P (TREE_TYPE (base))
2346       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2347       || integer_zerop (len))
2348     return;
2349 
2350   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2351 
2352   if ((size_in_bytes == -1)
2353       || !has_mem_ref_been_instrumented (base, size_in_bytes))
2354     {
2355       build_check_stmt (location, base, len, size_in_bytes, iter,
2356 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2357 			is_store, /*is_scalar_access*/false, /*align*/0);
2358     }
2359 
2360   maybe_update_mem_ref_hash_table (base, len);
2361   *iter = gsi_for_stmt (gsi_stmt (*iter));
2362 }
2363 
2364 /* Instrument the call to a built-in memory access function that is
2365    pointed to by the iterator ITER.
2366 
2367    Upon completion, return TRUE iff *ITER has been advanced to the
2368    statement following the one it was originally pointing to.  */
2369 
2370 static bool
2371 instrument_builtin_call (gimple_stmt_iterator *iter)
2372 {
2373   if (!param_asan_memintrin)
2374     return false;
2375 
2376   bool iter_advanced_p = false;
2377   gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2378 
2379   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2380 
2381   location_t loc = gimple_location (call);
2382 
2383   asan_mem_ref src0, src1, dest;
2384   asan_mem_ref_init (&src0, NULL, 1);
2385   asan_mem_ref_init (&src1, NULL, 1);
2386   asan_mem_ref_init (&dest, NULL, 1);
2387 
2388   tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2389   bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2390     dest_is_deref = false, intercepted_p = true;
2391 
2392   if (get_mem_refs_of_builtin_call (call,
2393 				    &src0, &src0_len, &src0_is_store,
2394 				    &src1, &src1_len, &src1_is_store,
2395 				    &dest, &dest_len, &dest_is_store,
2396 				    &dest_is_deref, &intercepted_p, iter))
2397     {
2398       if (dest_is_deref)
2399 	{
2400 	  instrument_derefs (iter, dest.start, loc, dest_is_store);
2401 	  gsi_next (iter);
2402 	  iter_advanced_p = true;
2403 	}
2404       else if (!intercepted_p
2405 	       && (src0_len || src1_len || dest_len))
2406 	{
2407 	  if (src0.start != NULL_TREE)
2408 	    instrument_mem_region_access (src0.start, src0_len,
2409 					  iter, loc, /*is_store=*/false);
2410 	  if (src1.start != NULL_TREE)
2411 	    instrument_mem_region_access (src1.start, src1_len,
2412 					  iter, loc, /*is_store=*/false);
2413 	  if (dest.start != NULL_TREE)
2414 	    instrument_mem_region_access (dest.start, dest_len,
2415 					  iter, loc, /*is_store=*/true);
2416 
2417 	  *iter = gsi_for_stmt (call);
2418 	  gsi_next (iter);
2419 	  iter_advanced_p = true;
2420 	}
2421       else
2422 	{
2423 	  if (src0.start != NULL_TREE)
2424 	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
2425 	  if (src1.start != NULL_TREE)
2426 	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
2427 	  if (dest.start != NULL_TREE)
2428 	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
2429 	}
2430     }
2431   return iter_advanced_p;
2432 }
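
/* A sketch of the result for a built-in that is not intercepted by the
   runtime library and that writes LEN bytes at DEST while reading LEN
   bytes at SRC:

     .ASAN_CHECK (<flags for a region read>, src_1, len_2, 0);
     .ASAN_CHECK (<flags for a region write>, dest_3, len_2, 0);
     <the original call>;

   Calls that the runtime library intercepts itself are only recorded in
   the mem-ref hash table, so later accesses to the same memory need not
   be re-instrumented.  */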
2433 
2434 /*  Instrument the assignment statement pointed to by ITER if it is subject
2435     to instrumentation.  Return TRUE iff instrumentation actually
2436     happened.  In that case, the iterator ITER is advanced to the next
2437     logical expression following the one initially pointed to by ITER,
2438     and the relevant memory reference whose access has been
2439     instrumented is added to the memory references hash table.  */
2440 
2441 static bool
2442 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2443 {
2444   gimple *s = gsi_stmt (*iter);
2445 
2446   gcc_assert (gimple_assign_single_p (s));
2447 
2448   tree ref_expr = NULL_TREE;
2449   bool is_store, is_instrumented = false;
2450 
2451   if (gimple_store_p (s))
2452     {
2453       ref_expr = gimple_assign_lhs (s);
2454       is_store = true;
2455       instrument_derefs (iter, ref_expr,
2456 			 gimple_location (s),
2457 			 is_store);
2458       is_instrumented = true;
2459     }
2460 
2461   if (gimple_assign_load_p (s))
2462     {
2463       ref_expr = gimple_assign_rhs1 (s);
2464       is_store = false;
2465       instrument_derefs (iter, ref_expr,
2466 			 gimple_location (s),
2467 			 is_store);
2468       is_instrumented = true;
2469     }
2470 
2471   if (is_instrumented)
2472     gsi_next (iter);
2473 
2474   return is_instrumented;
2475 }
2476 
2477 /* Instrument the function call pointed to by the iterator ITER, if it
2478    is subject to instrumentation.  At the moment, the only function
2479    calls that are instrumented are some built-in functions that access
2480    memory.  Look at instrument_builtin_call to learn more.
2481 
2482    Upon completion return TRUE iff *ITER was advanced to the statement
2483    following the one it was originally pointing to.  */
2484 
2485 static bool
2486 maybe_instrument_call (gimple_stmt_iterator *iter)
2487 {
2488   gimple *stmt = gsi_stmt (*iter);
2489   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2490 
2491   if (is_builtin && instrument_builtin_call (iter))
2492     return true;
2493 
2494   if (gimple_call_noreturn_p (stmt))
2495     {
2496       if (is_builtin)
2497 	{
2498 	  tree callee = gimple_call_fndecl (stmt);
2499 	  switch (DECL_FUNCTION_CODE (callee))
2500 	    {
2501 	    case BUILT_IN_UNREACHABLE:
2502 	    case BUILT_IN_TRAP:
2503 	      /* Don't instrument these.  */
2504 	      return false;
2505 	    default:
2506 	      break;
2507 	    }
2508 	}
2509       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2510       gimple *g = gimple_build_call (decl, 0);
2511       gimple_set_location (g, gimple_location (stmt));
2512       gsi_insert_before (iter, g, GSI_SAME_STMT);
2513     }
2514 
2515   bool instrumented = false;
2516   if (gimple_store_p (stmt))
2517     {
2518       tree ref_expr = gimple_call_lhs (stmt);
2519       instrument_derefs (iter, ref_expr,
2520 			 gimple_location (stmt),
2521 			 /*is_store=*/true);
2522 
2523       instrumented = true;
2524     }
2525 
2526   /* Walk through gimple_call arguments and check them if needed.  */
2527   unsigned args_num = gimple_call_num_args (stmt);
2528   for (unsigned i = 0; i < args_num; ++i)
2529     {
2530       tree arg = gimple_call_arg (stmt, i);
2531       /* If ARG is not a non-aggregate register variable, the compiler in
2532 	 general creates a temporary for it and passes it as an argument to
2533 	 the gimple call.  But in some cases, e.g. when we pass by value a
2534 	 small structure that fits into a register, the compiler can avoid
2535 	 creating that temporary.  In this case, we should check the argument.  */
2536       if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2537 	{
2538 	  instrument_derefs (iter, arg,
2539 			     gimple_location (stmt),
2540 			     /*is_store=*/false);
2541 	  instrumented = true;
2542 	}
2543     }
2544   if (instrumented)
2545     gsi_next (iter);
2546   return instrumented;
2547 }
2548 
2549 /* Walk each instruction of all basic blocks and instrument those that
2550    represent memory references: loads, stores, or function calls.
2551    In a given basic block, this function avoids instrumenting memory
2552    references that have already been instrumented.  */
2553 
2554 static void
2555 transform_statements (void)
2556 {
2557   basic_block bb, last_bb = NULL;
2558   gimple_stmt_iterator i;
2559   int saved_last_basic_block = last_basic_block_for_fn (cfun);
2560 
2561   FOR_EACH_BB_FN (bb, cfun)
2562     {
2563       basic_block prev_bb = bb;
2564 
2565       if (bb->index >= saved_last_basic_block) continue;
2566 
2567       /* Flush the mem ref hash table, if current bb doesn't have
2568 	 exactly one predecessor, or if that predecessor (skipping
2569 	 over asan created basic blocks) isn't the last processed
2570 	 basic block.  Thus we effectively flush on extended basic
2571 	 block boundaries.  */
2572       while (single_pred_p (prev_bb))
2573 	{
2574 	  prev_bb = single_pred (prev_bb);
2575 	  if (prev_bb->index < saved_last_basic_block)
2576 	    break;
2577 	}
2578       if (prev_bb != last_bb)
2579 	empty_mem_ref_hash_table ();
2580       last_bb = bb;
2581 
2582       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2583 	{
2584 	  gimple *s = gsi_stmt (i);
2585 
2586 	  if (has_stmt_been_instrumented_p (s))
2587 	    gsi_next (&i);
2588 	  else if (gimple_assign_single_p (s)
2589 		   && !gimple_clobber_p (s)
2590 		   && maybe_instrument_assignment (&i))
2591 	    /*  Nothing to do as maybe_instrument_assignment advanced
2592 		the iterator I.  */;
2593 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
2594 	    /*  Nothing to do as maybe_instrument_call
2595 		advanced the iterator I.  */;
2596 	  else
2597 	    {
2598 	      /* No instrumentation happened.
2599 
2600 		 If the current instruction is a function call that
2601 		 might free something, let's forget about the memory
2602 		 references that got instrumented.  Otherwise we might
2603 		 miss some instrumentation opportunities.  Do the same
2604 		 for an ASAN_MARK poisoning internal function.  */
2605 	      if (is_gimple_call (s)
2606 		  && (!nonfreeing_call_p (s)
2607 		      || asan_mark_p (s, ASAN_MARK_POISON)))
2608 		empty_mem_ref_hash_table ();
2609 
2610 	      gsi_next (&i);
2611 	    }
2612 	}
2613     }
2614   free_mem_ref_resources ();
2615 }
2616 
2617 /* Build
2618    __asan_before_dynamic_init (module_name)
2619    or
2620    __asan_after_dynamic_init ()
2621    call.  */
2622 
2623 tree
2624 asan_dynamic_init_call (bool after_p)
2625 {
2626   if (shadow_ptr_types[0] == NULL_TREE)
2627     asan_init_shadow_ptr_types ();
2628 
2629   tree fn = builtin_decl_implicit (after_p
2630 				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2631 				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2632   tree module_name_cst = NULL_TREE;
2633   if (!after_p)
2634     {
2635       pretty_printer module_name_pp;
2636       pp_string (&module_name_pp, main_input_filename);
2637 
2638       module_name_cst = asan_pp_string (&module_name_pp);
2639       module_name_cst = fold_convert (const_ptr_type_node,
2640 				      module_name_cst);
2641     }
2642 
2643   return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2644 }
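
/* A sketch of the calls this builds, bracketing the dynamic initialization
   of the module's globals (the file name comes from main_input_filename):

     __asan_before_dynamic_init ("path/to/source.cc");
     ...dynamic initializers...
     __asan_after_dynamic_init ();  */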
2645 
2646 /* Build
2647    struct __asan_global
2648    {
2649      const void *__beg;
2650      uptr __size;
2651      uptr __size_with_redzone;
2652      const void *__name;
2653      const void *__module_name;
2654      uptr __has_dynamic_init;
2655      __asan_global_source_location *__location;
2656      char *__odr_indicator;
2657    } type.  */
2658 
2659 static tree
2660 asan_global_struct (void)
2661 {
2662   static const char *field_names[]
2663     = { "__beg", "__size", "__size_with_redzone",
2664 	"__name", "__module_name", "__has_dynamic_init", "__location",
2665 	"__odr_indicator" };
2666   tree fields[ARRAY_SIZE (field_names)], ret;
2667   unsigned i;
2668 
2669   ret = make_node (RECORD_TYPE);
2670   for (i = 0; i < ARRAY_SIZE (field_names); i++)
2671     {
2672       fields[i]
2673 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2674 		      get_identifier (field_names[i]),
2675 		      (i == 0 || i == 3) ? const_ptr_type_node
2676 		      : pointer_sized_int_node);
2677       DECL_CONTEXT (fields[i]) = ret;
2678       if (i)
2679 	DECL_CHAIN (fields[i - 1]) = fields[i];
2680     }
2681   tree type_decl = build_decl (input_location, TYPE_DECL,
2682 			       get_identifier ("__asan_global"), ret);
2683   DECL_IGNORED_P (type_decl) = 1;
2684   DECL_ARTIFICIAL (type_decl) = 1;
2685   TYPE_FIELDS (ret) = fields[0];
2686   TYPE_NAME (ret) = type_decl;
2687   TYPE_STUB_DECL (ret) = type_decl;
2688   TYPE_ARTIFICIAL (ret) = 1;
2689   layout_type (ret);
2690   return ret;
2691 }
2692 
2693 /* Create and return odr indicator symbol for DECL.
2694    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2695 
2696 static tree
2697 create_odr_indicator (tree decl, tree type)
2698 {
2699   char *name;
2700   tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2701   tree decl_name
2702     = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2703 					: DECL_NAME (decl));
2704   /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
2705   if (decl_name == NULL_TREE)
2706     return build_int_cst (uptr, 0);
2707   const char *dname = IDENTIFIER_POINTER (decl_name);
2708   if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2709     dname = targetm.strip_name_encoding (dname);
2710   size_t len = strlen (dname) + sizeof ("__odr_asan_");
2711   name = XALLOCAVEC (char, len);
2712   snprintf (name, len, "__odr_asan_%s", dname);
2713 #ifndef NO_DOT_IN_LABEL
2714   name[sizeof ("__odr_asan") - 1] = '.';
2715 #elif !defined(NO_DOLLAR_IN_LABEL)
2716   name[sizeof ("__odr_asan") - 1] = '$';
2717 #endif
2718   tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2719 			 char_type_node);
2720   TREE_ADDRESSABLE (var) = 1;
2721   TREE_READONLY (var) = 0;
2722   TREE_THIS_VOLATILE (var) = 1;
2723   DECL_GIMPLE_REG_P (var) = 0;
2724   DECL_ARTIFICIAL (var) = 1;
2725   DECL_IGNORED_P (var) = 1;
2726   TREE_STATIC (var) = 1;
2727   TREE_PUBLIC (var) = 1;
2728   DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2729   DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2730 
2731   TREE_USED (var) = 1;
2732   tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2733 				    build_int_cst (unsigned_type_node, 0));
2734   TREE_CONSTANT (ctor) = 1;
2735   TREE_STATIC (ctor) = 1;
2736   DECL_INITIAL (var) = ctor;
2737   DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2738 				     NULL, DECL_ATTRIBUTES (var));
2739   make_decl_rtl (var);
2740   varpool_node::finalize_decl (var);
2741   return fold_convert (uptr, build_fold_addr_expr (var));
2742 }
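
/* For example (a sketch): for a public global 'int foo = 1;' this emits a
   one-byte, volatile, zero-initialized variable named "__odr_asan.foo"
   (or "__odr_asan$foo" / "__odr_asan_foo", depending on which characters
   the target allows in labels) with foo's visibility, and returns its
   address converted to the uptr type.  */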
2743 
2744 /* Return true if DECL, a global var, might be overridden and needs
2745    an additional odr indicator symbol.  */
2746 
2747 static bool
2748 asan_needs_odr_indicator_p (tree decl)
2749 {
2750   /* Don't emit ODR indicators for kernel because:
2751      a) Kernel is written in C thus doesn't need ODR indicators.
2752      b) Some kernel code may have assumptions about symbols containing specific
2753         patterns in their names.  Since ODR indicators contain original names
2754         of symbols they are emitted for, these assumptions would be broken for
2755         ODR indicator symbols.  */
2756   return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2757 	  && !DECL_ARTIFICIAL (decl)
2758 	  && !DECL_WEAK (decl)
2759 	  && TREE_PUBLIC (decl));
2760 }
2761 
2762 /* Append description of a single global DECL into vector V.
2763    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2764 
2765 static void
2766 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2767 {
2768   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2769   unsigned HOST_WIDE_INT size;
2770   tree str_cst, module_name_cst, refdecl = decl;
2771   vec<constructor_elt, va_gc> *vinner = NULL;
2772 
2773   pretty_printer asan_pp, module_name_pp;
2774 
2775   if (DECL_NAME (decl))
2776     pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2777   else
2778     pp_string (&asan_pp, "<unknown>");
2779   str_cst = asan_pp_string (&asan_pp);
2780 
2781   pp_string (&module_name_pp, main_input_filename);
2782   module_name_cst = asan_pp_string (&module_name_pp);
2783 
2784   if (asan_needs_local_alias (decl))
2785     {
2786       char buf[20];
2787       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2788       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2789 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2790       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2791       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2792       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2793       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2794       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2795       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2796       TREE_STATIC (refdecl) = 1;
2797       TREE_PUBLIC (refdecl) = 0;
2798       TREE_USED (refdecl) = 1;
2799       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2800     }
2801 
2802   tree odr_indicator_ptr
2803     = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2804 					 : build_int_cst (uptr, 0));
2805   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2806 			  fold_convert (const_ptr_type_node,
2807 					build_fold_addr_expr (refdecl)));
2808   size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2809   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2810   size += asan_red_zone_size (size);
2811   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2812   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2813 			  fold_convert (const_ptr_type_node, str_cst));
2814   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2815 			  fold_convert (const_ptr_type_node, module_name_cst));
2816   varpool_node *vnode = varpool_node::get (decl);
2817   int has_dynamic_init = 0;
2818   /* FIXME: Enable initialization order fiasco detection in LTO mode once
2819      a proper fix for PR 79061 is applied.  */
2820   if (!in_lto_p)
2821     has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2822   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2823 			  build_int_cst (uptr, has_dynamic_init));
2824   tree locptr = NULL_TREE;
2825   location_t loc = DECL_SOURCE_LOCATION (decl);
2826   expanded_location xloc = expand_location (loc);
2827   if (xloc.file != NULL)
2828     {
2829       static int lasanloccnt = 0;
2830       char buf[25];
2831       ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2832       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2833 			     ubsan_get_source_location_type ());
2834       TREE_STATIC (var) = 1;
2835       TREE_PUBLIC (var) = 0;
2836       DECL_ARTIFICIAL (var) = 1;
2837       DECL_IGNORED_P (var) = 1;
2838       pretty_printer filename_pp;
2839       pp_string (&filename_pp, xloc.file);
2840       tree str = asan_pp_string (&filename_pp);
2841       tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2842 					NULL_TREE, str, NULL_TREE,
2843 					build_int_cst (unsigned_type_node,
2844 						       xloc.line), NULL_TREE,
2845 					build_int_cst (unsigned_type_node,
2846 						       xloc.column));
2847       TREE_CONSTANT (ctor) = 1;
2848       TREE_STATIC (ctor) = 1;
2849       DECL_INITIAL (var) = ctor;
2850       varpool_node::finalize_decl (var);
2851       locptr = fold_convert (uptr, build_fold_addr_expr (var));
2852     }
2853   else
2854     locptr = build_int_cst (uptr, 0);
2855   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2856   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2857   init = build_constructor (type, vinner);
2858   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2859 }
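
/* A sketch of the record appended for a hypothetical 'int g = 1;' defined
   in g.c (assuming the default 32-byte ASAN_RED_ZONE_SIZE, so the padded
   size is rounded up to 64 bytes):

     { &g (or its .LASAN* local alias), 4, 64, "g", "g.c",
       0 or 1 (has_dynamic_init), &.LASANLOC* location record or 0,
       &__odr_asan.g or 0 }  */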
2860 
2861 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
2862 void
2863 initialize_sanitizer_builtins (void)
2864 {
2865   tree decl;
2866 
2867   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2868     return;
2869 
2870   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2871   tree BT_FN_VOID_PTR
2872     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2873   tree BT_FN_VOID_CONST_PTR
2874     = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2875   tree BT_FN_VOID_PTR_PTR
2876     = build_function_type_list (void_type_node, ptr_type_node,
2877 				ptr_type_node, NULL_TREE);
2878   tree BT_FN_VOID_PTR_PTR_PTR
2879     = build_function_type_list (void_type_node, ptr_type_node,
2880 				ptr_type_node, ptr_type_node, NULL_TREE);
2881   tree BT_FN_VOID_PTR_PTRMODE
2882     = build_function_type_list (void_type_node, ptr_type_node,
2883 				pointer_sized_int_node, NULL_TREE);
2884   tree BT_FN_VOID_INT
2885     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2886   tree BT_FN_SIZE_CONST_PTR_INT
2887     = build_function_type_list (size_type_node, const_ptr_type_node,
2888 				integer_type_node, NULL_TREE);
2889 
2890   tree BT_FN_VOID_UINT8_UINT8
2891     = build_function_type_list (void_type_node, unsigned_char_type_node,
2892 				unsigned_char_type_node, NULL_TREE);
2893   tree BT_FN_VOID_UINT16_UINT16
2894     = build_function_type_list (void_type_node, uint16_type_node,
2895 				uint16_type_node, NULL_TREE);
2896   tree BT_FN_VOID_UINT32_UINT32
2897     = build_function_type_list (void_type_node, uint32_type_node,
2898 				uint32_type_node, NULL_TREE);
2899   tree BT_FN_VOID_UINT64_UINT64
2900     = build_function_type_list (void_type_node, uint64_type_node,
2901 				uint64_type_node, NULL_TREE);
2902   tree BT_FN_VOID_FLOAT_FLOAT
2903     = build_function_type_list (void_type_node, float_type_node,
2904 				float_type_node, NULL_TREE);
2905   tree BT_FN_VOID_DOUBLE_DOUBLE
2906     = build_function_type_list (void_type_node, double_type_node,
2907 				double_type_node, NULL_TREE);
2908   tree BT_FN_VOID_UINT64_PTR
2909     = build_function_type_list (void_type_node, uint64_type_node,
2910 				ptr_type_node, NULL_TREE);
2911 
2912   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2913   tree BT_FN_IX_CONST_VPTR_INT[5];
2914   tree BT_FN_IX_VPTR_IX_INT[5];
2915   tree BT_FN_VOID_VPTR_IX_INT[5];
2916   tree vptr
2917     = build_pointer_type (build_qualified_type (void_type_node,
2918 						TYPE_QUAL_VOLATILE));
2919   tree cvptr
2920     = build_pointer_type (build_qualified_type (void_type_node,
2921 						TYPE_QUAL_VOLATILE
2922 						|TYPE_QUAL_CONST));
2923   tree boolt
2924     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2925   int i;
2926   for (i = 0; i < 5; i++)
2927     {
2928       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2929       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2930 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2931 				    integer_type_node, integer_type_node,
2932 				    NULL_TREE);
2933       BT_FN_IX_CONST_VPTR_INT[i]
2934 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2935       BT_FN_IX_VPTR_IX_INT[i]
2936 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2937 				    NULL_TREE);
2938       BT_FN_VOID_VPTR_IX_INT[i]
2939 	= build_function_type_list (void_type_node, vptr, ix,
2940 				    integer_type_node, NULL_TREE);
2941     }
2942 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2943 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2944 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2945 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2946 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2947 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2948 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2949 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2950 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2951 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2952 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2953 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2954 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2955 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2956 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2957 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2958 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2959 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2960 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2961 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2962 #undef ATTR_NOTHROW_LEAF_LIST
2963 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2964 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2965 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2966 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2967 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2968 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2969 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2970   ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2971 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2972 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2973   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2974 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2975 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2976   /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2977 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2978 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2979   /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2980 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2981 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2982   /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2983 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2984 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2985 #undef DEF_BUILTIN_STUB
2986 #define DEF_BUILTIN_STUB(ENUM, NAME)
2987 #undef DEF_SANITIZER_BUILTIN_1
2988 #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS)		\
2989   do {									\
2990     decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
2991 				 BUILT_IN_NORMAL, NAME, NULL_TREE);	\
2992     set_call_expr_flags (decl, ATTRS);					\
2993     set_builtin_decl (ENUM, decl, true);				\
2994   } while (0)
2995 #undef DEF_SANITIZER_BUILTIN
2996 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS)	\
2997   DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
2998 
2999 #include "sanitizer.def"
3000 
3001   /* -fsanitize=object-size uses __builtin_object_size, but that might
3002      not be available for e.g. Fortran at this point.  We use
3003      DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
3004   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
3005       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
3006     DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
3007 			     BT_FN_SIZE_CONST_PTR_INT,
3008 			     ATTR_PURE_NOTHROW_LEAF_LIST);
3009 
3010 #undef DEF_SANITIZER_BUILTIN_1
3011 #undef DEF_SANITIZER_BUILTIN
3012 #undef DEF_BUILTIN_STUB
3013 }
3014 
3015 /* Called via hash_table::traverse.  Count number of emitted
3016    STRING_CSTs in the constant hash table.  */
3017 
3018 int
3019 count_string_csts (constant_descriptor_tree **slot,
3020 		   unsigned HOST_WIDE_INT *data)
3021 {
3022   struct constant_descriptor_tree *desc = *slot;
3023   if (TREE_CODE (desc->value) == STRING_CST
3024       && TREE_ASM_WRITTEN (desc->value)
3025       && asan_protect_global (desc->value))
3026     ++*data;
3027   return 1;
3028 }
3029 
3030 /* Helper structure to pass two parameters to
3031    add_string_csts.  */
3032 
3033 struct asan_add_string_csts_data
3034 {
3035   tree type;
3036   vec<constructor_elt, va_gc> *v;
3037 };
3038 
3039 /* Called via hash_table::traverse.  Call asan_add_global
3040    on emitted STRING_CSTs from the constant hash table.  */
3041 
3042 int
3043 add_string_csts (constant_descriptor_tree **slot,
3044 		 asan_add_string_csts_data *aascd)
3045 {
3046   struct constant_descriptor_tree *desc = *slot;
3047   if (TREE_CODE (desc->value) == STRING_CST
3048       && TREE_ASM_WRITTEN (desc->value)
3049       && asan_protect_global (desc->value))
3050     {
3051       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3052 		       aascd->type, aascd->v);
3053     }
3054   return 1;
3055 }
3056 
3057 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3058    invoke ggc_collect.  */
3059 static GTY(()) tree asan_ctor_statements;
3060 
3061 /* Module-level instrumentation.
3062    - Insert __asan_init_vN() into the list of CTORs.
3063    - TODO: insert redzones around globals.
3064  */
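/* A rough sketch of what asan_finish_file emits for user-space ASan, assuming
   the usual libasan entry points (illustrative only): a constructor doing

     __asan_init ();
     __asan_version_mismatch_check_vN ();
     __asan_register_globals (<address of the .LASAN* array>, gcount);

   and a matching destructor calling __asan_unregister_globals on the same
   descriptor array.  */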
3065 
3066 void
3067 asan_finish_file (void)
3068 {
3069   varpool_node *vnode;
3070   unsigned HOST_WIDE_INT gcount = 0;
3071 
3072   if (shadow_ptr_types[0] == NULL_TREE)
3073     asan_init_shadow_ptr_types ();
3074   /* Avoid instrumenting code in the asan ctors/dtors.
3075      We don't need to insert padding after the description strings,
3076      nor after the .LASAN* array.  */
3077   flag_sanitize &= ~SANITIZE_ADDRESS;
3078 
3079   /* For user-space we want the asan constructors to run first.
3080      The Linux kernel does not support priorities other than the default, and
3081      the only other user of constructors is coverage, so for the kernel we run
3082      with the default priority.  */
3083   int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3084                  ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3085 
3086   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3087     {
3088       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3089       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3090       fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3091       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3092     }
3093   FOR_EACH_DEFINED_VARIABLE (vnode)
3094     if (TREE_ASM_WRITTEN (vnode->decl)
3095 	&& asan_protect_global (vnode->decl))
3096       ++gcount;
3097   hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3098   const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3099     (&gcount);
3100   if (gcount)
3101     {
3102       tree type = asan_global_struct (), var, ctor;
3103       tree dtor_statements = NULL_TREE;
3104       vec<constructor_elt, va_gc> *v;
3105       char buf[20];
3106 
3107       type = build_array_type_nelts (type, gcount);
3108       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3109       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3110 			type);
3111       TREE_STATIC (var) = 1;
3112       TREE_PUBLIC (var) = 0;
3113       DECL_ARTIFICIAL (var) = 1;
3114       DECL_IGNORED_P (var) = 1;
3115       vec_alloc (v, gcount);
3116       FOR_EACH_DEFINED_VARIABLE (vnode)
3117 	if (TREE_ASM_WRITTEN (vnode->decl)
3118 	    && asan_protect_global (vnode->decl))
3119 	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
3120       struct asan_add_string_csts_data aascd;
3121       aascd.type = TREE_TYPE (type);
3122       aascd.v = v;
3123       const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3124        	(&aascd);
3125       ctor = build_constructor (type, v);
3126       TREE_CONSTANT (ctor) = 1;
3127       TREE_STATIC (ctor) = 1;
3128       DECL_INITIAL (var) = ctor;
3129       SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3130 				ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3131 
3132       varpool_node::finalize_decl (var);
3133 
3134       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3135       tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3136       append_to_statement_list (build_call_expr (fn, 2,
3137 						 build_fold_addr_expr (var),
3138 						 gcount_tree),
3139 				&asan_ctor_statements);
3140 
3141       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3142       append_to_statement_list (build_call_expr (fn, 2,
3143 						 build_fold_addr_expr (var),
3144 						 gcount_tree),
3145 				&dtor_statements);
3146       cgraph_build_static_cdtor ('D', dtor_statements, priority);
3147     }
3148   if (asan_ctor_statements)
3149     cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3150   flag_sanitize |= SANITIZE_ADDRESS;
3151 }
3152 
3153 /* Poison or unpoison (depending on the IS_CLOBBER flag) shadow memory based
3154    on SHADOW address.  Newly created statements are inserted at ITER with the
3155    given location LOC.  We mark SIZE bytes in shadow memory, where
3156    LAST_CHUNK_SIZE is greater than zero when we are at the
3157    end of a variable.  */
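/* A small worked example (illustrative, not taken from the original comments):
   on a little-endian target, unpoisoning (IS_CLOBBER == false) with SIZE == 2
   and LAST_CHUNK_SIZE == 5 packs the shadow bytes 0x00 and 0x05 into
   VAL == 0x0500, i.e. one fully addressable 8-byte granule followed by a
   granule whose first 5 bytes only are addressable.  */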
3158 
3159 static void
3160 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3161 			 tree shadow,
3162 			 unsigned HOST_WIDE_INT base_addr_offset,
3163 			 bool is_clobber, unsigned size,
3164 			 unsigned last_chunk_size)
3165 {
3166   tree shadow_ptr_type;
3167 
3168   switch (size)
3169     {
3170     case 1:
3171       shadow_ptr_type = shadow_ptr_types[0];
3172       break;
3173     case 2:
3174       shadow_ptr_type = shadow_ptr_types[1];
3175       break;
3176     case 4:
3177       shadow_ptr_type = shadow_ptr_types[2];
3178       break;
3179     default:
3180       gcc_unreachable ();
3181     }
3182 
3183   unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3184   unsigned HOST_WIDE_INT val = 0;
3185   unsigned last_pos = size;
3186   if (last_chunk_size && !is_clobber)
3187     last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3188   for (unsigned i = 0; i < size; ++i)
3189     {
3190       unsigned char shadow_c = c;
3191       if (i == last_pos)
3192 	shadow_c = last_chunk_size;
3193       val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3194     }
3195 
3196   /* VAL now also encodes the last partial chunk when unpoisoning.  */
3197   tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3198 
3199   tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3200 		      build_int_cst (shadow_ptr_type, base_addr_offset));
3201 
3202   gimple *g = gimple_build_assign (dest, magic);
3203   gimple_set_location (g, loc);
3204   gsi_insert_after (iter, g, GSI_NEW_STMT);
3205 }
3206 
3207 /* Expand the ASAN_MARK builtins.  */
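/* As the code below reads them, the ASAN_MARK arguments are: arg 0 the
   ASAN_MARK_{POISON,UNPOISON} flag, arg 1 the address of the decl being
   marked, and arg 2 its size in bytes.  */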
3208 
3209 bool
3210 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3211 {
3212   gimple *g = gsi_stmt (*iter);
3213   location_t loc = gimple_location (g);
3214   HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3215   bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3216 
3217   tree base = gimple_call_arg (g, 1);
3218   gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3219   tree decl = TREE_OPERAND (base, 0);
3220 
3221   /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3222   if (TREE_CODE (decl) == COMPONENT_REF
3223       && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3224     decl = TREE_OPERAND (decl, 0);
3225 
3226   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3227 
3228   if (is_poison)
3229     {
3230       if (asan_handled_variables == NULL)
3231 	asan_handled_variables = new hash_set<tree> (16);
3232       asan_handled_variables->add (decl);
3233     }
3234   tree len = gimple_call_arg (g, 2);
3235 
3236   gcc_assert (tree_fits_shwi_p (len));
3237   unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3238   gcc_assert (size_in_bytes);
3239 
3240   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3241 			   NOP_EXPR, base);
3242   gimple_set_location (g, loc);
3243   gsi_replace (iter, g, false);
3244   tree base_addr = gimple_assign_lhs (g);
3245 
3246   /* Generate direct emission if size_in_bytes is small.  */
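  /* For instance (illustrative, assuming sufficient alignment): unpoisoning a
     17-byte variable needs shadow_mem_size (17) == 3 shadow bytes, emitted as
     one 2-byte store of 0x0000 followed by a 1-byte store of 0x01, the final
     0x01 meaning that only one byte of the last 8-byte granule is
     addressable.  */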
3247   if (size_in_bytes
3248       <= (unsigned)param_use_after_scope_direct_emission_threshold)
3249     {
3250       const unsigned HOST_WIDE_INT shadow_size
3251 	= shadow_mem_size (size_in_bytes);
3252       const unsigned int shadow_align
3253 	= (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3254 
3255       tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3256 					     shadow_ptr_types[0], true);
3257 
3258       for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3259 	{
3260 	  unsigned size = 1;
3261 	  if (shadow_size - offset >= 4
3262 	      && (!STRICT_ALIGNMENT || shadow_align >= 4))
3263 	    size = 4;
3264 	  else if (shadow_size - offset >= 2
3265 		   && (!STRICT_ALIGNMENT || shadow_align >= 2))
3266 	    size = 2;
3267 
3268 	  unsigned HOST_WIDE_INT last_chunk_size = 0;
3269 	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3270 	  if (s > size_in_bytes)
3271 	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3272 
3273 	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3274 				   size, last_chunk_size);
3275 	  offset += size;
3276 	}
3277     }
3278   else
3279     {
3280       g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3281 			       NOP_EXPR, len);
3282       gimple_set_location (g, loc);
3283       gsi_insert_before (iter, g, GSI_SAME_STMT);
3284       tree sz_arg = gimple_assign_lhs (g);
3285 
3286       tree fun
3287 	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3288 				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3289       g = gimple_build_call (fun, 2, base_addr, sz_arg);
3290       gimple_set_location (g, loc);
3291       gsi_insert_after (iter, g, GSI_NEW_STMT);
3292     }
3293 
3294   return false;
3295 }
3296 
3297 /* Expand the ASAN_{LOAD,STORE} builtins.  */
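/* As read back below, the IFN_ASAN_CHECK call carries four arguments:
   arg 0 the ASAN_CHECK_* flags, arg 1 the base address of the access,
   arg 2 its length, and arg 3 the known alignment (the code below compares
   it against 8, i.e. byte units).  */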
3298 
3299 bool
3300 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3301 {
3302   gimple *g = gsi_stmt (*iter);
3303   location_t loc = gimple_location (g);
3304   bool recover_p;
3305   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3306     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3307   else
3308     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3309 
3310   HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3311   gcc_assert (flags < ASAN_CHECK_LAST);
3312   bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3313   bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3314   bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3315 
3316   tree base = gimple_call_arg (g, 1);
3317   tree len = gimple_call_arg (g, 2);
3318   HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3319 
3320   HOST_WIDE_INT size_in_bytes
3321     = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3322 
3323   if (use_calls)
3324     {
3325       /* Instrument using callbacks.  */
3326       gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3327 				      NOP_EXPR, base);
3328       gimple_set_location (g, loc);
3329       gsi_insert_before (iter, g, GSI_SAME_STMT);
3330       tree base_addr = gimple_assign_lhs (g);
3331 
3332       int nargs;
3333       tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3334       if (nargs == 1)
3335 	g = gimple_build_call (fun, 1, base_addr);
3336       else
3337 	{
3338 	  gcc_assert (nargs == 2);
3339 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3340 				   NOP_EXPR, len);
3341 	  gimple_set_location (g, loc);
3342 	  gsi_insert_before (iter, g, GSI_SAME_STMT);
3343 	  tree sz_arg = gimple_assign_lhs (g);
3344 	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3345 	}
3346       gimple_set_location (g, loc);
3347       gsi_replace (iter, g, false);
3348       return false;
3349     }
3350 
3351   HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
3352 
3353   tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3354   tree shadow_type = TREE_TYPE (shadow_ptr_type);
3355 
3356   gimple_stmt_iterator gsi = *iter;
3357 
3358   if (!is_non_zero_len)
3359     {
3360       /* So, the length of the memory area to asan-protect is not known to be
3361 	 non-zero at compile time.  Let's guard the generated instrumentation code
3362 	 like:
3363 
3364 	 if (len != 0)
3365 	   {
3366 	     //asan instrumentation code goes here.
3367 	   }
3368 	 // fallthrough instructions, starting with *ITER.  */
3369 
3370       g = gimple_build_cond (NE_EXPR,
3371 			    len,
3372 			    build_int_cst (TREE_TYPE (len), 0),
3373 			    NULL_TREE, NULL_TREE);
3374       gimple_set_location (g, loc);
3375 
3376       basic_block then_bb, fallthrough_bb;
3377       insert_if_then_before_iter (as_a <gcond *> (g), iter,
3378 				  /*then_more_likely_p=*/true,
3379 				  &then_bb, &fallthrough_bb);
3380       /* Note that fallthrough_bb starts with the statement that was
3381 	pointed to by ITER.  */
3382 
3383       /* The 'then block' of the 'if (len != 0)' condition is where
3384 	we'll generate the asan instrumentation code now.  */
3385       gsi = gsi_last_bb (then_bb);
3386     }
3387 
3388   /* Get an iterator on the point where we can add the condition
3389      statement for the instrumentation.  */
3390   basic_block then_bb, else_bb;
3391   gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3392 				  /*then_more_likely_p=*/false,
3393 				  /*create_then_fallthru_edge*/recover_p,
3394 				  &then_bb,
3395 				  &else_bb);
3396 
3397   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3398 			   NOP_EXPR, base);
3399   gimple_set_location (g, loc);
3400   gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3401   tree base_addr = gimple_assign_lhs (g);
3402 
3403   tree t = NULL_TREE;
3404   if (real_size_in_bytes >= 8)
3405     {
3406       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3407 					     shadow_ptr_type);
3408       t = shadow;
3409     }
3410   else
3411     {
3412       /* Slow path for 1, 2 and 4 byte accesses.  */
3413       /* Test (shadow != 0)
3414 	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
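      /* Illustrative example: a 4-byte access whose address has low bits 4,
	 hitting a shadow byte of 6, computes (4 + 3 >= 6), which is true, so
	 the access (whose last two bytes fall past the 6 addressable bytes of
	 the granule) is reported.  */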
3415       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3416 					     shadow_ptr_type);
3417       gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3418       gimple_seq seq = NULL;
3419       gimple_seq_add_stmt (&seq, shadow_test);
3420       /* Aligned (>= 8 bytes) accesses can test just
3421 	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3422 	 to be 0.  */
3423       if (align < 8)
3424 	{
3425 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3426 						   base_addr, 7));
3427 	  gimple_seq_add_stmt (&seq,
3428 			       build_type_cast (shadow_type,
3429 						gimple_seq_last (seq)));
3430 	  if (real_size_in_bytes > 1)
3431 	    gimple_seq_add_stmt (&seq,
3432 				 build_assign (PLUS_EXPR,
3433 					       gimple_seq_last (seq),
3434 					       real_size_in_bytes - 1));
3435 	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3436 	}
3437       else
3438 	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3439       gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3440       gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3441 					       gimple_seq_last (seq)));
3442       t = gimple_assign_lhs (gimple_seq_last (seq));
3443       gimple_seq_set_location (seq, loc);
3444       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3445 
3446       /* For non-constant, misaligned or otherwise weird access sizes,
3447        check first and last byte.  */
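      /* (The last-byte test built below is OR-ed into T, so either a bad
	 first byte or a bad last byte triggers the report.)  */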
3448       if (size_in_bytes == -1)
3449 	{
3450 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3451 				   MINUS_EXPR, len,
3452 				   build_int_cst (pointer_sized_int_node, 1));
3453 	  gimple_set_location (g, loc);
3454 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3455 	  tree last = gimple_assign_lhs (g);
3456 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3457 				   PLUS_EXPR, base_addr, last);
3458 	  gimple_set_location (g, loc);
3459 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3460 	  tree base_end_addr = gimple_assign_lhs (g);
3461 
3462 	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3463 						 shadow_ptr_type);
3464 	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3465 	  gimple_seq seq = NULL;
3466 	  gimple_seq_add_stmt (&seq, shadow_test);
3467 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3468 						   base_end_addr, 7));
3469 	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3470 						      gimple_seq_last (seq)));
3471 	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3472 						   gimple_seq_last (seq),
3473 						   shadow));
3474 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3475 						   gimple_seq_last (seq)));
3476 	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3477 						   gimple_seq_last (seq)));
3478 	  t = gimple_assign_lhs (gimple_seq_last (seq));
3479 	  gimple_seq_set_location (seq, loc);
3480 	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3481 	}
3482     }
3483 
3484   g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3485 			 NULL_TREE, NULL_TREE);
3486   gimple_set_location (g, loc);
3487   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3488 
3489   /* Generate call to the run-time library (e.g. __asan_report_load8).  */
3490   gsi = gsi_start_bb (then_bb);
3491   int nargs;
3492   tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3493   g = gimple_build_call (fun, nargs, base_addr, len);
3494   gimple_set_location (g, loc);
3495   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3496 
3497   gsi_remove (iter, true);
3498   *iter = gsi_start_bb (else_bb);
3499 
3500   return true;
3501 }
3502 
3503 /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
3504    into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */
3505 
3506 static tree
3507 create_asan_shadow_var (tree var_decl,
3508 			hash_map<tree, tree> &shadow_vars_mapping)
3509 {
3510   tree *slot = shadow_vars_mapping.get (var_decl);
3511   if (slot == NULL)
3512     {
3513       tree shadow_var = copy_node (var_decl);
3514 
3515       copy_body_data id;
3516       memset (&id, 0, sizeof (copy_body_data));
3517       id.src_fn = id.dst_fn = current_function_decl;
3518       copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3519 
3520       DECL_ARTIFICIAL (shadow_var) = 1;
3521       DECL_IGNORED_P (shadow_var) = 1;
3522       DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3523       gimple_add_tmp_var (shadow_var);
3524 
3525       shadow_vars_mapping.put (var_decl, shadow_var);
3526       return shadow_var;
3527     }
3528   else
3529     return *slot;
3530 }
3531 
3532 /* Expand ASAN_POISON ifn.  */
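/* A sketch of the strategy (not a specification): the ASAN_POISON definition
   of an out-of-scope variable is replaced by an ASAN_MARK (POISON, ...) on a
   freshly created shadow variable, and every real (non-debug) use of the
   poisoned SSA name is turned into, or preceded by, a call to the matching
   __asan_report_* routine on that shadow variable's address.  */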
3533 
3534 bool
3535 asan_expand_poison_ifn (gimple_stmt_iterator *iter,
3536 			bool *need_commit_edge_insert,
3537 			hash_map<tree, tree> &shadow_vars_mapping)
3538 {
3539   gimple *g = gsi_stmt (*iter);
3540   tree poisoned_var = gimple_call_lhs (g);
3541   if (!poisoned_var || has_zero_uses (poisoned_var))
3542     {
3543       gsi_remove (iter, true);
3544       return true;
3545     }
3546 
3547   if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
3548     SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
3549 				    create_tmp_var (TREE_TYPE (poisoned_var)));
3550 
3551   tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
3552 					    shadow_vars_mapping);
3553 
3554   bool recover_p;
3555   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3556     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3557   else
3558     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3559   tree size = DECL_SIZE_UNIT (shadow_var);
3560   gimple *poison_call
3561     = gimple_build_call_internal (IFN_ASAN_MARK, 3,
3562 				  build_int_cst (integer_type_node,
3563 						 ASAN_MARK_POISON),
3564 				  build_fold_addr_expr (shadow_var), size);
3565 
3566   gimple *use;
3567   imm_use_iterator imm_iter;
3568   FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
3569     {
3570       if (is_gimple_debug (use))
3571 	continue;
3572 
3573       int nargs;
3574       bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
3575       tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
3576 				    &nargs);
3577 
3578       gcall *call = gimple_build_call (fun, 1,
3579 				       build_fold_addr_expr (shadow_var));
3580       gimple_set_location (call, gimple_location (use));
3581       gimple *call_to_insert = call;
3582 
3583       /* The USE can be a gimple PHI node.  If so, insert the call on
3584 	 all edges leading to the PHI node.  */
3585       if (is_a <gphi *> (use))
3586 	{
3587 	  gphi *phi = dyn_cast<gphi *> (use);
3588 	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
3589 	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
3590 	      {
3591 		edge e = gimple_phi_arg_edge (phi, i);
3592 
3593 		/* Do not insert on an edge we can't split.  */
3594 		if (e->flags & EDGE_ABNORMAL)
3595 		  continue;
3596 
3597 		if (call_to_insert == NULL)
3598 		  call_to_insert = gimple_copy (call);
3599 
3600 		gsi_insert_seq_on_edge (e, call_to_insert);
3601 		*need_commit_edge_insert = true;
3602 		call_to_insert = NULL;
3603 	      }
3604 	}
3605       else
3606 	{
3607 	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
3608 	  if (store_p)
3609 	    gsi_replace (&gsi, call, true);
3610 	  else
3611 	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
3612 	}
3613     }
3614 
3615   SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
3616   SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
3617   gsi_replace (iter, poison_call, false);
3618 
3619   return true;
3620 }
3621 
3622 /* Instrument the current function.  */
3623 
3624 static unsigned int
3625 asan_instrument (void)
3626 {
3627   if (shadow_ptr_types[0] == NULL_TREE)
3628     asan_init_shadow_ptr_types ();
3629   transform_statements ();
3630   last_alloca_addr = NULL_TREE;
3631   return 0;
3632 }
3633 
3634 static bool
3635 gate_asan (void)
3636 {
3637   return sanitize_flags_p (SANITIZE_ADDRESS);
3638 }
3639 
3640 namespace {
3641 
3642 const pass_data pass_data_asan =
3643 {
3644   GIMPLE_PASS, /* type */
3645   "asan", /* name */
3646   OPTGROUP_NONE, /* optinfo_flags */
3647   TV_NONE, /* tv_id */
3648   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3649   0, /* properties_provided */
3650   0, /* properties_destroyed */
3651   0, /* todo_flags_start */
3652   TODO_update_ssa, /* todo_flags_finish */
3653 };
3654 
3655 class pass_asan : public gimple_opt_pass
3656 {
3657 public:
3658   pass_asan (gcc::context *ctxt)
3659     : gimple_opt_pass (pass_data_asan, ctxt)
3660   {}
3661 
3662   /* opt_pass methods: */
3663   opt_pass * clone () { return new pass_asan (m_ctxt); }
3664   virtual bool gate (function *) { return gate_asan (); }
3665   virtual unsigned int execute (function *) { return asan_instrument (); }
3666 
3667 }; // class pass_asan
3668 
3669 } // anon namespace
3670 
3671 gimple_opt_pass *
3672 make_pass_asan (gcc::context *ctxt)
3673 {
3674   return new pass_asan (ctxt);
3675 }
3676 
3677 namespace {
3678 
3679 const pass_data pass_data_asan_O0 =
3680 {
3681   GIMPLE_PASS, /* type */
3682   "asan0", /* name */
3683   OPTGROUP_NONE, /* optinfo_flags */
3684   TV_NONE, /* tv_id */
3685   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3686   0, /* properties_provided */
3687   0, /* properties_destroyed */
3688   0, /* todo_flags_start */
3689   TODO_update_ssa, /* todo_flags_finish */
3690 };
3691 
3692 class pass_asan_O0 : public gimple_opt_pass
3693 {
3694 public:
3695   pass_asan_O0 (gcc::context *ctxt)
3696     : gimple_opt_pass (pass_data_asan_O0, ctxt)
3697   {}
3698 
3699   /* opt_pass methods: */
3700   virtual bool gate (function *) { return !optimize && gate_asan (); }
3701   virtual unsigned int execute (function *) { return asan_instrument (); }
3702 
3703 }; // class pass_asan_O0
3704 
3705 } // anon namespace
3706 
3707 gimple_opt_pass *
3708 make_pass_asan_O0 (gcc::context *ctxt)
3709 {
3710   return new pass_asan_O0 (ctxt);
3711 }
3712 
3713 #include "gt-asan.h"
3714