1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
50 #include "pointer-set.h"
51
/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  Initialized in init_alias_info and released wholesale in
   delete_alias_info.  */
static bitmap_obstack alias_obstack;

/* 'true' after aliases have been computed (see compute_may_aliases).
   Tells init_alias_info that stale per-symbol and per-SSA-name alias
   information from a previous run must be cleared.  */
bool aliases_computed_p;
58
/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};
80
81
/* Counters used to display statistics on alias analysis.  The
   *_queries fields count questions asked, the *_resolved /
   *_mayalias / *_noalias fields count how they were answered.
   Printed by dump_alias_stats when the TDF_STATS dump flag is set
   (see compute_may_aliases).  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  Zeroed at the start of each compute_may_aliases run.  */
static struct alias_stats_d alias_stats;
99
/* Local functions.  Forward declarations for the file-static helpers
   defined later in this file.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void finalize_ref_all_pointers (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
118
/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.
   Created on demand by maybe_create_global_var.  */
tree global_var;
140
141 /* qsort comparison function to sort type/name tags by DECL_UID. */
142
143 static int
sort_tags_by_id(const void * pa,const void * pb)144 sort_tags_by_id (const void *pa, const void *pb)
145 {
146 tree a = *(tree *)pa;
147 tree b = *(tree *)pb;
148
149 return DECL_UID (a) - DECL_UID (b);
150 }
151
/* Initialize WORKLIST to contain those memory tags that are marked call
   clobbered.  Initialize WORKLIST2 to contain the reasons these
   memory tags escaped.  */

static void
init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
				  VEC (int, heap) **worklist2)
{
  referenced_var_iterator rvi;
  tree curr;

  FOR_EACH_REFERENCED_VAR (curr, rvi)
    {
      if (MTAG_P (curr) && is_call_clobbered (curr))
	{
	  /* The two vectors are kept in lock-step: WORKLIST2[i] is
	     the escape reason for the tag in WORKLIST[i].  */
	  VEC_safe_push (tree, heap, *worklist, curr);
	  VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
	}
    }
}
172
173 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
174 ALIAS is not already marked call clobbered, and is a memory
175 tag. */
176
177 static void
add_to_worklist(tree alias,VEC (tree,heap)** worklist,VEC (int,heap)** worklist2,int reason)178 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
179 VEC (int, heap) **worklist2,
180 int reason)
181 {
182 if (MTAG_P (alias) && !is_call_clobbered (alias))
183 {
184 VEC_safe_push (tree, heap, *worklist, alias);
185 VEC_safe_push (int, heap, *worklist2, reason);
186 }
187 }
188
189 /* Mark aliases of TAG as call clobbered, and place any tags on the
190 alias list that were not already call clobbered on WORKLIST. */
191
192 static void
mark_aliases_call_clobbered(tree tag,VEC (tree,heap)** worklist,VEC (int,heap)** worklist2)193 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
194 VEC (int, heap) **worklist2)
195 {
196 unsigned int i;
197 VEC (tree, gc) *ma;
198 tree entry;
199 var_ann_t ta = var_ann (tag);
200
201 if (!MTAG_P (tag))
202 return;
203 ma = may_aliases (tag);
204 if (!ma)
205 return;
206
207 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
208 {
209 if (!unmodifiable_var_p (entry))
210 {
211 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
212 mark_call_clobbered (entry, ta->escape_mask);
213 }
214 }
215 }
216
/* Tags containing global vars need to be marked as global.
   Tags containing call clobbered vars need to be marked as call
   clobbered.  Iterates to a fixed point because tags may appear in the
   may-aliases lists of other tags.  */

static void
compute_tag_properties (void)
{
  referenced_var_iterator rvi;
  tree tag;
  bool changed = true;
  VEC (tree, heap) *taglist = NULL;

  /* Collect every memory tag except structure-field tags, which are
     handled elsewhere.  */
  FOR_EACH_REFERENCED_VAR (tag, rvi)
    {
      if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
	continue;
      VEC_safe_push (tree, heap, taglist, tag);
    }

  /* We sort the taglist by DECL_UID, for two reasons.
     1. To get a sequential ordering to make the bitmap accesses
     faster.
     2. Because of the way we compute aliases, it's more likely that
     an earlier tag is included in a later tag, and this will reduce
     the number of iterations.

     If we had a real tag graph, we would just topo-order it and be
     done with it.  */
  qsort (VEC_address (tree, taglist),
	 VEC_length (tree, taglist),
	 sizeof (tree),
	 sort_tags_by_id);

  /* Go through each tag not marked as global, and if it aliases
     global vars, mark it global.

     If the tag contains call clobbered vars, mark it call
     clobbered.

     This loop iterates because tags may appear in the may-aliases
     list of other tags when we group.  */

  while (changed)
    {
      unsigned int k;

      changed = false;
      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
	{
	  VEC (tree, gc) *ma;
	  unsigned int i;
	  tree entry;
	  bool tagcc = is_call_clobbered (tag);
	  bool tagglobal = MTAG_GLOBAL (tag);

	  /* Nothing left to learn about this tag.  */
	  if (tagcc && tagglobal)
	    continue;

	  ma = may_aliases (tag);
	  if (!ma)
	    continue;

	  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
	    {
	      /* Call clobbered entries cause the tag to be marked
		 call clobbered.  */
	      if (!tagcc && is_call_clobbered (entry))
		{
		  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
		  tagcc = true;
		  changed = true;
		}

	      /* Global vars cause the tag to be marked global.  */
	      if (!tagglobal && is_global_var (entry))
		{
		  MTAG_GLOBAL (tag) = true;
		  changed = true;
		  tagglobal = true;
		}

	      /* Early exit once both global and cc are set, since the
		 loop can't do any more than that.  */
	      if (tagcc && tagglobal)
		break;
	    }
	}
    }
  VEC_free (tree, heap, taglist);
}
307
308 /* Set up the initial variable clobbers and globalness.
309 When this function completes, only tags whose aliases need to be
310 clobbered will be set clobbered. Tags clobbered because they
311 contain call clobbered vars are handled in compute_tag_properties. */
312
313 static void
set_initial_properties(struct alias_info * ai)314 set_initial_properties (struct alias_info *ai)
315 {
316 unsigned int i;
317 referenced_var_iterator rvi;
318 tree var;
319 tree ptr;
320
321 FOR_EACH_REFERENCED_VAR (var, rvi)
322 {
323 if (is_global_var (var)
324 && (!var_can_have_subvars (var)
325 || get_subvars_for_var (var) == NULL))
326 {
327 if (!unmodifiable_var_p (var))
328 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
329 }
330 else if (TREE_CODE (var) == PARM_DECL
331 && default_def (var)
332 && POINTER_TYPE_P (TREE_TYPE (var)))
333 {
334 tree def = default_def (var);
335 get_ptr_info (def)->value_escapes_p = 1;
336 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
337 }
338 }
339
340 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
341 {
342 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
343 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
344
345 if (pi->value_escapes_p)
346 {
347 /* If PTR escapes then its associated memory tags and
348 pointed-to variables are call-clobbered. */
349 if (pi->name_mem_tag)
350 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
351
352 if (v_ann->symbol_mem_tag)
353 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
354
355 if (pi->pt_vars)
356 {
357 bitmap_iterator bi;
358 unsigned int j;
359 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
360 if (!unmodifiable_var_p (referenced_var (j)))
361 mark_call_clobbered (referenced_var (j), pi->escape_mask);
362 }
363 }
364
365 /* If the name tag is call clobbered, so is the symbol tag
366 associated with the base VAR_DECL. */
367 if (pi->name_mem_tag
368 && v_ann->symbol_mem_tag
369 && is_call_clobbered (pi->name_mem_tag))
370 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
371
372 /* Name tags and symbol tags that we don't know where they point
373 to, might point to global memory, and thus, are clobbered.
374
375 FIXME: This is not quite right. They should only be
376 clobbered if value_escapes_p is true, regardless of whether
377 they point to global memory or not.
378 So removing this code and fixing all the bugs would be nice.
379 It is the cause of a bunch of clobbering. */
380 if ((pi->pt_global_mem || pi->pt_anything)
381 && pi->is_dereferenced && pi->name_mem_tag)
382 {
383 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
384 MTAG_GLOBAL (pi->name_mem_tag) = true;
385 }
386
387 if ((pi->pt_global_mem || pi->pt_anything)
388 && pi->is_dereferenced
389 && v_ann->symbol_mem_tag)
390 {
391 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
392 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
393 }
394 }
395 }
396
397
/* This variable is set to true if we are updating the used alone
   information for SMTs, or are in a pass that is going to break it
   temporarily.  Set/cleared by recalculate_used_alone and
   compute_may_aliases.  */
bool updating_used_alone;
402
403 /* Compute which variables need to be marked call clobbered because
404 their tag is call clobbered, and which tags need to be marked
405 global because they contain global variables. */
406
407 static void
compute_call_clobbered(struct alias_info * ai)408 compute_call_clobbered (struct alias_info *ai)
409 {
410 VEC (tree, heap) *worklist = NULL;
411 VEC(int,heap) *worklist2 = NULL;
412
413 set_initial_properties (ai);
414 init_transitive_clobber_worklist (&worklist, &worklist2);
415 while (VEC_length (tree, worklist) != 0)
416 {
417 tree curr = VEC_pop (tree, worklist);
418 int reason = VEC_pop (int, worklist2);
419
420 mark_call_clobbered (curr, reason);
421 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
422 }
423 VEC_free (tree, heap, worklist);
424 VEC_free (int, heap, worklist2);
425 compute_tag_properties ();
426 }
427
428
429 /* Helper for recalculate_used_alone. Return a conservatively correct
430 answer as to whether STMT may make a store on the LHS to SYM. */
431
432 static bool
lhs_may_store_to(tree stmt,tree sym ATTRIBUTE_UNUSED)433 lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
434 {
435 tree lhs = TREE_OPERAND (stmt, 0);
436
437 lhs = get_base_address (lhs);
438
439 if (!lhs)
440 return false;
441
442 if (TREE_CODE (lhs) == SSA_NAME)
443 return false;
444 /* We could do better here by looking at the type tag of LHS, but it
445 is unclear whether this is worth it. */
446 return true;
447 }
448
/* Recalculate the used_alone information for SMTs.  An SMT is "used
   alone" when it appears as a bare virtual operand (rather than via
   one of its aliases); tags whose used-alone status changes in either
   direction are marked for renaming.  */

void
recalculate_used_alone (void)
{
  VEC (tree, heap) *calls = NULL;
  block_stmt_iterator bsi;
  basic_block bb;
  tree stmt;
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  /* First, reset all the SMT used alone bits to zero, remembering the
     previous value so we can detect transitions afterwards.  */
  updating_used_alone = true;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
	SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
	SMT_USED_ALONE (var) = 0;
      }

  /* Walk all the statements.
     Calls get put into a list of statements to update, since we will
     need to update operands on them if we make any changes.
     If we see a bare use of a SMT anywhere in a real virtual use or virtual
     def, mark the SMT as used alone, and for renaming.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  bool iscall = false;
	  ssa_op_iter iter;

	  stmt = bsi_stmt (bsi);

	  /* Both plain calls and calls on the RHS of an assignment
	     count as call statements here.  */
	  if (TREE_CODE (stmt) == CALL_EXPR
	      || (TREE_CODE (stmt) == MODIFY_EXPR
		  && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
	    {
	      iscall = true;
	      VEC_safe_push (tree, heap, calls, stmt);
	    }

	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
				     SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
	    {
	      tree svar = var;

	      /* Strip the SSA name down to its underlying symbol.  */
	      if (TREE_CODE (var) == SSA_NAME)
		svar = SSA_NAME_VAR (var);

	      if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
		{
		  /* We only care about the LHS on calls.  */
		  if (iscall && !lhs_may_store_to (stmt, svar))
		    continue;

		  if (!SMT_USED_ALONE (svar))
		    {
		      SMT_USED_ALONE (svar) = true;

		      /* Only need to mark for renaming if it wasn't
			 used alone before.  */
		      if (!SMT_OLD_USED_ALONE (svar))
			mark_sym_for_renaming (svar);
		    }
		}
	    }
	}
    }

  /* Update the operands on all the calls we saw.  */
  if (calls)
    {
      for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
	update_stmt (stmt);
    }

  /* We need to mark SMT's that are no longer used for renaming so the
     symbols go away, or else verification will be angry with us, even
     though they are dead.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
	if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
	  mark_sym_for_renaming (var);
      }

  VEC_free (tree, heap, calls);
  updating_used_alone = false;
}
541
542 /* Compute may-alias information for every variable referenced in function
543 FNDECL.
544
545 Alias analysis proceeds in 3 main phases:
546
547 1- Points-to and escape analysis.
548
549 This phase walks the use-def chains in the SSA web looking for three
550 things:
551
552 * Assignments of the form P_i = &VAR
553 * Assignments of the form P_i = malloc()
554 * Pointers and ADDR_EXPR that escape the current function.
555
556 The concept of 'escaping' is the same one used in the Java world. When
557 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
558 outside of the current function. So, assignment to global variables,
559 function arguments and returning a pointer are all escape sites, as are
560 conversions between pointers and integers.
561
562 This is where we are currently limited. Since not everything is renamed
563 into SSA, we lose track of escape properties when a pointer is stashed
564 inside a field in a structure, for instance. In those cases, we are
565 assuming that the pointer does escape.
566
567 We use escape analysis to determine whether a variable is
568 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
569 is call-clobbered. If a pointer P_i escapes, then all the variables
570 pointed-to by P_i (and its memory tag) also escape.
571
572 2- Compute flow-sensitive aliases
573
574 We have two classes of memory tags. Memory tags associated with the
575 pointed-to data type of the pointers in the program. These tags are
576 called "symbol memory tag" (SMT). The other class are those associated
577 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
578 when adding operands for an INDIRECT_REF *P_i, we will first check
579 whether P_i has a name tag, if it does we use it, because that will have
580 more precise aliasing information. Otherwise, we use the standard symbol
581 tag.
582
583 In this phase, we go through all the pointers we found in points-to
584 analysis and create alias sets for the name memory tags associated with
585 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
586 it points to and its tag.
587
588
589 3- Compute flow-insensitive aliases
590
591 This pass will compare the alias set of every symbol memory tag and
592 every addressable variable found in the program. Given a symbol
593 memory tag SMT and an addressable variable V. If the alias sets of
594 SMT and V conflict (as computed by may_alias_p), then V is marked
595 as an alias tag and added to the alias set of SMT.
596
597 For instance, consider the following function:
598
599 foo (int i)
600 {
601 int *p, a, b;
602
603 if (i > 10)
604 p = &a;
605 else
606 p = &b;
607
608 *p = 3;
609 a = b + 2;
610 return *p;
611 }
612
613 After aliasing analysis has finished, the symbol memory tag for pointer
614 'p' will have two aliases, namely variables 'a' and 'b'. Every time
615 pointer 'p' is dereferenced, we want to mark the operation as a
616 potential reference to 'a' and 'b'.
617
618 foo (int i)
619 {
620 int *p, a, b;
621
622 if (i_2 > 10)
623 p_4 = &a;
624 else
625 p_6 = &b;
626 # p_1 = PHI <p_4(1), p_6(2)>;
627
628 # a_7 = V_MAY_DEF <a_3>;
629 # b_8 = V_MAY_DEF <b_5>;
630 *p_1 = 3;
631
632 # a_9 = V_MAY_DEF <a_7>
633 # VUSE <b_8>
634 a_9 = b_8 + 2;
635
636 # VUSE <a_9>;
637 # VUSE <b_8>;
638 return *p_1;
639 }
640
641 In certain cases, the list of may aliases for a pointer may grow too
642 large. This may cause an explosion in the number of virtual operands
643 inserted in the code. Resulting in increased memory consumption and
644 compilation time.
645
646 When the number of virtual operands needed to represent aliased
647 loads and stores grows too large (configurable with @option{--param
648 max-aliased-vops}), alias sets are grouped to avoid severe
649 compile-time slow downs and memory consumption. See group_aliases. */
650
/* Entry point of the "alias" pass.  Runs the analysis phases described
   in the comment above in order; the ordering is significant (e.g.
   flow-sensitive aliasing must precede flow-insensitive aliasing, and
   ref-all finalization must follow call-clobbering).  Returns 0 (no
   extra TODO flags).  */

static unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered symbol tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* If the program contains ref-all pointers, finalize may-alias information
     for them.  This pass needs to be run after call-clobbering information
     has been computed.  */
  if (ai->ref_all_symbol_mem_tag)
    finalize_ref_all_pointers (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  /* Bring virtual operands up to date on every statement before
     recomputing the SMT used-alone information.  */
  updating_used_alone = true;
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    update_stmt_if_modified (bsi_stmt (bsi));
	  }
      }
  }
  recalculate_used_alone ();
  updating_used_alone = false;
  return 0;
}
732
733
/* Pass descriptor for the may-alias computation; the pass manager
   invokes compute_may_aliases via the execute hook.  */
struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
752
753
/* Data structure used to count the number of dereferences to PTR
   inside an expression; threaded through walk_tree as the DATA
   argument of count_ptr_derefs.  */
struct count_ptr_d
{
  /* Pointer being searched for.  */
  tree ptr;
  /* Number of INDIRECT_REFs of PTR found so far.  */
  unsigned count;
};
761
762
763 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
764 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
765
766 static tree
count_ptr_derefs(tree * tp,int * walk_subtrees,void * data)767 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
768 {
769 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
770
771 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
772 pointer 'ptr' is *not* dereferenced, it is simply used to compute
773 the address of 'fld' as 'ptr + offsetof(fld)'. */
774 if (TREE_CODE (*tp) == ADDR_EXPR)
775 {
776 *walk_subtrees = 0;
777 return NULL_TREE;
778 }
779
780 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
781 count_p->count++;
782
783 return NULL_TREE;
784 }
785
786
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The two counts are stored in *NUM_USES_P and
   *NUM_DEREFS_P respectively.  *IS_STORE is set to 'true' if at
   least one of those dereferences is a store operation.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_derefs_p, bool *is_store)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_derefs_p = 0;
  *is_store = false;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      /* Split STMT into a stored-to part (LHS) and a read part (RHS)
	 so stores and loads can be counted separately.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
	{
	  lhs = TREE_OPERAND (stmt, 0);
	  rhs = TREE_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = TREE_OPERAND (e, 0);
	  rhs = TREE_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  /* For inline asm, outputs play the role of the LHS and
	     inputs the role of the RHS.  */
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  /* A bare CALL_EXPR: everything is on the "read" side.  */
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      /* Any dereference of PTR on the LHS counts as a store.  */
      if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *is_store = true;
	  *num_derefs_p = count.count;
	}

      if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_derefs_p += count.count;
	}
    }

  /* Every dereference of PTR is also a use of PTR.  */
  gcc_assert (*num_uses_p >= *num_derefs_p);
}
867
/* Initialize the data structures used for alias analysis.  Allocates a
   fresh struct alias_info and, if a previous run left stale alias
   information behind (aliases_computed_p), clears it from all symbols
   and SSA names.  The caller frees the result with delete_alias_info.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  ai->processed_ptrs = VEC_alloc (tree, heap, 50);
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  var_ann_t ann = var_ann (var);

	  ann->is_aliased = 0;
	  ann->may_aliases = NULL;
	  NUM_REFERENCES_CLEAR (ann);

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are for
	     a global variable, so we *don't* clear their call clobberedness
	     just because they are tags, though we will clear it if they
	     aren't for global variables.  */
	  if (TREE_CODE (var) == NAME_MEMORY_TAG
	      || TREE_CODE (var) == SYMBOL_MEMORY_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}
953
954
/* Deallocate memory used by alias analysis.  Releases everything
   allocated by init_alias_info (including the whole alias_obstack)
   plus the points-to sets, and clears the cached reference counts on
   every referenced variable.  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  sbitmap_free (ai->ssa_names_visited);
  VEC_free (tree, heap, ai->processed_ptrs);

  /* Free the per-variable alias maps before the arrays holding them.  */
  for (i = 0; i < ai->num_addressable_vars; i++)
    free (ai->addressable_vars[i]);

  FOR_EACH_REFERENCED_VAR(var, rvi)
    {
      var_ann_t ann = var_ann (var);
      NUM_REFERENCES_CLEAR (ann);
    }

  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    free (ai->pointers[i]);
  free (ai->pointers);

  /* Releasing the obstack frees all bitmaps allocated from it.  */
  BITMAP_FREE (ai->written_vars);
  BITMAP_FREE (ai->dereferenced_ptrs_store);
  BITMAP_FREE (ai->dereferenced_ptrs_load);
  bitmap_obstack_release (&alias_obstack);
  free (ai);

  delete_points_to_sets ();
}
990
991 /* Used for hashing to identify pointer infos with identical
992 pt_vars bitmaps. */
993 static int
eq_ptr_info(const void * p1,const void * p2)994 eq_ptr_info (const void *p1, const void *p2)
995 {
996 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
997 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
998 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
999 }
1000
1001 static hashval_t
ptr_info_hash(const void * p)1002 ptr_info_hash (const void *p)
1003 {
1004 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
1005 return bitmap_hash (n->pt_vars);
1006 }
1007
1008 /* Create name tags for all the pointers that have been dereferenced.
1009 We only create a name tag for a pointer P if P is found to point to
1010 a set of variables (so that we can alias them to *P) or if it is
1011 the result of a call to malloc (which means that P cannot point to
1012 anything else nor alias any other variable).
1013
1014 If two pointers P and Q point to the same set of variables, they
1015 are assigned the same name tag. */
1016
static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;
  htab_t ptr_hash;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      /* Skip SSA names that are not pointers or that carry no
	 points-to information at all.  */
      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a symbol tag.  */
      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  /* Hash pointer infos by their points-to bitmap so that pointers
     with identical points-to sets end up sharing one name tag.  */
  ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree old_name_tag = pi->name_mem_tag;
      struct ptr_info_def **slot;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
      if (*slot)
	pi->name_mem_tag = (*slot)->name_mem_tag;
      else
	{
	  *slot = pi;
	  /* If we didn't find a pointer with the same points-to set
	     as PTR, create a new name tag if needed.  */
	  if (pi->name_mem_tag == NULL_TREE)
	    pi->name_mem_tag = get_nmt_for (ptr);
	}

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      /* Dereferences of PTR are volatile if the pointed-to type is
	 volatile; propagate that bit onto the name tag.  */
      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }

  htab_delete (ptr_hash);

  VEC_free (tree, heap, with_ptvars);
}
1109
1110
1111 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1112 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1113 name tag and the variables it points-to are call-clobbered. Finally, if
1114 P_i escapes and we could not determine where it points to, then all the
1115 variables in the same alias set as *P_i are marked call-clobbered. This
1116 is necessary because we must assume that P_i may take the address of any
1117 variable in the same alias set. */
1118
static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  /* Retrieve the points-to solution for every processed pointer.
     Pointers whose solution cannot be determined are conservatively
     treated as pointing anywhere.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  /* Assign name memory tags, sharing tags between pointers with
     identical points-to sets.  */
  create_name_tags ();

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      unsigned j;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  Each pointed-to variable is added both
	 to the name tag and to the pointer's symbol memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	  {
	    add_may_alias (pi->name_mem_tag, referenced_var (j));
	    add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
	  }
    }
}
1152
1153
1154 /* Compute type-based alias sets. Traverse all the pointers and
1155 addressable variables found in setup_pointers_and_addressables.
1156
1157 For every pointer P in AI->POINTERS and addressable variable V in
1158 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1159 memory tag (SMT) if their alias sets conflict. V is then marked as
1160 an alias tag so that the operand scanner knows that statements
1161 containing V have aliased operands. */
1162
static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --params max-alias-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's symbol memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->symbol_mem_tag;
      var_ann_t tag_ann = var_ann (tag);
      tree var;

      /* Call-clobbering information is not finalized yet at this point.
	 Ref-all pointers are handled later by
	 finalize_ref_all_pointers.  */
      if (PTR_IS_REF_ALL (p_map->var))
	continue;

      p_map->total_alias_vops = 0;
      p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);

      /* Add any pre-existing may_aliases to the bitmap used to represent
	 TAG's alias set in case we need to group aliases.  */
      for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
	bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.

	     Note this is effectively random accessing elements in
	     the sparse bitset, which can be highly inefficient.
	     So we first check the call_clobbered status of the
	     tag and variable before querying the bitmap.  */
	  tag_stored_p = is_call_clobbered (tag)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
	  var_stored_p = is_call_clobbered (var)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (var));
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = NUM_REFERENCES (tag_ann);
	      num_var_refs = NUM_REFERENCES (v_ann);

	      /* Add VAR to TAG's may-aliases set.  */

	      /* We should never have a var with subvars here, because
		 they shouldn't get into the set of addressable vars */
	      gcc_assert (!var_can_have_subvars (var)
			  || get_subvars_for_var (var) == NULL);

	      add_may_alias (tag, var);
	      /* Update the bitmap used to represent TAG's alias set
		 in case we need to group aliases.  */
	      bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

	      /* Update the total number of virtual operands due to
		 aliasing.  Since we are adding one more alias to TAG's
		 may-aliases set, the total number of virtual operands due
		 to aliasing will be increased by the number of references
		 made to VAR and TAG (every reference to TAG will also
		 count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);
	    }
	}
    }

  /* Since this analysis is based exclusively on symbols, it fails to
     handle cases where two pointers P and Q have different memory
     tags with conflicting alias set numbers but no aliased symbols in
     common.

     For example, suppose that we have two memory tags SMT.1 and SMT.2
     such that

     		may-aliases (SMT.1) = { a }
		may-aliases (SMT.2) = { b }

     and the alias set number of SMT.1 conflicts with that of SMT.2.
     Since they don't have symbols in common, loads and stores from
     SMT.1 and SMT.2 will seem independent of each other, which will
     lead to the optimizers making invalid transformations (see
     testsuite/gcc.c-torture/execute/pr15262-[12].c).

     To avoid this problem, we do a final traversal of AI->POINTERS
     looking for pairs of pointers that have no aliased symbols in
     common and yet have conflicting alias set numbers.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map1 = ai->pointers[i];
      tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
      bitmap may_aliases1 = p_map1->may_aliases;

      if (PTR_IS_REF_ALL (p_map1->var))
	continue;

      /* Only pairs (I, J) with J > I need checking; the relation is
	 symmetric.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  struct alias_map_d *p_map2 = ai->pointers[j];
	  tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
	  bitmap may_aliases2 = p_map2->may_aliases;

	  if (PTR_IS_REF_ALL (p_map2->var))
	    continue;

	  /* If the pointers may not point to each other, do nothing.  */
	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
	    continue;

	  /* The two pointers may alias each other.  If they already have
	     symbols in common, do nothing.  */
	  if (bitmap_intersect_p (may_aliases1, may_aliases2))
	    continue;

	  if (!bitmap_empty_p (may_aliases2))
	    {
	      unsigned int k;
	      bitmap_iterator bi;

	      /* Add all the aliases for TAG2 into TAG1's alias set.
		 FIXME, update grouping heuristic counters.  */
	      EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
		add_may_alias (tag1, referenced_var (k));
	      bitmap_ior_into (may_aliases1, may_aliases2);
	    }
	  else
	    {
	      /* Since TAG2 does not have any aliases of its own, add
		 TAG2 itself to the alias set of TAG1.  */
	      add_may_alias (tag1, tag2);
	      bitmap_set_bit (may_aliases1, DECL_UID (tag2));
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);
}
1329
1330
1331 /* Finalize may-alias information for ref-all pointers. Traverse all
1332 the addressable variables found in setup_pointers_and_addressables.
1333
1334 If flow-sensitive alias analysis has attached a name memory tag to
1335 a ref-all pointer, we will use it for the dereferences because that
1336 will have more precise aliasing information. But if there is no
1337 name tag, we will use a special symbol tag that aliases all the
1338 call-clobbered addressable variables. */
1339
1340 static void
finalize_ref_all_pointers(struct alias_info * ai)1341 finalize_ref_all_pointers (struct alias_info *ai)
1342 {
1343 size_t i;
1344
1345 if (global_var)
1346 add_may_alias (ai->ref_all_symbol_mem_tag, global_var);
1347 else
1348 {
1349 /* First add the real call-clobbered variables. */
1350 for (i = 0; i < ai->num_addressable_vars; i++)
1351 {
1352 tree var = ai->addressable_vars[i]->var;
1353 if (is_call_clobbered (var))
1354 add_may_alias (ai->ref_all_symbol_mem_tag, var);
1355 }
1356
1357 /* Then add the call-clobbered pointer memory tags. See
1358 compute_flow_insensitive_aliasing for the rationale. */
1359 for (i = 0; i < ai->num_pointers; i++)
1360 {
1361 tree ptr = ai->pointers[i]->var, tag;
1362 if (PTR_IS_REF_ALL (ptr))
1363 continue;
1364 tag = var_ann (ptr)->symbol_mem_tag;
1365 if (is_call_clobbered (tag))
1366 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1367 }
1368 }
1369 }
1370
1371
1372 /* Comparison function for qsort used in group_aliases. */
1373
1374 static int
total_alias_vops_cmp(const void * p,const void * q)1375 total_alias_vops_cmp (const void *p, const void *q)
1376 {
1377 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1378 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1379 long n1 = (*p1)->total_alias_vops;
1380 long n2 = (*p2)->total_alias_vops;
1381
1382 /* We want to sort in descending order. */
1383 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1384 }
1385
1386 /* Group all the aliases for TAG to make TAG represent all the
1387 variables in its alias set. Update the total number
1388 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1389 function will make TAG be the unique alias tag for all the
1390 variables in its may-aliases. So, given:
1391
1392 may-aliases(TAG) = { V1, V2, V3 }
1393
1394 This function will group the variables into:
1395
1396 may-aliases(V1) = { TAG }
1397 may-aliases(V2) = { TAG }
     may-aliases(V3) = { TAG }  */
1399
static void
group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
{
  unsigned int i;
  var_ann_t tag_ann = var_ann (tag);
  size_t num_tag_refs = NUM_REFERENCES (tag_ann);
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
    {
      tree var = referenced_var (i);
      var_ann_t ann = var_ann (var);

      /* Make TAG the unique alias of VAR.  */
      ann->is_aliased = 0;
      ann->may_aliases = NULL;

      /* Note that VAR and TAG may be the same if the function has no
	 addressable variables (see the discussion at the end of
	 setup_pointers_and_addressables).  */
      if (var != tag)
	add_may_alias (var, tag);

      /* Reduce total number of virtual operands contributed
	 by TAG on behalf of VAR.  Notice that the references to VAR
	 itself won't be removed.  We will merely replace them with
	 references to TAG.  */
      ai->total_alias_vops -= num_tag_refs;
    }

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
}
1439
1440
1441 /* Group may-aliases sets to reduce the number of virtual operands due
1442 to aliasing.
1443
1444 1- Sort the list of pointers in decreasing number of contributed
1445 virtual operands.
1446
1447 2- Take the first entry in AI->POINTERS and revert the role of
1448 the memory tag and its aliases. Usually, whenever an aliased
1449 variable Vi is found to alias with a memory tag T, we add Vi
1450 to the may-aliases set for T. Meaning that after alias
1451 analysis, we will have:
1452
1453 may-aliases(T) = { V1, V2, V3, ..., Vn }
1454
1455 This means that every statement that references T, will get 'n'
1456 virtual operands for each of the Vi tags. But, when alias
1457 grouping is enabled, we make T an alias tag and add it to the
1458 alias set of all the Vi variables:
1459
1460 may-aliases(V1) = { T }
1461 may-aliases(V2) = { T }
1462 ...
1463 may-aliases(Vn) = { T }
1464
1465 This has two effects: (a) statements referencing T will only get
1466 a single virtual operand, and, (b) all the variables Vi will now
1467 appear to alias each other. So, we lose alias precision to
1468 improve compile time. But, in theory, a program with such a high
1469 level of aliasing should not be very optimizable in the first
1470 place.
1471
1472 3- Since variables may be in the alias set of more than one
1473 memory tag, the grouping done in step (2) needs to be extended
1474 to all the memory tags that have a non-empty intersection with
1475 the may-aliases set of tag T. For instance, if we originally
1476 had these may-aliases sets:
1477
1478 may-aliases(T) = { V1, V2, V3 }
1479 may-aliases(R) = { V2, V4 }
1480
1481 In step (2) we would have reverted the aliases for T as:
1482
1483 may-aliases(V1) = { T }
1484 may-aliases(V2) = { T }
1485 may-aliases(V3) = { T }
1486
1487 But note that now V2 is no longer aliased with R. We could
1488 add R to may-aliases(V2), but we are in the process of
1489 grouping aliases to reduce virtual operands so what we do is
1490 add V4 to the grouping to obtain:
1491
1492 may-aliases(V1) = { T }
1493 may-aliases(V2) = { T }
1494 may-aliases(V3) = { T }
1495 may-aliases(V4) = { T }
1496
1497 4- If the total number of virtual operands due to aliasing is
1498 still above the threshold set by max-alias-vops, go back to (2). */
1499
static void
group_aliases (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
      bitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other symbol tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  bitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
	    {
	      tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;

	      /* Fold TAG2's alias set into TAG1's.  */
	      bitmap_ior_into (tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      bitmap_clear (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = SMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to SMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'SMT.20' in between.  To prevent this we have to
     replace 'a' with 'SMT.20' in the name tag of p_5.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      size_t j;
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      VEC(tree,gc) *aliases;
      tree alias;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
	{
	  var_ann_t ann = var_ann (alias);

	  /* A non-tag alias (or a structure field tag) that itself has
	     a may-alias set was grouped above; redirect the name tag to
	     the variable's unique alias.  */
	  if ((!MTAG_P (alias)
	       || TREE_CODE (alias) == STRUCT_FIELD_TAG)
	      && ann->may_aliases)
	    {
	      tree new_alias;

	      /* After grouping, a grouped variable has exactly one
		 alias: its tag.  */
	      gcc_assert (VEC_length (tree, ann->may_aliases) == 1);

	      new_alias = VEC_index (tree, ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}
1608
1609
1610 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1611
1612 static void
create_alias_map_for(tree var,struct alias_info * ai)1613 create_alias_map_for (tree var, struct alias_info *ai)
1614 {
1615 struct alias_map_d *alias_map;
1616 alias_map = XCNEW (struct alias_map_d);
1617 alias_map->var = var;
1618 alias_map->set = get_alias_set (var);
1619 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1620 }
1621
1622
1623 /* Create memory tags for all the dereferenced pointers and build the
1624 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1625 sets. Based on the address escape and points-to information collected
1626 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1627 variables whose address is not needed anymore. */
1628
static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t n_vars, num_addressable_vars, num_pointers;
  referenced_var_iterator rvi;
  tree var;
  VEC (tree, heap) *varvec = NULL;
  safe_referenced_var_iterator srvi;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique symbol tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
  ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating symbol memory tags within this loop,
     cache the value of NUM_REFERENCED_VARS to avoid processing the
     additional tags unnecessarily.  */
  n_vars = num_referenced_vars;

  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
    {
      var_ann_t v_ann = var_ann (var);
      subvar_t svars;

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_flow_insensitive_aliasing.  Similarly, symbol memory
	 tags are already accounted for when we process their
	 associated pointer.

	 Structure fields, on the other hand, have to have some of this
	 information processed for them, but it's pointless to mark them
	 non-addressable (since they are fake variables anyway).  */
      if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
         address is not needed anymore.  This is caused by the propagation
         of ADDR_EXPR constants into INDIRECT_REF expressions and the
         removal of dead pointer assignments done by the early scalar
         cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
	      && TREE_CODE (var) != RESULT_DECL
	      && !is_global_var (var))
	    {
	      bool okay_to_mark = true;

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      mark_sym_for_renaming (var);

	      /* If VAR can have sub-variables, and any of its
		 sub-variables has its address taken, then we cannot
		 remove the addressable flag from VAR.  */
	      if (var_can_have_subvars (var)
		  && (svars = get_subvars_for_var (var)))
		{
		  subvar_t sv;

		  for (sv = svars; sv; sv = sv->next)
		    {
		      if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
			okay_to_mark = false;
		      mark_sym_for_renaming (sv->var);
		    }
		}

	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      if (okay_to_mark)
		mark_non_addressable (var);
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
         entry in ADDRESSABLE_VARS for VAR.  Variables with sub-variables
	 are represented by their subvars instead.  */
      if (may_be_aliased (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  create_alias_map_for (var, ai);
	  mark_sym_for_renaming (var);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
         array and create a symbol memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
	    {
	      tree tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The symbol tag will need to be renamed into SSA
		 afterwards. Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create symbol tags the first time.  */
	      mark_sym_for_renaming (tag);

	      /* Similarly, if pointer VAR used to have another type
		 tag, we will need to process it in the renamer to
		 remove the stale virtual operands.  */
	      if (v_ann->symbol_mem_tag)
		mark_sym_for_renaming (v_ann->symbol_mem_tag);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->symbol_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
		bitmap_set_bit (ai->written_vars, DECL_UID (tag));

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 TAG.  */
	      NUM_REFERENCES_SET (t_ann,
				  NUM_REFERENCES (t_ann)
				  + NUM_REFERENCES (v_ann));
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 symbol memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->symbol_mem_tag;
	      if (tag)
		{
		  mark_sym_for_renaming (tag);
		  ann->symbol_mem_tag = NULL_TREE;
		}
	    }
	}
    }

  VEC_free (tree, heap, varvec);
}
1801
1802
1803 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1804 every call site, we need to emit V_MAY_DEF expressions to represent the
1805 clobbering effects of the call for variables whose address escapes the
1806 current function.
1807
1808 One approach is to group all call-clobbered variables into a single
1809 representative that is used as an alias of every call-clobbered variable
1810 (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
1811 references to any call clobbered variable is a reference to .GLOBAL_VAR.
1812
1813 The second approach is to emit a clobbering V_MAY_DEF for every
1814 call-clobbered variable at call sites. This is the preferred way in terms
1815 of optimization opportunities but it may create too many V_MAY_DEF operands
1816 if there are many call clobbered variables and function calls in the
1817 function.
1818
1819 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1820 function calls found by the number of call-clobbered variables. If that
1821 product is beyond a certain threshold, as determined by the parameterized
1822 values shown below, we use .GLOBAL_VAR.
1823
1824 FIXME. This heuristic should be improved. One idea is to use several
1825 .GLOBAL_VARs of different types instead of a single one. The thresholds
1826 have been derived from a typical bootstrap cycle, including all target
1827 libraries. Compile times were found increase by ~1% compared to using
1828 .GLOBAL_VAR. */
1829
static void
maybe_create_global_var (struct alias_info *ai)
{
  unsigned i, n_clobbered;
  bitmap_iterator bi;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  n_clobbered++;
	}

      /* If the number of virtual operands that would be needed to
	 model all the call-clobbered variables is larger than
	 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.

	 Also create .GLOBAL_VAR if there are no call-clobbered
	 variables and the program contains a mixture of pure/const
	 and regular function calls.  This is to avoid the problem
	 described in PR 20115:

	      int X;
	      int func_pure (void) { return X; }
	      int func_non_pure (int a) { X += a; }
	      int foo ()
	      {
	 	int a = func_pure ();
		func_non_pure (a);
		a = func_pure ();
		return a;
	      }

	 Since foo() has no call-clobbered variables, there is
	 no relationship between the calls to func_pure and
	 func_non_pure.  Since func_pure has no side-effects, value
	 numbering optimizations elide the second call to func_pure.
	 So, if we have some pure/const and some regular calls in the
	 program we create .GLOBAL_VAR to avoid missing these
	 relations.  */
      if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
	  || (n_clobbered == 0
	      && ai->num_calls_found > 0
	      && ai->num_pure_const_calls_found > 0
	      && ai->num_calls_found > ai->num_pure_const_calls_found))
	create_global_var ();
    }

  /* Mark all call-clobbered symbols for renaming.  Since the initial
     rewrite into SSA ignored all call sites, we may need to rename
     .GLOBAL_VAR and the call-clobbered variables.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);

      /* If the function has calls to clobbering functions and
	 .GLOBAL_VAR has been created, make it an alias for all
	 call-clobbered variables.  */
      if (global_var && var != global_var)
	{
	  add_may_alias (var, global_var);
	  /* Variables with sub-variables should never be
	     call-clobbered directly here.  */
	  gcc_assert (!get_subvars_for_var (var));
	}

      mark_sym_for_renaming (var);
    }
}
1900
1901
/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
   This is needed because when checking for type conflicts we are
   interested in the alias set of the memory location pointed-to by
   PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.

   ALIAS_SET_ONLY is true if the query should rely on alias-set
   conflicts only and skip the structure-field escape analysis.  */
1910
static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set,
	     bool alias_set_only)
{
  tree mem;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = var_ann (ptr)->symbol_mem_tag;
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 2, pointer arguments may
     not point to anything else.  */
  if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;
      return false;
    }

  /* If VAR is a record or union type, PTR cannot point into VAR
     unless there is some explicit address operation in the
     program that can reference a field of the PTR's dereferenced
     type.  This also assumes that the types of both VAR and PTR are
     contained within the compilation unit, and that there is no fancy
     addressing arithmetic associated with any of the types
     involved.  */

  if ((mem_alias_set != 0) && (var_alias_set != 0))
    {
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the pointer_to
	 chain is not a record or union type.  */
      if ((!alias_set_only) &&
	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	{
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a little
	     too restrictive for the pointer type, need to allow pointers
	     to primitive types as long as those types cannot be pointers
	     to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	    /* Strip the *'s off.  */
	    {
	      ptr_type = TREE_TYPE (ptr_type);
	      ptr_star_count++;
	    }

	  /* There does not appear to be a better test to see if the
	     pointer type was one of the pointer to everything
	     types.  */

	  if (ptr_star_count > 0)
	    {
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
							    TREE_TYPE (ptr)))
		{
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
		  return false;
		}
	    }
	  else if (ptr_star_count == 0)
	    {
	      /* If PTR_TYPE was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;
	      return false;
	    }
	}
    }

  /* No disambiguation succeeded; conservatively assume they may alias.  */
  alias_stats.alias_mayalias++;
  return true;
}
2032
2033
2034 /* Add ALIAS to the set of variables that may alias VAR. */
2035
static void
add_may_alias (tree var, tree alias)
{
  size_t i;
  var_ann_t v_ann = get_var_ann (var);
  var_ann_t a_ann = get_var_ann (alias);
  tree al;

  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.
     NOTE(review): the disabled branch below asserts may_be_aliased
     (ALIAS) instead of forcing the bit; the live branch forces
     addressability -- presumably some caller adds aliases whose
     addressability has not been established yet.  Confirm before
     switching to the assert.  */
#if 1
  TREE_ADDRESSABLE (alias) = 1;
#else
  gcc_assert (may_be_aliased (alias));
#endif

  /* Lazily allocate the may-aliases vector.  */
  if (v_ann->may_aliases == NULL)
    v_ann->may_aliases = VEC_alloc (tree, gc, 2);

  /* Avoid adding duplicates.  */
  for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
    if (alias == al)
      return;

  VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
  /* Record on ALIAS that it appears in someone's may-alias set.  */
  a_ann->is_aliased = 1;
}
2065
2066
2067 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
2068
2069 static void
replace_may_alias(tree var,size_t i,tree new_alias)2070 replace_may_alias (tree var, size_t i, tree new_alias)
2071 {
2072 var_ann_t v_ann = var_ann (var);
2073 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
2074 }
2075
2076
2077 /* Mark pointer PTR as pointing to an arbitrary memory location. */
2078
2079 static void
set_pt_anything(tree ptr)2080 set_pt_anything (tree ptr)
2081 {
2082 struct ptr_info_def *pi = get_ptr_info (ptr);
2083
2084 pi->pt_anything = 1;
2085 pi->pt_vars = NULL;
2086
2087 /* The pointer used to have a name tag, but we now found it pointing
2088 to an arbitrary location. The name tag needs to be renamed and
2089 disassociated from PTR. */
2090 if (pi->name_mem_tag)
2091 {
2092 mark_sym_for_renaming (pi->name_mem_tag);
2093 pi->name_mem_tag = NULL_TREE;
2094 }
2095 }
2096
2097
/* Return true if STMT is an "escape" site from the current function.  Escape
   sites are those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:
2101
2102 1- STMT is a function call, or
2103 2- STMT is an __asm__ expression, or
2104 3- STMT is an assignment to a non-local variable, or
2105 4- STMT is a return statement.
2106
2107 Return the type of escape site found, if we found one, or NO_ESCAPE
2108 if none. */
2109
enum escape_type
is_escape_site (tree stmt)
{
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      /* Calls with no side effects are pure/const; classify them
	 separately so callers can treat them more leniently.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree lhs = TREE_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	{
	  tree from = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (TREE_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;

	  /* Same if the RHS is a conversion between a regular pointer and a
	     ref-all pointer since we can't track the SMT of the former.  */
	  if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
	      && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
	    return ESCAPE_BAD_CAST;
	}

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}
2176
2177 /* Create a new memory tag of type TYPE.
2178 Does NOT push it into the current binding. */
2179
static tree
create_tag_raw (enum tree_code code, tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);

  /* NOTE(review): NEW_TYPE is computed above but TYPE, not NEW_TYPE,
     is passed to build_decl, so the unqualified variant is unused
     beyond the attribute copy.  Looks like an oversight, but the
     tag's alias set depends on its type -- confirm before changing.  */
  tmp_var = build_decl (code, create_tmp_var_name (prefix),
			type);
  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* It doesn't start out global.  */
  MTAG_GLOBAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
2202
2203 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2204 is considered to represent all the pointers whose pointed-to types are
2205 in the same alias set class. Otherwise, the tag represents a single
2206 SSA_NAME pointer variable. */
2207
2208 static tree
create_memory_tag(tree type,bool is_type_tag)2209 create_memory_tag (tree type, bool is_type_tag)
2210 {
2211 var_ann_t ann;
2212 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2213 type, (is_type_tag) ? "SMT" : "NMT");
2214
2215 /* By default, memory tags are local variables. Alias analysis will
2216 determine whether they should be considered globals. */
2217 DECL_CONTEXT (tag) = current_function_decl;
2218
2219 /* Memory tags are by definition addressable. */
2220 TREE_ADDRESSABLE (tag) = 1;
2221
2222 ann = get_var_ann (tag);
2223 ann->symbol_mem_tag = NULL_TREE;
2224
2225 /* Add the tag to the symbol table. */
2226 add_referenced_var (tag);
2227
2228 return tag;
2229 }
2230
2231
2232 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2233 This is used if P_i has been found to point to a specific set of
2234 variables or to a non-aliased memory location like the address returned
2235 by malloc functions. */
2236
2237 static tree
get_nmt_for(tree ptr)2238 get_nmt_for (tree ptr)
2239 {
2240 struct ptr_info_def *pi = get_ptr_info (ptr);
2241 tree tag = pi->name_mem_tag;
2242
2243 if (tag == NULL_TREE)
2244 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2245 return tag;
2246 }
2247
2248
2249 /* Return the symbol memory tag associated to pointer PTR. A memory
2250 tag is an artificial variable that represents the memory location
2251 pointed-to by PTR. It is used to model the effects of pointer
2252 de-references on addressable variables.
2253
2254 AI points to the data gathered during alias analysis. This
2255 function populates the array AI->POINTERS. */
2256
static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  */
  if (PTR_IS_REF_ALL (ptr))
    {
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;
    }

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
      if (tag_set == curr->set)
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT, an
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->symbol_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  return tag;
}
2324
2325
2326 /* Create GLOBAL_VAR, an artificial global variable to act as a
2327 representative of all the variables that may be clobbered by function
2328 calls. */
2329
2330 static void
create_global_var(void)2331 create_global_var (void)
2332 {
2333 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2334 void_type_node);
2335 DECL_ARTIFICIAL (global_var) = 1;
2336 TREE_READONLY (global_var) = 0;
2337 DECL_EXTERNAL (global_var) = 1;
2338 TREE_STATIC (global_var) = 1;
2339 TREE_USED (global_var) = 1;
2340 DECL_CONTEXT (global_var) = NULL_TREE;
2341 TREE_THIS_VOLATILE (global_var) = 0;
2342 TREE_ADDRESSABLE (global_var) = 0;
2343
2344 create_var_ann (global_var);
2345 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2346 add_referenced_var (global_var);
2347 mark_sym_for_renaming (global_var);
2348 }
2349
2350
2351 /* Dump alias statistics on FILE. */
2352
2353 static void
dump_alias_stats(FILE * file)2354 dump_alias_stats (FILE *file)
2355 {
2356 const char *funcname
2357 = lang_hooks.decl_printable_name (current_function_decl, 2);
2358 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2359 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2360 fprintf (file, "Total alias mayalias results:\t%u\n",
2361 alias_stats.alias_mayalias);
2362 fprintf (file, "Total alias noalias results:\t%u\n",
2363 alias_stats.alias_noalias);
2364 fprintf (file, "Total simple queries:\t%u\n",
2365 alias_stats.simple_queries);
2366 fprintf (file, "Total simple resolved:\t%u\n",
2367 alias_stats.simple_resolved);
2368 fprintf (file, "Total TBAA queries:\t%u\n",
2369 alias_stats.tbaa_queries);
2370 fprintf (file, "Total TBAA resolved:\t%u\n",
2371 alias_stats.tbaa_resolved);
2372 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2373 alias_stats.structnoaddress_queries);
2374 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2375 alias_stats.structnoaddress_resolved);
2376 }
2377
2378
2379 /* Dump alias information on FILE. */
2380
2381 void
dump_alias_info(FILE * file)2382 dump_alias_info (FILE *file)
2383 {
2384 size_t i;
2385 const char *funcname
2386 = lang_hooks.decl_printable_name (current_function_decl, 2);
2387 referenced_var_iterator rvi;
2388 tree var;
2389
2390 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2391
2392 fprintf (file, "Aliased symbols\n\n");
2393
2394 FOR_EACH_REFERENCED_VAR (var, rvi)
2395 {
2396 if (may_be_aliased (var))
2397 dump_variable (file, var);
2398 }
2399
2400 fprintf (file, "\nDereferenced pointers\n\n");
2401
2402 FOR_EACH_REFERENCED_VAR (var, rvi)
2403 {
2404 var_ann_t ann = var_ann (var);
2405 if (ann->symbol_mem_tag)
2406 dump_variable (file, var);
2407 }
2408
2409 fprintf (file, "\nSymbol memory tags\n\n");
2410
2411 FOR_EACH_REFERENCED_VAR (var, rvi)
2412 {
2413 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2414 dump_variable (file, var);
2415 }
2416
2417 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2418
2419 fprintf (file, "SSA_NAME pointers\n\n");
2420 for (i = 1; i < num_ssa_names; i++)
2421 {
2422 tree ptr = ssa_name (i);
2423 struct ptr_info_def *pi;
2424
2425 if (ptr == NULL_TREE)
2426 continue;
2427
2428 pi = SSA_NAME_PTR_INFO (ptr);
2429 if (!SSA_NAME_IN_FREE_LIST (ptr)
2430 && pi
2431 && pi->name_mem_tag)
2432 dump_points_to_info_for (file, ptr);
2433 }
2434
2435 fprintf (file, "\nName memory tags\n\n");
2436
2437 FOR_EACH_REFERENCED_VAR (var, rvi)
2438 {
2439 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2440 dump_variable (file, var);
2441 }
2442
2443 fprintf (file, "\n");
2444 }
2445
2446
2447 /* Dump alias information on stderr. */
2448
void
debug_alias_info (void)
{
  /* Convenience entry point for calling from the debugger.  */
  dump_alias_info (stderr);
}
2454
2455
2456 /* Return the alias information associated with pointer T. It creates a
2457 new instance if none existed. */
2458
2459 struct ptr_info_def *
get_ptr_info(tree t)2460 get_ptr_info (tree t)
2461 {
2462 struct ptr_info_def *pi;
2463
2464 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2465
2466 pi = SSA_NAME_PTR_INFO (t);
2467 if (pi == NULL)
2468 {
2469 pi = GGC_NEW (struct ptr_info_def);
2470 memset ((void *)pi, 0, sizeof (*pi));
2471 SSA_NAME_PTR_INFO (t) = pi;
2472 }
2473
2474 return pi;
2475 }
2476
2477
2478 /* Dump points-to information for SSA_NAME PTR into FILE. */
2479
2480 void
dump_points_to_info_for(FILE * file,tree ptr)2481 dump_points_to_info_for (FILE *file, tree ptr)
2482 {
2483 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2484
2485 print_generic_expr (file, ptr, dump_flags);
2486
2487 if (pi)
2488 {
2489 if (pi->name_mem_tag)
2490 {
2491 fprintf (file, ", name memory tag: ");
2492 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2493 }
2494
2495 if (pi->is_dereferenced)
2496 fprintf (file, ", is dereferenced");
2497
2498 if (pi->value_escapes_p)
2499 fprintf (file, ", its value escapes");
2500
2501 if (pi->pt_anything)
2502 fprintf (file, ", points-to anything");
2503
2504 if (pi->pt_null)
2505 fprintf (file, ", points-to NULL");
2506
2507 if (pi->pt_vars)
2508 {
2509 unsigned ix;
2510 bitmap_iterator bi;
2511
2512 fprintf (file, ", points-to vars: { ");
2513 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2514 {
2515 print_generic_expr (file, referenced_var (ix), dump_flags);
2516 fprintf (file, " ");
2517 }
2518 fprintf (file, "}");
2519 }
2520 }
2521
2522 fprintf (file, "\n");
2523 }
2524
2525
2526 /* Dump points-to information for VAR into stderr. */
2527
void
debug_points_to_info_for (tree var)
{
  /* Convenience entry point for calling from the debugger.  */
  dump_points_to_info_for (stderr, var);
}
2533
2534
2535 /* Dump points-to information into FILE. NOTE: This function is slow, as
2536 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2537
2538 void
dump_points_to_info(FILE * file)2539 dump_points_to_info (FILE *file)
2540 {
2541 basic_block bb;
2542 block_stmt_iterator si;
2543 ssa_op_iter iter;
2544 const char *fname =
2545 lang_hooks.decl_printable_name (current_function_decl, 2);
2546 referenced_var_iterator rvi;
2547 tree var;
2548
2549 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2550
2551 /* First dump points-to information for the default definitions of
2552 pointer variables. This is necessary because default definitions are
2553 not part of the code. */
2554 FOR_EACH_REFERENCED_VAR (var, rvi)
2555 {
2556 if (POINTER_TYPE_P (TREE_TYPE (var)))
2557 {
2558 tree def = default_def (var);
2559 if (def)
2560 dump_points_to_info_for (file, def);
2561 }
2562 }
2563
2564 /* Dump points-to information for every pointer defined in the program. */
2565 FOR_EACH_BB (bb)
2566 {
2567 tree phi;
2568
2569 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2570 {
2571 tree ptr = PHI_RESULT (phi);
2572 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2573 dump_points_to_info_for (file, ptr);
2574 }
2575
2576 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2577 {
2578 tree stmt = bsi_stmt (si);
2579 tree def;
2580 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2581 if (POINTER_TYPE_P (TREE_TYPE (def)))
2582 dump_points_to_info_for (file, def);
2583 }
2584 }
2585
2586 fprintf (file, "\n");
2587 }
2588
2589
/* Dump points-to information for the current function to stderr.  */
2591
void
debug_points_to_info (void)
{
  /* Convenience entry point for calling from the debugger.  */
  dump_points_to_info (stderr);
}
2597
2598 /* Dump to FILE the list of variables that may be aliasing VAR. */
2599
2600 void
dump_may_aliases_for(FILE * file,tree var)2601 dump_may_aliases_for (FILE *file, tree var)
2602 {
2603 VEC(tree, gc) *aliases;
2604
2605 if (TREE_CODE (var) == SSA_NAME)
2606 var = SSA_NAME_VAR (var);
2607
2608 aliases = var_ann (var)->may_aliases;
2609 if (aliases)
2610 {
2611 size_t i;
2612 tree al;
2613 fprintf (file, "{ ");
2614 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2615 {
2616 print_generic_expr (file, al, dump_flags);
2617 fprintf (file, " ");
2618 }
2619 fprintf (file, "}");
2620 }
2621 }
2622
2623
2624 /* Dump to stderr the list of variables that may be aliasing VAR. */
2625
void
debug_may_aliases_for (tree var)
{
  /* Convenience entry point for calling from the debugger.  */
  dump_may_aliases_for (stderr, var);
}
2631
2632 /* Return true if VAR may be aliased. */
2633
2634 bool
may_be_aliased(tree var)2635 may_be_aliased (tree var)
2636 {
2637 /* Obviously. */
2638 if (TREE_ADDRESSABLE (var))
2639 return true;
2640
2641 /* Globally visible variables can have their addresses taken by other
2642 translation units. */
2643
2644 if (MTAG_P (var)
2645 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2646 return true;
2647 else if (!MTAG_P (var)
2648 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2649 return true;
2650
2651 /* Automatic variables can't have their addresses escape any other way.
2652 This must be after the check for global variables, as extern declarations
2653 do not have TREE_STATIC set. */
2654 if (!TREE_STATIC (var))
2655 return false;
2656
2657 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2658 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2659 we can only be sure the variable isn't addressable if it's local to the
2660 current function. */
2661 if (flag_unit_at_a_time)
2662 return false;
2663 if (decl_function_context (var) == current_function_decl)
2664 return false;
2665
2666 return true;
2667 }
2668
2669
2670 /* Given two symbols return TRUE if one is in the alias set of the other. */
2671 bool
is_aliased_with(tree tag,tree sym)2672 is_aliased_with (tree tag, tree sym)
2673 {
2674 size_t i;
2675 VEC(tree,gc) *aliases;
2676 tree al;
2677
2678 if (var_ann (sym)->is_aliased)
2679 {
2680 aliases = var_ann (tag)->may_aliases;
2681
2682 if (aliases == NULL)
2683 return false;
2684
2685 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2686 if (al == sym)
2687 return true;
2688 }
2689 else
2690 {
2691 aliases = var_ann (sym)->may_aliases;
2692
2693 if (aliases == NULL)
2694 return false;
2695
2696 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2697 if (al == tag)
2698 return true;
2699 }
2700
2701 return false;
2702 }
2703
2704
2705 /* Given two tags return TRUE if their may-alias sets intersect. */
2706
2707 bool
may_aliases_intersect(tree tag1,tree tag2)2708 may_aliases_intersect (tree tag1, tree tag2)
2709 {
2710 struct pointer_set_t *set1 = pointer_set_create ();
2711 unsigned i;
2712 VEC(tree,gc) *may_aliases1 = may_aliases (tag1);
2713 VEC(tree,gc) *may_aliases2 = may_aliases (tag2);
2714 tree sym;
2715
2716 /* Insert all the symbols from the first may-alias set into the
2717 pointer-set. */
2718 for (i = 0; VEC_iterate (tree, may_aliases1, i, sym); i++)
2719 pointer_set_insert (set1, sym);
2720
2721 /* Go through the second may-alias set and check if it contains symbols that
2722 are common with the first set. */
2723 for (i = 0; VEC_iterate (tree, may_aliases2, i, sym); i++)
2724 if (pointer_set_contains (set1, sym))
2725 {
2726 pointer_set_destroy (set1);
2727 return true;
2728 }
2729
2730 pointer_set_destroy (set1);
2731 return false;
2732 }
2733
2734
2735 /* The following is based on code in add_stmt_operand to ensure that the
2736 same defs/uses/vdefs/vuses will be found after replacing a reference
2737 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2738 is the address of var. Return a memtag for the ptr, after adding the
2739 proper may_aliases to it (which are the aliases of var, if it has any,
2740 or var itself). */
2741
2742 static tree
add_may_alias_for_new_tag(tree tag,tree var)2743 add_may_alias_for_new_tag (tree tag, tree var)
2744 {
2745 var_ann_t v_ann = var_ann (var);
2746 VEC(tree, gc) *aliases = v_ann->may_aliases;
2747
2748 /* Case 1: |aliases| == 1 */
2749 if ((aliases != NULL)
2750 && (VEC_length (tree, aliases) == 1))
2751 {
2752 tree ali = VEC_index (tree, aliases, 0);
2753
2754 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2755 return ali;
2756 }
2757
2758 /* Case 2: |aliases| == 0 */
2759 if (aliases == NULL)
2760 add_may_alias (tag, var);
2761 else
2762 {
2763 /* Case 3: |aliases| > 1 */
2764 unsigned i;
2765 tree al;
2766
2767 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2768 add_may_alias (tag, al);
2769 }
2770
2771 return tag;
2772 }
2773
2774 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2775 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
2776 according to the location accessed by EXPR.
2777
2778 Note, the set of aliases represented by the new symbol tag are not marked
2779 for renaming. */
2780
2781 void
new_type_alias(tree ptr,tree var,tree expr)2782 new_type_alias (tree ptr, tree var, tree expr)
2783 {
2784 var_ann_t p_ann = var_ann (ptr);
2785 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2786 tree tag;
2787 subvar_t svars;
2788 tree ali = NULL_TREE;
2789 HOST_WIDE_INT offset, size, maxsize;
2790 tree ref;
2791
2792 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2793 gcc_assert (!MTAG_P (var));
2794
2795 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2796 gcc_assert (ref);
2797
2798 tag = create_memory_tag (tag_type, true);
2799 p_ann->symbol_mem_tag = tag;
2800
2801 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2802 subvars, add the subvars to the tag instead of the actual var. */
2803 if (var_can_have_subvars (var)
2804 && (svars = get_subvars_for_var (var)))
2805 {
2806 subvar_t sv;
2807 VEC (tree, heap) *overlaps = NULL;
2808 unsigned int len;
2809
2810 for (sv = svars; sv; sv = sv->next)
2811 {
2812 bool exact;
2813
2814 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2815 VEC_safe_push (tree, heap, overlaps, sv->var);
2816 }
2817 len = VEC_length (tree, overlaps);
2818 if (dump_file && (dump_flags & TDF_DETAILS))
2819 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
2820 gcc_assert (len);
2821
2822 if (len == 1)
2823 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
2824 else if (len > 1)
2825 {
2826 unsigned int k;
2827 tree sv_var;
2828
2829 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
2830 {
2831 ali = add_may_alias_for_new_tag (tag, sv_var);
2832
2833 if (ali != tag)
2834 {
2835 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
2836 took place. Since more than one svar was found, we add
2837 'ali' as one of the may_aliases of the new tag. */
2838 add_may_alias (tag, ali);
2839 ali = tag;
2840 }
2841 }
2842 }
2843 }
2844 else
2845 ali = add_may_alias_for_new_tag (tag, var);
2846
2847 p_ann->symbol_mem_tag = ali;
2848 TREE_READONLY (tag) = TREE_READONLY (var);
2849 MTAG_GLOBAL (tag) = is_global_var (var);
2850 }
2851
2852 /* This represents the used range of a variable. */
2853
typedef struct used_part
{
  /* Lowest offset referenced so far (starts at INT_MAX, shrinks as
     uses are recorded; presumably a bit/byte offset into the
     variable -- confirm against callers).  */
  HOST_WIDE_INT minused;
  /* Highest offset referenced so far (starts at 0, grows).  */
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
  /* True if the structure is only written to or taken its address.  */
  bool write_only;
} *used_part_t;
2868
2869 /* An array of used_part structures, indexed by variable uid. */
2870
2871 static htab_t used_portions;
2872
struct used_part_map
{
  /* Variable uid; doubles as the hash value for the table.  */
  unsigned int uid;
  /* The used_part record for that variable.  */
  used_part_t to;
};
2878
2879 /* Return true if the uid in the two used part maps are equal. */
2880
2881 static int
used_part_map_eq(const void * va,const void * vb)2882 used_part_map_eq (const void *va, const void *vb)
2883 {
2884 const struct used_part_map *a = (const struct used_part_map *) va;
2885 const struct used_part_map *b = (const struct used_part_map *) vb;
2886 return (a->uid == b->uid);
2887 }
2888
2889 /* Hash a from uid in a used_part_map. */
2890
2891 static unsigned int
used_part_map_hash(const void * item)2892 used_part_map_hash (const void *item)
2893 {
2894 return ((const struct used_part_map *)item)->uid;
2895 }
2896
2897 /* Free a used part map element. */
2898
2899 static void
free_used_part_map(void * item)2900 free_used_part_map (void *item)
2901 {
2902 free (((struct used_part_map *)item)->to);
2903 free (item);
2904 }
2905
2906 /* Lookup a used_part structure for a UID. */
2907
2908 static used_part_t
up_lookup(unsigned int uid)2909 up_lookup (unsigned int uid)
2910 {
2911 struct used_part_map *h, in;
2912 in.uid = uid;
2913 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2914 if (!h)
2915 return NULL;
2916 return h->to;
2917 }
2918
2919 /* Insert the pair UID, TO into the used part hashtable. */
2920
static void
up_insert (unsigned int uid, used_part_t to)
{
  struct used_part_map *h;
  void **loc;

  h = XNEW (struct used_part_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (used_portions, h,
				  uid, INSERT);
  /* Replace any existing mapping for UID.  Only the map cell is
     freed, not the used_part it pointed to -- callers typically
     re-insert the same used_part they obtained from up_lookup, so
     freeing it here would break them.  NOTE(review): if a caller
     ever inserts a *fresh* used_part over an old one, the old
     used_part leaks -- confirm against callers.  */
  if (*loc != NULL)
    free (*loc);
  *(struct used_part_map **) loc = h;
}
2936
2937
2938 /* Given a variable uid, UID, get or create the entry in the used portions
2939 table for the variable. */
2940
2941 static used_part_t
get_or_create_used_part_for(size_t uid)2942 get_or_create_used_part_for (size_t uid)
2943 {
2944 used_part_t up;
2945 if ((up = up_lookup (uid)) == NULL)
2946 {
2947 up = XCNEW (struct used_part);
2948 up->minused = INT_MAX;
2949 up->maxused = 0;
2950 up->explicit_uses = false;
2951 up->implicit_uses = false;
2952 up->write_only = true;
2953 }
2954
2955 return up;
2956 }
2957
2958
2959 /* Create and return a structure sub-variable for field type FIELD at
2960 offset OFFSET, with size SIZE, of variable VAR. */
2961
static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
	    unsigned HOST_WIDE_INT size)
{
  var_ann_t ann;
  tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");

  /* We need to copy the various flags from VAR to SUBVAR, so that
     they are is_global_var iff the original variable was.
     NOTE(review): MTAG_GLOBAL is copied from DECL_EXTERNAL rather
     than from is_global_var; TREE_PUBLIC/TREE_STATIC are copied
     separately below, so this appears intentional -- confirm.  */
  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
  MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
  TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
  TREE_STATIC (subvar) = TREE_STATIC (var);
  TREE_READONLY (subvar) = TREE_READONLY (var);
  TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);

  /* Add the new variable to REFERENCED_VARS.  */
  ann = get_var_ann (subvar);
  ann->symbol_mem_tag = NULL;
  add_referenced_var (subvar);

  /* Record which region of VAR this sub-variable covers.  */
  SFT_PARENT_VAR (subvar) = var;
  SFT_OFFSET (subvar) = offset;
  SFT_SIZE (subvar) = size;
  return subvar;
}
2987
2988
/* Given an aggregate VAR, create the subvariables that represent its
   fields.  Subvariables are only created for the portion of VAR that
   was actually read in this function, as recorded in the used-portions
   table by find_used_portions.  */

static void
create_overlap_variables_for (tree var)
{
  VEC(fieldoff_s,heap) *fieldstack = NULL;
  used_part_t up;
  size_t uid = DECL_UID (var);

  /* No recorded uses, or only writes: no subvars needed.  */
  up = up_lookup (uid);
  if (!up
      || up->write_only)
    return;

  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
  if (VEC_length (fieldoff_s, fieldstack) != 0)
    {
      subvar_t *subvars;
      fieldoff_s *fo;
      bool notokay = false;
      int fieldcount = 0;
      int i;
      HOST_WIDE_INT lastfooffset = -1;
      HOST_WIDE_INT lastfosize = -1;
      tree lastfotype = NULL_TREE;

      /* Not all fields have DECL_SIZE set, and those that don't, we don't
	 know their size, and thus, can't handle.
	 The same is true of fields with DECL_SIZE that is not an integer
	 constant (such as variable sized fields).
	 Fields with offsets which are not constant will have an offset < 0
	 We *could* handle fields that are constant sized arrays, but
	 currently don't.  Doing so would require some extra changes to
	 tree-ssa-operands.c.  */

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	  fieldcount++;
	}

      /* The current heuristic we use is as follows:
	 If the variable has no used portions in this function, no
	 structure vars are created for it.
	 Otherwise,
	 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
	 we always create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 some explicit uses, we create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 no explicit uses, we do not create structure vars for them.  */

      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
	  && !up->explicit_uses)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Variable ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
	    }
	  notokay = true;
	}

      /* Bail out, if we can't create overlap variables.  */
      if (notokay)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return;
	}

      /* Otherwise, create the variables.  */
      subvars = lookup_subvars_for_var (var);

      sort_fieldstack (fieldstack);

      /* Walk the sorted fieldstack from the last element down, so that
	 the subvar list ends up in field order.  */
      for (i = VEC_length (fieldoff_s, fieldstack);
	   VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
	{
	  subvar_t sv;
	  HOST_WIDE_INT fosize;
	  tree currfotype;

	  fosize = TREE_INT_CST_LOW (fo->size);
	  currfotype = fo->type;

	  /* If this field isn't in the used portion,
	     or it has the exact same offset and size as the last
	     field, skip it.  */

	  if (((fo->offset <= up->minused
		&& fo->offset + fosize <= up->minused)
	       || fo->offset >= up->maxused)
	      || (fo->offset == lastfooffset
		  && fosize == lastfosize
		  && currfotype == lastfotype))
	    continue;
	  sv = GGC_NEW (struct subvar);
	  sv->next = *subvars;
	  sv->var = create_sft (var, fo->type, fo->offset, fosize);

	  if (dump_file)
	    {
	      fprintf (dump_file, "structure field tag %s created for var %s",
		       get_name (sv->var), get_name (var));
	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
		       SFT_OFFSET (sv->var));
	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
		       SFT_SIZE (sv->var));
	      fprintf (dump_file, "\n");
	    }

	  lastfotype = currfotype;
	  lastfooffset = fo->offset;
	  lastfosize = fosize;
	  *subvars = sv;
	}

      /* Once we have created subvars, the original is no longer call
	 clobbered on its own.  Its call clobbered status depends
	 completely on the call clobbered status of the subvars.

	 add_referenced_var in the above loop will take care of
	 marking subvars of global variables as call clobbered for us
	 to start, since they are global as well.  */
      clear_call_clobbered (var);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
}
3127
3128
/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.

   walk_tree callback: TP is the tree being visited, WALK_SUBTREES is
   cleared to stop the walk once a reference has been fully handled.
   LHS_P is non-NULL when *TP is being visited as the left-hand side of
   an assignment (see the MODIFY_EXPR case), in which case the access is
   a write and does not clear the write_only flag.  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
  switch (TREE_CODE (*tp))
    {
    case MODIFY_EXPR:
      /* Recurse manually here to track whether the use is in the
	 LHS of an assignment.  */
      find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
      return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitmaxsize;
	HOST_WIDE_INT bitpos;
	tree ref;
	ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
	/* Only record accesses to decls that can have subvars and whose
	   maximum extent is known (bitmaxsize == -1 means unknown).  */
	if (DECL_P (ref)
	    && var_can_have_subvars (ref)
	    && bitmaxsize != -1)
	  {
	    size_t uid = DECL_UID (ref);
	    used_part_t up;

	    up = get_or_create_used_part_for (uid);

	    /* Widen the used range to include this access.  */
	    if (bitpos <= up->minused)
	      up->minused = bitpos;
	    if ((bitpos + bitmaxsize >= up->maxused))
	      up->maxused = bitpos + bitmaxsize;

	    /* An access with an exactly known extent is "explicit";
	       otherwise only the maximum extent is known.  */
	    if (bitsize == bitmaxsize)
	      up->explicit_uses = true;
	    else
	      up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;
	    up_insert (uid, up);

	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
      /* This is here to make sure we mark the entire base variable as used
	 when you take its address.  Because our used portion analysis is
	 simple, we aren't looking at casts or pointer arithmetic to see what
	 happens when you take the address.  */
    case ADDR_EXPR:
      {
	tree var = get_base_address (TREE_OPERAND (*tp, 0));

	if (var
	    && DECL_P (var)
	    && DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* Conservatively mark the whole variable as used.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
    case CALL_EXPR:
      {
	/* Walk only non-ADDR_EXPR arguments; address-taken arguments
	   are deliberately skipped here (the ADDR_EXPR case above
	   would mark the whole variable used).  */
	tree *arg;
	for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
	  {
	    if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
	      find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
	  }
	*walk_subtrees = 0;
	return NULL_TREE;
      }
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* A bare reference to the decl uses the whole variable.  */
	tree var = *tp;
	if (DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;

    default:
      break;

    }
  return NULL_TREE;
}
3256
/* Create structure field variables for structures used in this function.
   First walks every statement to record which portions of which
   aggregates are used (filling the used_portions table), then creates
   overlap subvariables for each eligible referenced variable.
   Returns 0 (no extra TODO flags).  */

static unsigned int
create_structure_vars (void)
{
  basic_block bb;
  safe_referenced_var_iterator rvi;
  VEC (tree, heap) *varvec = NULL;
  tree var;

  used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
                               free_used_part_map);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					find_used_portions,
					NULL);
	}
    }
  /* Safe iteration: create_overlap_variables_for adds new referenced
     vars (the SFTs) while we iterate.  */
  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
    {
      /* The C++ FE creates vars without DECL_SIZE set, for some reason.  */
      if (var
	  && DECL_SIZE (var)
	  && var_can_have_subvars (var)
	  && !MTAG_P (var)
	  && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	create_overlap_variables_for (var);
    }
  /* htab_delete runs free_used_part_map on every entry.  */
  htab_delete (used_portions);
  VEC_free (tree, heap, varvec);
  return 0;
}
3294
3295 static bool
gate_structure_vars(void)3296 gate_structure_vars (void)
3297 {
3298 return flag_tree_salias != 0;
3299 }
3300
/* Pass descriptor for the structure-field-variable ("salias") pass.
   Gated by gate_structure_vars; requires only the CFG.  */

struct tree_opt_pass pass_create_structure_vars =
{
  "salias",		 /* name */
  gate_structure_vars,	 /* gate */
  create_structure_vars, /* execute */
  NULL,			 /* sub */
  NULL,			 /* next */
  0,			 /* static_pass_number */
  0,			 /* tv_id */
  PROP_cfg,		 /* properties_required */
  0,			 /* properties_provided */
  0,			 /* properties_destroyed */
  0,			 /* todo_flags_start */
  TODO_dump_func,	 /* todo_flags_finish */
  0			 /* letter */
};
3317
/* Reset the DECL_CALL_CLOBBERED flags on our referenced vars.  In
   theory, this only needs to be done for globals.  Returns 0 (no extra
   TODO flags).  */

static unsigned int
reset_cc_flags (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    DECL_CALL_CLOBBERED (var) = false;
  return 0;
}
3331
/* Pass descriptor for clearing call-clobbered flags.  Anonymous
   (no name), ungated; requires referenced vars and the CFG.  */

struct tree_opt_pass pass_reset_cc_flags =
{
  NULL,		 /* name */
  NULL,		 /* gate */
  reset_cc_flags, /* execute */
  NULL,		 /* sub */
  NULL,		 /* next */
  0,		 /* static_pass_number */
  0,		 /* tv_id */
  PROP_referenced_vars |PROP_cfg, /* properties_required */
  0,		 /* properties_provided */
  0,		 /* properties_destroyed */
  0,		 /* todo_flags_start */
  0,		 /* todo_flags_finish */
  0		 /* letter */
};
3348