1 /* SSA operands management for trees.
2    Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING.  If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37 
38 /* This file contains the code required to manage the operands cache of the
39    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
40    annotation.  This cache contains operands that will be of interest to
41    optimizers and other passes wishing to manipulate the IL.
42 
43    The operand types are broken up into REAL and VIRTUAL operands.  The real
44    operands are represented as pointers into the stmt's operand tree.  Thus
45    any manipulation of the real operands will be reflected in the actual tree.
46    Virtual operands are represented solely in the cache, although the base
47    variable for the SSA_NAME may or may not occur in the stmt's tree.
48    Manipulation of the virtual operands will not be reflected in the stmt tree.
49 
50    The routines in this file are concerned with creating this operand cache
51    from a stmt tree.
52 
53    The operand tree is then parsed by the various get_* routines which look
54    through the stmt tree for the occurrence of operands which may be of
55    interest, and calls are made to the append_* routines whenever one is
56    found.  There are 5 of these routines, each representing one of the
57    5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58    Virtual Must Defs.
59 
60    The append_* routines check for duplication, and simply keep a list of
61    unique objects for each operand type in the build_* extendable vectors.
62 
63    Once the stmt tree is completely parsed, the finalize_ssa_operands()
64    routine is called, which proceeds to perform the finalization routine
65    on each of the 5 operand vectors which have been built up.
66 
67    If the stmt had a previous operand cache, the finalization routines
68    attempt to match up the new operands with the old ones.  If it's a perfect
69    match, the old vector is simply reused.  If it isn't a perfect match, then
70    a new vector is created and the new operands are placed there.  For
71    virtual operands, if the previous cache had SSA_NAME version of a
72    variable, and that same variable occurs in the same operands cache, then
73    the new cache vector will also get the same SSA_NAME.
74 
75   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76   vector for VUSE, then the new vector will also be modified such that
77   it contains 'a_5' rather than 'a'.  */
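
/* A rough illustration only (the exact operands depend on the alias
   information computed for the function): for a statement such as

	*p = b + 1;

   where 'b' is a GIMPLE register and '*p' may point to the aliased
   variable 'x', the cache would typically contain a real USE of 'b' and
   a V_MAY_DEF of 'x' (or of p's memory tag).  A direct store 'x = 0' to
   a non-aliased, non-register 'x' would instead produce a V_MUST_DEF of
   'x', and a load 'b = x' a VUSE of 'x'.  */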
78 
79 /* Flags to describe operand properties in helpers.  */
80 
81 /* By default, operands are loaded.  */
82 #define opf_none	0
83 
84 /* Operand is the target of an assignment expression or a
85    call-clobbered variable.  */
86 #define opf_is_def 	(1 << 0)
87 
88 /* Operand is the target of an assignment expression.  */
89 #define opf_kill_def 	(1 << 1)
90 
91 /* No virtual operands should be created in the expression.  This is used
92    when traversing ADDR_EXPR nodes which have different semantics than
93    other expressions.  Inside an ADDR_EXPR node, the only operands that we
94    need to consider are indices into arrays.  For instance, &a.b[i] should
95    generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96    VUSE for 'b'.  */
97 #define opf_no_vops 	(1 << 2)
98 
99 /* Operand is a "non-specific" kill for call-clobbers and such.  This
100    is used to distinguish "reset the world" events from explicit
101    MODIFY_EXPRs.  */
102 #define opf_non_specific  (1 << 3)
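
/* As a sketch of how these flags are typically combined (the exact
   combinations are chosen by the expression walkers below): the
   left-hand side of an assignment is scanned with opf_is_def, plus
   opf_kill_def when the store completely overwrites the variable, the
   right-hand side with opf_none, and the interior of an ADDR_EXPR with
   opf_no_vops, roughly

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   opf_is_def | opf_kill_def);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

   for a MODIFY_EXPR 'expr'.  */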
103 
104 /* Array for building all the def operands.  */
105 static VEC(tree,heap) *build_defs;
106 
107 /* Array for building all the use operands.  */
108 static VEC(tree,heap) *build_uses;
109 
110 /* Array for building all the V_MAY_DEF operands.  */
111 static VEC(tree,heap) *build_v_may_defs;
112 
113 /* Array for building all the VUSE operands.  */
114 static VEC(tree,heap) *build_vuses;
115 
116 /* Array for building all the V_MUST_DEF operands.  */
117 static VEC(tree,heap) *build_v_must_defs;
118 
119 /* True if the SSA operand cache is active.  */
120 static bool ops_active = false;
121 
122 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
123 static unsigned operand_memory_index;
124 
125 static void get_expr_operands (tree, tree *, int);
126 
127 static def_optype_p free_defs = NULL;
128 static use_optype_p free_uses = NULL;
129 static vuse_optype_p free_vuses = NULL;
130 static maydef_optype_p free_maydefs = NULL;
131 static mustdef_optype_p free_mustdefs = NULL;
132 
133 /* Allocates operand OP of the given TYPE from the appropriate free list,
134    or from newly allocated operand memory if the free list is empty.  */
135 
136 #define ALLOC_OPTYPE(OP, TYPE)				\
137   do							\
138     {							\
139       TYPE##_optype_p ret = free_##TYPE##s;		\
140       if (ret)						\
141 	free_##TYPE##s = ret->next;			\
142       else						\
143 	ret = ssa_operand_alloc (sizeof (*ret));	\
144       (OP) = ret;					\
145     } while (0)
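
/* For instance, ALLOC_OPTYPE (ptr, def) expands (roughly) to

      def_optype_p ret = free_defs;
      if (ret)
	free_defs = ret->next;
      else
	ret = ssa_operand_alloc (sizeof (*ret));
      (ptr) = ret;

   i.e. a node is recycled from the def free list when one is available
   and carved out of the operand memory pool otherwise.  */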
146 
147 /* Return the DECL_UID of the base variable of T.  */
148 
149 static inline unsigned
150 get_name_decl (tree t)
151 {
152   if (TREE_CODE (t) != SSA_NAME)
153     return DECL_UID (t);
154   else
155     return DECL_UID (SSA_NAME_VAR (t));
156 }
157 
158 
159 /* Comparison function for qsort used in operand_build_sort_virtual.  */
160 
161 static int
162 operand_build_cmp (const void *p, const void *q)
163 {
164   tree e1 = *((const tree *)p);
165   tree e2 = *((const tree *)q);
166   unsigned int u1,u2;
167 
168   u1 = get_name_decl (e1);
169   u2 = get_name_decl (e2);
170 
171   /* We want to sort in ascending order.  They can never be equal.  */
172 #ifdef ENABLE_CHECKING
173   gcc_assert (u1 != u2);
174 #endif
175   return (u1 > u2 ? 1 : -1);
176 }
177 
178 
179 /* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */
180 
181 static inline void
182 operand_build_sort_virtual (VEC(tree,heap) *list)
183 {
184   int num = VEC_length (tree, list);
185 
186   if (num < 2)
187     return;
188 
189   if (num == 2)
190     {
191       if (get_name_decl (VEC_index (tree, list, 0))
192 	  > get_name_decl (VEC_index (tree, list, 1)))
193 	{
194 	  /* Swap elements if in the wrong order.  */
195 	  tree tmp = VEC_index (tree, list, 0);
196 	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197 	  VEC_replace (tree, list, 1, tmp);
198 	}
199       return;
200     }
201 
202   /* There are 3 or more elements, call qsort.  */
203   qsort (VEC_address (tree, list),
204 	 VEC_length (tree, list),
205 	 sizeof (tree),
206 	 operand_build_cmp);
207 }
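
/* For example (a sketch, with made-up names), if LIST holds SSA names
   for 'c', 'a' and 'b' whose base variables have DECL_UIDs 7, 3 and 5,
   the vector is reordered into the 3, 5, 7 order.  Keeping the build
   vectors sorted by DECL_UID is what lets the finalize routines below
   merge them against the previous (also sorted) operand lists in a
   single linear walk.  */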
208 
209 
210 /*  Return true if the SSA operands cache is active.  */
211 
212 bool
213 ssa_operands_active (void)
214 {
215   return ops_active;
216 }
217 
218 
219 /* Structure storing statistics on how many call clobbers we have, and
220    how many were avoided.  */
221 
222 static struct
223 {
224   /* Number of call-clobbered ops we attempt to add to calls in
225      add_call_clobber_ops.  */
226   unsigned int clobbered_vars;
227 
228   /* Number of write-clobbers (V_MAY_DEFs) avoided by using
229      not_written information.  */
230   unsigned int static_write_clobbers_avoided;
231 
232   /* Number of reads (VUSEs) avoided by using not_read information.  */
233   unsigned int static_read_clobbers_avoided;
234 
235   /* Number of write-clobbers avoided because the variable can't escape to
236      this call.  */
237   unsigned int unescapable_clobbers_avoided;
238 
239   /* Number of read-only uses we attempt to add to calls in
240      add_call_read_ops.  */
241   unsigned int readonly_clobbers;
242 
243   /* Number of read-only uses we avoid using not_read information.  */
244   unsigned int static_readonly_clobbers_avoided;
245 } clobber_stats;
246 
247 
248 /* Initialize the operand cache routines.  */
249 
250 void
251 init_ssa_operands (void)
252 {
253   build_defs = VEC_alloc (tree, heap, 5);
254   build_uses = VEC_alloc (tree, heap, 10);
255   build_vuses = VEC_alloc (tree, heap, 25);
256   build_v_may_defs = VEC_alloc (tree, heap, 25);
257   build_v_must_defs = VEC_alloc (tree, heap, 25);
258 
259   gcc_assert (operand_memory == NULL);
260   operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261   ops_active = true;
262   memset (&clobber_stats, 0, sizeof (clobber_stats));
263 }
264 
265 
266 /* Dispose of anything required by the operand routines.  */
267 
268 void
269 fini_ssa_operands (void)
270 {
271   struct ssa_operand_memory_d *ptr;
272   VEC_free (tree, heap, build_defs);
273   VEC_free (tree, heap, build_uses);
274   VEC_free (tree, heap, build_v_must_defs);
275   VEC_free (tree, heap, build_v_may_defs);
276   VEC_free (tree, heap, build_vuses);
277   free_defs = NULL;
278   free_uses = NULL;
279   free_vuses = NULL;
280   free_maydefs = NULL;
281   free_mustdefs = NULL;
282   while ((ptr = operand_memory) != NULL)
283     {
284       operand_memory = operand_memory->next;
285       ggc_free (ptr);
286     }
287 
288   ops_active = false;
289 
290   if (dump_file && (dump_flags & TDF_STATS))
291     {
292       fprintf (dump_file, "Original clobbered vars:%d\n",
293 	       clobber_stats.clobbered_vars);
294       fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 	       clobber_stats.static_write_clobbers_avoided);
296       fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 	       clobber_stats.static_read_clobbers_avoided);
298       fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 	       clobber_stats.unescapable_clobbers_avoided);
300       fprintf (dump_file, "Original read-only clobbers:%d\n",
301 	       clobber_stats.readonly_clobbers);
302       fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 	       clobber_stats.static_readonly_clobbers_avoided);
304     }
305 }
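
/* A sketch of the typical lifecycle of this cache; the actual call
   sites live in the SSA passes and the statement update machinery, not
   in this file:

      init_ssa_operands ();
      ...
      FOR_EACH_BB (bb)
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  update_stmt_operands (bsi_stmt (bsi));
      ...
      fini_ssa_operands ();
  */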
306 
307 
308 /* Return SIZE bytes of memory for operands from the operand memory pool.  */
309 
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313   char *ptr;
314   if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
315     {
316       struct ssa_operand_memory_d *ptr;
317       ptr = GGC_NEW (struct ssa_operand_memory_d);
318       ptr->next = operand_memory;
319       operand_memory = ptr;
320       operand_memory_index = 0;
321     }
322   ptr = &(operand_memory->mem[operand_memory_index]);
323   operand_memory_index += size;
324   return ptr;
325 }
326 
327 
328 
329 /* This routine makes sure that PTR is in an immediate use list, and makes
330    sure the stmt pointer is set to the current stmt.  */
331 
332 static inline void
333 set_virtual_use_link (use_operand_p ptr, tree stmt)
334 {
335   /*  fold_stmt may have changed the stmt pointers.  */
336   if (ptr->stmt != stmt)
337     ptr->stmt = stmt;
338 
339   /* If this use isn't in a list, add it to the correct list.  */
340   if (!ptr->prev)
341     link_imm_use (ptr, *(ptr->use));
342 }
343 
344 /* Appends ELT after TO, and moves the TO pointer to ELT.  */
345 
346 #define APPEND_OP_AFTER(ELT, TO)	\
347   do					\
348     {					\
349       (TO)->next = (ELT);		\
350       (TO) = (ELT);			\
351     } while (0)
352 
353 /* Appends the head of list FROM after TO, and moves both pointers
354    to their successors.  */
355 
356 #define MOVE_HEAD_AFTER(FROM, TO)	\
357   do					\
358     {					\
359       APPEND_OP_AFTER (FROM, TO);	\
360       (FROM) = (FROM)->next;		\
361     } while (0)
362 
363 /* Moves OP to the appropriate freelist.  OP is set to its successor.  */
364 
365 #define MOVE_HEAD_TO_FREELIST(OP, TYPE)			\
366   do							\
367     {							\
368       TYPE##_optype_p next = (OP)->next;		\
369       (OP)->next = free_##TYPE##s;			\
370       free_##TYPE##s = (OP);				\
371       (OP) = next;					\
372     } while (0)
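
/* For instance, MOVE_HEAD_TO_FREELIST (old_ops, def) expands (roughly) to

      def_optype_p next = old_ops->next;
      old_ops->next = free_defs;
      free_defs = old_ops;
      old_ops = next;

   i.e. the head node is pushed back onto the def free list and OLD_OPS
   advances to the following node.  */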
373 
374 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
375    of immediate uses.  STMT is the current statement.  */
376 
377 #define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
378   do							\
379     {							\
380       (USE_PTR)->use = (VAL);				\
381       link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
382     } while (0)
383 
384 /* Adds OP to the list of defs after LAST, and moves
385    LAST to the new element.  */
386 
387 static inline void
388 add_def_op (tree *op, def_optype_p *last)
389 {
390   def_optype_p new;
391 
392   ALLOC_OPTYPE (new, def);
393   DEF_OP_PTR (new) = op;
394   APPEND_OP_AFTER (new, *last);
395 }
396 
397 /* Adds OP to the list of uses of statement STMT after LAST, and moves
398    LAST to the new element.  */
399 
400 static inline void
401 add_use_op (tree stmt, tree *op, use_optype_p *last)
402 {
403   use_optype_p new;
404 
405   ALLOC_OPTYPE (new, use);
406   INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
407   APPEND_OP_AFTER (new, *last);
408 }
409 
410 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
411    LAST to the new element.  */
412 
413 static inline void
414 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
415 {
416   vuse_optype_p new;
417 
418   ALLOC_OPTYPE (new, vuse);
419   VUSE_OP (new) = op;
420   INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
421   APPEND_OP_AFTER (new, *last);
422 }
423 
424 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
425    LAST to the new element.  */
426 
427 static inline void
428 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
429 {
430   maydef_optype_p new;
431 
432   ALLOC_OPTYPE (new, maydef);
433   MAYDEF_RESULT (new) = op;
434   MAYDEF_OP (new) = op;
435   INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
436   APPEND_OP_AFTER (new, *last);
437 }
438 
439 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
440    LAST to the new element.  */
441 
442 static inline void
443 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
444 {
445   mustdef_optype_p new;
446 
447   ALLOC_OPTYPE (new, mustdef);
448   MUSTDEF_RESULT (new) = op;
449   MUSTDEF_KILL (new) = op;
450   INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
451   APPEND_OP_AFTER (new, *last);
452 }
453 
454 /* Takes elements from build_defs and turns them into def operands of STMT.
455    TODO -- Given that def operands list is not necessarily sorted, merging
456 	   the operands this way does not make much sense.
457 	-- Make build_defs VEC of tree *.  */
458 
459 static inline void
460 finalize_ssa_def_ops (tree stmt)
461 {
462   unsigned new_i;
463   struct def_optype_d new_list;
464   def_optype_p old_ops, last;
465   tree *old_base;
466 
467   new_list.next = NULL;
468   last = &new_list;
469 
470   old_ops = DEF_OPS (stmt);
471 
472   new_i = 0;
473   while (old_ops && new_i < VEC_length (tree, build_defs))
474     {
475       tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
476       old_base = DEF_OP_PTR (old_ops);
477 
478       if (old_base == new_base)
479         {
480 	  /* if variables are the same, reuse this node.  */
481 	  MOVE_HEAD_AFTER (old_ops, last);
482 	  new_i++;
483 	}
484       else if (old_base < new_base)
485 	{
486 	  /* if old is less than new, old goes to the free list.  */
487 	  MOVE_HEAD_TO_FREELIST (old_ops, def);
488 	}
489       else
490 	{
491 	  /* This is a new operand.  */
492 	  add_def_op (new_base, &last);
493 	  new_i++;
494 	}
495     }
496 
497   /* If there is anything remaining in the build_defs list, simply emit it.  */
498   for ( ; new_i < VEC_length (tree, build_defs); new_i++)
499     add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
500 
501   last->next = NULL;
502 
503   /* If there is anything in the old list, free it.  */
504   if (old_ops)
505     {
506       old_ops->next = free_defs;
507       free_defs = old_ops;
508     }
509 
510   /* Now set the stmt's operands.  */
511   DEF_OPS (stmt) = new_list.next;
512 
513 #ifdef ENABLE_CHECKING
514   {
515     def_optype_p ptr;
516     unsigned x = 0;
517     for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
518       x++;
519 
520     gcc_assert (x == VEC_length (tree, build_defs));
521   }
522 #endif
523 }
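
/* A small example of the merge above (a sketch, assuming the operand
   pointers happen to be in ascending address order): if the old DEF
   list has nodes for { &a, &c } and build_defs holds { &a, &b, &c },
   the loop reuses the nodes for &a and &c and allocates a fresh node
   for &b; if build_defs instead held only { &c }, the node for &a
   would be moved to the free list.  */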
524 
525 /* This routine will create stmt operands for STMT from the def build list.  */
526 
527 static void
528 finalize_ssa_defs (tree stmt)
529 {
530   unsigned int num = VEC_length (tree, build_defs);
531 
532   /* There should only be a single real definition per assignment.  */
533   gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
534 
535   /* If there is an old list, often the new list is identical, or close, so
536      find the elements at the beginning that are the same as the vector.  */
537   finalize_ssa_def_ops (stmt);
538   VEC_truncate (tree, build_defs, 0);
539 }
540 
541 /* Takes elements from build_uses and turns them into use operands of STMT.
542    TODO -- Make build_uses VEC of tree *.  */
543 
544 static inline void
545 finalize_ssa_use_ops (tree stmt)
546 {
547   unsigned new_i;
548   struct use_optype_d new_list;
549   use_optype_p old_ops, ptr, last;
550 
551   new_list.next = NULL;
552   last = &new_list;
553 
554   old_ops = USE_OPS (stmt);
555 
556   /* If there is anything in the old list, free it.  */
557   if (old_ops)
558     {
559       for (ptr = old_ops; ptr; ptr = ptr->next)
560 	delink_imm_use (USE_OP_PTR (ptr));
561       old_ops->next = free_uses;
562       free_uses = old_ops;
563     }
564 
565   /* Now create nodes for all the new nodes.  */
566   for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
567     add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
568 
569   last->next = NULL;
570 
571   /* Now set the stmt's operands.  */
572   USE_OPS (stmt) = new_list.next;
573 
574 #ifdef ENABLE_CHECKING
575   {
576     unsigned x = 0;
577     for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
578       x++;
579 
580     gcc_assert (x == VEC_length (tree, build_uses));
581   }
582 #endif
583 }
584 
585 /* Create the use operand vector for STMT from the build_uses vector.  */
586 
587 static void
588 finalize_ssa_uses (tree stmt)
589 {
590 #ifdef ENABLE_CHECKING
591   {
592     unsigned x;
593     unsigned num = VEC_length (tree, build_uses);
594 
595     /* If the pointer to the operand is the statement itself, something is
596        wrong.  It means that we are pointing to a local variable (the
597        initial call to update_stmt_operands does not pass a pointer to a
598        statement).  */
599     for (x = 0; x < num; x++)
600       gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
601   }
602 #endif
603   finalize_ssa_use_ops (stmt);
604   VEC_truncate (tree, build_uses, 0);
605 }
606 
607 
608 /* Takes elements from build_v_may_defs and turns them into maydef operands of
609    STMT.  */
610 
611 static inline void
612 finalize_ssa_v_may_def_ops (tree stmt)
613 {
614   unsigned new_i;
615   struct maydef_optype_d new_list;
616   maydef_optype_p old_ops, ptr, last;
617   tree act;
618   unsigned old_base, new_base;
619 
620   new_list.next = NULL;
621   last = &new_list;
622 
623   old_ops = MAYDEF_OPS (stmt);
624 
625   new_i = 0;
626   while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
627     {
628       act = VEC_index (tree, build_v_may_defs, new_i);
629       new_base = get_name_decl (act);
630       old_base = get_name_decl (MAYDEF_OP (old_ops));
631 
632       if (old_base == new_base)
633         {
634 	  /* if variables are the same, reuse this node.  */
635 	  MOVE_HEAD_AFTER (old_ops, last);
636 	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
637 	  new_i++;
638 	}
639       else if (old_base < new_base)
640 	{
641 	  /* if old is less than new, old goes to the free list.  */
642 	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
643 	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
644 	}
645       else
646 	{
647 	  /* This is a new operand.  */
648 	  add_maydef_op (stmt, act, &last);
649 	  new_i++;
650 	}
651     }
652 
653   /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
654   for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
655     add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
656 
657   last->next = NULL;
658 
659   /* If there is anything in the old list, free it.  */
660   if (old_ops)
661     {
662       for (ptr = old_ops; ptr; ptr = ptr->next)
663 	delink_imm_use (MAYDEF_OP_PTR (ptr));
664       old_ops->next = free_maydefs;
665       free_maydefs = old_ops;
666     }
667 
668   /* Now set the stmt's operands.  */
669   MAYDEF_OPS (stmt) = new_list.next;
670 
671 #ifdef ENABLE_CHECKING
672   {
673     unsigned x = 0;
674     for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
675       x++;
676 
677     gcc_assert (x == VEC_length (tree, build_v_may_defs));
678   }
679 #endif
680 }
681 
682 static void
683 finalize_ssa_v_may_defs (tree stmt)
684 {
685   finalize_ssa_v_may_def_ops (stmt);
686 }
687 
688 
689 /* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */
690 
691 static inline void
692 cleanup_v_may_defs (void)
693 {
694   unsigned x, num;
695   num = VEC_length (tree, build_v_may_defs);
696 
697   for (x = 0; x < num; x++)
698     {
699       tree t = VEC_index (tree, build_v_may_defs, x);
700       if (TREE_CODE (t) != SSA_NAME)
701 	{
702 	  var_ann_t ann = var_ann (t);
703 	  ann->in_v_may_def_list = 0;
704 	}
705     }
706   VEC_truncate (tree, build_v_may_defs, 0);
707 }
708 
709 
710 /* Takes elements from build_vuses and turns them into vuse operands of
711    STMT.  */
712 
713 static inline void
714 finalize_ssa_vuse_ops (tree stmt)
715 {
716   unsigned new_i;
717   struct vuse_optype_d new_list;
718   vuse_optype_p old_ops, ptr, last;
719   tree act;
720   unsigned old_base, new_base;
721 
722   new_list.next = NULL;
723   last = &new_list;
724 
725   old_ops = VUSE_OPS (stmt);
726 
727   new_i = 0;
728   while (old_ops && new_i < VEC_length (tree, build_vuses))
729     {
730       act = VEC_index (tree, build_vuses, new_i);
731       new_base = get_name_decl (act);
732       old_base = get_name_decl (VUSE_OP (old_ops));
733 
734       if (old_base == new_base)
735         {
736 	  /* if variables are the same, reuse this node.  */
737 	  MOVE_HEAD_AFTER (old_ops, last);
738 	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
739 	  new_i++;
740 	}
741       else if (old_base < new_base)
742 	{
743 	  /* if old is less than new, old goes to the free list.  */
744 	  delink_imm_use (USE_OP_PTR (old_ops));
745 	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
746 	}
747       else
748 	{
749 	  /* This is a new operand.  */
750 	  add_vuse_op (stmt, act, &last);
751 	  new_i++;
752 	}
753     }
754 
755   /* If there is anything remaining in the build_vuses list, simply emit it.  */
756   for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
757     add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
758 
759   last->next = NULL;
760 
761   /* If there is anything in the old list, free it.  */
762   if (old_ops)
763     {
764       for (ptr = old_ops; ptr; ptr = ptr->next)
765 	delink_imm_use (VUSE_OP_PTR (ptr));
766       old_ops->next = free_vuses;
767       free_vuses = old_ops;
768     }
769 
770   /* Now set the stmt's operands.  */
771   VUSE_OPS (stmt) = new_list.next;
772 
773 #ifdef ENABLE_CHECKING
774   {
775     unsigned x = 0;
776     for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
777       x++;
778 
779     gcc_assert (x == VEC_length (tree, build_vuses));
780   }
781 #endif
782 }
783 
784 /* Create the VUSE operand vector for STMT from the build_vuses vector.  */
785 
786 static void
787 finalize_ssa_vuses (tree stmt)
788 {
789   unsigned num, num_v_may_defs;
790   unsigned vuse_index;
791 
792   /* Remove superfluous VUSE operands.  If the statement already has a
793      V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
794      not needed because V_MAY_DEFs imply a VUSE of the variable.  For
795      instance, suppose that variable 'a' is aliased:
796 
797 	      # VUSE <a_2>
798 	      # a_3 = V_MAY_DEF <a_2>
799 	      a = a + 1;
800 
801      The VUSE <a_2> is superfluous because it is implied by the
802      V_MAY_DEF operation.  */
803   num = VEC_length (tree, build_vuses);
804   num_v_may_defs = VEC_length (tree, build_v_may_defs);
805 
806   if (num > 0 && num_v_may_defs > 0)
807     {
808       for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
809         {
810 	  tree vuse;
811 	  vuse = VEC_index (tree, build_vuses, vuse_index);
812 	  if (TREE_CODE (vuse) != SSA_NAME)
813 	    {
814 	      var_ann_t ann = var_ann (vuse);
815 	      ann->in_vuse_list = 0;
816 	      if (ann->in_v_may_def_list)
817 	        {
818 		  VEC_ordered_remove (tree, build_vuses, vuse_index);
819 		  continue;
820 		}
821 	    }
822 	  vuse_index++;
823 	}
824     }
825   else
826     {
827       /* Clear out the in_list bits.  */
828       for (vuse_index = 0;
829 	  vuse_index < VEC_length (tree, build_vuses);
830 	  vuse_index++)
831 	{
832 	  tree t = VEC_index (tree, build_vuses, vuse_index);
833 	  if (TREE_CODE (t) != SSA_NAME)
834 	    {
835 	      var_ann_t ann = var_ann (t);
836 	      ann->in_vuse_list = 0;
837 	    }
838 	}
839     }
840 
841   finalize_ssa_vuse_ops (stmt);
842 
843   /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
844   cleanup_v_may_defs ();
845 
846   /* Free the VUSEs build vector.  */
847   VEC_truncate (tree, build_vuses, 0);
848 
849 }
850 
851 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
852    STMT.  */
853 
854 static inline void
855 finalize_ssa_v_must_def_ops (tree stmt)
856 {
857   unsigned new_i;
858   struct mustdef_optype_d new_list;
859   mustdef_optype_p old_ops, ptr, last;
860   tree act;
861   unsigned old_base, new_base;
862 
863   new_list.next = NULL;
864   last = &new_list;
865 
866   old_ops = MUSTDEF_OPS (stmt);
867 
868   new_i = 0;
869   while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
870     {
871       act = VEC_index (tree, build_v_must_defs, new_i);
872       new_base = get_name_decl (act);
873       old_base = get_name_decl (MUSTDEF_KILL (old_ops));
874 
875       if (old_base == new_base)
876         {
877 	  /* If variables are the same, reuse this node.  */
878 	  MOVE_HEAD_AFTER (old_ops, last);
879 	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
880 	  new_i++;
881 	}
882       else if (old_base < new_base)
883 	{
884 	  /* If old is less than new, old goes to the free list.  */
885 	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
886 	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
887 	}
888       else
889 	{
890 	  /* This is a new operand.  */
891 	  add_mustdef_op (stmt, act, &last);
892 	  new_i++;
893 	}
894     }
895 
896   /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
897   for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
898     add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
899 
900   last->next = NULL;
901 
902   /* If there is anything in the old list, free it.  */
903   if (old_ops)
904     {
905       for (ptr = old_ops; ptr; ptr = ptr->next)
906 	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
907       old_ops->next = free_mustdefs;
908       free_mustdefs = old_ops;
909     }
910 
911   /* Now set the stmt's operands.  */
912   MUSTDEF_OPS (stmt) = new_list.next;
913 
914 #ifdef ENABLE_CHECKING
915   {
916     unsigned x = 0;
917     for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
918       x++;
919 
920     gcc_assert (x == VEC_length (tree, build_v_must_defs));
921   }
922 #endif
923 }
924 
925 static void
926 finalize_ssa_v_must_defs (tree stmt)
927 {
928   /* In the presence of subvars, there may be more than one V_MUST_DEF
929      per statement (one for each subvar).  It is a bit expensive to
930      verify that all must-defs in a statement belong to subvars if
931      there is more than one must-def, so we don't do it.  Suffice to
932      say, if you reach here without having subvars, and have num >1,
933      you have hit a bug.  */
934   finalize_ssa_v_must_def_ops (stmt);
935   VEC_truncate (tree, build_v_must_defs, 0);
936 }
937 
938 
939 /* Finalize all the build vectors and fill the operand cache of STMT.  */
940 
941 static inline void
942 finalize_ssa_stmt_operands (tree stmt)
943 {
944   finalize_ssa_defs (stmt);
945   finalize_ssa_uses (stmt);
946   finalize_ssa_v_must_defs (stmt);
947   finalize_ssa_v_may_defs (stmt);
948   finalize_ssa_vuses (stmt);
949 }
950 
951 
952 /* Start the process of building up the statement operand vectors.  */
953 
954 static inline void
955 start_ssa_stmt_operands (void)
956 {
957   gcc_assert (VEC_length (tree, build_defs) == 0);
958   gcc_assert (VEC_length (tree, build_uses) == 0);
959   gcc_assert (VEC_length (tree, build_vuses) == 0);
960   gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
961   gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
962 }
963 
964 
965 /* Add DEF_P to the list of pointers to operands.  */
966 
967 static inline void
968 append_def (tree *def_p)
969 {
970   VEC_safe_push (tree, heap, build_defs, (tree)def_p);
971 }
972 
973 
974 /* Add USE_P to the list of pointers to operands.  */
975 
976 static inline void
977 append_use (tree *use_p)
978 {
979   VEC_safe_push (tree, heap, build_uses, (tree)use_p);
980 }
981 
982 
983 /* Add a new virtual may def for variable VAR to the build array.  */
984 
985 static inline void
986 append_v_may_def (tree var)
987 {
988   if (TREE_CODE (var) != SSA_NAME)
989     {
990       var_ann_t ann = get_var_ann (var);
991 
992       /* Don't allow duplicate entries.  */
993       if (ann->in_v_may_def_list)
994 	return;
995       ann->in_v_may_def_list = 1;
996     }
997 
998   VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
999 }
1000 
1001 
1002 /* Add VAR to the list of virtual uses.  */
1003 
1004 static inline void
1005 append_vuse (tree var)
1006 {
1007   /* Don't allow duplicate entries.  */
1008   if (TREE_CODE (var) != SSA_NAME)
1009     {
1010       var_ann_t ann = get_var_ann (var);
1011 
1012       if (ann->in_vuse_list || ann->in_v_may_def_list)
1013         return;
1014       ann->in_vuse_list = 1;
1015     }
1016 
1017   VEC_safe_push (tree, heap, build_vuses, (tree)var);
1018 }
1019 
1020 
1021 /* Add VAR to the list of virtual must definitions.  */
1022 
1023 static inline void
1024 append_v_must_def (tree var)
1025 {
1026   unsigned i;
1027 
1028   /* Don't allow duplicate entries.  */
1029   for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1030     if (var == VEC_index (tree, build_v_must_defs, i))
1031       return;
1032 
1033   VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1034 }
1035 
1036 
1037 /* REF is a tree that contains the entire pointer dereference
1038    expression, if available, or NULL otherwise.  ALIAS is the variable
1039    we are asking if REF can access.  OFFSET and SIZE come from the
1040    memory access expression that generated this virtual operand.  */
1041 
1042 static bool
1043 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1044 			   HOST_WIDE_INT size)
1045 {
1046   bool offsetgtz = offset > 0;
1047   unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1048   tree base = ref ? get_base_address (ref) : NULL;
1049 
1050   /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1051      using a call-clobbered memory tag.  By definition, call-clobbered
1052      memory tags can always touch .GLOBAL_VAR.  */
1053   if (alias == global_var)
1054     return true;
1055 
1056   /* We cannot prune nonlocal aliases because they are not type
1057      specific.  */
1058   if (alias == nonlocal_all)
1059     return true;
1060 
1061   /* If ALIAS is an SFT, it can't be touched if the offset
1062      and size of the access is not overlapping with the SFT offset and
1063      size.  This is only true if we are accessing through a pointer
1064      to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
1065      be accessing through a pointer to some substruct of the
1066      structure, and if we try to prune there, we will have the wrong
1067      offset, and get the wrong answer.
1068      i.e., we can't prune without more work if we have something like
1069 
1070      struct gcc_target
1071      {
1072        struct asm_out
1073        {
1074          const char *byte_op;
1075 	 struct asm_int_op
1076 	 {
1077 	   const char *hi;
1078 	 } aligned_op;
1079        } asm_out;
1080      } targetm;
1081 
1082      foo = &targetm.asm_out.aligned_op;
1083      return foo->hi;
1084 
1085      SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1086      terms of SFT_PARENT_VAR, that is where it is.
1087      However, the access through the foo pointer will be at offset 0.  */
1088   if (size != -1
1089       && TREE_CODE (alias) == STRUCT_FIELD_TAG
1090       && base
1091       && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1092       && !overlap_subvar (offset, size, alias, NULL))
1093     {
1094 #ifdef ACCESS_DEBUGGING
1095       fprintf (stderr, "Access to ");
1096       print_generic_expr (stderr, ref, 0);
1097       fprintf (stderr, " may not touch ");
1098       print_generic_expr (stderr, alias, 0);
1099       fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1100 #endif
1101       return false;
1102     }
1103 
1104   /* With strict aliasing, it is impossible for a component access
1105      through a pointer to touch a random variable, unless that
1106      variable *is* a structure or a pointer.
1107 
1108      That is, given p->c, and some random global variable b,
1109      there is no legal way that p->c could be an access to b.
1110 
1111      Without strict aliasing on, we consider it legal to do something
1112      like:
1113 
1114      struct foos { int l; };
1115      int foo;
1116      static struct foos *getfoo(void);
1117      int main (void)
1118      {
1119        struct foos *f = getfoo();
1120        f->l = 1;
1121        foo = 2;
1122        if (f->l == 1)
1123          abort();
1124        exit(0);
1125      }
1126      static struct foos *getfoo(void)
1127      { return (struct foos *)&foo; }
1128 
1129      (taken from 20000623-1.c)
1130 
1131      The docs also say/imply that access through union pointers
1132      is legal (but *not* if you take the address of the union member,
1133      i.e. the inverse), such that you can do
1134 
1135      typedef union {
1136        int d;
1137      } U;
1138 
1139      int rv;
1140      void breakme()
1141      {
1142        U *rv0;
1143        U *pretmp = (U*)&rv;
1144        rv0 = pretmp;
1145        rv0->d = 42;
1146      }
1147      To implement this, we just punt on accesses through union
1148      pointers entirely.
1149   */
1150   else if (ref
1151 	   && flag_strict_aliasing
1152 	   && TREE_CODE (ref) != INDIRECT_REF
1153 	   && !MTAG_P (alias)
1154 	   && (TREE_CODE (base) != INDIRECT_REF
1155 	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1156 	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1157 	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1158 	   && !POINTER_TYPE_P (TREE_TYPE (alias))
1159 	   /* When the struct has the may_alias attribute, its alias set is zero,
1160 	      so the get_alias_set check below fails and we do not prune.  */
1161 	   && get_alias_set (base))
1162     {
1163 #ifdef ACCESS_DEBUGGING
1164       fprintf (stderr, "Access to ");
1165       print_generic_expr (stderr, ref, 0);
1166       fprintf (stderr, " may not touch ");
1167       print_generic_expr (stderr, alias, 0);
1168       fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1169 #endif
1170       return false;
1171     }
1172 
1173   /* If the offset of the access is greater than the size of one of
1174      the possible aliases, it can't be touching that alias, because it
1175      would be past the end of the structure.  */
1176   else if (ref
1177 	   && flag_strict_aliasing
1178 	   && TREE_CODE (ref) != INDIRECT_REF
1179 	   && !MTAG_P (alias)
1180 	   && !POINTER_TYPE_P (TREE_TYPE (alias))
1181 	   && offsetgtz
1182 	   && DECL_SIZE (alias)
1183 	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1184 	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1185     {
1186 #ifdef ACCESS_DEBUGGING
1187       fprintf (stderr, "Access to ");
1188       print_generic_expr (stderr, ref, 0);
1189       fprintf (stderr, " may not touch ");
1190       print_generic_expr (stderr, alias, 0);
1191       fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1192 #endif
1193       return false;
1194     }
1195 
1196   return true;
1197 }
1198 
1199 
1200 /* Add VAR to the virtual operands array.  FLAGS is as in
1201    get_expr_operands.  FULL_REF is a tree that contains the entire
1202    pointer dereference expression, if available, or NULL otherwise.
1203    OFFSET and SIZE come from the memory access expression that
1204    generated this virtual operand.  FOR_CLOBBER is true if this is
1205    adding a virtual operand for a call clobber.  */
1206 
1207 static void
1208 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1209 		     tree full_ref, HOST_WIDE_INT offset,
1210 		     HOST_WIDE_INT size, bool for_clobber)
1211 {
1212   VEC(tree,gc) *aliases;
1213   tree sym;
1214   var_ann_t v_ann;
1215 
1216   sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1217   v_ann = var_ann (sym);
1218 
1219   /* Mark statements with volatile operands.  Optimizers should back
1220      off from statements having volatile operands.  */
1221   if (TREE_THIS_VOLATILE (sym) && s_ann)
1222     s_ann->has_volatile_ops = true;
1223 
1224   /* If the variable cannot be modified and this is a V_MAY_DEF change
1225      it into a VUSE.  This happens when read-only variables are marked
1226     call-clobbered and/or aliased to writable variables.  So we only
1227     make this change for non-specific stores.
1228 
1229      Note that if this is a specific store, i.e. associated with a
1230      modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1231      into validation problems.
1232 
1233      This can happen when programs cast away const, leaving us with a
1234      store to read-only memory.  If the statement is actually executed
1235      at runtime, then the program is ill formed.  If the statement is
1236      not executed then all is well.  At the very least, we cannot ICE.  */
1237   if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1238     flags &= ~(opf_is_def | opf_kill_def);
1239 
1240   /* The variable is not a GIMPLE register.  Add it (or its aliases) to
1241      virtual operands, unless the caller has specifically requested
1242      not to add virtual operands (used when adding operands inside an
1243      ADDR_EXPR expression).  */
1244   if (flags & opf_no_vops)
1245     return;
1246 
1247   aliases = v_ann->may_aliases;
1248   if (aliases == NULL)
1249     {
1250       /* The variable is not aliased or it is an alias tag.  */
1251       if (flags & opf_is_def)
1252 	{
1253 	  if (flags & opf_kill_def)
1254 	    {
1255 	      /* V_MUST_DEF for non-aliased, non-GIMPLE register
1256 		 variable definitions.  */
1257 	      gcc_assert (!MTAG_P (var)
1258 			  || TREE_CODE (var) == STRUCT_FIELD_TAG);
1259 	      append_v_must_def (var);
1260 	    }
1261 	  else
1262 	    {
1263 	      /* Add a V_MAY_DEF for call-clobbered variables and
1264 		 memory tags.  */
1265 	      append_v_may_def (var);
1266 	    }
1267 	}
1268       else
1269 	append_vuse (var);
1270     }
1271   else
1272     {
1273       unsigned i;
1274       tree al;
1275 
1276       /* The variable is aliased.  Add its aliases to the virtual
1277 	 operands.  */
1278       gcc_assert (VEC_length (tree, aliases) != 0);
1279 
1280       if (flags & opf_is_def)
1281 	{
1282 
1283 	  bool none_added = true;
1284 
1285 	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1286 	    {
1287 	      if (!access_can_touch_variable (full_ref, al, offset, size))
1288 		continue;
1289 
1290 	      none_added = false;
1291 	      append_v_may_def (al);
1292 	    }
1293 
1294 	  /* If the variable is also an alias tag, add a virtual
1295 	     operand for it, otherwise we will miss representing
1296 	     references to the members of the variable's alias set.
1297 	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1298 
1299 	     It is also necessary to add bare defs on clobbers for
1300 	     SMT's, so that bare SMT uses caused by pruning all the
1301 	     aliases will link up properly with calls.   In order to
1302 	     keep the number of these bare defs we add down to the
1303 	     minimum necessary, we keep track of which SMT's were used
1304 	     alone in statement vdefs or VUSEs.  */
1305 	  if (v_ann->is_aliased
1306 	      || none_added
1307 	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1308 		  && for_clobber
1309 		  && SMT_USED_ALONE (var)))
1310 	    {
1311 	      /* Every bare SMT def we add should have SMT_USED_ALONE
1312 		 set on it, or else we will get the wrong answer on
1313 		 clobbers.  Sadly, this assertion trips on code that
1314 		 violates strict aliasing rules, because they *do* get
1315 		 the clobbers wrong, since it is illegal code.  As a
1316 		 result, we currently only enable it for aliasing
1317 		 debugging.  Someone might wish to turn this code into
1318 		 a nice strict-aliasing warning, since we *know* it
1319 		 will get the wrong answer...  */
1320 #ifdef ACCESS_DEBUGGING
1321 	      if (none_added
1322 		  && !updating_used_alone && aliases_computed_p
1323 		  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1324 		gcc_assert (SMT_USED_ALONE (var));
1325 #endif
1326 	      append_v_may_def (var);
1327 	    }
1328 	}
1329       else
1330 	{
1331 	  bool none_added = true;
1332 	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1333 	    {
1334 	      if (!access_can_touch_variable (full_ref, al, offset, size))
1335 		continue;
1336 	      none_added = false;
1337 	      append_vuse (al);
1338 	    }
1339 
1340 	  /* Similarly, append a virtual use for VAR itself, when
1341 	     it is an alias tag.  */
1342 	  if (v_ann->is_aliased || none_added)
1343 	    append_vuse (var);
1344 	}
1345     }
1346 }
1347 
1348 
1349 /* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
1350    get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
1351    the statement's real operands, otherwise it is added to virtual
1352    operands.  */
1353 
1354 static void
1355 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1356 {
1357   bool is_real_op;
1358   tree var, sym;
1359   var_ann_t v_ann;
1360 
1361   var = *var_p;
1362   gcc_assert (SSA_VAR_P (var));
1363 
1364   is_real_op = is_gimple_reg (var);
1365 
1366   /* If this is a real operand, the operand is either an SSA name or a
1367      decl.  Virtual operands may only be decls.  */
1368   gcc_assert (is_real_op || DECL_P (var));
1369 
1370   sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1371   v_ann = var_ann (sym);
1372 
1373   /* Mark statements with volatile operands.  Optimizers should back
1374      off from statements having volatile operands.  */
1375   if (TREE_THIS_VOLATILE (sym) && s_ann)
1376     s_ann->has_volatile_ops = true;
1377 
1378   if (is_real_op)
1379     {
1380       /* The variable is a GIMPLE register.  Add it to real operands.  */
1381       if (flags & opf_is_def)
1382 	append_def (var_p);
1383       else
1384 	append_use (var_p);
1385     }
1386   else
1387     add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1388 }
1389 
1390 
1391 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1392    ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1393 
1394    STMT is the statement being processed, EXPR is the INDIRECT_REF
1395       that got us here.
1396 
1397    FLAGS is as in get_expr_operands.
1398 
1399    FULL_REF contains the full pointer dereference expression, if we
1400       have it, or NULL otherwise.
1401 
1402    OFFSET and SIZE are the location of the access inside the
1403       dereferenced pointer, if known.
1404 
1405    RECURSE_ON_BASE should be set to true if we want to continue
1406       calling get_expr_operands on the base pointer, and false if
1407       something else will do it for us.  */
1408 
1409 static void
1410 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1411 			   tree full_ref,
1412 			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
1413 			   bool recurse_on_base)
1414 {
1415   tree *pptr = &TREE_OPERAND (expr, 0);
1416   tree ptr = *pptr;
1417   stmt_ann_t s_ann = stmt_ann (stmt);
1418 
1419   /* Stores into INDIRECT_REF operands are never killing definitions.  */
1420   flags &= ~opf_kill_def;
1421 
1422   if (SSA_VAR_P (ptr))
1423     {
1424       struct ptr_info_def *pi = NULL;
1425 
1426       /* If PTR has flow-sensitive points-to information, use it.  */
1427       if (TREE_CODE (ptr) == SSA_NAME
1428 	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1429 	  && pi->name_mem_tag)
1430 	{
1431 	  /* PTR has its own memory tag.  Use it.  */
1432 	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1433 			       full_ref, offset, size, false);
1434 	}
1435       else
1436 	{
1437 	  /* If PTR is not an SSA_NAME or it doesn't have a name
1438 	     tag, use its symbol memory tag.  */
1439 	  var_ann_t v_ann;
1440 
1441 	  /* If we are emitting debugging dumps, display a warning if
1442 	     PTR is an SSA_NAME with no flow-sensitive alias
1443 	     information.  That means that we may need to compute
1444 	     aliasing again.  */
1445 	  if (dump_file
1446 	      && TREE_CODE (ptr) == SSA_NAME
1447 	      && pi == NULL)
1448 	    {
1449 	      fprintf (dump_file,
1450 		  "NOTE: no flow-sensitive alias info for ");
1451 	      print_generic_expr (dump_file, ptr, dump_flags);
1452 	      fprintf (dump_file, " in ");
1453 	      print_generic_stmt (dump_file, stmt, dump_flags);
1454 	    }
1455 
1456 	  if (TREE_CODE (ptr) == SSA_NAME)
1457 	    ptr = SSA_NAME_VAR (ptr);
1458 	  v_ann = var_ann (ptr);
1459 
1460 	  if (v_ann->symbol_mem_tag)
1461 	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1462 				 full_ref, offset, size, false);
1463 	}
1464     }
1465   else if (TREE_CODE (ptr) == INTEGER_CST)
1466     {
1467       /* If a constant is used as a pointer, we can't generate a real
1468 	 operand for it but we mark the statement volatile to prevent
1469 	 optimizations from messing things up.  */
1470       if (s_ann)
1471 	s_ann->has_volatile_ops = true;
1472       return;
1473     }
1474   else
1475     {
1476       /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
1477       gcc_unreachable ();
1478     }
1479 
1480   /* If requested, add a USE operand for the base pointer.  */
1481   if (recurse_on_base)
1482     get_expr_operands (stmt, pptr, opf_none);
1483 }
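
/* For instance (a sketch), for a load such as

	b = *p;

   the routine above adds virtual operands for p's name memory tag (when
   PTR has flow-sensitive points-to information) or for its symbol
   memory tag otherwise, i.e. VUSEs of whatever 'p' may point to, and
   then, via the recursive call on the base pointer, a real USE of 'p'
   itself.  */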
1484 
1485 
1486 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
1487 
1488 static void
1489 get_tmr_operands (tree stmt, tree expr, int flags)
1490 {
1491   tree tag = TMR_TAG (expr), ref;
1492   HOST_WIDE_INT offset, size, maxsize;
1493   subvar_t svars, sv;
1494   stmt_ann_t s_ann = stmt_ann (stmt);
1495 
1496   /* First record the real operands.  */
1497   get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1498   get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1499 
1500   /* MEM_REFs should never be killing.  */
1501   flags &= ~opf_kill_def;
1502 
1503   if (TMR_SYMBOL (expr))
1504     {
1505       stmt_ann_t ann = stmt_ann (stmt);
1506       add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1507     }
1508 
1509   if (!tag)
1510     {
1511       /* Something weird, so ensure that we will be careful.  */
1512       stmt_ann (stmt)->has_volatile_ops = true;
1513       return;
1514     }
1515 
1516   if (DECL_P (tag))
1517     {
1518       get_expr_operands (stmt, &tag, flags);
1519       return;
1520     }
1521 
1522   ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1523   gcc_assert (ref != NULL_TREE);
1524   svars = get_subvars_for_var (ref);
1525   for (sv = svars; sv; sv = sv->next)
1526     {
1527       bool exact;
1528       if (overlap_subvar (offset, maxsize, sv->var, &exact))
1529 	{
1530 	  int subvar_flags = flags;
1531 	  if (!exact || size != maxsize)
1532 	    subvar_flags &= ~opf_kill_def;
1533 	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
1534 	}
1535     }
1536 }
1537 
1538 
1539 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1540    clobbered variables in the function.  */
1541 
1542 static void
1543 add_call_clobber_ops (tree stmt, tree callee)
1544 {
1545   unsigned u;
1546   bitmap_iterator bi;
1547   stmt_ann_t s_ann = stmt_ann (stmt);
1548   bitmap not_read_b, not_written_b;
1549 
1550   /* Functions that are neither const, pure nor noreturn may clobber
1551      call-clobbered variables.  */
1552   if (s_ann)
1553     s_ann->makes_clobbering_call = true;
1554 
1555   /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
1556      for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
1557   if (global_var)
1558     {
1559       add_stmt_operand (&global_var, s_ann, opf_is_def);
1560       return;
1561     }
1562 
1563   /* Get info for local and module level statics.  There is a bit
1564      set for each static if the call being processed does not read
1565      or write that variable.  */
1566   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1567   not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1568   /* Add a V_MAY_DEF operand for every call clobbered variable.  */
1569   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1570     {
1571       tree var = referenced_var_lookup (u);
1572       unsigned int escape_mask = var_ann (var)->escape_mask;
1573       tree real_var = var;
1574       bool not_read;
1575       bool not_written;
1576 
1577       /* Not read and not written are computed on regular vars, not
1578 	 subvars, so look at the parent var if this is an SFT. */
1579       if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1580 	real_var = SFT_PARENT_VAR (var);
1581 
1582       not_read = not_read_b ? bitmap_bit_p (not_read_b,
1583 					    DECL_UID (real_var)) : false;
1584       not_written = not_written_b ? bitmap_bit_p (not_written_b,
1585 						  DECL_UID (real_var)) : false;
1586       gcc_assert (!unmodifiable_var_p (var));
1587 
1588       clobber_stats.clobbered_vars++;
1589 
1590       /* See if this variable is really clobbered by this function.  */
1591 
1592       /* Trivial case: Things escaping only to pure/const are not
1593 	 clobbered by non-pure-const, and only read by pure/const. */
1594       if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1595 	{
1596 	  tree call = get_call_expr_in (stmt);
1597 	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1598 	    {
1599 	      add_stmt_operand (&var, s_ann, opf_none);
1600 	      clobber_stats.unescapable_clobbers_avoided++;
1601 	      continue;
1602 	    }
1603 	  else
1604 	    {
1605 	      clobber_stats.unescapable_clobbers_avoided++;
1606 	      continue;
1607 	    }
1608 	}
1609 
1610       if (not_written)
1611 	{
1612 	  clobber_stats.static_write_clobbers_avoided++;
1613 	  if (!not_read)
1614 	    add_stmt_operand (&var, s_ann, opf_none);
1615 	  else
1616 	    clobber_stats.static_read_clobbers_avoided++;
1617 	}
1618       else
1619 	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1620     }
1621 }
1622 
1623 
1624 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1625    function.  */
1626 
1627 static void
1628 add_call_read_ops (tree stmt, tree callee)
1629 {
1630   unsigned u;
1631   bitmap_iterator bi;
1632   stmt_ann_t s_ann = stmt_ann (stmt);
1633   bitmap not_read_b;
1634 
1635   /* If the function is not pure, it may reference memory.  Add
1636      a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
1637      for the heuristic used to decide whether to create .GLOBAL_VAR.  */
1638   if (global_var)
1639     {
1640       add_stmt_operand (&global_var, s_ann, opf_none);
1641       return;
1642     }
1643 
1644   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1645 
1646   /* Add a VUSE for each call-clobbered variable.  */
1647   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1648     {
1649       tree var = referenced_var (u);
1650       tree real_var = var;
1651       bool not_read;
1652 
1653       clobber_stats.readonly_clobbers++;
1654 
1655       /* Not read and not written are computed on regular vars, not
1656 	 subvars, so look at the parent var if this is an SFT. */
1657 
1658       if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1659 	real_var = SFT_PARENT_VAR (var);
1660 
1661       not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1662 	                    : false;
1663 
1664       if (not_read)
1665 	{
1666 	  clobber_stats.static_readonly_clobbers_avoided++;
1667 	  continue;
1668 	}
1669 
1670       add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1671     }
1672 }
1673 
1674 
1675 /* A subroutine of get_expr_operands to handle CALL_EXPR.  */
1676 
1677 static void
1678 get_call_expr_operands (tree stmt, tree expr)
1679 {
1680   tree op;
1681   int call_flags = call_expr_flags (expr);
1682 
1683   /* If aliases have been computed already, add V_MAY_DEF or V_USE
1684      operands for all the symbols that have been found to be
1685      call-clobbered.
1686 
1687      Note that if aliases have not been computed, the global effects
1688      of calls will not be included in the SSA web. This is fine
1689      because no optimizer should run before aliases have been
1690      computed.  By not bothering with virtual operands for CALL_EXPRs
1691      we avoid adding superfluous virtual operands, which can be a
1692      significant compile time sink (See PR 15855).  */
1693   if (aliases_computed_p
1694       && !bitmap_empty_p (call_clobbered_vars)
1695       && !(call_flags & ECF_NOVOPS))
1696     {
1697       /* A 'pure' or a 'const' function never call-clobbers anything.
1698 	 A 'noreturn' function might, but since we don't return anyway
1699 	 there is no point in recording that.  */
1700       if (TREE_SIDE_EFFECTS (expr)
1701 	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1702 	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1703       else if (!(call_flags & ECF_CONST))
1704 	add_call_read_ops (stmt, get_callee_fndecl (expr));
1705     }
1706 
1707   /* Scan the callee, the arguments and the static chain for uses.  */
1708   get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1709 
1710   for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1711     get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1712 
1713   get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1714 }
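
/* Illustrative example (a sketch, not code from this file): for a call
   statement such as

       x = foo (&a);

   once aliasing has been computed, a FOO with side effects that is neither
   pure nor const typically gets V_MAY_DEF operands for the call-clobbered
   symbols (via add_call_clobber_ops), while a pure FOO only gets VUSEs
   (via add_call_read_ops above).  A const FOO, or one marked ECF_NOVOPS,
   gets no virtual operands at all.  */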
1715 
1716 
1717 /* Scan operands for the ASM_EXPR statement STMT.  */
1718 
1719 static void
1720 get_asm_expr_operands (tree stmt)
1721 {
1722   stmt_ann_t s_ann = stmt_ann (stmt);
1723   int noutputs = list_length (ASM_OUTPUTS (stmt));
1724   const char **oconstraints
1725     = (const char **) alloca ((noutputs) * sizeof (const char *));
1726   int i;
1727   tree link;
1728   const char *constraint;
1729   bool allows_mem, allows_reg, is_inout;
1730 
1731   for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1732     {
1733       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1734       oconstraints[i] = constraint;
1735       parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1736 	                       &allows_reg, &is_inout);
1737 
1738       /* This should have been split in gimplify_asm_expr.  */
1739       gcc_assert (!allows_reg || !is_inout);
1740 
1741       /* Memory operands are addressable.  Note that STMT needs the
1742 	 address of this operand.  */
1743       if (!allows_reg && allows_mem)
1744 	{
1745 	  tree t = get_base_address (TREE_VALUE (link));
1746 	  if (t && DECL_P (t) && s_ann)
1747 	    add_to_addressable_set (t, &s_ann->addresses_taken);
1748 	}
1749 
1750       get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1751     }
1752 
1753   for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1754     {
1755       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1756       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1757 			      oconstraints, &allows_mem, &allows_reg);
1758 
1759       /* Memory operands are addressable.  Note that STMT needs the
1760 	 address of this operand.  */
1761       if (!allows_reg && allows_mem)
1762 	{
1763 	  tree t = get_base_address (TREE_VALUE (link));
1764 	  if (t && DECL_P (t) && s_ann)
1765 	    add_to_addressable_set (t, &s_ann->addresses_taken);
1766 	}
1767 
1768       get_expr_operands (stmt, &TREE_VALUE (link), 0);
1769     }
1770 
1771 
1772   /* Clobber memory for asm ("" : : : "memory");  */
1773   for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1774     if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1775       {
1776 	unsigned i;
1777 	bitmap_iterator bi;
1778 
1779 	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1780 	   decided to group them).  */
1781 	if (global_var)
1782 	  add_stmt_operand (&global_var, s_ann, opf_is_def);
1783 	else
1784 	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1785 	    {
1786 	      tree var = referenced_var (i);
1787 	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1788 	    }
1789 
1790 	/* Now clobber all addressables.  */
1791 	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1792 	    {
1793 	      tree var = referenced_var (i);
1794 
1795 	      /* Subvars are explicitly represented in this list, so
1796 		 we don't need the original to be added to the clobber
1797 		 ops, but the original *will* be in this list because
1798 		 we keep the addressability of the original
1799 		 variable up-to-date so we don't screw up the rest of
1800 		 the backend.  */
1801 	      if (var_can_have_subvars (var)
1802 		  && get_subvars_for_var (var) != NULL)
1803 		continue;
1804 
1805 	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1806 	    }
1807 
1808 	break;
1809       }
1810 }
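
/* Illustrative example (a sketch, not code from this file): for an asm
   with a "memory" clobber, such as

       __asm__ __volatile__ ("" : : : "memory");

   the clobber loop above adds a definition operand (normally a V_MAY_DEF)
   for every call-clobbered and every addressable variable, or a single one
   for .GLOBAL_VAR when variables have been grouped, so the asm is treated
   as potentially writing any of that memory.  */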
1811 
1812 
1813 /* Scan operands for the assignment expression EXPR in statement STMT.  */
1814 
1815 static void
1816 get_modify_expr_operands (tree stmt, tree expr)
1817 {
1818   /* First get operands from the RHS.  */
1819   get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1820 
1821   /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1822      registers.  If the LHS is a store to memory, we will either need
1823      a preserving definition (V_MAY_DEF) or a killing definition
1824      (V_MUST_DEF).
1825 
1826      Preserving definitions are those that modify a part of an
1827      aggregate object for which no subvars have been computed (or the
1828      reference does not correspond exactly to one of them). Stores
1829      through a pointer are also represented with V_MAY_DEF operators.
1830 
1831      The determination of whether to use a preserving or a killing
1832      definition is done while scanning the LHS of the assignment.  By
1833      default, assume that we will emit a V_MUST_DEF.  */
1834   get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1835 }
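
/* Illustrative example (a sketch, not code from this file): for the two
   stores

       s.f = x_1;	where S has had subvars computed
       *p_2 = y_3;	a store through a pointer

   the first LHS typically keeps the opf_kill_def flag and ends up as a
   V_MUST_DEF of the subvar that exactly matches s.f, while the second is
   scanned by get_indirect_ref_operands and is represented with V_MAY_DEF
   operands, since only part of the pointed-to memory may be overwritten.  */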
1836 
1837 
1838 /* Recursively scan the expression pointed to by EXPR_P in statement
1839    STMT.  FLAGS is one of the OPF_* constants modifying how to
1840    interpret the operands found.  */
1841 
1842 static void
1843 get_expr_operands (tree stmt, tree *expr_p, int flags)
1844 {
1845   enum tree_code code;
1846   enum tree_code_class class;
1847   tree expr = *expr_p;
1848   stmt_ann_t s_ann = stmt_ann (stmt);
1849 
1850   if (expr == NULL)
1851     return;
1852 
1853   code = TREE_CODE (expr);
1854   class = TREE_CODE_CLASS (code);
1855 
1856   switch (code)
1857     {
1858     case ADDR_EXPR:
1859       /* Taking the address of a variable does not represent a
1860 	 reference to it, but the fact that the statement takes its
1861 	 address will be of interest to some passes (e.g. alias
1862 	 resolution).  */
1863       add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1864 
1865       /* If the address is invariant, there may be no interesting
1866 	 variable references inside.  */
1867       if (is_gimple_min_invariant (expr))
1868 	return;
1869 
1870       /* Otherwise, there may be variables referenced inside but there
1871 	 should be no VUSEs created, since the referenced objects are
1872 	 not really accessed.  The only operands that we should find
1873 	 here are ARRAY_REF indices which will always be real operands
1874 	 (GIMPLE does not allow non-registers as array indices).  */
1875       flags |= opf_no_vops;
1876       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1877       return;
1878 
1879     case SSA_NAME:
1880     case STRUCT_FIELD_TAG:
1881     case SYMBOL_MEMORY_TAG:
1882     case NAME_MEMORY_TAG:
1883       add_stmt_operand (expr_p, s_ann, flags);
1884       return;
1885 
1886     case VAR_DECL:
1887     case PARM_DECL:
1888     case RESULT_DECL:
1889       {
1890 	subvar_t svars;
1891 
1892 	/* Add the subvars for a variable, if it has subvars, to DEFS
1893 	   or USES.  Otherwise, add the variable itself.  Whether it
1894 	   goes to USES or DEFS depends on the operand flags.  */
1895 	if (var_can_have_subvars (expr)
1896 	    && (svars = get_subvars_for_var (expr)))
1897 	  {
1898 	    subvar_t sv;
1899 	    for (sv = svars; sv; sv = sv->next)
1900 	      add_stmt_operand (&sv->var, s_ann, flags);
1901 	  }
1902 	else
1903 	  add_stmt_operand (expr_p, s_ann, flags);
1904 
1905 	return;
1906       }
1907 
1908     case MISALIGNED_INDIRECT_REF:
1909       get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1910       /* fall through */
1911 
1912     case ALIGN_INDIRECT_REF:
1913     case INDIRECT_REF:
1914       get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1915       return;
1916 
1917     case TARGET_MEM_REF:
1918       get_tmr_operands (stmt, expr, flags);
1919       return;
1920 
1921     case ARRAY_REF:
1922     case ARRAY_RANGE_REF:
1923     case COMPONENT_REF:
1924     case REALPART_EXPR:
1925     case IMAGPART_EXPR:
1926       {
1927 	tree ref;
1928 	HOST_WIDE_INT offset, size, maxsize;
1929 	bool none = true;
1930 
1931 	/* This component reference becomes an access to all of the
1932 	   subvariables it can touch, if we can determine that, but
1933 	   *NOT* the real one.  If we can't determine which fields we
1934 	   could touch, the recursion will eventually get to a
1935 	   variable and add *all* of its subvars, or whatever is the
1936 	   minimum correct subset.  */
1937 	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1938 	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1939 	  {
1940 	    subvar_t sv;
1941 	    subvar_t svars = get_subvars_for_var (ref);
1942 
1943 	    for (sv = svars; sv; sv = sv->next)
1944 	      {
1945 		bool exact;
1946 
1947 		if (overlap_subvar (offset, maxsize, sv->var, &exact))
1948 		  {
1949 	            int subvar_flags = flags;
1950 		    none = false;
1951 		    if (!exact || size != maxsize)
1952 		      subvar_flags &= ~opf_kill_def;
1953 		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
1954 		  }
1955 	      }
1956 
1957 	    if (!none)
1958 	      flags |= opf_no_vops;
1959 	  }
1960 	else if (TREE_CODE (ref) == INDIRECT_REF)
1961 	  {
1962 	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1963 		                       maxsize, false);
1964 	    flags |= opf_no_vops;
1965 	  }
1966 
1967 	/* Even if we found subvars above, we still need to scan the base
1968 	   expression so that we see immediate uses such as D in S.A[D];
1969 	   if S.A has subvars we would otherwise miss that use.  */
1970 	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1971 			   flags & ~opf_kill_def);
1972 
1973 	if (code == COMPONENT_REF)
1974 	  {
1975 	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1976 	      s_ann->has_volatile_ops = true;
1977 	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1978 	  }
1979 	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1980 	  {
1981             get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1982             get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1983             get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1984 	  }
1985 
1986 	return;
1987       }
1988 
1989     case WITH_SIZE_EXPR:
1990       /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1991 	 and an rvalue reference to its second argument.  */
1992       get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1993       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1994       return;
1995 
1996     case CALL_EXPR:
1997       get_call_expr_operands (stmt, expr);
1998       return;
1999 
2000     case COND_EXPR:
2001     case VEC_COND_EXPR:
2002       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2003       get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2004       get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2005       return;
2006 
2007     case MODIFY_EXPR:
2008       get_modify_expr_operands (stmt, expr);
2009       return;
2010 
2011     case CONSTRUCTOR:
2012       {
2013 	/* General aggregate CONSTRUCTORs have been decomposed, but they
2014 	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
2015 	constructor_elt *ce;
2016 	unsigned HOST_WIDE_INT idx;
2017 
2018 	for (idx = 0;
2019 	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2020 	     idx++)
2021 	  get_expr_operands (stmt, &ce->value, opf_none);
2022 
2023 	return;
2024       }
2025 
2026     case BIT_FIELD_REF:
2027       /* Stores using BIT_FIELD_REF are always preserving definitions.  */
2028       flags &= ~opf_kill_def;
2029 
2030       /* Fallthru  */
2031 
2032     case TRUTH_NOT_EXPR:
2033     case VIEW_CONVERT_EXPR:
2034     do_unary:
2035       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2036       return;
2037 
2038     case TRUTH_AND_EXPR:
2039     case TRUTH_OR_EXPR:
2040     case TRUTH_XOR_EXPR:
2041     case COMPOUND_EXPR:
2042     case OBJ_TYPE_REF:
2043     case ASSERT_EXPR:
2044     do_binary:
2045       {
2046 	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2047 	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2048 	return;
2049       }
2050 
2051     case DOT_PROD_EXPR:
2052     case REALIGN_LOAD_EXPR:
2053       {
2054 	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2055         get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2056         get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2057         return;
2058       }
2059 
2060     case BLOCK:
2061     case FUNCTION_DECL:
2062     case EXC_PTR_EXPR:
2063     case FILTER_EXPR:
2064     case LABEL_DECL:
2065     case CONST_DECL:
2066     case OMP_PARALLEL:
2067     case OMP_SECTIONS:
2068     case OMP_FOR:
2069     case OMP_SINGLE:
2070     case OMP_MASTER:
2071     case OMP_ORDERED:
2072     case OMP_CRITICAL:
2073     case OMP_RETURN:
2074     case OMP_CONTINUE:
2075       /* Expressions that make no memory references.  */
2076       return;
2077 
2078     default:
2079       if (class == tcc_unary)
2080 	goto do_unary;
2081       if (class == tcc_binary || class == tcc_comparison)
2082 	goto do_binary;
2083       if (class == tcc_constant || class == tcc_type)
2084 	return;
2085     }
2086 
2087   /* If we get here, something has gone wrong.  */
2088 #ifdef ENABLE_CHECKING
2089   fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2090   debug_tree (expr);
2091   fputs ("\n", stderr);
2092 #endif
2093   gcc_unreachable ();
2094 }
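
/* Illustrative example (a sketch, not code from this file): for a load

       t_1 = s.a[d_2];

   the ARRAY_REF/COMPONENT_REF case above adds VUSEs for whichever subvars
   of S the access may touch (falling back to scanning S itself when the
   extent is unknown), and the recursion over the index operand records
   D_2 as a real USE, which is what the comment about s.a[d] above refers
   to.  */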
2095 
2096 
2097 /* Parse STMT looking for operands.  When finished, the various
2098    build_* operand vectors will have potential operands in them.  */
2099 
2100 static void
2101 parse_ssa_operands (tree stmt)
2102 {
2103   enum tree_code code;
2104 
2105   code = TREE_CODE (stmt);
2106   switch (code)
2107     {
2108     case MODIFY_EXPR:
2109       get_modify_expr_operands (stmt, stmt);
2110       break;
2111 
2112     case COND_EXPR:
2113       get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2114       break;
2115 
2116     case SWITCH_EXPR:
2117       get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2118       break;
2119 
2120     case ASM_EXPR:
2121       get_asm_expr_operands (stmt);
2122       break;
2123 
2124     case RETURN_EXPR:
2125       get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2126       break;
2127 
2128     case GOTO_EXPR:
2129       get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2130       break;
2131 
2132     case LABEL_EXPR:
2133       get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2134       break;
2135 
2136     case BIND_EXPR:
2137     case CASE_LABEL_EXPR:
2138     case TRY_CATCH_EXPR:
2139     case TRY_FINALLY_EXPR:
2140     case EH_FILTER_EXPR:
2141     case CATCH_EXPR:
2142     case RESX_EXPR:
2143       /* These nodes contain no variable references.  */
2144       break;
2145 
2146     default:
2147       /* Notice that if get_expr_operands tries to use &STMT as the
2148 	 operand pointer (which may only happen for USE operands), we
2149 	 will fail in add_stmt_operand.  This default will handle
2150 	 statements like empty statements, or CALL_EXPRs that may
2151 	 appear on the RHS of a statement or as statements themselves.  */
2152       get_expr_operands (stmt, &stmt, opf_none);
2153       break;
2154     }
2155 }
2156 
2157 
2158 /* Create an operands cache for STMT.  */
2159 
2160 static void
2161 build_ssa_operands (tree stmt)
2162 {
2163   stmt_ann_t ann = get_stmt_ann (stmt);
2164 
2165   /* Initially assume that the statement has no volatile operands and
2166      does not take the address of any symbols.  */
2167   if (ann)
2168     {
2169       ann->has_volatile_ops = false;
2170       if (ann->addresses_taken)
2171 	ann->addresses_taken = NULL;
2172     }
2173 
2174   start_ssa_stmt_operands ();
2175 
2176   parse_ssa_operands (stmt);
2177   operand_build_sort_virtual (build_vuses);
2178   operand_build_sort_virtual (build_v_may_defs);
2179   operand_build_sort_virtual (build_v_must_defs);
2180 
2181   finalize_ssa_stmt_operands (stmt);
2182 }
2183 
2184 
2185 /* Free any operands vectors in OPS.  */
2186 
2187 void
2188 free_ssa_operands (stmt_operands_p ops)
2189 {
2190   ops->def_ops = NULL;
2191   ops->use_ops = NULL;
2192   ops->maydef_ops = NULL;
2193   ops->mustdef_ops = NULL;
2194   ops->vuse_ops = NULL;
2195 }
2196 
2197 
2198 /* Get the operands of statement STMT.  */
2199 
2200 void
2201 update_stmt_operands (tree stmt)
2202 {
2203   stmt_ann_t ann = get_stmt_ann (stmt);
2204 
2205   /* If update_stmt_operands is called before SSA is initialized, do
2206      nothing.  */
2207   if (!ssa_operands_active ())
2208     return;
2209 
2210   /* The optimizers cannot handle statements that are nothing but a
2211      _DECL.  This indicates a bug in the gimplifier.  */
2212   gcc_assert (!SSA_VAR_P (stmt));
2213 
2214   gcc_assert (ann->modified);
2215 
2216   timevar_push (TV_TREE_OPS);
2217 
2218   build_ssa_operands (stmt);
2219 
2220   /* Clear the modified bit for STMT.  */
2221   ann->modified = 0;
2222 
2223   timevar_pop (TV_TREE_OPS);
2224 }
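
/* A minimal usage sketch (assuming a MODIFY_EXPR statement STMT whose RHS
   was just rewritten by a pass; NEW_RHS is hypothetical).  Passes normally
   go through the update_stmt wrapper rather than calling
   update_stmt_operands directly:

       TREE_OPERAND (stmt, 1) = new_rhs;
       update_stmt (stmt);

   update_stmt marks the statement modified and then calls
   update_stmt_operands to rebuild the cache.  */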
2225 
2226 
2227 /* Copies virtual operands from SRC to DEST.  */
2228 
2229 void
2230 copy_virtual_operands (tree dest, tree src)
2231 {
2232   tree t;
2233   ssa_op_iter iter, old_iter;
2234   use_operand_p use_p, u2;
2235   def_operand_p def_p, d2;
2236 
2237   build_ssa_operands (dest);
2238 
2239   /* Copy all the virtual fields.  */
2240   FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2241     append_vuse (t);
2242   FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2243     append_v_may_def (t);
2244   FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2245     append_v_must_def (t);
2246 
2247   if (VEC_length (tree, build_vuses) == 0
2248       && VEC_length (tree, build_v_may_defs) == 0
2249       && VEC_length (tree, build_v_must_defs) == 0)
2250     return;
2251 
2252   /* Now commit the virtual operands to this stmt.  */
2253   finalize_ssa_v_must_defs (dest);
2254   finalize_ssa_v_may_defs (dest);
2255   finalize_ssa_vuses (dest);
2256 
2257   /* Finally, set the fields to the same values as the originals.  */
2258   t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2259   FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2260     {
2261       gcc_assert (!op_iter_done (&old_iter));
2262       SET_USE (use_p, t);
2263       t = op_iter_next_tree (&old_iter);
2264     }
2265   gcc_assert (op_iter_done (&old_iter));
2266 
2267   op_iter_init_maydef (&old_iter, src, &u2, &d2);
2268   FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2269     {
2270       gcc_assert (!op_iter_done (&old_iter));
2271       SET_USE (use_p, USE_FROM_PTR (u2));
2272       SET_DEF (def_p, DEF_FROM_PTR (d2));
2273       op_iter_next_maymustdef (&u2, &d2, &old_iter);
2274     }
2275   gcc_assert (op_iter_done (&old_iter));
2276 
2277   op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2278   FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2279     {
2280       gcc_assert (!op_iter_done (&old_iter));
2281       SET_USE (use_p, USE_FROM_PTR (u2));
2282       SET_DEF (def_p, DEF_FROM_PTR (d2));
2283       op_iter_next_maymustdef (&u2, &d2, &old_iter);
2284     }
2285   gcc_assert (op_iter_done (&old_iter));
2286 
2287 }
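
/* A minimal usage sketch (assuming a pass that has a statement STMT and a
   block_stmt_iterator BSI where a copy of it should live):

       tree copy = unshare_expr (stmt);
       bsi_insert_after (&bsi, copy, BSI_SAME_STMT);
       copy_virtual_operands (copy, stmt);

   Afterwards the copy's VUSE/V_MAY_DEF/V_MUST_DEF operands refer to the
   same SSA_NAMEs as the original's, as set up above.  */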
2288 
2289 
2290 /* Specifically for use in DOM's expression analysis.  Given a store, we
2291    create an artificial stmt which looks like a load from the store; this
2292    can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
2293    and NEW_STMT is the new load which represents a load of the values
2294    stored.  */
2295 
2296 void
2297 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2298 {
2299   stmt_ann_t ann;
2300   tree op;
2301   ssa_op_iter iter;
2302   use_operand_p use_p;
2303   unsigned x;
2304 
2305   ann = get_stmt_ann (new_stmt);
2306 
2307   /* Process the stmt looking for operands.  */
2308   start_ssa_stmt_operands ();
2309   parse_ssa_operands (new_stmt);
2310 
2311   for (x = 0; x < VEC_length (tree, build_vuses); x++)
2312     {
2313       tree t = VEC_index (tree, build_vuses, x);
2314       if (TREE_CODE (t) != SSA_NAME)
2315 	{
2316 	  var_ann_t ann = var_ann (t);
2317 	  ann->in_vuse_list = 0;
2318 	}
2319     }
2320 
2321   for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2322     {
2323       tree t = VEC_index (tree, build_v_may_defs, x);
2324       if (TREE_CODE (t) != SSA_NAME)
2325 	{
2326 	  var_ann_t ann = var_ann (t);
2327 	  ann->in_v_may_def_list = 0;
2328 	}
2329     }
2330 
2331   /* Remove any virtual operands that were found.  */
2332   VEC_truncate (tree, build_v_may_defs, 0);
2333   VEC_truncate (tree, build_v_must_defs, 0);
2334   VEC_truncate (tree, build_vuses, 0);
2335 
2336   /* For each VDEF on the original statement, we want to create a
2337      VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2338      statement.  */
2339   FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2340 			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2341     append_vuse (op);
2342 
2343   /* Now build the operands for this new stmt.  */
2344   finalize_ssa_stmt_operands (new_stmt);
2345 
2346   /* All uses in this fake stmt must not be in the immediate use lists.  */
2347   FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2348     delink_imm_use (use_p);
2349 }
2350 
2351 
2352 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
2353    to test the validity of the swap operation.  */
2354 
2355 void
2356 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2357 {
2358   tree op0, op1;
2359   op0 = *exp0;
2360   op1 = *exp1;
2361 
2362   /* If the operand cache is active, attempt to preserve the relative
2363      positions of these two operands in their respective immediate use
2364      lists.  */
2365   if (ssa_operands_active () && op0 != op1)
2366     {
2367       use_optype_p use0, use1, ptr;
2368       use0 = use1 = NULL;
2369 
2370       /* Find the 2 operands in the cache, if they are there.  */
2371       for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2372 	if (USE_OP_PTR (ptr)->use == exp0)
2373 	  {
2374 	    use0 = ptr;
2375 	    break;
2376 	  }
2377 
2378       for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2379 	if (USE_OP_PTR (ptr)->use == exp1)
2380 	  {
2381 	    use1 = ptr;
2382 	    break;
2383 	  }
2384 
2385       /* If either use lacks an operand entry, there isn't much we can do
2386          at this point.  Presumably we don't need to worry about it.  */
2387       if (use0 && use1)
2388         {
2389 	  tree *tmp = USE_OP_PTR (use1)->use;
2390 	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2391 	  USE_OP_PTR (use0)->use = tmp;
2392 	}
2393     }
2394 
2395   /* Now swap the data.  */
2396   *exp0 = op1;
2397   *exp1 = op0;
2398 }
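
/* A minimal usage sketch (assuming STMT is a MODIFY_EXPR whose RHS is a
   commutative binary expression being canonicalized):

       tree rhs = TREE_OPERAND (stmt, 1);
       swap_tree_operands (stmt,
			   &TREE_OPERAND (rhs, 0),
			   &TREE_OPERAND (rhs, 1));

   Using this routine instead of swapping the trees by hand keeps the use
   operands in the cache pointing at the right immediate-use lists.  */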
2399 
2400 
2401 /* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
2402    *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
2403    a single variable whose address has been taken or any other valid
2404    GIMPLE memory reference (structure reference, array, etc).  If the
2405    base address of REF is a decl that has sub-variables, also add all
2406    of its sub-variables.  */
2407 
2408 void
2409 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2410 {
2411   tree var;
2412   subvar_t svars;
2413 
2414   gcc_assert (addresses_taken);
2415 
2416   /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2417      as the only thing we take the address of.  If VAR is a structure,
2418      taking the address of a field means that the whole structure may
2419      be referenced using pointer arithmetic.  See PR 21407 and the
2420      ensuing mailing list discussion.  */
2421   var = get_base_address (ref);
2422   if (var && SSA_VAR_P (var))
2423     {
2424       if (*addresses_taken == NULL)
2425 	*addresses_taken = BITMAP_GGC_ALLOC ();
2426 
2427       if (var_can_have_subvars (var)
2428 	  && (svars = get_subvars_for_var (var)))
2429 	{
2430 	  subvar_t sv;
2431 	  for (sv = svars; sv; sv = sv->next)
2432 	    {
2433 	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2434 	      TREE_ADDRESSABLE (sv->var) = 1;
2435 	    }
2436 	}
2437       else
2438 	{
2439 	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
2440 	  TREE_ADDRESSABLE (var) = 1;
2441 	}
2442     }
2443 }
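
/* Illustrative example (a sketch, not code from this file): when
   get_expr_operands scans

       p_1 = &s.f;

   its ADDR_EXPR case passes the COMPONENT_REF 's.f' to this routine.  Per
   the comment above, it is the base S (together with all of its
   sub-variables, if it has any) that gets marked addressable, not just the
   field.  */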
2444 
2445 
2446 /* Scan the immediate_use list for VAR making sure it is linked properly.
2447    Return TRUE if there is a problem and emit an error message to F.  */
2448 
2449 bool
2450 verify_imm_links (FILE *f, tree var)
2451 {
2452   use_operand_p ptr, prev, list;
2453   int count;
2454 
2455   gcc_assert (TREE_CODE (var) == SSA_NAME);
2456 
2457   list = &(SSA_NAME_IMM_USE_NODE (var));
2458   gcc_assert (list->use == NULL);
2459 
2460   if (list->prev == NULL)
2461     {
2462       gcc_assert (list->next == NULL);
2463       return false;
2464     }
2465 
2466   prev = list;
2467   count = 0;
2468   for (ptr = list->next; ptr != list; )
2469     {
2470       if (prev != ptr->prev)
2471 	goto error;
2472 
2473       if (ptr->use == NULL)
2474 	goto error; /* 2 roots, or SAFE guard node.  */
2475       else if (*(ptr->use) != var)
2476 	goto error;
2477 
2478       prev = ptr;
2479       ptr = ptr->next;
2480 
2481       /* Avoid infinite loops.  50,000,000 uses probably indicates a
2482 	 problem.  */
2483       if (count++ > 50000000)
2484 	goto error;
2485     }
2486 
2487   /* Verify list in the other direction.  */
2488   prev = list;
2489   for (ptr = list->prev; ptr != list; )
2490     {
2491       if (prev != ptr->next)
2492 	goto error;
2493       prev = ptr;
2494       ptr = ptr->prev;
2495       if (count-- < 0)
2496 	goto error;
2497     }
2498 
2499   if (count != 0)
2500     goto error;
2501 
2502   return false;
2503 
2504  error:
2505   if (ptr->stmt && stmt_modified_p (ptr->stmt))
2506     {
2507       fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2508       print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2509     }
2510   fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2511 	   (void *)ptr->use);
2512   print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2513   fprintf (f, "\n");
2514   return true;
2515 }
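
/* A minimal usage sketch (checking every SSA name in the current function,
   mirroring the loop in dump_immediate_uses below):

       unsigned i;
       for (i = 1; i < num_ssa_names; i++)
	 if (ssa_name (i))
	   verify_imm_links (stderr, ssa_name (i));
   */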
2516 
2517 
2518 /* Dump all the immediate uses of VAR to FILE.  */
2519 
2520 void
2521 dump_immediate_uses_for (FILE *file, tree var)
2522 {
2523   imm_use_iterator iter;
2524   use_operand_p use_p;
2525 
2526   gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2527 
2528   print_generic_expr (file, var, TDF_SLIM);
2529   fprintf (file, " : -->");
2530   if (has_zero_uses (var))
2531     fprintf (file, " no uses.\n");
2532   else
2533     if (has_single_use (var))
2534       fprintf (file, " single use.\n");
2535     else
2536       fprintf (file, "%d uses.\n", num_imm_uses (var));
2537 
2538   FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2539     {
2540       if (use_p->stmt == NULL && use_p->use == NULL)
2541         fprintf (file, "***end of stmt iterator marker***\n");
2542       else
2543 	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2544 	  print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2545 	else
2546 	  print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2547     }
2548   fprintf (file, "\n");
2549 }
2550 
2551 
2552 /* Dump all the immediate uses to FILE.  */
2553 
2554 void
2555 dump_immediate_uses (FILE *file)
2556 {
2557   tree var;
2558   unsigned int x;
2559 
2560   fprintf (file, "Immediate_uses: \n\n");
2561   for (x = 1; x < num_ssa_names; x++)
2562     {
2563       var = ssa_name (x);
2564       if (!var)
2565         continue;
2566       dump_immediate_uses_for (file, var);
2567     }
2568 }
2569 
2570 
2571 /* Dump def-use edges on stderr.  */
2572 
2573 void
2574 debug_immediate_uses (void)
2575 {
2576   dump_immediate_uses (stderr);
2577 }
2578 
2579 
2580 /* Dump def-use edges for VAR on stderr.  */
2581 
2582 void
2583 debug_immediate_uses_for (tree var)
2584 {
2585   dump_immediate_uses_for (stderr, var);
2586 }
2587 
2588 #include "gt-tree-ssa-operands.h"
2589