/* SSA operands management for trees.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stmt.h"
#include "print-tree.h"
#include "dumpfile.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
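
/* In outline, every rebuild of a statement's operand cache follows the
   same three steps (these are the real routines defined below; the
   sequence is shown here for orientation only):

     start_ssa_stmt_operands ();
     parse_ssa_operands (fn, stmt);
     finalize_ssa_stmt_operands (fn, stmt);

   which is exactly what build_ssa_operands does.  */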


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  */
static vec<tree *> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple *, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of the virtual operands' FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffers in bytes which get
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
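
/* To make the "25 use operands" figure above concrete: on an LP64 host a
   struct use_optype_d is 40 bytes (a next pointer plus the four
   pointer-sized fields of the embedded ssa_use_operand_t), so the
   OP_SIZE_1 buffer of 1024 - 8 = 1016 bytes holds 1016 / 40 = 25 use
   operands.  This is a back-of-the-envelope sketch; the exact sizes
   depend on the host ABI.  */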

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
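
/* A sketch of the intended pairing, as driven by the SSA machinery
   (the surrounding code is illustrative only):

     init_ssa_operands (fn);
     ... statements are built or changed, each followed by update_stmt () ...
     fini_ssa_operands (fn);
*/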


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Takes the virtual definition built up in BUILD_VDEF and turns it into
   the VDEF operand of STMT.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  /* Prepend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple *stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Prepend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create use operand nodes for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
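
/* Once finalized, a pass can walk the resulting use operands with the
   standard iterators from ssa-iterators.h; e.g. (an illustrative sketch,
   where examine () stands in for a hypothetical callback):

     use_operand_p use_p;
     ssa_op_iter iter;
     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       examine (USE_FROM_PTR (use_p));
*/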


/* Clear the built VDEF/VUSE operands and empty the use build array.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the statement's operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple *stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
	 Register defs need no cache entry, so only uses are appended.  */
      if (!(flags & opf_def))
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple *stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
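
/* For example, for a store through a pointer such as '*p_1 = x_2', the
   MEM_REF on the LHS is reached with opf_def, so this routine appends a
   VDEF of the .MEM virtual operand and a real USE of 'p_1'; 'x_2' then
   becomes another real USE when the RHS is scanned.  (An illustrative
   walk-through, assuming p_1 and x_2 are SSA names.)  */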


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (fn, stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'pure' function may still read memory, though, so it gets a
	 VUSE; a 'const' function gets no virtual operands at all.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
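
/* As a worked example (illustrative only), for

     __asm__ ("" : "=m" (x) : "r" (y) : "memory");

   the "=m" output marks the address of 'x' as taken and is scanned with
   opf_def, the "r" input adds a real USE of 'y', and the "memory" clobber
   makes gimple_asm_clobbers_memory_p return true, adding a VDEF of the
   virtual operand.  */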


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P; transfer
	   the volatility to the statement.  We do not use TREE_CLOBBER_P
	   here, mirroring the other uses of TREE_THIS_VOLATILE in this
	   file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
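
/* As a walk-through (illustrative only): for the assignment 'a_1 = b_2 + c_3'
   operand 0 is scanned with opf_def and the remaining operands with opf_use,
   so build_uses ends up holding pointers to 'b_2' and 'c_3'.  No virtual
   operands are built since every operand is a GIMPLE register.  */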


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple *stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple *stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*op);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple *stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
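
/* Passes normally reach this through the update_stmt () wrapper in
   gimple-ssa.h, which marks the statement modified first; e.g. (an
   illustrative sketch, new_rhs being a hypothetical replacement tree):

     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);
*/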


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointers
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
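
/* A typical caller canonicalizing a commutative operation might do
   (illustrative sketch):

     swap_ssa_operands (stmt,
			gimple_assign_rhs1_ptr (stmt),
			gimple_assign_rhs2_ptr (stmt));
*/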


/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      count++;
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  FOR_EACH_SSA_NAME (x, var, cfun)
    {
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple *stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
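
/* The usual idiom when a pass deletes a statement that writes memory is
   to unlink its VDEF first, e.g. (illustrative sketch, gsi pointing at
   stmt):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);
*/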

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple **stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}
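
/* Callers normally reach this through the single_imm_use () wrapper in
   ssa-iterators.h, e.g. (illustrative sketch):

     use_operand_p use_p;
     gimple *use_stmt;
     if (single_imm_use (name, &use_p, &use_stmt))
       ... name has exactly one nondebug use, in use_stmt ...
*/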