/* SSA operands management for trees.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stmt.h"
#include "print-tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: defs, uses, virtual uses, and virtual may-defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
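
/* As an illustration (not itself part of this file's machinery), passes
   normally consume the cache built here through the iterators in
   ssa-iterators.h rather than by touching the build vectors directly.
   A minimal sketch, assuming STMT is a statement in a function with
   active SSA operands and dump_file is open:

	use_operand_p use_p;
	ssa_op_iter iter;

	FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	  print_generic_expr (dump_file, USE_FROM_PTR (use_p), TDF_SLIM);

   SSA_OP_USE restricts the walk to real uses; SSA_OP_VUSE and
   SSA_OP_ALL_USES also cover the virtual operand maintained below.  */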


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/*  Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
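
/* For illustration: in TDF_VOPS dumps the virtual operand created above
   appears as SSA versions of .MEM, e.g. a store through a pointer is
   rendered roughly as

	# .MEM_3 = VDEF <.MEM_2>
	*p_1 = x_4;

   so a single FUD chain threads through all memory statements.  */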

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
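
/* E.g. with 8-byte pointers the usable chunk payload grows
   1016 -> 4088 -> 16376 bytes (one pointer per chunk is reserved for
   its 'next' link), and every chunk after that stays at OP_SIZE_3.  */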

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}


      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}



/* Finalize the virtual DEF operand of STMT: commit the VDEF built in
   build_vdef to the statement, releasing a VDEF that is no longer
   needed.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
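
/* For illustration, consider re-scanning a statement that already had
   virtual operands, say

	# .MEM_5 = VDEF <.MEM_4>
	*p_1 = x_2;

   If the new scan still produces a VDEF of .MEM, SSA_NAME_VAR of the
   old name matches build_vdef and .MEM_5 is kept as-is.  If the
   statement no longer needs a VDEF, the code above unlinks .MEM_5 from
   the IL, releases the name and clears the operand.  */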


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the operand vectors; the build
   state must be empty.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	/* Real defs are not cached; they are accessed directly in STMT.  */
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
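
/* For example, given 'p_1 = &s.f', the base address of the
   COMPONENT_REF 's.f' is 's', so TREE_ADDRESSABLE is set on all of
   's'; marking only the field would wrongly suggest that pointer
   arithmetic from &s.f cannot reach the rest of 's'.  */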


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
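
/* E.g. for '*p_1 = x_2' the MEM_REF on the LHS is reached with opf_def
   in FLAGS, so this adds a VDEF of .MEM plus a real USE of the base
   pointer 'p_1'; the stored value 'x_2' is collected separately when
   the RHS is scanned.  */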


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (fn, stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      /* A 'pure' function may still read memory, so give it a VUSE.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}
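
/* In effect: a 'const' call gets no virtual operands at all, a 'pure'
   call gets only a VUSE (it may read but not write memory), and every
   other call without ECF_NOVOPS gets a VDEF, which implies the VUSE as
   well (see append_vdef).  */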


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_stmt_operands (struct function *fn, gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
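
/* For illustration, in

	__asm__ ("" : "=m" (v) : "m" (w));

   both 'v' and 'w' allow only memory, so their addresses are marked as
   taken and both are scanned with opf_not_non_addressable, the output
   'v' additionally with opf_def; a "memory" clobber would instead add
   a bare VDEF of .MEM via the call above.  */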


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile CONSTRUCTOR is actually a clobber (TREE_CLOBBER_P);
	   transfer the volatility to the statement.  We check
	   TREE_THIS_VOLATILE rather than TREE_CLOBBER_P to mirror the
	   other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
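
/* As a worked example, scanning the assignment 'a[i_1] = x_2 + b.f'
   collects the real USEs i_1 and x_2 and, because both the ARRAY_REF
   store and the COMPONENT_REF load are memory references, a VDEF of
   .MEM (which subsumes the VUSE); no real operand is created for the
   aggregates 'a' or 'b' themselves.  */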


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
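
/* Passes rarely call this directly; after editing a statement in place
   they go through the update_stmt () wrapper.  E.g., with NEW_OP a
   hypothetical replacement operand of the right type:

	gimple_assign_set_rhs1 (stmt, new_op);
	update_stmt (stmt);

   update_stmt marks STMT modified and re-runs build_ssa_operands via
   this function.  */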


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
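
/* A typical caller canonicalizes a commutative operation, e.g.

	swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			   gimple_assign_rhs2_ptr (stmt));

   which exchanges the two RHS operands without invalidating their
   immediate-use links, so no operand rescan is required.  */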


/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
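
/* A common use is deleting a dead statement that has virtual operands:
   propagate the VUSE over the VDEF first, then remove the statement.
   Assuming GSI is a gimple_stmt_iterator pointing at STMT:

	unlink_stmt_vdef (stmt);
	gsi_remove (&gsi, true);
	release_defs (stmt);

   after which any SSA names defined by STMT are released.  */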


/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}


/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}