/* SSA operands management for trees.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stmt.h"
#include "print-tree.h"
#include "dumpfile.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which finalizes each of the operand vectors that
   have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operands, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
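
/* As a concrete example, for the statement

       a = *p + b;

   the scan records real USE operands for 'p' and 'b' (pointers into the
   statement itself) and a VUSE of the virtual operand .MEM for the load
   '*p'.  The DEF of 'a' stays in the statement and needs no cache entry
   of its own.  */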


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)
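
/* These flags combine: for instance, get_mem_ref_operands scans the
   base pointer of a MEM_REF with opf_use | opf_non_addressable plus
   whatever opf_no_vops and opf_not_non_addressable bits were inherited
   from the enclosing expression.  */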

/* Array for building all the use operands.  */
static vec<tree *> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple *, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of the FUD chain of all virtual operands.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (a struct use_optype_d is five
   pointers, i.e. 40 bytes on a host with 8 byte pointers), enough for
   roughly 10 statements with a couple of uses each.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}


      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}



/* Take the built VDEF, if any, and turn it into the VDEF operand of
   STMT.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple *stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }
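
  /* The built VUSE, if any, is thus always the first element of
     build_uses and hence becomes the first use operand of the
     statement; gimple_vuse_op relies on that ordering to find it.  */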

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create use operands for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Reset the build arrays: clear the built VDEF and VUSE and empty the
   array of uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors and fill the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple *stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p)
	      || TREE_CODE (*var_p) == STRING_CST
	      || TREE_CODE (*var_p) == CONST_DECL);

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple *stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (fn, stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */
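/* A 'const' call thus gets neither a VDEF nor a VUSE, a 'pure' call
   gets only a VUSE, and any other call without ECF_NOVOPS gets a VDEF
   (append_vdef records the matching VUSE as well).  */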

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case STRING_CST:
    case CONST_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P; transfer
	   the volatility to the statement.  We check TREE_THIS_VOLATILE
	   rather than TREE_CLOBBER_P to mirror the other uses of
	   THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */
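/* For a GIMPLE_ASSIGN such as a = b + c, operand 0 (the LHS) is scanned
   with opf_def and the remaining operands with opf_use; a GIMPLE_CALL
   additionally gets its call-clobber VDEF/VUSE first.  */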

static void
parse_ssa_operands (struct function *fn, gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple *stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple *stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up to date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for statement");
      return true;
    }

  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up to date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for statement");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for statement");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for statement");
	debug_generic_expr (*op);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("statement volatile flag not up to date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple *stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Recompute the operand cache of statement STMT.  Passes normally reach
   this via update_stmt () after modifying a statement in place.  */

void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */
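/* A typical call canonicalizes the two operands of a commutative
   assignment:
     swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			gimple_assign_rhs2_ptr (stmt));  */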

void
swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}


/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      count++;
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  FOR_EACH_SSA_NAME (x, var, cfun)
    {
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges of VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */
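/* For example, when deleting a dead store whose VDEF is .MEM_3 and
   whose VUSE is .MEM_1, every downstream use of .MEM_3 is rewritten to
   use .MEM_1, keeping the virtual FUD chain intact.  */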

void
unlink_stmt_vdef (gimple *stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple **stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}
