/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

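/* Illustrative sketch (not part of the original source): for a tuple
   backed by struct gimple_statement_with_ops (GSS_WITH_OPS), the two
   tables above combine so that

     gimple_ops_offset_[GSS_WITH_OPS]
       == sizeof (struct gimple_statement_with_ops) - sizeof (tree)

   and gimple_ops can locate the trailing operand array by adding this
   offset to the tuple's address.  */
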
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor that takes a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

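/* Illustrative example (not part of the original source): for a
   GIMPLE_ASSIGN with three operands, the size computed above is

     gimple_size (GIMPLE_ASSIGN) + 2 * sizeof (tree)

   since one tree slot is already accounted for in the base struct.  */
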
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

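/* Usage sketch (illustrative, not part of the original file): given a
   FUNCTION_DECL FOO_DECL and gimple values A, B and TMP (hypothetical
   names), a call `tmp = foo (a, b)' could be built as

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);  */
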

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Only minimal checking of
   this is done.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

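/* Illustrative example (not part of the original source): for EXPR
   `a + b', which has gimple rhs class GIMPLE_BINARY_RHS, this extracts
   *SUBCODE_P = PLUS_EXPR, *OP1_P = a, *OP2_P = b and *OP3_P = NULL_TREE.  */
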

/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which can be a unary, binary or
   ternary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
  					    PASS_MEM_STAT);
}

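/* Usage sketch (illustrative only): to build `x = y + z' for
   hypothetical trees X, Y and Z, one could write

     gimple g = gimple_build_assign (x, build2 (PLUS_EXPR,
						TREE_TYPE (x), y, z));

   which routes through extract_ops_from_tree_1 above and yields a
   GIMPLE_ASSIGN with subcode PLUS_EXPR.  */
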

/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
   class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for the LHS and 1, 2 or 3 for the RHS (depending
     on the code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
  			          PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

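/* Usage sketch (illustrative only): with hypothetical trees DST and SRC,

     gimple_seq seq = NULL;
     gimple g = gimplify_assign (dst, src, &seq);

   appends the gimplified assignment to SEQ and returns it.  */
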

/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

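/* Usage sketch (illustrative only): `if (a < b) goto l1; else goto l2;'
   for hypothetical trees A, B, L1 and L2 could be built as

     gimple cond = gimple_build_cond (LT_EXPR, a, b, l1, l2);  */
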

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

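/* Illustrative example (not part of the original source): given
   COND = `!x', the canonicalization above yields the pair `x == 0',
   i.e. *CODE_P = EQ_EXPR, *LHS_P = x and
   *RHS_P = build_zero_cst (TREE_TYPE (x)); a bare `x' becomes
   `x != 0' the same way.  */
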

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

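/* Usage sketch (illustrative only): a simple clobbering asm such as
   `__asm__ ("nop" : : : "memory")' could be built with empty input and
   output vectors, a one-element clobber vector, and no labels:

     gimple a = gimple_build_asm_vec ("nop", NULL, NULL, clobbers, NULL);

   where CLOBBERS is a hypothetical one-element VEC(tree,gc); NULL
   vectors have length zero.  */
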
/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

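/* Usage sketch (illustrative only): a try/finally region wrapping BODY
   with CLEANUP (both hypothetical gimple_seqs) could be built as

     gimple t = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);  */
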
/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

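/* Usage sketch (illustrative only): a two-case switch over a
   hypothetical INDEX with CASE_LABEL_EXPRs C1 and C2 and default D
   could be built variadically as

     gimple s = gimple_build_switch (2, index, d, c1, c2);

   or from a vector via gimple_build_switch_vec above.  */
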
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements that only one thread can execute
   at a time.  NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private,
   firstprivate, lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors
   from predict.def; OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

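/* Usage sketch (illustrative only): statements are typically
   accumulated into a caller-owned sequence pointer, e.g.

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_stmt (&seq, gimple_build_label (label));

   where LABEL is a hypothetical LABEL_DECL; the first call allocates
   the sequence via gimple_seq_alloc above.  */
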

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

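/* Usage sketch (illustrative only): a statement callback that stops the
   walk at the first GIMPLE_CALL might look like

     static tree
     find_call (gimple_stmt_iterator *gsi, bool *handled_ops,
		struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       *handled_ops = true;
       return is_gimple_call (gsi_stmt (*gsi)) ? error_mark_node : NULL_TREE;
     }

   so that, with a zeroed struct walk_stmt_info WI,
   walk_gimple_seq (seq, find_call, NULL, &wi) returns the first call
   statement found, with the sentinel value in wi.callback_result.  */
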

/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
	                       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
          /* If the RHS has more than 1 operand, it is not appropriate
             for memory.
	     ???  An lhs always requires an lvalue; checking the val_only
	     flag does not make any sense, so we should be able to avoid
	     computing it here.  */
1504 	  tree rhs1 = gimple_assign_rhs1 (stmt);
1505 	  wi->val_only = !(is_gimple_mem_rhs (rhs1)
1506 			   || TREE_CODE (rhs1) == CONSTRUCTOR)
1507                          || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
1508 	  wi->is_lhs = true;
1509 	}
1510 
1511       ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1512       if (ret)
1513 	return ret;
1514 
1515       if (wi)
1516 	{
1517 	  wi->val_only = true;
1518 	  wi->is_lhs = false;
1519 	}
1520       break;
1521 
1522     case GIMPLE_CALL:
1523       if (wi)
1524 	{
1525 	  wi->is_lhs = false;
1526 	  wi->val_only = true;
1527 	}
1528 
1529       ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1530       if (ret)
1531         return ret;
1532 
1533       ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1534       if (ret)
1535         return ret;
1536 
1537       for (i = 0; i < gimple_call_num_args (stmt); i++)
1538 	{
1539 	  if (wi)
1540 	    wi->val_only
1541 	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1542 	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1543 			   pset);
1544 	  if (ret)
1545 	    return ret;
1546 	}
1547 
1548       if (gimple_call_lhs (stmt))
1549 	{
1550 	  if (wi)
1551 	    {
1552 	      wi->is_lhs = true;
1553 	      wi->val_only
1554 		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1555 	    }
1556 
1557 	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1558 	  if (ret)
1559 	    return ret;
1560 	}
1561 
1562       if (wi)
1563 	{
1564 	  wi->is_lhs = false;
1565 	  wi->val_only = true;
1566 	}
1567       break;
1568 
1569     case GIMPLE_CATCH:
1570       ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1571 		       pset);
1572       if (ret)
1573 	return ret;
1574       break;
1575 
1576     case GIMPLE_EH_FILTER:
1577       ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1578 		       pset);
1579       if (ret)
1580 	return ret;
1581       break;
1582 
1583     case GIMPLE_ASM:
1584       ret = walk_gimple_asm (stmt, callback_op, wi);
1585       if (ret)
1586 	return ret;
1587       break;
1588 
1589     case GIMPLE_OMP_CONTINUE:
1590       ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1591 	  	       callback_op, wi, pset);
1592       if (ret)
1593 	return ret;
1594 
1595       ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1596 	  	       callback_op, wi, pset);
1597       if (ret)
1598 	return ret;
1599       break;
1600 
1601     case GIMPLE_OMP_CRITICAL:
1602       ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1603 		       pset);
1604       if (ret)
1605 	return ret;
1606       break;
1607 
1608     case GIMPLE_OMP_FOR:
1609       ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1610 		       pset);
1611       if (ret)
1612 	return ret;
1613       for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1614 	{
1615 	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1616 			   wi, pset);
1617 	  if (ret)
1618 	    return ret;
1619 	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1620 			   wi, pset);
1621 	  if (ret)
1622 	    return ret;
1623 	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1624 			   wi, pset);
1625 	  if (ret)
1626 	    return ret;
1627 	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1628 			   wi, pset);
1629 	}
1630       if (ret)
1631 	return ret;
1632       break;
1633 
1634     case GIMPLE_OMP_PARALLEL:
1635       ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1636 		       wi, pset);
1637       if (ret)
1638 	return ret;
1639       ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1640 		       wi, pset);
1641       if (ret)
1642 	return ret;
1643       ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1644 		       wi, pset);
1645       if (ret)
1646 	return ret;
1647       break;
1648 
1649     case GIMPLE_OMP_TASK:
1650       ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1651 		       wi, pset);
1652       if (ret)
1653 	return ret;
1654       ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1655 		       wi, pset);
1656       if (ret)
1657 	return ret;
1658       ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1659 		       wi, pset);
1660       if (ret)
1661 	return ret;
1662       ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1663 		       wi, pset);
1664       if (ret)
1665 	return ret;
1666       ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1667 		       wi, pset);
1668       if (ret)
1669 	return ret;
1670       ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1671 		       wi, pset);
1672       if (ret)
1673 	return ret;
1674       break;
1675 
1676     case GIMPLE_OMP_SECTIONS:
1677       ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1678 		       wi, pset);
1679       if (ret)
1680 	return ret;
1681 
1682       ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1683 		       wi, pset);
1684       if (ret)
1685 	return ret;
1686 
1687       break;
1688 
1689     case GIMPLE_OMP_SINGLE:
1690       ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1691 		       pset);
1692       if (ret)
1693 	return ret;
1694       break;
1695 
1696     case GIMPLE_OMP_ATOMIC_LOAD:
1697       ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1698 		       pset);
1699       if (ret)
1700 	return ret;
1701 
1702       ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1703 		       pset);
1704       if (ret)
1705 	return ret;
1706       break;
1707 
1708     case GIMPLE_OMP_ATOMIC_STORE:
1709       ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1710 		       wi, pset);
1711       if (ret)
1712 	return ret;
1713       break;
1714 
1715     case GIMPLE_TRANSACTION:
1716       ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
1717 		       wi, pset);
1718       if (ret)
1719 	return ret;
1720       break;
1721 
1722       /* Tuples that do not have operands.  */
1723     case GIMPLE_NOP:
1724     case GIMPLE_RESX:
1725     case GIMPLE_OMP_RETURN:
1726     case GIMPLE_PREDICT:
1727       break;
1728 
1729     default:
1730       {
1731 	enum gimple_statement_structure_enum gss;
1732 	gss = gimple_statement_structure (stmt);
1733 	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1734 	  for (i = 0; i < gimple_num_ops (stmt); i++)
1735 	    {
1736 	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1737 	      if (ret)
1738 		return ret;
1739 	    }
1740       }
1741       break;
1742     }
1743 
1744   return NULL_TREE;
1745 }
1746 
1747 
1748 /* Walk the current statement in GSI (optionally using traversal state
1749    stored in WI).  If WI is NULL, no state is kept during traversal.
1750    The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
1751    that it has handled all the operands of the statement, its return
1752    value is returned.  Otherwise, the return value from CALLBACK_STMT
1753    is discarded and its operands are scanned.
1754 
1755    If CALLBACK_STMT is NULL or it didn't handle the operands,
1756    CALLBACK_OP is called on each operand of the statement via
1757    walk_gimple_op.  If walk_gimple_op returns non-NULL for any
1758    operand, the remaining operands are not scanned.  In this case, the
1759    return value from CALLBACK_OP is returned.
1760 
1761    In any other case, NULL_TREE is returned.  */
1762 
1763 tree
1764 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1765 		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
1766 {
1767   gimple ret;
1768   tree tree_ret;
1769   gimple stmt = gsi_stmt (*gsi);
1770 
1771   if (wi)
1772     {
1773       wi->gsi = *gsi;
1774       wi->removed_stmt = false;
1775 
1776       if (wi->want_locations && gimple_has_location (stmt))
1777 	input_location = gimple_location (stmt);
1778     }
1779 
1780   ret = NULL;
1781 
1782   /* Invoke the statement callback.  Return if the callback handled
1783      all of STMT operands by itself.  */
1784   if (callback_stmt)
1785     {
1786       bool handled_ops = false;
1787       tree_ret = callback_stmt (gsi, &handled_ops, wi);
1788       if (handled_ops)
1789 	return tree_ret;
1790 
1791       /* If CALLBACK_STMT did not handle operands, it should not have
1792 	 a value to return.  */
1793       gcc_assert (tree_ret == NULL);
1794 
1795       if (wi && wi->removed_stmt)
1796 	return NULL;
1797 
1798       /* Re-read stmt in case the callback changed it.  */
1799       stmt = gsi_stmt (*gsi);
1800     }
1801 
1802   /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
1803   if (callback_op)
1804     {
1805       tree_ret = walk_gimple_op (stmt, callback_op, wi);
1806       if (tree_ret)
1807 	return tree_ret;
1808     }
1809 
1810   /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
1811   switch (gimple_code (stmt))
1812     {
1813     case GIMPLE_BIND:
1814       ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1815 	                     callback_op, wi);
1816       if (ret)
1817 	return wi->callback_result;
1818       break;
1819 
1820     case GIMPLE_CATCH:
1821       ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1822 	                     callback_op, wi);
1823       if (ret)
1824 	return wi->callback_result;
1825       break;
1826 
1827     case GIMPLE_EH_FILTER:
1828       ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1829 		             callback_op, wi);
1830       if (ret)
1831 	return wi->callback_result;
1832       break;
1833 
1834     case GIMPLE_EH_ELSE:
1835       ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
1836 			     callback_stmt, callback_op, wi);
1837       if (ret)
1838 	return wi->callback_result;
1839       ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
1840 			     callback_stmt, callback_op, wi);
1841       if (ret)
1842 	return wi->callback_result;
1843       break;
1844 
1845     case GIMPLE_TRY:
1846       ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1847 	                     wi);
1848       if (ret)
1849 	return wi->callback_result;
1850 
1851       ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1852 	                     callback_op, wi);
1853       if (ret)
1854 	return wi->callback_result;
1855       break;
1856 
1857     case GIMPLE_OMP_FOR:
1858       ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1859 		             callback_op, wi);
1860       if (ret)
1861 	return wi->callback_result;
1862 
1863       /* FALL THROUGH.  */
1864     case GIMPLE_OMP_CRITICAL:
1865     case GIMPLE_OMP_MASTER:
1866     case GIMPLE_OMP_ORDERED:
1867     case GIMPLE_OMP_SECTION:
1868     case GIMPLE_OMP_PARALLEL:
1869     case GIMPLE_OMP_TASK:
1870     case GIMPLE_OMP_SECTIONS:
1871     case GIMPLE_OMP_SINGLE:
1872       ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
1873 			     callback_op, wi);
1874       if (ret)
1875 	return wi->callback_result;
1876       break;
1877 
1878     case GIMPLE_WITH_CLEANUP_EXPR:
1879       ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1880 			     callback_op, wi);
1881       if (ret)
1882 	return wi->callback_result;
1883       break;
1884 
1885     case GIMPLE_TRANSACTION:
1886       ret = walk_gimple_seq (gimple_transaction_body (stmt),
1887 			     callback_stmt, callback_op, wi);
1888       if (ret)
1889 	return wi->callback_result;
1890       break;
1891 
1892     default:
1893       gcc_assert (!gimple_has_substatements (stmt));
1894       break;
1895     }
1896 
1897   return NULL;
1898 }
1899 
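/* Illustrative sketch (not part of the original file): a minimal
   statement callback for walk_gimple_stmt.  It counts GIMPLE_ASSIGN
   statements through WI->INFO and leaves operand and substatement
   walking to the machinery above; the name count_assigns_stmt is a
   hypothetical example.  */

static tree
count_assigns_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		    struct walk_stmt_info *wi)
{
  int *count = (int *) wi->info;

  if (is_gimple_assign (gsi_stmt (*gsi)))
    (*count)++;

  /* Leaving *HANDLED_OPS_P false lets walk_gimple_stmt go on to scan
     the operands and any substatements, as documented above.  */
  *handled_ops_p = false;
  return NULL_TREE;
}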
1900 
1901 /* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */
1902 
1903 void
1904 gimple_set_body (tree fndecl, gimple_seq seq)
1905 {
1906   struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1907   if (fn == NULL)
1908     {
1909       /* If FNDECL still does not have a function structure associated
1910 	 with it, then it does not make sense for it to receive a
1911 	 GIMPLE body.  */
1912       gcc_assert (seq == NULL);
1913     }
1914   else
1915     fn->gimple_body = seq;
1916 }
1917 
1918 
1919 /* Return the body of GIMPLE statements for function FNDECL.  After the
1920    CFG pass, the function body doesn't exist anymore because it has
1921    been split up into basic blocks.  In this case, it returns
1922    NULL.  */
1923 
1924 gimple_seq
1925 gimple_body (tree fndecl)
1926 {
1927   struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1928   return fn ? fn->gimple_body : NULL;
1929 }
1930 
1931 /* Return true when FNDECL has a GIMPLE body either in unlowered
1932    or CFG form.  */
1933 bool
1934 gimple_has_body_p (tree fndecl)
1935 {
1936   struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1937   return (gimple_body (fndecl) || (fn && fn->cfg));
1938 }
1939 
1940 /* Return true if calls C1 and C2 are known to go to the same function.  */
1941 
1942 bool
1943 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1944 {
1945   if (gimple_call_internal_p (c1))
1946     return (gimple_call_internal_p (c2)
1947 	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1948   else
1949     return (gimple_call_fn (c1) == gimple_call_fn (c2)
1950 	    || (gimple_call_fndecl (c1)
1951 		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1952 }
1953 
1954 /* Detect flags from a GIMPLE_CALL.  This is just like
1955    call_expr_flags, but for gimple tuples.  */
1956 
1957 int
1958 gimple_call_flags (const_gimple stmt)
1959 {
1960   int flags;
1961   tree decl = gimple_call_fndecl (stmt);
1962 
1963   if (decl)
1964     flags = flags_from_decl_or_type (decl);
1965   else if (gimple_call_internal_p (stmt))
1966     flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1967   else
1968     flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1969 
1970   if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1971     flags |= ECF_NOTHROW;
1972 
1973   return flags;
1974 }
1975 
1976 /* Return the "fn spec" string for call STMT.  */
1977 
1978 static tree
1979 gimple_call_fnspec (const_gimple stmt)
1980 {
1981   tree type, attr;
1982 
1983   type = gimple_call_fntype (stmt);
1984   if (!type)
1985     return NULL_TREE;
1986 
1987   attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1988   if (!attr)
1989     return NULL_TREE;
1990 
1991   return TREE_VALUE (TREE_VALUE (attr));
1992 }
1993 
1994 /* Detects argument flags for argument number ARG on call STMT.  */
1995 
1996 int
1997 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1998 {
1999   tree attr = gimple_call_fnspec (stmt);
2000 
2001   if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
2002     return 0;
2003 
2004   switch (TREE_STRING_POINTER (attr)[1 + arg])
2005     {
2006     case 'x':
2007     case 'X':
2008       return EAF_UNUSED;
2009 
2010     case 'R':
2011       return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2012 
2013     case 'r':
2014       return EAF_NOCLOBBER | EAF_NOESCAPE;
2015 
2016     case 'W':
2017       return EAF_DIRECT | EAF_NOESCAPE;
2018 
2019     case 'w':
2020       return EAF_NOESCAPE;
2021 
2022     case '.':
2023     default:
2024       return 0;
2025     }
2026 }
2027 
2028 /* Detects return flags for the call STMT.  */
2029 
2030 int
2031 gimple_call_return_flags (const_gimple stmt)
2032 {
2033   tree attr;
2034 
2035   if (gimple_call_flags (stmt) & ECF_MALLOC)
2036     return ERF_NOALIAS;
2037 
2038   attr = gimple_call_fnspec (stmt);
2039   if (!attr || TREE_STRING_LENGTH (attr) < 1)
2040     return 0;
2041 
2042   switch (TREE_STRING_POINTER (attr)[0])
2043     {
2044     case '1':
2045     case '2':
2046     case '3':
2047     case '4':
2048       return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2049 
2050     case 'm':
2051       return ERF_NOALIAS;
2052 
2053     case '.':
2054     default:
2055       return 0;
2056     }
2057 }
2058 
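/* Illustrative sketch (not part of the original file): dump the decoded
   fnspec-derived flags for every slot of CALL using the accessors above.
   Assumes CALL is a GIMPLE_CALL; the function name is a hypothetical
   example.  */

static void
dump_fnspec_flags (const_gimple call)
{
  unsigned i;

  /* Character 0 of the "fn spec" string describes the return value and
     character 1 + N describes argument N, as decoded above.  */
  fprintf (stderr, "return flags: %d\n", gimple_call_return_flags (call));
  for (i = 0; i < gimple_call_num_args (call); i++)
    fprintf (stderr, "arg %u flags: %d\n", i,
	     gimple_call_arg_flags (call, i));
}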
2059 
2060 /* Return true if GS is a copy assignment.  */
2061 
2062 bool
2063 gimple_assign_copy_p (gimple gs)
2064 {
2065   return (gimple_assign_single_p (gs)
2066 	  && is_gimple_val (gimple_op (gs, 1)));
2067 }
2068 
2069 
2070 /* Return true if GS is an SSA_NAME copy assignment.  */
2071 
2072 bool
2073 gimple_assign_ssa_name_copy_p (gimple gs)
2074 {
2075   return (gimple_assign_single_p (gs)
2076 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2077 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2078 }
2079 
2080 
2081 /* Return true if GS is an assignment with a unary RHS, but the
2082    operator has no effect on the assigned value.  The logic is adapted
2083    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
2084    instances in which STRIP_NOPS was previously applied to the RHS of
2085    an assignment.
2086 
2087    NOTE: In the use cases that led to the creation of this function
2088    and of gimple_assign_single_p, it is typical to test for either
2089    condition and to proceed in the same manner.  In each case, the
2090    assigned value is represented by the single RHS operand of the
2091    assignment.  I suspect there may be cases where gimple_assign_copy_p,
2092    gimple_assign_single_p, or equivalent logic is used where a similar
2093    treatment of unary NOPs is appropriate.  */
2094 
2095 bool
2096 gimple_assign_unary_nop_p (gimple gs)
2097 {
2098   return (is_gimple_assign (gs)
2099           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2100               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2101           && gimple_assign_rhs1 (gs) != error_mark_node
2102           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2103               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2104 }
2105 
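/* Illustrative example (assuming a target where 'int' and 'unsigned int'
   share a machine mode):

     u_2 = (unsigned int) i_1;   <-- gimple_assign_unary_nop_p: true

   whereas a widening conversion such as (long) i_1 changes the mode and
   is therefore not a unary nop.  */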
2106 /* Set BB to be the basic block holding STMT.  */
2107 
2108 void
2109 gimple_set_bb (gimple stmt, basic_block bb)
2110 {
2111   stmt->gsbase.bb = bb;
2112 
2113   /* If the statement is a label, add the label to block-to-labels map
2114      so that we can speed up edge creation for GIMPLE_GOTOs.  */
2115   if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2116     {
2117       tree t;
2118       int uid;
2119 
2120       t = gimple_label_label (stmt);
2121       uid = LABEL_DECL_UID (t);
2122       if (uid == -1)
2123 	{
2124 	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
2125 	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2126 	  if (old_len <= (unsigned) uid)
2127 	    {
2128 	      unsigned new_len = 3 * uid / 2 + 1;
2129 
2130 	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2131 				     new_len);
2132 	    }
2133 	}
2134 
2135       VEC_replace (basic_block, label_to_block_map, uid, bb);
2136     }
2137 }
2138 
2139 
2140 /* Modify the RHS of the assignment pointed-to by GSI using the
2141    operands in the expression tree EXPR.
2142 
2143    NOTE: The statement pointed-to by GSI may be reallocated if it
2144    did not have enough operand slots.
2145 
2146    This function is useful to convert an existing tree expression into
2147    the flat representation used for the RHS of a GIMPLE assignment.
2148    It will reallocate memory as needed to expand or shrink the number
2149    of operand slots needed to represent EXPR.
2150 
2151    NOTE: If you find yourself building a tree and then calling this
2152    function, you are most certainly doing it the slow way.  It is much
2153    better to build a new assignment or to use the function
2154    gimple_assign_set_rhs_with_ops, which does not require an
2155    expression tree to be built.  */
2156 
2157 void
2158 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2159 {
2160   enum tree_code subcode;
2161   tree op1, op2, op3;
2162 
2163   extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2164   gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2165 }
2166 
2167 
2168 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2169    operands OP1, OP2 and OP3.
2170 
2171    NOTE: The statement pointed-to by GSI may be reallocated if it
2172    did not have enough operand slots.  */
2173 
2174 void
2175 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2176 				  tree op1, tree op2, tree op3)
2177 {
2178   unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2179   gimple stmt = gsi_stmt (*gsi);
2180 
2181   /* If the new CODE needs more operands, allocate a new statement.  */
2182   if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2183     {
2184       tree lhs = gimple_assign_lhs (stmt);
2185       gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2186       memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2187       gsi_replace (gsi, new_stmt, true);
2188       stmt = new_stmt;
2189 
2190       /* The LHS needs to be reset as this also changes the SSA name
2191 	 on the LHS.  */
2192       gimple_assign_set_lhs (stmt, lhs);
2193     }
2194 
2195   gimple_set_num_ops (stmt, new_rhs_ops + 1);
2196   gimple_set_subcode (stmt, code);
2197   gimple_assign_set_rhs1 (stmt, op1);
2198   if (new_rhs_ops > 1)
2199     gimple_assign_set_rhs2 (stmt, op2);
2200   if (new_rhs_ops > 2)
2201     gimple_assign_set_rhs3 (stmt, op3);
2202 }
2203 
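/* Illustrative sketch (not part of the original file): rewrite an
   integral "x = a - a" into "x = 0" in place using the setters above.
   The function name is a hypothetical example.  */

static void
zero_self_subtraction (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == MINUS_EXPR
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
      && operand_equal_p (gimple_assign_rhs1 (stmt),
			  gimple_assign_rhs2 (stmt), 0))
    {
      tree zero = build_int_cst (TREE_TYPE (gimple_assign_lhs (stmt)), 0);

      /* The setter may reallocate the statement, so re-read it from
	 GSI before touching it again.  */
      gimple_assign_set_rhs_from_tree (gsi, zero);
      update_stmt (gsi_stmt (*gsi));
    }
}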
2204 
2205 /* Return the LHS of a statement that performs an assignment,
2206    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
2207    for a call to a function that returns no value, or for a
2208    statement other than an assignment or a call.  */
2209 
2210 tree
2211 gimple_get_lhs (const_gimple stmt)
2212 {
2213   enum gimple_code code = gimple_code (stmt);
2214 
2215   if (code == GIMPLE_ASSIGN)
2216     return gimple_assign_lhs (stmt);
2217   else if (code == GIMPLE_CALL)
2218     return gimple_call_lhs (stmt);
2219   else
2220     return NULL_TREE;
2221 }
2222 
2223 
2224 /* Set the LHS of a statement that performs an assignment,
2225    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
2226 
2227 void
2228 gimple_set_lhs (gimple stmt, tree lhs)
2229 {
2230   enum gimple_code code = gimple_code (stmt);
2231 
2232   if (code == GIMPLE_ASSIGN)
2233     gimple_assign_set_lhs (stmt, lhs);
2234   else if (code == GIMPLE_CALL)
2235     gimple_call_set_lhs (stmt, lhs);
2236   else
2237     gcc_unreachable ();
2238 }
2239 
2240 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2241    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2242    expression with a different value.
2243 
2244    This will update any annotations (say debug bind stmts) referring
2245    to the original LHS, so that they use the RHS instead.  This is
2246    done even if NLHS and LHS are the same, for it is understood that
2247    the RHS will be modified afterwards, and NLHS will not be assigned
2248    an equivalent value.
2249 
2250    Adjusting any non-annotation uses of the LHS, if needed, is a
2251    responsibility of the caller.
2252 
2253    The effect of this call should be pretty much the same as that of
2254    inserting a copy of STMT before STMT, and then removing the
2255    original stmt, at which time gsi_remove() would have updated
2256    annotations, but using this function saves all the inserting,
2257    copying and removing.  */
2258 
2259 void
2260 gimple_replace_lhs (gimple stmt, tree nlhs)
2261 {
2262   if (MAY_HAVE_DEBUG_STMTS)
2263     {
2264       tree lhs = gimple_get_lhs (stmt);
2265 
2266       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2267 
2268       insert_debug_temp_for_var_def (NULL, lhs);
2269     }
2270 
2271   gimple_set_lhs (stmt, nlhs);
2272 }
2273 
2274 /* Return a deep copy of statement STMT.  All the operands from STMT
2275    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
2276    and VUSE operand arrays are set to empty in the new copy.  */
2277 
2278 gimple
2279 gimple_copy (gimple stmt)
2280 {
2281   enum gimple_code code = gimple_code (stmt);
2282   unsigned num_ops = gimple_num_ops (stmt);
2283   gimple copy = gimple_alloc (code, num_ops);
2284   unsigned i;
2285 
2286   /* Shallow copy all the fields from STMT.  */
2287   memcpy (copy, stmt, gimple_size (code));
2288 
2289   /* If STMT has sub-statements, deep-copy them as well.  */
2290   if (gimple_has_substatements (stmt))
2291     {
2292       gimple_seq new_seq;
2293       tree t;
2294 
2295       switch (gimple_code (stmt))
2296 	{
2297 	case GIMPLE_BIND:
2298 	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2299 	  gimple_bind_set_body (copy, new_seq);
2300 	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2301 	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
2302 	  break;
2303 
2304 	case GIMPLE_CATCH:
2305 	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2306 	  gimple_catch_set_handler (copy, new_seq);
2307 	  t = unshare_expr (gimple_catch_types (stmt));
2308 	  gimple_catch_set_types (copy, t);
2309 	  break;
2310 
2311 	case GIMPLE_EH_FILTER:
2312 	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2313 	  gimple_eh_filter_set_failure (copy, new_seq);
2314 	  t = unshare_expr (gimple_eh_filter_types (stmt));
2315 	  gimple_eh_filter_set_types (copy, t);
2316 	  break;
2317 
2318 	case GIMPLE_EH_ELSE:
2319 	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2320 	  gimple_eh_else_set_n_body (copy, new_seq);
2321 	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2322 	  gimple_eh_else_set_e_body (copy, new_seq);
2323 	  break;
2324 
2325 	case GIMPLE_TRY:
2326 	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2327 	  gimple_try_set_eval (copy, new_seq);
2328 	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2329 	  gimple_try_set_cleanup (copy, new_seq);
2330 	  break;
2331 
2332 	case GIMPLE_OMP_FOR:
2333 	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2334 	  gimple_omp_for_set_pre_body (copy, new_seq);
2335 	  t = unshare_expr (gimple_omp_for_clauses (stmt));
2336 	  gimple_omp_for_set_clauses (copy, t);
2337 	  copy->gimple_omp_for.iter
2338 	    = ggc_alloc_vec_gimple_omp_for_iter
2339 	    (gimple_omp_for_collapse (stmt));
2340 	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2341 	    {
2342 	      gimple_omp_for_set_cond (copy, i,
2343 				       gimple_omp_for_cond (stmt, i));
2344 	      gimple_omp_for_set_index (copy, i,
2345 					gimple_omp_for_index (stmt, i));
2346 	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
2347 	      gimple_omp_for_set_initial (copy, i, t);
2348 	      t = unshare_expr (gimple_omp_for_final (stmt, i));
2349 	      gimple_omp_for_set_final (copy, i, t);
2350 	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
2351 	      gimple_omp_for_set_incr (copy, i, t);
2352 	    }
2353 	  goto copy_omp_body;
2354 
2355 	case GIMPLE_OMP_PARALLEL:
2356 	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2357 	  gimple_omp_parallel_set_clauses (copy, t);
2358 	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2359 	  gimple_omp_parallel_set_child_fn (copy, t);
2360 	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2361 	  gimple_omp_parallel_set_data_arg (copy, t);
2362 	  goto copy_omp_body;
2363 
2364 	case GIMPLE_OMP_TASK:
2365 	  t = unshare_expr (gimple_omp_task_clauses (stmt));
2366 	  gimple_omp_task_set_clauses (copy, t);
2367 	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
2368 	  gimple_omp_task_set_child_fn (copy, t);
2369 	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
2370 	  gimple_omp_task_set_data_arg (copy, t);
2371 	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2372 	  gimple_omp_task_set_copy_fn (copy, t);
2373 	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
2374 	  gimple_omp_task_set_arg_size (copy, t);
2375 	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
2376 	  gimple_omp_task_set_arg_align (copy, t);
2377 	  goto copy_omp_body;
2378 
2379 	case GIMPLE_OMP_CRITICAL:
2380 	  t = unshare_expr (gimple_omp_critical_name (stmt));
2381 	  gimple_omp_critical_set_name (copy, t);
2382 	  goto copy_omp_body;
2383 
2384 	case GIMPLE_OMP_SECTIONS:
2385 	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
2386 	  gimple_omp_sections_set_clauses (copy, t);
2387 	  t = unshare_expr (gimple_omp_sections_control (stmt));
2388 	  gimple_omp_sections_set_control (copy, t);
2389 	  /* FALLTHRU  */
2390 
2391 	case GIMPLE_OMP_SINGLE:
2392 	case GIMPLE_OMP_SECTION:
2393 	case GIMPLE_OMP_MASTER:
2394 	case GIMPLE_OMP_ORDERED:
2395 	copy_omp_body:
2396 	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2397 	  gimple_omp_set_body (copy, new_seq);
2398 	  break;
2399 
2400 	case GIMPLE_TRANSACTION:
2401 	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2402 	  gimple_transaction_set_body (copy, new_seq);
2403 	  break;
2404 
2405 	case GIMPLE_WITH_CLEANUP_EXPR:
2406 	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2407 	  gimple_wce_set_cleanup (copy, new_seq);
2408 	  break;
2409 
2410 	default:
2411 	  gcc_unreachable ();
2412 	}
2413     }
2414 
2415   /* Make copy of operands.  */
2416   if (num_ops > 0)
2417     {
2418       for (i = 0; i < num_ops; i++)
2419 	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2420 
2421       /* Clear out SSA operand vectors on COPY.  */
2422       if (gimple_has_ops (stmt))
2423 	{
2424 	  gimple_set_def_ops (copy, NULL);
2425 	  gimple_set_use_ops (copy, NULL);
2426 	}
2427 
2428       if (gimple_has_mem_ops (stmt))
2429 	{
2430 	  gimple_set_vdef (copy, gimple_vdef (stmt));
2431 	  gimple_set_vuse (copy, gimple_vuse (stmt));
2432 	}
2433 
2434       /* SSA operands need to be updated.  */
2435       gimple_set_modified (copy, true);
2436     }
2437 
2438   return copy;
2439 }
2440 
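/* Illustrative sketch (not part of the original file): duplicate the
   statement at GSI and insert the copy immediately before it.  Since
   gimple_copy marks the copy as modified, the SSA operand scanner
   (e.g. update_stmt) must still process it; in SSA form the copy's
   definitions would additionally need fresh SSA names, so this sketch
   assumes pre-SSA IL.  The name is a hypothetical example.  */

static void
duplicate_stmt_before (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
}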
2441 
2442 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2443    a MODIFIED field.  */
2444 
2445 void
2446 gimple_set_modified (gimple s, bool modifiedp)
2447 {
2448   if (gimple_has_ops (s))
2449     s->gsbase.modified = (unsigned) modifiedp;
2450 }
2451 
2452 
2453 /* Return true if statement S has side-effects.  We consider a
2454    statement to have side effects if:
2455 
2456    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2457    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
2458 
2459 bool
2460 gimple_has_side_effects (const_gimple s)
2461 {
2462   if (is_gimple_debug (s))
2463     return false;
2464 
2465   /* Volatile operands, including volatile call arguments, are
2466      summarized by the volatile-ops flag checked just below, so no
2467      per-operand scan is needed here.  */
2468   if (gimple_has_volatile_ops (s))
2469     return true;
2470 
2471   if (gimple_code (s) == GIMPLE_ASM
2472       && gimple_asm_volatile_p (s))
2473     return true;
2474 
2475   if (is_gimple_call (s))
2476     {
2477       int flags = gimple_call_flags (s);
2478 
2479       /* An infinite loop is considered a side effect.  */
2480       if (!(flags & (ECF_CONST | ECF_PURE))
2481 	  || (flags & ECF_LOOPING_CONST_OR_PURE))
2482 	return true;
2483 
2484       return false;
2485     }
2486 
2487   return false;
2488 }
2489 
2490 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2491    Return true if S can trap.  When INCLUDE_MEM is true, check whether
2492    the memory operations could trap.  When INCLUDE_STORES is true and
2493    S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */
2494 
2495 bool
2496 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2497 {
2498   tree t, div = NULL_TREE;
2499   enum tree_code op;
2500 
2501   if (include_mem)
2502     {
2503       unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2504 
2505       for (i = start; i < gimple_num_ops (s); i++)
2506 	if (tree_could_trap_p (gimple_op (s, i)))
2507 	  return true;
2508     }
2509 
2510   switch (gimple_code (s))
2511     {
2512     case GIMPLE_ASM:
2513       return gimple_asm_volatile_p (s);
2514 
2515     case GIMPLE_CALL:
2516       t = gimple_call_fndecl (s);
2517       /* Assume that calls to weak functions may trap.  */
2518       if (!t || !DECL_P (t) || DECL_WEAK (t))
2519 	return true;
2520       return false;
2521 
2522     case GIMPLE_ASSIGN:
2523       t = gimple_expr_type (s);
2524       op = gimple_assign_rhs_code (s);
2525       if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2526 	div = gimple_assign_rhs2 (s);
2527       return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2528 				      (INTEGRAL_TYPE_P (t)
2529 				       && TYPE_OVERFLOW_TRAPS (t)),
2530 				      div));
2531 
2532     default:
2533       break;
2534     }
2535 
2536   return false;
2537 }
2538 
2539 /* Return true if statement S can trap.  */
2540 
2541 bool
2542 gimple_could_trap_p (gimple s)
2543 {
2544   return gimple_could_trap_p_1 (s, true, true);
2545 }
2546 
2547 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2548 
2549 bool
2550 gimple_assign_rhs_could_trap_p (gimple s)
2551 {
2552   gcc_assert (is_gimple_assign (s));
2553   return gimple_could_trap_p_1 (s, true, false);
2554 }
2555 
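/* Illustrative sketch (not part of the original file): a conservative
   test for whether STMT may be executed speculatively, combining the
   predicates above.  The name is a hypothetical example.  */

static bool
safe_to_speculate_p (gimple stmt)
{
  /* A trapping statement (e.g. an integer division by a non-constant
     divisor) or one with side effects must keep its position.  */
  return !gimple_has_side_effects (stmt) && !gimple_could_trap_p (stmt);
}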
2556 
2557 /* Dump allocation statistics for the GIMPLE statements generated.  */
2558 
2559 void
2560 dump_gimple_statistics (void)
2561 {
2562 #ifdef GATHER_STATISTICS
2563   int i, total_tuples = 0, total_bytes = 0;
2564 
2565   fprintf (stderr, "\nGIMPLE statements\n");
2566   fprintf (stderr, "Kind                   Stmts      Bytes\n");
2567   fprintf (stderr, "---------------------------------------\n");
2568   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2569     {
2570       fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2571 	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2572       total_tuples += gimple_alloc_counts[i];
2573       total_bytes += gimple_alloc_sizes[i];
2574     }
2575   fprintf (stderr, "---------------------------------------\n");
2576   fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2577   fprintf (stderr, "---------------------------------------\n");
2578 #else
2579   fprintf (stderr, "No gimple statistics\n");
2580 #endif
2581 }
2582 
2583 
2584 /* Return the number of operands needed on the RHS of a GIMPLE
2585    assignment for an expression with tree code CODE.  */
2586 
2587 unsigned
2588 get_gimple_rhs_num_ops (enum tree_code code)
2589 {
2590   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2591 
2592   if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2593     return 1;
2594   else if (rhs_class == GIMPLE_BINARY_RHS)
2595     return 2;
2596   else if (rhs_class == GIMPLE_TERNARY_RHS)
2597     return 3;
2598   else
2599     gcc_unreachable ();
2600 }
2601 
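/* Illustrative examples of the mapping above (and of the table that
   follows):

     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1   GIMPLE_UNARY_RHS
     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2   GIMPLE_BINARY_RHS
     get_gimple_rhs_num_ops (COND_EXPR)   == 3   GIMPLE_TERNARY_RHS
     get_gimple_rhs_num_ops (SSA_NAME)    == 1   GIMPLE_SINGLE_RHS  */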
2602 #define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
2603   (unsigned char)							    \
2604   ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
2605    : ((TYPE) == tcc_binary						    \
2606       || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
2607    : ((TYPE) == tcc_constant						    \
2608       || (TYPE) == tcc_declaration					    \
2609       || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
2610    : ((SYM) == TRUTH_AND_EXPR						    \
2611       || (SYM) == TRUTH_OR_EXPR						    \
2612       || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
2613    : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
2614    : ((SYM) == COND_EXPR						    \
2615       || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
2616       || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
2617       || (SYM) == DOT_PROD_EXPR						    \
2618       || (SYM) == REALIGN_LOAD_EXPR					    \
2619       || (SYM) == VEC_COND_EXPR						    \
2620       || (SYM) == VEC_PERM_EXPR                                             \
2621       || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
2622    : ((SYM) == CONSTRUCTOR						    \
2623       || (SYM) == OBJ_TYPE_REF						    \
2624       || (SYM) == ASSERT_EXPR						    \
2625       || (SYM) == ADDR_EXPR						    \
2626       || (SYM) == WITH_SIZE_EXPR					    \
2627       || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
2628    : GIMPLE_INVALID_RHS),
2629 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2630 
2631 const unsigned char gimple_rhs_class_table[] = {
2632 #include "all-tree.def"
2633 };
2634 
2635 #undef DEFTREECODE
2636 #undef END_OF_BASE_TREE_CODES
2637 
2638 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */
2639 
2640 /* Validation of GIMPLE expressions.  */
2641 
2642 /* Returns true iff T is a valid RHS for an assignment to a renamed
2643    user -- or front-end generated artificial -- variable.  */
2644 
2645 bool
2646 is_gimple_reg_rhs (tree t)
2647 {
2648   return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2649 }
2650 
2651 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2652    LHS, or for a call argument.  */
2653 
2654 bool
2655 is_gimple_mem_rhs (tree t)
2656 {
2657   /* If we're dealing with a renamable type, either source or dest must be
2658      a renamed variable.  */
2659   if (is_gimple_reg_type (TREE_TYPE (t)))
2660     return is_gimple_val (t);
2661   else
2662     return is_gimple_val (t) || is_gimple_lvalue (t);
2663 }
2664 
2665 /*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */
2666 
2667 bool
2668 is_gimple_lvalue (tree t)
2669 {
2670   return (is_gimple_addressable (t)
2671 	  || TREE_CODE (t) == WITH_SIZE_EXPR
2672 	  /* These are complex lvalues, but don't have addresses, so they
2673 	     go here.  */
2674 	  || TREE_CODE (t) == BIT_FIELD_REF);
2675 }
2676 
2677 /*  Return true if T is a GIMPLE condition.  */
2678 
2679 bool
2680 is_gimple_condexpr (tree t)
2681 {
2682   return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2683 				&& !tree_could_throw_p (t)
2684 				&& is_gimple_val (TREE_OPERAND (t, 0))
2685 				&& is_gimple_val (TREE_OPERAND (t, 1))));
2686 }
2687 
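/* Illustrative examples (operands assumed to be in GIMPLE form):

     a_1               <-- a gimple value, valid condition
     a_1 < b_2         <-- comparison of two gimple values, valid
     a_1 < b_2 + 1     <-- operand 1 is not a gimple value, not valid  */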
2688 /*  Return true if T is something whose address can be taken.  */
2689 
2690 bool
2691 is_gimple_addressable (tree t)
2692 {
2693   return (is_gimple_id (t) || handled_component_p (t)
2694 	  || TREE_CODE (t) == MEM_REF);
2695 }
2696 
2697 /* Return true if T is a valid gimple constant.  */
2698 
2699 bool
2700 is_gimple_constant (const_tree t)
2701 {
2702   switch (TREE_CODE (t))
2703     {
2704     case INTEGER_CST:
2705     case REAL_CST:
2706     case FIXED_CST:
2707     case STRING_CST:
2708     case COMPLEX_CST:
2709     case VECTOR_CST:
2710       return true;
2711 
2712     /* Vector constant constructors are gimple invariant.  */
2713     case CONSTRUCTOR:
2714       if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2715 	return TREE_CONSTANT (t);
2716       else
2717 	return false;
2718 
2719     default:
2720       return false;
2721     }
2722 }
2723 
2724 /* Return true if T is a gimple address.  */
2725 
2726 bool
2727 is_gimple_address (const_tree t)
2728 {
2729   tree op;
2730 
2731   if (TREE_CODE (t) != ADDR_EXPR)
2732     return false;
2733 
2734   op = TREE_OPERAND (t, 0);
2735   while (handled_component_p (op))
2736     {
2737       if ((TREE_CODE (op) == ARRAY_REF
2738 	   || TREE_CODE (op) == ARRAY_RANGE_REF)
2739 	  && !is_gimple_val (TREE_OPERAND (op, 1)))
2740 	    return false;
2741 
2742       op = TREE_OPERAND (op, 0);
2743     }
2744 
2745   if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2746     return true;
2747 
2748   switch (TREE_CODE (op))
2749     {
2750     case PARM_DECL:
2751     case RESULT_DECL:
2752     case LABEL_DECL:
2753     case FUNCTION_DECL:
2754     case VAR_DECL:
2755     case CONST_DECL:
2756       return true;
2757 
2758     default:
2759       return false;
2760     }
2761 }
2762 
2763 /* Return true if T is a gimple invariant address.  */
2764 
2765 bool
2766 is_gimple_invariant_address (const_tree t)
2767 {
2768   const_tree op;
2769 
2770   if (TREE_CODE (t) != ADDR_EXPR)
2771     return false;
2772 
2773   op = strip_invariant_refs (TREE_OPERAND (t, 0));
2774   if (!op)
2775     return false;
2776 
2777   if (TREE_CODE (op) == MEM_REF)
2778     {
2779       const_tree op0 = TREE_OPERAND (op, 0);
2780       return (TREE_CODE (op0) == ADDR_EXPR
2781 	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2782 		  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2783     }
2784 
2785   return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2786 }
2787 
2788 /* Return true if T is a gimple invariant address at IPA level
2789    (so addresses of variables on the stack are not allowed).  */
2790 
2791 bool
2792 is_gimple_ip_invariant_address (const_tree t)
2793 {
2794   const_tree op;
2795 
2796   if (TREE_CODE (t) != ADDR_EXPR)
2797     return false;
2798 
2799   op = strip_invariant_refs (TREE_OPERAND (t, 0));
2800   if (!op)
2801     return false;
2802 
2803   if (TREE_CODE (op) == MEM_REF)
2804     {
2805       const_tree op0 = TREE_OPERAND (op, 0);
2806       return (TREE_CODE (op0) == ADDR_EXPR
2807 	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2808 		  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2809     }
2810 
2811   return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2812 }
2813 
2814 /* Return true if T is a GIMPLE minimal invariant.  It's a restricted
2815    form of function invariant.  */
2816 
2817 bool
2818 is_gimple_min_invariant (const_tree t)
2819 {
2820   if (TREE_CODE (t) == ADDR_EXPR)
2821     return is_gimple_invariant_address (t);
2822 
2823   return is_gimple_constant (t);
2824 }
2825 
2826 /* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
2827    form of gimple minimal invariant.  */
2828 
2829 bool
2830 is_gimple_ip_invariant (const_tree t)
2831 {
2832   if (TREE_CODE (t) == ADDR_EXPR)
2833     return is_gimple_ip_invariant_address (t);
2834 
2835   return is_gimple_constant (t);
2836 }
2837 
2838 /* Return true if T looks like a valid GIMPLE statement.  */
2839 
2840 bool
2841 is_gimple_stmt (tree t)
2842 {
2843   const enum tree_code code = TREE_CODE (t);
2844 
2845   switch (code)
2846     {
2847     case NOP_EXPR:
2848       /* The only valid NOP_EXPR is the empty statement.  */
2849       return IS_EMPTY_STMT (t);
2850 
2851     case BIND_EXPR:
2852     case COND_EXPR:
2853       /* These are only valid if they're void.  */
2854       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2855 
2856     case SWITCH_EXPR:
2857     case GOTO_EXPR:
2858     case RETURN_EXPR:
2859     case LABEL_EXPR:
2860     case CASE_LABEL_EXPR:
2861     case TRY_CATCH_EXPR:
2862     case TRY_FINALLY_EXPR:
2863     case EH_FILTER_EXPR:
2864     case CATCH_EXPR:
2865     case ASM_EXPR:
2866     case STATEMENT_LIST:
2867     case OMP_PARALLEL:
2868     case OMP_FOR:
2869     case OMP_SECTIONS:
2870     case OMP_SECTION:
2871     case OMP_SINGLE:
2872     case OMP_MASTER:
2873     case OMP_ORDERED:
2874     case OMP_CRITICAL:
2875     case OMP_TASK:
2876       /* These are always void.  */
2877       return true;
2878 
2879     case CALL_EXPR:
2880     case MODIFY_EXPR:
2881     case PREDICT_EXPR:
2882       /* These are valid regardless of their type.  */
2883       return true;
2884 
2885     default:
2886       return false;
2887     }
2888 }
2889 
2890 /* Return true if T is a variable.  */
2891 
2892 bool
2893 is_gimple_variable (tree t)
2894 {
2895   return (TREE_CODE (t) == VAR_DECL
2896 	  || TREE_CODE (t) == PARM_DECL
2897 	  || TREE_CODE (t) == RESULT_DECL
2898 	  || TREE_CODE (t) == SSA_NAME);
2899 }
2900 
2901 /*  Return true if T is a GIMPLE identifier (something with an address).  */
2902 
2903 bool
2904 is_gimple_id (tree t)
2905 {
2906   return (is_gimple_variable (t)
2907 	  || TREE_CODE (t) == FUNCTION_DECL
2908 	  || TREE_CODE (t) == LABEL_DECL
2909 	  || TREE_CODE (t) == CONST_DECL
2910 	  /* Allow string constants, since they are addressable.  */
2911 	  || TREE_CODE (t) == STRING_CST);
2912 }
2913 
2914 /* Return true if T is a non-aggregate register variable.  */
2915 
2916 bool
2917 is_gimple_reg (tree t)
2918 {
2919   if (TREE_CODE (t) == SSA_NAME)
2920     t = SSA_NAME_VAR (t);
2921 
2922   if (!is_gimple_variable (t))
2923     return false;
2924 
2925   if (!is_gimple_reg_type (TREE_TYPE (t)))
2926     return false;
2927 
2928   /* A volatile decl is not acceptable because we can't reuse it as
2929      needed.  We need to copy it into a temp first.  */
2930   if (TREE_THIS_VOLATILE (t))
2931     return false;
2932 
2933   /* We define "registers" as things that can be renamed as needed,
2934      which with our infrastructure does not apply to memory.  */
2935   if (needs_to_live_in_memory (t))
2936     return false;
2937 
2938   /* Hard register variables are an interesting case.  For those that
2939      are call-clobbered, we don't know where all the calls are, since
2940      we don't (want to) take into account which operations will turn
2941      into libcalls at the rtl level.  For those that are call-saved,
2942      we don't currently model the fact that calls may in fact change
2943      global hard registers, nor do we examine ASM_CLOBBERS at the tree
2944      level, and so miss variable changes such clobbers might imply.  All around,
2945      it seems safest to not do too much optimization with these at the
2946      tree level at all.  We'll have to rely on the rtl optimizers to
2947      clean this up, as there we've got all the appropriate bits exposed.  */
2948   if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2949     return false;
2950 
2951   /* Complex and vector values must have been put into SSA-like form.
2952      That is, no assignments to the individual components.  */
2953   if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2954       || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2955     return DECL_GIMPLE_REG_P (t);
2956 
2957   return true;
2958 }
2959 
2960 
2961 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */
2962 
2963 bool
2964 is_gimple_val (tree t)
2965 {
2966   /* Make loads from volatiles and memory vars explicit.  */
2967   if (is_gimple_variable (t)
2968       && is_gimple_reg_type (TREE_TYPE (t))
2969       && !is_gimple_reg (t))
2970     return false;
2971 
2972   return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2973 }
2974 
2975 /* Similarly, but accept hard registers as inputs to asm statements.  */
2976 
2977 bool
2978 is_gimple_asm_val (tree t)
2979 {
2980   if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2981     return true;
2982 
2983   return is_gimple_val (t);
2984 }
2985 
2986 /* Return true if T is a GIMPLE minimal lvalue.  */
2987 
2988 bool
2989 is_gimple_min_lval (tree t)
2990 {
2991   if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2992     return false;
2993   return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2994 }
2995 
2996 /* Return true if T is a valid function operand of a CALL_EXPR.  */
2997 
2998 bool
2999 is_gimple_call_addr (tree t)
3000 {
3001   return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3002 }
3003 
3004 /* Return true if T is a valid address operand of a MEM_REF.  */
3005 
3006 bool
3007 is_gimple_mem_ref_addr (tree t)
3008 {
3009   return (is_gimple_reg (t)
3010 	  || TREE_CODE (t) == INTEGER_CST
3011 	  || (TREE_CODE (t) == ADDR_EXPR
3012 	      && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3013 		  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3014 }
3015 
3016 
3017 /* Given a memory reference expression T, return its base address.
3018    The base address of a memory reference expression is the main
3019    object being referenced.  For instance, the base address for
3020    'array[i].fld[j]' is 'array'.  You can think of this as stripping
3021    away the offset part from a memory address.
3022 
3023    This function calls handled_component_p to strip away all the inner
3024    parts of the memory reference until it reaches the base object.  */
3025 
3026 tree
3027 get_base_address (tree t)
3028 {
3029   while (handled_component_p (t))
3030     t = TREE_OPERAND (t, 0);
3031 
3032   if ((TREE_CODE (t) == MEM_REF
3033        || TREE_CODE (t) == TARGET_MEM_REF)
3034       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3035     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3036 
3037   if (TREE_CODE (t) == SSA_NAME
3038       || DECL_P (t)
3039       || TREE_CODE (t) == STRING_CST
3040       || TREE_CODE (t) == CONSTRUCTOR
3041       || INDIRECT_REF_P (t)
3042       || TREE_CODE (t) == MEM_REF
3043       || TREE_CODE (t) == TARGET_MEM_REF)
3044     return t;
3045   else
3046     return NULL_TREE;
3047 }
3048 
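/* Illustrative examples:

     get_base_address (array[i_1].fld[j_2])  == array
     get_base_address (MEM[p_3])             == MEM[p_3]
     get_base_address (a_4 + 1)              == NULL_TREE  (not a reference)  */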
3049 void
3050 recalculate_side_effects (tree t)
3051 {
3052   enum tree_code code = TREE_CODE (t);
3053   int len = TREE_OPERAND_LENGTH (t);
3054   int i;
3055 
3056   switch (TREE_CODE_CLASS (code))
3057     {
3058     case tcc_expression:
3059       switch (code)
3060 	{
3061 	case INIT_EXPR:
3062 	case MODIFY_EXPR:
3063 	case VA_ARG_EXPR:
3064 	case PREDECREMENT_EXPR:
3065 	case PREINCREMENT_EXPR:
3066 	case POSTDECREMENT_EXPR:
3067 	case POSTINCREMENT_EXPR:
3068 	  /* All of these have side-effects, no matter what their
3069 	     operands are.  */
3070 	  return;
3071 
3072 	default:
3073 	  break;
3074 	}
3075       /* Fall through.  */
3076 
3077     case tcc_comparison:  /* a comparison expression */
3078     case tcc_unary:       /* a unary arithmetic expression */
3079     case tcc_binary:      /* a binary arithmetic expression */
3080     case tcc_reference:   /* a reference */
3081     case tcc_vl_exp:      /* a function call */
3082       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3083       for (i = 0; i < len; ++i)
3084 	{
3085 	  tree op = TREE_OPERAND (t, i);
3086 	  if (op && TREE_SIDE_EFFECTS (op))
3087 	    TREE_SIDE_EFFECTS (t) = 1;
3088 	}
3089       break;
3090 
3091     case tcc_constant:
3092       /* No side-effects.  */
3093       return;
3094 
3095     default:
3096       gcc_unreachable ();
3097    }
3098 }
3099 
3100 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
3101    a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE
3102    if we failed to create one.  */
3103 
3104 tree
3105 canonicalize_cond_expr_cond (tree t)
3106 {
3107   /* Strip conversions around boolean operations.  */
3108   if (CONVERT_EXPR_P (t)
3109       && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3110           || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3111 	     == BOOLEAN_TYPE))
3112     t = TREE_OPERAND (t, 0);
3113 
3114   /* For !x use x == 0.  */
3115   if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3116     {
3117       tree top0 = TREE_OPERAND (t, 0);
3118       t = build2 (EQ_EXPR, TREE_TYPE (t),
3119 		  top0, build_int_cst (TREE_TYPE (top0), 0));
3120     }
3121   /* For cmp ? 1 : 0 use cmp.  */
3122   else if (TREE_CODE (t) == COND_EXPR
3123 	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3124 	   && integer_onep (TREE_OPERAND (t, 1))
3125 	   && integer_zerop (TREE_OPERAND (t, 2)))
3126     {
3127       tree top0 = TREE_OPERAND (t, 0);
3128       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3129 		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3130     }
3131 
3132   if (is_gimple_condexpr (t))
3133     return t;
3134 
3135   return NULL_TREE;
3136 }
3137 
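/* Illustrative examples of the canonicalizations performed above:

     (int) (a_1 < b_2)       becomes  a_1 < b_2
     !x_3                    becomes  x_3 == 0
     a_1 < b_2 ? 1 : 0       becomes  a_1 < b_2  */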
3138 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3139    the positions marked by the set ARGS_TO_SKIP.  */
3140 
3141 gimple
3142 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3143 {
3144   int i;
3145   int nargs = gimple_call_num_args (stmt);
3146   VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3147   gimple new_stmt;
3148 
3149   for (i = 0; i < nargs; i++)
3150     if (!bitmap_bit_p (args_to_skip, i))
3151       VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3152 
3153   if (gimple_call_internal_p (stmt))
3154     new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3155 					       vargs);
3156   else
3157     new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3158   VEC_free (tree, heap, vargs);
3159   if (gimple_call_lhs (stmt))
3160     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3161 
3162   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3163   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3164 
3165   gimple_set_block (new_stmt, gimple_block (stmt));
3166   if (gimple_has_location (stmt))
3167     gimple_set_location (new_stmt, gimple_location (stmt));
3168   gimple_call_copy_flags (new_stmt, stmt);
3169   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3170 
3171   gimple_set_modified (new_stmt, true);
3172 
3173   return new_stmt;
3174 }
3175 
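/* Illustrative sketch (not part of the original file): build a copy of
   CALL with its first argument dropped.  Assumes bitmap.h's default
   allocation interface; the function name is a hypothetical example.  */

static gimple
copy_call_skipping_first_arg (gimple call)
{
  bitmap skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (skip, 0);
  new_call = gimple_call_copy_skip_args (call, skip);
  BITMAP_FREE (skip);
  return new_call;
}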
3176 
3177 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3178 
3179 static hashval_t gimple_type_hash (const void *);
3180 
3181 /* Structure used to maintain a cache of some type pairs compared by
3182    gimple_types_compatible_p when comparing aggregate types.  There are
3183    three possible values for SAME_P:
3184 
3185    	-2: The pair (T1, T2) has just been inserted in the table.
3186 	 0: T1 and T2 are different types.
3187 	 1: T1 and T2 are the same type.
3188 
3189    The two elements in the SAME_P array are indexed by the comparison
3190    mode gtc_mode.  */
3191 
3192 struct type_pair_d
3193 {
3194   unsigned int uid1;
3195   unsigned int uid2;
3196   signed char same_p[2];
3197 };
3198 typedef struct type_pair_d *type_pair_t;
3199 DEF_VEC_P(type_pair_t);
3200 DEF_VEC_ALLOC_P(type_pair_t,heap);
3201 
3202 #define GIMPLE_TYPE_PAIR_SIZE 16381
3203 struct type_pair_d *type_pair_cache;
3204 
3205 
3206 /* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert
3207    a new entry if none existed.  */
3208 
3209 static inline type_pair_t
3210 lookup_type_pair (tree t1, tree t2)
3211 {
3212   unsigned int index;
3213   unsigned int uid1, uid2;
3214 
3215   if (type_pair_cache == NULL)
3216     type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3217 
3218   if (TYPE_UID (t1) < TYPE_UID (t2))
3219     {
3220       uid1 = TYPE_UID (t1);
3221       uid2 = TYPE_UID (t2);
3222     }
3223   else
3224     {
3225       uid1 = TYPE_UID (t2);
3226       uid2 = TYPE_UID (t1);
3227     }
3228   gcc_checking_assert (uid1 != uid2);
3229 
3230   /* Using iterative_hash_hashval_t would imply function calls; we
3231      know that UIDs are in a limited range, so a simple hash suffices.  */
3232   index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3233 	   % GIMPLE_TYPE_PAIR_SIZE);
3234   if (type_pair_cache [index].uid1 == uid1
3235       && type_pair_cache [index].uid2 == uid2)
3236     return &type_pair_cache[index];
3237 
3238   type_pair_cache [index].uid1 = uid1;
3239   type_pair_cache [index].uid2 = uid2;
3240   type_pair_cache [index].same_p[0] = -2;
3241   type_pair_cache [index].same_p[1] = -2;
3242 
3243   return &type_pair_cache[index];
3244 }
3245 
3246 /* Per pointer state for the SCC finding.  The on_sccstack flag
3247    is not strictly required; it is true when there is no hash value
3248    recorded for the type and false otherwise.  But querying that
3249    is slower.  */
3250 
3251 struct sccs
3252 {
3253   unsigned int dfsnum;
3254   unsigned int low;
3255   bool on_sccstack;
3256   union {
3257     hashval_t hash;
3258     signed char same_p;
3259   } u;
3260 };
3261 
3262 static unsigned int next_dfs_num;
3263 static unsigned int gtc_next_dfs_num;
3264 
3265 
3266 /* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */
3267 
3268 typedef struct GTY(()) gimple_type_leader_entry_s {
3269   tree type;
3270   tree leader;
3271 } gimple_type_leader_entry;
3272 
3273 #define GIMPLE_TYPE_LEADER_SIZE 16381
3274 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3275   gimple_type_leader_entry *gimple_type_leader;
3276 
3277 /* Lookup an existing leader for T and return it or NULL_TREE, if
3278    there is none in the cache.  */
3279 
3280 static inline tree
3281 gimple_lookup_type_leader (tree t)
3282 {
3283   gimple_type_leader_entry *leader;
3284 
3285   if (!gimple_type_leader)
3286     return NULL_TREE;
3287 
3288   leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3289   if (leader->type != t)
3290     return NULL_TREE;
3291 
3292   return leader->leader;
3293 }
3294 
3295 /* Return true if T1 and T2 have the same name.  Two types with no
3296    name at all are considered to have the same name; an unnamed type
3297    never matches a named one.  */
3298 
3299 static bool
3300 compare_type_names_p (tree t1, tree t2)
3301 {
3302   tree name1 = TYPE_NAME (t1);
3303   tree name2 = TYPE_NAME (t2);
3304 
3305   if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
3306     return false;
3307 
3308   if (name1 == NULL_TREE)
3309     return true;
3310 
3311   /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
3312   if (TREE_CODE (name1) != TREE_CODE (name2))
3313     return false;
3314 
3315   if (TREE_CODE (name1) == TYPE_DECL)
3316     name1 = DECL_NAME (name1);
3317   gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3318 
3319   if (TREE_CODE (name2) == TYPE_DECL)
3320     name2 = DECL_NAME (name2);
3321   gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3322 
3323   /* Identifiers can be compared with pointer equality rather
3324      than a string comparison.  */
3325   if (name1 == name2)
3326     return true;
3327 
3328   return false;
3329 }
3330 
3331 /* Return true if the field decls F1 and F2 are at the same offset.
3332 
3333    This is intended to be used on GIMPLE types only.  */
3334 
3335 bool
3336 gimple_compare_field_offset (tree f1, tree f2)
3337 {
3338   if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3339     {
3340       tree offset1 = DECL_FIELD_OFFSET (f1);
3341       tree offset2 = DECL_FIELD_OFFSET (f2);
3342       return ((offset1 == offset2
3343 	       /* Once gimplification is done, self-referential offsets are
3344 		  instantiated as operand #2 of the COMPONENT_REF built for
3345 		  each access and reset.  Therefore, they are not relevant
3346 		  anymore and fields are interchangeable provided that they
3347 		  represent the same access.  */
3348 	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3349 		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3350 		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
3351 		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3352 			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3353 		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3354 		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3355 	       || operand_equal_p (offset1, offset2, 0))
3356 	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3357 				     DECL_FIELD_BIT_OFFSET (f2)));
3358     }
3359 
3360   /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3361      should be, so handle differing ones specially by decomposing
3362      the offset into a byte and bit offset manually.  */
3363   if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3364       && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3365     {
3366       unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3367       unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3368       bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3369       byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3370 		      + bit_offset1 / BITS_PER_UNIT);
3371       bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3372       byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3373 		      + bit_offset2 / BITS_PER_UNIT);
3374       if (byte_offset1 != byte_offset2)
3375 	return false;
3376       return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3377     }
3378 
3379   return false;
3380 }
3381 
3382 static bool
3383 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3384 			     VEC(type_pair_t, heap) **,
3385 			     struct pointer_map_t *, struct obstack *);
3386 
3387 /* DFS visit the edge from the caller's type pair with state *STATE to
3388    the pair T1, T2.  Update the merging status if it is not part of
3389    the SCC containing the caller's pair, and return it.
3391    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3392 
3393 static bool
3394 gtc_visit (tree t1, tree t2,
3395 	   struct sccs *state,
3396 	   VEC(type_pair_t, heap) **sccstack,
3397 	   struct pointer_map_t *sccstate,
3398 	   struct obstack *sccstate_obstack)
3399 {
3400   struct sccs *cstate = NULL;
3401   type_pair_t p;
3402   void **slot;
3403   tree leader1, leader2;
3404 
3405   /* Check first for the obvious case of pointer identity.  */
3406   if (t1 == t2)
3407     return true;
3408 
3409   /* Check that we have two types to compare.  */
3410   if (t1 == NULL_TREE || t2 == NULL_TREE)
3411     return false;
3412 
3413   /* Can't be the same type if the types don't have the same code.  */
3414   if (TREE_CODE (t1) != TREE_CODE (t2))
3415     return false;
3416 
3417   /* Can't be the same type if they have different CV qualifiers.  */
3418   if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3419     return false;
3420 
3421   if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3422     return false;
3423 
3424   /* Void types and nullptr types are always the same.  */
3425   if (TREE_CODE (t1) == VOID_TYPE
3426       || TREE_CODE (t1) == NULLPTR_TYPE)
3427     return true;
3428 
3429   /* Can't be the same type if they have different alignment or mode.  */
3430   if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3431       || TYPE_MODE (t1) != TYPE_MODE (t2))
3432     return false;
3433 
3434   /* Do some simple checks before doing three hashtable queries.  */
3435   if (INTEGRAL_TYPE_P (t1)
3436       || SCALAR_FLOAT_TYPE_P (t1)
3437       || FIXED_POINT_TYPE_P (t1)
3438       || TREE_CODE (t1) == VECTOR_TYPE
3439       || TREE_CODE (t1) == COMPLEX_TYPE
3440       || TREE_CODE (t1) == OFFSET_TYPE
3441       || POINTER_TYPE_P (t1))
3442     {
3443       /* Can't be the same type if they have different sign or precision.  */
3444       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3445 	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3446 	return false;
3447 
3448       if (TREE_CODE (t1) == INTEGER_TYPE
3449 	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3450 	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3451 	return false;
3452 
3453       /* That's all we need to check for float and fixed-point types.  */
3454       if (SCALAR_FLOAT_TYPE_P (t1)
3455 	  || FIXED_POINT_TYPE_P (t1))
3456 	return true;
3457 
3458       /* For other types fall thru to more complex checks.  */
3459     }
3460 
3461   /* If the types have been previously registered and found equal
3462      they still are.  */
3463   leader1 = gimple_lookup_type_leader (t1);
3464   leader2 = gimple_lookup_type_leader (t2);
3465   if (leader1 == t2
3466       || t1 == leader2
3467       || (leader1 && leader1 == leader2))
3468     return true;
3469 
3470   /* If the hash values of t1 and t2 are different the types can't
3471      possibly be the same.  This helps keep the type-pair hashtable
3472      small, only tracking comparisons for hash collisions.  */
3473   if (gimple_type_hash (t1) != gimple_type_hash (t2))
3474     return false;
3475 
3476   /* Allocate a new cache entry for this comparison.  */
3477   p = lookup_type_pair (t1, t2);
3478   if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3479     {
3480       /* We have already decided whether T1 and T2 are the
3481 	 same, return the cached result.  */
3482       return p->same_p[GTC_MERGE] == 1;
3483     }
3484 
3485   if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3486     cstate = (struct sccs *)*slot;
3487   /* Not yet visited.  DFS recurse.  */
3488   if (!cstate)
3489     {
3490       gimple_types_compatible_p_1 (t1, t2, p,
3491 				   sccstack, sccstate, sccstate_obstack);
3492       cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3493       state->low = MIN (state->low, cstate->low);
3494     }
3495   /* If the type is still on the SCC stack, adjust the parent's low.  */
3496   if (cstate->dfsnum < state->dfsnum
3497       && cstate->on_sccstack)
3498     state->low = MIN (cstate->dfsnum, state->low);
3499 
3500   /* Return the current lattice value.  We start with an equality
3501      assumption, so types that are part of an SCC will be optimistically
3502      treated as equal unless proven otherwise.  */
3503   return cstate->u.same_p;
3504 }
3505 
3506 /* Worker for gimple_types_compatible_p.
3507    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3508 
3509 static bool
3510 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3511 			     VEC(type_pair_t, heap) **sccstack,
3512 			     struct pointer_map_t *sccstate,
3513 			     struct obstack *sccstate_obstack)
3514 {
3515   struct sccs *state;
3516 
3517   gcc_assert (p->same_p[GTC_MERGE] == -2);
3518 
3519   state = XOBNEW (sccstate_obstack, struct sccs);
3520   *pointer_map_insert (sccstate, p) = state;
3521 
3522   VEC_safe_push (type_pair_t, heap, *sccstack, p);
3523   state->dfsnum = gtc_next_dfs_num++;
3524   state->low = state->dfsnum;
3525   state->on_sccstack = true;
3526   /* Start with an equality assumption.  As we DFS recurse into child
3527      SCCs this assumption may get revisited.  */
3528   state->u.same_p = 1;
3529 
3530   /* The struct tags shall compare equal.  */
3531   if (!compare_type_names_p (t1, t2))
3532     goto different_types;
3533 
3534   /* We may not merge typedef types to the same type in different
3535      contexts.  */
3536   if (TYPE_NAME (t1)
3537       && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
3538       && DECL_CONTEXT (TYPE_NAME (t1))
3539       && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
3540     {
3541       if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
3542 		      DECL_CONTEXT (TYPE_NAME (t2)),
3543 		      state, sccstack, sccstate, sccstate_obstack))
3544 	goto different_types;
3545     }
3546 
3547   /* If their attributes are not the same they can't be the same type.  */
3548   if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3549     goto different_types;
3550 
3551   /* Do type-specific comparisons.  */
3552   switch (TREE_CODE (t1))
3553     {
3554     case VECTOR_TYPE:
3555     case COMPLEX_TYPE:
3556       if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3557 		      state, sccstack, sccstate, sccstate_obstack))
3558 	goto different_types;
3559       goto same_types;
3560 
3561     case ARRAY_TYPE:
3562       /* Array types are the same if the element types are the same and
3563 	 the number of elements is the same.  */
3564       if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3565 		      state, sccstack, sccstate, sccstate_obstack)
3566 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3567 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3568 	goto different_types;
3569       else
3570 	{
3571 	  tree i1 = TYPE_DOMAIN (t1);
3572 	  tree i2 = TYPE_DOMAIN (t2);
3573 
3574 	  /* For an incomplete external array, the type domain can be
3575  	     NULL_TREE.  Check this condition also.  */
3576 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
3577 	    goto same_types;
3578 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
3579 	    goto different_types;
3580 	  else
3581 	    {
3582 	      tree min1 = TYPE_MIN_VALUE (i1);
3583 	      tree min2 = TYPE_MIN_VALUE (i2);
3584 	      tree max1 = TYPE_MAX_VALUE (i1);
3585 	      tree max2 = TYPE_MAX_VALUE (i2);
3586 
3587 	      /* The minimum/maximum values have to be the same.  */
3588 	      if ((min1 == min2
3589 		   || (min1 && min2
3590 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3591 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3592 		           || operand_equal_p (min1, min2, 0))))
3593 		  && (max1 == max2
3594 		      || (max1 && max2
3595 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3596 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3597 			      || operand_equal_p (max1, max2, 0)))))
3598 		goto same_types;
3599 	      else
3600 		goto different_types;
3601 	    }
3602 	}
3603 
3604     case METHOD_TYPE:
3605       /* Method types should belong to the same class.  */
3606       if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3607 		      state, sccstack, sccstate, sccstate_obstack))
3608 	goto different_types;
3609 
3610       /* Fallthru  */
3611 
3612     case FUNCTION_TYPE:
3613       /* Function types are the same if the return type and argument types
3614 	 are the same.  */
3615       if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3616 		      state, sccstack, sccstate, sccstate_obstack))
3617 	goto different_types;
3618 
3619       if (!comp_type_attributes (t1, t2))
3620 	goto different_types;
3621 
3622       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3623 	goto same_types;
3624       else
3625 	{
3626 	  tree parms1, parms2;
3627 
3628 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3629 	       parms1 && parms2;
3630 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3631 	    {
3632 	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3633 			      state, sccstack, sccstate, sccstate_obstack))
3634 		goto different_types;
3635 	    }
3636 
3637 	  if (parms1 || parms2)
3638 	    goto different_types;
3639 
3640 	  goto same_types;
3641 	}
3642 
3643     case OFFSET_TYPE:
3644       {
3645 	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3646 			state, sccstack, sccstate, sccstate_obstack)
3647 	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3648 			   TYPE_OFFSET_BASETYPE (t2),
3649 			   state, sccstack, sccstate, sccstate_obstack))
3650 	  goto different_types;
3651 
3652 	goto same_types;
3653       }
3654 
3655     case POINTER_TYPE:
3656     case REFERENCE_TYPE:
3657       {
3658 	/* If the two pointers have different ref-all attributes,
3659 	   they can't be the same type.  */
3660 	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3661 	  goto different_types;
3662 
3663 	/* Otherwise, pointer and reference types are the same if the
3664 	   pointed-to types are the same.  */
3665 	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3666 		       state, sccstack, sccstate, sccstate_obstack))
3667 	  goto same_types;
3668 
3669 	goto different_types;
3670       }
3671 
3672     case INTEGER_TYPE:
3673     case BOOLEAN_TYPE:
3674       {
3675 	tree min1 = TYPE_MIN_VALUE (t1);
3676 	tree max1 = TYPE_MAX_VALUE (t1);
3677 	tree min2 = TYPE_MIN_VALUE (t2);
3678 	tree max2 = TYPE_MAX_VALUE (t2);
3679 	bool min_equal_p = false;
3680 	bool max_equal_p = false;
3681 
3682 	/* If either type has a minimum value, the other type must
3683 	   have the same.  */
3684 	if (min1 == NULL_TREE && min2 == NULL_TREE)
3685 	  min_equal_p = true;
3686 	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3687 	  min_equal_p = true;
3688 
3689 	/* Likewise, if either type has a maximum value, the other
3690 	   type must have the same.  */
3691 	if (max1 == NULL_TREE && max2 == NULL_TREE)
3692 	  max_equal_p = true;
3693 	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3694 	  max_equal_p = true;
3695 
3696 	if (!min_equal_p || !max_equal_p)
3697 	  goto different_types;
3698 
3699 	goto same_types;
3700       }
3701 
3702     case ENUMERAL_TYPE:
3703       {
3704 	/* FIXME lto, we cannot check bounds on enumeral types because
3705 	   different front ends will produce different values.
3706 	   In C, enumeral types are integers, while in C++ each element
3707 	   will have its own symbolic value.  We should decide how enums
3708 	   are to be represented in GIMPLE and have each front end lower
3709 	   to that.  */
3710 	tree v1, v2;
3711 
3712 	/* For enumeral types, all the values must be the same.  */
3713 	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3714 	  goto same_types;
3715 
3716 	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3717 	     v1 && v2;
3718 	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3719 	  {
3720 	    tree c1 = TREE_VALUE (v1);
3721 	    tree c2 = TREE_VALUE (v2);
3722 
3723 	    if (TREE_CODE (c1) == CONST_DECL)
3724 	      c1 = DECL_INITIAL (c1);
3725 
3726 	    if (TREE_CODE (c2) == CONST_DECL)
3727 	      c2 = DECL_INITIAL (c2);
3728 
3729 	    if (tree_int_cst_equal (c1, c2) != 1)
3730 	      goto different_types;
3731 
3732 	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3733 	      goto different_types;
3734 	  }
3735 
3736 	/* If one enumeration has more values than the other, they
3737 	   are not the same.  */
3738 	if (v1 || v2)
3739 	  goto different_types;
3740 
3741 	goto same_types;
3742       }
3743 
3744     case RECORD_TYPE:
3745     case UNION_TYPE:
3746     case QUAL_UNION_TYPE:
3747       {
3748 	tree f1, f2;
3749 
3750 	/* For aggregate types, all the fields must be the same.  */
3751 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3752 	     f1 && f2;
3753 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3754 	  {
3755 	    /* Different field kinds are not compatible.  */
3756 	    if (TREE_CODE (f1) != TREE_CODE (f2))
3757 	      goto different_types;
3758 	    /* Field decls must have the same nonaddressable flag and offset.  */
3759 	    if (TREE_CODE (f1) == FIELD_DECL
3760 		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3761 		    || !gimple_compare_field_offset (f1, f2)))
3762 	      goto different_types;
3763 	    /* All entities should have the same name and type.  */
3764 	    if (DECL_NAME (f1) != DECL_NAME (f2)
3765 		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3766 			       state, sccstack, sccstate, sccstate_obstack))
3767 	      goto different_types;
3768 	  }
3769 
3770 	/* If one aggregate has more fields than the other, they
3771 	   are not the same.  */
3772 	if (f1 || f2)
3773 	  goto different_types;
3774 
3775 	goto same_types;
3776       }
3777 
3778     default:
3779       gcc_unreachable ();
3780     }
3781 
3782   /* Common exit path for types that are not compatible.  */
3783 different_types:
3784   state->u.same_p = 0;
3785   goto pop;
3786 
3787   /* Common exit path for types that are compatible.  */
3788 same_types:
3789   gcc_assert (state->u.same_p == 1);
3790 
3791 pop:
3792   if (state->low == state->dfsnum)
3793     {
3794       type_pair_t x;
3795 
3796       /* Pop off the SCC and set its cache values to the final
3797          comparison result.  */
3798       do
3799 	{
3800 	  struct sccs *cstate;
3801 	  x = VEC_pop (type_pair_t, *sccstack);
3802 	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3803 	  cstate->on_sccstack = false;
3804 	  x->same_p[GTC_MERGE] = state->u.same_p;
3805 	}
3806       while (x != p);
3807     }
3808 
3809   return state->u.same_p;
3810 }
3811 
3812 /* Return true iff T1 and T2 are structurally identical.  The
3813    comparison is done for merging (GTC_MERGE), so an incomplete
3814    type and a complete type are considered different.  */
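
/* A hedged usage sketch, with int_tu1 and int_tu2 standing for two
   'int' nodes streamed in from different TUs: one would expect

       gimple_types_compatible_p (int_tu1, int_tu2)

   to return true, whereas 'int' vs. 'const int' is rejected by the
   cheap TYPE_QUALS check below before any hashtable query is made.  */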
3815 
3816 static bool
3817 gimple_types_compatible_p (tree t1, tree t2)
3818 {
3819   VEC(type_pair_t, heap) *sccstack = NULL;
3820   struct pointer_map_t *sccstate;
3821   struct obstack sccstate_obstack;
3822   type_pair_t p = NULL;
3823   bool res;
3824   tree leader1, leader2;
3825 
3826   /* Before starting to set up the SCC machinery handle simple cases.  */
3827 
3828   /* Check first for the obvious case of pointer identity.  */
3829   if (t1 == t2)
3830     return true;
3831 
3832   /* Check that we have two types to compare.  */
3833   if (t1 == NULL_TREE || t2 == NULL_TREE)
3834     return false;
3835 
3836   /* Can't be the same type if the types don't have the same code.  */
3837   if (TREE_CODE (t1) != TREE_CODE (t2))
3838     return false;
3839 
3840   /* Can't be the same type if they have different CV qualifiers.  */
3841   if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3842     return false;
3843 
3844   if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3845     return false;
3846 
3847   /* Void types and nullptr types are always the same.  */
3848   if (TREE_CODE (t1) == VOID_TYPE
3849       || TREE_CODE (t1) == NULLPTR_TYPE)
3850     return true;
3851 
3852   /* Can't be the same type if they have different alignment or mode.  */
3853   if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3854       || TYPE_MODE (t1) != TYPE_MODE (t2))
3855     return false;
3856 
3857   /* Do some simple checks before doing three hashtable queries.  */
3858   if (INTEGRAL_TYPE_P (t1)
3859       || SCALAR_FLOAT_TYPE_P (t1)
3860       || FIXED_POINT_TYPE_P (t1)
3861       || TREE_CODE (t1) == VECTOR_TYPE
3862       || TREE_CODE (t1) == COMPLEX_TYPE
3863       || TREE_CODE (t1) == OFFSET_TYPE
3864       || POINTER_TYPE_P (t1))
3865     {
3866       /* Can't be the same type if they have different sign or precision.  */
3867       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3868 	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3869 	return false;
3870 
3871       if (TREE_CODE (t1) == INTEGER_TYPE
3872 	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3873 	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3874 	return false;
3875 
3876       /* That's all we need to check for float and fixed-point types.  */
3877       if (SCALAR_FLOAT_TYPE_P (t1)
3878 	  || FIXED_POINT_TYPE_P (t1))
3879 	return true;
3880 
3881       /* For other types fall thru to more complex checks.  */
3882     }
3883 
3884   /* If the types have been previously registered and found equal
3885      they still are.  */
3886   leader1 = gimple_lookup_type_leader (t1);
3887   leader2 = gimple_lookup_type_leader (t2);
3888   if (leader1 == t2
3889       || t1 == leader2
3890       || (leader1 && leader1 == leader2))
3891     return true;
3892 
3893   /* If the hash values of t1 and t2 are different the types can't
3894      possibly be the same.  This helps keep the type-pair hashtable
3895      small, tracking comparisons only for hash collisions.  */
3896   if (gimple_type_hash (t1) != gimple_type_hash (t2))
3897     return false;
3898 
3899   /* If we've visited this type pair before (in the case of aggregates
3900      with self-referential types), and we made a decision, return it.  */
3901   p = lookup_type_pair (t1, t2);
3902   if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3903     {
3904       /* We have already decided whether T1 and T2 are the
3905 	 same, return the cached result.  */
3906       return p->same_p[GTC_MERGE] == 1;
3907     }
3908 
3909   /* Now set up the SCC machinery for the comparison.  */
3910   gtc_next_dfs_num = 1;
3911   sccstate = pointer_map_create ();
3912   gcc_obstack_init (&sccstate_obstack);
3913   res = gimple_types_compatible_p_1 (t1, t2, p,
3914 				     &sccstack, sccstate, &sccstate_obstack);
3915   VEC_free (type_pair_t, heap, sccstack);
3916   pointer_map_destroy (sccstate);
3917   obstack_free (&sccstate_obstack, NULL);
3918 
3919   return res;
3920 }
3921 
3922 
3923 static hashval_t
3924 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3925 			    struct pointer_map_t *, struct obstack *);
3926 
3927 /* DFS visit the edge from the callers type with state *STATE to T.
3928    Update the caller's type hash V with the hash for T if it is not part
3929    of the SCC containing the caller's type and return it.
3930    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3931 
3932 static hashval_t
3933 visit (tree t, struct sccs *state, hashval_t v,
3934        VEC (tree, heap) **sccstack,
3935        struct pointer_map_t *sccstate,
3936        struct obstack *sccstate_obstack)
3937 {
3938   struct sccs *cstate = NULL;
3939   struct tree_int_map m;
3940   void **slot;
3941 
3942   /* If there is a hash value recorded for this type then it can't
3943      possibly be part of our parent SCC.  Simply mix in its hash.  */
3944   m.base.from = t;
3945   if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
3946       && *slot)
3947     return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3948 
3949   if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3950     cstate = (struct sccs *)*slot;
3951   if (!cstate)
3952     {
3953       hashval_t tem;
3954       /* Not yet visited.  DFS recurse.  */
3955       tem = iterative_hash_gimple_type (t, v,
3956 					sccstack, sccstate, sccstate_obstack);
3957       if (!cstate)
3958 	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3959       state->low = MIN (state->low, cstate->low);
3960       /* If the type is no longer on the SCC stack and thus is not part
3961          of the parent's SCC, mix in its hash value.  Otherwise we will
3962 	 ignore the type for hashing purposes and return the unaltered
3963 	 hash value.  */
3964       if (!cstate->on_sccstack)
3965 	return tem;
3966     }
3967   if (cstate->dfsnum < state->dfsnum
3968       && cstate->on_sccstack)
3969     state->low = MIN (cstate->dfsnum, state->low);
3970 
3971   /* We are part of our parent's SCC, skip this type during hashing
3972      and return the unaltered hash value.  */
3973   return v;
3974 }
3975 
3976 /* Hash NAME with the previous hash value V and return it.  */
3977 
3978 static hashval_t
3979 iterative_hash_name (tree name, hashval_t v)
3980 {
3981   if (!name)
3982     return v;
3983   v = iterative_hash_hashval_t (TREE_CODE (name), v);
3984   if (TREE_CODE (name) == TYPE_DECL)
3985     name = DECL_NAME (name);
3986   if (!name)
3987     return v;
3988   gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3989   return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3990 }
3991 
3992 /* A type, hashvalue pair for sorting SCC members.  */
3993 
3994 struct type_hash_pair {
3995   tree type;
3996   hashval_t hash;
3997 };
3998 
3999 /* Compare two type, hashvalue pairs.  */
4000 
4001 static int
4002 type_hash_pair_compare (const void *p1_, const void *p2_)
4003 {
4004   const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4005   const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4006   if (p1->hash < p2->hash)
4007     return -1;
4008   else if (p1->hash > p2->hash)
4009     return 1;
4010   return 0;
4011 }
4012 
4013 /* Return a hash value for gimple type TYPE combined with VAL.
4014    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4015 
4016    To hash a type we end up hashing in types that are reachable.
4017    Through pointers we can end up with cycles, which mess up the
4018    required property that we need to compute the same hash value
4019    for structurally equivalent types.  To avoid this we have to
4020    hash all types in a cycle (the SCC) in a commutative way.  The
4021    easiest way is to not mix in the hashes of the SCC members at
4022    all.  To make this work we have to delay setting the hash
4023    values of the SCC until it is complete.  */
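
/* As a concrete, illustrative example, the SCC formed by

       struct a { struct b *p; };
       struct b { struct a *q; };

   can be entered at either member depending on the walk order, so
   mixing the member hashes in visitation order would produce
   different values in different TUs.  Instead the member hashes are
   combined in sorted order once the SCC is complete (see below).  */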
4024 
4025 static hashval_t
4026 iterative_hash_gimple_type (tree type, hashval_t val,
4027 			    VEC(tree, heap) **sccstack,
4028 			    struct pointer_map_t *sccstate,
4029 			    struct obstack *sccstate_obstack)
4030 {
4031   hashval_t v;
4032   void **slot;
4033   struct sccs *state;
4034 
4035   /* Not visited during this DFS walk.  */
4036   gcc_checking_assert (!pointer_map_contains (sccstate, type));
4037   state = XOBNEW (sccstate_obstack, struct sccs);
4038   *pointer_map_insert (sccstate, type) = state;
4039 
4040   VEC_safe_push (tree, heap, *sccstack, type);
4041   state->dfsnum = next_dfs_num++;
4042   state->low = state->dfsnum;
4043   state->on_sccstack = true;
4044 
4045   /* Combine a few common features of types so that types are grouped into
4046      smaller sets; when searching for existing matching types to merge,
4047      only existing types having the same features as the new type will be
4048      checked.  */
4049   v = iterative_hash_name (TYPE_NAME (type), 0);
4050   if (TYPE_NAME (type)
4051       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4052       && DECL_CONTEXT (TYPE_NAME (type))
4053       && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
4054     v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
4055 	       sccstack, sccstate, sccstate_obstack);
4056   v = iterative_hash_hashval_t (TREE_CODE (type), v);
4057   v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4058   v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4059 
4060   /* Do not hash the type's size as this will cause differences in
4061      hash values for the complete vs. the incomplete type variant.  */
4062 
4063   /* Incorporate common features of numerical types.  */
4064   if (INTEGRAL_TYPE_P (type)
4065       || SCALAR_FLOAT_TYPE_P (type)
4066       || FIXED_POINT_TYPE_P (type))
4067     {
4068       v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4069       v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4070       v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4071     }
4072 
4073   /* For pointer and reference types, fold in information about the type
4074      pointed to.  */
4075   if (POINTER_TYPE_P (type))
4076     v = visit (TREE_TYPE (type), state, v,
4077 	       sccstack, sccstate, sccstate_obstack);
4078 
4079   /* For integer types hash the type's min/max values and the string flag.  */
4080   if (TREE_CODE (type) == INTEGER_TYPE)
4081     {
4082       /* OMP lowering can introduce error_mark_node in place of
4083 	 random local decls in types.  */
4084       if (TYPE_MIN_VALUE (type) != error_mark_node)
4085 	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4086       if (TYPE_MAX_VALUE (type) != error_mark_node)
4087 	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4088       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4089     }
4090 
4091   /* For array types hash the domain and the string flag.  */
4092   if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4093     {
4094       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4095       v = visit (TYPE_DOMAIN (type), state, v,
4096 		 sccstack, sccstate, sccstate_obstack);
4097     }
4098 
4099   /* Recurse for aggregates with a single element type.  */
4100   if (TREE_CODE (type) == ARRAY_TYPE
4101       || TREE_CODE (type) == COMPLEX_TYPE
4102       || TREE_CODE (type) == VECTOR_TYPE)
4103     v = visit (TREE_TYPE (type), state, v,
4104 	       sccstack, sccstate, sccstate_obstack);
4105 
4106   /* Incorporate function return and argument types.  */
4107   if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4108     {
4109       unsigned na;
4110       tree p;
4111 
4112       /* For method types also incorporate their parent class.  */
4113       if (TREE_CODE (type) == METHOD_TYPE)
4114 	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4115 		   sccstack, sccstate, sccstate_obstack);
4116 
4117       /* Check result and argument types.  */
4118       v = visit (TREE_TYPE (type), state, v,
4119 		 sccstack, sccstate, sccstate_obstack);
4120       for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4121 	{
4122 	  v = visit (TREE_VALUE (p), state, v,
4123 		     sccstack, sccstate, sccstate_obstack);
4124 	  na++;
4125 	}
4126 
4127       v = iterative_hash_hashval_t (na, v);
4128     }
4129 
4130   if (RECORD_OR_UNION_TYPE_P (type))
4131     {
4132       unsigned nf;
4133       tree f;
4134 
4135       for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4136 	{
4137 	  v = iterative_hash_name (DECL_NAME (f), v);
4138 	  v = visit (TREE_TYPE (f), state, v,
4139 		     sccstack, sccstate, sccstate_obstack);
4140 	  nf++;
4141 	}
4142 
4143       v = iterative_hash_hashval_t (nf, v);
4144     }
4145 
4146   /* Record hash for us.  */
4147   state->u.hash = v;
4148 
4149   /* See if we found an SCC.  */
4150   if (state->low == state->dfsnum)
4151     {
4152       tree x;
4153       struct tree_int_map *m;
4154 
4155       /* Pop off the SCC and set its hash values.  */
4156       x = VEC_pop (tree, *sccstack);
4157       /* Optimize SCC size one.  */
4158       if (x == type)
4159 	{
4160 	  state->on_sccstack = false;
4161 	  m = ggc_alloc_cleared_tree_int_map ();
4162 	  m->base.from = x;
4163 	  m->to = v;
4164 	  slot = htab_find_slot (type_hash_cache, m, INSERT);
4165 	  gcc_assert (!*slot);
4166 	  *slot = (void *) m;
4167 	}
4168       else
4169 	{
4170 	  struct sccs *cstate;
4171 	  unsigned first, i, size, j;
4172 	  struct type_hash_pair *pairs;
4173 	  /* Pop off the SCC and build an array of type, hash pairs.  */
4174 	  first = VEC_length (tree, *sccstack) - 1;
4175 	  while (VEC_index (tree, *sccstack, first) != type)
4176 	    --first;
4177 	  size = VEC_length (tree, *sccstack) - first + 1;
4178 	  pairs = XALLOCAVEC (struct type_hash_pair, size);
4179 	  i = 0;
4180 	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4181 	  cstate->on_sccstack = false;
4182 	  pairs[i].type = x;
4183 	  pairs[i].hash = cstate->u.hash;
4184 	  do
4185 	    {
4186 	      x = VEC_pop (tree, *sccstack);
4187 	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4188 	      cstate->on_sccstack = false;
4189 	      ++i;
4190 	      pairs[i].type = x;
4191 	      pairs[i].hash = cstate->u.hash;
4192 	    }
4193 	  while (x != type);
4194 	  gcc_assert (i + 1 == size);
4195 	  /* Sort the array of type, hash pairs so that when we mix in
4196 	     all members of the SCC the hash value becomes independent of
4197 	     the order in which we visited the SCC.  Disregard hashes equal to
4198 	     the hash of the type we mix into because we cannot guarantee
4199 	     a stable sort for those across different TUs.  */
4200 	  qsort (pairs, size, sizeof (struct type_hash_pair),
4201 		 type_hash_pair_compare);
4202 	  for (i = 0; i < size; ++i)
4203 	    {
4204 	      hashval_t hash;
4205 	      m = ggc_alloc_cleared_tree_int_map ();
4206 	      m->base.from = pairs[i].type;
4207 	      hash = pairs[i].hash;
4208 	      /* Skip same hashes.  */
4209 	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4210 		;
4211 	      for (; j < size; ++j)
4212 		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4213 	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4214 		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4215 	      m->to = hash;
4216 	      if (pairs[i].type == type)
4217 		v = hash;
4218 	      slot = htab_find_slot (type_hash_cache, m, INSERT);
4219 	      gcc_assert (!*slot);
4220 	      *slot = (void *) m;
4221 	    }
4222 	}
4223     }
4224 
4225   return iterative_hash_hashval_t (v, val);
4226 }
4227 
4228 
4229 /* Returns a hash value for P (assumed to be a type).  The hash value
4230    is computed using some distinguishing features of the type.  Note
4231    that we cannot use pointer hashing here as we may be dealing with
4232    two distinct instances of the same type.
4233 
4234    This function should produce the same hash value for two compatible
4235    types according to gimple_types_compatible_p.  */
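
/* A minimal sketch of that contract, for hypothetical types T1 and T2:

       if (gimple_types_compatible_p (t1, t2))
	 gcc_assert (gimple_type_hash (t1) == gimple_type_hash (t2));

   The converse need not hold; incompatible types may collide and are
   then told apart by gimple_type_eq.  */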
4236 
4237 static hashval_t
4238 gimple_type_hash (const void *p)
4239 {
4240   const_tree t = (const_tree) p;
4241   VEC(tree, heap) *sccstack = NULL;
4242   struct pointer_map_t *sccstate;
4243   struct obstack sccstate_obstack;
4244   hashval_t val;
4245   void **slot;
4246   struct tree_int_map m;
4247 
4248   if (type_hash_cache == NULL)
4249     type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4250 				       tree_int_map_eq, NULL);
4251 
4252   m.base.from = CONST_CAST_TREE (t);
4253   if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4254       && *slot)
4255     return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4256 
4257   /* Perform a DFS walk and pre-hash all reachable types.  */
4258   next_dfs_num = 1;
4259   sccstate = pointer_map_create ();
4260   gcc_obstack_init (&sccstate_obstack);
4261   val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4262 				    &sccstack, sccstate, &sccstate_obstack);
4263   VEC_free (tree, heap, sccstack);
4264   pointer_map_destroy (sccstate);
4265   obstack_free (&sccstate_obstack, NULL);
4266 
4267   return val;
4268 }
4269 
4270 /* Return a hash value for gimple type TYPE combined with VAL.
4271 
4272    The hash value returned is equal for types considered compatible
4273    by gimple_canonical_types_compatible_p.  */
4274 
4275 static hashval_t
4276 iterative_hash_canonical_type (tree type, hashval_t val)
4277 {
4278   hashval_t v;
4279   void **slot;
4280   struct tree_int_map *mp, m;
4281 
4282   m.base.from = type;
4283   if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4284       && *slot)
4285     return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4286 
4287   /* Combine a few common features of types so that types are grouped into
4288      smaller sets; when searching for existing matching types to merge,
4289      only existing types having the same features as the new type will be
4290      checked.  */
4291   v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4292   v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4293   v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4294   v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4295 
4296   /* Incorporate common features of numerical types.  */
4297   if (INTEGRAL_TYPE_P (type)
4298       || SCALAR_FLOAT_TYPE_P (type)
4299       || FIXED_POINT_TYPE_P (type)
4300       || TREE_CODE (type) == VECTOR_TYPE
4301       || TREE_CODE (type) == COMPLEX_TYPE
4302       || TREE_CODE (type) == OFFSET_TYPE
4303       || POINTER_TYPE_P (type))
4304     {
4305       v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4306       v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4307     }
4308 
4309   /* For pointer and reference types, fold in information about the type
4310      pointed to but do not recurse to the pointed-to type.  */
4311   if (POINTER_TYPE_P (type))
4312     {
4313       v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4314       v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4315       v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4316       v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4317     }
4318 
4319   /* For integer types hash the sizetype flag and the string flag.  */
4320   if (TREE_CODE (type) == INTEGER_TYPE)
4321     {
4322       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4323       v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4324     }
4325 
4326   /* For array types hash the domain bounds and the string flag.  */
4327   if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4328     {
4329       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4330       /* OMP lowering can introduce error_mark_node in place of
4331 	 random local decls in types.  */
4332       if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4333 	v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
4334       if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4335 	v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
4336     }
4337 
4338   /* Recurse for aggregates with a single element type.  */
4339   if (TREE_CODE (type) == ARRAY_TYPE
4340       || TREE_CODE (type) == COMPLEX_TYPE
4341       || TREE_CODE (type) == VECTOR_TYPE)
4342     v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4343 
4344   /* Incorporate function return and argument types.  */
4345   if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4346     {
4347       unsigned na;
4348       tree p;
4349 
4350       /* For method types also incorporate their parent class.  */
4351       if (TREE_CODE (type) == METHOD_TYPE)
4352 	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4353 
4354       v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4355 
4356       for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4357 	{
4358 	  v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4359 	  na++;
4360 	}
4361 
4362       v = iterative_hash_hashval_t (na, v);
4363     }
4364 
4365   if (RECORD_OR_UNION_TYPE_P (type))
4366     {
4367       unsigned nf;
4368       tree f;
4369 
4370       for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4371 	if (TREE_CODE (f) == FIELD_DECL)
4372 	  {
4373 	    v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4374 	    nf++;
4375 	  }
4376 
4377       v = iterative_hash_hashval_t (nf, v);
4378     }
4379 
4380   /* Cache the just computed hash value.  */
4381   mp = ggc_alloc_cleared_tree_int_map ();
4382   mp->base.from = type;
4383   mp->to = v;
4384   *slot = (void *) mp;
4385 
4386   return iterative_hash_hashval_t (v, val);
4387 }
4388 
4389 static hashval_t
4390 gimple_canonical_type_hash (const void *p)
4391 {
4392   if (canonical_type_hash_cache == NULL)
4393     canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4394 						 tree_int_map_eq, NULL);
4395 
4396   return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4397 }
4398 
4399 
4400 /* Returns nonzero if P1 and P2 are equal.  */
4401 
4402 static int
4403 gimple_type_eq (const void *p1, const void *p2)
4404 {
4405   const_tree t1 = (const_tree) p1;
4406   const_tree t2 = (const_tree) p2;
4407   return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4408 				    CONST_CAST_TREE (t2));
4409 }
4410 
4411 
4412 /* Worker for gimple_register_type.
4413    Register type T in the global type table gimple_types.
4414    When REGISTERING_MV is false first recurse for the main variant of T.  */
4415 
4416 static tree
4417 gimple_register_type_1 (tree t, bool registering_mv)
4418 {
4419   void **slot;
4420   gimple_type_leader_entry *leader;
4421 
4422   /* If we registered this type before return the cached result.  */
4423   leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4424   if (leader->type == t)
4425     return leader->leader;
4426 
4427   /* Always register the main variant first.  This is important so we
4428      pick up the non-typedef variants as canonical, otherwise we'll end
4429      up taking typedef ids for structure tags during comparison.
4430      It also makes sure that main variants will be merged to main variants.
4431      As we are operating on a possibly partially fixed up type graph
4432      do not bother to recurse more than once, otherwise we may end up
4433      walking in circles.
4434      If we are registering a main variant it will either remain its
4435      own main variant or it will be merged to something else in which
4436      case we do not care for the main variant leader.  */
4437   if (!registering_mv
4438       && TYPE_MAIN_VARIANT (t) != t)
4439     gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4440 
4441   /* See if we already have an equivalent type registered.  */
4442   slot = htab_find_slot (gimple_types, t, INSERT);
4443   if (*slot
4444       && *(tree *)slot != t)
4445     {
4446       tree new_type = (tree) *((tree *) slot);
4447       leader->type = t;
4448       leader->leader = new_type;
4449       return new_type;
4450     }
4451 
4452   /* If not, insert it into the cache and the hash table.  */
4453   leader->type = t;
4454   leader->leader = t;
4455   *slot = (void *) t;
4456   return t;
4457 }
4458 
4459 /* Register type T in the global type table gimple_types.
4460    If another type T', compatible with T, already existed in
4461    gimple_types then return T', otherwise return T.  This is used by
4462    LTO to merge identical types read from different TUs.  */
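
/* A hedged usage sketch, with t1 and t2 standing for structurally
   identical types streamed in from two different TUs:

       tree leader = gimple_register_type (t1);
       gcc_assert (gimple_register_type (t2) == leader);

   The second call returns the leader registered by the first, so the
   second TU's references can be redirected to it.  */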
4463 
4464 tree
4465 gimple_register_type (tree t)
4466 {
4467   gcc_assert (TYPE_P (t));
4468 
4469   if (!gimple_type_leader)
4470     gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4471 				(GIMPLE_TYPE_LEADER_SIZE);
4472 
4473   if (gimple_types == NULL)
4474     gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4475 
4476   return gimple_register_type_1 (t, false);
4477 }
4478 
4479 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
4480    the middle-end types_compatible_p function.  It needs to avoid
4481    claiming types are different for types that should be treated
4482    the same with respect to TBAA.  Canonical types are also used
4483    for IL consistency checks via the useless_type_conversion_p
4484    predicate which does not handle all type kinds itself but falls
4485    back to pointer-comparison of TYPE_CANONICAL for aggregates
4486    for example.  */
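
/* For example (illustrative), the same 'struct S { int i; };' read in
   from two different TUs must receive the same TYPE_CANONICAL, since
   TBAA would otherwise wrongly disambiguate accesses made through the
   two copies.  */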
4487 
4488 /* Return true iff T1 and T2 are structurally identical as far as
4489    TBAA is concerned.  */
4490 
4491 static bool
4492 gimple_canonical_types_compatible_p (tree t1, tree t2)
4493 {
4494   /* Before starting to set up the SCC machinery handle simple cases.  */
4495 
4496   /* Check first for the obvious case of pointer identity.  */
4497   if (t1 == t2)
4498     return true;
4499 
4500   /* Check that we have two types to compare.  */
4501   if (t1 == NULL_TREE || t2 == NULL_TREE)
4502     return false;
4503 
4504   /* If the types have been previously registered and found equal
4505      they still are.  */
4506   if (TYPE_CANONICAL (t1)
4507       && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4508     return true;
4509 
4510   /* Can't be the same type if the types don't have the same code.  */
4511   if (TREE_CODE (t1) != TREE_CODE (t2))
4512     return false;
4513 
4514   if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4515     return false;
4516 
4517   /* Qualifiers do not matter for canonical type comparison purposes.  */
4518 
4519   /* Void types and nullptr types are always the same.  */
4520   if (TREE_CODE (t1) == VOID_TYPE
4521       || TREE_CODE (t1) == NULLPTR_TYPE)
4522     return true;
4523 
4524   /* Can't be the same type if they have different alignment, or mode.  */
4525   if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4526       || TYPE_MODE (t1) != TYPE_MODE (t2))
4527     return false;
4528 
4529   /* Non-aggregate types can be handled cheaply.  */
4530   if (INTEGRAL_TYPE_P (t1)
4531       || SCALAR_FLOAT_TYPE_P (t1)
4532       || FIXED_POINT_TYPE_P (t1)
4533       || TREE_CODE (t1) == VECTOR_TYPE
4534       || TREE_CODE (t1) == COMPLEX_TYPE
4535       || TREE_CODE (t1) == OFFSET_TYPE
4536       || POINTER_TYPE_P (t1))
4537     {
4538       /* Can't be the same type if they have different sign or precision.  */
4539       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4540 	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4541 	return false;
4542 
4543       if (TREE_CODE (t1) == INTEGER_TYPE
4544 	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4545 	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4546 	return false;
4547 
4548       /* For canonical type comparisons we do not want to build SCCs
4549 	 so we cannot compare pointed-to types.  But we can, for now,
4550 	 require the same pointed-to type kind and match what
4551 	 useless_type_conversion_p would do.  */
4552       if (POINTER_TYPE_P (t1))
4553 	{
4554 	  /* If the two pointers have different ref-all attributes,
4555 	     they can't be the same type.  */
4556 	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4557 	    return false;
4558 
4559 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4560 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4561 	    return false;
4562 
4563 	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4564 	    return false;
4565 
4566 	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4567 	    return false;
4568 	}
4569 
4570       /* Tail-recurse to components.  */
4571       if (TREE_CODE (t1) == VECTOR_TYPE
4572 	  || TREE_CODE (t1) == COMPLEX_TYPE)
4573 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4574 						    TREE_TYPE (t2));
4575 
4576       return true;
4577     }
4578 
4579   /* If their attributes are not the same they can't be the same type.  */
4580   if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4581     return false;
4582 
4583   /* Do type-specific comparisons.  */
4584   switch (TREE_CODE (t1))
4585     {
4586     case ARRAY_TYPE:
4587       /* Array types are the same if the element types are the same and
4588 	 the number of elements is the same.  */
4589       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4590 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4591 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4592 	return false;
4593       else
4594 	{
4595 	  tree i1 = TYPE_DOMAIN (t1);
4596 	  tree i2 = TYPE_DOMAIN (t2);
4597 
4598 	  /* For an incomplete external array, the type domain can be
4599  	     NULL_TREE.  Check this condition also.  */
4600 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
4601 	    return true;
4602 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
4603 	    return false;
4604 	  else
4605 	    {
4606 	      tree min1 = TYPE_MIN_VALUE (i1);
4607 	      tree min2 = TYPE_MIN_VALUE (i2);
4608 	      tree max1 = TYPE_MAX_VALUE (i1);
4609 	      tree max2 = TYPE_MAX_VALUE (i2);
4610 
4611 	      /* The minimum/maximum values have to be the same.  */
4612 	      if ((min1 == min2
4613 		   || (min1 && min2
4614 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4615 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4616 		           || operand_equal_p (min1, min2, 0))))
4617 		  && (max1 == max2
4618 		      || (max1 && max2
4619 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4620 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4621 			      || operand_equal_p (max1, max2, 0)))))
4622 		return true;
4623 	      else
4624 		return false;
4625 	    }
4626 	}
4627 
4628     case METHOD_TYPE:
4629       /* Method types should belong to the same class.  */
4630       if (!gimple_canonical_types_compatible_p
4631 	     (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4632 	return false;
4633 
4634       /* Fallthru  */
4635 
4636     case FUNCTION_TYPE:
4637       /* Function types are the same if the return type and argument types
4638 	 are the same.  */
4639       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4640 	return false;
4641 
4642       if (!comp_type_attributes (t1, t2))
4643 	return false;
4644 
4645       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4646 	return true;
4647       else
4648 	{
4649 	  tree parms1, parms2;
4650 
4651 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4652 	       parms1 && parms2;
4653 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4654 	    {
4655 	      if (!gimple_canonical_types_compatible_p
4656 		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4657 		return false;
4658 	    }
4659 
4660 	  if (parms1 || parms2)
4661 	    return false;
4662 
4663 	  return true;
4664 	}
4665 
4666     case RECORD_TYPE:
4667     case UNION_TYPE:
4668     case QUAL_UNION_TYPE:
4669       {
4670 	tree f1, f2;
4671 
4672 	/* For aggregate types, all the fields must be the same.  */
4673 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4674 	     f1 || f2;
4675 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4676 	  {
4677 	    /* Skip non-fields.  */
4678 	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
4679 	      f1 = TREE_CHAIN (f1);
4680 	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
4681 	      f2 = TREE_CHAIN (f2);
4682 	    if (!f1 || !f2)
4683 	      break;
4684 	    /* The fields must have the same nonaddressable flag, offset and type.  */
4685 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4686 		|| !gimple_compare_field_offset (f1, f2)
4687 		|| !gimple_canonical_types_compatible_p
4688 		      (TREE_TYPE (f1), TREE_TYPE (f2)))
4689 	      return false;
4690 	  }
4691 
4692 	/* If one aggregate has more fields than the other, they
4693 	   are not the same.  */
4694 	if (f1 || f2)
4695 	  return false;
4696 
4697 	return true;
4698       }
4699 
4700     default:
4701       gcc_unreachable ();
4702     }
4703 }
4704 
4705 
4706 /* Returns nonzero if P1 and P2 are equal.  */
4707 
4708 static int
4709 gimple_canonical_type_eq (const void *p1, const void *p2)
4710 {
4711   const_tree t1 = (const_tree) p1;
4712   const_tree t2 = (const_tree) p2;
4713   return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4714 					      CONST_CAST_TREE (t2));
4715 }
4716 
4717 /* Register type T in the global canonical type table
4718    gimple_canonical_types.  If another type T', compatible with T,
4719    already existed there then return T', otherwise return T.  This
4720    is used by LTO to merge identical types read from different TUs.
4721 
4722    ???  This merging does not exactly match how the tree.c middle-end
4723    functions will assign TYPE_CANONICAL when new types are created
4724    during optimization (which at least happens for pointer and array
4725    types).  */
4726 
4727 tree
4728 gimple_register_canonical_type (tree t)
4729 {
4730   void **slot;
4731 
4732   gcc_assert (TYPE_P (t));
4733 
4734   if (TYPE_CANONICAL (t))
4735     return TYPE_CANONICAL (t);
4736 
4737   if (gimple_canonical_types == NULL)
4738     gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4739 					      gimple_canonical_type_eq, 0);
4740 
4741   slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4742   if (*slot
4743       && *(tree *)slot != t)
4744     {
4745       tree new_type = (tree) *((tree *) slot);
4746 
4747       TYPE_CANONICAL (t) = new_type;
4748       t = new_type;
4749     }
4750   else
4751     {
4752       TYPE_CANONICAL (t) = t;
4753       *slot = (void *) t;
4754     }
4755 
4756   return t;
4757 }
4758 
4759 
4760 /* Show statistics on references to the global LTO type merging tables.  */
4761 
4762 void
4763 print_gimple_types_stats (void)
4764 {
4765   if (gimple_types)
4766     fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4767 	     "%ld searches, %ld collisions (ratio: %f)\n",
4768 	     (long) htab_size (gimple_types),
4769 	     (long) htab_elements (gimple_types),
4770 	     (long) gimple_types->searches,
4771 	     (long) gimple_types->collisions,
4772 	     htab_collisions (gimple_types));
4773   else
4774     fprintf (stderr, "GIMPLE type table is empty\n");
4775   if (type_hash_cache)
4776     fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4777 	     "%ld searches, %ld collisions (ratio: %f)\n",
4778 	     (long) htab_size (type_hash_cache),
4779 	     (long) htab_elements (type_hash_cache),
4780 	     (long) type_hash_cache->searches,
4781 	     (long) type_hash_cache->collisions,
4782 	     htab_collisions (type_hash_cache));
4783   else
4784     fprintf (stderr, "GIMPLE type hash table is empty\n");
4785   if (gimple_canonical_types)
4786     fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4787 	     "%ld searches, %ld collisions (ratio: %f)\n",
4788 	     (long) htab_size (gimple_canonical_types),
4789 	     (long) htab_elements (gimple_canonical_types),
4790 	     (long) gimple_canonical_types->searches,
4791 	     (long) gimple_canonical_types->collisions,
4792 	     htab_collisions (gimple_canonical_types));
4793   else
4794     fprintf (stderr, "GIMPLE canonical type table is empty\n");
4795   if (canonical_type_hash_cache)
4796     fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4797 	     "%ld searches, %ld collisions (ratio: %f)\n",
4798 	     (long) htab_size (canonical_type_hash_cache),
4799 	     (long) htab_elements (canonical_type_hash_cache),
4800 	     (long) canonical_type_hash_cache->searches,
4801 	     (long) canonical_type_hash_cache->collisions,
4802 	     htab_collisions (canonical_type_hash_cache));
4803   else
4804     fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4805 }
4806 
4807 /* Free the gimple type hashtables used for LTO type merging.  */
4808 
4809 void
4810 free_gimple_type_tables (void)
4811 {
4812   /* Last chance to print stats for the tables.  */
4813   if (flag_lto_report)
4814     print_gimple_types_stats ();
4815 
4816   if (gimple_types)
4817     {
4818       htab_delete (gimple_types);
4819       gimple_types = NULL;
4820     }
4821   if (gimple_canonical_types)
4822     {
4823       htab_delete (gimple_canonical_types);
4824       gimple_canonical_types = NULL;
4825     }
4826   if (type_hash_cache)
4827     {
4828       htab_delete (type_hash_cache);
4829       type_hash_cache = NULL;
4830     }
4831   if (canonical_type_hash_cache)
4832     {
4833       htab_delete (canonical_type_hash_cache);
4834       canonical_type_hash_cache = NULL;
4835     }
4836   if (type_pair_cache)
4837     {
4838       free (type_pair_cache);
4839       type_pair_cache = NULL;
4840     }
4841   gimple_type_leader = NULL;
4842 }
4843 
4844 
4845 /* Return a type the same as TYPE except unsigned or
4846    signed according to UNSIGNEDP.  */
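
/* For example (illustrative):

       gimple_signed_or_unsigned_type (true, integer_type_node)
	 == unsigned_type_node
       gimple_signed_or_unsigned_type (false, unsigned_char_type_node)
	 == signed_char_type_node  */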
4847 
4848 static tree
4849 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4850 {
4851   tree type1;
4852 
4853   type1 = TYPE_MAIN_VARIANT (type);
4854   if (type1 == signed_char_type_node
4855       || type1 == char_type_node
4856       || type1 == unsigned_char_type_node)
4857     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4858   if (type1 == integer_type_node || type1 == unsigned_type_node)
4859     return unsignedp ? unsigned_type_node : integer_type_node;
4860   if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4861     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4862   if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4863     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4864   if (type1 == long_long_integer_type_node
4865       || type1 == long_long_unsigned_type_node)
4866     return unsignedp
4867            ? long_long_unsigned_type_node
4868 	   : long_long_integer_type_node;
4869   if (int128_integer_type_node
       && (type1 == int128_integer_type_node
	   || type1 == int128_unsigned_type_node))
4870     return unsignedp
4871            ? int128_unsigned_type_node
4872 	   : int128_integer_type_node;
4873 #if HOST_BITS_PER_WIDE_INT >= 64
4874   if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4875     return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4876 #endif
4877   if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4878     return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4879   if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4880     return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4881   if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4882     return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4883   if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4884     return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4885 
4886 #define GIMPLE_FIXED_TYPES(NAME)	    \
4887   if (type1 == short_ ## NAME ## _type_node \
4888       || type1 == unsigned_short_ ## NAME ## _type_node) \
4889     return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4890 		     : short_ ## NAME ## _type_node; \
4891   if (type1 == NAME ## _type_node \
4892       || type1 == unsigned_ ## NAME ## _type_node) \
4893     return unsignedp ? unsigned_ ## NAME ## _type_node \
4894 		     : NAME ## _type_node; \
4895   if (type1 == long_ ## NAME ## _type_node \
4896       || type1 == unsigned_long_ ## NAME ## _type_node) \
4897     return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4898 		     : long_ ## NAME ## _type_node; \
4899   if (type1 == long_long_ ## NAME ## _type_node \
4900       || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4901     return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4902 		     : long_long_ ## NAME ## _type_node;
4903 
4904 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4905   if (type1 == NAME ## _type_node \
4906       || type1 == u ## NAME ## _type_node) \
4907     return unsignedp ? u ## NAME ## _type_node \
4908 		     : NAME ## _type_node;
4909 
4910 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4911   if (type1 == sat_ ## short_ ## NAME ## _type_node \
4912       || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4913     return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4914 		     : sat_ ## short_ ## NAME ## _type_node; \
4915   if (type1 == sat_ ## NAME ## _type_node \
4916       || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4917     return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4918 		     : sat_ ## NAME ## _type_node; \
4919   if (type1 == sat_ ## long_ ## NAME ## _type_node \
4920       || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4921     return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4922 		     : sat_ ## long_ ## NAME ## _type_node; \
4923   if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4924       || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4925     return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4926 		     : sat_ ## long_long_ ## NAME ## _type_node;
4927 
4928 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
4929   if (type1 == sat_ ## NAME ## _type_node \
4930       || type1 == sat_ ## u ## NAME ## _type_node) \
4931     return unsignedp ? sat_ ## u ## NAME ## _type_node \
4932 		     : sat_ ## NAME ## _type_node;
4933 
4934   GIMPLE_FIXED_TYPES (fract);
4935   GIMPLE_FIXED_TYPES_SAT (fract);
4936   GIMPLE_FIXED_TYPES (accum);
4937   GIMPLE_FIXED_TYPES_SAT (accum);
4938 
4939   GIMPLE_FIXED_MODE_TYPES (qq);
4940   GIMPLE_FIXED_MODE_TYPES (hq);
4941   GIMPLE_FIXED_MODE_TYPES (sq);
4942   GIMPLE_FIXED_MODE_TYPES (dq);
4943   GIMPLE_FIXED_MODE_TYPES (tq);
4944   GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4945   GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4946   GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4947   GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4948   GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4949   GIMPLE_FIXED_MODE_TYPES (ha);
4950   GIMPLE_FIXED_MODE_TYPES (sa);
4951   GIMPLE_FIXED_MODE_TYPES (da);
4952   GIMPLE_FIXED_MODE_TYPES (ta);
4953   GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4954   GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4955   GIMPLE_FIXED_MODE_TYPES_SAT (da);
4956   GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4957 
4958   /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4959      the precision; they have precision set to match their range, but
4960      may use a wider mode to match an ABI.  If we change modes, we may
4961      wind up with bad conversions.  For INTEGER_TYPEs in C, must check
4962      the precision as well, so as to yield correct results for
4963      bit-field types.  C++ does not have these separate bit-field
4964      types, and producing a signed or unsigned variant of an
4965      ENUMERAL_TYPE may cause other problems as well.  */
4966   if (!INTEGRAL_TYPE_P (type)
4967       || TYPE_UNSIGNED (type) == unsignedp)
4968     return type;
4969 
4970 #define TYPE_OK(node)							    \
4971   (TYPE_MODE (type) == TYPE_MODE (node)					    \
4972    && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4973   if (TYPE_OK (signed_char_type_node))
4974     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4975   if (TYPE_OK (integer_type_node))
4976     return unsignedp ? unsigned_type_node : integer_type_node;
4977   if (TYPE_OK (short_integer_type_node))
4978     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4979   if (TYPE_OK (long_integer_type_node))
4980     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4981   if (TYPE_OK (long_long_integer_type_node))
4982     return (unsignedp
4983 	    ? long_long_unsigned_type_node
4984 	    : long_long_integer_type_node);
4985   if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4986     return (unsignedp
4987 	    ? int128_unsigned_type_node
4988 	    : int128_integer_type_node);
4989 
4990 #if HOST_BITS_PER_WIDE_INT >= 64
4991   if (TYPE_OK (intTI_type_node))
4992     return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4993 #endif
4994   if (TYPE_OK (intDI_type_node))
4995     return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4996   if (TYPE_OK (intSI_type_node))
4997     return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4998   if (TYPE_OK (intHI_type_node))
4999     return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5000   if (TYPE_OK (intQI_type_node))
5001     return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5002 
5003 #undef GIMPLE_FIXED_TYPES
5004 #undef GIMPLE_FIXED_MODE_TYPES
5005 #undef GIMPLE_FIXED_TYPES_SAT
5006 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5007 #undef TYPE_OK
5008 
5009   return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5010 }
5011 
5012 
5013 /* Return an unsigned type the same as TYPE in other respects.  */
5014 
5015 tree
5016 gimple_unsigned_type (tree type)
5017 {
5018   return gimple_signed_or_unsigned_type (true, type);
5019 }
5020 
5021 
5022 /* Return a signed type the same as TYPE in other respects.  */
5023 
5024 tree
5025 gimple_signed_type (tree type)
5026 {
5027   return gimple_signed_or_unsigned_type (false, type);
5028 }
5029 
5030 
5031 /* Return the type-based alias set for T, which may be an expression
5032    or a type.  Return -1 if we don't do anything special.  */
5033 
5034 alias_set_type
5035 gimple_get_alias_set (tree t)
5036 {
5037   tree u;
5038 
5039   /* Permit type-punning when accessing a union, provided the access
5040      is directly through the union.  For example, this code does not
5041      permit taking the address of a union member and then storing
5042      through it.  Even the type-punning allowed here is a GCC
5043      extension, albeit a common and useful one; the C standard says
5044      that such accesses have implementation-defined behavior.  */
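  /* An illustrative example: given

	 union u { int i; float f; } x;

     the access 'x.f' is assigned alias set 0 by the loop below
     (punning through the union is allowed), whereas
     'float *p = &x.f; *p = 1.0f;' stores through a plain float lvalue
     and is not covered by this exception.  */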
5045   for (u = t;
5046        TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5047        u = TREE_OPERAND (u, 0))
5048     if (TREE_CODE (u) == COMPONENT_REF
5049 	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5050       return 0;
5051 
5052   /* That's all the expressions we handle specially.  */
5053   if (!TYPE_P (t))
5054     return -1;
5055 
5056   /* For convenience, follow the C standard when dealing with
5057      character types.  Any object may be accessed via an lvalue that
5058      has character type.  */
5059   if (t == char_type_node
5060       || t == signed_char_type_node
5061       || t == unsigned_char_type_node)
5062     return 0;
5063 
5064   /* Allow aliasing between signed and unsigned variants of the same
5065      type.  We treat the signed variant as canonical.  */
5066   if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5067     {
5068       tree t1 = gimple_signed_type (t);
5069 
5070       /* t1 == t can happen for boolean nodes which are always unsigned.  */
5071       if (t1 != t)
5072 	return get_alias_set (t1);
5073     }
5074 
5075   return -1;
5076 }
5077 
5078 
5079 /* Data structure used to count the number of dereferences to PTR
5080    inside an expression.  */
5081 struct count_ptr_d
5082 {
5083   tree ptr;
5084   unsigned num_stores;
5085   unsigned num_loads;
5086 };
5087 
5088 /* Helper for count_uses_and_derefs.  Called by walk_tree to look for
5089    MEM_REF nodes whose base is the pointer passed in DATA.  */
5090 
5091 static tree
5092 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5093 {
5094   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5095   struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5096 
5097   /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
5098      pointer 'ptr' is *not* dereferenced, it is simply used to compute
5099      the address of 'fld' as 'ptr + offsetof(fld)'.  */
5100   if (TREE_CODE (*tp) == ADDR_EXPR)
5101     {
5102       *walk_subtrees = 0;
5103       return NULL_TREE;
5104     }
5105 
5106   if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5107     {
5108       if (wi_p->is_lhs)
5109 	count_p->num_stores++;
5110       else
5111 	count_p->num_loads++;
5112     }
5113 
5114   return NULL_TREE;
5115 }
5116 
5117 /* Count the number of direct and indirect uses for pointer PTR in
5118    statement STMT.  The number of direct uses is stored in
5119    *NUM_USES_P.  Indirect references are counted separately depending
5120    on whether they are store or load operations.  The counts are
5121    stored in *NUM_STORES_P and *NUM_LOADS_P.  */
5122 
5123 void
5124 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5125 		       unsigned *num_loads_p, unsigned *num_stores_p)
5126 {
5127   ssa_op_iter i;
5128   tree use;
5129 
5130   *num_uses_p = 0;
5131   *num_loads_p = 0;
5132   *num_stores_p = 0;
5133 
5134   /* Find out the total number of uses of PTR in STMT.  */
5135   FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5136     if (use == ptr)
5137       (*num_uses_p)++;
5138 
5139   /* Now count the number of indirect references to PTR.  This is
5140      truly awful, but we don't have much choice.  There are no parent
5141      pointers inside MEM_REFs, so an expression like
5142      '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5143      find all the indirect and direct uses of x_1 inside.  The only
5144      shortcut we can take is the fact that GIMPLE only allows
5145      MEM_REFs inside the expressions below.  */
5146   if (is_gimple_assign (stmt)
5147       || gimple_code (stmt) == GIMPLE_RETURN
5148       || gimple_code (stmt) == GIMPLE_ASM
5149       || is_gimple_call (stmt))
5150     {
5151       struct walk_stmt_info wi;
5152       struct count_ptr_d count;
5153 
5154       count.ptr = ptr;
5155       count.num_stores = 0;
5156       count.num_loads = 0;
5157 
5158       memset (&wi, 0, sizeof (wi));
5159       wi.info = &count;
5160       walk_gimple_op (stmt, count_ptr_derefs, &wi);
5161 
5162       *num_stores_p = count.num_stores;
5163       *num_loads_p = count.num_loads;
5164     }
5165 
5166   gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5167 }
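
/* A minimal usage sketch (editorial; STMT and x_1 stand for a real
   statement and SSA name).  For '*x_1 = foo (x_1, *x_1)' the counts
   come out as three uses, one store and one load:

     unsigned num_uses, num_loads, num_stores;
     count_uses_and_derefs (x_1, stmt, &num_uses, &num_loads,
                            &num_stores);
     gcc_assert (num_uses == 3 && num_stores == 1 && num_loads == 1);
*/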
5168 
5169 /* From a tree operand OP, return the base of a load or store operation,
5170    or NULL_TREE if OP is not a load or a store.  */
5171 
5172 static tree
5173 get_base_loadstore (tree op)
5174 {
5175   while (handled_component_p (op))
5176     op = TREE_OPERAND (op, 0);
5177   if (DECL_P (op)
5178       || INDIRECT_REF_P (op)
5179       || TREE_CODE (op) == MEM_REF
5180       || TREE_CODE (op) == TARGET_MEM_REF)
5181     return op;
5182   return NULL_TREE;
5183 }
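
/* Worked examples (editorial): for the operand 'a.b[i].c' the
   handled_component_p loop strips the COMPONENT_REFs and ARRAY_REF
   down to the DECL 'a', which is returned; for '(*p).x' it stops at
   the MEM_REF '*p'; for a bare constant such as '1' nothing matches
   and NULL_TREE is returned, as that is neither a load nor a store.  */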
5184 
5185 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5186    VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
5187    passing STMT, the base of the operand and DATA to each.  The base
5188    will be either a decl, an indirect reference (including TARGET_MEM_REF)
5189    or the argument of an address expression.
5190    Returns the results of these callbacks or'ed.  */
5191 
5192 bool
5193 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5194 			       bool (*visit_load)(gimple, tree, void *),
5195 			       bool (*visit_store)(gimple, tree, void *),
5196 			       bool (*visit_addr)(gimple, tree, void *))
5197 {
5198   bool ret = false;
5199   unsigned i;
5200   if (gimple_assign_single_p (stmt))
5201     {
5202       tree lhs, rhs;
5203       if (visit_store)
5204 	{
5205 	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5206 	  if (lhs)
5207 	    ret |= visit_store (stmt, lhs, data);
5208 	}
5209       rhs = gimple_assign_rhs1 (stmt);
5210       while (handled_component_p (rhs))
5211 	rhs = TREE_OPERAND (rhs, 0);
5212       if (visit_addr)
5213 	{
5214 	  if (TREE_CODE (rhs) == ADDR_EXPR)
5215 	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5216 	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
5217 		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5218 	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5219 	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5220 		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5221 	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5222 						   0), data);
5223 	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
5224 	    {
5225 	      unsigned int ix;
5226 	      tree val;
5227 
5228 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5229 		if (TREE_CODE (val) == ADDR_EXPR)
5230 		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5231 		else if (TREE_CODE (val) == OBJ_TYPE_REF
5232 			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5233 		  ret |= visit_addr (stmt,
5234 				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5235 						   0), data);
5236 	    }
5237           lhs = gimple_assign_lhs (stmt);
5238 	  if (TREE_CODE (lhs) == TARGET_MEM_REF
5239               && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5240             ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5241 	}
5242       if (visit_load)
5243 	{
5244 	  rhs = get_base_loadstore (rhs);
5245 	  if (rhs)
5246 	    ret |= visit_load (stmt, rhs, data);
5247 	}
5248     }
5249   else if (visit_addr
5250 	   && (is_gimple_assign (stmt)
5251 	       || gimple_code (stmt) == GIMPLE_COND))
5252     {
5253       for (i = 0; i < gimple_num_ops (stmt); ++i)
5254 	{
5255 	  tree op = gimple_op (stmt, i);
5256 	  if (op == NULL_TREE)
5257 	    ;
5258 	  else if (TREE_CODE (op) == ADDR_EXPR)
5259 	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5260 	  /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a comparison
5261 	     tree with two operands.  */
5262 	  else if (i == 1 && COMPARISON_CLASS_P (op))
5263 	    {
5264 	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5265 		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5266 						       0), data);
5267 	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5268 		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5269 						       0), data);
5270 	    }
5271 	}
5272     }
5273   else if (is_gimple_call (stmt))
5274     {
5275       if (visit_store)
5276 	{
5277 	  tree lhs = gimple_call_lhs (stmt);
5278 	  if (lhs)
5279 	    {
5280 	      lhs = get_base_loadstore (lhs);
5281 	      if (lhs)
5282 		ret |= visit_store (stmt, lhs, data);
5283 	    }
5284 	}
5285       if (visit_load || visit_addr)
5286 	for (i = 0; i < gimple_call_num_args (stmt); ++i)
5287 	  {
5288 	    tree rhs = gimple_call_arg (stmt, i);
5289 	    if (visit_addr
5290 		&& TREE_CODE (rhs) == ADDR_EXPR)
5291 	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5292 	    else if (visit_load)
5293 	      {
5294 		rhs = get_base_loadstore (rhs);
5295 		if (rhs)
5296 		  ret |= visit_load (stmt, rhs, data);
5297 	      }
5298 	  }
5299       if (visit_addr
5300 	  && gimple_call_chain (stmt)
5301 	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5302 	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5303 			   data);
5304       if (visit_addr
5305 	  && gimple_call_return_slot_opt_p (stmt)
5306 	  && gimple_call_lhs (stmt) != NULL_TREE
5307 	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5308 	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5309     }
5310   else if (gimple_code (stmt) == GIMPLE_ASM)
5311     {
5312       unsigned noutputs;
5313       const char *constraint;
5314       const char **oconstraints;
5315       bool allows_mem, allows_reg, is_inout;
5316       noutputs = gimple_asm_noutputs (stmt);
5317       oconstraints = XALLOCAVEC (const char *, noutputs);
5318       if (visit_store || visit_addr)
5319 	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5320 	  {
5321 	    tree link = gimple_asm_output_op (stmt, i);
5322 	    tree op = get_base_loadstore (TREE_VALUE (link));
5323 	    if (op && visit_store)
5324 	      ret |= visit_store (stmt, op, data);
5325 	    if (visit_addr)
5326 	      {
5327 		constraint = TREE_STRING_POINTER
5328 		    (TREE_VALUE (TREE_PURPOSE (link)));
5329 		oconstraints[i] = constraint;
5330 		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5331 					 &allows_reg, &is_inout);
5332 		if (op && !allows_reg && allows_mem)
5333 		  ret |= visit_addr (stmt, op, data);
5334 	      }
5335 	  }
5336       if (visit_load || visit_addr)
5337 	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5338 	  {
5339 	    tree link = gimple_asm_input_op (stmt, i);
5340 	    tree op = TREE_VALUE (link);
5341 	    if (visit_addr
5342 		&& TREE_CODE (op) == ADDR_EXPR)
5343 	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5344 	    else if (visit_load || visit_addr)
5345 	      {
5346 		op = get_base_loadstore (op);
5347 		if (op)
5348 		  {
5349 		    if (visit_load)
5350 		      ret |= visit_load (stmt, op, data);
5351 		    if (visit_addr)
5352 		      {
5353 			constraint = TREE_STRING_POINTER
5354 			    (TREE_VALUE (TREE_PURPOSE (link)));
5355 			parse_input_constraint (&constraint, 0, 0, noutputs,
5356 						0, oconstraints,
5357 						&allows_mem, &allows_reg);
5358 			if (!allows_reg && allows_mem)
5359 			  ret |= visit_addr (stmt, op, data);
5360 		      }
5361 		  }
5362 	      }
5363 	  }
5364     }
5365   else if (gimple_code (stmt) == GIMPLE_RETURN)
5366     {
5367       tree op = gimple_return_retval (stmt);
5368       if (op)
5369 	{
5370 	  if (visit_addr
5371 	      && TREE_CODE (op) == ADDR_EXPR)
5372 	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5373 	  else if (visit_load)
5374 	    {
5375 	      op = get_base_loadstore (op);
5376 	      if (op)
5377 		ret |= visit_load (stmt, op, data);
5378 	    }
5379 	}
5380     }
5381   else if (visit_addr
5382 	   && gimple_code (stmt) == GIMPLE_PHI)
5383     {
5384       for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5385 	{
5386 	  tree op = PHI_ARG_DEF (stmt, i);
5387 	  if (TREE_CODE (op) == ADDR_EXPR)
5388 	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5389 	}
5390     }
5391 
5392   return ret;
5393 }
5394 
5395 /* Like walk_stmt_load_store_addr_ops but with a NULL visit_addr.  IPA-CP
5396    should make a faster clone for this case.  */
5397 
5398 bool
5399 walk_stmt_load_store_ops (gimple stmt, void *data,
5400 			  bool (*visit_load)(gimple, tree, void *),
5401 			  bool (*visit_store)(gimple, tree, void *))
5402 {
5403   return walk_stmt_load_store_addr_ops (stmt, data,
5404 					visit_load, visit_store, NULL);
5405 }
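
/* A usage sketch for the walkers above (editorial; the callback name
   'note_load' is hypothetical).  Counting the loads in a statement:

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                void *data)
     {
       ++*(unsigned *) data;
       return false;
     }

     unsigned nloads = 0;
     walk_stmt_load_store_ops (stmt, &nloads, note_load, NULL);

   Because the walker ORs the callback results, returning true from
   note_load would make the overall call return true as well.  */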
5406 
5407 /* Helper for gimple_ior_addresses_taken_1.  */
5408 
5409 static bool
5410 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5411 			      tree addr, void *data)
5412 {
5413   bitmap addresses_taken = (bitmap)data;
5414   addr = get_base_address (addr);
5415   if (addr
5416       && DECL_P (addr))
5417     {
5418       bitmap_set_bit (addresses_taken, DECL_UID (addr));
5419       return true;
5420     }
5421   return false;
5422 }
5423 
5424 /* In the ADDRESSES_TAKEN bitmap, set the bit for the UID of each
5425    decl whose address is taken in STMT.  Returns true if there
5426    were any in this stmt.  */
5427 
5428 bool
5429 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5430 {
5431   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5432 					gimple_ior_addresses_taken_1);
5433 }
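
/* A usage sketch (editorial): collect the decls whose address STMT
   takes into a fresh bitmap keyed by DECL_UID.

     bitmap addresses_taken = BITMAP_ALLOC (NULL);
     if (gimple_ior_addresses_taken (addresses_taken, stmt))
       bitmap_print (stderr, addresses_taken, "taken: ", "\n");
     BITMAP_FREE (addresses_taken);
*/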
5434 
5435 
5436 /* Return a printable name for symbol DECL.  */
5437 
5438 const char *
5439 gimple_decl_printable_name (tree decl, int verbosity)
5440 {
5441   if (!DECL_NAME (decl))
5442     return NULL;
5443 
5444   if (DECL_ASSEMBLER_NAME_SET_P (decl))
5445     {
5446       const char *str, *mangled_str;
5447       int dmgl_opts = DMGL_NO_OPTS;
5448 
5449       if (verbosity >= 2)
5450 	{
5451 	  dmgl_opts = DMGL_VERBOSE
5452 		      | DMGL_ANSI
5453 		      | DMGL_GNU_V3
5454 		      | DMGL_RET_POSTFIX;
5455 	  if (TREE_CODE (decl) == FUNCTION_DECL)
5456 	    dmgl_opts |= DMGL_PARAMS;
5457 	}
5458 
5459       mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5460       str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5461       return (str) ? str : mangled_str;
5462     }
5463 
5464   return IDENTIFIER_POINTER (DECL_NAME (decl));
5465 }
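
/* For example (editorial): for a C++ function whose assembler name
   is "_Z3fooi",

     gimple_decl_printable_name (decl, 2)

   demangles with DMGL_PARAMS and would typically yield "foo(int)",
   while a verbosity below 2 demangles without parameter types.  If
   demangling fails, the mangled string itself is returned.  */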
5466 
5467 /* Return true when STMT's arguments match those of FNDECL.  */
5468 
5469 static bool
5470 validate_call (gimple stmt, tree fndecl)
5471 {
5472   tree arg, targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5473   unsigned nargs = gimple_call_num_args (stmt);
5474   unsigned i;
5475   for (i = 0; i < nargs; ++i)
5476     {
5477       /* Variadic args follow.  */
5478       if (!targs)
5479 	return true;
5480       arg = gimple_call_arg (stmt, i);
5481       if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
5482 	  && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
5483 	;
5484       else if (POINTER_TYPE_P (TREE_TYPE (arg))
5485 	       && POINTER_TYPE_P (TREE_VALUE (targs)))
5486 	;
5487       else if (TREE_CODE (TREE_TYPE (arg))
5488 	       != TREE_CODE (TREE_VALUE (targs)))
5489 	return false;
5490       targs = TREE_CHAIN (targs);
5491     }
5492   if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
5493     return false;
5494   return true;
5495 }
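
/* An editorial example of the leniency above: a call 'foo (1L)'
   matches a prototype 'int foo (int)' because argument and parameter
   are both INTEGRAL_TYPE_P, and any pointer argument matches any
   pointer parameter; passing a struct where the prototype expects an
   int, however, fails the TREE_CODE comparison and returns false.  */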
5496 
5497 /* Return true when STMT is a builtin call of class KLASS.  */
5498 
5499 bool
5500 gimple_call_builtin_class_p (gimple stmt, enum built_in_class klass)
5501 {
5502   tree fndecl;
5503   if (is_gimple_call (stmt)
5504       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5505       && DECL_BUILT_IN_CLASS (fndecl) == klass)
5506     return validate_call (stmt, fndecl);
5507   return false;
5508 }
5509 
5510 /* Return true when STMT is a call to the normal-class builtin CODE.  */
5511 
5512 bool
5513 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5514 {
5515   tree fndecl;
5516   if (is_gimple_call (stmt)
5517       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5518       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5519       && DECL_FUNCTION_CODE (fndecl) == code)
5520     return validate_call (stmt, fndecl);
5521   return false;
5522 }
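
/* Typical uses of the two predicates above (editorial sketch):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       optimize_memcpy_call (stmt);   [hypothetical handler]

     if (gimple_call_builtin_class_p (stmt, BUILT_IN_MD))
       handle_md_builtin (stmt);      [hypothetical handler]

   Both predicates run validate_call, so calls made through a bogus
   user redeclaration with mismatched argument types are rejected.  */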
5523 
5524 /* Return true if STMT clobbers memory.  STMT is required to be a
5525    GIMPLE_ASM.  */
5526 
5527 bool
5528 gimple_asm_clobbers_memory_p (const_gimple stmt)
5529 {
5530   unsigned i;
5531 
5532   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5533     {
5534       tree op = gimple_asm_clobber_op (stmt, i);
5535       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5536 	return true;
5537     }
5538 
5539   return false;
5540 }
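
/* For example (editorial): the GIMPLE_ASM built for

     __asm__ __volatile__ ("" : : : "memory");

   carries one clobber operand whose TREE_VALUE is the string
   "memory", so gimple_asm_clobbers_memory_p returns true; an asm
   clobbering only registers, say "eax", returns false.  */
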
5541 #include "gt-gimple.h"
5542