1 /* Gimple IR support functions.
2 
3    Copyright (C) 2007-2016 Free Software Foundation, Inc.
4    Contributed by Aldy Hernandez <aldyh@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "diagnostic.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "stor-layout.h"
35 #include "internal-fn.h"
36 #include "tree-eh.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimplify.h"
40 #include "target.h"
41 
42 
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
53 
/* Size in bytes of each GSS_* structure listed in gsstruct.def,
   indexed by gimple_statement_structure_enum (see gimple_size).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
59 
/* Human-readable name for each GIMPLE code, indexed by gimple code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
65 
/* Map from each GIMPLE code to the GSS_* structure that implements it,
   indexed by gimple code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
71 
/* Gimple stats.  */

/* Count and total byte size of allocated tuples, per allocation kind.
   Only updated when GATHER_STATISTICS is enabled (see gimple_alloc_stat).  */
int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};
84 
/* Static gimple tuple members.  Out-of-line definitions for the
   per-class code_ constants declared in the class bodies.  */
const enum gimple_code gassign::code_;
const enum gimple_code gcall::code_;
const enum gimple_code gcond::code_;
89 
90 
91 /* Gimple tuple constructors.
92    Note: Any constructor taking a ``gimple_seq'' as a parameter, can
93    be passed a NULL to start with an empty sequence.  */
94 
/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  /* Raw field write; no consistency checking is done here — callers
     (gimple_alloc_stat) finish building the tuple afterwards.  */
  g->code = code;
}
102 
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  /* Look up the size of the GSS_* structure representing CODE.  */
  return gsstruct_code_size[gss_for_code (code)];
}
111 
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple *
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple *stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    /* The base structure already ends in a one-element operand array
       (see the gimple_ops_offset_ comment above), so only NUM_OPS - 1
       extra slots are required.  */
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  /* Cleared allocation: all fields and operand slots start out zero.  */
  stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
143 
/* Set SUBCODE to be the code of the expression computed by statement G.
   The subcode's meaning depends on G's code (e.g. the RHS tree code of
   an assignment, the comparison code of a cond).  */

static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
154 
155 
156 
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

/* Worker for the gimple_build_with_ops macro above; the macro supplies
   the MEM_STAT bookkeeping arguments at the call site.  */
static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple *s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
173 
174 
175 /* Build a GIMPLE_RETURN statement returning RETVAL.  */
176 
177 greturn *
gimple_build_return(tree retval)178 gimple_build_return (tree retval)
179 {
180   greturn *s
181     = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
182 					       2));
183   if (retval)
184     gimple_return_set_retval (s, retval);
185   return s;
186 }
187 
188 /* Reset alias information on call S.  */
189 
190 void
gimple_call_reset_alias_info(gcall * s)191 gimple_call_reset_alias_info (gcall *s)
192 {
193   if (gimple_call_flags (s) & ECF_CONST)
194     memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
195   else
196     pt_solution_reset (gimple_call_use_set (s));
197   if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
198     memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
199   else
200     pt_solution_reset (gimple_call_clobber_set (s));
201 }
202 
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
  /* Three operand slots are reserved in addition to the arguments; the
     callee itself is stored in slot 1 below (the other two slots hold
     the LHS and static chain — see gimple.h for the exact layout).  */
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  /* A bare FUNCTION_DECL callee is wrapped in an ADDR_EXPR so the
     operand is always an address.  */
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
221 
222 
223 /* Build a GIMPLE_CALL statement to function FN with the arguments
224    specified in vector ARGS.  */
225 
226 gcall *
gimple_build_call_vec(tree fn,vec<tree> args)227 gimple_build_call_vec (tree fn, vec<tree> args)
228 {
229   unsigned i;
230   unsigned nargs = args.length ();
231   gcall *call = gimple_build_call_1 (fn, nargs);
232 
233   for (i = 0; i < nargs; i++)
234     gimple_call_set_arg (call, i, args[i]);
235 
236   return call;
237 }
238 
239 
240 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
241    arguments.  The ... are the arguments.  */
242 
243 gcall *
gimple_build_call(tree fn,unsigned nargs,...)244 gimple_build_call (tree fn, unsigned nargs, ...)
245 {
246   va_list ap;
247   gcall *call;
248   unsigned i;
249 
250   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
251 
252   call = gimple_build_call_1 (fn, nargs);
253 
254   va_start (ap, nargs);
255   for (i = 0; i < nargs; i++)
256     gimple_call_set_arg (call, i, va_arg (ap, tree));
257   va_end (ap);
258 
259   return call;
260 }
261 
262 
263 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
264    arguments.  AP contains the arguments.  */
265 
266 gcall *
gimple_build_call_valist(tree fn,unsigned nargs,va_list ap)267 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
268 {
269   gcall *call;
270   unsigned i;
271 
272   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
273 
274   call = gimple_build_call_1 (fn, nargs);
275 
276   for (i = 0; i < nargs; i++)
277     gimple_call_set_arg (call, i, va_arg (ap, tree));
278 
279   return call;
280 }
281 
282 
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  /* Mark the call as internal before storing FN: internal calls carry
     an internal_fn code rather than a callee address tree.  */
  s->subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
298 
299 
300 /* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
301    the number of arguments.  The ... are the arguments.  */
302 
303 gcall *
gimple_build_call_internal(enum internal_fn fn,unsigned nargs,...)304 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
305 {
306   va_list ap;
307   gcall *call;
308   unsigned i;
309 
310   call = gimple_build_call_internal_1 (fn, nargs);
311   va_start (ap, nargs);
312   for (i = 0; i < nargs; i++)
313     gimple_call_set_arg (call, i, va_arg (ap, tree));
314   va_end (ap);
315 
316   return call;
317 }
318 
319 
320 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
321    specified in vector ARGS.  */
322 
323 gcall *
gimple_build_call_internal_vec(enum internal_fn fn,vec<tree> args)324 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
325 {
326   unsigned i, nargs;
327   gcall *call;
328 
329   nargs = args.length ();
330   call = gimple_build_call_internal_1 (fn, nargs);
331   for (i = 0; i < nargs; i++)
332     gimple_call_set_arg (call, i, args[i]);
333 
334   return call;
335 }
336 
337 
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gcall *
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gcall *call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the bare FUNCTION_DECL when the callee is known; otherwise
     use the (possibly indirect) callee expression.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* NOTE(review): the if/else below suggests the alloca-for-VLA and
     from-thunk flags share storage on the CALL_EXPR, so only one is
     copied — confirm against tree.h.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));
  gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));

  return call;
}
377 
378 
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gassign *
gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Split RHS into an expression code plus up to three operands, then
     defer to the operand-based overload below.  */
  extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
393 
394 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  OP2/OP3 may be NULL_TREE for unary/binary
   subcodes.  */

static inline gassign *
gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
		       tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gassign *p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = as_a <gassign *> (
        gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				    PASS_MEM_STAT));
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      /* A second operand is only meaningful when the subcode allocated
	 room for it.  */
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}
428 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  /* Ternary variant: thin public wrapper over gimple_build_assign_1.  */
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
438 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  /* Binary variant: the unused third operand is NULL_TREE.  */
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
449 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  /* Unary variant: the unused second and third operands are NULL_TREE.  */
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
458 
459 
460 /* Build a GIMPLE_COND statement.
461 
462    PRED is the condition used to compare LHS and the RHS.
463    T_LABEL is the label to jump to if the condition is true.
464    F_LABEL is the label to jump to otherwise.  */
465 
466 gcond *
gimple_build_cond(enum tree_code pred_code,tree lhs,tree rhs,tree t_label,tree f_label)467 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
468 		   tree t_label, tree f_label)
469 {
470   gcond *p;
471 
472   gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
473   p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
474   gimple_cond_set_lhs (p, lhs);
475   gimple_cond_set_rhs (p, rhs);
476   gimple_cond_set_true_label (p, t_label);
477   gimple_cond_set_false_label (p, f_label);
478   return p;
479 }
480 
481 /* Build a GIMPLE_COND statement from the conditional expression tree
482    COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
483 
484 gcond *
gimple_build_cond_from_tree(tree cond,tree t_label,tree f_label)485 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
486 {
487   enum tree_code code;
488   tree lhs, rhs;
489 
490   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
491   return gimple_build_cond (code, lhs, rhs, t_label, f_label);
492 }
493 
494 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
495    boolean expression tree COND.  */
496 
497 void
gimple_cond_set_condition_from_tree(gcond * stmt,tree cond)498 gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
499 {
500   enum tree_code code;
501   tree lhs, rhs;
502 
503   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
504   gimple_cond_set_condition (stmt, code, lhs, rhs);
505 }
506 
507 /* Build a GIMPLE_LABEL statement for LABEL.  */
508 
509 glabel *
gimple_build_label(tree label)510 gimple_build_label (tree label)
511 {
512   glabel *p
513     = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
514   gimple_label_set_label (p, label);
515   return p;
516 }
517 
518 /* Build a GIMPLE_GOTO statement to label DEST.  */
519 
520 ggoto *
gimple_build_goto(tree dest)521 gimple_build_goto (tree dest)
522 {
523   ggoto *p
524     = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
525   gimple_goto_set_dest (p, dest);
526   return p;
527 }
528 
529 
/* Build a GIMPLE_NOP statement.  */

gimple *
gimple_build_nop (void)
{
  /* A NOP carries no operands and no subcode.  */
  return gimple_alloc (GIMPLE_NOP, 0);
}
537 
538 
539 /* Build a GIMPLE_BIND statement.
540    VARS are the variables in BODY.
541    BLOCK is the containing block.  */
542 
543 gbind *
gimple_build_bind(tree vars,gimple_seq body,tree block)544 gimple_build_bind (tree vars, gimple_seq body, tree block)
545 {
546   gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
547   gimple_bind_set_vars (p, vars);
548   if (body)
549     gimple_bind_set_body (p, body);
550   if (block)
551     gimple_bind_set_block (p, block);
552   return p;
553 }
554 
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of label operands.  */

static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gasm *p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  /* One operand slot per input, output, clobber and label.  */
  p = as_a <gasm *> (
        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			       ninputs + noutputs + nclobbers + nlabels));

  p->ni = ninputs;
  p->no = noutputs;
  p->nc = nclobbers;
  p->nl = nlabels;
  /* Copy the assembly text into GC-managed memory.  */
  p->string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
589 
590 /* Build a GIMPLE_ASM statement.
591 
592    STRING is the assembly code.
593    NINPUT is the number of register inputs.
594    NOUTPUT is the number of register outputs.
595    NCLOBBERS is the number of clobbered registers.
596    INPUTS is a vector of the input register parameters.
597    OUTPUTS is a vector of the output register parameters.
598    CLOBBERS is a vector of the clobbered register parameters.
599    LABELS is a vector of destination labels.  */
600 
601 gasm *
gimple_build_asm_vec(const char * string,vec<tree,va_gc> * inputs,vec<tree,va_gc> * outputs,vec<tree,va_gc> * clobbers,vec<tree,va_gc> * labels)602 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
603                       vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
604 		      vec<tree, va_gc> *labels)
605 {
606   gasm *p;
607   unsigned i;
608 
609   p = gimple_build_asm_1 (string,
610                           vec_safe_length (inputs),
611                           vec_safe_length (outputs),
612                           vec_safe_length (clobbers),
613 			  vec_safe_length (labels));
614 
615   for (i = 0; i < vec_safe_length (inputs); i++)
616     gimple_asm_set_input_op (p, i, (*inputs)[i]);
617 
618   for (i = 0; i < vec_safe_length (outputs); i++)
619     gimple_asm_set_output_op (p, i, (*outputs)[i]);
620 
621   for (i = 0; i < vec_safe_length (clobbers); i++)
622     gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
623 
624   for (i = 0; i < vec_safe_length (labels); i++)
625     gimple_asm_set_label_op (p, i, (*labels)[i]);
626 
627   return p;
628 }
629 
630 /* Build a GIMPLE_CATCH statement.
631 
632   TYPES are the catch types.
633   HANDLER is the exception handler.  */
634 
635 gcatch *
gimple_build_catch(tree types,gimple_seq handler)636 gimple_build_catch (tree types, gimple_seq handler)
637 {
638   gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
639   gimple_catch_set_types (p, types);
640   if (handler)
641     gimple_catch_set_handler (p, handler);
642 
643   return p;
644 }
645 
646 /* Build a GIMPLE_EH_FILTER statement.
647 
648    TYPES are the filter's types.
649    FAILURE is the filter's failure action.  */
650 
651 geh_filter *
gimple_build_eh_filter(tree types,gimple_seq failure)652 gimple_build_eh_filter (tree types, gimple_seq failure)
653 {
654   geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
655   gimple_eh_filter_set_types (p, types);
656   if (failure)
657     gimple_eh_filter_set_failure (p, failure);
658 
659   return p;
660 }
661 
662 /* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
663 
664 geh_mnt *
gimple_build_eh_must_not_throw(tree decl)665 gimple_build_eh_must_not_throw (tree decl)
666 {
667   geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
668 
669   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
670   gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
671   gimple_eh_must_not_throw_set_fndecl (p, decl);
672 
673   return p;
674 }
675 
676 /* Build a GIMPLE_EH_ELSE statement.  */
677 
678 geh_else *
gimple_build_eh_else(gimple_seq n_body,gimple_seq e_body)679 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
680 {
681   geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
682   gimple_eh_else_set_n_body (p, n_body);
683   gimple_eh_else_set_e_body (p, e_body);
684   return p;
685 }
686 
687 /* Build a GIMPLE_TRY statement.
688 
689    EVAL is the expression to evaluate.
690    CLEANUP is the cleanup expression.
691    KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
692    whether this is a try/catch or a try/finally respectively.  */
693 
694 gtry *
gimple_build_try(gimple_seq eval,gimple_seq cleanup,enum gimple_try_flags kind)695 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
696     		  enum gimple_try_flags kind)
697 {
698   gtry *p;
699 
700   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
701   p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
702   gimple_set_subcode (p, kind);
703   if (eval)
704     gimple_try_set_eval (p, eval);
705   if (cleanup)
706     gimple_try_set_cleanup (p, cleanup);
707 
708   return p;
709 }
710 
711 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
712 
713    CLEANUP is the cleanup expression.  */
714 
715 gimple *
gimple_build_wce(gimple_seq cleanup)716 gimple_build_wce (gimple_seq cleanup)
717 {
718   gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
719   if (cleanup)
720     gimple_wce_set_cleanup (p, cleanup);
721 
722   return p;
723 }
724 
725 
/* Build a GIMPLE_RESX statement.  REGION is the region number the
   statement belongs to.  */

gresx *
gimple_build_resx (int region)
{
  gresx *p
    = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
  p->region = region;
  return p;
}
736 
737 
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.
   Case labels are left unset; callers fill them in via
   gimple_switch_set_label.  */

gswitch *
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
							ERROR_MARK,
							1 + 1 + nlabels));
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}
755 
756 /* Build a GIMPLE_SWITCH statement.
757 
758    INDEX is the switch's index.
759    DEFAULT_LABEL is the default label
760    ARGS is a vector of labels excluding the default.  */
761 
762 gswitch *
gimple_build_switch(tree index,tree default_label,vec<tree> args)763 gimple_build_switch (tree index, tree default_label, vec<tree> args)
764 {
765   unsigned i, nlabels = args.length ();
766 
767   gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
768 
769   /* Copy the labels from the vector to the switch statement.  */
770   for (i = 0; i < nlabels; i++)
771     gimple_switch_set_label (p, i + 1, args[i]);
772 
773   return p;
774 }
775 
/* Build a GIMPLE_EH_DISPATCH statement.  REGION is the region number
   the dispatch belongs to.  */

geh_dispatch *
gimple_build_eh_dispatch (int region)
{
  geh_dispatch *p
    = as_a <geh_dispatch *> (
	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
  p->region = region;
  return p;
}
787 
/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gdebug *
gimple_build_debug_bind_stat (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
  /* Two operands: the variable and the bound value.  The subcode
     distinguishes the bind flavor from other GIMPLE_DEBUG variants.  */
  gdebug *p
    = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
						   (unsigned)GIMPLE_DEBUG_BIND, 2
						   PASS_MEM_STAT));
  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  /* NOTE(review): only the location is copied from STMT here; confirm
     whether the block travels with the location as the header comment
     implies.  */
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
806 
807 
/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple *stmt MEM_STAT_DECL)
{
  /* Two operands: the variable and the bound value.  */
  gdebug *p
    = as_a <gdebug *> (
        gimple_build_with_ops_stat (GIMPLE_DEBUG,
				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
				    PASS_MEM_STAT));

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  /* NOTE(review): as in gimple_build_debug_bind_stat, only the location
     is copied from STMT here.  */
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
829 
830 
831 /* Build a GIMPLE_OMP_CRITICAL statement.
832 
833    BODY is the sequence of statements for which only one thread can execute.
834    NAME is optional identifier for this critical block.
835    CLAUSES are clauses for this critical block.  */
836 
837 gomp_critical *
gimple_build_omp_critical(gimple_seq body,tree name,tree clauses)838 gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
839 {
840   gomp_critical *p
841     = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
842   gimple_omp_critical_set_name (p, name);
843   gimple_omp_critical_set_clauses (p, clauses);
844   if (body)
845     gimple_omp_set_body (p, body);
846 
847   return p;
848 }
849 
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES, are any of the construct's clauses.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->collapse = collapse;
  /* One zero-initialized iteration descriptor per collapsed loop
     level, allocated in GC memory.  */
  p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);

  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
875 
876 
877 /* Build a GIMPLE_OMP_PARALLEL statement.
878 
879    BODY is sequence of statements which are executed in parallel.
880    CLAUSES, are the OMP parallel construct's clauses.
881    CHILD_FN is the function created for the parallel threads to execute.
882    DATA_ARG are the shared data argument(s).  */
883 
884 gomp_parallel *
gimple_build_omp_parallel(gimple_seq body,tree clauses,tree child_fn,tree data_arg)885 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
886 			   tree data_arg)
887 {
888   gomp_parallel *p
889     = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
890   if (body)
891     gimple_omp_set_body (p, body);
892   gimple_omp_parallel_set_clauses (p, clauses);
893   gimple_omp_parallel_set_child_fn (p, child_fn);
894   gimple_omp_parallel_set_data_arg (p, data_arg);
895 
896   return p;
897 }
898 
899 
900 /* Build a GIMPLE_OMP_TASK statement.
901 
902    BODY is sequence of statements which are executed by the explicit task.
903    CLAUSES, are the OMP parallel construct's clauses.
904    CHILD_FN is the function created for the parallel threads to execute.
905    DATA_ARG are the shared data argument(s).
906    COPY_FN is the optional function for firstprivate initialization.
907    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
908 
909 gomp_task *
gimple_build_omp_task(gimple_seq body,tree clauses,tree child_fn,tree data_arg,tree copy_fn,tree arg_size,tree arg_align)910 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
911 		       tree data_arg, tree copy_fn, tree arg_size,
912 		       tree arg_align)
913 {
914   gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
915   if (body)
916     gimple_omp_set_body (p, body);
917   gimple_omp_task_set_clauses (p, clauses);
918   gimple_omp_task_set_child_fn (p, child_fn);
919   gimple_omp_task_set_data_arg (p, data_arg);
920   gimple_omp_task_set_copy_fn (p, copy_fn);
921   gimple_omp_task_set_arg_size (p, arg_size);
922   gimple_omp_task_set_arg_align (p, arg_align);
923 
924   return p;
925 }
926 
927 
928 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
929 
930    BODY is the sequence of statements in the section.  */
931 
932 gimple *
gimple_build_omp_section(gimple_seq body)933 gimple_build_omp_section (gimple_seq body)
934 {
935   gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
936   if (body)
937     gimple_omp_set_body (p, body);
938 
939   return p;
940 }
941 
942 
943 /* Build a GIMPLE_OMP_MASTER statement.
944 
945    BODY is the sequence of statements to be executed by just the master.  */
946 
947 gimple *
gimple_build_omp_master(gimple_seq body)948 gimple_build_omp_master (gimple_seq body)
949 {
950   gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
951   if (body)
952     gimple_omp_set_body (p, body);
953 
954   return p;
955 }
956 
957 /* Build a GIMPLE_OMP_GRID_BODY statement.
958 
959    BODY is the sequence of statements to be executed by the kernel.  */
960 
961 gimple *
gimple_build_omp_grid_body(gimple_seq body)962 gimple_build_omp_grid_body (gimple_seq body)
963 {
964   gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
965   if (body)
966     gimple_omp_set_body (p, body);
967 
968   return p;
969 }
970 
971 /* Build a GIMPLE_OMP_TASKGROUP statement.
972 
973    BODY is the sequence of statements to be executed by the taskgroup
974    construct.  */
975 
976 gimple *
gimple_build_omp_taskgroup(gimple_seq body)977 gimple_build_omp_taskgroup (gimple_seq body)
978 {
979   gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
980   if (body)
981     gimple_omp_set_body (p, body);
982 
983   return p;
984 }
985 
986 
987 /* Build a GIMPLE_OMP_CONTINUE statement.
988 
989    CONTROL_DEF is the definition of the control variable.
990    CONTROL_USE is the use of the control variable.  */
991 
992 gomp_continue *
gimple_build_omp_continue(tree control_def,tree control_use)993 gimple_build_omp_continue (tree control_def, tree control_use)
994 {
995   gomp_continue *p
996     = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
997   gimple_omp_continue_set_control_def (p, control_def);
998   gimple_omp_continue_set_control_use (p, control_use);
999   return p;
1000 }
1001 
1002 /* Build a GIMPLE_OMP_ORDERED statement.
1003 
1004    BODY is the sequence of statements inside a loop that will executed in
1005    sequence.
1006    CLAUSES are clauses for this statement.  */
1007 
1008 gomp_ordered *
gimple_build_omp_ordered(gimple_seq body,tree clauses)1009 gimple_build_omp_ordered (gimple_seq body, tree clauses)
1010 {
1011   gomp_ordered *p
1012     = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
1013   gimple_omp_ordered_set_clauses (p, clauses);
1014   if (body)
1015     gimple_omp_set_body (p, body);
1016 
1017   return p;
1018 }
1019 
1020 
1021 /* Build a GIMPLE_OMP_RETURN statement.
1022    WAIT_P is true if this is a non-waiting return.  */
1023 
1024 gimple *
gimple_build_omp_return(bool wait_p)1025 gimple_build_omp_return (bool wait_p)
1026 {
1027   gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1028   if (wait_p)
1029     gimple_omp_return_set_nowait (p);
1030 
1031   return p;
1032 }
1033 
1034 
1035 /* Build a GIMPLE_OMP_SECTIONS statement.
1036 
1037    BODY is a sequence of section statements.
1038    CLAUSES are any of the OMP sections contsruct's clauses: private,
1039    firstprivate, lastprivate, reduction, and nowait.  */
1040 
1041 gomp_sections *
gimple_build_omp_sections(gimple_seq body,tree clauses)1042 gimple_build_omp_sections (gimple_seq body, tree clauses)
1043 {
1044   gomp_sections *p
1045     = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1046   if (body)
1047     gimple_omp_set_body (p, body);
1048   gimple_omp_sections_set_clauses (p, clauses);
1049 
1050   return p;
1051 }
1052 
1053 
1054 /* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1055 
1056 gimple *
gimple_build_omp_sections_switch(void)1057 gimple_build_omp_sections_switch (void)
1058 {
1059   return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1060 }
1061 
1062 
1063 /* Build a GIMPLE_OMP_SINGLE statement.
1064 
1065    BODY is the sequence of statements that will be executed once.
1066    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1067    copyprivate, nowait.  */
1068 
1069 gomp_single *
gimple_build_omp_single(gimple_seq body,tree clauses)1070 gimple_build_omp_single (gimple_seq body, tree clauses)
1071 {
1072   gomp_single *p
1073     = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1074   if (body)
1075     gimple_omp_set_body (p, body);
1076   gimple_omp_single_set_clauses (p, clauses);
1077 
1078   return p;
1079 }
1080 
1081 
1082 /* Build a GIMPLE_OMP_TARGET statement.
1083 
1084    BODY is the sequence of statements that will be executed.
1085    KIND is the kind of the region.
1086    CLAUSES are any of the construct's clauses.  */
1087 
1088 gomp_target *
gimple_build_omp_target(gimple_seq body,int kind,tree clauses)1089 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1090 {
1091   gomp_target *p
1092     = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1093   if (body)
1094     gimple_omp_set_body (p, body);
1095   gimple_omp_target_set_clauses (p, clauses);
1096   gimple_omp_target_set_kind (p, kind);
1097 
1098   return p;
1099 }
1100 
1101 
1102 /* Build a GIMPLE_OMP_TEAMS statement.
1103 
1104    BODY is the sequence of statements that will be executed.
1105    CLAUSES are any of the OMP teams construct's clauses.  */
1106 
1107 gomp_teams *
gimple_build_omp_teams(gimple_seq body,tree clauses)1108 gimple_build_omp_teams (gimple_seq body, tree clauses)
1109 {
1110   gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1111   if (body)
1112     gimple_omp_set_body (p, body);
1113   gimple_omp_teams_set_clauses (p, clauses);
1114 
1115   return p;
1116 }
1117 
1118 
1119 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1120 
1121 gomp_atomic_load *
gimple_build_omp_atomic_load(tree lhs,tree rhs)1122 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1123 {
1124   gomp_atomic_load *p
1125     = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1126   gimple_omp_atomic_load_set_lhs (p, lhs);
1127   gimple_omp_atomic_load_set_rhs (p, rhs);
1128   return p;
1129 }
1130 
1131 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1132 
1133    VAL is the value we are storing.  */
1134 
1135 gomp_atomic_store *
gimple_build_omp_atomic_store(tree val)1136 gimple_build_omp_atomic_store (tree val)
1137 {
1138   gomp_atomic_store *p
1139     = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1140   gimple_omp_atomic_store_set_val (p, val);
1141   return p;
1142 }
1143 
1144 /* Build a GIMPLE_TRANSACTION statement.  */
1145 
1146 gtransaction *
gimple_build_transaction(gimple_seq body)1147 gimple_build_transaction (gimple_seq body)
1148 {
1149   gtransaction *p
1150     = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1151   gimple_transaction_set_body (p, body);
1152   gimple_transaction_set_label_norm (p, 0);
1153   gimple_transaction_set_label_uninst (p, 0);
1154   gimple_transaction_set_label_over (p, 0);
1155   return p;
1156 }
1157 
1158 #if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.

   GS is the offending statement.  FILE, LINE and FUNCTION identify the
   checking site that detected the mismatch.  CODE and SUBCODE are the
   statement code and tree subcode the checker expected.  Calls
   internal_error, so this never returns.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  /* Only print a subcode name when one is recorded.  */
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
1175 #endif /* ENABLE_GIMPLE_CHECKING */
1176 
1177 
1178 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1179    *SEQ_P is NULL, a new sequence is allocated.  */
1180 
1181 void
gimple_seq_add_stmt(gimple_seq * seq_p,gimple * gs)1182 gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1183 {
1184   gimple_stmt_iterator si;
1185   if (gs == NULL)
1186     return;
1187 
1188   si = gsi_last (*seq_p);
1189   gsi_insert_after (&si, gs, GSI_NEW_STMT);
1190 }
1191 
1192 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1193    *SEQ_P is NULL, a new sequence is allocated.  This function is
1194    similar to gimple_seq_add_stmt, but does not scan the operands.
1195    During gimplification, we need to manipulate statement sequences
1196    before the def/use vectors have been constructed.  */
1197 
1198 void
gimple_seq_add_stmt_without_update(gimple_seq * seq_p,gimple * gs)1199 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1200 {
1201   gimple_stmt_iterator si;
1202 
1203   if (gs == NULL)
1204     return;
1205 
1206   si = gsi_last (*seq_p);
1207   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1208 }
1209 
1210 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1211    NULL, a new sequence is allocated.  */
1212 
1213 void
gimple_seq_add_seq(gimple_seq * dst_p,gimple_seq src)1214 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1215 {
1216   gimple_stmt_iterator si;
1217   if (src == NULL)
1218     return;
1219 
1220   si = gsi_last (*dst_p);
1221   gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1222 }
1223 
1224 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1225    NULL, a new sequence is allocated.  This function is
1226    similar to gimple_seq_add_seq, but does not scan the operands.  */
1227 
1228 void
gimple_seq_add_seq_without_update(gimple_seq * dst_p,gimple_seq src)1229 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1230 {
1231   gimple_stmt_iterator si;
1232   if (src == NULL)
1233     return;
1234 
1235   si = gsi_last (*dst_p);
1236   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1237 }
1238 
1239 /* Determine whether to assign a location to the statement GS.  */
1240 
1241 static bool
should_carry_location_p(gimple * gs)1242 should_carry_location_p (gimple *gs)
1243 {
1244   /* Don't emit a line note for a label.  We particularly don't want to
1245      emit one for the break label, since it doesn't actually correspond
1246      to the beginning of the loop/switch.  */
1247   if (gimple_code (gs) == GIMPLE_LABEL)
1248     return false;
1249 
1250   return true;
1251 }
1252 
1253 /* Set the location for gimple statement GS to LOCATION.  */
1254 
1255 static void
annotate_one_with_location(gimple * gs,location_t location)1256 annotate_one_with_location (gimple *gs, location_t location)
1257 {
1258   if (!gimple_has_location (gs)
1259       && !gimple_do_not_emit_location_p (gs)
1260       && should_carry_location_p (gs))
1261     gimple_set_location (gs, location);
1262 }
1263 
1264 /* Set LOCATION for all the statements after iterator GSI in sequence
1265    SEQ.  If GSI is pointing to the end of the sequence, start with the
1266    first statement in SEQ.  */
1267 
1268 void
annotate_all_with_location_after(gimple_seq seq,gimple_stmt_iterator gsi,location_t location)1269 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1270 				  location_t location)
1271 {
1272   if (gsi_end_p (gsi))
1273     gsi = gsi_start (seq);
1274   else
1275     gsi_next (&gsi);
1276 
1277   for (; !gsi_end_p (gsi); gsi_next (&gsi))
1278     annotate_one_with_location (gsi_stmt (gsi), location);
1279 }
1280 
1281 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1282 
1283 void
annotate_all_with_location(gimple_seq stmt_p,location_t location)1284 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1285 {
1286   gimple_stmt_iterator i;
1287 
1288   if (gimple_seq_empty_p (stmt_p))
1289     return;
1290 
1291   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1292     {
1293       gimple *gs = gsi_stmt (i);
1294       annotate_one_with_location (gs, location);
1295     }
1296 }
1297 
1298 /* Helper function of empty_body_p.  Return true if STMT is an empty
1299    statement.  */
1300 
1301 static bool
empty_stmt_p(gimple * stmt)1302 empty_stmt_p (gimple *stmt)
1303 {
1304   if (gimple_code (stmt) == GIMPLE_NOP)
1305     return true;
1306   if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1307     return empty_body_p (gimple_bind_body (bind_stmt));
1308   return false;
1309 }
1310 
1311 
1312 /* Return true if BODY contains nothing but empty statements.  */
1313 
1314 bool
empty_body_p(gimple_seq body)1315 empty_body_p (gimple_seq body)
1316 {
1317   gimple_stmt_iterator i;
1318 
1319   if (gimple_seq_empty_p (body))
1320     return true;
1321   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1322     if (!empty_stmt_p (gsi_stmt (i))
1323 	&& !is_gimple_debug (gsi_stmt (i)))
1324       return false;
1325 
1326   return true;
1327 }
1328 
1329 
1330 /* Perform a deep copy of sequence SRC and return the result.  */
1331 
1332 gimple_seq
gimple_seq_copy(gimple_seq src)1333 gimple_seq_copy (gimple_seq src)
1334 {
1335   gimple_stmt_iterator gsi;
1336   gimple_seq new_seq = NULL;
1337   gimple *stmt;
1338 
1339   for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1340     {
1341       stmt = gimple_copy (gsi_stmt (gsi));
1342       gimple_seq_add_stmt (&new_seq, stmt);
1343     }
1344 
1345   return new_seq;
1346 }
1347 
1348 
1349 
1350 /* Return true if calls C1 and C2 are known to go to the same function.  */
1351 
1352 bool
gimple_call_same_target_p(const gimple * c1,const gimple * c2)1353 gimple_call_same_target_p (const gimple *c1, const gimple *c2)
1354 {
1355   if (gimple_call_internal_p (c1))
1356     return (gimple_call_internal_p (c2)
1357 	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
1358 	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
1359 		|| c1 == c2));
1360   else
1361     return (gimple_call_fn (c1) == gimple_call_fn (c2)
1362 	    || (gimple_call_fndecl (c1)
1363 		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1364 }
1365 
1366 /* Detect flags from a GIMPLE_CALL.  This is just like
1367    call_expr_flags, but for gimple tuples.  */
1368 
1369 int
gimple_call_flags(const gimple * stmt)1370 gimple_call_flags (const gimple *stmt)
1371 {
1372   int flags;
1373   tree decl = gimple_call_fndecl (stmt);
1374 
1375   if (decl)
1376     flags = flags_from_decl_or_type (decl);
1377   else if (gimple_call_internal_p (stmt))
1378     flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1379   else
1380     flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1381 
1382   if (stmt->subcode & GF_CALL_NOTHROW)
1383     flags |= ECF_NOTHROW;
1384 
1385   return flags;
1386 }
1387 
1388 /* Return the "fn spec" string for call STMT.  */
1389 
1390 static const_tree
gimple_call_fnspec(const gcall * stmt)1391 gimple_call_fnspec (const gcall *stmt)
1392 {
1393   tree type, attr;
1394 
1395   if (gimple_call_internal_p (stmt))
1396     return internal_fn_fnspec (gimple_call_internal_fn (stmt));
1397 
1398   type = gimple_call_fntype (stmt);
1399   if (!type)
1400     return NULL_TREE;
1401 
1402   attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1403   if (!attr)
1404     return NULL_TREE;
1405 
1406   return TREE_VALUE (TREE_VALUE (attr));
1407 }
1408 
1409 /* Detects argument flags for argument number ARG on call STMT.  */
1410 
1411 int
gimple_call_arg_flags(const gcall * stmt,unsigned arg)1412 gimple_call_arg_flags (const gcall *stmt, unsigned arg)
1413 {
1414   const_tree attr = gimple_call_fnspec (stmt);
1415 
1416   if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1417     return 0;
1418 
1419   switch (TREE_STRING_POINTER (attr)[1 + arg])
1420     {
1421     case 'x':
1422     case 'X':
1423       return EAF_UNUSED;
1424 
1425     case 'R':
1426       return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1427 
1428     case 'r':
1429       return EAF_NOCLOBBER | EAF_NOESCAPE;
1430 
1431     case 'W':
1432       return EAF_DIRECT | EAF_NOESCAPE;
1433 
1434     case 'w':
1435       return EAF_NOESCAPE;
1436 
1437     case '.':
1438     default:
1439       return 0;
1440     }
1441 }
1442 
1443 /* Detects return flags for the call STMT.  */
1444 
1445 int
gimple_call_return_flags(const gcall * stmt)1446 gimple_call_return_flags (const gcall *stmt)
1447 {
1448   const_tree attr;
1449 
1450   if (gimple_call_flags (stmt) & ECF_MALLOC)
1451     return ERF_NOALIAS;
1452 
1453   attr = gimple_call_fnspec (stmt);
1454   if (!attr || TREE_STRING_LENGTH (attr) < 1)
1455     return 0;
1456 
1457   switch (TREE_STRING_POINTER (attr)[0])
1458     {
1459     case '1':
1460     case '2':
1461     case '3':
1462     case '4':
1463       return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1464 
1465     case 'm':
1466       return ERF_NOALIAS;
1467 
1468     case '.':
1469     default:
1470       return 0;
1471     }
1472 }
1473 
1474 
1475 /* Return true if GS is a copy assignment.  */
1476 
1477 bool
gimple_assign_copy_p(gimple * gs)1478 gimple_assign_copy_p (gimple *gs)
1479 {
1480   return (gimple_assign_single_p (gs)
1481 	  && is_gimple_val (gimple_op (gs, 1)));
1482 }
1483 
1484 
1485 /* Return true if GS is a SSA_NAME copy assignment.  */
1486 
1487 bool
gimple_assign_ssa_name_copy_p(gimple * gs)1488 gimple_assign_ssa_name_copy_p (gimple *gs)
1489 {
1490   return (gimple_assign_single_p (gs)
1491 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1492 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1493 }
1494 
1495 
1496 /* Return true if GS is an assignment with a unary RHS, but the
1497    operator has no effect on the assigned value.  The logic is adapted
1498    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1499    instances in which STRIP_NOPS was previously applied to the RHS of
1500    an assignment.
1501 
1502    NOTE: In the use cases that led to the creation of this function
1503    and of gimple_assign_single_p, it is typical to test for either
1504    condition and to proceed in the same manner.  In each case, the
1505    assigned value is represented by the single RHS operand of the
1506    assignment.  I suspect there may be cases where gimple_assign_copy_p,
1507    gimple_assign_single_p, or equivalent logic is used where a similar
1508    treatment of unary NOPs is appropriate.  */
1509 
1510 bool
gimple_assign_unary_nop_p(gimple * gs)1511 gimple_assign_unary_nop_p (gimple *gs)
1512 {
1513   return (is_gimple_assign (gs)
1514           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1515               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1516           && gimple_assign_rhs1 (gs) != error_mark_node
1517           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1518               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1519 }
1520 
/* Set BB to be the basic block holding statement STMT.  For labels,
   also record the label in CFUN's block-to-labels map so that edge
   creation for GIMPLE_GOTOs can look blocks up by label quickly.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is mapped: assign it a fresh UID and
	     grow the map with headroom (3/2 growth) if needed.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
				     new_len);
	    }
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1557 
1558 
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Decompose EXPR into a subcode plus up to three flat operands, then
     delegate to the operand-based setter.  */
  extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
}
1585 
1586 
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple *stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Allocate a wider tuple, bit-copy the old one into it, and swap
	 it into the sequence in place of the original.  */
      gimple *new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Install the new operand count, code and operands; OP2/OP3 are only
     stored when CODE actually uses them.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1623 
1624 
1625 /* Return the LHS of a statement that performs an assignment,
1626    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1627    for a call to a function that returns no value, or for a
1628    statement other than an assignment or a call.  */
1629 
1630 tree
gimple_get_lhs(const gimple * stmt)1631 gimple_get_lhs (const gimple *stmt)
1632 {
1633   enum gimple_code code = gimple_code (stmt);
1634 
1635   if (code == GIMPLE_ASSIGN)
1636     return gimple_assign_lhs (stmt);
1637   else if (code == GIMPLE_CALL)
1638     return gimple_call_lhs (stmt);
1639   else
1640     return NULL_TREE;
1641 }
1642 
1643 
1644 /* Set the LHS of a statement that performs an assignment,
1645    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1646 
1647 void
gimple_set_lhs(gimple * stmt,tree lhs)1648 gimple_set_lhs (gimple *stmt, tree lhs)
1649 {
1650   enum gimple_code code = gimple_code (stmt);
1651 
1652   if (code == GIMPLE_ASSIGN)
1653     gimple_assign_set_lhs (stmt, lhs);
1654   else if (code == GIMPLE_CALL)
1655     gimple_call_set_lhs (stmt, lhs);
1656   else
1657     gcc_unreachable ();
1658 }
1659 
1660 
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple *
gimple_copy (gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple *copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  The shallow
     memcpy above left COPY sharing its sub-sequences and clause trees
     with STMT; each case below replaces those with fresh copies.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a <gbind *> (stmt);
	    gbind *bind_copy = as_a <gbind *> (copy);
	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
	    gimple_bind_set_body (bind_copy, new_seq);
	    gimple_bind_set_vars (bind_copy,
				  unshare_expr (gimple_bind_vars (bind_stmt)));
	    /* BLOCK is intentionally shared, not unshared.  */
	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
	  }
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    gcatch *catch_copy = as_a <gcatch *> (copy);
	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
	    gimple_catch_set_handler (catch_copy, new_seq);
	    t = unshare_expr (gimple_catch_types (catch_stmt));
	    gimple_catch_set_types (catch_copy, t);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  {
	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
	    new_seq
	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
	    gimple_eh_filter_set_types (eh_filter_copy, t);
	  }
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
	  }
	  break;

	case GIMPLE_TRY:
	  {
	    gtry *try_stmt = as_a <gtry *> (stmt);
	    gtry *try_copy = as_a <gtry *> (copy);
	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
	    gimple_try_set_eval (try_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
	    gimple_try_set_cleanup (try_copy, new_seq);
	  }
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* Allocate a fresh iteration-variable array for the copy;
	       the memcpy above left it sharing STMT's array.  */
	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
          }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
	  t = unshare_expr (gimple_omp_critical_clauses
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_ORDERED:
	  t = unshare_expr (gimple_omp_ordered_clauses
				(as_a <gomp_ordered *> (stmt)));
	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_GRID_BODY:
	copy_omp_body:
	  /* Shared tail for all OMP statements: deep-copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (
				       as_a <gtransaction *> (stmt)));
	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
				       new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      /* Virtual operands are shared with the original statement.  */
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
1868 
1869 
1870 /* Return true if statement S has side-effects.  We consider a
1871    statement to have side effects if:
1872 
1873    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1874    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
1875 
1876 bool
gimple_has_side_effects(const gimple * s)1877 gimple_has_side_effects (const gimple *s)
1878 {
1879   if (is_gimple_debug (s))
1880     return false;
1881 
1882   /* We don't have to scan the arguments to check for
1883      volatile arguments, though, at present, we still
1884      do a scan to check for TREE_SIDE_EFFECTS.  */
1885   if (gimple_has_volatile_ops (s))
1886     return true;
1887 
1888   if (gimple_code (s) == GIMPLE_ASM
1889       && gimple_asm_volatile_p (as_a <const gasm *> (s)))
1890     return true;
1891 
1892   if (is_gimple_call (s))
1893     {
1894       int flags = gimple_call_flags (s);
1895 
1896       /* An infinite loop is considered a side effect.  */
1897       if (!(flags & (ECF_CONST | ECF_PURE))
1898 	  || (flags & ECF_LOOPING_CONST_OR_PURE))
1899 	return true;
1900 
1901       return false;
1902     }
1903 
1904   return false;
1905 }
1906 
1907 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1908    Return true if S can trap.  When INCLUDE_MEM is true, check whether
1909    the memory operations could trap.  When INCLUDE_STORES is true and
1910    S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */
1911 
1912 bool
gimple_could_trap_p_1(gimple * s,bool include_mem,bool include_stores)1913 gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
1914 {
1915   tree t, div = NULL_TREE;
1916   enum tree_code op;
1917 
1918   if (include_mem)
1919     {
1920       unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
1921 
1922       for (i = start; i < gimple_num_ops (s); i++)
1923 	if (tree_could_trap_p (gimple_op (s, i)))
1924 	  return true;
1925     }
1926 
1927   switch (gimple_code (s))
1928     {
1929     case GIMPLE_ASM:
1930       return gimple_asm_volatile_p (as_a <gasm *> (s));
1931 
1932     case GIMPLE_CALL:
1933       t = gimple_call_fndecl (s);
1934       /* Assume that calls to weak functions may trap.  */
1935       if (!t || !DECL_P (t) || DECL_WEAK (t))
1936 	return true;
1937       return false;
1938 
1939     case GIMPLE_ASSIGN:
1940       t = gimple_expr_type (s);
1941       op = gimple_assign_rhs_code (s);
1942       if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
1943 	div = gimple_assign_rhs2 (s);
1944       return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
1945 				      (INTEGRAL_TYPE_P (t)
1946 				       && TYPE_OVERFLOW_TRAPS (t)),
1947 				      div));
1948 
1949     case GIMPLE_COND:
1950       t = TREE_TYPE (gimple_cond_lhs (s));
1951       return operation_could_trap_p (gimple_cond_code (s),
1952 				     FLOAT_TYPE_P (t), false, NULL_TREE);
1953 
1954     default:
1955       break;
1956     }
1957 
1958   return false;
1959 }
1960 
/* Return true if statement S can trap.  Checks both the memory
   operands (loads and stores) and the operation itself.  */

bool
gimple_could_trap_p (gimple *s)
{
  return gimple_could_trap_p_1 (s, true, true);
}
1968 
/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  The store to the
   LHS is deliberately excluded (include_stores == false).  */

bool
gimple_assign_rhs_could_trap_p (gimple *s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
1977 
1978 
1979 /* Print debugging information for gimple stmts generated.  */
1980 
1981 void
dump_gimple_statistics(void)1982 dump_gimple_statistics (void)
1983 {
1984   int i, total_tuples = 0, total_bytes = 0;
1985 
1986   if (! GATHER_STATISTICS)
1987     {
1988       fprintf (stderr, "No gimple statistics\n");
1989       return;
1990     }
1991 
1992   fprintf (stderr, "\nGIMPLE statements\n");
1993   fprintf (stderr, "Kind                   Stmts      Bytes\n");
1994   fprintf (stderr, "---------------------------------------\n");
1995   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1996     {
1997       fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1998 	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
1999       total_tuples += gimple_alloc_counts[i];
2000       total_bytes += gimple_alloc_sizes[i];
2001     }
2002   fprintf (stderr, "---------------------------------------\n");
2003   fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2004   fprintf (stderr, "---------------------------------------\n");
2005 }
2006 
2007 
2008 /* Return the number of operands needed on the RHS of a GIMPLE
2009    assignment for an expression with tree code CODE.  */
2010 
2011 unsigned
get_gimple_rhs_num_ops(enum tree_code code)2012 get_gimple_rhs_num_ops (enum tree_code code)
2013 {
2014   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2015 
2016   if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2017     return 1;
2018   else if (rhs_class == GIMPLE_BINARY_RHS)
2019     return 2;
2020   else if (rhs_class == GIMPLE_TERNARY_RHS)
2021     return 3;
2022   else
2023     gcc_unreachable ();
2024 }
2025 
/* Table mapping each tree code to the GIMPLE RHS class its operands
   fall in when the code appears on the RHS of a GIMPLE_ASSIGN.  The
   entries are produced by expanding DEFTREECODE over all-tree.def;
   codes that cannot appear on an assignment RHS map to
   GIMPLE_INVALID_RHS.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2062 
2063 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2064    a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2065    we failed to create one.  */
2066 
2067 tree
canonicalize_cond_expr_cond(tree t)2068 canonicalize_cond_expr_cond (tree t)
2069 {
2070   /* Strip conversions around boolean operations.  */
2071   if (CONVERT_EXPR_P (t)
2072       && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2073           || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2074 	     == BOOLEAN_TYPE))
2075     t = TREE_OPERAND (t, 0);
2076 
2077   /* For !x use x == 0.  */
2078   if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2079     {
2080       tree top0 = TREE_OPERAND (t, 0);
2081       t = build2 (EQ_EXPR, TREE_TYPE (t),
2082 		  top0, build_int_cst (TREE_TYPE (top0), 0));
2083     }
2084   /* For cmp ? 1 : 0 use cmp.  */
2085   else if (TREE_CODE (t) == COND_EXPR
2086 	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2087 	   && integer_onep (TREE_OPERAND (t, 1))
2088 	   && integer_zerop (TREE_OPERAND (t, 2)))
2089     {
2090       tree top0 = TREE_OPERAND (t, 0);
2091       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2092 		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2093     }
2094   /* For x ^ y use x != y.  */
2095   else if (TREE_CODE (t) == BIT_XOR_EXPR)
2096     t = build2 (NE_EXPR, TREE_TYPE (t),
2097 		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2098 
2099   if (is_gimple_condexpr (t))
2100     return t;
2101 
2102   return NULL_TREE;
2103 }
2104 
2105 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2106    the positions marked by the set ARGS_TO_SKIP.  */
2107 
2108 gcall *
gimple_call_copy_skip_args(gcall * stmt,bitmap args_to_skip)2109 gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2110 {
2111   int i;
2112   int nargs = gimple_call_num_args (stmt);
2113   auto_vec<tree> vargs (nargs);
2114   gcall *new_stmt;
2115 
2116   for (i = 0; i < nargs; i++)
2117     if (!bitmap_bit_p (args_to_skip, i))
2118       vargs.quick_push (gimple_call_arg (stmt, i));
2119 
2120   if (gimple_call_internal_p (stmt))
2121     new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2122 					       vargs);
2123   else
2124     new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2125 
2126   if (gimple_call_lhs (stmt))
2127     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2128 
2129   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2130   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2131 
2132   if (gimple_has_location (stmt))
2133     gimple_set_location (new_stmt, gimple_location (stmt));
2134   gimple_call_copy_flags (new_stmt, stmt);
2135   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2136 
2137   gimple_set_modified (new_stmt, true);
2138 
2139   return new_stmt;
2140 }
2141 
2142 
2143 
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* When both fields use the same offset alignment, compare the
     byte offsets (DECL_FIELD_OFFSET) and bit offsets
     (DECL_FIELD_BIT_OFFSET) directly.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      /* Fold any whole bytes of the bit offset into the byte offset so
	 the two representations become comparable.  */
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Same byte: the fields match iff they start at the same bit
	 within that byte.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2194 
2195 
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   The lookup proceeds in three stages: (1) match TYPE's main variant
   against the standard C integer type nodes and return its
   signed/unsigned counterpart; (2) likewise for the fixed-point type
   nodes, via the GIMPLE_FIXED_* helper macros below; (3) otherwise
   match by mode and precision, falling back to building a
   nonstandard integer type of the same precision.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* Stage 1: identity match against the standard integer type nodes.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* __intN types (e.g. __int128) that the target enables.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Machine-mode-named integer nodes (QI/HI/SI/DI/TI).  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Stage 2: fixed-point types.  Each macro expands to identity
     matches against one family of fixed-point type nodes.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Stage 3: match by mode and precision rather than identity.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No predefined node matched; synthesize one of the right precision.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2372 
2373 
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}
2381 
2382 
/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
2390 
2391 
2392 /* Return the typed-based alias set for T, which may be an expression
2393    or a type.  Return -1 if we don't do anything special.  */
2394 
2395 alias_set_type
gimple_get_alias_set(tree t)2396 gimple_get_alias_set (tree t)
2397 {
2398   tree u;
2399 
2400   /* Permit type-punning when accessing a union, provided the access
2401      is directly through the union.  For example, this code does not
2402      permit taking the address of a union member and then storing
2403      through it.  Even the type-punning allowed here is a GCC
2404      extension, albeit a common and useful one; the C standard says
2405      that such accesses have implementation-defined behavior.  */
2406   for (u = t;
2407        TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2408        u = TREE_OPERAND (u, 0))
2409     if (TREE_CODE (u) == COMPONENT_REF
2410 	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2411       return 0;
2412 
2413   /* That's all the expressions we handle specially.  */
2414   if (!TYPE_P (t))
2415     return -1;
2416 
2417   /* For convenience, follow the C standard when dealing with
2418      character types.  Any object may be accessed via an lvalue that
2419      has character type.  */
2420   if (t == char_type_node
2421       || t == signed_char_type_node
2422       || t == unsigned_char_type_node)
2423     return 0;
2424 
2425   /* Allow aliasing between signed and unsigned variants of the same
2426      type.  We treat the signed variant as canonical.  */
2427   if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2428     {
2429       tree t1 = gimple_signed_type (t);
2430 
2431       /* t1 == t can happen for boolean nodes which are always unsigned.  */
2432       if (t1 != t)
2433 	return get_alias_set (t1);
2434     }
2435 
2436   return -1;
2437 }
2438 
2439 
2440 /* Helper for gimple_ior_addresses_taken_1.  */
2441 
2442 static bool
gimple_ior_addresses_taken_1(gimple *,tree addr,tree,void * data)2443 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2444 {
2445   bitmap addresses_taken = (bitmap)data;
2446   addr = get_base_address (addr);
2447   if (addr
2448       && DECL_P (addr))
2449     {
2450       bitmap_set_bit (addresses_taken, DECL_UID (addr));
2451       return true;
2452     }
2453   return false;
2454 }
2455 
/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
{
  /* Only the address-operand callback is supplied; loads and stores
     are ignored.  */
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
2466 
2467 
2468 /* Return true when STMTs arguments and return value match those of FNDECL,
2469    a decl of a builtin function.  */
2470 
2471 bool
gimple_builtin_call_types_compatible_p(const gimple * stmt,tree fndecl)2472 gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
2473 {
2474   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2475 
2476   tree ret = gimple_call_lhs (stmt);
2477   if (ret
2478       && !useless_type_conversion_p (TREE_TYPE (ret),
2479 				     TREE_TYPE (TREE_TYPE (fndecl))))
2480     return false;
2481 
2482   tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2483   unsigned nargs = gimple_call_num_args (stmt);
2484   for (unsigned i = 0; i < nargs; ++i)
2485     {
2486       /* Variadic args follow.  */
2487       if (!targs)
2488 	return true;
2489       tree arg = gimple_call_arg (stmt, i);
2490       tree type = TREE_VALUE (targs);
2491       if (!useless_type_conversion_p (type, TREE_TYPE (arg))
2492 	  /* char/short integral arguments are promoted to int
2493 	     by several frontends if targetm.calls.promote_prototypes
2494 	     is true.  Allow such promotion too.  */
2495 	  && !(INTEGRAL_TYPE_P (type)
2496 	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
2497 	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
2498 	       && useless_type_conversion_p (integer_type_node,
2499 					     TREE_TYPE (arg))))
2500 	return false;
2501       targs = TREE_CHAIN (targs);
2502     }
2503   if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2504     return false;
2505   return true;
2506 }
2507 
2508 /* Return true when STMT is builtins call.  */
2509 
2510 bool
gimple_call_builtin_p(const gimple * stmt)2511 gimple_call_builtin_p (const gimple *stmt)
2512 {
2513   tree fndecl;
2514   if (is_gimple_call (stmt)
2515       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2516       && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2517     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2518   return false;
2519 }
2520 
2521 /* Return true when STMT is builtins call to CLASS.  */
2522 
2523 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_class klass)2524 gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2525 {
2526   tree fndecl;
2527   if (is_gimple_call (stmt)
2528       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2529       && DECL_BUILT_IN_CLASS (fndecl) == klass)
2530     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2531   return false;
2532 }
2533 
2534 /* Return true when STMT is builtins call to CODE of CLASS.  */
2535 
2536 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_function code)2537 gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2538 {
2539   tree fndecl;
2540   if (is_gimple_call (stmt)
2541       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2542       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2543       && DECL_FUNCTION_CODE (fndecl) == code)
2544     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2545   return false;
2546 }
2547 
2548 /* If CALL is a call to a combined_fn (i.e. an internal function or
2549    a normal built-in function), return its code, otherwise return
2550    CFN_LAST.  */
2551 
2552 combined_fn
gimple_call_combined_fn(const gimple * stmt)2553 gimple_call_combined_fn (const gimple *stmt)
2554 {
2555   if (const gcall *call = dyn_cast <const gcall *> (stmt))
2556     {
2557       if (gimple_call_internal_p (call))
2558 	return as_combined_fn (gimple_call_internal_fn (call));
2559 
2560       tree fndecl = gimple_call_fndecl (stmt);
2561       if (fndecl
2562 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2563 	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2564 	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2565     }
2566   return CFN_LAST;
2567 }
2568 
2569 /* Return true if STMT clobbers memory.  STMT is required to be a
2570    GIMPLE_ASM.  */
2571 
2572 bool
gimple_asm_clobbers_memory_p(const gasm * stmt)2573 gimple_asm_clobbers_memory_p (const gasm *stmt)
2574 {
2575   unsigned i;
2576 
2577   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2578     {
2579       tree op = gimple_asm_clobber_op (stmt, i);
2580       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2581 	return true;
2582     }
2583 
2584   return false;
2585 }
2586 
2587 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2588 
2589 void
dump_decl_set(FILE * file,bitmap set)2590 dump_decl_set (FILE *file, bitmap set)
2591 {
2592   if (set)
2593     {
2594       bitmap_iterator bi;
2595       unsigned i;
2596 
2597       fprintf (file, "{ ");
2598 
2599       EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2600 	{
2601 	  fprintf (file, "D.%u", i);
2602 	  fprintf (file, " ");
2603 	}
2604 
2605       fprintf (file, "}");
2606     }
2607   else
2608     fprintf (file, "NIL");
2609 }
2610 
/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* Leaf normal builtins cannot call back into the program, so they
     are nonfreeing unless they free memory themselves.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* Otherwise consult the callgraph: the call is nonfreeing only when
     the final callee is known, cannot be interposed, and is marked
     nonfreeing_fn.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2652 
2653 /* Return true when CALL is a call stmt that definitely need not
2654    be considered to be a memory barrier.  */
2655 bool
nonbarrier_call_p(gimple * call)2656 nonbarrier_call_p (gimple *call)
2657 {
2658   if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2659     return true;
2660   /* Should extend this to have a nonbarrier_fn flag, just as above in
2661      the nonfreeing case.  */
2662   return false;
2663 }
2664 
2665 /* Callback for walk_stmt_load_store_ops.
2666 
2667    Return TRUE if OP will dereference the tree stored in DATA, FALSE
2668    otherwise.
2669 
2670    This routine only makes a superficial check for a dereference.  Thus
2671    it must only be used if it is safe to return a false negative.  */
2672 static bool
check_loadstore(gimple *,tree op,tree,void * data)2673 check_loadstore (gimple *, tree op, tree, void *data)
2674 {
2675   if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2676     {
2677       /* Some address spaces may legitimately dereference zero.  */
2678       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2679       if (targetm.addr_space.zero_address_valid (as))
2680 	return false;
2681 
2682       return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2683     }
2684   return false;
2685 }
2686 
2687 
/* Return true if OP can be inferred to be non-NULL after STMT executes,
   either by using a pointer dereference or attributes.  */
bool
infer_nonnull_range (gimple *stmt, tree op)
{
  return infer_nonnull_range_by_dereference (stmt, op)
    || infer_nonnull_range_by_attribute (stmt, op);
}
2696 
2697 /* Return true if OP can be inferred to be non-NULL after STMT
2698    executes by using a pointer dereference.  */
2699 bool
infer_nonnull_range_by_dereference(gimple * stmt,tree op)2700 infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2701 {
2702   /* We can only assume that a pointer dereference will yield
2703      non-NULL if -fdelete-null-pointer-checks is enabled.  */
2704   if (!flag_delete_null_pointer_checks
2705       || !POINTER_TYPE_P (TREE_TYPE (op))
2706       || gimple_code (stmt) == GIMPLE_ASM)
2707     return false;
2708 
2709   if (walk_stmt_load_store_ops (stmt, (void *)op,
2710 				check_loadstore, check_loadstore))
2711     return true;
2712 
2713   return false;
2714 }
2715 
/* Return true if OP can be inferred to be a non-NULL after STMT
   executes by using attributes ("nonnull" on the called function's
   type, or "returns_nonnull" on the current function).  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* Walk the attribute chain; lookup_attribute skips forward to the
	 next "nonnull" occurrence, so together with the TREE_CHAIN step
	 this loop visits every "nonnull" attribute in turn.  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      /* Attribute operands are 1-based; convert to a 0-based
		 call-argument index.  */
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
2779 
2780 /* Compare two case labels.  Because the front end should already have
2781    made sure that case ranges do not overlap, it is enough to only compare
2782    the CASE_LOW values of each case label.  */
2783 
2784 static int
compare_case_labels(const void * p1,const void * p2)2785 compare_case_labels (const void *p1, const void *p2)
2786 {
2787   const_tree const case1 = *(const_tree const*)p1;
2788   const_tree const case2 = *(const_tree const*)p2;
2789 
2790   /* The 'default' case label always goes first.  */
2791   if (!CASE_LOW (case1))
2792     return -1;
2793   else if (!CASE_LOW (case2))
2794     return 1;
2795   else
2796     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2797 }
2798 
/* Sort the case labels in LABEL_VEC in place in ascending order.
   A default label, if present, ends up first (see
   compare_case_labels).  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
2806 
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* Number of labels that survived the filtering loop above.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Verify the sorted labels are contiguous: each label must
		 begin exactly one past the previous label's end.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if (wi::add (low, 1) != high)
		    break;
		}
	      if (i == len)
		{
		  /* The labels cover the whole index range, so reuse the
		     first label's target as the default destination.  */
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
2977 
2978 /* Set the location of all statements in SEQ to LOC.  */
2979 
2980 void
gimple_seq_set_location(gimple_seq seq,location_t loc)2981 gimple_seq_set_location (gimple_seq seq, location_t loc)
2982 {
2983   for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2984     gimple_set_location (gsi_stmt (i), loc);
2985 }
2986 
2987 /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */
2988 
2989 void
gimple_seq_discard(gimple_seq seq)2990 gimple_seq_discard (gimple_seq seq)
2991 {
2992   gimple_stmt_iterator gsi;
2993 
2994   for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
2995     {
2996       gimple *stmt = gsi_stmt (gsi);
2997       gsi_remove (&gsi, true);
2998       release_defs (stmt);
2999       ggc_free (stmt);
3000     }
3001 }
3002 
/* See if STMT now calls function that takes no parameters and if so, drop
   call arguments.  This is used when devirtualization machinery redirects
   to __builtin_unreachable or __cxa_pure_virtual.  */

void
maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
  tree decl = gimple_call_fndecl (stmt);
  /* A prototype whose first argument type is void_type_node declares a
     function taking no parameters ("(void)").  */
  if (TYPE_ARG_TYPES (TREE_TYPE (decl))
      && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
      && gimple_call_num_args (stmt))
    {
      /* Ops 0-2 of a GIMPLE_CALL hold the function, static chain and
	 LHS; keeping only those drops every argument operand.  */
      gimple_set_num_ops (stmt, 3);
      update_stmt_fn (fn, stmt);
    }
}
3019