xref: /dragonfly/contrib/gcc-8.0/gcc/gimple.c (revision 38fd1498)
1 /* Gimple IR support functions.
2 
3    Copyright (C) 2007-2018 Free Software Foundation, Inc.
4    Contributed by Aldy Hernandez <aldyh@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "diagnostic.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "stor-layout.h"
35 #include "internal-fn.h"
36 #include "tree-eh.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimplify.h"
40 #include "target.h"
41 #include "builtins.h"
42 #include "selftest.h"
43 #include "gimple-pretty-print.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Size in bytes of each tuple structure, indexed by GSS code.  Used by
   gimple_size to compute allocation sizes.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Printable name of each GIMPLE code, indexed by gimple_code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Map from gimple_code to the GSS structure enumerator that implements
   that statement kind.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  Counters and byte totals per allocation kind, only
   updated when GATHER_STATISTICS is enabled (see gimple_alloc).  */

uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Static gimple tuple members.  Out-of-line definitions for the
   class-static code_ constants declared in gimple.h.  */
const enum gimple_code gassign::code_;
const enum gimple_code gcall::code_;
const enum gimple_code gcond::code_;
96 
97 /* Gimple tuple constructors.
98    Note: Any constructor taking a ``gimple_seq'' as a parameter, can
99    be passed a NULL to start with an empty sequence.  */
100 
/* Set the code for statement G to CODE.  Internal helper; callers must
   only change the code while the tuple is being built (see gimple_alloc),
   since the code determines the tuple's layout.  */

static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  g->code = code;
}
108 
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  This is the base structure size only; it does not include
   space for extra operands (gimple_alloc adds that).  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
117 
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  The returned tuple is zero-initialized by the GC
   allocator, marked modified, and made a singleton sequence.  */

gimple *
gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple *stmt;

  size = gimple_size (code);
  /* The base structure already contains room for one operand (see the
     gimple_ops_offset_ comment above), hence NUM_OPS - 1 extra slots.  */
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
149 
/* Set SUBCODE to be the code of the expression computed by statement G.
   The subcode is a 16-bit field shared by all tuple kinds (e.g. the
   tree_code of an assignment's RHS, or gimple_try_flags for GIMPLE_TRY).  */

static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
160 
161 
162 
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

/* Convenience wrapper that supplies the memory-statistics location of
   the caller (MEM_STAT_INFO) automatically.  */
#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
179 
180 
181 /* Build a GIMPLE_RETURN statement returning RETVAL.  */
182 
183 greturn *
gimple_build_return(tree retval)184 gimple_build_return (tree retval)
185 {
186   greturn *s
187     = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
188 					       2));
189   if (retval)
190     gimple_return_set_retval (s, retval);
191   return s;
192 }
193 
/* Reset alias information on call S.  The use and clobber points-to
   sets are either cleared entirely (when the call's ECF flags prove it
   cannot use or clobber memory) or reset to a conservative state.  */

void
gimple_call_reset_alias_info (gcall *s)
{
  /* A const call reads no memory at all, so its use set can be empty;
     otherwise fall back to the conservative "uses anything" solution.  */
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  /* Const, pure and novops calls do not clobber memory.  */
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}
208 
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  The three extra operand slots hold the non-argument call
   operands; the callee goes in slot 1 (the others are the LHS and static
   chain, per the gcall layout in gimple.h).  */

static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  /* Calls take the function's address, not the FUNCTION_DECL itself.  */
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
227 
228 
229 /* Build a GIMPLE_CALL statement to function FN with the arguments
230    specified in vector ARGS.  */
231 
232 gcall *
gimple_build_call_vec(tree fn,vec<tree> args)233 gimple_build_call_vec (tree fn, vec<tree> args)
234 {
235   unsigned i;
236   unsigned nargs = args.length ();
237   gcall *call = gimple_build_call_1 (fn, nargs);
238 
239   for (i = 0; i < nargs; i++)
240     gimple_call_set_arg (call, i, args[i]);
241 
242   return call;
243 }
244 
245 
246 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
247    arguments.  The ... are the arguments.  */
248 
249 gcall *
gimple_build_call(tree fn,unsigned nargs,...)250 gimple_build_call (tree fn, unsigned nargs, ...)
251 {
252   va_list ap;
253   gcall *call;
254   unsigned i;
255 
256   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
257 
258   call = gimple_build_call_1 (fn, nargs);
259 
260   va_start (ap, nargs);
261   for (i = 0; i < nargs; i++)
262     gimple_call_set_arg (call, i, va_arg (ap, tree));
263   va_end (ap);
264 
265   return call;
266 }
267 
268 
269 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
270    arguments.  AP contains the arguments.  */
271 
272 gcall *
gimple_build_call_valist(tree fn,unsigned nargs,va_list ap)273 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
274 {
275   gcall *call;
276   unsigned i;
277 
278   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
279 
280   call = gimple_build_call_1 (fn, nargs);
281 
282   for (i = 0; i < nargs; i++)
283     gimple_call_set_arg (call, i, va_arg (ap, tree));
284 
285   return call;
286 }
287 
288 
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  Internal calls have no callee tree,
   so the GF_CALL_INTERNAL subcode flag marks them and the internal
   function code is stored directly.  */

static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  s->subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
304 
305 
306 /* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
307    the number of arguments.  The ... are the arguments.  */
308 
309 gcall *
gimple_build_call_internal(enum internal_fn fn,unsigned nargs,...)310 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
311 {
312   va_list ap;
313   gcall *call;
314   unsigned i;
315 
316   call = gimple_build_call_internal_1 (fn, nargs);
317   va_start (ap, nargs);
318   for (i = 0; i < nargs; i++)
319     gimple_call_set_arg (call, i, va_arg (ap, tree));
320   va_end (ap);
321 
322   return call;
323 }
324 
325 
326 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
327    specified in vector ARGS.  */
328 
329 gcall *
gimple_build_call_internal_vec(enum internal_fn fn,vec<tree> args)330 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
331 {
332   unsigned i, nargs;
333   gcall *call;
334 
335   nargs = args.length ();
336   call = gimple_build_call_internal_1 (fn, nargs);
337   for (i = 0; i < nargs; i++)
338     gimple_call_set_arg (call, i, args[i]);
339 
340   return call;
341 }
342 
343 
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  FNPTRTYPE, if non-NULL, is the type of the function
   pointer and is recorded as the call's fntype.  */

gcall *
gimple_build_call_from_tree (tree t, tree fnptrtype)
{
  unsigned i, nargs;
  gcall *call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the resolved FUNCTION_DECL over the raw callee expression.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));
  gimple_set_location (call, EXPR_LOCATION (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* CALL_ALLOCA_FOR_VAR_P and CALL_FROM_THUNK_P share a tree flag bit,
     so only one of the two gimple flags can be meaningfully copied:
     the alloca flag for builtin alloca calls, the thunk flag otherwise.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));
  gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));

  if (fnptrtype)
    {
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));

      /* Check if it's an indirect CALL and the type has the
	 nocf_check attribute.  In that case propagate the information
	 to the gimple CALL insn.  */
      if (!fndecl)
	{
	  gcc_assert (POINTER_TYPE_P (fnptrtype));
	  tree fntype = TREE_TYPE (fnptrtype);

	  if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
	    gimple_call_set_nocf_check (call, TRUE);
	}
    }

  return call;
}
402 
403 
404 /* Build a GIMPLE_ASSIGN statement.
405 
406    LHS of the assignment.
407    RHS of the assignment which can be unary or binary.  */
408 
409 gassign *
gimple_build_assign(tree lhs,tree rhs MEM_STAT_DECL)410 gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
411 {
412   enum tree_code subcode;
413   tree op1, op2, op3;
414 
415   extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
416   return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
417 }
418 
419 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  OP2 and OP3 may be NULL_TREE when SUBCODE takes
   fewer operands; non-NULL extra operands are asserted against the
   operand count implied by SUBCODE.  */

static inline gassign *
gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
		       tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gassign *p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = as_a <gassign *> (
        gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				    PASS_MEM_STAT));
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}
453 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  Thin public wrapper around gimple_build_assign_1.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
463 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2 (binary RHS).  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
474 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1
   (unary or single-rhs assignment).  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
483 
484 
485 /* Build a GIMPLE_COND statement.
486 
487    PRED is the condition used to compare LHS and the RHS.
488    T_LABEL is the label to jump to if the condition is true.
489    F_LABEL is the label to jump to otherwise.  */
490 
491 gcond *
gimple_build_cond(enum tree_code pred_code,tree lhs,tree rhs,tree t_label,tree f_label)492 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
493 		   tree t_label, tree f_label)
494 {
495   gcond *p;
496 
497   gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
498   p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
499   gimple_cond_set_lhs (p, lhs);
500   gimple_cond_set_rhs (p, rhs);
501   gimple_cond_set_true_label (p, t_label);
502   gimple_cond_set_false_label (p, f_label);
503   return p;
504 }
505 
506 /* Build a GIMPLE_COND statement from the conditional expression tree
507    COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
508 
509 gcond *
gimple_build_cond_from_tree(tree cond,tree t_label,tree f_label)510 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
511 {
512   enum tree_code code;
513   tree lhs, rhs;
514 
515   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
516   return gimple_build_cond (code, lhs, rhs, t_label, f_label);
517 }
518 
519 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
520    boolean expression tree COND.  */
521 
522 void
gimple_cond_set_condition_from_tree(gcond * stmt,tree cond)523 gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
524 {
525   enum tree_code code;
526   tree lhs, rhs;
527 
528   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
529   gimple_cond_set_condition (stmt, code, lhs, rhs);
530 }
531 
532 /* Build a GIMPLE_LABEL statement for LABEL.  */
533 
534 glabel *
gimple_build_label(tree label)535 gimple_build_label (tree label)
536 {
537   glabel *p
538     = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
539   gimple_label_set_label (p, label);
540   return p;
541 }
542 
543 /* Build a GIMPLE_GOTO statement to label DEST.  */
544 
545 ggoto *
gimple_build_goto(tree dest)546 gimple_build_goto (tree dest)
547 {
548   ggoto *p
549     = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
550   gimple_goto_set_dest (p, dest);
551   return p;
552 }
553 
554 
/* Build a GIMPLE_NOP statement (an empty statement with no operands).  */

gimple *
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
562 
563 
564 /* Build a GIMPLE_BIND statement.
565    VARS are the variables in BODY.
566    BLOCK is the containing block.  */
567 
568 gbind *
gimple_build_bind(tree vars,gimple_seq body,tree block)569 gimple_build_bind (tree vars, gimple_seq body, tree block)
570 {
571   gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
572   gimple_bind_set_vars (p, vars);
573   if (body)
574     gimple_bind_set_body (p, body);
575   if (block)
576     gimple_bind_set_block (p, block);
577   return p;
578 }
579 
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels (asm goto).  */

static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gasm *p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  /* All the operand kinds share a single flat operand vector.  */
  p = as_a <gasm *> (
        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			       ninputs + noutputs + nclobbers + nlabels));

  p->ni = ninputs;
  p->no = noutputs;
  p->nc = nclobbers;
  p->nl = nlabels;
  /* Copy the assembly text into GC-managed storage.  */
  p->string = ggc_alloc_string (string, size);

  /* Account for the string copy, which gimple_alloc did not see.  */
  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
614 
615 /* Build a GIMPLE_ASM statement.
616 
617    STRING is the assembly code.
618    NINPUT is the number of register inputs.
619    NOUTPUT is the number of register outputs.
620    NCLOBBERS is the number of clobbered registers.
621    INPUTS is a vector of the input register parameters.
622    OUTPUTS is a vector of the output register parameters.
623    CLOBBERS is a vector of the clobbered register parameters.
624    LABELS is a vector of destination labels.  */
625 
626 gasm *
gimple_build_asm_vec(const char * string,vec<tree,va_gc> * inputs,vec<tree,va_gc> * outputs,vec<tree,va_gc> * clobbers,vec<tree,va_gc> * labels)627 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
628                       vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
629 		      vec<tree, va_gc> *labels)
630 {
631   gasm *p;
632   unsigned i;
633 
634   p = gimple_build_asm_1 (string,
635                           vec_safe_length (inputs),
636                           vec_safe_length (outputs),
637                           vec_safe_length (clobbers),
638 			  vec_safe_length (labels));
639 
640   for (i = 0; i < vec_safe_length (inputs); i++)
641     gimple_asm_set_input_op (p, i, (*inputs)[i]);
642 
643   for (i = 0; i < vec_safe_length (outputs); i++)
644     gimple_asm_set_output_op (p, i, (*outputs)[i]);
645 
646   for (i = 0; i < vec_safe_length (clobbers); i++)
647     gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
648 
649   for (i = 0; i < vec_safe_length (labels); i++)
650     gimple_asm_set_label_op (p, i, (*labels)[i]);
651 
652   return p;
653 }
654 
655 /* Build a GIMPLE_CATCH statement.
656 
657   TYPES are the catch types.
658   HANDLER is the exception handler.  */
659 
660 gcatch *
gimple_build_catch(tree types,gimple_seq handler)661 gimple_build_catch (tree types, gimple_seq handler)
662 {
663   gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
664   gimple_catch_set_types (p, types);
665   if (handler)
666     gimple_catch_set_handler (p, handler);
667 
668   return p;
669 }
670 
671 /* Build a GIMPLE_EH_FILTER statement.
672 
673    TYPES are the filter's types.
674    FAILURE is the filter's failure action.  */
675 
676 geh_filter *
gimple_build_eh_filter(tree types,gimple_seq failure)677 gimple_build_eh_filter (tree types, gimple_seq failure)
678 {
679   geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
680   gimple_eh_filter_set_types (p, types);
681   if (failure)
682     gimple_eh_filter_set_failure (p, failure);
683 
684   return p;
685 }
686 
687 /* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
688 
689 geh_mnt *
gimple_build_eh_must_not_throw(tree decl)690 gimple_build_eh_must_not_throw (tree decl)
691 {
692   geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
693 
694   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
695   gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
696   gimple_eh_must_not_throw_set_fndecl (p, decl);
697 
698   return p;
699 }
700 
701 /* Build a GIMPLE_EH_ELSE statement.  */
702 
703 geh_else *
gimple_build_eh_else(gimple_seq n_body,gimple_seq e_body)704 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
705 {
706   geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
707   gimple_eh_else_set_n_body (p, n_body);
708   gimple_eh_else_set_e_body (p, e_body);
709   return p;
710 }
711 
712 /* Build a GIMPLE_TRY statement.
713 
714    EVAL is the expression to evaluate.
715    CLEANUP is the cleanup expression.
716    KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
717    whether this is a try/catch or a try/finally respectively.  */
718 
719 gtry *
gimple_build_try(gimple_seq eval,gimple_seq cleanup,enum gimple_try_flags kind)720 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
721     		  enum gimple_try_flags kind)
722 {
723   gtry *p;
724 
725   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
726   p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
727   gimple_set_subcode (p, kind);
728   if (eval)
729     gimple_try_set_eval (p, eval);
730   if (cleanup)
731     gimple_try_set_cleanup (p, cleanup);
732 
733   return p;
734 }
735 
736 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
737 
738    CLEANUP is the cleanup expression.  */
739 
740 gimple *
gimple_build_wce(gimple_seq cleanup)741 gimple_build_wce (gimple_seq cleanup)
742 {
743   gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
744   if (cleanup)
745     gimple_wce_set_cleanup (p, cleanup);
746 
747   return p;
748 }
749 
750 
751 /* Build a GIMPLE_RESX statement.  */
752 
753 gresx *
gimple_build_resx(int region)754 gimple_build_resx (int region)
755 {
756   gresx *p
757     = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
758   p->region = region;
759   return p;
760 }
761 
762 
763 /* The helper for constructing a gimple switch statement.
764    INDEX is the switch's index.
765    NLABELS is the number of labels in the switch excluding the default.
766    DEFAULT_LABEL is the default label for the switch statement.  */
767 
768 gswitch *
gimple_build_switch_nlabels(unsigned nlabels,tree index,tree default_label)769 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
770 {
771   /* nlabels + 1 default label + 1 index.  */
772   gcc_checking_assert (default_label);
773   gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
774 							ERROR_MARK,
775 							1 + 1 + nlabels));
776   gimple_switch_set_index (p, index);
777   gimple_switch_set_default_label (p, default_label);
778   return p;
779 }
780 
781 /* Build a GIMPLE_SWITCH statement.
782 
783    INDEX is the switch's index.
784    DEFAULT_LABEL is the default label
785    ARGS is a vector of labels excluding the default.  */
786 
787 gswitch *
gimple_build_switch(tree index,tree default_label,vec<tree> args)788 gimple_build_switch (tree index, tree default_label, vec<tree> args)
789 {
790   unsigned i, nlabels = args.length ();
791 
792   gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
793 
794   /* Copy the labels from the vector to the switch statement.  */
795   for (i = 0; i < nlabels; i++)
796     gimple_switch_set_label (p, i + 1, args[i]);
797 
798   return p;
799 }
800 
801 /* Build a GIMPLE_EH_DISPATCH statement.  */
802 
803 geh_dispatch *
gimple_build_eh_dispatch(int region)804 gimple_build_eh_dispatch (int region)
805 {
806   geh_dispatch *p
807     = as_a <geh_dispatch *> (
808 	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
809   p->region = region;
810   return p;
811 }
812 
813 /* Build a new GIMPLE_DEBUG_BIND statement.
814 
815    VAR is bound to VALUE; block and location are taken from STMT.  */
816 
817 gdebug *
gimple_build_debug_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)818 gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
819 {
820   gdebug *p
821     = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
822 						   (unsigned)GIMPLE_DEBUG_BIND, 2
823 						   PASS_MEM_STAT));
824   gimple_debug_bind_set_var (p, var);
825   gimple_debug_bind_set_value (p, value);
826   if (stmt)
827     gimple_set_location (p, gimple_location (stmt));
828 
829   return p;
830 }
831 
832 
833 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
834 
835    VAR is bound to VALUE; block and location are taken from STMT.  */
836 
837 gdebug *
gimple_build_debug_source_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)838 gimple_build_debug_source_bind (tree var, tree value,
839 				     gimple *stmt MEM_STAT_DECL)
840 {
841   gdebug *p
842     = as_a <gdebug *> (
843         gimple_build_with_ops_stat (GIMPLE_DEBUG,
844 				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
845 				    PASS_MEM_STAT));
846 
847   gimple_debug_source_bind_set_var (p, var);
848   gimple_debug_source_bind_set_value (p, value);
849   if (stmt)
850     gimple_set_location (p, gimple_location (stmt));
851 
852   return p;
853 }
854 
855 
856 /* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
857    LOCATION.  */
858 
859 gdebug *
gimple_build_debug_begin_stmt(tree block,location_t location MEM_STAT_DECL)860 gimple_build_debug_begin_stmt (tree block, location_t location
861 				    MEM_STAT_DECL)
862 {
863   gdebug *p
864     = as_a <gdebug *> (
865         gimple_build_with_ops_stat (GIMPLE_DEBUG,
866 				    (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
867 				    PASS_MEM_STAT));
868 
869   gimple_set_location (p, location);
870   gimple_set_block (p, block);
871   cfun->debug_marker_count++;
872 
873   return p;
874 }
875 
876 
877 /* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
878    LOCATION.  The BLOCK links to the inlined function.  */
879 
880 gdebug *
gimple_build_debug_inline_entry(tree block,location_t location MEM_STAT_DECL)881 gimple_build_debug_inline_entry (tree block, location_t location
882 				      MEM_STAT_DECL)
883 {
884   gdebug *p
885     = as_a <gdebug *> (
886         gimple_build_with_ops_stat (GIMPLE_DEBUG,
887 				    (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
888 				    PASS_MEM_STAT));
889 
890   gimple_set_location (p, location);
891   gimple_set_block (p, block);
892   cfun->debug_marker_count++;
893 
894   return p;
895 }
896 
897 
898 /* Build a GIMPLE_OMP_CRITICAL statement.
899 
900    BODY is the sequence of statements for which only one thread can execute.
901    NAME is optional identifier for this critical block.
902    CLAUSES are clauses for this critical block.  */
903 
904 gomp_critical *
gimple_build_omp_critical(gimple_seq body,tree name,tree clauses)905 gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
906 {
907   gomp_critical *p
908     = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
909   gimple_omp_critical_set_name (p, name);
910   gimple_omp_critical_set_clauses (p, clauses);
911   if (body)
912     gimple_omp_set_body (p, body);
913 
914   return p;
915 }
916 
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES are any of the construct's clauses.
   COLLAPSE is the collapse count (number of nested loops described by
   this statement; one iteration descriptor is allocated per loop).
   PRE_BODY is the sequence of statements that are loop invariant.  */

gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->collapse = collapse;
  /* Zero-initialized GC vector of per-loop iteration descriptors.  */
  p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);

  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
942 
943 
944 /* Build a GIMPLE_OMP_PARALLEL statement.
945 
946    BODY is sequence of statements which are executed in parallel.
947    CLAUSES, are the OMP parallel construct's clauses.
948    CHILD_FN is the function created for the parallel threads to execute.
949    DATA_ARG are the shared data argument(s).  */
950 
951 gomp_parallel *
gimple_build_omp_parallel(gimple_seq body,tree clauses,tree child_fn,tree data_arg)952 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
953 			   tree data_arg)
954 {
955   gomp_parallel *p
956     = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
957   if (body)
958     gimple_omp_set_body (p, body);
959   gimple_omp_parallel_set_clauses (p, clauses);
960   gimple_omp_parallel_set_child_fn (p, child_fn);
961   gimple_omp_parallel_set_data_arg (p, data_arg);
962 
963   return p;
964 }
965 
966 
967 /* Build a GIMPLE_OMP_TASK statement.
968 
969    BODY is sequence of statements which are executed by the explicit task.
970    CLAUSES, are the OMP parallel construct's clauses.
971    CHILD_FN is the function created for the parallel threads to execute.
972    DATA_ARG are the shared data argument(s).
973    COPY_FN is the optional function for firstprivate initialization.
974    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
975 
976 gomp_task *
gimple_build_omp_task(gimple_seq body,tree clauses,tree child_fn,tree data_arg,tree copy_fn,tree arg_size,tree arg_align)977 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
978 		       tree data_arg, tree copy_fn, tree arg_size,
979 		       tree arg_align)
980 {
981   gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
982   if (body)
983     gimple_omp_set_body (p, body);
984   gimple_omp_task_set_clauses (p, clauses);
985   gimple_omp_task_set_child_fn (p, child_fn);
986   gimple_omp_task_set_data_arg (p, data_arg);
987   gimple_omp_task_set_copy_fn (p, copy_fn);
988   gimple_omp_task_set_arg_size (p, arg_size);
989   gimple_omp_task_set_arg_align (p, arg_align);
990 
991   return p;
992 }
993 
994 
995 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
996 
997    BODY is the sequence of statements in the section.  */
998 
999 gimple *
gimple_build_omp_section(gimple_seq body)1000 gimple_build_omp_section (gimple_seq body)
1001 {
1002   gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1003   if (body)
1004     gimple_omp_set_body (p, body);
1005 
1006   return p;
1007 }
1008 
1009 
1010 /* Build a GIMPLE_OMP_MASTER statement.
1011 
1012    BODY is the sequence of statements to be executed by just the master.  */
1013 
1014 gimple *
gimple_build_omp_master(gimple_seq body)1015 gimple_build_omp_master (gimple_seq body)
1016 {
1017   gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1018   if (body)
1019     gimple_omp_set_body (p, body);
1020 
1021   return p;
1022 }
1023 
1024 /* Build a GIMPLE_OMP_GRID_BODY statement.
1025 
1026    BODY is the sequence of statements to be executed by the kernel.  */
1027 
1028 gimple *
gimple_build_omp_grid_body(gimple_seq body)1029 gimple_build_omp_grid_body (gimple_seq body)
1030 {
1031   gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
1032   if (body)
1033     gimple_omp_set_body (p, body);
1034 
1035   return p;
1036 }
1037 
1038 /* Build a GIMPLE_OMP_TASKGROUP statement.
1039 
1040    BODY is the sequence of statements to be executed by the taskgroup
1041    construct.  */
1042 
1043 gimple *
gimple_build_omp_taskgroup(gimple_seq body)1044 gimple_build_omp_taskgroup (gimple_seq body)
1045 {
1046   gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
1047   if (body)
1048     gimple_omp_set_body (p, body);
1049 
1050   return p;
1051 }
1052 
1053 
1054 /* Build a GIMPLE_OMP_CONTINUE statement.
1055 
1056    CONTROL_DEF is the definition of the control variable.
1057    CONTROL_USE is the use of the control variable.  */
1058 
1059 gomp_continue *
gimple_build_omp_continue(tree control_def,tree control_use)1060 gimple_build_omp_continue (tree control_def, tree control_use)
1061 {
1062   gomp_continue *p
1063     = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
1064   gimple_omp_continue_set_control_def (p, control_def);
1065   gimple_omp_continue_set_control_use (p, control_use);
1066   return p;
1067 }
1068 
1069 /* Build a GIMPLE_OMP_ORDERED statement.
1070 
1071    BODY is the sequence of statements inside a loop that will executed in
1072    sequence.
1073    CLAUSES are clauses for this statement.  */
1074 
1075 gomp_ordered *
gimple_build_omp_ordered(gimple_seq body,tree clauses)1076 gimple_build_omp_ordered (gimple_seq body, tree clauses)
1077 {
1078   gomp_ordered *p
1079     = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
1080   gimple_omp_ordered_set_clauses (p, clauses);
1081   if (body)
1082     gimple_omp_set_body (p, body);
1083 
1084   return p;
1085 }
1086 
1087 
1088 /* Build a GIMPLE_OMP_RETURN statement.
1089    WAIT_P is true if this is a non-waiting return.  */
1090 
1091 gimple *
gimple_build_omp_return(bool wait_p)1092 gimple_build_omp_return (bool wait_p)
1093 {
1094   gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1095   if (wait_p)
1096     gimple_omp_return_set_nowait (p);
1097 
1098   return p;
1099 }
1100 
1101 
1102 /* Build a GIMPLE_OMP_SECTIONS statement.
1103 
1104    BODY is a sequence of section statements.
1105    CLAUSES are any of the OMP sections contsruct's clauses: private,
1106    firstprivate, lastprivate, reduction, and nowait.  */
1107 
1108 gomp_sections *
gimple_build_omp_sections(gimple_seq body,tree clauses)1109 gimple_build_omp_sections (gimple_seq body, tree clauses)
1110 {
1111   gomp_sections *p
1112     = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1113   if (body)
1114     gimple_omp_set_body (p, body);
1115   gimple_omp_sections_set_clauses (p, clauses);
1116 
1117   return p;
1118 }
1119 
1120 
1121 /* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1122 
1123 gimple *
gimple_build_omp_sections_switch(void)1124 gimple_build_omp_sections_switch (void)
1125 {
1126   return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1127 }
1128 
1129 
1130 /* Build a GIMPLE_OMP_SINGLE statement.
1131 
1132    BODY is the sequence of statements that will be executed once.
1133    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1134    copyprivate, nowait.  */
1135 
1136 gomp_single *
gimple_build_omp_single(gimple_seq body,tree clauses)1137 gimple_build_omp_single (gimple_seq body, tree clauses)
1138 {
1139   gomp_single *p
1140     = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1141   if (body)
1142     gimple_omp_set_body (p, body);
1143   gimple_omp_single_set_clauses (p, clauses);
1144 
1145   return p;
1146 }
1147 
1148 
1149 /* Build a GIMPLE_OMP_TARGET statement.
1150 
1151    BODY is the sequence of statements that will be executed.
1152    KIND is the kind of the region.
1153    CLAUSES are any of the construct's clauses.  */
1154 
1155 gomp_target *
gimple_build_omp_target(gimple_seq body,int kind,tree clauses)1156 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1157 {
1158   gomp_target *p
1159     = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1160   if (body)
1161     gimple_omp_set_body (p, body);
1162   gimple_omp_target_set_clauses (p, clauses);
1163   gimple_omp_target_set_kind (p, kind);
1164 
1165   return p;
1166 }
1167 
1168 
1169 /* Build a GIMPLE_OMP_TEAMS statement.
1170 
1171    BODY is the sequence of statements that will be executed.
1172    CLAUSES are any of the OMP teams construct's clauses.  */
1173 
1174 gomp_teams *
gimple_build_omp_teams(gimple_seq body,tree clauses)1175 gimple_build_omp_teams (gimple_seq body, tree clauses)
1176 {
1177   gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1178   if (body)
1179     gimple_omp_set_body (p, body);
1180   gimple_omp_teams_set_clauses (p, clauses);
1181 
1182   return p;
1183 }
1184 
1185 
1186 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1187 
1188 gomp_atomic_load *
gimple_build_omp_atomic_load(tree lhs,tree rhs)1189 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1190 {
1191   gomp_atomic_load *p
1192     = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1193   gimple_omp_atomic_load_set_lhs (p, lhs);
1194   gimple_omp_atomic_load_set_rhs (p, rhs);
1195   return p;
1196 }
1197 
1198 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1199 
1200    VAL is the value we are storing.  */
1201 
1202 gomp_atomic_store *
gimple_build_omp_atomic_store(tree val)1203 gimple_build_omp_atomic_store (tree val)
1204 {
1205   gomp_atomic_store *p
1206     = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1207   gimple_omp_atomic_store_set_val (p, val);
1208   return p;
1209 }
1210 
1211 /* Build a GIMPLE_TRANSACTION statement.  */
1212 
1213 gtransaction *
gimple_build_transaction(gimple_seq body)1214 gimple_build_transaction (gimple_seq body)
1215 {
1216   gtransaction *p
1217     = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1218   gimple_transaction_set_body (p, body);
1219   gimple_transaction_set_label_norm (p, 0);
1220   gimple_transaction_set_label_uninst (p, 0);
1221   gimple_transaction_set_label_over (p, 0);
1222   return p;
1223 }
1224 
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.

   GS is the offending statement; CODE and SUBCODE are what the caller
   expected; FILE, LINE and FUNCTION locate the failed check.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* A zero subcode has no tree code name; print it as empty.  */
  const char *actual_subcode
    = (gs->subcode > 0
       ? get_tree_code_name ((enum tree_code) gs->subcode)
       : "");
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  actual_subcode,
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1243 
1244 
1245 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1246    *SEQ_P is NULL, a new sequence is allocated.  */
1247 
1248 void
gimple_seq_add_stmt(gimple_seq * seq_p,gimple * gs)1249 gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1250 {
1251   gimple_stmt_iterator si;
1252   if (gs == NULL)
1253     return;
1254 
1255   si = gsi_last (*seq_p);
1256   gsi_insert_after (&si, gs, GSI_NEW_STMT);
1257 }
1258 
1259 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1260    *SEQ_P is NULL, a new sequence is allocated.  This function is
1261    similar to gimple_seq_add_stmt, but does not scan the operands.
1262    During gimplification, we need to manipulate statement sequences
1263    before the def/use vectors have been constructed.  */
1264 
1265 void
gimple_seq_add_stmt_without_update(gimple_seq * seq_p,gimple * gs)1266 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1267 {
1268   gimple_stmt_iterator si;
1269 
1270   if (gs == NULL)
1271     return;
1272 
1273   si = gsi_last (*seq_p);
1274   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1275 }
1276 
1277 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1278    NULL, a new sequence is allocated.  */
1279 
1280 void
gimple_seq_add_seq(gimple_seq * dst_p,gimple_seq src)1281 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1282 {
1283   gimple_stmt_iterator si;
1284   if (src == NULL)
1285     return;
1286 
1287   si = gsi_last (*dst_p);
1288   gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1289 }
1290 
1291 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1292    NULL, a new sequence is allocated.  This function is
1293    similar to gimple_seq_add_seq, but does not scan the operands.  */
1294 
1295 void
gimple_seq_add_seq_without_update(gimple_seq * dst_p,gimple_seq src)1296 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1297 {
1298   gimple_stmt_iterator si;
1299   if (src == NULL)
1300     return;
1301 
1302   si = gsi_last (*dst_p);
1303   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1304 }
1305 
1306 /* Determine whether to assign a location to the statement GS.  */
1307 
1308 static bool
should_carry_location_p(gimple * gs)1309 should_carry_location_p (gimple *gs)
1310 {
1311   /* Don't emit a line note for a label.  We particularly don't want to
1312      emit one for the break label, since it doesn't actually correspond
1313      to the beginning of the loop/switch.  */
1314   if (gimple_code (gs) == GIMPLE_LABEL)
1315     return false;
1316 
1317   return true;
1318 }
1319 
1320 /* Set the location for gimple statement GS to LOCATION.  */
1321 
1322 static void
annotate_one_with_location(gimple * gs,location_t location)1323 annotate_one_with_location (gimple *gs, location_t location)
1324 {
1325   if (!gimple_has_location (gs)
1326       && !gimple_do_not_emit_location_p (gs)
1327       && should_carry_location_p (gs))
1328     gimple_set_location (gs, location);
1329 }
1330 
1331 /* Set LOCATION for all the statements after iterator GSI in sequence
1332    SEQ.  If GSI is pointing to the end of the sequence, start with the
1333    first statement in SEQ.  */
1334 
1335 void
annotate_all_with_location_after(gimple_seq seq,gimple_stmt_iterator gsi,location_t location)1336 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1337 				  location_t location)
1338 {
1339   if (gsi_end_p (gsi))
1340     gsi = gsi_start (seq);
1341   else
1342     gsi_next (&gsi);
1343 
1344   for (; !gsi_end_p (gsi); gsi_next (&gsi))
1345     annotate_one_with_location (gsi_stmt (gsi), location);
1346 }
1347 
1348 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1349 
1350 void
annotate_all_with_location(gimple_seq stmt_p,location_t location)1351 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1352 {
1353   gimple_stmt_iterator i;
1354 
1355   if (gimple_seq_empty_p (stmt_p))
1356     return;
1357 
1358   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1359     {
1360       gimple *gs = gsi_stmt (i);
1361       annotate_one_with_location (gs, location);
1362     }
1363 }
1364 
1365 /* Helper function of empty_body_p.  Return true if STMT is an empty
1366    statement.  */
1367 
1368 static bool
empty_stmt_p(gimple * stmt)1369 empty_stmt_p (gimple *stmt)
1370 {
1371   if (gimple_code (stmt) == GIMPLE_NOP)
1372     return true;
1373   if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1374     return empty_body_p (gimple_bind_body (bind_stmt));
1375   return false;
1376 }
1377 
1378 
1379 /* Return true if BODY contains nothing but empty statements.  */
1380 
1381 bool
empty_body_p(gimple_seq body)1382 empty_body_p (gimple_seq body)
1383 {
1384   gimple_stmt_iterator i;
1385 
1386   if (gimple_seq_empty_p (body))
1387     return true;
1388   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1389     if (!empty_stmt_p (gsi_stmt (i))
1390 	&& !is_gimple_debug (gsi_stmt (i)))
1391       return false;
1392 
1393   return true;
1394 }
1395 
1396 
1397 /* Perform a deep copy of sequence SRC and return the result.  */
1398 
1399 gimple_seq
gimple_seq_copy(gimple_seq src)1400 gimple_seq_copy (gimple_seq src)
1401 {
1402   gimple_stmt_iterator gsi;
1403   gimple_seq new_seq = NULL;
1404   gimple *stmt;
1405 
1406   for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1407     {
1408       stmt = gimple_copy (gsi_stmt (gsi));
1409       gimple_seq_add_stmt (&new_seq, stmt);
1410     }
1411 
1412   return new_seq;
1413 }
1414 
1415 
1416 
1417 /* Return true if calls C1 and C2 are known to go to the same function.  */
1418 
1419 bool
gimple_call_same_target_p(const gimple * c1,const gimple * c2)1420 gimple_call_same_target_p (const gimple *c1, const gimple *c2)
1421 {
1422   if (gimple_call_internal_p (c1))
1423     return (gimple_call_internal_p (c2)
1424 	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
1425 	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
1426 		|| c1 == c2));
1427   else
1428     return (gimple_call_fn (c1) == gimple_call_fn (c2)
1429 	    || (gimple_call_fndecl (c1)
1430 		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1431 }
1432 
1433 /* Detect flags from a GIMPLE_CALL.  This is just like
1434    call_expr_flags, but for gimple tuples.  */
1435 
1436 int
gimple_call_flags(const gimple * stmt)1437 gimple_call_flags (const gimple *stmt)
1438 {
1439   int flags;
1440   tree decl = gimple_call_fndecl (stmt);
1441 
1442   if (decl)
1443     flags = flags_from_decl_or_type (decl);
1444   else if (gimple_call_internal_p (stmt))
1445     flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1446   else
1447     flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1448 
1449   if (stmt->subcode & GF_CALL_NOTHROW)
1450     flags |= ECF_NOTHROW;
1451 
1452   if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
1453     flags |= ECF_BY_DESCRIPTOR;
1454 
1455   return flags;
1456 }
1457 
1458 /* Return the "fn spec" string for call STMT.  */
1459 
1460 static const_tree
gimple_call_fnspec(const gcall * stmt)1461 gimple_call_fnspec (const gcall *stmt)
1462 {
1463   tree type, attr;
1464 
1465   if (gimple_call_internal_p (stmt))
1466     return internal_fn_fnspec (gimple_call_internal_fn (stmt));
1467 
1468   type = gimple_call_fntype (stmt);
1469   if (!type)
1470     return NULL_TREE;
1471 
1472   attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1473   if (!attr)
1474     return NULL_TREE;
1475 
1476   return TREE_VALUE (TREE_VALUE (attr));
1477 }
1478 
1479 /* Detects argument flags for argument number ARG on call STMT.  */
1480 
1481 int
gimple_call_arg_flags(const gcall * stmt,unsigned arg)1482 gimple_call_arg_flags (const gcall *stmt, unsigned arg)
1483 {
1484   const_tree attr = gimple_call_fnspec (stmt);
1485 
1486   if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1487     return 0;
1488 
1489   switch (TREE_STRING_POINTER (attr)[1 + arg])
1490     {
1491     case 'x':
1492     case 'X':
1493       return EAF_UNUSED;
1494 
1495     case 'R':
1496       return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1497 
1498     case 'r':
1499       return EAF_NOCLOBBER | EAF_NOESCAPE;
1500 
1501     case 'W':
1502       return EAF_DIRECT | EAF_NOESCAPE;
1503 
1504     case 'w':
1505       return EAF_NOESCAPE;
1506 
1507     case '.':
1508     default:
1509       return 0;
1510     }
1511 }
1512 
1513 /* Detects return flags for the call STMT.  */
1514 
1515 int
gimple_call_return_flags(const gcall * stmt)1516 gimple_call_return_flags (const gcall *stmt)
1517 {
1518   const_tree attr;
1519 
1520   if (gimple_call_flags (stmt) & ECF_MALLOC)
1521     return ERF_NOALIAS;
1522 
1523   attr = gimple_call_fnspec (stmt);
1524   if (!attr || TREE_STRING_LENGTH (attr) < 1)
1525     return 0;
1526 
1527   switch (TREE_STRING_POINTER (attr)[0])
1528     {
1529     case '1':
1530     case '2':
1531     case '3':
1532     case '4':
1533       return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1534 
1535     case 'm':
1536       return ERF_NOALIAS;
1537 
1538     case '.':
1539     default:
1540       return 0;
1541     }
1542 }
1543 
1544 
1545 /* Return true if GS is a copy assignment.  */
1546 
1547 bool
gimple_assign_copy_p(gimple * gs)1548 gimple_assign_copy_p (gimple *gs)
1549 {
1550   return (gimple_assign_single_p (gs)
1551 	  && is_gimple_val (gimple_op (gs, 1)));
1552 }
1553 
1554 
1555 /* Return true if GS is a SSA_NAME copy assignment.  */
1556 
1557 bool
gimple_assign_ssa_name_copy_p(gimple * gs)1558 gimple_assign_ssa_name_copy_p (gimple *gs)
1559 {
1560   return (gimple_assign_single_p (gs)
1561 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1562 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1563 }
1564 
1565 
1566 /* Return true if GS is an assignment with a unary RHS, but the
1567    operator has no effect on the assigned value.  The logic is adapted
1568    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1569    instances in which STRIP_NOPS was previously applied to the RHS of
1570    an assignment.
1571 
1572    NOTE: In the use cases that led to the creation of this function
1573    and of gimple_assign_single_p, it is typical to test for either
1574    condition and to proceed in the same manner.  In each case, the
1575    assigned value is represented by the single RHS operand of the
1576    assignment.  I suspect there may be cases where gimple_assign_copy_p,
1577    gimple_assign_single_p, or equivalent logic is used where a similar
1578    treatment of unary NOPs is appropriate.  */
1579 
1580 bool
gimple_assign_unary_nop_p(gimple * gs)1581 gimple_assign_unary_nop_p (gimple *gs)
1582 {
1583   return (is_gimple_assign (gs)
1584           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1585               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1586           && gimple_assign_rhs1 (gs) != error_mark_node
1587           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1588               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1589 }
1590 
1591 /* Set BB to be the basic block holding G.  */
1592 
1593 void
gimple_set_bb(gimple * stmt,basic_block bb)1594 gimple_set_bb (gimple *stmt, basic_block bb)
1595 {
1596   stmt->bb = bb;
1597 
1598   if (gimple_code (stmt) != GIMPLE_LABEL)
1599     return;
1600 
1601   /* If the statement is a label, add the label to block-to-labels map
1602      so that we can speed up edge creation for GIMPLE_GOTOs.  */
1603   if (cfun->cfg)
1604     {
1605       tree t;
1606       int uid;
1607 
1608       t = gimple_label_label (as_a <glabel *> (stmt));
1609       uid = LABEL_DECL_UID (t);
1610       if (uid == -1)
1611 	{
1612 	  unsigned old_len =
1613 	    vec_safe_length (label_to_block_map_for_fn (cfun));
1614 	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1615 	  if (old_len <= (unsigned) uid)
1616 	    {
1617 	      unsigned new_len = 3 * uid / 2 + 1;
1618 
1619 	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
1620 				     new_len);
1621 	    }
1622 	}
1623 
1624       (*label_to_block_map_for_fn (cfun))[uid] = bb;
1625     }
1626 }
1627 
1628 
1629 /* Modify the RHS of the assignment pointed-to by GSI using the
1630    operands in the expression tree EXPR.
1631 
1632    NOTE: The statement pointed-to by GSI may be reallocated if it
1633    did not have enough operand slots.
1634 
1635    This function is useful to convert an existing tree expression into
1636    the flat representation used for the RHS of a GIMPLE assignment.
1637    It will reallocate memory as needed to expand or shrink the number
1638    of operand slots needed to represent EXPR.
1639 
1640    NOTE: If you find yourself building a tree and then calling this
1641    function, you are most certainly doing it the slow way.  It is much
1642    better to build a new assignment or to use the function
1643    gimple_assign_set_rhs_with_ops, which does not require an
1644    expression tree to be built.  */
1645 
1646 void
gimple_assign_set_rhs_from_tree(gimple_stmt_iterator * gsi,tree expr)1647 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1648 {
1649   enum tree_code subcode;
1650   tree op1, op2, op3;
1651 
1652   extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1653   gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1654 }
1655 
1656 
1657 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1658    operands OP1, OP2 and OP3.
1659 
1660    NOTE: The statement pointed-to by GSI may be reallocated if it
1661    did not have enough operand slots.  */
1662 
1663 void
gimple_assign_set_rhs_with_ops(gimple_stmt_iterator * gsi,enum tree_code code,tree op1,tree op2,tree op3)1664 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
1665 				tree op1, tree op2, tree op3)
1666 {
1667   unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1668   gimple *stmt = gsi_stmt (*gsi);
1669 
1670   /* If the new CODE needs more operands, allocate a new statement.  */
1671   if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1672     {
1673       tree lhs = gimple_assign_lhs (stmt);
1674       gimple *new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1675       memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1676       gimple_init_singleton (new_stmt);
1677       gsi_replace (gsi, new_stmt, false);
1678       stmt = new_stmt;
1679 
1680       /* The LHS needs to be reset as this also changes the SSA name
1681 	 on the LHS.  */
1682       gimple_assign_set_lhs (stmt, lhs);
1683     }
1684 
1685   gimple_set_num_ops (stmt, new_rhs_ops + 1);
1686   gimple_set_subcode (stmt, code);
1687   gimple_assign_set_rhs1 (stmt, op1);
1688   if (new_rhs_ops > 1)
1689     gimple_assign_set_rhs2 (stmt, op2);
1690   if (new_rhs_ops > 2)
1691     gimple_assign_set_rhs3 (stmt, op3);
1692 }
1693 
1694 
1695 /* Return the LHS of a statement that performs an assignment,
1696    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1697    for a call to a function that returns no value, or for a
1698    statement other than an assignment or a call.  */
1699 
1700 tree
gimple_get_lhs(const gimple * stmt)1701 gimple_get_lhs (const gimple *stmt)
1702 {
1703   enum gimple_code code = gimple_code (stmt);
1704 
1705   if (code == GIMPLE_ASSIGN)
1706     return gimple_assign_lhs (stmt);
1707   else if (code == GIMPLE_CALL)
1708     return gimple_call_lhs (stmt);
1709   else
1710     return NULL_TREE;
1711 }
1712 
1713 
1714 /* Set the LHS of a statement that performs an assignment,
1715    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1716 
1717 void
gimple_set_lhs(gimple * stmt,tree lhs)1718 gimple_set_lhs (gimple *stmt, tree lhs)
1719 {
1720   enum gimple_code code = gimple_code (stmt);
1721 
1722   if (code == GIMPLE_ASSIGN)
1723     gimple_assign_set_lhs (stmt, lhs);
1724   else if (code == GIMPLE_CALL)
1725     gimple_call_set_lhs (stmt, lhs);
1726   else
1727     gcc_unreachable ();
1728 }
1729 
1730 
1731 /* Return a deep copy of statement STMT.  All the operands from STMT
1732    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
1733    and VUSE operand arrays are set to empty in the new copy.  The new
1734    copy isn't part of any sequence.  */
1735 
1736 gimple *
gimple_copy(gimple * stmt)1737 gimple_copy (gimple *stmt)
1738 {
1739   enum gimple_code code = gimple_code (stmt);
1740   unsigned num_ops = gimple_num_ops (stmt);
1741   gimple *copy = gimple_alloc (code, num_ops);
1742   unsigned i;
1743 
1744   /* Shallow copy all the fields from STMT.  */
1745   memcpy (copy, stmt, gimple_size (code));
1746   gimple_init_singleton (copy);
1747 
1748   /* If STMT has sub-statements, deep-copy them as well.  */
1749   if (gimple_has_substatements (stmt))
1750     {
1751       gimple_seq new_seq;
1752       tree t;
1753 
1754       switch (gimple_code (stmt))
1755 	{
1756 	case GIMPLE_BIND:
1757 	  {
1758 	    gbind *bind_stmt = as_a <gbind *> (stmt);
1759 	    gbind *bind_copy = as_a <gbind *> (copy);
1760 	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
1761 	    gimple_bind_set_body (bind_copy, new_seq);
1762 	    gimple_bind_set_vars (bind_copy,
1763 				  unshare_expr (gimple_bind_vars (bind_stmt)));
1764 	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
1765 	  }
1766 	  break;
1767 
1768 	case GIMPLE_CATCH:
1769 	  {
1770 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1771 	    gcatch *catch_copy = as_a <gcatch *> (copy);
1772 	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
1773 	    gimple_catch_set_handler (catch_copy, new_seq);
1774 	    t = unshare_expr (gimple_catch_types (catch_stmt));
1775 	    gimple_catch_set_types (catch_copy, t);
1776 	  }
1777 	  break;
1778 
1779 	case GIMPLE_EH_FILTER:
1780 	  {
1781 	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
1782 	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
1783 	    new_seq
1784 	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
1785 	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
1786 	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
1787 	    gimple_eh_filter_set_types (eh_filter_copy, t);
1788 	  }
1789 	  break;
1790 
1791 	case GIMPLE_EH_ELSE:
1792 	  {
1793 	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
1794 	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
1795 	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
1796 	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
1797 	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
1798 	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
1799 	  }
1800 	  break;
1801 
1802 	case GIMPLE_TRY:
1803 	  {
1804 	    gtry *try_stmt = as_a <gtry *> (stmt);
1805 	    gtry *try_copy = as_a <gtry *> (copy);
1806 	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
1807 	    gimple_try_set_eval (try_copy, new_seq);
1808 	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
1809 	    gimple_try_set_cleanup (try_copy, new_seq);
1810 	  }
1811 	  break;
1812 
1813 	case GIMPLE_OMP_FOR:
1814 	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
1815 	  gimple_omp_for_set_pre_body (copy, new_seq);
1816 	  t = unshare_expr (gimple_omp_for_clauses (stmt));
1817 	  gimple_omp_for_set_clauses (copy, t);
1818 	  {
1819 	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
1820 	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
1821 	      ( gimple_omp_for_collapse (stmt));
1822           }
1823 	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1824 	    {
1825 	      gimple_omp_for_set_cond (copy, i,
1826 				       gimple_omp_for_cond (stmt, i));
1827 	      gimple_omp_for_set_index (copy, i,
1828 					gimple_omp_for_index (stmt, i));
1829 	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
1830 	      gimple_omp_for_set_initial (copy, i, t);
1831 	      t = unshare_expr (gimple_omp_for_final (stmt, i));
1832 	      gimple_omp_for_set_final (copy, i, t);
1833 	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
1834 	      gimple_omp_for_set_incr (copy, i, t);
1835 	    }
1836 	  goto copy_omp_body;
1837 
1838 	case GIMPLE_OMP_PARALLEL:
1839 	  {
1840 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1841 	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
1842 	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
1843 	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
1844 	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
1845 	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
1846 	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
1847 	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
1848 	  }
1849 	  goto copy_omp_body;
1850 
1851 	case GIMPLE_OMP_TASK:
1852 	  t = unshare_expr (gimple_omp_task_clauses (stmt));
1853 	  gimple_omp_task_set_clauses (copy, t);
1854 	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
1855 	  gimple_omp_task_set_child_fn (copy, t);
1856 	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
1857 	  gimple_omp_task_set_data_arg (copy, t);
1858 	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
1859 	  gimple_omp_task_set_copy_fn (copy, t);
1860 	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
1861 	  gimple_omp_task_set_arg_size (copy, t);
1862 	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
1863 	  gimple_omp_task_set_arg_align (copy, t);
1864 	  goto copy_omp_body;
1865 
1866 	case GIMPLE_OMP_CRITICAL:
1867 	  t = unshare_expr (gimple_omp_critical_name
1868 				(as_a <gomp_critical *> (stmt)));
1869 	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
1870 	  t = unshare_expr (gimple_omp_critical_clauses
1871 				(as_a <gomp_critical *> (stmt)));
1872 	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
1873 	  goto copy_omp_body;
1874 
1875 	case GIMPLE_OMP_ORDERED:
1876 	  t = unshare_expr (gimple_omp_ordered_clauses
1877 				(as_a <gomp_ordered *> (stmt)));
1878 	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
1879 	  goto copy_omp_body;
1880 
1881 	case GIMPLE_OMP_SECTIONS:
1882 	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
1883 	  gimple_omp_sections_set_clauses (copy, t);
1884 	  t = unshare_expr (gimple_omp_sections_control (stmt));
1885 	  gimple_omp_sections_set_control (copy, t);
1886 	  goto copy_omp_body;
1887 
1888 	case GIMPLE_OMP_SINGLE:
1889 	  {
1890 	    gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
1891 	    t = unshare_expr (gimple_omp_single_clauses (stmt));
1892 	    gimple_omp_single_set_clauses (omp_single_copy, t);
1893 	  }
1894 	  goto copy_omp_body;
1895 
1896 	case GIMPLE_OMP_TARGET:
1897 	  {
1898 	    gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
1899 	    gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
1900 	    t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
1901 	    gimple_omp_target_set_clauses (omp_target_copy, t);
1902 	    t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
1903 	    gimple_omp_target_set_data_arg (omp_target_copy, t);
1904 	  }
1905 	  goto copy_omp_body;
1906 
1907 	case GIMPLE_OMP_TEAMS:
1908 	  {
1909 	    gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
1910 	    t = unshare_expr (gimple_omp_teams_clauses (stmt));
1911 	    gimple_omp_teams_set_clauses (omp_teams_copy, t);
1912 	  }
1913 	  /* FALLTHRU  */
1914 
1915 	case GIMPLE_OMP_SECTION:
1916 	case GIMPLE_OMP_MASTER:
1917 	case GIMPLE_OMP_TASKGROUP:
1918 	case GIMPLE_OMP_GRID_BODY:
1919 	copy_omp_body:
1920 	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
1921 	  gimple_omp_set_body (copy, new_seq);
1922 	  break;
1923 
1924 	case GIMPLE_TRANSACTION:
1925 	  new_seq = gimple_seq_copy (gimple_transaction_body (
1926 				       as_a <gtransaction *> (stmt)));
1927 	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
1928 				       new_seq);
1929 	  break;
1930 
1931 	case GIMPLE_WITH_CLEANUP_EXPR:
1932 	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
1933 	  gimple_wce_set_cleanup (copy, new_seq);
1934 	  break;
1935 
1936 	default:
1937 	  gcc_unreachable ();
1938 	}
1939     }
1940 
1941   /* Make copy of operands.  */
1942   for (i = 0; i < num_ops; i++)
1943     gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
1944 
1945   if (gimple_has_mem_ops (stmt))
1946     {
1947       gimple_set_vdef (copy, gimple_vdef (stmt));
1948       gimple_set_vuse (copy, gimple_vuse (stmt));
1949     }
1950 
1951   /* Clear out SSA operand vectors on COPY.  */
1952   if (gimple_has_ops (stmt))
1953     {
1954       gimple_set_use_ops (copy, NULL);
1955 
1956       /* SSA operands need to be updated.  */
1957       gimple_set_modified (copy, true);
1958     }
1959 
1960   if (gimple_debug_nonbind_marker_p (stmt))
1961     cfun->debug_marker_count++;
1962 
1963   return copy;
1964 }
1965 
1966 
1967 /* Return true if statement S has side-effects.  We consider a
1968    statement to have side effects if:
1969 
1970    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1971    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
1972 
1973 bool
gimple_has_side_effects(const gimple * s)1974 gimple_has_side_effects (const gimple *s)
1975 {
1976   if (is_gimple_debug (s))
1977     return false;
1978 
1979   /* We don't have to scan the arguments to check for
1980      volatile arguments, though, at present, we still
1981      do a scan to check for TREE_SIDE_EFFECTS.  */
1982   if (gimple_has_volatile_ops (s))
1983     return true;
1984 
1985   if (gimple_code (s) == GIMPLE_ASM
1986       && gimple_asm_volatile_p (as_a <const gasm *> (s)))
1987     return true;
1988 
1989   if (is_gimple_call (s))
1990     {
1991       int flags = gimple_call_flags (s);
1992 
1993       /* An infinite loop is considered a side effect.  */
1994       if (!(flags & (ECF_CONST | ECF_PURE))
1995 	  || (flags & ECF_LOOPING_CONST_OR_PURE))
1996 	return true;
1997 
1998       return false;
1999     }
2000 
2001   return false;
2002 }
2003 
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For a GIMPLE_ASSIGN operand 0 is the LHS (the store), so start
	 at 1 unless the caller asked for stores to be checked too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* A volatile asm is treated as possibly trapping.  */
      return gimple_asm_volatile_p (as_a <gasm *> (s));

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the divisor of a binary RHS along so division by a nonzero
	 constant can be recognized as non-trapping.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      /* Only floating-point comparisons can trap here; overflow trapping
	 does not apply to a bare comparison.  */
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
2057 
2058 /* Return true if statement S can trap.  */
2059 
2060 bool
gimple_could_trap_p(gimple * s)2061 gimple_could_trap_p (gimple *s)
2062 {
2063   return gimple_could_trap_p_1 (s, true, true);
2064 }
2065 
2066 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2067 
2068 bool
gimple_assign_rhs_could_trap_p(gimple * s)2069 gimple_assign_rhs_could_trap_p (gimple *s)
2070 {
2071   gcc_assert (is_gimple_assign (s));
2072   return gimple_could_trap_p_1 (s, true, false);
2073 }
2074 
2075 
2076 /* Print debugging information for gimple stmts generated.  */
2077 
2078 void
dump_gimple_statistics(void)2079 dump_gimple_statistics (void)
2080 {
2081   int i;
2082   uint64_t total_tuples = 0, total_bytes = 0;
2083 
2084   if (! GATHER_STATISTICS)
2085     {
2086       fprintf (stderr, "No GIMPLE statistics\n");
2087       return;
2088     }
2089 
2090   fprintf (stderr, "\nGIMPLE statements\n");
2091   fprintf (stderr, "Kind                   Stmts      Bytes\n");
2092   fprintf (stderr, "---------------------------------------\n");
2093   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2094     {
2095       fprintf (stderr, "%-20s %7" PRIu64 " %10" PRIu64 "\n",
2096 	       gimple_alloc_kind_names[i], gimple_alloc_counts[i],
2097 	       gimple_alloc_sizes[i]);
2098       total_tuples += gimple_alloc_counts[i];
2099       total_bytes += gimple_alloc_sizes[i];
2100     }
2101   fprintf (stderr, "---------------------------------------\n");
2102   fprintf (stderr, "%-20s %7" PRIu64 " %10" PRIu64 "\n", "Total",
2103 	   total_tuples, total_bytes);
2104   fprintf (stderr, "---------------------------------------\n");
2105 }
2106 
2107 
2108 /* Return the number of operands needed on the RHS of a GIMPLE
2109    assignment for an expression with tree code CODE.  */
2110 
2111 unsigned
get_gimple_rhs_num_ops(enum tree_code code)2112 get_gimple_rhs_num_ops (enum tree_code code)
2113 {
2114   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2115 
2116   if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2117     return 1;
2118   else if (rhs_class == GIMPLE_BINARY_RHS)
2119     return 2;
2120   else if (rhs_class == GIMPLE_TERNARY_RHS)
2121     return 3;
2122   else
2123     gcc_unreachable ();
2124 }
2125 
/* Table mapping every tree code to the gimple_rhs_class its RHS form
   belongs to, built by expanding DEFTREECODE over all-tree.def.
   Codes map by their tcc_* class first; codes that GIMPLE treats
   specially (truth ops, ternary-RHS codes, single-operand references)
   are overridden explicitly afterwards.  Anything not matched is
   GIMPLE_INVALID_RHS.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == BIT_INSERT_EXPR					    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
/* Entries past the base tree codes are never valid on a GIMPLE RHS.  */
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2163 
2164 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2165    a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2166    we failed to create one.  */
2167 
2168 tree
canonicalize_cond_expr_cond(tree t)2169 canonicalize_cond_expr_cond (tree t)
2170 {
2171   /* Strip conversions around boolean operations.  */
2172   if (CONVERT_EXPR_P (t)
2173       && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2174           || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2175 	     == BOOLEAN_TYPE))
2176     t = TREE_OPERAND (t, 0);
2177 
2178   /* For !x use x == 0.  */
2179   if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2180     {
2181       tree top0 = TREE_OPERAND (t, 0);
2182       t = build2 (EQ_EXPR, TREE_TYPE (t),
2183 		  top0, build_int_cst (TREE_TYPE (top0), 0));
2184     }
2185   /* For cmp ? 1 : 0 use cmp.  */
2186   else if (TREE_CODE (t) == COND_EXPR
2187 	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2188 	   && integer_onep (TREE_OPERAND (t, 1))
2189 	   && integer_zerop (TREE_OPERAND (t, 2)))
2190     {
2191       tree top0 = TREE_OPERAND (t, 0);
2192       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2193 		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2194     }
2195   /* For x ^ y use x != y.  */
2196   else if (TREE_CODE (t) == BIT_XOR_EXPR)
2197     t = build2 (NE_EXPR, TREE_TYPE (t),
2198 		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2199 
2200   if (is_gimple_condexpr (t))
2201     return t;
2202 
2203   return NULL_TREE;
2204 }
2205 
2206 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2207    the positions marked by the set ARGS_TO_SKIP.  */
2208 
2209 gcall *
gimple_call_copy_skip_args(gcall * stmt,bitmap args_to_skip)2210 gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2211 {
2212   int i;
2213   int nargs = gimple_call_num_args (stmt);
2214   auto_vec<tree> vargs (nargs);
2215   gcall *new_stmt;
2216 
2217   for (i = 0; i < nargs; i++)
2218     if (!bitmap_bit_p (args_to_skip, i))
2219       vargs.quick_push (gimple_call_arg (stmt, i));
2220 
2221   if (gimple_call_internal_p (stmt))
2222     new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2223 					       vargs);
2224   else
2225     new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2226 
2227   if (gimple_call_lhs (stmt))
2228     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2229 
2230   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2231   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2232 
2233   if (gimple_has_location (stmt))
2234     gimple_set_location (new_stmt, gimple_location (stmt));
2235   gimple_call_copy_flags (new_stmt, stmt);
2236   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2237 
2238   gimple_set_modified (new_stmt, true);
2239 
2240   return new_stmt;
2241 }
2242 
2243 
2244 
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* With equal DECL_OFFSET_ALIGN the byte offset trees are directly
     comparable; the bit-within-unit offsets must match as well.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Byte offsets agree; the fields coincide iff the residual
	 sub-byte bit offsets agree too.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Offsets not representable as host integers: assume unequal.  */
  return false;
}
2295 
2296 
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* First try to match TYPE's main variant against the standard C
     integer type nodes by identity.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Target-provided __intN types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Mode-named integer type nodes.  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Fixed-point types come in short/plain/long/long-long and saturating
     flavors; the macros below test each signed/unsigned pair in turn.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match by mode and precision rather than identity.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node fits: build a fresh integer type of the right
     precision and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2473 
2474 
2475 /* Return an unsigned type the same as TYPE in other respects.  */
2476 
2477 tree
gimple_unsigned_type(tree type)2478 gimple_unsigned_type (tree type)
2479 {
2480   return gimple_signed_or_unsigned_type (true, type);
2481 }
2482 
2483 
2484 /* Return a signed type the same as TYPE in other respects.  */
2485 
2486 tree
gimple_signed_type(tree type)2487 gimple_signed_type (tree type)
2488 {
2489   return gimple_signed_or_unsigned_type (false, type);
2490 }
2491 
2492 
2493 /* Return the typed-based alias set for T, which may be an expression
2494    or a type.  Return -1 if we don't do anything special.  */
2495 
2496 alias_set_type
gimple_get_alias_set(tree t)2497 gimple_get_alias_set (tree t)
2498 {
2499   /* That's all the expressions we handle specially.  */
2500   if (!TYPE_P (t))
2501     return -1;
2502 
2503   /* For convenience, follow the C standard when dealing with
2504      character types.  Any object may be accessed via an lvalue that
2505      has character type.  */
2506   if (t == char_type_node
2507       || t == signed_char_type_node
2508       || t == unsigned_char_type_node)
2509     return 0;
2510 
2511   /* Allow aliasing between signed and unsigned variants of the same
2512      type.  We treat the signed variant as canonical.  */
2513   if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2514     {
2515       tree t1 = gimple_signed_type (t);
2516 
2517       /* t1 == t can happen for boolean nodes which are always unsigned.  */
2518       if (t1 != t)
2519 	return get_alias_set (t1);
2520     }
2521 
2522   return -1;
2523 }
2524 
2525 
2526 /* Helper for gimple_ior_addresses_taken_1.  */
2527 
2528 static bool
gimple_ior_addresses_taken_1(gimple *,tree addr,tree,void * data)2529 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2530 {
2531   bitmap addresses_taken = (bitmap)data;
2532   addr = get_base_address (addr);
2533   if (addr
2534       && DECL_P (addr))
2535     {
2536       bitmap_set_bit (addresses_taken, DECL_UID (addr));
2537       return true;
2538     }
2539   return false;
2540 }
2541 
2542 /* Set the bit for the uid of all decls that have their address taken
2543    in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2544    were any in this stmt.  */
2545 
2546 bool
gimple_ior_addresses_taken(bitmap addresses_taken,gimple * stmt)2547 gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2548 {
2549   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2550 					gimple_ior_addresses_taken_1);
2551 }
2552 
2553 
/* Return true when STMTs arguments and return value match those of FNDECL,
   a decl of a builtin function.  */

bool
gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* The LHS, if any, must be losslessly convertible to the declared
     return type.  */
  tree ret = gimple_call_lhs (stmt);
  if (ret
      && !useless_type_conversion_p (TREE_TYPE (ret),
				     TREE_TYPE (TREE_TYPE (fndecl))))
    return false;

  /* Walk the actual arguments alongside the declared parameter types.  */
  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = gimple_call_arg (stmt, i);
      tree type = TREE_VALUE (targs);
      if (!useless_type_conversion_p (type, TREE_TYPE (arg))
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  && !(INTEGRAL_TYPE_P (type)
	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	       && useless_type_conversion_p (integer_type_node,
					     TREE_TYPE (arg))))
	return false;
      targs = TREE_CHAIN (targs);
    }
  /* Leftover declared parameters are acceptable only if the remaining
     formal list is just the terminating void.  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
2593 
2594 /* Return true when STMT is builtins call.  */
2595 
2596 bool
gimple_call_builtin_p(const gimple * stmt)2597 gimple_call_builtin_p (const gimple *stmt)
2598 {
2599   tree fndecl;
2600   if (is_gimple_call (stmt)
2601       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2602       && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2603     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2604   return false;
2605 }
2606 
2607 /* Return true when STMT is builtins call to CLASS.  */
2608 
2609 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_class klass)2610 gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2611 {
2612   tree fndecl;
2613   if (is_gimple_call (stmt)
2614       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2615       && DECL_BUILT_IN_CLASS (fndecl) == klass)
2616     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2617   return false;
2618 }
2619 
2620 /* Return true when STMT is builtins call to CODE of CLASS.  */
2621 
2622 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_function code)2623 gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2624 {
2625   tree fndecl;
2626   if (is_gimple_call (stmt)
2627       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2628       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2629       && DECL_FUNCTION_CODE (fndecl) == code)
2630     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2631   return false;
2632 }
2633 
2634 /* If CALL is a call to a combined_fn (i.e. an internal function or
2635    a normal built-in function), return its code, otherwise return
2636    CFN_LAST.  */
2637 
2638 combined_fn
gimple_call_combined_fn(const gimple * stmt)2639 gimple_call_combined_fn (const gimple *stmt)
2640 {
2641   if (const gcall *call = dyn_cast <const gcall *> (stmt))
2642     {
2643       if (gimple_call_internal_p (call))
2644 	return as_combined_fn (gimple_call_internal_fn (call));
2645 
2646       tree fndecl = gimple_call_fndecl (stmt);
2647       if (fndecl
2648 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2649 	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2650 	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2651     }
2652   return CFN_LAST;
2653 }
2654 
2655 /* Return true if STMT clobbers memory.  STMT is required to be a
2656    GIMPLE_ASM.  */
2657 
2658 bool
gimple_asm_clobbers_memory_p(const gasm * stmt)2659 gimple_asm_clobbers_memory_p (const gasm *stmt)
2660 {
2661   unsigned i;
2662 
2663   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2664     {
2665       tree op = gimple_asm_clobber_op (stmt, i);
2666       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2667 	return true;
2668     }
2669 
2670   /* Non-empty basic ASM implicitly clobbers memory.  */
2671   if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2672     return true;
2673 
2674   return false;
2675 }
2676 
2677 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2678 
2679 void
dump_decl_set(FILE * file,bitmap set)2680 dump_decl_set (FILE *file, bitmap set)
2681 {
2682   if (set)
2683     {
2684       bitmap_iterator bi;
2685       unsigned i;
2686 
2687       fprintf (file, "{ ");
2688 
2689       EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2690 	{
2691 	  fprintf (file, "D.%u", i);
2692 	  fprintf (file, " ");
2693 	}
2694 
2695       fprintf (file, "}");
2696     }
2697   else
2698     fprintf (file, "NIL");
2699 }
2700 
/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* Leaf normal builtins cannot call back into user code, so apart
     from the explicitly freeing ones below they are non-freeing.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      case IFN_ASAN_MARK:
	/* Only the UNPOISON flavor counts as non-freeing here.  */
	return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* Otherwise consult the callgraph's nonfreeing_fn flag, but only
     when the final callee can be pinned down reliably.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  /* An interposable function could be replaced by one that frees.  */
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2744 
2745 /* Return true when CALL is a call stmt that definitely need not
2746    be considered to be a memory barrier.  */
2747 bool
nonbarrier_call_p(gimple * call)2748 nonbarrier_call_p (gimple *call)
2749 {
2750   if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2751     return true;
2752   /* Should extend this to have a nonbarrier_fn flag, just as above in
2753      the nonfreeing case.  */
2754   return false;
2755 }
2756 
2757 /* Callback for walk_stmt_load_store_ops.
2758 
2759    Return TRUE if OP will dereference the tree stored in DATA, FALSE
2760    otherwise.
2761 
2762    This routine only makes a superficial check for a dereference.  Thus
2763    it must only be used if it is safe to return a false negative.  */
2764 static bool
check_loadstore(gimple *,tree op,tree,void * data)2765 check_loadstore (gimple *, tree op, tree, void *data)
2766 {
2767   if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2768     {
2769       /* Some address spaces may legitimately dereference zero.  */
2770       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2771       if (targetm.addr_space.zero_address_valid (as))
2772 	return false;
2773 
2774       return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2775     }
2776   return false;
2777 }
2778 
2779 
2780 /* Return true if OP can be inferred to be non-NULL after STMT executes,
2781    either by using a pointer dereference or attributes.  */
2782 bool
infer_nonnull_range(gimple * stmt,tree op)2783 infer_nonnull_range (gimple *stmt, tree op)
2784 {
2785   return infer_nonnull_range_by_dereference (stmt, op)
2786     || infer_nonnull_range_by_attribute (stmt, op);
2787 }
2788 
2789 /* Return true if OP can be inferred to be non-NULL after STMT
2790    executes by using a pointer dereference.  */
2791 bool
infer_nonnull_range_by_dereference(gimple * stmt,tree op)2792 infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2793 {
2794   /* We can only assume that a pointer dereference will yield
2795      non-NULL if -fdelete-null-pointer-checks is enabled.  */
2796   if (!flag_delete_null_pointer_checks
2797       || !POINTER_TYPE_P (TREE_TYPE (op))
2798       || gimple_code (stmt) == GIMPLE_ASM)
2799     return false;
2800 
2801   if (walk_stmt_load_store_ops (stmt, (void *)op,
2802 				check_loadstore, check_loadstore))
2803     return true;
2804 
2805   return false;
2806 }
2807 
2808 /* Return true if OP can be inferred to be a non-NULL after STMT
2809    executes by using attributes.  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* Walk each "nonnull" attribute on the called function's type.
	 Note ATTRS is advanced both by the for-header (TREE_CHAIN) and
	 by lookup_attribute below, so every iteration positions ATTRS
	 on the next "nonnull" instance in the attribute chain.  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      /* nonnull attribute argument indices are 1-based.  */
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
2871 
2872 /* Compare two case labels.  Because the front end should already have
2873    made sure that case ranges do not overlap, it is enough to only compare
2874    the CASE_LOW values of each case label.  */
2875 
2876 static int
compare_case_labels(const void * p1,const void * p2)2877 compare_case_labels (const void *p1, const void *p2)
2878 {
2879   const_tree const case1 = *(const_tree const*)p1;
2880   const_tree const case2 = *(const_tree const*)p2;
2881 
2882   /* The 'default' case label always goes first.  */
2883   if (!CASE_LOW (case1))
2884     return -1;
2885   else if (!CASE_LOW (case2))
2886     return 1;
2887   else
2888     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2889 }
2890 
2891 /* Sort the case labels in LABEL_VEC in place in ascending order.  */
2892 
void
sort_case_labels (vec<tree> label_vec)
{
  /* compare_case_labels orders by CASE_LOW and places a default
     label (no CASE_LOW) first.  */
  label_vec.qsort (compare_case_labels);
}
2898 
2899 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2900 
2901    LABELS is a vector that contains all case labels to look at.
2902 
2903    INDEX_TYPE is the type of the switch index expression.  Case labels
2904    in LABELS are discarded if their values are not in the value range
2905    covered by INDEX_TYPE.  The remaining case label values are folded
2906    to INDEX_TYPE.
2907 
2908    If a default case exists in LABELS, it is removed from LABELS and
2909    returned in DEFAULT_CASEP.  If no default case exists, but the
2910    case labels already cover the whole range of INDEX_TYPE, a default
2911    case is returned pointing to one of the existing case labels.
2912    Otherwise DEFAULT_CASEP is set to NULL_TREE.
2913 
2914    DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2915    apply and no action is taken regardless of whether a default case is
2916    found or not.  */
2917 
void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  /* First pass: drop unreachable labels, canonicalize ranges, fold
     label values to INDEX_TYPE, and pull out the default case.  */
  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* LEN is the number of labels that survived the filtering pass.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Scan adjacent (sorted) labels for gaps; while at it,
		 track the label covering the widest value range, the
		 best candidate to reuse as the default target.  */
	      tree widest_label = labels[0];
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);

		  if (CASE_HIGH (labels[i]) != NULL_TREE
		      && (CASE_HIGH (widest_label) == NULL_TREE
			  || (wi::gtu_p
			      (wi::to_wide (CASE_HIGH (labels[i]))
			       - wi::to_wide (CASE_LOW (labels[i])),
			       wi::to_wide (CASE_HIGH (widest_label))
			       - wi::to_wide (CASE_LOW (widest_label))))))
		    widest_label = labels[i];

		  /* A gap between consecutive labels means the labels
		     do not cover the whole index range after all.  */
		  if (wi::to_wide (low) + 1 != wi::to_wide (high))
		    break;
		}
	      if (i == len)
		{
		  /* Designate the label with the widest range to be the
		     default label.  */
		  tree label = CASE_LABEL (widest_label);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
3082 
3083 /* Set the location of all statements in SEQ to LOC.  */
3084 
3085 void
gimple_seq_set_location(gimple_seq seq,location_t loc)3086 gimple_seq_set_location (gimple_seq seq, location_t loc)
3087 {
3088   for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
3089     gimple_set_location (gsi_stmt (i), loc);
3090 }
3091 
3092 /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */
3093 
3094 void
gimple_seq_discard(gimple_seq seq)3095 gimple_seq_discard (gimple_seq seq)
3096 {
3097   gimple_stmt_iterator gsi;
3098 
3099   for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
3100     {
3101       gimple *stmt = gsi_stmt (gsi);
3102       gsi_remove (&gsi, true);
3103       release_defs (stmt);
3104       ggc_free (stmt);
3105     }
3106 }
3107 
3108 /* See if STMT now calls function that takes no parameters and if so, drop
3109    call arguments.  This is used when devirtualization machinery redirects
3110    to __builtin_unreachable or __cxa_pure_virtual.  */
3111 
3112 void
maybe_remove_unused_call_args(struct function * fn,gimple * stmt)3113 maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
3114 {
3115   tree decl = gimple_call_fndecl (stmt);
3116   if (TYPE_ARG_TYPES (TREE_TYPE (decl))
3117       && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
3118       && gimple_call_num_args (stmt))
3119     {
3120       gimple_set_num_ops (stmt, 3);
3121       update_stmt_fn (fn, stmt);
3122     }
3123 }
3124 
3125 /* Return false if STMT will likely expand to real function call.  */
3126 
3127 bool
gimple_inexpensive_call_p(gcall * stmt)3128 gimple_inexpensive_call_p (gcall *stmt)
3129 {
3130   if (gimple_call_internal_p (stmt))
3131     return true;
3132   tree decl = gimple_call_fndecl (stmt);
3133   if (decl && is_inexpensive_builtin (decl))
3134     return true;
3135   return false;
3136 }
3137 
3138 #if CHECKING_P
3139 
3140 namespace selftest {
3141 
3142 /* Selftests for core gimple structures.  */
3143 
3144 /* Verify that STMT is pretty-printed as EXPECTED.
3145    Helper function for selftests.  */
3146 
static void
verify_gimple_pp (const char *expected, gimple *stmt)
{
  pretty_printer pp;
  /* Print STMT with no indentation (spc) and no dump flags, then
     compare the rendered text against EXPECTED.  */
  pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, 0 /* flags */);
  ASSERT_STREQ (expected, pp_formatted_text (&pp));
}
3154 
3155 /* Build a GIMPLE_ASSIGN equivalent to
3156      tmp = 5;
3157    and verify various properties of it.  */
3158 
3159 static void
test_assign_single()3160 test_assign_single ()
3161 {
3162   tree type = integer_type_node;
3163   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3164 			 get_identifier ("tmp"),
3165 			 type);
3166   tree rhs = build_int_cst (type, 5);
3167   gassign *stmt = gimple_build_assign (lhs, rhs);
3168   verify_gimple_pp ("tmp = 5;", stmt);
3169 
3170   ASSERT_TRUE (is_gimple_assign (stmt));
3171   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3172   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3173   ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
3174   ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
3175   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3176   ASSERT_TRUE (gimple_assign_single_p (stmt));
3177   ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
3178 }
3179 
3180 /* Build a GIMPLE_ASSIGN equivalent to
3181      tmp = a * b;
3182    and verify various properties of it.  */
3183 
3184 static void
test_assign_binop()3185 test_assign_binop ()
3186 {
3187   tree type = integer_type_node;
3188   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3189 			 get_identifier ("tmp"),
3190 			 type);
3191   tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3192 		       get_identifier ("a"),
3193 		       type);
3194   tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3195 		       get_identifier ("b"),
3196 		       type);
3197   gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
3198   verify_gimple_pp ("tmp = a * b;", stmt);
3199 
3200   ASSERT_TRUE (is_gimple_assign (stmt));
3201   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3202   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3203   ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
3204   ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
3205   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3206   ASSERT_FALSE (gimple_assign_single_p (stmt));
3207   ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
3208 }
3209 
3210 /* Build a GIMPLE_NOP and verify various properties of it.  */
3211 
3212 static void
test_nop_stmt()3213 test_nop_stmt ()
3214 {
3215   gimple *stmt = gimple_build_nop ();
3216   verify_gimple_pp ("GIMPLE_NOP", stmt);
3217   ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
3218   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3219   ASSERT_FALSE (gimple_assign_single_p (stmt));
3220 }
3221 
3222 /* Build a GIMPLE_RETURN equivalent to
3223      return 7;
3224    and verify various properties of it.  */
3225 
3226 static void
test_return_stmt()3227 test_return_stmt ()
3228 {
3229   tree type = integer_type_node;
3230   tree val = build_int_cst (type, 7);
3231   greturn *stmt = gimple_build_return (val);
3232   verify_gimple_pp ("return 7;", stmt);
3233 
3234   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3235   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3236   ASSERT_EQ (val, gimple_return_retval (stmt));
3237   ASSERT_FALSE (gimple_assign_single_p (stmt));
3238 }
3239 
3240 /* Build a GIMPLE_RETURN equivalent to
3241      return;
3242    and verify various properties of it.  */
3243 
3244 static void
test_return_without_value()3245 test_return_without_value ()
3246 {
3247   greturn *stmt = gimple_build_return (NULL);
3248   verify_gimple_pp ("return;", stmt);
3249 
3250   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3251   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3252   ASSERT_EQ (NULL, gimple_return_retval (stmt));
3253   ASSERT_FALSE (gimple_assign_single_p (stmt));
3254 }
3255 
3256 /* Run all of the selftests within this file.  */
3257 
void
gimple_c_tests ()
{
  /* Each subtest builds a statement from scratch and checks its
     pretty-printed form and accessor behavior.  */
  test_assign_single ();
  test_assign_binop ();
  test_nop_stmt ();
  test_return_stmt ();
  test_return_without_value ();
}
3267 
3268 } // namespace selftest
3269 
3270 
3271 #endif /* CHECKING_P */
3272