1 /* Gimple IR support functions.
2 
3    Copyright (C) 2007-2021 Free Software Foundation, Inc.
4    Contributed by Aldy Hernandez <aldyh@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "diagnostic.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "stor-layout.h"
35 #include "internal-fn.h"
36 #include "tree-eh.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimplify.h"
40 #include "target.h"
41 #include "builtins.h"
42 #include "selftest.h"
43 #include "gimple-pretty-print.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 #include "langhooks.h"
48 #include "attr-fnspec.h"
49 #include "ipa-modref-tree.h"
50 #include "ipa-modref.h"
51 
52 
53 /* All the tuples have their operand vector (if present) at the very bottom
54    of the structure.  Therefore, the offset required to find the
55    operands vector the size of the structure minus the size of the 1
56    element tree array at the end (see gimple_ops).  */
/* Byte offset of the trailing operand vector within each GSS_* structure,
   or 0 for structures that carry no tree operands.  Indexed by
   gimple_statement_structure_enum via gsstruct.def.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Size in bytes of each GSS_* structure (including the one-element
   operand array at its end).  Indexed like gimple_ops_offset_.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Printable name for each GIMPLE statement code, indexed by gimple_code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Map from gimple_code to the GSS_* structure used to represent it.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
81 
82 /* Gimple stats.  */
83 
/* Number of tuples allocated per allocation kind (only maintained when
   GATHER_STATISTICS is enabled; see gimple_alloc).  */
uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
/* Total bytes allocated per allocation kind (same caveat as above).  */
uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Static gimple tuple members.  */
const enum gimple_code gassign::code_;
const enum gimple_code gcall::code_;
const enum gimple_code gcond::code_;
99 
100 
101 /* Gimple tuple constructors.
102    Note: Any constructor taking a ``gimple_seq'' as a parameter, can
103    be passed a NULL to start with an empty sequence.  */
104 
105 /* Set the code for statement G to CODE.  */
106 
static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  g->code = code;
}
112 
113 /* Return the number of bytes needed to hold a GIMPLE statement with
114    code CODE.  */
115 
116 size_t
gimple_size(enum gimple_code code,unsigned num_ops)117 gimple_size (enum gimple_code code, unsigned num_ops)
118 {
119   size_t size = gsstruct_code_size[gss_for_code (code)];
120   if (num_ops > 0)
121     size += (sizeof (tree) * (num_ops - 1));
122   return size;
123 }
124 
125 /* Initialize GIMPLE statement G with CODE and NUM_OPS.  */
126 
void
gimple_init (gimple *g, enum gimple_code code, unsigned num_ops)
{
  gimple_set_code (g, code);
  gimple_set_num_ops (g, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  g->modified = 1;
  gimple_init_singleton (g);
}
138 
139 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
140    operands.  */
141 
142 gimple *
gimple_alloc(enum gimple_code code,unsigned num_ops MEM_STAT_DECL)143 gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
144 {
145   size_t size;
146   gimple *stmt;
147 
148   size = gimple_size (code, num_ops);
149   if (GATHER_STATISTICS)
150     {
151       enum gimple_alloc_kind kind = gimple_alloc_kind (code);
152       gimple_alloc_counts[(int) kind]++;
153       gimple_alloc_sizes[(int) kind] += size;
154     }
155 
156   stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
157   gimple_init (stmt, code, num_ops);
158   return stmt;
159 }
160 
161 /* Set SUBCODE to be the code of the expression computed by statement G.  */
162 
static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
171 
172 
173 
174 /* Build a tuple with operands.  CODE is the statement to build (which
175    must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
176    for the new tuple.  NUM_OPS is the number of operands to allocate.  */
177 
178 #define gimple_build_with_ops(c, s, n) \
179   gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
180 
181 static gimple *
gimple_build_with_ops_stat(enum gimple_code code,unsigned subcode,unsigned num_ops MEM_STAT_DECL)182 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
183 		            unsigned num_ops MEM_STAT_DECL)
184 {
185   gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
186   gimple_set_subcode (s, subcode);
187 
188   return s;
189 }
190 
191 
192 /* Build a GIMPLE_RETURN statement returning RETVAL.  */
193 
194 greturn *
gimple_build_return(tree retval)195 gimple_build_return (tree retval)
196 {
197   greturn *s
198     = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
199 					       2));
200   if (retval)
201     gimple_return_set_retval (s, retval);
202   return s;
203 }
204 
205 /* Reset alias information on call S.  */
206 
207 void
gimple_call_reset_alias_info(gcall * s)208 gimple_call_reset_alias_info (gcall *s)
209 {
210   if (gimple_call_flags (s) & ECF_CONST)
211     memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
212   else
213     pt_solution_reset (gimple_call_use_set (s));
214   if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
215     memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
216   else
217     pt_solution_reset (gimple_call_clobber_set (s));
218 }
219 
220 /* Helper for gimple_build_call, gimple_build_call_valist,
221    gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
222    components of a GIMPLE_CALL statement to function FN with NARGS
223    arguments.  */
224 
225 static inline gcall *
gimple_build_call_1(tree fn,unsigned nargs)226 gimple_build_call_1 (tree fn, unsigned nargs)
227 {
228   gcall *s
229     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
230 					     nargs + 3));
231   if (TREE_CODE (fn) == FUNCTION_DECL)
232     fn = build_fold_addr_expr (fn);
233   gimple_set_op (s, 1, fn);
234   gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
235   gimple_call_reset_alias_info (s);
236   return s;
237 }
238 
239 
240 /* Build a GIMPLE_CALL statement to function FN with the arguments
241    specified in vector ARGS.  */
242 
243 gcall *
gimple_build_call_vec(tree fn,vec<tree> args)244 gimple_build_call_vec (tree fn, vec<tree> args)
245 {
246   unsigned i;
247   unsigned nargs = args.length ();
248   gcall *call = gimple_build_call_1 (fn, nargs);
249 
250   for (i = 0; i < nargs; i++)
251     gimple_call_set_arg (call, i, args[i]);
252 
253   return call;
254 }
255 
256 
257 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
258    arguments.  The ... are the arguments.  */
259 
260 gcall *
gimple_build_call(tree fn,unsigned nargs,...)261 gimple_build_call (tree fn, unsigned nargs, ...)
262 {
263   va_list ap;
264   gcall *call;
265   unsigned i;
266 
267   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
268 
269   call = gimple_build_call_1 (fn, nargs);
270 
271   va_start (ap, nargs);
272   for (i = 0; i < nargs; i++)
273     gimple_call_set_arg (call, i, va_arg (ap, tree));
274   va_end (ap);
275 
276   return call;
277 }
278 
279 
280 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
281    arguments.  AP contains the arguments.  */
282 
283 gcall *
gimple_build_call_valist(tree fn,unsigned nargs,va_list ap)284 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
285 {
286   gcall *call;
287   unsigned i;
288 
289   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
290 
291   call = gimple_build_call_1 (fn, nargs);
292 
293   for (i = 0; i < nargs; i++)
294     gimple_call_set_arg (call, i, va_arg (ap, tree));
295 
296   return call;
297 }
298 
299 
300 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
301    Build the basic components of a GIMPLE_CALL statement to internal
302    function FN with NARGS arguments.  */
303 
304 static inline gcall *
gimple_build_call_internal_1(enum internal_fn fn,unsigned nargs)305 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
306 {
307   gcall *s
308     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
309 					     nargs + 3));
310   s->subcode |= GF_CALL_INTERNAL;
311   gimple_call_set_internal_fn (s, fn);
312   gimple_call_reset_alias_info (s);
313   return s;
314 }
315 
316 
317 /* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
318    the number of arguments.  The ... are the arguments.  */
319 
320 gcall *
gimple_build_call_internal(enum internal_fn fn,unsigned nargs,...)321 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
322 {
323   va_list ap;
324   gcall *call;
325   unsigned i;
326 
327   call = gimple_build_call_internal_1 (fn, nargs);
328   va_start (ap, nargs);
329   for (i = 0; i < nargs; i++)
330     gimple_call_set_arg (call, i, va_arg (ap, tree));
331   va_end (ap);
332 
333   return call;
334 }
335 
336 
337 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
338    specified in vector ARGS.  */
339 
340 gcall *
gimple_build_call_internal_vec(enum internal_fn fn,vec<tree> args)341 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
342 {
343   unsigned i, nargs;
344   gcall *call;
345 
346   nargs = args.length ();
347   call = gimple_build_call_internal_1 (fn, nargs);
348   for (i = 0; i < nargs; i++)
349     gimple_call_set_arg (call, i, args[i]);
350 
351   return call;
352 }
353 
354 
355 /* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
356    assumed to be in GIMPLE form already.  Minimal checking is done of
357    this fact.  */
358 
gcall *
gimple_build_call_from_tree (tree t, tree fnptrtype)
{
  unsigned i, nargs;
  gcall *call;

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);

  /* Internal calls carry no CALL_EXPR_FN; for ordinary calls prefer the
     callee FUNCTION_DECL when it can be determined.  */
  tree fndecl = NULL_TREE;
  if (CALL_EXPR_FN (t) == NULL_TREE)
    call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs);
  else
    {
      fndecl = get_callee_fndecl (t);
      call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
    }

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));
  gimple_set_location (call, EXPR_LOCATION (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* Only one of the following three bits is copied; which one applies
     is decided from the callee (presumably because the underlying
     CALL_EXPR flag is overloaded — confirm against tree.h).  */
  if (fndecl
      && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else if (fndecl
	   && (DECL_IS_OPERATOR_NEW_P (fndecl)
	       || DECL_IS_OPERATOR_DELETE_P (fndecl)))
    gimple_call_set_from_new_or_delete (call, CALL_FROM_NEW_OR_DELETE_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  if (fnptrtype)
    {
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));

      /* Check if it's an indirect CALL and the type has the
 	 nocf_check attribute. In that case propagate the information
	 to the gimple CALL insn.  */
      if (!fndecl)
	{
	  gcc_assert (POINTER_TYPE_P (fnptrtype));
	  tree fntype = TREE_TYPE (fnptrtype);

	  if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
	    gimple_call_set_nocf_check (call, TRUE);
	}
    }

  return call;
}
423 
424 
425 /* Build a GIMPLE_ASSIGN statement.
426 
427    LHS of the assignment.
428    RHS of the assignment which can be unary or binary.  */
429 
gassign *
gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Decompose RHS into a subcode plus up to three operands, then defer
     to the explicit-operand overload.  */
  extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
439 
440 
441 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
442    OP1, OP2 and OP3.  */
443 
444 static inline gassign *
gimple_build_assign_1(tree lhs,enum tree_code subcode,tree op1,tree op2,tree op3 MEM_STAT_DECL)445 gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
446 		       tree op2, tree op3 MEM_STAT_DECL)
447 {
448   unsigned num_ops;
449   gassign *p;
450 
451   /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
452      code).  */
453   num_ops = get_gimple_rhs_num_ops (subcode) + 1;
454 
455   p = as_a <gassign *> (
456         gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
457 				    PASS_MEM_STAT));
458   gimple_assign_set_lhs (p, lhs);
459   gimple_assign_set_rhs1 (p, op1);
460   if (op2)
461     {
462       gcc_assert (num_ops > 2);
463       gimple_assign_set_rhs2 (p, op2);
464     }
465 
466   if (op3)
467     {
468       gcc_assert (num_ops > 3);
469       gimple_assign_set_rhs3 (p, op3);
470     }
471 
472   return p;
473 }
474 
475 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
476    OP1, OP2 and OP3.  */
477 
gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
484 
485 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
486    OP1 and OP2.  */
487 
gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
495 
496 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.  */
497 
gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
504 
505 
506 /* Build a GIMPLE_COND statement.
507 
508    PRED is the condition used to compare LHS and the RHS.
509    T_LABEL is the label to jump to if the condition is true.
510    F_LABEL is the label to jump to otherwise.  */
511 
512 gcond *
gimple_build_cond(enum tree_code pred_code,tree lhs,tree rhs,tree t_label,tree f_label)513 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
514 		   tree t_label, tree f_label)
515 {
516   gcond *p;
517 
518   gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
519   p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
520   gimple_cond_set_lhs (p, lhs);
521   gimple_cond_set_rhs (p, rhs);
522   gimple_cond_set_true_label (p, t_label);
523   gimple_cond_set_false_label (p, f_label);
524   return p;
525 }
526 
527 /* Build a GIMPLE_COND statement from the conditional expression tree
528    COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
529 
gcond *
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  /* Split COND into a comparison code and its two operands.  */
  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
539 
540 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
541    boolean expression tree COND.  */
542 
void
gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  /* Split COND into a comparison code and its two operands, then
     install them on the existing statement.  */
  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
552 
553 /* Build a GIMPLE_LABEL statement for LABEL.  */
554 
555 glabel *
gimple_build_label(tree label)556 gimple_build_label (tree label)
557 {
558   glabel *p
559     = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
560   gimple_label_set_label (p, label);
561   return p;
562 }
563 
564 /* Build a GIMPLE_GOTO statement to label DEST.  */
565 
566 ggoto *
gimple_build_goto(tree dest)567 gimple_build_goto (tree dest)
568 {
569   ggoto *p
570     = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
571   gimple_goto_set_dest (p, dest);
572   return p;
573 }
574 
575 
576 /* Build a GIMPLE_NOP statement.  */
577 
gimple *
gimple_build_nop (void)
{
  /* A no-op statement carries no operands.  */
  return gimple_alloc (GIMPLE_NOP, 0);
}
583 
584 
585 /* Build a GIMPLE_BIND statement.
586    VARS are the variables in BODY.
587    BLOCK is the containing block.  */
588 
589 gbind *
gimple_build_bind(tree vars,gimple_seq body,tree block)590 gimple_build_bind (tree vars, gimple_seq body, tree block)
591 {
592   gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
593   gimple_bind_set_vars (p, vars);
594   if (body)
595     gimple_bind_set_body (p, body);
596   if (block)
597     gimple_bind_set_block (p, block);
598   return p;
599 }
600 
601 /* Helper function to set the simple fields of a asm stmt.
602 
603    STRING is a pointer to a string that is the asm blocks assembly code.
604    NINPUT is the number of register inputs.
605    NOUTPUT is the number of register outputs.
606    NCLOBBERS is the number of clobbered registers.
607    */
608 
609 static inline gasm *
gimple_build_asm_1(const char * string,unsigned ninputs,unsigned noutputs,unsigned nclobbers,unsigned nlabels)610 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
611                     unsigned nclobbers, unsigned nlabels)
612 {
613   gasm *p;
614   int size = strlen (string);
615 
616   p = as_a <gasm *> (
617         gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
618 			       ninputs + noutputs + nclobbers + nlabels));
619 
620   p->ni = ninputs;
621   p->no = noutputs;
622   p->nc = nclobbers;
623   p->nl = nlabels;
624   p->string = ggc_alloc_string (string, size);
625 
626   if (GATHER_STATISTICS)
627     gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
628 
629   return p;
630 }
631 
632 /* Build a GIMPLE_ASM statement.
633 
634    STRING is the assembly code.
635    NINPUT is the number of register inputs.
636    NOUTPUT is the number of register outputs.
637    NCLOBBERS is the number of clobbered registers.
638    INPUTS is a vector of the input register parameters.
639    OUTPUTS is a vector of the output register parameters.
640    CLOBBERS is a vector of the clobbered register parameters.
641    LABELS is a vector of destination labels.  */
642 
643 gasm *
gimple_build_asm_vec(const char * string,vec<tree,va_gc> * inputs,vec<tree,va_gc> * outputs,vec<tree,va_gc> * clobbers,vec<tree,va_gc> * labels)644 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
645                       vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
646 		      vec<tree, va_gc> *labels)
647 {
648   gasm *p;
649   unsigned i;
650 
651   p = gimple_build_asm_1 (string,
652                           vec_safe_length (inputs),
653                           vec_safe_length (outputs),
654                           vec_safe_length (clobbers),
655 			  vec_safe_length (labels));
656 
657   for (i = 0; i < vec_safe_length (inputs); i++)
658     gimple_asm_set_input_op (p, i, (*inputs)[i]);
659 
660   for (i = 0; i < vec_safe_length (outputs); i++)
661     gimple_asm_set_output_op (p, i, (*outputs)[i]);
662 
663   for (i = 0; i < vec_safe_length (clobbers); i++)
664     gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
665 
666   for (i = 0; i < vec_safe_length (labels); i++)
667     gimple_asm_set_label_op (p, i, (*labels)[i]);
668 
669   return p;
670 }
671 
672 /* Build a GIMPLE_CATCH statement.
673 
674   TYPES are the catch types.
675   HANDLER is the exception handler.  */
676 
677 gcatch *
gimple_build_catch(tree types,gimple_seq handler)678 gimple_build_catch (tree types, gimple_seq handler)
679 {
680   gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
681   gimple_catch_set_types (p, types);
682   if (handler)
683     gimple_catch_set_handler (p, handler);
684 
685   return p;
686 }
687 
688 /* Build a GIMPLE_EH_FILTER statement.
689 
690    TYPES are the filter's types.
691    FAILURE is the filter's failure action.  */
692 
693 geh_filter *
gimple_build_eh_filter(tree types,gimple_seq failure)694 gimple_build_eh_filter (tree types, gimple_seq failure)
695 {
696   geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
697   gimple_eh_filter_set_types (p, types);
698   if (failure)
699     gimple_eh_filter_set_failure (p, failure);
700 
701   return p;
702 }
703 
704 /* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
705 
706 geh_mnt *
gimple_build_eh_must_not_throw(tree decl)707 gimple_build_eh_must_not_throw (tree decl)
708 {
709   geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
710 
711   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
712   gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
713   gimple_eh_must_not_throw_set_fndecl (p, decl);
714 
715   return p;
716 }
717 
718 /* Build a GIMPLE_EH_ELSE statement.  */
719 
720 geh_else *
gimple_build_eh_else(gimple_seq n_body,gimple_seq e_body)721 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
722 {
723   geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
724   gimple_eh_else_set_n_body (p, n_body);
725   gimple_eh_else_set_e_body (p, e_body);
726   return p;
727 }
728 
729 /* Build a GIMPLE_TRY statement.
730 
731    EVAL is the expression to evaluate.
732    CLEANUP is the cleanup expression.
733    KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
734    whether this is a try/catch or a try/finally respectively.  */
735 
736 gtry *
gimple_build_try(gimple_seq eval,gimple_seq cleanup,enum gimple_try_flags kind)737 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
738     		  enum gimple_try_flags kind)
739 {
740   gtry *p;
741 
742   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
743   p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
744   gimple_set_subcode (p, kind);
745   if (eval)
746     gimple_try_set_eval (p, eval);
747   if (cleanup)
748     gimple_try_set_cleanup (p, cleanup);
749 
750   return p;
751 }
752 
753 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
754 
755    CLEANUP is the cleanup expression.  */
756 
757 gimple *
gimple_build_wce(gimple_seq cleanup)758 gimple_build_wce (gimple_seq cleanup)
759 {
760   gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
761   if (cleanup)
762     gimple_wce_set_cleanup (p, cleanup);
763 
764   return p;
765 }
766 
767 
768 /* Build a GIMPLE_RESX statement.  */
769 
770 gresx *
gimple_build_resx(int region)771 gimple_build_resx (int region)
772 {
773   gresx *p
774     = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
775   p->region = region;
776   return p;
777 }
778 
779 
780 /* The helper for constructing a gimple switch statement.
781    INDEX is the switch's index.
782    NLABELS is the number of labels in the switch excluding the default.
783    DEFAULT_LABEL is the default label for the switch statement.  */
784 
785 gswitch *
gimple_build_switch_nlabels(unsigned nlabels,tree index,tree default_label)786 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
787 {
788   /* nlabels + 1 default label + 1 index.  */
789   gcc_checking_assert (default_label);
790   gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
791 							ERROR_MARK,
792 							1 + 1 + nlabels));
793   gimple_switch_set_index (p, index);
794   gimple_switch_set_default_label (p, default_label);
795   return p;
796 }
797 
798 /* Build a GIMPLE_SWITCH statement.
799 
800    INDEX is the switch's index.
801    DEFAULT_LABEL is the default label
802    ARGS is a vector of labels excluding the default.  */
803 
804 gswitch *
gimple_build_switch(tree index,tree default_label,vec<tree> args)805 gimple_build_switch (tree index, tree default_label, vec<tree> args)
806 {
807   unsigned i, nlabels = args.length ();
808 
809   gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
810 
811   /* Copy the labels from the vector to the switch statement.  */
812   for (i = 0; i < nlabels; i++)
813     gimple_switch_set_label (p, i + 1, args[i]);
814 
815   return p;
816 }
817 
818 /* Build a GIMPLE_EH_DISPATCH statement.  */
819 
820 geh_dispatch *
gimple_build_eh_dispatch(int region)821 gimple_build_eh_dispatch (int region)
822 {
823   geh_dispatch *p
824     = as_a <geh_dispatch *> (
825 	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
826   p->region = region;
827   return p;
828 }
829 
830 /* Build a new GIMPLE_DEBUG_BIND statement.
831 
832    VAR is bound to VALUE; block and location are taken from STMT.  */
833 
834 gdebug *
gimple_build_debug_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)835 gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
836 {
837   gdebug *p
838     = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
839 						   (unsigned)GIMPLE_DEBUG_BIND, 2
840 						   PASS_MEM_STAT));
841   gimple_debug_bind_set_var (p, var);
842   gimple_debug_bind_set_value (p, value);
843   if (stmt)
844     gimple_set_location (p, gimple_location (stmt));
845 
846   return p;
847 }
848 
849 
850 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
851 
852    VAR is bound to VALUE; block and location are taken from STMT.  */
853 
854 gdebug *
gimple_build_debug_source_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)855 gimple_build_debug_source_bind (tree var, tree value,
856 				     gimple *stmt MEM_STAT_DECL)
857 {
858   gdebug *p
859     = as_a <gdebug *> (
860         gimple_build_with_ops_stat (GIMPLE_DEBUG,
861 				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
862 				    PASS_MEM_STAT));
863 
864   gimple_debug_source_bind_set_var (p, var);
865   gimple_debug_source_bind_set_value (p, value);
866   if (stmt)
867     gimple_set_location (p, gimple_location (stmt));
868 
869   return p;
870 }
871 
872 
873 /* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
874    LOCATION.  */
875 
876 gdebug *
gimple_build_debug_begin_stmt(tree block,location_t location MEM_STAT_DECL)877 gimple_build_debug_begin_stmt (tree block, location_t location
878 				    MEM_STAT_DECL)
879 {
880   gdebug *p
881     = as_a <gdebug *> (
882         gimple_build_with_ops_stat (GIMPLE_DEBUG,
883 				    (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
884 				    PASS_MEM_STAT));
885 
886   gimple_set_location (p, location);
887   gimple_set_block (p, block);
888   cfun->debug_marker_count++;
889 
890   return p;
891 }
892 
893 
894 /* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
895    LOCATION.  The BLOCK links to the inlined function.  */
896 
897 gdebug *
gimple_build_debug_inline_entry(tree block,location_t location MEM_STAT_DECL)898 gimple_build_debug_inline_entry (tree block, location_t location
899 				      MEM_STAT_DECL)
900 {
901   gdebug *p
902     = as_a <gdebug *> (
903         gimple_build_with_ops_stat (GIMPLE_DEBUG,
904 				    (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
905 				    PASS_MEM_STAT));
906 
907   gimple_set_location (p, location);
908   gimple_set_block (p, block);
909   cfun->debug_marker_count++;
910 
911   return p;
912 }
913 
914 
915 /* Build a GIMPLE_OMP_CRITICAL statement.
916 
917    BODY is the sequence of statements for which only one thread can execute.
918    NAME is optional identifier for this critical block.
919    CLAUSES are clauses for this critical block.  */
920 
921 gomp_critical *
gimple_build_omp_critical(gimple_seq body,tree name,tree clauses)922 gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
923 {
924   gomp_critical *p
925     = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
926   gimple_omp_critical_set_name (p, name);
927   gimple_omp_critical_set_clauses (p, clauses);
928   if (body)
929     gimple_omp_set_body (p, body);
930 
931   return p;
932 }
933 
934 /* Build a GIMPLE_OMP_FOR statement.
935 
936    BODY is sequence of statements inside the for loop.
937    KIND is the `for' variant.
938    CLAUSES are any of the construct's clauses.
939    COLLAPSE is the collapse count.
940    PRE_BODY is the sequence of statements that are loop invariant.  */
941 
942 gomp_for *
gimple_build_omp_for(gimple_seq body,int kind,tree clauses,size_t collapse,gimple_seq pre_body)943 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
944 		      gimple_seq pre_body)
945 {
946   gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
947   if (body)
948     gimple_omp_set_body (p, body);
949   gimple_omp_for_set_clauses (p, clauses);
950   gimple_omp_for_set_kind (p, kind);
951   p->collapse = collapse;
952   p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);
953 
954   if (pre_body)
955     gimple_omp_for_set_pre_body (p, pre_body);
956 
957   return p;
958 }
959 
960 
961 /* Build a GIMPLE_OMP_PARALLEL statement.
962 
963    BODY is sequence of statements which are executed in parallel.
964    CLAUSES are the OMP parallel construct's clauses.
965    CHILD_FN is the function created for the parallel threads to execute.
966    DATA_ARG are the shared data argument(s).  */
967 
968 gomp_parallel *
gimple_build_omp_parallel(gimple_seq body,tree clauses,tree child_fn,tree data_arg)969 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
970 			   tree data_arg)
971 {
972   gomp_parallel *p
973     = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
974   if (body)
975     gimple_omp_set_body (p, body);
976   gimple_omp_parallel_set_clauses (p, clauses);
977   gimple_omp_parallel_set_child_fn (p, child_fn);
978   gimple_omp_parallel_set_data_arg (p, data_arg);
979 
980   return p;
981 }
982 
983 
984 /* Build a GIMPLE_OMP_TASK statement.
985 
986    BODY is sequence of statements which are executed by the explicit task.
987    CLAUSES are the OMP task construct's clauses.
988    CHILD_FN is the function created for the parallel threads to execute.
989    DATA_ARG are the shared data argument(s).
990    COPY_FN is the optional function for firstprivate initialization.
991    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
992 
993 gomp_task *
gimple_build_omp_task(gimple_seq body,tree clauses,tree child_fn,tree data_arg,tree copy_fn,tree arg_size,tree arg_align)994 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
995 		       tree data_arg, tree copy_fn, tree arg_size,
996 		       tree arg_align)
997 {
998   gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
999   if (body)
1000     gimple_omp_set_body (p, body);
1001   gimple_omp_task_set_clauses (p, clauses);
1002   gimple_omp_task_set_child_fn (p, child_fn);
1003   gimple_omp_task_set_data_arg (p, data_arg);
1004   gimple_omp_task_set_copy_fn (p, copy_fn);
1005   gimple_omp_task_set_arg_size (p, arg_size);
1006   gimple_omp_task_set_arg_align (p, arg_align);
1007 
1008   return p;
1009 }
1010 
1011 
1012 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
1013 
1014    BODY is the sequence of statements in the section.  */
1015 
1016 gimple *
gimple_build_omp_section(gimple_seq body)1017 gimple_build_omp_section (gimple_seq body)
1018 {
1019   gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1020   if (body)
1021     gimple_omp_set_body (p, body);
1022 
1023   return p;
1024 }
1025 
1026 
1027 /* Build a GIMPLE_OMP_MASTER statement.
1028 
1029    BODY is the sequence of statements to be executed by just the master.  */
1030 
1031 gimple *
gimple_build_omp_master(gimple_seq body)1032 gimple_build_omp_master (gimple_seq body)
1033 {
1034   gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1035   if (body)
1036     gimple_omp_set_body (p, body);
1037 
1038   return p;
1039 }
1040 
1041 /* Build a GIMPLE_OMP_TASKGROUP statement.
1042 
1043    BODY is the sequence of statements to be executed by the taskgroup
1044    construct.
1045    CLAUSES are any of the construct's clauses.  */
1046 
1047 gimple *
gimple_build_omp_taskgroup(gimple_seq body,tree clauses)1048 gimple_build_omp_taskgroup (gimple_seq body, tree clauses)
1049 {
1050   gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
1051   gimple_omp_taskgroup_set_clauses (p, clauses);
1052   if (body)
1053     gimple_omp_set_body (p, body);
1054 
1055   return p;
1056 }
1057 
1058 
1059 /* Build a GIMPLE_OMP_CONTINUE statement.
1060 
1061    CONTROL_DEF is the definition of the control variable.
1062    CONTROL_USE is the use of the control variable.  */
1063 
1064 gomp_continue *
gimple_build_omp_continue(tree control_def,tree control_use)1065 gimple_build_omp_continue (tree control_def, tree control_use)
1066 {
1067   gomp_continue *p
1068     = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
1069   gimple_omp_continue_set_control_def (p, control_def);
1070   gimple_omp_continue_set_control_use (p, control_use);
1071   return p;
1072 }
1073 
1074 /* Build a GIMPLE_OMP_ORDERED statement.
1075 
1076    BODY is the sequence of statements inside a loop that will executed in
1077    sequence.
1078    CLAUSES are clauses for this statement.  */
1079 
1080 gomp_ordered *
gimple_build_omp_ordered(gimple_seq body,tree clauses)1081 gimple_build_omp_ordered (gimple_seq body, tree clauses)
1082 {
1083   gomp_ordered *p
1084     = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
1085   gimple_omp_ordered_set_clauses (p, clauses);
1086   if (body)
1087     gimple_omp_set_body (p, body);
1088 
1089   return p;
1090 }
1091 
1092 
1093 /* Build a GIMPLE_OMP_RETURN statement.
1094    WAIT_P is true if this is a non-waiting return.  */
1095 
1096 gimple *
gimple_build_omp_return(bool wait_p)1097 gimple_build_omp_return (bool wait_p)
1098 {
1099   gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1100   if (wait_p)
1101     gimple_omp_return_set_nowait (p);
1102 
1103   return p;
1104 }
1105 
1106 
1107 /* Build a GIMPLE_OMP_SCAN statement.
1108 
1109    BODY is the sequence of statements to be executed by the scan
1110    construct.
1111    CLAUSES are any of the construct's clauses.  */
1112 
1113 gomp_scan *
gimple_build_omp_scan(gimple_seq body,tree clauses)1114 gimple_build_omp_scan (gimple_seq body, tree clauses)
1115 {
1116   gomp_scan *p
1117     = as_a <gomp_scan *> (gimple_alloc (GIMPLE_OMP_SCAN, 0));
1118   gimple_omp_scan_set_clauses (p, clauses);
1119   if (body)
1120     gimple_omp_set_body (p, body);
1121 
1122   return p;
1123 }
1124 
1125 
1126 /* Build a GIMPLE_OMP_SECTIONS statement.
1127 
1128    BODY is a sequence of section statements.
1129    CLAUSES are any of the OMP sections contsruct's clauses: private,
1130    firstprivate, lastprivate, reduction, and nowait.  */
1131 
1132 gomp_sections *
gimple_build_omp_sections(gimple_seq body,tree clauses)1133 gimple_build_omp_sections (gimple_seq body, tree clauses)
1134 {
1135   gomp_sections *p
1136     = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1137   if (body)
1138     gimple_omp_set_body (p, body);
1139   gimple_omp_sections_set_clauses (p, clauses);
1140 
1141   return p;
1142 }
1143 
1144 
1145 /* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1146 
1147 gimple *
gimple_build_omp_sections_switch(void)1148 gimple_build_omp_sections_switch (void)
1149 {
1150   return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1151 }
1152 
1153 
1154 /* Build a GIMPLE_OMP_SINGLE statement.
1155 
1156    BODY is the sequence of statements that will be executed once.
1157    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1158    copyprivate, nowait.  */
1159 
1160 gomp_single *
gimple_build_omp_single(gimple_seq body,tree clauses)1161 gimple_build_omp_single (gimple_seq body, tree clauses)
1162 {
1163   gomp_single *p
1164     = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1165   if (body)
1166     gimple_omp_set_body (p, body);
1167   gimple_omp_single_set_clauses (p, clauses);
1168 
1169   return p;
1170 }
1171 
1172 
1173 /* Build a GIMPLE_OMP_TARGET statement.
1174 
1175    BODY is the sequence of statements that will be executed.
1176    KIND is the kind of the region.
1177    CLAUSES are any of the construct's clauses.  */
1178 
1179 gomp_target *
gimple_build_omp_target(gimple_seq body,int kind,tree clauses)1180 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1181 {
1182   gomp_target *p
1183     = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1184   if (body)
1185     gimple_omp_set_body (p, body);
1186   gimple_omp_target_set_clauses (p, clauses);
1187   gimple_omp_target_set_kind (p, kind);
1188 
1189   return p;
1190 }
1191 
1192 
1193 /* Build a GIMPLE_OMP_TEAMS statement.
1194 
1195    BODY is the sequence of statements that will be executed.
1196    CLAUSES are any of the OMP teams construct's clauses.  */
1197 
1198 gomp_teams *
gimple_build_omp_teams(gimple_seq body,tree clauses)1199 gimple_build_omp_teams (gimple_seq body, tree clauses)
1200 {
1201   gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1202   if (body)
1203     gimple_omp_set_body (p, body);
1204   gimple_omp_teams_set_clauses (p, clauses);
1205 
1206   return p;
1207 }
1208 
1209 
1210 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1211 
1212 gomp_atomic_load *
gimple_build_omp_atomic_load(tree lhs,tree rhs,enum omp_memory_order mo)1213 gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo)
1214 {
1215   gomp_atomic_load *p
1216     = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1217   gimple_omp_atomic_load_set_lhs (p, lhs);
1218   gimple_omp_atomic_load_set_rhs (p, rhs);
1219   gimple_omp_atomic_set_memory_order (p, mo);
1220   return p;
1221 }
1222 
1223 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1224 
1225    VAL is the value we are storing.  */
1226 
1227 gomp_atomic_store *
gimple_build_omp_atomic_store(tree val,enum omp_memory_order mo)1228 gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo)
1229 {
1230   gomp_atomic_store *p
1231     = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1232   gimple_omp_atomic_store_set_val (p, val);
1233   gimple_omp_atomic_set_memory_order (p, mo);
1234   return p;
1235 }
1236 
1237 /* Build a GIMPLE_TRANSACTION statement.  */
1238 
1239 gtransaction *
gimple_build_transaction(gimple_seq body)1240 gimple_build_transaction (gimple_seq body)
1241 {
1242   gtransaction *p
1243     = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1244   gimple_transaction_set_body (p, body);
1245   gimple_transaction_set_label_norm (p, 0);
1246   gimple_transaction_set_label_uninst (p, 0);
1247   gimple_transaction_set_label_over (p, 0);
1248   return p;
1249 }
1250 
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.

   GS is the offending statement; FILE/LINE/FUNCTION identify the checker
   call site.  CODE and SUBCODE are the statement code and tree subcode the
   checker expected to see.  Never returns: internal_error aborts.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  /* A zero subcode carries no tree code; print it as "".  */
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1269 
1270 
1271 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1272    *SEQ_P is NULL, a new sequence is allocated.  */
1273 
1274 void
gimple_seq_add_stmt(gimple_seq * seq_p,gimple * gs)1275 gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1276 {
1277   gimple_stmt_iterator si;
1278   if (gs == NULL)
1279     return;
1280 
1281   si = gsi_last (*seq_p);
1282   gsi_insert_after (&si, gs, GSI_NEW_STMT);
1283 }
1284 
1285 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1286    *SEQ_P is NULL, a new sequence is allocated.  This function is
1287    similar to gimple_seq_add_stmt, but does not scan the operands.
1288    During gimplification, we need to manipulate statement sequences
1289    before the def/use vectors have been constructed.  */
1290 
1291 void
gimple_seq_add_stmt_without_update(gimple_seq * seq_p,gimple * gs)1292 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1293 {
1294   gimple_stmt_iterator si;
1295 
1296   if (gs == NULL)
1297     return;
1298 
1299   si = gsi_last (*seq_p);
1300   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1301 }
1302 
1303 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1304    NULL, a new sequence is allocated.  */
1305 
1306 void
gimple_seq_add_seq(gimple_seq * dst_p,gimple_seq src)1307 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1308 {
1309   gimple_stmt_iterator si;
1310   if (src == NULL)
1311     return;
1312 
1313   si = gsi_last (*dst_p);
1314   gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1315 }
1316 
1317 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1318    NULL, a new sequence is allocated.  This function is
1319    similar to gimple_seq_add_seq, but does not scan the operands.  */
1320 
1321 void
gimple_seq_add_seq_without_update(gimple_seq * dst_p,gimple_seq src)1322 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1323 {
1324   gimple_stmt_iterator si;
1325   if (src == NULL)
1326     return;
1327 
1328   si = gsi_last (*dst_p);
1329   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1330 }
1331 
1332 /* Determine whether to assign a location to the statement GS.  */
1333 
1334 static bool
should_carry_location_p(gimple * gs)1335 should_carry_location_p (gimple *gs)
1336 {
1337   /* Don't emit a line note for a label.  We particularly don't want to
1338      emit one for the break label, since it doesn't actually correspond
1339      to the beginning of the loop/switch.  */
1340   if (gimple_code (gs) == GIMPLE_LABEL)
1341     return false;
1342 
1343   return true;
1344 }
1345 
1346 /* Set the location for gimple statement GS to LOCATION.  */
1347 
1348 static void
annotate_one_with_location(gimple * gs,location_t location)1349 annotate_one_with_location (gimple *gs, location_t location)
1350 {
1351   if (!gimple_has_location (gs)
1352       && !gimple_do_not_emit_location_p (gs)
1353       && should_carry_location_p (gs))
1354     gimple_set_location (gs, location);
1355 }
1356 
1357 /* Set LOCATION for all the statements after iterator GSI in sequence
1358    SEQ.  If GSI is pointing to the end of the sequence, start with the
1359    first statement in SEQ.  */
1360 
1361 void
annotate_all_with_location_after(gimple_seq seq,gimple_stmt_iterator gsi,location_t location)1362 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1363 				  location_t location)
1364 {
1365   if (gsi_end_p (gsi))
1366     gsi = gsi_start (seq);
1367   else
1368     gsi_next (&gsi);
1369 
1370   for (; !gsi_end_p (gsi); gsi_next (&gsi))
1371     annotate_one_with_location (gsi_stmt (gsi), location);
1372 }
1373 
1374 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1375 
1376 void
annotate_all_with_location(gimple_seq stmt_p,location_t location)1377 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1378 {
1379   gimple_stmt_iterator i;
1380 
1381   if (gimple_seq_empty_p (stmt_p))
1382     return;
1383 
1384   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1385     {
1386       gimple *gs = gsi_stmt (i);
1387       annotate_one_with_location (gs, location);
1388     }
1389 }
1390 
1391 /* Helper function of empty_body_p.  Return true if STMT is an empty
1392    statement.  */
1393 
1394 static bool
empty_stmt_p(gimple * stmt)1395 empty_stmt_p (gimple *stmt)
1396 {
1397   if (gimple_code (stmt) == GIMPLE_NOP)
1398     return true;
1399   if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1400     return empty_body_p (gimple_bind_body (bind_stmt));
1401   return false;
1402 }
1403 
1404 
1405 /* Return true if BODY contains nothing but empty statements.  */
1406 
1407 bool
empty_body_p(gimple_seq body)1408 empty_body_p (gimple_seq body)
1409 {
1410   gimple_stmt_iterator i;
1411 
1412   if (gimple_seq_empty_p (body))
1413     return true;
1414   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1415     if (!empty_stmt_p (gsi_stmt (i))
1416 	&& !is_gimple_debug (gsi_stmt (i)))
1417       return false;
1418 
1419   return true;
1420 }
1421 
1422 
1423 /* Perform a deep copy of sequence SRC and return the result.  */
1424 
1425 gimple_seq
gimple_seq_copy(gimple_seq src)1426 gimple_seq_copy (gimple_seq src)
1427 {
1428   gimple_stmt_iterator gsi;
1429   gimple_seq new_seq = NULL;
1430   gimple *stmt;
1431 
1432   for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1433     {
1434       stmt = gimple_copy (gsi_stmt (gsi));
1435       gimple_seq_add_stmt (&new_seq, stmt);
1436     }
1437 
1438   return new_seq;
1439 }
1440 
1441 
1442 
1443 /* Return true if calls C1 and C2 are known to go to the same function.  */
1444 
1445 bool
gimple_call_same_target_p(const gimple * c1,const gimple * c2)1446 gimple_call_same_target_p (const gimple *c1, const gimple *c2)
1447 {
1448   if (gimple_call_internal_p (c1))
1449     return (gimple_call_internal_p (c2)
1450 	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
1451 	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
1452 		|| c1 == c2));
1453   else
1454     return (gimple_call_fn (c1) == gimple_call_fn (c2)
1455 	    || (gimple_call_fndecl (c1)
1456 		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1457 }
1458 
1459 /* Detect flags from a GIMPLE_CALL.  This is just like
1460    call_expr_flags, but for gimple tuples.  */
1461 
1462 int
gimple_call_flags(const gimple * stmt)1463 gimple_call_flags (const gimple *stmt)
1464 {
1465   int flags = 0;
1466 
1467   if (gimple_call_internal_p (stmt))
1468     flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1469   else
1470     {
1471       tree decl = gimple_call_fndecl (stmt);
1472       if (decl)
1473 	flags = flags_from_decl_or_type (decl);
1474       flags |= flags_from_decl_or_type (gimple_call_fntype (stmt));
1475     }
1476 
1477   if (stmt->subcode & GF_CALL_NOTHROW)
1478     flags |= ECF_NOTHROW;
1479 
1480   if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
1481     flags |= ECF_BY_DESCRIPTOR;
1482 
1483   return flags;
1484 }
1485 
/* Return the "fn spec" string for call STMT.

   The string is looked up, in priority order, from: the internal-fn
   fnspec table, an explicit "fn spec" attribute on the call's function
   type, the builtin fnspec table, and finally the special-cased
   replaceable operator new/delete.  Returns "" when nothing is known.  */

attr_fnspec
gimple_call_fnspec (const gcall *stmt)
{
  tree type, attr;

  if (gimple_call_internal_p (stmt))
    {
      const_tree spec = internal_fn_fnspec (gimple_call_internal_fn (stmt));
      if (spec)
	return spec;
      else
	return "";
    }

  type = gimple_call_fntype (stmt);
  if (type)
    {
      /* NOTE: the attribute name intentionally contains a space.  */
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
	return TREE_VALUE (TREE_VALUE (attr));
    }
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    return builtin_fnspec (gimple_call_fndecl (stmt));
  tree fndecl = gimple_call_fndecl (stmt);
  /* If the call is to a replaceable operator delete and results
     from a delete expression as opposed to a direct call to
     such operator, then we can treat it as free.  */
  if (fndecl
      && DECL_IS_OPERATOR_DELETE_P (fndecl)
      && DECL_IS_REPLACEABLE_OPERATOR (fndecl)
      && gimple_call_from_new_or_delete (stmt))
    return ".co ";
  /* Similarly operator new can be treated as malloc.  */
  if (fndecl
      && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fndecl)
      && gimple_call_from_new_or_delete (stmt))
    return "mC";
  return "";
}
1527 
/* Detects argument flags for argument number ARG on call STMT.

   Flags derived from the callee's fnspec string are combined (OR'ed)
   with flags from the IPA modref summary of the callee, when one is
   available.  */

int
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
  attr_fnspec fnspec = gimple_call_fnspec (stmt);
  int flags = 0;

  if (fnspec.known_p ())
    {
      if (!fnspec.arg_specified_p (arg))
	;
      else if (!fnspec.arg_used_p (arg))
	flags = EAF_UNUSED;
      else
	{
	  if (fnspec.arg_direct_p (arg))
	    flags |= EAF_DIRECT;
	  if (fnspec.arg_noescape_p (arg))
	    flags |= EAF_NOESCAPE | EAF_NODIRECTESCAPE;
	  if (fnspec.arg_readonly_p (arg))
	    flags |= EAF_NOCLOBBER;
	}
    }
  tree callee = gimple_call_fndecl (stmt);
  if (callee)
    {
      cgraph_node *node = cgraph_node::get (callee);
      modref_summary *summary = node ? get_modref_function_summary (node)
				: NULL;

      if (summary && summary->arg_flags.length () > arg)
	{
	  int modref_flags = summary->arg_flags[arg];

	  /* We have possibly optimized out load.  Be conservative here.
	     If the definition may be interposed, drop modref-only
	     EAF_UNUSED/EAF_DIRECT bits that the fnspec did not confirm.  */
	  if (!node->binds_to_current_def_p ())
	    {
	      if ((modref_flags & EAF_UNUSED) && !(flags & EAF_UNUSED))
		modref_flags &= ~EAF_UNUSED;
	      if ((modref_flags & EAF_DIRECT) && !(flags & EAF_DIRECT))
		modref_flags &= ~EAF_DIRECT;
	    }
	  flags |= modref_flags;
	}
    }
  return flags;
}
1576 
1577 /* Detects return flags for the call STMT.  */
1578 
1579 int
gimple_call_return_flags(const gcall * stmt)1580 gimple_call_return_flags (const gcall *stmt)
1581 {
1582   if (gimple_call_flags (stmt) & ECF_MALLOC)
1583     return ERF_NOALIAS;
1584 
1585   attr_fnspec fnspec = gimple_call_fnspec (stmt);
1586 
1587   unsigned int arg_no;
1588   if (fnspec.returns_arg (&arg_no))
1589     return ERF_RETURNS_ARG | arg_no;
1590 
1591   if (fnspec.returns_noalias_p ())
1592     return ERF_NOALIAS;
1593   return 0;
1594 }
1595 
1596 
/* Return true if call CALL is known to return a non-zero result.

   This is the case for throwing operator new (under the usual flags),
   functions returning a C++ reference, callees with the
   "returns_nonnull" attribute, and alloca-like calls.  */

bool
gimple_call_nonnull_result_p (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  /* A throwing operator new reports failure by exception, never by
     returning NULL -- unless -fcheck-new or null-pointer checks are
     disabled.  */
  if (flag_delete_null_pointer_checks && !flag_check_new
      && DECL_IS_OPERATOR_NEW_P (fndecl)
      && !TREE_NOTHROW (fndecl))
    return true;

  /* References are always non-NULL.  */
  if (flag_delete_null_pointer_checks
      && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE)
    return true;

  if (flag_delete_null_pointer_checks
      && lookup_attribute ("returns_nonnull",
			   TYPE_ATTRIBUTES (gimple_call_fntype (call))))
    return true;
  return gimple_alloca_call_p (call);
}
1621 
1622 
1623 /* If CALL returns a non-null result in an argument, return that arg.  */
1624 
1625 tree
gimple_call_nonnull_arg(gcall * call)1626 gimple_call_nonnull_arg (gcall *call)
1627 {
1628   tree fndecl = gimple_call_fndecl (call);
1629   if (!fndecl)
1630     return NULL_TREE;
1631 
1632   unsigned rf = gimple_call_return_flags (call);
1633   if (rf & ERF_RETURNS_ARG)
1634     {
1635       unsigned argnum = rf & ERF_RETURN_ARG_MASK;
1636       if (argnum < gimple_call_num_args (call))
1637 	{
1638 	  tree arg = gimple_call_arg (call, argnum);
1639 	  if (SSA_VAR_P (arg)
1640 	      && infer_nonnull_range_by_attribute (call, arg))
1641 	    return arg;
1642 	}
1643     }
1644   return NULL_TREE;
1645 }
1646 
1647 
1648 /* Return true if GS is a copy assignment.  */
1649 
1650 bool
gimple_assign_copy_p(gimple * gs)1651 gimple_assign_copy_p (gimple *gs)
1652 {
1653   return (gimple_assign_single_p (gs)
1654 	  && is_gimple_val (gimple_op (gs, 1)));
1655 }
1656 
1657 
1658 /* Return true if GS is a SSA_NAME copy assignment.  */
1659 
1660 bool
gimple_assign_ssa_name_copy_p(gimple * gs)1661 gimple_assign_ssa_name_copy_p (gimple *gs)
1662 {
1663   return (gimple_assign_single_p (gs)
1664 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1665 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1666 }
1667 
1668 
1669 /* Return true if GS is an assignment with a unary RHS, but the
1670    operator has no effect on the assigned value.  The logic is adapted
1671    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1672    instances in which STRIP_NOPS was previously applied to the RHS of
1673    an assignment.
1674 
1675    NOTE: In the use cases that led to the creation of this function
1676    and of gimple_assign_single_p, it is typical to test for either
1677    condition and to proceed in the same manner.  In each case, the
1678    assigned value is represented by the single RHS operand of the
1679    assignment.  I suspect there may be cases where gimple_assign_copy_p,
1680    gimple_assign_single_p, or equivalent logic is used where a similar
1681    treatment of unary NOPs is appropriate.  */
1682 
1683 bool
gimple_assign_unary_nop_p(gimple * gs)1684 gimple_assign_unary_nop_p (gimple *gs)
1685 {
1686   return (is_gimple_assign (gs)
1687           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1688               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1689           && gimple_assign_rhs1 (gs) != error_mark_node
1690           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1691               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1692 }
1693 
/* Set BB to be the basic block holding STMT.

   For labels, additionally record the label in the CFG's
   label-to-block map (assigning a fresh LABEL_DECL_UID if the label
   has none yet), so that edge creation for GIMPLE_GOTOs is fast.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is mapped: allocate a UID and grow
	     the map if needed.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    vec_safe_grow_cleared (label_to_block_map_for_fn (cfun), uid + 1);
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1725 
1726 
1727 /* Modify the RHS of the assignment pointed-to by GSI using the
1728    operands in the expression tree EXPR.
1729 
1730    NOTE: The statement pointed-to by GSI may be reallocated if it
1731    did not have enough operand slots.
1732 
1733    This function is useful to convert an existing tree expression into
1734    the flat representation used for the RHS of a GIMPLE assignment.
1735    It will reallocate memory as needed to expand or shrink the number
1736    of operand slots needed to represent EXPR.
1737 
1738    NOTE: If you find yourself building a tree and then calling this
1739    function, you are most certainly doing it the slow way.  It is much
1740    better to build a new assignment or to use the function
1741    gimple_assign_set_rhs_with_ops, which does not require an
1742    expression tree to be built.  */
1743 
1744 void
gimple_assign_set_rhs_from_tree(gimple_stmt_iterator * gsi,tree expr)1745 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1746 {
1747   enum tree_code subcode;
1748   tree op1, op2, op3;
1749 
1750   extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1751   gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1752 }
1753 
1754 
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple *stmt = gsi_stmt (*gsi);
  gimple *old_stmt = stmt;

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (old_stmt);
      /* Allocate a larger tuple and bitwise-copy the old one's fields
	 into it; the operand array beyond the copied size stays
	 uninitialized until the setters below fill it.  */
      stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1);
      memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt)));
      gimple_init_singleton (stmt);

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
  /* Splice the (possibly new) statement into the sequence in place of
     the old one.  */
  if (stmt != old_stmt)
    gsi_replace (gsi, stmt, false);
}
1792 
1793 
1794 /* Return the LHS of a statement that performs an assignment,
1795    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1796    for a call to a function that returns no value, or for a
1797    statement other than an assignment or a call.  */
1798 
1799 tree
gimple_get_lhs(const gimple * stmt)1800 gimple_get_lhs (const gimple *stmt)
1801 {
1802   enum gimple_code code = gimple_code (stmt);
1803 
1804   if (code == GIMPLE_ASSIGN)
1805     return gimple_assign_lhs (stmt);
1806   else if (code == GIMPLE_CALL)
1807     return gimple_call_lhs (stmt);
1808   else if (code == GIMPLE_PHI)
1809     return gimple_phi_result (stmt);
1810   else
1811     return NULL_TREE;
1812 }
1813 
1814 
1815 /* Set the LHS of a statement that performs an assignment,
1816    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1817 
1818 void
gimple_set_lhs(gimple * stmt,tree lhs)1819 gimple_set_lhs (gimple *stmt, tree lhs)
1820 {
1821   enum gimple_code code = gimple_code (stmt);
1822 
1823   if (code == GIMPLE_ASSIGN)
1824     gimple_assign_set_lhs (stmt, lhs);
1825   else if (code == GIMPLE_CALL)
1826     gimple_call_set_lhs (stmt, lhs);
1827   else
1828     gcc_unreachable ();
1829 }
1830 
1831 
1832 /* Return a deep copy of statement STMT.  All the operands from STMT
1833    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
1834    and VUSE operand arrays are set to empty in the new copy.  The new
1835    copy isn't part of any sequence.  */
1836 
1837 gimple *
gimple_copy(gimple * stmt)1838 gimple_copy (gimple *stmt)
1839 {
1840   enum gimple_code code = gimple_code (stmt);
1841   unsigned num_ops = gimple_num_ops (stmt);
1842   gimple *copy = gimple_alloc (code, num_ops);
1843   unsigned i;
1844 
1845   /* Shallow copy all the fields from STMT.  */
1846   memcpy (copy, stmt, gimple_size (code));
1847   gimple_init_singleton (copy);
1848 
1849   /* If STMT has sub-statements, deep-copy them as well.  */
1850   if (gimple_has_substatements (stmt))
1851     {
1852       gimple_seq new_seq;
1853       tree t;
1854 
1855       switch (gimple_code (stmt))
1856 	{
1857 	case GIMPLE_BIND:
1858 	  {
1859 	    gbind *bind_stmt = as_a <gbind *> (stmt);
1860 	    gbind *bind_copy = as_a <gbind *> (copy);
1861 	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
1862 	    gimple_bind_set_body (bind_copy, new_seq);
1863 	    gimple_bind_set_vars (bind_copy,
1864 				  unshare_expr (gimple_bind_vars (bind_stmt)));
1865 	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
1866 	  }
1867 	  break;
1868 
1869 	case GIMPLE_CATCH:
1870 	  {
1871 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1872 	    gcatch *catch_copy = as_a <gcatch *> (copy);
1873 	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
1874 	    gimple_catch_set_handler (catch_copy, new_seq);
1875 	    t = unshare_expr (gimple_catch_types (catch_stmt));
1876 	    gimple_catch_set_types (catch_copy, t);
1877 	  }
1878 	  break;
1879 
1880 	case GIMPLE_EH_FILTER:
1881 	  {
1882 	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
1883 	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
1884 	    new_seq
1885 	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
1886 	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
1887 	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
1888 	    gimple_eh_filter_set_types (eh_filter_copy, t);
1889 	  }
1890 	  break;
1891 
1892 	case GIMPLE_EH_ELSE:
1893 	  {
1894 	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
1895 	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
1896 	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
1897 	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
1898 	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
1899 	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
1900 	  }
1901 	  break;
1902 
1903 	case GIMPLE_TRY:
1904 	  {
1905 	    gtry *try_stmt = as_a <gtry *> (stmt);
1906 	    gtry *try_copy = as_a <gtry *> (copy);
1907 	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
1908 	    gimple_try_set_eval (try_copy, new_seq);
1909 	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
1910 	    gimple_try_set_cleanup (try_copy, new_seq);
1911 	  }
1912 	  break;
1913 
1914 	case GIMPLE_OMP_FOR:
1915 	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
1916 	  gimple_omp_for_set_pre_body (copy, new_seq);
1917 	  t = unshare_expr (gimple_omp_for_clauses (stmt));
1918 	  gimple_omp_for_set_clauses (copy, t);
1919 	  {
1920 	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
1921 	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
1922 	      ( gimple_omp_for_collapse (stmt));
1923           }
1924 	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1925 	    {
1926 	      gimple_omp_for_set_cond (copy, i,
1927 				       gimple_omp_for_cond (stmt, i));
1928 	      gimple_omp_for_set_index (copy, i,
1929 					gimple_omp_for_index (stmt, i));
1930 	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
1931 	      gimple_omp_for_set_initial (copy, i, t);
1932 	      t = unshare_expr (gimple_omp_for_final (stmt, i));
1933 	      gimple_omp_for_set_final (copy, i, t);
1934 	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
1935 	      gimple_omp_for_set_incr (copy, i, t);
1936 	    }
1937 	  goto copy_omp_body;
1938 
1939 	case GIMPLE_OMP_PARALLEL:
1940 	  {
1941 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1942 	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
1943 	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
1944 	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
1945 	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
1946 	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
1947 	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
1948 	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
1949 	  }
1950 	  goto copy_omp_body;
1951 
1952 	case GIMPLE_OMP_TASK:
1953 	  t = unshare_expr (gimple_omp_task_clauses (stmt));
1954 	  gimple_omp_task_set_clauses (copy, t);
1955 	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
1956 	  gimple_omp_task_set_child_fn (copy, t);
1957 	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
1958 	  gimple_omp_task_set_data_arg (copy, t);
1959 	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
1960 	  gimple_omp_task_set_copy_fn (copy, t);
1961 	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
1962 	  gimple_omp_task_set_arg_size (copy, t);
1963 	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
1964 	  gimple_omp_task_set_arg_align (copy, t);
1965 	  goto copy_omp_body;
1966 
1967 	case GIMPLE_OMP_CRITICAL:
1968 	  t = unshare_expr (gimple_omp_critical_name
1969 				(as_a <gomp_critical *> (stmt)));
1970 	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
1971 	  t = unshare_expr (gimple_omp_critical_clauses
1972 				(as_a <gomp_critical *> (stmt)));
1973 	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
1974 	  goto copy_omp_body;
1975 
1976 	case GIMPLE_OMP_ORDERED:
1977 	  t = unshare_expr (gimple_omp_ordered_clauses
1978 				(as_a <gomp_ordered *> (stmt)));
1979 	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
1980 	  goto copy_omp_body;
1981 
1982 	case GIMPLE_OMP_SCAN:
1983 	  t = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt));
1984 	  t = unshare_expr (t);
1985 	  gimple_omp_scan_set_clauses (as_a <gomp_scan *> (copy), t);
1986 	  goto copy_omp_body;
1987 
1988 	case GIMPLE_OMP_TASKGROUP:
1989 	  t = unshare_expr (gimple_omp_taskgroup_clauses (stmt));
1990 	  gimple_omp_taskgroup_set_clauses (copy, t);
1991 	  goto copy_omp_body;
1992 
1993 	case GIMPLE_OMP_SECTIONS:
1994 	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
1995 	  gimple_omp_sections_set_clauses (copy, t);
1996 	  t = unshare_expr (gimple_omp_sections_control (stmt));
1997 	  gimple_omp_sections_set_control (copy, t);
1998 	  goto copy_omp_body;
1999 
2000 	case GIMPLE_OMP_SINGLE:
2001 	  {
2002 	    gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
2003 	    t = unshare_expr (gimple_omp_single_clauses (stmt));
2004 	    gimple_omp_single_set_clauses (omp_single_copy, t);
2005 	  }
2006 	  goto copy_omp_body;
2007 
2008 	case GIMPLE_OMP_TARGET:
2009 	  {
2010 	    gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
2011 	    gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
2012 	    t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
2013 	    gimple_omp_target_set_clauses (omp_target_copy, t);
2014 	    t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
2015 	    gimple_omp_target_set_data_arg (omp_target_copy, t);
2016 	  }
2017 	  goto copy_omp_body;
2018 
2019 	case GIMPLE_OMP_TEAMS:
2020 	  {
2021 	    gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
2022 	    t = unshare_expr (gimple_omp_teams_clauses (stmt));
2023 	    gimple_omp_teams_set_clauses (omp_teams_copy, t);
2024 	  }
2025 	  /* FALLTHRU  */
2026 
2027 	case GIMPLE_OMP_SECTION:
2028 	case GIMPLE_OMP_MASTER:
2029 	copy_omp_body:
2030 	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2031 	  gimple_omp_set_body (copy, new_seq);
2032 	  break;
2033 
2034 	case GIMPLE_TRANSACTION:
2035 	  new_seq = gimple_seq_copy (gimple_transaction_body (
2036 				       as_a <gtransaction *> (stmt)));
2037 	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
2038 				       new_seq);
2039 	  break;
2040 
2041 	case GIMPLE_WITH_CLEANUP_EXPR:
2042 	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2043 	  gimple_wce_set_cleanup (copy, new_seq);
2044 	  break;
2045 
2046 	default:
2047 	  gcc_unreachable ();
2048 	}
2049     }
2050 
2051   /* Make copy of operands.  */
2052   for (i = 0; i < num_ops; i++)
2053     gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2054 
2055   if (gimple_has_mem_ops (stmt))
2056     {
2057       gimple_set_vdef (copy, gimple_vdef (stmt));
2058       gimple_set_vuse (copy, gimple_vuse (stmt));
2059     }
2060 
2061   /* Clear out SSA operand vectors on COPY.  */
2062   if (gimple_has_ops (stmt))
2063     {
2064       gimple_set_use_ops (copy, NULL);
2065 
2066       /* SSA operands need to be updated.  */
2067       gimple_set_modified (copy, true);
2068     }
2069 
2070   if (gimple_debug_nonbind_marker_p (stmt))
2071     cfun->debug_marker_count++;
2072 
2073   return copy;
2074 }
2075 
2076 /* Move OLD_STMT's vuse and vdef operands to NEW_STMT, on the assumption
2077    that OLD_STMT is about to be removed.  */
2078 
2079 void
gimple_move_vops(gimple * new_stmt,gimple * old_stmt)2080 gimple_move_vops (gimple *new_stmt, gimple *old_stmt)
2081 {
2082   tree vdef = gimple_vdef (old_stmt);
2083   gimple_set_vuse (new_stmt, gimple_vuse (old_stmt));
2084   gimple_set_vdef (new_stmt, vdef);
2085   if (vdef && TREE_CODE (vdef) == SSA_NAME)
2086     SSA_NAME_DEF_STMT (vdef) = new_stmt;
2087 }
2088 
2089 /* Return true if statement S has side-effects.  We consider a
2090    statement to have side effects if:
2091 
2092    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2093    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
2094 
2095 bool
gimple_has_side_effects(const gimple * s)2096 gimple_has_side_effects (const gimple *s)
2097 {
2098   if (is_gimple_debug (s))
2099     return false;
2100 
2101   /* We don't have to scan the arguments to check for
2102      volatile arguments, though, at present, we still
2103      do a scan to check for TREE_SIDE_EFFECTS.  */
2104   if (gimple_has_volatile_ops (s))
2105     return true;
2106 
2107   if (gimple_code (s) == GIMPLE_ASM
2108       && gimple_asm_volatile_p (as_a <const gasm *> (s)))
2109     return true;
2110 
2111   if (is_gimple_call (s))
2112     {
2113       int flags = gimple_call_flags (s);
2114 
2115       /* An infinite loop is considered a side effect.  */
2116       if (!(flags & (ECF_CONST | ECF_PURE))
2117 	  || (flags & ECF_LOOPING_CONST_OR_PURE))
2118 	return true;
2119 
2120       return false;
2121     }
2122 
2123   return false;
2124 }
2125 
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For a GIMPLE_ASSIGN, operand 0 is the LHS (store); skip it
	 unless stores are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* Only volatile asms are treated as possibly trapping.  */
      return gimple_asm_volatile_p (as_a <gasm *> (s));

    case GIMPLE_CALL:
      /* Internal function calls are treated as non-trapping here.  */
      if (gimple_call_internal_p (s))
	return false;
      t = gimple_call_fndecl (s);
      /* Assume that indirect and calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      op = gimple_assign_rhs_code (s);

      /* For COND_EXPR and VEC_COND_EXPR only the condition may trap.  */
      if (op == COND_EXPR || op == VEC_COND_EXPR)
	return tree_could_trap_p (gimple_assign_rhs1 (s));

      /* For comparisons we need to check rhs operand types instead of rhs type
         (which is BOOLEAN_TYPE).  */
      if (TREE_CODE_CLASS (op) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (s));
      else
	t = gimple_expr_type (s);

      /* For binary operations pass the divisor so division/modulo by a
	 known-nonzero constant can be recognized as non-trapping.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);

      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      /* A condition may trap only through its comparison operation;
	 use the LHS operand type since the result is boolean.  */
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
2193 
2194 /* Return true if statement S can trap.  */
2195 
2196 bool
gimple_could_trap_p(gimple * s)2197 gimple_could_trap_p (gimple *s)
2198 {
2199   return gimple_could_trap_p_1 (s, true, true);
2200 }
2201 
2202 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2203 
2204 bool
gimple_assign_rhs_could_trap_p(gimple * s)2205 gimple_assign_rhs_could_trap_p (gimple *s)
2206 {
2207   gcc_assert (is_gimple_assign (s));
2208   return gimple_could_trap_p_1 (s, true, false);
2209 }
2210 
2211 
2212 /* Print debugging information for gimple stmts generated.  */
2213 
2214 void
dump_gimple_statistics(void)2215 dump_gimple_statistics (void)
2216 {
2217   int i;
2218   uint64_t total_tuples = 0, total_bytes = 0;
2219 
2220   if (! GATHER_STATISTICS)
2221     {
2222       fprintf (stderr, "No GIMPLE statistics\n");
2223       return;
2224     }
2225 
2226   fprintf (stderr, "\nGIMPLE statements\n");
2227   fprintf (stderr, "Kind                   Stmts      Bytes\n");
2228   fprintf (stderr, "---------------------------------------\n");
2229   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2230     {
2231       fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n",
2232 	       gimple_alloc_kind_names[i],
2233 	       SIZE_AMOUNT (gimple_alloc_counts[i]),
2234 	       SIZE_AMOUNT (gimple_alloc_sizes[i]));
2235       total_tuples += gimple_alloc_counts[i];
2236       total_bytes += gimple_alloc_sizes[i];
2237     }
2238   fprintf (stderr, "---------------------------------------\n");
2239   fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total",
2240 	   SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes));
2241   fprintf (stderr, "---------------------------------------\n");
2242 }
2243 
2244 
2245 /* Return the number of operands needed on the RHS of a GIMPLE
2246    assignment for an expression with tree code CODE.  */
2247 
2248 unsigned
get_gimple_rhs_num_ops(enum tree_code code)2249 get_gimple_rhs_num_ops (enum tree_code code)
2250 {
2251   switch (get_gimple_rhs_class (code))
2252     {
2253     case GIMPLE_UNARY_RHS:
2254     case GIMPLE_SINGLE_RHS:
2255       return 1;
2256     case GIMPLE_BINARY_RHS:
2257       return 2;
2258     case GIMPLE_TERNARY_RHS:
2259       return 3;
2260     default:
2261       gcc_unreachable ();
2262     }
2263 }
2264 
/* Classify each tree code into the gimple_rhs_class of the RHS it can
   appear as in a GIMPLE_ASSIGN: single tree, unary, binary or ternary
   operation — or GIMPLE_INVALID_RHS for codes that cannot appear on an
   assignment RHS at all.  The classification is keyed first on the
   tree-code class (TYPE), then on specific codes (SYM) that need
   special treatment.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

/* Table mapping tree code -> gimple_rhs_class, indexed by tree code;
   built by expanding the macro above over every code in all-tree.def.  */
const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2301 
2302 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2303    a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2304    we failed to create one.  */
2305 
2306 tree
canonicalize_cond_expr_cond(tree t)2307 canonicalize_cond_expr_cond (tree t)
2308 {
2309   /* Strip conversions around boolean operations.  */
2310   if (CONVERT_EXPR_P (t)
2311       && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2312           || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2313 	     == BOOLEAN_TYPE))
2314     t = TREE_OPERAND (t, 0);
2315 
2316   /* For !x use x == 0.  */
2317   if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2318     {
2319       tree top0 = TREE_OPERAND (t, 0);
2320       t = build2 (EQ_EXPR, TREE_TYPE (t),
2321 		  top0, build_int_cst (TREE_TYPE (top0), 0));
2322     }
2323   /* For cmp ? 1 : 0 use cmp.  */
2324   else if (TREE_CODE (t) == COND_EXPR
2325 	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2326 	   && integer_onep (TREE_OPERAND (t, 1))
2327 	   && integer_zerop (TREE_OPERAND (t, 2)))
2328     {
2329       tree top0 = TREE_OPERAND (t, 0);
2330       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2331 		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2332     }
2333   /* For x ^ y use x != y.  */
2334   else if (TREE_CODE (t) == BIT_XOR_EXPR)
2335     t = build2 (NE_EXPR, TREE_TYPE (t),
2336 		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2337 
2338   if (is_gimple_condexpr (t))
2339     return t;
2340 
2341   return NULL_TREE;
2342 }
2343 
2344 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2345    the positions marked by the set ARGS_TO_SKIP.  */
2346 
2347 gcall *
gimple_call_copy_skip_args(gcall * stmt,bitmap args_to_skip)2348 gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2349 {
2350   int i;
2351   int nargs = gimple_call_num_args (stmt);
2352   auto_vec<tree> vargs (nargs);
2353   gcall *new_stmt;
2354 
2355   for (i = 0; i < nargs; i++)
2356     if (!bitmap_bit_p (args_to_skip, i))
2357       vargs.quick_push (gimple_call_arg (stmt, i));
2358 
2359   if (gimple_call_internal_p (stmt))
2360     new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2361 					       vargs);
2362   else
2363     new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2364 
2365   if (gimple_call_lhs (stmt))
2366     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2367 
2368   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2369   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2370 
2371   if (gimple_has_location (stmt))
2372     gimple_set_location (new_stmt, gimple_location (stmt));
2373   gimple_call_copy_flags (new_stmt, stmt);
2374   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2375 
2376   gimple_set_modified (new_stmt, true);
2377 
2378   return new_stmt;
2379 }
2380 
2381 
2382 
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      /* Byte offsets match when pointer-identical, both placeholders
	 with compatible size/alignment, or structurally equal; in all
	 cases the bit offsets must also agree exactly.  */
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      /* Fold the whole bytes of each bit offset into the byte offset so
	 the comparison is independent of the chosen DECL_OFFSET_ALIGN.  */
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Offsets that are neither both constant nor comparable above are
     conservatively treated as different.  */
  return false;
}
2433 
2434 
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   First try to match TYPE's main variant against the standard C
   integer types, the extended __intN types, the exact-mode intXX
   types and the fixed-point types; failing that, fall back to a
   mode/precision match and finally build a nonstandard integer
   type of the same precision.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* Compare against main variants so qualified versions match too.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Extended __intN types (e.g. __int128) when enabled for the target.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Exact-mode integer types (intQI..intTI).  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros matching fixed-point types against their
     signed/unsigned counterparts; expanded below for each family.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match on mode and precision rather than identity.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard type matched; synthesize one of the right precision.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2611 
2612 
2613 /* Return an unsigned type the same as TYPE in other respects.  */
2614 
2615 tree
gimple_unsigned_type(tree type)2616 gimple_unsigned_type (tree type)
2617 {
2618   return gimple_signed_or_unsigned_type (true, type);
2619 }
2620 
2621 
2622 /* Return a signed type the same as TYPE in other respects.  */
2623 
2624 tree
gimple_signed_type(tree type)2625 gimple_signed_type (tree type)
2626 {
2627   return gimple_signed_or_unsigned_type (false, type);
2628 }
2629 
2630 
2631 /* Return the typed-based alias set for T, which may be an expression
2632    or a type.  Return -1 if we don't do anything special.  */
2633 
2634 alias_set_type
gimple_get_alias_set(tree t)2635 gimple_get_alias_set (tree t)
2636 {
2637   /* That's all the expressions we handle specially.  */
2638   if (!TYPE_P (t))
2639     return -1;
2640 
2641   /* For convenience, follow the C standard when dealing with
2642      character types.  Any object may be accessed via an lvalue that
2643      has character type.  */
2644   if (t == char_type_node
2645       || t == signed_char_type_node
2646       || t == unsigned_char_type_node)
2647     return 0;
2648 
2649   /* Allow aliasing between signed and unsigned variants of the same
2650      type.  We treat the signed variant as canonical.  */
2651   if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2652     {
2653       tree t1 = gimple_signed_type (t);
2654 
2655       /* t1 == t can happen for boolean nodes which are always unsigned.  */
2656       if (t1 != t)
2657 	return get_alias_set (t1);
2658     }
2659 
2660   /* Allow aliasing between enumeral types and the underlying
2661      integer type.  This is required for C since those are
2662      compatible types.  */
2663   else if (TREE_CODE (t) == ENUMERAL_TYPE)
2664     {
2665       tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)),
2666 						false /* short-cut above */);
2667       return get_alias_set (t1);
2668     }
2669 
2670   return -1;
2671 }
2672 
2673 
2674 /* Helper for gimple_ior_addresses_taken_1.  */
2675 
2676 static bool
gimple_ior_addresses_taken_1(gimple *,tree addr,tree,void * data)2677 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2678 {
2679   bitmap addresses_taken = (bitmap)data;
2680   addr = get_base_address (addr);
2681   if (addr
2682       && DECL_P (addr))
2683     {
2684       bitmap_set_bit (addresses_taken, DECL_UID (addr));
2685       return true;
2686     }
2687   return false;
2688 }
2689 
2690 /* Set the bit for the uid of all decls that have their address taken
2691    in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2692    were any in this stmt.  */
2693 
2694 bool
gimple_ior_addresses_taken(bitmap addresses_taken,gimple * stmt)2695 gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2696 {
2697   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2698 					gimple_ior_addresses_taken_1);
2699 }
2700 
2701 
/* Return true when STMTs arguments and return value match those of FNDECL,
   a decl of a builtin function.  Used to reject calls whose actual
   argument list disagrees with the builtin's prototype before treating
   the call as the known builtin.  */

bool
gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* If the call has a LHS it must be assignable from the builtin's
     declared return type without conversion.  */
  tree ret = gimple_call_lhs (stmt);
  if (ret
      && !useless_type_conversion_p (TREE_TYPE (ret),
				     TREE_TYPE (TREE_TYPE (fndecl))))
    return false;

  /* Walk the declared parameter types in parallel with the actual
     arguments.  */
  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = gimple_call_arg (stmt, i);
      tree type = TREE_VALUE (targs);
      if (!useless_type_conversion_p (type, TREE_TYPE (arg))
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  && !(INTEGRAL_TYPE_P (type)
	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	       && useless_type_conversion_p (integer_type_node,
					     TREE_TYPE (arg))))
	return false;
      targs = TREE_CHAIN (targs);
    }
  /* Leftover declared parameters are only OK if what remains is just
     the terminating void of the prototype.  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
2741 
2742 /* Return true when STMT is operator a replaceable delete call.  */
2743 
2744 bool
gimple_call_operator_delete_p(const gcall * stmt)2745 gimple_call_operator_delete_p (const gcall *stmt)
2746 {
2747   tree fndecl;
2748 
2749   if ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE)
2750     return DECL_IS_OPERATOR_DELETE_P (fndecl);
2751   return false;
2752 }
2753 
2754 /* Return true when STMT is builtins call.  */
2755 
2756 bool
gimple_call_builtin_p(const gimple * stmt)2757 gimple_call_builtin_p (const gimple *stmt)
2758 {
2759   tree fndecl;
2760   if (is_gimple_call (stmt)
2761       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2762       && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2763     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2764   return false;
2765 }
2766 
2767 /* Return true when STMT is builtins call to CLASS.  */
2768 
2769 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_class klass)2770 gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2771 {
2772   tree fndecl;
2773   if (is_gimple_call (stmt)
2774       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2775       && DECL_BUILT_IN_CLASS (fndecl) == klass)
2776     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2777   return false;
2778 }
2779 
2780 /* Return true when STMT is builtins call to CODE of CLASS.  */
2781 
2782 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_function code)2783 gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2784 {
2785   tree fndecl;
2786   if (is_gimple_call (stmt)
2787       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2788       && fndecl_built_in_p (fndecl, code))
2789     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2790   return false;
2791 }
2792 
2793 /* If CALL is a call to a combined_fn (i.e. an internal function or
2794    a normal built-in function), return its code, otherwise return
2795    CFN_LAST.  */
2796 
2797 combined_fn
gimple_call_combined_fn(const gimple * stmt)2798 gimple_call_combined_fn (const gimple *stmt)
2799 {
2800   if (const gcall *call = dyn_cast <const gcall *> (stmt))
2801     {
2802       if (gimple_call_internal_p (call))
2803 	return as_combined_fn (gimple_call_internal_fn (call));
2804 
2805       tree fndecl = gimple_call_fndecl (stmt);
2806       if (fndecl
2807 	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
2808 	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2809 	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2810     }
2811   return CFN_LAST;
2812 }
2813 
2814 /* Return true if STMT clobbers memory.  STMT is required to be a
2815    GIMPLE_ASM.  */
2816 
2817 bool
gimple_asm_clobbers_memory_p(const gasm * stmt)2818 gimple_asm_clobbers_memory_p (const gasm *stmt)
2819 {
2820   unsigned i;
2821 
2822   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2823     {
2824       tree op = gimple_asm_clobber_op (stmt, i);
2825       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2826 	return true;
2827     }
2828 
2829   /* Non-empty basic ASM implicitly clobbers memory.  */
2830   if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2831     return true;
2832 
2833   return false;
2834 }
2835 
2836 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2837 
2838 void
dump_decl_set(FILE * file,bitmap set)2839 dump_decl_set (FILE *file, bitmap set)
2840 {
2841   if (set)
2842     {
2843       bitmap_iterator bi;
2844       unsigned i;
2845 
2846       fprintf (file, "{ ");
2847 
2848       EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2849 	{
2850 	  fprintf (file, "D.%u", i);
2851 	  fprintf (file, " ");
2852 	}
2853 
2854       fprintf (file, "}");
2855     }
2856   else
2857     fprintf (file, "NIL");
2858 }
2859 
/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* A type-compatible call to a normal leaf builtin cannot call back
     into user code, so it can only free through the builtin itself.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      case IFN_ASAN_MARK:
	/* ASAN_MARK only makes memory *available* when unpoisoning;
	   poisoning makes it unavailable.  */
	return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* For other calls, fall back to what IPA analysis recorded about
     the callee.  Unknown or interposable callees must be assumed to
     free memory.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2903 
2904 /* Return true when CALL is a call stmt that definitely need not
2905    be considered to be a memory barrier.  */
2906 bool
nonbarrier_call_p(gimple * call)2907 nonbarrier_call_p (gimple *call)
2908 {
2909   if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2910     return true;
2911   /* Should extend this to have a nonbarrier_fn flag, just as above in
2912      the nonfreeing case.  */
2913   return false;
2914 }
2915 
2916 /* Callback for walk_stmt_load_store_ops.
2917 
2918    Return TRUE if OP will dereference the tree stored in DATA, FALSE
2919    otherwise.
2920 
2921    This routine only makes a superficial check for a dereference.  Thus
2922    it must only be used if it is safe to return a false negative.  */
2923 static bool
check_loadstore(gimple *,tree op,tree,void * data)2924 check_loadstore (gimple *, tree op, tree, void *data)
2925 {
2926   if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2927     {
2928       /* Some address spaces may legitimately dereference zero.  */
2929       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2930       if (targetm.addr_space.zero_address_valid (as))
2931 	return false;
2932 
2933       return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2934     }
2935   return false;
2936 }
2937 
2938 
2939 /* Return true if OP can be inferred to be non-NULL after STMT executes,
2940    either by using a pointer dereference or attributes.  */
2941 bool
infer_nonnull_range(gimple * stmt,tree op)2942 infer_nonnull_range (gimple *stmt, tree op)
2943 {
2944   return (infer_nonnull_range_by_dereference (stmt, op)
2945 	  || infer_nonnull_range_by_attribute (stmt, op));
2946 }
2947 
2948 /* Return true if OP can be inferred to be non-NULL after STMT
2949    executes by using a pointer dereference.  */
2950 bool
infer_nonnull_range_by_dereference(gimple * stmt,tree op)2951 infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2952 {
2953   /* We can only assume that a pointer dereference will yield
2954      non-NULL if -fdelete-null-pointer-checks is enabled.  */
2955   if (!flag_delete_null_pointer_checks
2956       || !POINTER_TYPE_P (TREE_TYPE (op))
2957       || gimple_code (stmt) == GIMPLE_ASM
2958       || gimple_clobber_p (stmt))
2959     return false;
2960 
2961   if (walk_stmt_load_store_ops (stmt, (void *)op,
2962 				check_loadstore, check_loadstore))
2963     return true;
2964 
2965   return false;
2966 }
2967 
/* Return true if OP can be inferred to be a non-NULL after STMT
   executes by using attributes.  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* NB: ATTRS is deliberately re-bound inside the loop body so
	 each iteration resumes the "nonnull" lookup after the
	 previously found attribute.  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      /* Attribute argument indices are 1-based; convert.  */
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
3031 
3032 /* Compare two case labels.  Because the front end should already have
3033    made sure that case ranges do not overlap, it is enough to only compare
3034    the CASE_LOW values of each case label.  */
3035 
3036 static int
compare_case_labels(const void * p1,const void * p2)3037 compare_case_labels (const void *p1, const void *p2)
3038 {
3039   const_tree const case1 = *(const_tree const*)p1;
3040   const_tree const case2 = *(const_tree const*)p2;
3041 
3042   /* The 'default' case label always goes first.  */
3043   if (!CASE_LOW (case1))
3044     return -1;
3045   else if (!CASE_LOW (case2))
3046     return 1;
3047   else
3048     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
3049 }
3050 
/* Sort the case labels in LABEL_VEC in place in ascending order of
   CASE_LOW; the default label, if present, sorts to the front (see
   compare_case_labels).  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
3058 
3059 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
3060 
3061    LABELS is a vector that contains all case labels to look at.
3062 
3063    INDEX_TYPE is the type of the switch index expression.  Case labels
3064    in LABELS are discarded if their values are not in the value range
3065    covered by INDEX_TYPE.  The remaining case label values are folded
3066    to INDEX_TYPE.
3067 
3068    If a default case exists in LABELS, it is removed from LABELS and
3069    returned in DEFAULT_CASEP.  If no default case exists, but the
3070    case labels already cover the whole range of INDEX_TYPE, a default
3071    case is returned pointing to one of the existing case labels.
3072    Otherwise DEFAULT_CASEP is set to NULL_TREE.
3073 
3074    DEFAULT_CASEP may be NULL, in which case the above comment doesn't
3075    apply and no action is taken regardless of whether a default case is
3076    found or not.  */
3077 
void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  /* Pass 1: canonicalize each label against INDEX_TYPE's range,
     removing unreachable ones and (optionally) the default label.  */
  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* I is now the number of surviving labels.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  /* Pass 2: if the caller wants a default case but none was present,
     synthesize one when the labels already cover all of INDEX_TYPE.  */
  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Walk adjacent label pairs checking for gaps; along the
		 way track the label covering the widest range, which
		 will become the default.  */
	      tree widest_label = labels[0];
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);

		  if (CASE_HIGH (labels[i]) != NULL_TREE
		      && (CASE_HIGH (widest_label) == NULL_TREE
			  || (wi::gtu_p
			      (wi::to_wide (CASE_HIGH (labels[i]))
			       - wi::to_wide (CASE_LOW (labels[i])),
			       wi::to_wide (CASE_HIGH (widest_label))
			       - wi::to_wide (CASE_LOW (widest_label))))))
		    widest_label = labels[i];

		  /* A gap between consecutive labels means the range
		     is not fully covered; give up.  */
		  if (wi::to_wide (low) + 1 != wi::to_wide (high))
		    break;
		}
	      if (i == len)
		{
		  /* Designate the label with the widest range to be the
		     default label.  */
		  tree label = CASE_LABEL (widest_label);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
3242 
3243 /* Set the location of all statements in SEQ to LOC.  */
3244 
3245 void
gimple_seq_set_location(gimple_seq seq,location_t loc)3246 gimple_seq_set_location (gimple_seq seq, location_t loc)
3247 {
3248   for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
3249     gimple_set_location (gsi_stmt (i), loc);
3250 }
3251 
/* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */

void
gimple_seq_discard (gimple_seq seq)
{
  gimple_stmt_iterator gsi;

  /* No gsi_next in the loop header: gsi_remove already advances GSI
     to the following statement.  */
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_remove (&gsi, true);
      release_defs (stmt);
      ggc_free (stmt);
    }
}
3267 
/* See if STMT now calls function that takes no parameters and if so, drop
   call arguments.  This is used when devirtualization machinery redirects
   to __builtin_unreachable or __cxa_pure_virtual.  */

void
maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
  tree decl = gimple_call_fndecl (stmt);
  /* A prototype whose first entry is void_type_node declares a
     function taking no parameters.  */
  if (TYPE_ARG_TYPES (TREE_TYPE (decl))
      && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
      && gimple_call_num_args (stmt))
    {
      /* NOTE(review): 3 is the number of fixed (non-argument) operands
	 of a GIMPLE_CALL, so this trims away every argument slot —
	 confirm against gimple_call_arg's operand layout.  */
      gimple_set_num_ops (stmt, 3);
      update_stmt_fn (fn, stmt);
    }
}
3284 
3285 /* Return false if STMT will likely expand to real function call.  */
3286 
3287 bool
gimple_inexpensive_call_p(gcall * stmt)3288 gimple_inexpensive_call_p (gcall *stmt)
3289 {
3290   if (gimple_call_internal_p (stmt))
3291     return true;
3292   tree decl = gimple_call_fndecl (stmt);
3293   if (decl && is_inexpensive_builtin (decl))
3294     return true;
3295   return false;
3296 }
3297 
3298 /* Return a non-artificial location for STMT.  If STMT does not have
3299    location information, get the location from EXPR.  */
3300 
3301 location_t
gimple_or_expr_nonartificial_location(gimple * stmt,tree expr)3302 gimple_or_expr_nonartificial_location (gimple *stmt, tree expr)
3303 {
3304   location_t loc = gimple_nonartificial_location (stmt);
3305   if (loc == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (expr))
3306     loc = tree_nonartificial_location (expr);
3307   return expansion_point_location_if_in_system_header (loc);
3308 }
3309 
3310 
3311 #if CHECKING_P
3312 
3313 namespace selftest {
3314 
3315 /* Selftests for core gimple structures.  */
3316 
3317 /* Verify that STMT is pretty-printed as EXPECTED.
3318    Helper function for selftests.  */
3319 
3320 static void
verify_gimple_pp(const char * expected,gimple * stmt)3321 verify_gimple_pp (const char *expected, gimple *stmt)
3322 {
3323   pretty_printer pp;
3324   pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */);
3325   ASSERT_STREQ (expected, pp_formatted_text (&pp));
3326 }
3327 
3328 /* Build a GIMPLE_ASSIGN equivalent to
3329      tmp = 5;
3330    and verify various properties of it.  */
3331 
3332 static void
test_assign_single()3333 test_assign_single ()
3334 {
3335   tree type = integer_type_node;
3336   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3337 			 get_identifier ("tmp"),
3338 			 type);
3339   tree rhs = build_int_cst (type, 5);
3340   gassign *stmt = gimple_build_assign (lhs, rhs);
3341   verify_gimple_pp ("tmp = 5;", stmt);
3342 
3343   ASSERT_TRUE (is_gimple_assign (stmt));
3344   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3345   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3346   ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
3347   ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
3348   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3349   ASSERT_TRUE (gimple_assign_single_p (stmt));
3350   ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
3351 }
3352 
3353 /* Build a GIMPLE_ASSIGN equivalent to
3354      tmp = a * b;
3355    and verify various properties of it.  */
3356 
3357 static void
test_assign_binop()3358 test_assign_binop ()
3359 {
3360   tree type = integer_type_node;
3361   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3362 			 get_identifier ("tmp"),
3363 			 type);
3364   tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3365 		       get_identifier ("a"),
3366 		       type);
3367   tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3368 		       get_identifier ("b"),
3369 		       type);
3370   gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
3371   verify_gimple_pp ("tmp = a * b;", stmt);
3372 
3373   ASSERT_TRUE (is_gimple_assign (stmt));
3374   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3375   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3376   ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
3377   ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
3378   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3379   ASSERT_FALSE (gimple_assign_single_p (stmt));
3380   ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
3381 }
3382 
3383 /* Build a GIMPLE_NOP and verify various properties of it.  */
3384 
3385 static void
test_nop_stmt()3386 test_nop_stmt ()
3387 {
3388   gimple *stmt = gimple_build_nop ();
3389   verify_gimple_pp ("GIMPLE_NOP", stmt);
3390   ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
3391   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3392   ASSERT_FALSE (gimple_assign_single_p (stmt));
3393 }
3394 
3395 /* Build a GIMPLE_RETURN equivalent to
3396      return 7;
3397    and verify various properties of it.  */
3398 
3399 static void
test_return_stmt()3400 test_return_stmt ()
3401 {
3402   tree type = integer_type_node;
3403   tree val = build_int_cst (type, 7);
3404   greturn *stmt = gimple_build_return (val);
3405   verify_gimple_pp ("return 7;", stmt);
3406 
3407   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3408   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3409   ASSERT_EQ (val, gimple_return_retval (stmt));
3410   ASSERT_FALSE (gimple_assign_single_p (stmt));
3411 }
3412 
3413 /* Build a GIMPLE_RETURN equivalent to
3414      return;
3415    and verify various properties of it.  */
3416 
3417 static void
test_return_without_value()3418 test_return_without_value ()
3419 {
3420   greturn *stmt = gimple_build_return (NULL);
3421   verify_gimple_pp ("return;", stmt);
3422 
3423   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3424   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3425   ASSERT_EQ (NULL, gimple_return_retval (stmt));
3426   ASSERT_FALSE (gimple_assign_single_p (stmt));
3427 }
3428 
/* Run all of the selftests within this file: statement construction,
   accessor behavior, and pretty-printing of assigns, nops and
   returns.  */

void
gimple_c_tests ()
{
  test_assign_single ();
  test_assign_binop ();
  test_nop_stmt ();
  test_return_stmt ();
  test_return_without_value ();
}
3440 
3441 } // namespace selftest
3442 
3443 
3444 #endif /* CHECKING_P */
3445