1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
36 #include "stmt.h"
37 #include "expr.h"
38 #include "stor-layout.h"
39 #include "dumpfile.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-object-size.h"
46 #include "tree-ssa.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
52 #include "dbgcnt.h"
53 #include "builtins.h"
54 #include "tree-eh.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
69 #include "varasm.h"
70 #include "memmodel.h"
71 #include "optabs.h"
72 #include "internal-fn.h"
73
/* The kind of string-length computation requested from
   get_range_strlen and friends.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
88
89 static bool
90 get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
91
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) declarations never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its callers no longer has a body
	 of its own we could reference.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
194
195 /* Create a temporary for TYPE for a statement STMT. If the current function
196 is in SSA form, a SSA name is created. Otherwise a temporary register
197 is made. */
198
199 tree
create_tmp_reg_or_ssa_name(tree type,gimple * stmt)200 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
201 {
202 if (gimple_in_ssa_p (cfun))
203 return make_ssa_name (type, stmt);
204 else
205 return create_tmp_reg (type);
206 }
207
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains
   CVAL.  Returns the canonicalized value, NULL_TREE when the value cannot
   be referenced from the current unit, or the original value when no
   canonicalization applies.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an
     invariant address rather than pointer arithmetic.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Addresses of symbols we cannot refer to from this unit must not
	 leak into the IL.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
281
282 /* If SYM is a constant variable with known value, return the value.
283 NULL_TREE is returned otherwise. */
284
285 tree
get_symbol_constant_value(tree sym)286 get_symbol_constant_value (tree sym)
287 {
288 tree val = ctor_for_folding (sym);
289 if (val != error_mark_node)
290 {
291 if (val)
292 {
293 val = canonicalize_constructor_val (unshare_expr (val), sym);
294 if (val && is_gimple_min_invariant (val))
295 return val;
296 else
297 return NULL_TREE;
298 }
299 /* Variables declared 'const' without an initializer
300 have zero as the initializer if they may not be
301 overridden at link or run time. */
302 if (!val
303 && is_gimple_reg_type (TREE_TYPE (sym)))
304 return build_zero_cst (TREE_TYPE (sym));
305 }
306
307 return NULL_TREE;
308 }
309
310
311
312 /* Subroutine of fold_stmt. We perform constant folding of the
313 memory reference tree EXPR. */
314
315 static tree
maybe_fold_reference(tree expr)316 maybe_fold_reference (tree expr)
317 {
318 tree result = NULL_TREE;
319
320 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
321 || TREE_CODE (expr) == REALPART_EXPR
322 || TREE_CODE (expr) == IMAGPART_EXPR)
323 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
324 result = fold_unary_loc (EXPR_LOCATION (expr),
325 TREE_CODE (expr),
326 TREE_TYPE (expr),
327 TREE_OPERAND (expr, 0));
328 else if (TREE_CODE (expr) == BIT_FIELD_REF
329 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
330 result = fold_ternary_loc (EXPR_LOCATION (expr),
331 TREE_CODE (expr),
332 TREE_TYPE (expr),
333 TREE_OPERAND (expr, 0),
334 TREE_OPERAND (expr, 1),
335 TREE_OPERAND (expr, 2));
336 else
337 result = fold_const_aggregate_ref (expr);
338
339 if (result && is_gimple_min_invariant (result))
340 return result;
341
342 return NULL_TREE;
343 }
344
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary expression must be GIMPLE values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain; any embedded array index
	       must itself be a GIMPLE value.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      /* COND_EXPR's first operand may be a condition, the other
		 ternary RHS codes take plain values throughout.  */
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  /* A CONSTRUCTOR RHS is valid only when every element value is
	     itself a GIMPLE value.  */
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
461
462
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call to a unique target.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* &MEM[p + 0] simplifies to p (with a possible conversion).  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* The fold may produce GENERIC not valid in GIMPLE; verify.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
588
589
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call that may
	 clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
661
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  Transfers the LHS, virtual operands,
   location and block from STMT to NEW_STMT and swaps the statements
   at *SI_P.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  /* Keep the SSA def-stmt link consistent with the new definer.  */
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
680
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
698
699 /* Return true if EXPR is a CALL_EXPR suitable for representation
700 as a single GIMPLE_CALL statement. If the arguments require
701 further gimplification, return false. */
702
703 static bool
valid_gimple_call_p(tree expr)704 valid_gimple_call_p (tree expr)
705 {
706 unsigned i, nargs;
707
708 if (TREE_CODE (expr) != CALL_EXPR)
709 return false;
710
711 nargs = call_expr_nargs (expr);
712 for (i = 0; i < nargs; i++)
713 {
714 tree arg = CALL_EXPR_ARG (expr, i);
715 if (is_gimple_reg_type (TREE_TYPE (arg)))
716 {
717 if (!is_gimple_val (arg))
718 return false;
719 }
720 else
721 if (!is_gimple_lvalue (arg))
722 return false;
723 }
724
725 return true;
726 }
727
728 /* Convert EXPR into a GIMPLE value suitable for substitution on the
729 RHS of an assignment. Insert the necessary statements before
730 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
731 is replaced. If the call is expected to produces a result, then it
732 is replaced by an assignment of the new RHS to the result variable.
733 If the result is to be ignored, then the call is replaced by a
734 GIMPLE_NOP. A proper VDEF chain is retained by making the first
735 VUSE and the last VDEF of the whole sequence be the same as the replaced
736 statement and using new SSA names for stores in between. */
737
738 void
gimplify_and_update_call_from_tree(gimple_stmt_iterator * si_p,tree expr)739 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
740 {
741 tree lhs;
742 gimple *stmt, *new_stmt;
743 gimple_stmt_iterator i;
744 gimple_seq stmts = NULL;
745
746 stmt = gsi_stmt (*si_p);
747
748 gcc_assert (is_gimple_call (stmt));
749
750 if (valid_gimple_call_p (expr))
751 {
752 /* The call has simplified to another call. */
753 tree fn = CALL_EXPR_FN (expr);
754 unsigned i;
755 unsigned nargs = call_expr_nargs (expr);
756 vec<tree> args = vNULL;
757 gcall *new_stmt;
758
759 if (nargs > 0)
760 {
761 args.create (nargs);
762 args.safe_grow_cleared (nargs, true);
763
764 for (i = 0; i < nargs; i++)
765 args[i] = CALL_EXPR_ARG (expr, i);
766 }
767
768 new_stmt = gimple_build_call_vec (fn, args);
769 finish_update_gimple_call (si_p, new_stmt, stmt);
770 args.release ();
771 return;
772 }
773
774 lhs = gimple_call_lhs (stmt);
775 if (lhs == NULL_TREE)
776 {
777 push_gimplify_context (gimple_in_ssa_p (cfun));
778 gimplify_and_add (expr, &stmts);
779 pop_gimplify_context (NULL);
780
781 /* We can end up with folding a memcpy of an empty class assignment
782 which gets optimized away by C++ gimplification. */
783 if (gimple_seq_empty_p (stmts))
784 {
785 if (gimple_in_ssa_p (cfun))
786 {
787 unlink_stmt_vdef (stmt);
788 release_defs (stmt);
789 }
790 gsi_replace (si_p, gimple_build_nop (), false);
791 return;
792 }
793 }
794 else
795 {
796 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
797 new_stmt = gimple_build_assign (lhs, tmp);
798 i = gsi_last (stmts);
799 gsi_insert_after_without_update (&i, new_stmt,
800 GSI_CONTINUE_LINKING);
801 }
802
803 gsi_replace_with_seq_vops (si_p, stmts);
804 }
805
806
807 /* Replace the call at *GSI with the gimple value VAL. */
808
809 void
replace_call_with_value(gimple_stmt_iterator * gsi,tree val)810 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
811 {
812 gimple *stmt = gsi_stmt (*gsi);
813 tree lhs = gimple_call_lhs (stmt);
814 gimple *repl;
815 if (lhs)
816 {
817 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
818 val = fold_convert (TREE_TYPE (lhs), val);
819 repl = gimple_build_assign (lhs, val);
820 }
821 else
822 repl = gimple_build_nop ();
823 tree vdef = gimple_vdef (stmt);
824 if (vdef && TREE_CODE (vdef) == SSA_NAME)
825 {
826 unlink_stmt_vdef (stmt);
827 release_ssa_name (vdef);
828 }
829 gsi_replace (gsi, repl, false);
830 }
831
/* Replace the call at *GSI with the new call REPL and fold that
   again.  The LHS, location and virtual operands of the original
   call are transferred to REPL.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  /* The replacement call may itself be foldable.  */
  fold_stmt (gsi);
}
845
846 /* Return true if VAR is a VAR_DECL or a component thereof. */
847
848 static bool
var_decl_component_p(tree var)849 var_decl_component_p (tree var)
850 {
851 tree inner = var;
852 while (handled_component_p (inner))
853 inner = TREE_OPERAND (inner, 0);
854 return (DECL_P (inner)
855 || (TREE_CODE (inner) == MEM_REF
856 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
857 }
858
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range information is only available for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Valid object sizes are [0, SSIZE_MAX]; anything above is treated
     as invalid.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  /* Prefer the function-local range query when a function context is
     available, otherwise fall back to global ranges.  */
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* SIZE must be zero iff intersecting its range with the valid range
     leaves only zero.  */
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
889
890 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
891 diagnose (otherwise undefined) overlapping copies without preventing
892 folding. When folded, GCC guarantees that overlapping memcpy has
893 the same semantics as memmove. Call to the library memcpy need not
894 provide the same guarantee. Return false if no simplification can
895 be made. */
896
897 static bool
gimple_fold_builtin_memory_op(gimple_stmt_iterator * gsi,tree dest,tree src,enum built_in_function code)898 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
899 tree dest, tree src, enum built_in_function code)
900 {
901 gimple *stmt = gsi_stmt (*gsi);
902 tree lhs = gimple_call_lhs (stmt);
903 tree len = gimple_call_arg (stmt, 2);
904 location_t loc = gimple_location (stmt);
905
906 /* If the LEN parameter is a constant zero or in range where
907 the only valid value is zero, return DEST. */
908 if (size_must_be_zero_p (len))
909 {
910 gimple *repl;
911 if (gimple_call_lhs (stmt))
912 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
913 else
914 repl = gimple_build_nop ();
915 tree vdef = gimple_vdef (stmt);
916 if (vdef && TREE_CODE (vdef) == SSA_NAME)
917 {
918 unlink_stmt_vdef (stmt);
919 release_ssa_name (vdef);
920 }
921 gsi_replace (gsi, repl, false);
922 return true;
923 }
924
925 /* If SRC and DEST are the same (and not volatile), return
926 DEST{,+LEN,+LEN-1}. */
927 if (operand_equal_p (src, dest, 0))
928 {
929 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
930 It's safe and may even be emitted by GCC itself (see bug
931 32667). */
932 unlink_stmt_vdef (stmt);
933 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
934 release_ssa_name (gimple_vdef (stmt));
935 if (!lhs)
936 {
937 gsi_replace (gsi, gimple_build_nop (), false);
938 return true;
939 }
940 goto done;
941 }
942 else
943 {
944 /* We cannot (easily) change the type of the copy if it is a storage
945 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
946 modify the storage order of objects (see storage_order_barrier_p). */
947 tree srctype
948 = POINTER_TYPE_P (TREE_TYPE (src))
949 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
950 tree desttype
951 = POINTER_TYPE_P (TREE_TYPE (dest))
952 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
953 tree destvar, srcvar, srcoff;
954 unsigned int src_align, dest_align;
955 unsigned HOST_WIDE_INT tmp_len;
956 const char *tmp_str;
957
958 /* Build accesses at offset zero with a ref-all character type. */
959 tree off0
960 = build_int_cst (build_pointer_type_for_mode (char_type_node,
961 ptr_mode, true), 0);
962
963 /* If we can perform the copy efficiently with first doing all loads and
964 then all stores inline it that way. Currently efficiently means that
965 we can load all the memory with a single set operation and that the
966 total size is less than MOVE_MAX * MOVE_RATIO. */
967 src_align = get_pointer_alignment (src);
968 dest_align = get_pointer_alignment (dest);
969 if (tree_fits_uhwi_p (len)
970 && (compare_tree_int
971 (len, (MOVE_MAX
972 * MOVE_RATIO (optimize_function_for_size_p (cfun))))
973 <= 0)
974 /* FIXME: Don't transform copies from strings with known length.
975 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
976 from being handled, and the case was XFAILed for that reason.
977 Now that it is handled and the XFAIL removed, as soon as other
978 strlenopt tests that rely on it for passing are adjusted, this
979 hack can be removed. */
980 && !c_strlen (src, 1)
981 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
982 && memchr (tmp_str, 0, tmp_len) == NULL)
983 && !(srctype
984 && AGGREGATE_TYPE_P (srctype)
985 && TYPE_REVERSE_STORAGE_ORDER (srctype))
986 && !(desttype
987 && AGGREGATE_TYPE_P (desttype)
988 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
989 {
990 unsigned ilen = tree_to_uhwi (len);
991 if (pow2p_hwi (ilen))
992 {
993 /* Detect out-of-bounds accesses without issuing warnings.
994 Avoid folding out-of-bounds copies but to avoid false
995 positives for unreachable code defer warning until after
996 DCE has worked its magic.
997 -Wrestrict is still diagnosed. */
998 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
999 dest, src, len, len,
1000 false, false))
1001 if (warning != OPT_Wrestrict)
1002 return false;
1003
1004 scalar_int_mode mode;
1005 if (int_mode_for_size (ilen * 8, 0).exists (&mode)
1006 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
1007 && have_insn_for (SET, mode)
1008 /* If the destination pointer is not aligned we must be able
1009 to emit an unaligned store. */
1010 && (dest_align >= GET_MODE_ALIGNMENT (mode)
1011 || !targetm.slow_unaligned_access (mode, dest_align)
1012 || (optab_handler (movmisalign_optab, mode)
1013 != CODE_FOR_nothing)))
1014 {
1015 tree type = build_nonstandard_integer_type (ilen * 8, 1);
1016 tree srctype = type;
1017 tree desttype = type;
1018 if (src_align < GET_MODE_ALIGNMENT (mode))
1019 srctype = build_aligned_type (type, src_align);
1020 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
1021 tree tem = fold_const_aggregate_ref (srcmem);
1022 if (tem)
1023 srcmem = tem;
1024 else if (src_align < GET_MODE_ALIGNMENT (mode)
1025 && targetm.slow_unaligned_access (mode, src_align)
1026 && (optab_handler (movmisalign_optab, mode)
1027 == CODE_FOR_nothing))
1028 srcmem = NULL_TREE;
1029 if (srcmem)
1030 {
1031 gimple *new_stmt;
1032 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
1033 {
1034 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
1035 srcmem
1036 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
1037 new_stmt);
1038 gimple_assign_set_lhs (new_stmt, srcmem);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 }
1042 if (dest_align < GET_MODE_ALIGNMENT (mode))
1043 desttype = build_aligned_type (type, dest_align);
1044 new_stmt
1045 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
1046 dest, off0),
1047 srcmem);
1048 gimple_move_vops (new_stmt, stmt);
1049 if (!lhs)
1050 {
1051 gsi_replace (gsi, new_stmt, false);
1052 return true;
1053 }
1054 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1055 goto done;
1056 }
1057 }
1058 }
1059 }
1060
1061 if (code == BUILT_IN_MEMMOVE)
1062 {
1063 /* Both DEST and SRC must be pointer types.
1064 ??? This is what old code did. Is the testing for pointer types
1065 really mandatory?
1066
1067 If either SRC is readonly or length is 1, we can use memcpy. */
1068 if (!dest_align || !src_align)
1069 return false;
1070 if (readonly_data_expr (src)
1071 || (tree_fits_uhwi_p (len)
1072 && (MIN (src_align, dest_align) / BITS_PER_UNIT
1073 >= tree_to_uhwi (len))))
1074 {
1075 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1076 if (!fn)
1077 return false;
1078 gimple_call_set_fndecl (stmt, fn);
1079 gimple_call_set_arg (stmt, 0, dest);
1080 gimple_call_set_arg (stmt, 1, src);
1081 fold_stmt (gsi);
1082 return true;
1083 }
1084
1085 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1086 if (TREE_CODE (src) == ADDR_EXPR
1087 && TREE_CODE (dest) == ADDR_EXPR)
1088 {
1089 tree src_base, dest_base, fn;
1090 poly_int64 src_offset = 0, dest_offset = 0;
1091 poly_uint64 maxsize;
1092
1093 srcvar = TREE_OPERAND (src, 0);
1094 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
1095 if (src_base == NULL)
1096 src_base = srcvar;
1097 destvar = TREE_OPERAND (dest, 0);
1098 dest_base = get_addr_base_and_unit_offset (destvar,
1099 &dest_offset);
1100 if (dest_base == NULL)
1101 dest_base = destvar;
1102 if (!poly_int_tree_p (len, &maxsize))
1103 maxsize = -1;
1104 if (SSA_VAR_P (src_base)
1105 && SSA_VAR_P (dest_base))
1106 {
1107 if (operand_equal_p (src_base, dest_base, 0)
1108 && ranges_maybe_overlap_p (src_offset, maxsize,
1109 dest_offset, maxsize))
1110 return false;
1111 }
1112 else if (TREE_CODE (src_base) == MEM_REF
1113 && TREE_CODE (dest_base) == MEM_REF)
1114 {
1115 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1116 TREE_OPERAND (dest_base, 0), 0))
1117 return false;
1118 poly_offset_int full_src_offset
1119 = mem_ref_offset (src_base) + src_offset;
1120 poly_offset_int full_dest_offset
1121 = mem_ref_offset (dest_base) + dest_offset;
1122 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
1123 full_dest_offset, maxsize))
1124 return false;
1125 }
1126 else
1127 return false;
1128
1129 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1130 if (!fn)
1131 return false;
1132 gimple_call_set_fndecl (stmt, fn);
1133 gimple_call_set_arg (stmt, 0, dest);
1134 gimple_call_set_arg (stmt, 1, src);
1135 fold_stmt (gsi);
1136 return true;
1137 }
1138
1139 /* If the destination and source do not alias optimize into
1140 memcpy as well. */
1141 if ((is_gimple_min_invariant (dest)
1142 || TREE_CODE (dest) == SSA_NAME)
1143 && (is_gimple_min_invariant (src)
1144 || TREE_CODE (src) == SSA_NAME))
1145 {
1146 ao_ref destr, srcr;
1147 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1148 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1149 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1150 {
1151 tree fn;
1152 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1153 if (!fn)
1154 return false;
1155 gimple_call_set_fndecl (stmt, fn);
1156 gimple_call_set_arg (stmt, 0, dest);
1157 gimple_call_set_arg (stmt, 1, src);
1158 fold_stmt (gsi);
1159 return true;
1160 }
1161 }
1162
1163 return false;
1164 }
1165
1166 if (!tree_fits_shwi_p (len))
1167 return false;
1168 if (!srctype
1169 || (AGGREGATE_TYPE_P (srctype)
1170 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
1171 return false;
1172 if (!desttype
1173 || (AGGREGATE_TYPE_P (desttype)
1174 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
1175 return false;
1176 /* In the following try to find a type that is most natural to be
1177 used for the memcpy source and destination and that allows
1178 the most optimization when memcpy is turned into a plain assignment
1179 using that type. In theory we could always use a char[len] type
1180 but that only gains us that the destination and source possibly
1181 no longer will have their address taken. */
1182 if (TREE_CODE (srctype) == ARRAY_TYPE
1183 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1184 srctype = TREE_TYPE (srctype);
1185 if (TREE_CODE (desttype) == ARRAY_TYPE
1186 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1187 desttype = TREE_TYPE (desttype);
1188 if (TREE_ADDRESSABLE (srctype)
1189 || TREE_ADDRESSABLE (desttype))
1190 return false;
1191
1192 /* Make sure we are not copying using a floating-point mode or
1193 a type whose size possibly does not match its precision. */
1194 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1195 || TREE_CODE (desttype) == BOOLEAN_TYPE
1196 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1197 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1198 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1199 || TREE_CODE (srctype) == BOOLEAN_TYPE
1200 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1201 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1202 if (!srctype)
1203 srctype = desttype;
1204 if (!desttype)
1205 desttype = srctype;
1206 if (!srctype)
1207 return false;
1208
1209 src_align = get_pointer_alignment (src);
1210 dest_align = get_pointer_alignment (dest);
1211
1212 /* Choose between src and destination type for the access based
1213 on alignment, whether the access constitutes a register access
1214 and whether it may actually expose a declaration for SSA rewrite
1215 or SRA decomposition. Also try to expose a string constant, we
1216 might be able to concatenate several of them later into a single
1217 string store. */
1218 destvar = NULL_TREE;
1219 srcvar = NULL_TREE;
1220 if (TREE_CODE (dest) == ADDR_EXPR
1221 && var_decl_component_p (TREE_OPERAND (dest, 0))
1222 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1223 && dest_align >= TYPE_ALIGN (desttype)
1224 && (is_gimple_reg_type (desttype)
1225 || src_align >= TYPE_ALIGN (desttype)))
1226 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1227 else if (TREE_CODE (src) == ADDR_EXPR
1228 && var_decl_component_p (TREE_OPERAND (src, 0))
1229 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1230 && src_align >= TYPE_ALIGN (srctype)
1231 && (is_gimple_reg_type (srctype)
1232 || dest_align >= TYPE_ALIGN (srctype)))
1233 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1234 /* FIXME: Don't transform copies from strings with known original length.
1235 As soon as strlenopt tests that rely on it for passing are adjusted,
1236 this hack can be removed. */
1237 else if (gimple_call_alloca_for_var_p (stmt)
1238 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1239 && integer_zerop (srcoff)
1240 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1241 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1242 srctype = TREE_TYPE (srcvar);
1243 else
1244 return false;
1245
1246 /* Now that we chose an access type express the other side in
1247 terms of it if the target allows that with respect to alignment
1248 constraints. */
1249 if (srcvar == NULL_TREE)
1250 {
1251 if (src_align >= TYPE_ALIGN (desttype))
1252 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1253 else
1254 {
1255 if (STRICT_ALIGNMENT)
1256 return false;
1257 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1258 src_align);
1259 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1260 }
1261 }
1262 else if (destvar == NULL_TREE)
1263 {
1264 if (dest_align >= TYPE_ALIGN (srctype))
1265 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1266 else
1267 {
1268 if (STRICT_ALIGNMENT)
1269 return false;
1270 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1271 dest_align);
1272 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1273 }
1274 }
1275
1276 /* Same as above, detect out-of-bounds accesses without issuing
1277 warnings. Avoid folding out-of-bounds copies but to avoid
1278 false positives for unreachable code defer warning until
1279 after DCE has worked its magic.
1280 -Wrestrict is still diagnosed. */
1281 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1282 dest, src, len, len,
1283 false, false))
1284 if (warning != OPT_Wrestrict)
1285 return false;
1286
1287 gimple *new_stmt;
1288 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1289 {
1290 tree tem = fold_const_aggregate_ref (srcvar);
1291 if (tem)
1292 srcvar = tem;
1293 if (! is_gimple_min_invariant (srcvar))
1294 {
1295 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1296 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1297 new_stmt);
1298 gimple_assign_set_lhs (new_stmt, srcvar);
1299 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1300 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1301 }
1302 new_stmt = gimple_build_assign (destvar, srcvar);
1303 goto set_vop_and_replace;
1304 }
1305
1306 /* We get an aggregate copy. If the source is a STRING_CST, then
1307 directly use its type to perform the copy. */
1308 if (TREE_CODE (srcvar) == STRING_CST)
1309 desttype = srctype;
1310
1311 /* Or else, use an unsigned char[] type to perform the copy in order
1312 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1313 types or float modes behavior on copying. */
1314 else
1315 {
1316 desttype = build_array_type_nelts (unsigned_char_type_node,
1317 tree_to_uhwi (len));
1318 srctype = desttype;
1319 if (src_align > TYPE_ALIGN (srctype))
1320 srctype = build_aligned_type (srctype, src_align);
1321 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1322 }
1323
1324 if (dest_align > TYPE_ALIGN (desttype))
1325 desttype = build_aligned_type (desttype, dest_align);
1326 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1327 new_stmt = gimple_build_assign (destvar, srcvar);
1328
1329 set_vop_and_replace:
1330 gimple_move_vops (new_stmt, stmt);
1331 if (!lhs)
1332 {
1333 gsi_replace (gsi, new_stmt, false);
1334 return true;
1335 }
1336 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1337 }
1338
1339 done:
1340 gimple_seq stmts = NULL;
1341 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1342 len = NULL_TREE;
1343 else if (code == BUILT_IN_MEMPCPY)
1344 {
1345 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1346 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1347 TREE_TYPE (dest), dest, len);
1348 }
1349 else
1350 gcc_unreachable ();
1351
1352 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1353 gimple *repl = gimple_build_assign (lhs, dest);
1354 gsi_replace (gsi, repl, false);
1355 return true;
1356 }
1357
1358 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1359 to built-in memcmp (a, b, len). */
1360
1361 static bool
gimple_fold_builtin_bcmp(gimple_stmt_iterator * gsi)1362 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1363 {
1364 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1365
1366 if (!fn)
1367 return false;
1368
1369 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1370
1371 gimple *stmt = gsi_stmt (*gsi);
1372 tree a = gimple_call_arg (stmt, 0);
1373 tree b = gimple_call_arg (stmt, 1);
1374 tree len = gimple_call_arg (stmt, 2);
1375
1376 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1377 replace_call_with_call_and_fold (gsi, repl);
1378
1379 return true;
1380 }
1381
1382 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1383 to built-in memmove (dest, src, len). */
1384
1385 static bool
gimple_fold_builtin_bcopy(gimple_stmt_iterator * gsi)1386 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1387 {
1388 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1389
1390 if (!fn)
1391 return false;
1392
1393 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1394 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1395 len) into memmove (dest, src, len). */
1396
1397 gimple *stmt = gsi_stmt (*gsi);
1398 tree src = gimple_call_arg (stmt, 0);
1399 tree dest = gimple_call_arg (stmt, 1);
1400 tree len = gimple_call_arg (stmt, 2);
1401
1402 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1403 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1404 replace_call_with_call_and_fold (gsi, repl);
1405
1406 return true;
1407 }
1408
1409 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1410 to built-in memset (dest, 0, len). */
1411
1412 static bool
gimple_fold_builtin_bzero(gimple_stmt_iterator * gsi)1413 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1414 {
1415 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1416
1417 if (!fn)
1418 return false;
1419
1420 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1421
1422 gimple *stmt = gsi_stmt (*gsi);
1423 tree dest = gimple_call_arg (stmt, 0);
1424 tree len = gimple_call_arg (stmt, 1);
1425
1426 gimple_seq seq = NULL;
1427 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1428 gimple_seq_add_stmt_without_update (&seq, repl);
1429 gsi_replace_with_seq_vops (gsi, seq);
1430 fold_stmt (gsi);
1431
1432 return true;
1433 }
1434
1435 /* Fold function call to builtin memset or bzero at *GSI setting the
1436 memory of size LEN to VAL. Return whether a simplification was made. */
1437
1438 static bool
gimple_fold_builtin_memset(gimple_stmt_iterator * gsi,tree c,tree len)1439 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1440 {
1441 gimple *stmt = gsi_stmt (*gsi);
1442 tree etype;
1443 unsigned HOST_WIDE_INT length, cval;
1444
1445 /* If the LEN parameter is zero, return DEST. */
1446 if (integer_zerop (len))
1447 {
1448 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1449 return true;
1450 }
1451
1452 if (! tree_fits_uhwi_p (len))
1453 return false;
1454
1455 if (TREE_CODE (c) != INTEGER_CST)
1456 return false;
1457
1458 tree dest = gimple_call_arg (stmt, 0);
1459 tree var = dest;
1460 if (TREE_CODE (var) != ADDR_EXPR)
1461 return false;
1462
1463 var = TREE_OPERAND (var, 0);
1464 if (TREE_THIS_VOLATILE (var))
1465 return false;
1466
1467 etype = TREE_TYPE (var);
1468 if (TREE_CODE (etype) == ARRAY_TYPE)
1469 etype = TREE_TYPE (etype);
1470
1471 if (!INTEGRAL_TYPE_P (etype)
1472 && !POINTER_TYPE_P (etype))
1473 return NULL_TREE;
1474
1475 if (! var_decl_component_p (var))
1476 return NULL_TREE;
1477
1478 length = tree_to_uhwi (len);
1479 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1480 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1481 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1482 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1483 return NULL_TREE;
1484
1485 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1486 return NULL_TREE;
1487
1488 if (!type_has_mode_precision_p (etype))
1489 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1490 TYPE_UNSIGNED (etype));
1491
1492 if (integer_zerop (c))
1493 cval = 0;
1494 else
1495 {
1496 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1497 return NULL_TREE;
1498
1499 cval = TREE_INT_CST_LOW (c);
1500 cval &= 0xff;
1501 cval |= cval << 8;
1502 cval |= cval << 16;
1503 cval |= (cval << 31) << 1;
1504 }
1505
1506 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1507 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1508 gimple_move_vops (store, stmt);
1509 gimple_set_location (store, gimple_location (stmt));
1510 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1511 if (gimple_call_lhs (stmt))
1512 {
1513 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1514 gsi_replace (gsi, asgn, false);
1515 }
1516 else
1517 {
1518 gimple_stmt_iterator gsi2 = *gsi;
1519 gsi_prev (gsi);
1520 gsi_remove (&gsi2, true);
1521 }
1522
1523 return true;
1524 }
1525
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Try to
   determine the length (or, when RKIND == SRK_INT_VALUE, the maximum
   integer value) represented by ARG and fold it into *PDATA's bounds.
   VISITED, RKIND, and ELTSIZE are as for get_range_strlen.
   Return true if *PDATA was updated successfully.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: recurse on the pointer P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  /* Fail only when the constant index exceeds the array's
	     declared upper bound.  */
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      /* No exact length; fall back to bounds derived from the type or
	 size of the referenced object.  */
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Subtract one for the terminating nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1835
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA names are handled by the tree-walking helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR: recurse into both value operands.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
1946
1947 /* Try to obtain the range of the lengths of the string(s) referenced
1948 by ARG, or the size of the largest array ARG refers to if the range
1949 of lengths cannot be determined, and store all in *PDATA which must
1950 be zero-initialized on input except PDATA->MAXBOUND may be set to
1951 a non-null tree node other than INTEGER_CST to request to have it
1952 set to the length of the longest string in a PHI. ELTSIZE is
1953 the expected size of the string element in bytes: 1 for char and
1954 some power of 2 for wide characters.
1955 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1956 for optimization. Returning false means that a nonzero PDATA->MINLEN
1957 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */
1960
bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  /* Tracks SSA names already visited to avoid infinite recursion
     through PHI cycles.  */
  auto_bitmap visited;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  /* An all-ones MAXLEN means the range could not be determined.  */
  return !integer_all_onesp (pdata->maxlen);
}
1985
1986 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1987 For ARG of pointer types, NONSTR indicates if the caller is prepared
1988 to handle unterminated strings. For integer ARG and when RKIND ==
1989 SRK_INT_VALUE, NONSTR must be null.
1990
1991 If an unterminated array is discovered and our caller handles
1992 unterminated arrays, then bubble up the offending DECL and
1993 return the maximum size. Otherwise return NULL. */
1994
static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  /* Tracks SSA names already visited during the recursive walk.  */
  auto_bitmap visited;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
2026
2027 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2028 true, strictly less than) the lower bound of SIZE at compile time and false
2029 otherwise. */
2030
2031 static bool
2032 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2033 {
2034 if (len == NULL_TREE)
2035 return false;
2036
2037 wide_int size_range[2];
2038 wide_int len_range[2];
2039 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2040 {
2041 if (strict)
2042 return wi::ltu_p (len_range[1], size_range[0]);
2043 else
2044 return wi::leu_p (len_range[1], size_range[0]);
2045 }
2046
2047 return false;
2048 }
2049
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if the call was simplified (folded to DEST or replaced
   with a call to memcpy) and false if no simplification was made.  */
2053
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strcpy -> memcpy transformation below trades size for speed;
     skip it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2113
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if the call was simplified (folded to DEST or replaced
   with a call to memcpy) and false if no simplification was made.  */
2117
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring, i.e., it need
     not be nul-terminated; suppresses the truncation warning below.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  LEN <= SSIZE here, so copying
     LEN bytes from SRC stays within the source string (including its
     terminating nul) and no zero-padding is needed.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2188
2189 /* Fold function call to builtin strchr or strrchr.
2190 If both arguments are constant, evaluate and fold the result,
2191 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2192 In general strlen is significantly faster than strchr
2193 due to being a simpler operation. */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a used result the transformations below have nothing to
     replace; leave the call alone.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* If both the string and the character are known, evaluate the
     search at compile time using the host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to STR + (P1 - P), the offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for the
     terminating nul: str(r)chr (s, 0) == s + strlen (s).  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2277
2278 /* Fold function call to builtin strstr.
2279 If both arguments are constant, evaluate and fold the result,
2280 additionally fold strstr (x, "") into x and strstr (x, "c")
2281 into strchr (x, 'c'). */
2282 static bool
gimple_fold_builtin_strstr(gimple_stmt_iterator * gsi)2283 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2284 {
2285 gimple *stmt = gsi_stmt (*gsi);
2286 if (!gimple_call_lhs (stmt))
2287 return false;
2288
2289 tree haystack = gimple_call_arg (stmt, 0);
2290 tree needle = gimple_call_arg (stmt, 1);
2291
2292 /* Avoid folding if either argument is not a nul-terminated array.
2293 Defer warning until later. */
2294 if (!check_nul_terminated_array (NULL_TREE, haystack)
2295 || !check_nul_terminated_array (NULL_TREE, needle))
2296 return false;
2297
2298 const char *q = c_getstr (needle);
2299 if (q == NULL)
2300 return false;
2301
2302 if (const char *p = c_getstr (haystack))
2303 {
2304 const char *r = strstr (p, q);
2305
2306 if (r == NULL)
2307 {
2308 replace_call_with_value (gsi, integer_zero_node);
2309 return true;
2310 }
2311
2312 tree len = build_int_cst (size_type_node, r - p);
2313 gimple_seq stmts = NULL;
2314 gimple *new_stmt
2315 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2316 haystack, len);
2317 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2318 gsi_replace_with_seq_vops (gsi, stmts);
2319 return true;
2320 }
2321
2322 /* For strstr (x, "") return x. */
2323 if (q[0] == '\0')
2324 {
2325 replace_call_with_value (gsi, haystack);
2326 return true;
2327 }
2328
2329 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2330 if (q[1] == '\0')
2331 {
2332 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2333 if (strchr_fn)
2334 {
2335 tree c = build_int_cst (integer_type_node, q[0]);
2336 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2337 replace_call_with_call_and_fold (gsi, repl);
2338 return true;
2339 }
2340 }
2341
2342 return false;
2343 }
2344
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified -- either SRC is the empty
   string and the call is folded to DST, or the call is expanded into
   strlen (DST) followed by a memcpy into DST + strlen (DST) -- and
   false if no simplification was possible.  */
2362
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument, so assign DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2436
2437 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2438 are the arguments to the call. */
2439
2440 static bool
gimple_fold_builtin_strcat_chk(gimple_stmt_iterator * gsi)2441 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2442 {
2443 gimple *stmt = gsi_stmt (*gsi);
2444 tree dest = gimple_call_arg (stmt, 0);
2445 tree src = gimple_call_arg (stmt, 1);
2446 tree size = gimple_call_arg (stmt, 2);
2447 tree fn;
2448 const char *p;
2449
2450
2451 p = c_getstr (src);
2452 /* If the SRC parameter is "", return DEST. */
2453 if (p && *p == '\0')
2454 {
2455 replace_call_with_value (gsi, dest);
2456 return true;
2457 }
2458
2459 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2460 return false;
2461
2462 /* If __builtin_strcat_chk is used, assume strcat is available. */
2463 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2464 if (!fn)
2465 return false;
2466
2467 gimple *repl = gimple_build_call (fn, 2, dest, src);
2468 replace_call_with_call_and_fold (gsi, repl);
2469 return true;
2470 }
2471
2472 /* Simplify a call to the strncat builtin. */
2473
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  /* Constant length of SRC, or null if it is not known.  */
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Size of the destination object, if it can be determined.  */
  unsigned HOST_WIDE_INT dstsize;
  bool found_dstsize = compute_builtin_object_size (dst, 1, &dstsize);

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

      if (!nowarn && found_dstsize)
	{
	  int cmpdst = compare_tree_int (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %wu"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  /* strncat is equivalent to strcat only when the bound is known to be
     at least the length of SRC.  */
  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2561
2562 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2563 LEN, and SIZE. */
2564
2565 static bool
gimple_fold_builtin_strncat_chk(gimple_stmt_iterator * gsi)2566 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2567 {
2568 gimple *stmt = gsi_stmt (*gsi);
2569 tree dest = gimple_call_arg (stmt, 0);
2570 tree src = gimple_call_arg (stmt, 1);
2571 tree len = gimple_call_arg (stmt, 2);
2572 tree size = gimple_call_arg (stmt, 3);
2573 tree fn;
2574 const char *p;
2575
2576 p = c_getstr (src);
2577 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2578 if ((p && *p == '\0')
2579 || integer_zerop (len))
2580 {
2581 replace_call_with_value (gsi, dest);
2582 return true;
2583 }
2584
2585 if (! integer_all_onesp (size))
2586 {
2587 tree src_len = c_strlen (src, 1);
2588 if (known_lower (stmt, src_len, len))
2589 {
2590 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2591 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2592 if (!fn)
2593 return false;
2594
2595 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2596 replace_call_with_call_and_fold (gsi, repl);
2597 return true;
2598 }
2599 return false;
2600 }
2601
2602 /* If __builtin_strncat_chk is used, assume strncat is available. */
2603 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2604 if (!fn)
2605 return false;
2606
2607 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2608 replace_call_with_call_and_fold (gsi, repl);
2609 return true;
2610 }
2611
2612 /* Build and append gimple statements to STMTS that would load a first
2613 character of a memory location identified by STR. LOC is location
2614 of the statement. */
2615
2616 static tree
gimple_load_first_char(location_t loc,tree str,gimple_seq * stmts)2617 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2618 {
2619 tree var;
2620
2621 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2622 tree cst_uchar_ptr_node
2623 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2624 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2625
2626 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2627 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2628 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2629
2630 gimple_assign_set_lhs (stmt, var);
2631 gimple_seq_add_stmt_without_update (stmts, stmt);
2632
2633 return var;
2634 }
2635
2636 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2637
static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the constant bound of strncmp/strncasecmp if it is known,
     otherwise HOST_WIDE_INT_M1U.  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Only fold when both arrays are nul-terminated.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Punt if the reduced bound would still read past the end
	       of an unterminated array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A case-sensitive comparison can only prove equality here;
	       a nonzero result is inconclusive for strncasecmp.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when the comparison is known to examine at least one
     character: either the function takes no bound or the bound is a
     known nonzero constant.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2852
2853 /* Fold a call to the memchr pointed by GSI iterator. */
2854
static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constants
     to evaluate the call at compile time.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  /* STRING_LENGTH is the length of the string representation, which
     may be shorter than the array it is stored in.  */
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* A not-found result is only conclusive when the search
	     cannot extend past the end of the underlying array.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: fold to ARG1 + offset-of-match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* The result is unused; replace the call with a no-op while
	       preserving its virtual operands.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2922
/* Fold a call to the fputs builtin pointed to by GSI.  ARG0 and ARG1
   are the string and stream arguments to the call.  UNLOCKED is true
   if this is actually a call to fputs_unlocked.  Return true if a
   simplification was made, false otherwise.  */
2929
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* If the single character isn't known at compile time, fall
	 through and emit fwrite instead.  */
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
2998
2999 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3000 DEST, SRC, LEN, and SIZE are the arguments to the call.
3001 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
3002 code of the builtin. If MAXLEN is not NULL, it is maximum length
3003 passed as third argument. */
3004
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; emit the pointer
	     arithmetic before the call and substitute its result.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* Unless SIZE is the all-ones "no checking" marker, LEN (or at least
     its maximum possible value MAXLEN) must provably fit in SIZE before
     the object-size check can be dropped.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk () */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  /* The length is known to fit: drop the check and call the plain
     variant with the SIZE argument removed.  */
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3085
3086 /* Print a message in the dump file recording transformation of FROM to TO. */
3087
3088 static void
dump_transformation(gcall * from,gcall * to)3089 dump_transformation (gcall *from, gcall *to)
3090 {
3091 if (dump_enabled_p ())
3092 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3093 gimple_call_fn (from), gimple_call_fn (to));
3094 }
3095
3096 /* Fold a call to the __st[rp]cpy_chk builtin.
3097 DEST, SRC, and SIZE are the arguments to the call.
3098 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3099 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3100 strings passed as second argument. */
3101
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* MAXLEN is an upper bound on the length of the SRC string, if one
     can be determined.  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      /* The strict (fourth) argument accounts for the terminating nul:
	 the copy writes LEN + 1 bytes, so LEN must be strictly below
	 SIZE before the check can be dropped.  */
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  /* Emit LEN + 1 as the copy length so the terminating nul is
	     included.  */
	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3189
3190 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3191 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3192 length passed as third argument. IGNORE is true if return value can be
3193 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3194
3195 static bool
gimple_fold_builtin_stxncpy_chk(gimple_stmt_iterator * gsi,tree dest,tree src,tree len,tree size,enum built_in_function fcode)3196 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3197 tree dest, tree src,
3198 tree len, tree size,
3199 enum built_in_function fcode)
3200 {
3201 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3202 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3203 tree fn;
3204
3205 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3206 if (! integer_all_onesp (size)
3207 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3208 {
3209 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3210 {
3211 /* If return value of __stpncpy_chk is ignored,
3212 optimize into __strncpy_chk. */
3213 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3214 if (fn)
3215 {
3216 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3217 replace_call_with_call_and_fold (gsi, repl);
3218 return true;
3219 }
3220 }
3221 return false;
3222 }
3223
3224 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3225 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3226 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3227 if (!fn)
3228 return false;
3229
3230 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3231 dump_transformation (stmt, repl);
3232 replace_call_with_call_and_fold (gsi, repl);
3233 return true;
3234 }
3235
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */
3238
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* DATA.DECL is set to non-null if SRC refers to an unterminated
     array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      /* Suppress the warning on this statement so it isn't repeated.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 so the terminating nul is copied as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy takes over the original call's virtual operands.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3316
/* Fold a call to __{,v}snprintf_chk pointed to by GSI.  Return false
   if a normal call should be emitted rather than transforming the
   call in place.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  */
3322
static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* Unless SIZE is the all-ones "no checking" marker, LEN (or its
     maximum possible value MAXLEN) must provably fit in SIZE.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the variadic arguments down over the dropped FLAG and SIZE.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3381
/* Fold a call to __{,v}sprintf_chk pointed to by GSI.  Return false
   if a normal call should be emitted rather than transforming the
   call in place.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */
3386
static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* LEN will hold the number of bytes the call is known to produce,
     when that can be determined from the format string.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  /* Unless SIZE is the all-ones "no checking" marker, require the
     output (plus nul, hence the strict bound) to provably fit.  */
  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the variadic arguments down over the dropped FLAG and SIZE.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3467
3468 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3469 ORIG may be null if this is a 2-argument call. We don't attempt to
3470 simplify calls with more than 3 arguments.
3471
3472 Return true if simplification was possible, otherwise false. */
3473
bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written; here
	     that is the constant strlen of the format string.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", convert the call to strcpy and, when the
     result is used, set it to the known length of the source.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      /* ORIG_LEN is only needed (and required to be known) when the
	 return value of sprintf is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3597
3598 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3599 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3600 attempt to simplify calls with more than 4 arguments.
3601
3602 Return true if simplification was possible, otherwise false. */
3603
bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; here that is the constant length of FMT.  */
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", convert the call to strcpy and, when the
     result is used, set it to the known length of the source.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3729
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, true if the call was
   simplified in place.  FCODE is the BUILT_IN_* code of the function to
   be simplified.  */
3737
static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing ARG is only valid for the va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3830
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, true if the call was
   simplified in place.  FCODE is the BUILT_IN_* code of the function to
   be simplified.  */
3838
3839 static bool
gimple_fold_builtin_printf(gimple_stmt_iterator * gsi,tree fmt,tree arg,enum built_in_function fcode)3840 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3841 tree arg, enum built_in_function fcode)
3842 {
3843 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3844 tree fn_putchar, fn_puts, newarg;
3845 const char *fmt_str = NULL;
3846
3847 /* If the return value is used, don't do the transformation. */
3848 if (gimple_call_lhs (stmt) != NULL_TREE)
3849 return false;
3850
3851 /* Check whether the format is a literal string constant. */
3852 fmt_str = c_getstr (fmt);
3853 if (fmt_str == NULL)
3854 return false;
3855
3856 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3857 {
3858 /* If we're using an unlocked function, assume the other
3859 unlocked functions exist explicitly. */
3860 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3861 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3862 }
3863 else
3864 {
3865 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3866 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3867 }
3868
3869 if (!init_target_chars ())
3870 return false;
3871
3872 if (strcmp (fmt_str, target_percent_s) == 0
3873 || strchr (fmt_str, target_percent) == NULL)
3874 {
3875 const char *str;
3876
3877 if (strcmp (fmt_str, target_percent_s) == 0)
3878 {
3879 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3880 return false;
3881
3882 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3883 return false;
3884
3885 str = c_getstr (arg);
3886 if (str == NULL)
3887 return false;
3888 }
3889 else
3890 {
3891 /* The format specifier doesn't contain any '%' characters. */
3892 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3893 && arg)
3894 return false;
3895 str = fmt_str;
3896 }
3897
3898 /* If the string was "", printf does nothing. */
3899 if (str[0] == '\0')
3900 {
3901 replace_call_with_value (gsi, NULL_TREE);
3902 return true;
3903 }
3904
3905 /* If the string has length of 1, call putchar. */
3906 if (str[1] == '\0')
3907 {
3908 /* Given printf("c"), (where c is any one character,)
3909 convert "c"[0] to an int and pass that to the replacement
3910 function. */
3911 newarg = build_int_cst (integer_type_node, str[0]);
3912 if (fn_putchar)
3913 {
3914 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3915 replace_call_with_call_and_fold (gsi, repl);
3916 return true;
3917 }
3918 }
3919 else
3920 {
3921 /* If the string was "string\n", call puts("string"). */
3922 size_t len = strlen (str);
3923 if ((unsigned char)str[len - 1] == target_newline
3924 && (size_t) (int) len == len
3925 && (int) len > 0)
3926 {
3927 char *newstr;
3928
3929 /* Create a NUL-terminated string that's one char shorter
3930 than the original, stripping off the trailing '\n'. */
3931 newstr = xstrdup (str);
3932 newstr[len - 1] = '\0';
3933 newarg = build_string_literal (len, newstr);
3934 free (newstr);
3935 if (fn_puts)
3936 {
3937 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3938 replace_call_with_call_and_fold (gsi, repl);
3939 return true;
3940 }
3941 }
3942 else
3943 /* We'd like to arrange to call fputs(string,stdout) here,
3944 but we need stdout and don't have a way to get it yet. */
3945 return false;
3946 }
3947 }
3948
3949 /* The other optimizations can be done only on the non-va_list variants. */
3950 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3951 return false;
3952
3953 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3954 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3955 {
3956 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3957 return false;
3958 if (fn_puts)
3959 {
3960 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3961 replace_call_with_call_and_fold (gsi, repl);
3962 return true;
3963 }
3964 }
3965
3966 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3967 else if (strcmp (fmt_str, target_percent_c) == 0)
3968 {
3969 if (!arg || ! useless_type_conversion_p (integer_type_node,
3970 TREE_TYPE (arg)))
3971 return false;
3972 if (fn_putchar)
3973 {
3974 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3975 replace_call_with_call_and_fold (gsi, repl);
3976 return true;
3977 }
3978 }
3979
3980 return false;
3981 }
3982
3983
3984
/* Fold a call to __builtin_strlen in *GSI.  If the length of the
   argument can be determined to be a single constant, replace the call
   with that constant and return true.  Otherwise record the computed
   [min, max] length range on the call's LHS and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  /* Only use the range when it doesn't come from an unterminated
     array (lendata.decl would then be set) and both bounds are
     compile-time constants.  */
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range: fall back to [0, max_object_size () - 2]
	 (the largest length a string in the largest object can have).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  NOTE(review): this arm
	 looks only reachable via the branch above where lendata.minlen
	 is non-null (the fallback bounds can't be equal), so the
	 TREE_TYPE use here is presumably safe — confirm.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4035
/* Fold a call to __builtin_acc_on_device with device argument ARG0.
   Replaces the call with (ARG0 == val_host) | (ARG0 == val_dev),
   where the two constants depend on whether this is the host or the
   accelerator compiler.  Returns true if folded.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  /* On the accelerator side, match "not host" and this accelerator's
     own device type instead.  */
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
4077
4078 /* Fold realloc (0, n) -> malloc (n). */
4079
4080 static bool
gimple_fold_builtin_realloc(gimple_stmt_iterator * gsi)4081 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4082 {
4083 gimple *stmt = gsi_stmt (*gsi);
4084 tree arg = gimple_call_arg (stmt, 0);
4085 tree size = gimple_call_arg (stmt, 1);
4086
4087 if (operand_equal_p (arg, null_pointer_node, 0))
4088 {
4089 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4090 if (fn_malloc)
4091 {
4092 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4093 replace_call_with_call_and_fold (gsi, repl);
4094 return true;
4095 }
4096 }
4097 return false;
4098 }
4099
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location used for any diagnostics and for emitted statements.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base pointer the emitted MEM_REF stores are relative to.  */
  tree base;
  /* Alias (pointer) type used for the MEM_REF offsets.  */
  tree alias_type;
  /* Iterator before which any generated statements are inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4135
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  /* For partial flushes, only bother once enough bytes accumulated.  */
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    /* Keep the last up to 2 * clear_padding_unit bytes for later,
       rounded down to a multiple of clear_padding_unit.  */
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  /* Flush the run of all-padding bytes, then clear the
		     padding bits of this partially-padded byte.  */
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      /* Shift the unprocessed tail to the front of buf->buf.  */
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Normal case: emit GIMPLE stores clearing the padding, processing
     the buffer a word at a time.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* A full word would run past the end of the object; retry
	     this position with half the word size.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      /* Scan the word, recording the first/last nonzero (padding) and
	 zero byte positions and whether only whole bytes of padding
	 occur (no partial bit-field bytes).  */
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      /* The leading all-ones bytes continue the pending
		 padding run.  */
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Emit one store of zeros for the accumulated padding run.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  size_t k;
		  /* Find the end of this run of padding bytes.  */
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    /* Padding extends to the end of the word; carry it over.  */
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bit-fields are involved: find the smallest naturally aligned
	 power-of-two sized access covering all padding bits and emit a
	 read-modify-write (or plain zero store if fully padding).  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		/* The whole access is padding; just store zero.  */
		src = build_zero_cst (type);
	      else
		{
		  /* Load the current value, mask off the padding bits
		     and store the result back.  */
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      /* Flush any padding run still pending at the end.  */
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4440
/* Append PADDING_BYTES padding bytes (all-ones bytes, i.e. bytes that
   should be fully cleared) to BUF, flushing as needed when the buffer
   would overflow.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  /* Try a partial flush first to make room.  */
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Still doesn't fit: fill the buffer with padding, flush, and
	 account for the rest directly via buf->off/padding_bytes,
	 keeping only the sub-word remainder in buf->buf.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      /* Fits: just append the all-ones bytes.  */
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4474
4475 static void clear_padding_type (clear_padding_struct *, tree,
4476 HOST_WIDE_INT, bool);
4477
/* Clear padding bits of union type TYPE occupying SZ bytes.  A byte is
   padding in a union only if it is padding in every member, so each
   member is processed into a mask that is ANDed together.
   FOR_AUTO_INIT suppresses the flexible-array-member diagnostic when
   the builtin call was inserted by the compiler itself.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Already inside a union: reuse BUF itself, restarting each
	 member at the same (word-aligned) offset.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Top-level union: accumulate member masks into a scratch
	 union_ptr area, either borrowing the tail of buf->buf or
	 heap-allocating when the union is too large.  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      /* Start from all-padding; members AND away their non-padding.  */
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	/* Each member starts over at the union's start offset; bytes
	   past the member's size count as padding for this member.  */
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested union case: restore BUF to continue after the union.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    /* The combined mask already sits in buf->buf's tail.  */
    buf->size += sz;
  else
    {
      /* Copy the heap-allocated mask back into BUF in chunks.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4569
4570 /* The only known floating point formats with padding bits are the
4571 IEEE extended ones. */
4572
4573 static bool
clear_padding_real_needs_padding_p(tree type)4574 clear_padding_real_needs_padding_p (tree type)
4575 {
4576 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4577 return (fmt->b == 2
4578 && fmt->signbit_ro == fmt->signbit_rw
4579 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4580 }
4581
4582 /* Return true if TYPE might contain any padding bits. */
4583
4584 bool
clear_padding_type_may_have_padding_p(tree type)4585 clear_padding_type_may_have_padding_p (tree type)
4586 {
4587 switch (TREE_CODE (type))
4588 {
4589 case RECORD_TYPE:
4590 case UNION_TYPE:
4591 return true;
4592 case ARRAY_TYPE:
4593 case COMPLEX_TYPE:
4594 case VECTOR_TYPE:
4595 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4596 case REAL_TYPE:
4597 return clear_padding_real_needs_padding_p (type);
4598 default:
4599 return false;
4600 }
4601 }
4602
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);

   Built from artificial labels and a conditional jump since we are
   operating on pre-CFG GIMPLE.  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);  /* loop body */
  tree l2 = create_artificial_label (buf->loc);  /* loop condition */
  tree l3 = create_artificial_label (buf->loc);  /* loop exit */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Body: clear padding of one element and advance the pointer.  */
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4636
/* Clear padding bits for TYPE occupying SZ bytes.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		/* Bit-field: mark all bytes it touches as padding first,
		   then clear the actual value bits directly in buf->buf.  */
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      /* Field fits inside one byte.  */
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			/* Clear trailing bits of the first byte, whole
			   middle bytes, then leading bits of the last.  */
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* Flexible array member: has no well-defined padding.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		/* Ordinary field: pad up to its position, then recurse.  */
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field),
				    fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      /* Trailing bytes after the last field are padding.  */
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Temporarily retarget BUF at a loop induction pointer that
	     walks the array one element at a time.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  /* Restore BUF state for whatever follows the array.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small array: just process each element in turn.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  /* Bytes the round-trip didn't preserve are padding.  */
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      /* nullptr_t's representation carries no value: all padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalars (integers, pointers, etc.) have no padding bits.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4850
4851 /* Clear padding bits of TYPE in MASK. */
4852
4853 void
clear_type_padding_in_mask(tree type,unsigned char * mask)4854 clear_type_padding_in_mask (tree type, unsigned char *mask)
4855 {
4856 clear_padding_struct buf;
4857 buf.loc = UNKNOWN_LOCATION;
4858 buf.clear_in_mask = true;
4859 buf.base = NULL_TREE;
4860 buf.alias_type = NULL_TREE;
4861 buf.gsi = NULL;
4862 buf.align = 0;
4863 buf.off = 0;
4864 buf.padding_bytes = 0;
4865 buf.sz = int_size_in_bytes (type);
4866 buf.size = 0;
4867 buf.union_ptr = mask;
4868 clear_padding_type (&buf, type, buf.sz, false);
4869 clear_padding_flush (&buf, true);
4870 }
4871
4872 /* Fold __builtin_clear_padding builtin. */
4873
static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 3rd argument of __builtin_clear_padding is to distinguish whether
     this call is made by the user or by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 2));
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  /* Remember the statement before the call so that we can tell later
     whether any replacement statements were emitted at all.  */
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  /* Use the larger of the pointer's known alignment and the natural
     alignment of TYPE when deciding how to emit the clearing stores.  */
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop clearing padding of one element at a
	     time, walking BUF.BASE from PTR to PTR + SZ.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      /* Fixed-size case: make sure the base is valid inside a MEM_REF
	 and emit the clearing stores directly.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  /* If nothing was emitted before the call, replace it with a nop so the
     iterator stays valid; otherwise remove it and point *GSI at the last
     emitted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
4969
4970 /* Fold the non-target builtin at *GSI and return whether any simplification
4971 was made. */
4972
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  /* Dispatch to the specialized folder for each builtin we know how to
     simplify; each returns true iff it changed the statement.  */
  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr; the bool
       argument selects searching for the last occurrence.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family has a variable argument count, so verify the
       count before extracting the optional trailing argument.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5148
5149 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5150 function calls to constants, where possible. */
5151
5152 static tree
fold_internal_goacc_dim(const gimple * call)5153 fold_internal_goacc_dim (const gimple *call)
5154 {
5155 int axis = oacc_get_ifn_dim_arg (call);
5156 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5157 tree result = NULL_TREE;
5158 tree type = TREE_TYPE (gimple_call_lhs (call));
5159
5160 switch (gimple_call_internal_fn (call))
5161 {
5162 case IFN_GOACC_DIM_POS:
5163 /* If the size is 1, we know the answer. */
5164 if (size == 1)
5165 result = build_int_cst (type, 0);
5166 break;
5167 case IFN_GOACC_DIM_SIZE:
5168 /* If the size is not dynamic, we know the answer. */
5169 if (size)
5170 result = build_int_cst (type, size);
5171 break;
5172 default:
5173 break;
5174 }
5175
5176 return result;
5177 }
5178
5179 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5180 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5181 &var where var is only addressable because of such calls. */
5182
bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Bail out unless this is a normal builtin call with both virtual
     operands present and atomic inlining is enabled; the sanitizers
     need to see the original library call.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a local register-type
     variable so it can be rewritten into an SSA name.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak argument must be a compile-time constant 0 or 1 so it can
     be encoded into the internal-fn flag operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must provide a compare-and-swap pattern for this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5244
5245 /* Fold
5246 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5247 into
5248 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5249 i = IMAGPART_EXPR <t>;
5250 r = (_Bool) i;
5251 e = REALPART_EXPR <t>; */
5252
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  /* The internal fn returns both the old value and the success flag in
     one _Complex value.  */
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and byte size into a single flag operand:
     w * 256 + N per the function comment above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw internally, follow-up statements
     must go on the fallthru edge rather than after the call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *GSI at the first inserted statement.  */
  *gsi = gsiret;
}
5332
5333 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5334 doesn't fit into TYPE. The test for overflow should be regardless of
5335 -fwrapv, and even for unsigned types. */
5336
5337 bool
arith_overflowed_p(enum tree_code code,const_tree type,const_tree arg0,const_tree arg1)5338 arith_overflowed_p (enum tree_code code, const_tree type,
5339 const_tree arg0, const_tree arg1)
5340 {
5341 widest2_int warg0 = widest2_int_cst (arg0);
5342 widest2_int warg1 = widest2_int_cst (arg1);
5343 widest2_int wres;
5344 switch (code)
5345 {
5346 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5347 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5348 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5349 default: gcc_unreachable ();
5350 }
5351 signop sign = TYPE_SIGN (type);
5352 if (sign == UNSIGNED && wi::neg_p (wres))
5353 return true;
5354 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5355 }
5356
5357 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5358 for the memory it references, otherwise return null. VECTYPE is the
5359 type of the memory vector. */
5360
5361 static tree
gimple_fold_mask_load_store_mem_ref(gcall * call,tree vectype)5362 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5363 {
5364 tree ptr = gimple_call_arg (call, 0);
5365 tree alias_align = gimple_call_arg (call, 1);
5366 tree mask = gimple_call_arg (call, 2);
5367 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5368 return NULL_TREE;
5369
5370 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5371 if (TYPE_ALIGN (vectype) != align)
5372 vectype = build_aligned_type (vectype, align);
5373 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5374 return fold_build2 (MEM_REF, vectype, ptr, offset);
5375 }
5376
5377 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5378
5379 static bool
gimple_fold_mask_load(gimple_stmt_iterator * gsi,gcall * call)5380 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5381 {
5382 tree lhs = gimple_call_lhs (call);
5383 if (!lhs)
5384 return false;
5385
5386 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5387 {
5388 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5389 gimple_set_location (new_stmt, gimple_location (call));
5390 gimple_move_vops (new_stmt, call);
5391 gsi_replace (gsi, new_stmt, false);
5392 return true;
5393 }
5394 return false;
5395 }
5396
5397 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5398
5399 static bool
gimple_fold_mask_store(gimple_stmt_iterator * gsi,gcall * call)5400 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5401 {
5402 tree rhs = gimple_call_arg (call, 3);
5403 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5404 {
5405 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5406 gimple_set_location (new_stmt, gimple_location (call));
5407 gimple_move_vops (new_stmt, call);
5408 gsi_replace (gsi, new_stmt, false);
5409 return true;
5410 }
5411 return false;
5412 }
5413
5414 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5415 The statement may be replaced by another statement, e.g., if the call
5416 simplifies to a constant value. Return true if any changes were made.
5417 It is assumed that the operands have been previously folded. */
5418
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  /* Strip the OBJ_TYPE_REF wrapper: the target is known.  */
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization via type inheritance
	     analysis.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call can never execute, so
		     replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* For the arithmetic internal fns below, SUBCODE is the tree code
	 of the underlying operation and CPLX_RESULT says whether the
	 result is a _Complex value carrying an overflow flag.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the object size is unknown (-1) or
	       the access is provably in bounds.  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the index is provably within
	       the bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW fns, fold against the element type of the
		 _Complex lhs; no lhs means nothing to fold to.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* UBSAN checks must stay when the operation
		       overflows.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package result and overflow flag into a _Complex value.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
5723
5724
5725 /* Return true whether NAME has a use on STMT. */
5726
5727 static bool
has_use_on_stmt(tree name,gimple * stmt)5728 has_use_on_stmt (tree name, gimple *stmt)
5729 {
5730 imm_use_iterator iter;
5731 use_operand_p use_p;
5732 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5733 if (USE_STMT (use_p) == stmt)
5734 return true;
5735 return false;
5736 }
5737
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Likewise for the operands of a comparison embedded in ops[0].  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND result must be expressed as a tree code.  */
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	/* Simplified to a bare SSA name; compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  /* Simplified to a constant; the condition is statically known.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result into *SEQ and compare
	     that against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      /* When INPLACE, only rewrite the RHS if the new form does not
	 need more operand slots than the old statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      /* Same call, simplified arguments: update them in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Replace the whole statement by a sequence computing the
	 original LHS.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
5880
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is rewritten in place.
   IS_DEBUG is true when T comes from a debug bind, in which case we
   silently give up instead of asserting when no base can be found.
   Returns true if anything changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  /* Look through an outer ADDR_EXPR and work on its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Rebase the index to a zero lower bound and scale
		     it to a bit offset.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  /* Only fold when the access is fully inside the vector.  */
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to get at the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* For debug binds be tolerant of unanalyzable addresses.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the component offset into the MEM_REF offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Fold &MEM[CST, CST'] down to a plain integer constant address.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  /* The address-invariance of the enclosing ADDR_EXPR may have
	     changed; recompute it.  */
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
6033
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look up SSA defs while
   matching; it may refuse by returning NULL_TREE.
   Returns true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether -Wstrict-overflow warnings were already suppressed
     on this statement so we do not emit deferred ones below.  */
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the gimple now has two operands rather than one.
	     The same reason why this can't be done in
	     maybe_canonicalize_mem_ref_addr is the same reason why
	     this can't be done inplace.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  /* The statement may have been replaced; refresh it.  */
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* For a comparison swapping operands requires
		     swapping the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	gcall *call = as_a<gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    /* Canonicalize operand order of commutative internal-fn
	       arguments, mirroring the GIMPLE_ASSIGN handling above.  */
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      /* Pass is_debug so unanalyzable addresses are tolerated.  */
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* No break needed; falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    /* Drop any statements built for a failed replacement.  */
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	/* Wrap a needed conversion around the folded RHS.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When INPLACE only accept the new RHS if it needs fewer
	   operand slots than the existing statement has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the referenced object and re-take its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Copy-propagate the valueized return value if allowed.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
6338
/* Valueization callback that ends up not following SSA edges.
   Always returns NULL_TREE, so gimple_simplify never looks through
   SSA definitions when given this callback.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
6346
6347 /* Valueization callback that ends up following single-use SSA edges only. */
6348
6349 tree
follow_single_use_edges(tree val)6350 follow_single_use_edges (tree val)
6351 {
6352 if (TREE_CODE (val) == SSA_NAME
6353 && !has_single_use (val))
6354 return NULL_TREE;
6355 return val;
6356 }
6357
/* Valueization callback that follows all SSA edges.
   Returns VAL unchanged, i.e. it never restricts lookups.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
6365
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without following SSA use-def chains.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
6378
/* As above, but use VALUEIZE to look up SSA definitions while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
6384
6385 /* Perform the minimal folding on statement *GSI. Only operations like
6386 *&x created by constant propagation are handled. The statement cannot
6387 be replaced with a new one. Return true if the statement was
6388 changed, false otherwise.
6389 The statement *GSI should be in valid gimple form but may
6390 be in unfolded state as resulting from for example constant propagation
6391 which can produce *&x = 0. */
6392
6393 bool
fold_stmt_inplace(gimple_stmt_iterator * gsi)6394 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6395 {
6396 gimple *stmt = gsi_stmt (*gsi);
6397 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6398 gcc_assert (gsi_stmt (*gsi) == stmt);
6399 return changed;
6400 }
6401
6402 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6403 if EXPR is null or we don't know how.
6404 If non-null, the result always has boolean type. */
6405
6406 static tree
canonicalize_bool(tree expr,bool invert)6407 canonicalize_bool (tree expr, bool invert)
6408 {
6409 if (!expr)
6410 return NULL_TREE;
6411 else if (invert)
6412 {
6413 if (integer_nonzerop (expr))
6414 return boolean_false_node;
6415 else if (integer_zerop (expr))
6416 return boolean_true_node;
6417 else if (TREE_CODE (expr) == SSA_NAME)
6418 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6419 build_int_cst (TREE_TYPE (expr), 0));
6420 else if (COMPARISON_CLASS_P (expr))
6421 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6422 boolean_type_node,
6423 TREE_OPERAND (expr, 0),
6424 TREE_OPERAND (expr, 1));
6425 else
6426 return NULL_TREE;
6427 }
6428 else
6429 {
6430 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6431 return expr;
6432 if (integer_nonzerop (expr))
6433 return boolean_true_node;
6434 else if (integer_zerop (expr))
6435 return boolean_false_node;
6436 else if (TREE_CODE (expr) == SSA_NAME)
6437 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6438 build_int_cst (TREE_TYPE (expr), 0));
6439 else if (COMPARISON_CLASS_P (expr))
6440 return fold_build2 (TREE_CODE (expr),
6441 boolean_type_node,
6442 TREE_OPERAND (expr, 0),
6443 TREE_OPERAND (expr, 1));
6444 else
6445 return NULL_TREE;
6446 }
6447 }
6448
6449 /* Check to see if a boolean expression EXPR is logically equivalent to the
6450 comparison (OP1 CODE OP2). Check for various identities involving
6451 SSA_NAMEs. */
6452
6453 static bool
same_bool_comparison_p(const_tree expr,enum tree_code code,const_tree op1,const_tree op2)6454 same_bool_comparison_p (const_tree expr, enum tree_code code,
6455 const_tree op1, const_tree op2)
6456 {
6457 gimple *s;
6458
6459 /* The obvious case. */
6460 if (TREE_CODE (expr) == code
6461 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6462 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6463 return true;
6464
6465 /* Check for comparing (name, name != 0) and the case where expr
6466 is an SSA_NAME with a definition matching the comparison. */
6467 if (TREE_CODE (expr) == SSA_NAME
6468 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6469 {
6470 if (operand_equal_p (expr, op1, 0))
6471 return ((code == NE_EXPR && integer_zerop (op2))
6472 || (code == EQ_EXPR && integer_nonzerop (op2)));
6473 s = SSA_NAME_DEF_STMT (expr);
6474 if (is_gimple_assign (s)
6475 && gimple_assign_rhs_code (s) == code
6476 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6477 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6478 return true;
6479 }
6480
6481 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6482 of name is a comparison, recurse. */
6483 if (TREE_CODE (op1) == SSA_NAME
6484 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6485 {
6486 s = SSA_NAME_DEF_STMT (op1);
6487 if (is_gimple_assign (s)
6488 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6489 {
6490 enum tree_code c = gimple_assign_rhs_code (s);
6491 if ((c == NE_EXPR && integer_zerop (op2))
6492 || (c == EQ_EXPR && integer_nonzerop (op2)))
6493 return same_bool_comparison_p (expr, c,
6494 gimple_assign_rhs1 (s),
6495 gimple_assign_rhs2 (s));
6496 if ((c == EQ_EXPR && integer_zerop (op2))
6497 || (c == NE_EXPR && integer_nonzerop (op2)))
6498 return same_bool_comparison_p (expr,
6499 invert_tree_comparison (c, false),
6500 gimple_assign_rhs1 (s),
6501 gimple_assign_rhs2 (s));
6502 }
6503 }
6504 return false;
6505 }
6506
6507 /* Check to see if two boolean expressions OP1 and OP2 are logically
6508 equivalent. */
6509
6510 static bool
same_bool_result_p(const_tree op1,const_tree op2)6511 same_bool_result_p (const_tree op1, const_tree op2)
6512 {
6513 /* Simple cases first. */
6514 if (operand_equal_p (op1, op2, 0))
6515 return true;
6516
6517 /* Check the cases where at least one of the operands is a comparison.
6518 These are a bit smarter than operand_equal_p in that they apply some
6519 identifies on SSA_NAMEs. */
6520 if (COMPARISON_CLASS_P (op2)
6521 && same_bool_comparison_p (op1, TREE_CODE (op2),
6522 TREE_OPERAND (op2, 0),
6523 TREE_OPERAND (op2, 1)))
6524 return true;
6525 if (COMPARISON_CLASS_P (op1)
6526 && same_bool_comparison_p (op2, TREE_CODE (op1),
6527 TREE_OPERAND (op1, 0),
6528 TREE_OPERAND (op1, 1)))
6529 return true;
6530
6531 /* Default case. */
6532 return false;
6533 }
6534
/* Forward declarations for some mutually recursive functions.
   The and_*/or_* families call into each other via DeMorgan rewrites,
   so they all need to be declared before any of them is defined.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
6555
6556 /* Helper function for and_comparisons_1: try to simplify the AND of the
6557 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6558 If INVERT is true, invert the value of the VAR before doing the AND.
6559 Return NULL_EXPR if we can't simplify this to a single expression. */
6560
6561 static tree
and_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)6562 and_var_with_comparison (tree type, tree var, bool invert,
6563 enum tree_code code2, tree op2a, tree op2b)
6564 {
6565 tree t;
6566 gimple *stmt = SSA_NAME_DEF_STMT (var);
6567
6568 /* We can only deal with variables whose definitions are assignments. */
6569 if (!is_gimple_assign (stmt))
6570 return NULL_TREE;
6571
6572 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6573 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6574 Then we only have to consider the simpler non-inverted cases. */
6575 if (invert)
6576 t = or_var_with_comparison_1 (type, stmt,
6577 invert_tree_comparison (code2, false),
6578 op2a, op2b);
6579 else
6580 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6581 return canonicalize_bool (t, invert);
6582 }
6583
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set to OP2A when (OP2A CODE2 OP2B) is equivalent to OP2A being
     true resp. false; used by the identity checks below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 AND (op2a code2 op2b)), kept for
	 combining with the second partial result below.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6748
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant arg that tests false forces the AND
			 to false; otherwise the AND reduces to the
			 second comparison.  All args must agree.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6873
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
   : try to simplify the AND/OR (selected by CODE, BIT_AND_EXPR or
   BIT_IOR_EXPR) of the two comparisons (OP1A CODE1 OP1B) and
   (OP2A CODE2 OP2B) using the match.pd machinery.  Return NULL_TREE if we
   can't simplify this to a single expression.  As we are going to lower the
   cost of building SSA names / gimple stmts significantly, we need to
   allocate them on the stack.  This will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Ask the match.pd machinery to fold (lhs1 CODE lhs2), following the
     SSA edges back to the fake defining statements built above.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* If the result is one of the fake LHS names, rebuild the
	     corresponding original comparison; the fake names must never
	     escape this function.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* A comparison that still refers to a fake LHS is not a
	     simplification we can return.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
6950
6951 /* Try to simplify the AND of two comparisons, specified by
6952 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6953 If this can be simplified to a single expression (without requiring
6954 introducing more SSA variables to hold intermediate values),
6955 return the resulting tree. Otherwise return NULL_TREE.
6956 If the result expression is non-null, it has boolean type. */
6957
6958 tree
maybe_fold_and_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)6959 maybe_fold_and_comparisons (tree type,
6960 enum tree_code code1, tree op1a, tree op1b,
6961 enum tree_code code2, tree op2a, tree op2b)
6962 {
6963 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6964 return t;
6965
6966 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6967 return t;
6968
6969 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6970 op1a, op1b, code2, op2a,
6971 op2b))
6972 return t;
6973
6974 return NULL_TREE;
6975 }
6976
6977 /* Helper function for or_comparisons_1: try to simplify the OR of the
6978 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6979 If INVERT is true, invert the value of VAR before doing the OR.
6980 Return NULL_EXPR if we can't simplify this to a single expression. */
6981
6982 static tree
or_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)6983 or_var_with_comparison (tree type, tree var, bool invert,
6984 enum tree_code code2, tree op2a, tree op2b)
6985 {
6986 tree t;
6987 gimple *stmt = SSA_NAME_DEF_STMT (var);
6988
6989 /* We can only deal with variables whose definitions are assignments. */
6990 if (!is_gimple_assign (stmt))
6991 return NULL_TREE;
6992
6993 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6994 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6995 Then we only have to consider the simpler non-inverted cases. */
6996 if (invert)
6997 t = and_var_with_comparison_1 (type, stmt,
6998 invert_tree_comparison (code2, false),
6999 op2a, op2b);
7000 else
7001 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
7002 return canonicalize_bool (t, invert);
7003 }
7004
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name X when the second
     comparison is equivalent to testing X for truth / falsehood.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }
  
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);
      
      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));
      
      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }
	  
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}
      
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }
	  
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
7170
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant arg that tests true forces the OR to
			 true; otherwise the OR reduces to the second
			 comparison.  All args must agree.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7295
7296 /* Try to simplify the OR of two comparisons, specified by
7297 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7298 If this can be simplified to a single expression (without requiring
7299 introducing more SSA variables to hold intermediate values),
7300 return the resulting tree. Otherwise return NULL_TREE.
7301 If the result expression is non-null, it has boolean type. */
7302
7303 tree
maybe_fold_or_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)7304 maybe_fold_or_comparisons (tree type,
7305 enum tree_code code1, tree op1a, tree op1b,
7306 enum tree_code code2, tree op2a, tree op2b)
7307 {
7308 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7309 return t;
7310
7311 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7312 return t;
7313
7314 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7315 op1a, op1b, code2, op2a,
7316 op2b))
7317 return t;
7318
7319 return NULL_TREE;
7320 }
7321
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   GVALUEIZE is passed to gimple_simplify for valueizing SSA names
   while following use-def edges; VALUEIZE is used everywhere else.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-populated vector CONSTRUCTOR can fold to a
		 VECTOR_CST if every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

              if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put a constant, if any, on the RHS.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Only fold the UBSAN check when the operation provably does
	       not overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
7628
7629 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7630 Returns NULL_TREE if folding to a constant is not possible, otherwise
7631 returns a constant according to is_gimple_min_invariant. */
7632
7633 tree
gimple_fold_stmt_to_constant(gimple * stmt,tree (* valueize)(tree))7634 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7635 {
7636 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7637 if (res && is_gimple_min_invariant (res))
7638 return res;
7639 return NULL_TREE;
7640 }
7641
7642
7643 /* The following set of functions are supposed to fold references using
7644 their constant initializers. */
7645
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   VALUEIZE, when non-null, maps SSA names to known values.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 strip the address operation.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-sized, non-variable accesses can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
7721
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  Returns the folded value
   or NULL_TREE if the reference cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  Vectors have no TYPE_DOMAIN, in
     which case the low bound is implicitly zero.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements; byte-wise encode the
	 covered elements into a buffer and re-interpret the bytes as
	 TYPE.  First check native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      /* Past the last explicit initializer everything reads as zero.  */
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  INDEX/MAX_INDEX track the
	 (possibly RANGE_EXPR) index range covered by the current
	 constructor element ELT; ACCESS_INDEX is the array index of the
	 element being encoded next.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  /* Clamp the encoding to the buffer for the final, possibly
	     partial, element.  */
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  /* Only the first element may start at a non-zero offset.  */
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      /* Advance to the next constructor element, or switch to
		 implicit zeros once the initializer is exhausted.  */
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  /* Indexes between the previous element and this one
		     read as zero.  */
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
7891
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  Returns the folded value or NULL_TREE on failure.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the initialized fields looking for the one overlapping the
     requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the matching field's value with the offset
	     rebased to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  /* Memory not covered by any initialized field reads as zero.  */
  return build_zero_cst (type);
}
7994
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which POLY_OFFSET refers to.  This is used when folding accesses
   to string members of aggregates.  When non-null, increment *SUBOFF
   by the bit offset of the accessed subobject.  FROM_DECL is the
   declaration the constructor originates from, used for reference
   canonicalization.  Returns the folded value or NULL_TREE.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Ensure SUBOFF is always valid for the helpers; a still-pointing-
	 to-DUMMY SUBOFF below also identifies the outermost call.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
8091
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of the accessed element:
		 (idx - low_bound) * unit_size * BITS_PER_UNIT,
		 sign-extended to sizetype precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      /* Bit offset of FIELD within its representative.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  /* Read the whole representative and shift the field's
		     bits into place.  */
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
8261
/* Like fold_const_aggregate_ref_1, but without valueization of
   SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
8267
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET (bytes) plus the TOKENth slot into a bit offset.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
8377
8378 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8379 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8380 KNOWN_BINFO carries the binfo describing the true type of
8381 OBJ_TYPE_REF_OBJECT(REF).
8382 Set CAN_REFER if non-NULL to false if method
8383 is not referable or if the virtual table is ill-formed (such as rewriten
8384 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8385
8386 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)8387 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8388 bool *can_refer)
8389 {
8390 unsigned HOST_WIDE_INT offset;
8391 tree v;
8392
8393 v = BINFO_VTABLE (known_binfo);
8394 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8395 if (!v)
8396 return NULL_TREE;
8397
8398 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8399 {
8400 if (can_refer)
8401 *can_refer = false;
8402 return NULL_TREE;
8403 }
8404 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8405 }
8406
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only fold when the array's low bound is a constant.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Try to fold the inner dereference first; fall back to an
	 INDIRECT_REF if that fails.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
8527
8528 /* Return true if CODE is an operation that when operating on signed
8529 integer types involves undefined behavior on overflow and the
8530 operation can be expressed with unsigned arithmetic. */
8531
8532 bool
arith_code_with_undefined_signed_overflow(tree_code code)8533 arith_code_with_undefined_signed_overflow (tree_code code)
8534 {
8535 switch (code)
8536 {
8537 case ABS_EXPR:
8538 case PLUS_EXPR:
8539 case MINUS_EXPR:
8540 case MULT_EXPR:
8541 case NEGATE_EXPR:
8542 case POINTER_PLUS_EXPR:
8543 return true;
8544 default:
8545 return false;
8546 }
8547 }
8548
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR has a dedicated unsigned counterpart; for the other codes
     convert each operand to the unsigned type instead.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-created by
     the conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
8589
8590
8591 /* The valueization hook we use for the gimple_build API simplification.
8592 This makes us match fold_buildN behavior by only combining with
8593 statements in the sequence(s) we are currently building. */
8594
8595 static tree
gimple_build_valueize(tree op)8596 gimple_build_valueize (tree op)
8597 {
8598 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8599 return op;
8600 return NULL_TREE;
8601 }
8602
8603 /* Build the expression CODE OP0 of type TYPE with location LOC,
8604 simplifying it first if possible. Returns the built
8605 expression value and appends statements possibly defining it
8606 to SEQ. */
8607
8608 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0)8609 gimple_build (gimple_seq *seq, location_t loc,
8610 enum tree_code code, tree type, tree op0)
8611 {
8612 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8613 if (!res)
8614 {
8615 res = create_tmp_reg_or_ssa_name (type);
8616 gimple *stmt;
8617 if (code == REALPART_EXPR
8618 || code == IMAGPART_EXPR
8619 || code == VIEW_CONVERT_EXPR)
8620 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8621 else
8622 stmt = gimple_build_assign (res, code, op0);
8623 gimple_set_location (stmt, loc);
8624 gimple_seq_add_stmt_without_update (seq, stmt);
8625 }
8626 return res;
8627 }
8628
8629 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8630 simplifying it first if possible. Returns the built
8631 expression value and appends statements possibly defining it
8632 to SEQ. */
8633
8634 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1)8635 gimple_build (gimple_seq *seq, location_t loc,
8636 enum tree_code code, tree type, tree op0, tree op1)
8637 {
8638 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8639 if (!res)
8640 {
8641 res = create_tmp_reg_or_ssa_name (type);
8642 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8643 gimple_set_location (stmt, loc);
8644 gimple_seq_add_stmt_without_update (seq, stmt);
8645 }
8646 return res;
8647 }
8648
8649 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8650 simplifying it first if possible. Returns the built
8651 expression value and appends statements possibly defining it
8652 to SEQ. */
8653
8654 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)8655 gimple_build (gimple_seq *seq, location_t loc,
8656 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8657 {
8658 tree res = gimple_simplify (code, type, op0, op1, op2,
8659 seq, gimple_build_valueize);
8660 if (!res)
8661 {
8662 res = create_tmp_reg_or_ssa_name (type);
8663 gimple *stmt;
8664 if (code == BIT_FIELD_REF)
8665 stmt = gimple_build_assign (res, code,
8666 build3 (code, type, op0, op1, op2));
8667 else
8668 stmt = gimple_build_assign (res, code, op0, op1, op2);
8669 gimple_set_location (stmt, loc);
8670 gimple_seq_add_stmt_without_update (seq, stmt);
8671 }
8672 return res;
8673 }
8674
8675 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8676 void) with a location LOC. Returns the built expression value (or NULL_TREE
8677 if TYPE is void) and appends statements possibly defining it to SEQ. */
8678
8679 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type)8680 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8681 {
8682 tree res = NULL_TREE;
8683 gcall *stmt;
8684 if (internal_fn_p (fn))
8685 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8686 else
8687 {
8688 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8689 stmt = gimple_build_call (decl, 0);
8690 }
8691 if (!VOID_TYPE_P (type))
8692 {
8693 res = create_tmp_reg_or_ssa_name (type);
8694 gimple_call_set_lhs (stmt, res);
8695 }
8696 gimple_set_location (stmt, loc);
8697 gimple_seq_add_stmt_without_update (seq, stmt);
8698 return res;
8699 }
8700
8701 /* Build the call FN (ARG0) with a result of type TYPE
8702 (or no result if TYPE is void) with location LOC,
8703 simplifying it first if possible. Returns the built
8704 expression value (or NULL_TREE if TYPE is void) and appends
8705 statements possibly defining it to SEQ. */
8706
8707 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0)8708 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8709 tree type, tree arg0)
8710 {
8711 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8712 if (!res)
8713 {
8714 gcall *stmt;
8715 if (internal_fn_p (fn))
8716 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8717 else
8718 {
8719 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8720 stmt = gimple_build_call (decl, 1, arg0);
8721 }
8722 if (!VOID_TYPE_P (type))
8723 {
8724 res = create_tmp_reg_or_ssa_name (type);
8725 gimple_call_set_lhs (stmt, res);
8726 }
8727 gimple_set_location (stmt, loc);
8728 gimple_seq_add_stmt_without_update (seq, stmt);
8729 }
8730 return res;
8731 }
8732
8733 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8734 (or no result if TYPE is void) with location LOC,
8735 simplifying it first if possible. Returns the built
8736 expression value (or NULL_TREE if TYPE is void) and appends
8737 statements possibly defining it to SEQ. */
8738
8739 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1)8740 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8741 tree type, tree arg0, tree arg1)
8742 {
8743 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8744 if (!res)
8745 {
8746 gcall *stmt;
8747 if (internal_fn_p (fn))
8748 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8749 else
8750 {
8751 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8752 stmt = gimple_build_call (decl, 2, arg0, arg1);
8753 }
8754 if (!VOID_TYPE_P (type))
8755 {
8756 res = create_tmp_reg_or_ssa_name (type);
8757 gimple_call_set_lhs (stmt, res);
8758 }
8759 gimple_set_location (stmt, loc);
8760 gimple_seq_add_stmt_without_update (seq, stmt);
8761 }
8762 return res;
8763 }
8764
8765 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8766 (or no result if TYPE is void) with location LOC,
8767 simplifying it first if possible. Returns the built
8768 expression value (or NULL_TREE if TYPE is void) and appends
8769 statements possibly defining it to SEQ. */
8770
8771 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1,tree arg2)8772 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8773 tree type, tree arg0, tree arg1, tree arg2)
8774 {
8775 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8776 seq, gimple_build_valueize);
8777 if (!res)
8778 {
8779 gcall *stmt;
8780 if (internal_fn_p (fn))
8781 stmt = gimple_build_call_internal (as_internal_fn (fn),
8782 3, arg0, arg1, arg2);
8783 else
8784 {
8785 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8786 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8787 }
8788 if (!VOID_TYPE_P (type))
8789 {
8790 res = create_tmp_reg_or_ssa_name (type);
8791 gimple_call_set_lhs (stmt, res);
8792 }
8793 gimple_set_location (stmt, loc);
8794 gimple_seq_add_stmt_without_update (seq, stmt);
8795 }
8796 return res;
8797 }
8798
8799 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
8800 void) with location LOC, simplifying it first if possible. Returns the
8801 built expression value (or NULL_TREE if TYPE is void) and appends
8802 statements possibly defining it to SEQ. */
8803
8804 tree
gimple_build(gimple_seq * seq,location_t loc,code_helper code,tree type,tree op0)8805 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8806 tree type, tree op0)
8807 {
8808 if (code.is_tree_code ())
8809 return gimple_build (seq, loc, tree_code (code), type, op0);
8810 return gimple_build (seq, loc, combined_fn (code), type, op0);
8811 }
8812
8813 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
8814 void) with location LOC, simplifying it first if possible. Returns the
8815 built expression value (or NULL_TREE if TYPE is void) and appends
8816 statements possibly defining it to SEQ. */
8817
8818 tree
gimple_build(gimple_seq * seq,location_t loc,code_helper code,tree type,tree op0,tree op1)8819 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8820 tree type, tree op0, tree op1)
8821 {
8822 if (code.is_tree_code ())
8823 return gimple_build (seq, loc, tree_code (code), type, op0, op1);
8824 return gimple_build (seq, loc, combined_fn (code), type, op0, op1);
8825 }
8826
8827 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
8828 is void) with location LOC, simplifying it first if possible. Returns the
8829 built expression value (or NULL_TREE if TYPE is void) and appends statements
8830 possibly defining it to SEQ. */
8831
8832 tree
gimple_build(gimple_seq * seq,location_t loc,code_helper code,tree type,tree op0,tree op1,tree op2)8833 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8834 tree type, tree op0, tree op1, tree op2)
8835 {
8836 if (code.is_tree_code ())
8837 return gimple_build (seq, loc, tree_code (code), type, op0, op1, op2);
8838 return gimple_build (seq, loc, combined_fn (code), type, op0, op1, op2);
8839 }
8840
8841 /* Build the conversion (TYPE) OP with a result of type TYPE
8842 with location LOC if such conversion is neccesary in GIMPLE,
8843 simplifying it first.
8844 Returns the built expression value and appends
8845 statements possibly defining it to SEQ. */
8846
8847 tree
gimple_convert(gimple_seq * seq,location_t loc,tree type,tree op)8848 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8849 {
8850 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8851 return op;
8852 return gimple_build (seq, loc, NOP_EXPR, type, op);
8853 }
8854
8855 /* Build the conversion (ptrofftype) OP with a result of a type
8856 compatible with ptrofftype with location LOC if such conversion
8857 is neccesary in GIMPLE, simplifying it first.
8858 Returns the built expression value and appends
8859 statements possibly defining it to SEQ. */
8860
8861 tree
gimple_convert_to_ptrofftype(gimple_seq * seq,location_t loc,tree op)8862 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8863 {
8864 if (ptrofftype_p (TREE_TYPE (op)))
8865 return op;
8866 return gimple_convert (seq, loc, sizetype, op);
8867 }
8868
8869 /* Build a vector of type TYPE in which each element has the value OP.
8870 Return a gimple value for the result, appending any new statements
8871 to SEQ. */
8872
8873 tree
gimple_build_vector_from_val(gimple_seq * seq,location_t loc,tree type,tree op)8874 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8875 tree op)
8876 {
8877 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8878 && !CONSTANT_CLASS_P (op))
8879 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8880
8881 tree res, vec = build_vector_from_val (type, op);
8882 if (is_gimple_val (vec))
8883 return vec;
8884 if (gimple_in_ssa_p (cfun))
8885 res = make_ssa_name (type);
8886 else
8887 res = create_tmp_reg (type);
8888 gimple *stmt = gimple_build_assign (res, vec);
8889 gimple_set_location (stmt, loc);
8890 gimple_seq_add_stmt_without_update (seq, stmt);
8891 return res;
8892 }
8893
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* Stepped encodings (3 values per pattern) are rejected; see the
     head comment.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Look for a non-constant encoded element; if there is none the
     builder can produce a VECTOR_CST directly (fall through below).  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	/* The constant-subparts requirement is implied by the builder
	   interface for non-constant elements.  */
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Build a CONSTRUCTOR of all NELTS elements.  Note this reuses
	   (and clobbers) the outer loop counter I.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	/* Assign the CONSTRUCTOR to a fresh register or SSA name.  */
	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant.  */
  return builder->build ();
}
8931
8932 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8933 and generate a value guaranteed to be rounded upwards to ALIGN.
8934
8935 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8936
8937 tree
gimple_build_round_up(gimple_seq * seq,location_t loc,tree type,tree old_size,unsigned HOST_WIDE_INT align)8938 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8939 tree old_size, unsigned HOST_WIDE_INT align)
8940 {
8941 unsigned HOST_WIDE_INT tg_mask = align - 1;
8942 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8943 gcc_assert (INTEGRAL_TYPE_P (type));
8944 tree tree_mask = build_int_cst (type, tg_mask);
8945 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8946 tree_mask);
8947
8948 tree mask = build_int_cst (type, -align);
8949 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8950 }
8951
8952 /* Return true if the result of assignment STMT is known to be non-negative.
8953 If the return value is based on the assumption that signed overflow is
8954 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8955 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8956
8957 static bool
gimple_assign_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)8958 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8959 int depth)
8960 {
8961 enum tree_code code = gimple_assign_rhs_code (stmt);
8962 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
8963 switch (get_gimple_rhs_class (code))
8964 {
8965 case GIMPLE_UNARY_RHS:
8966 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8967 type,
8968 gimple_assign_rhs1 (stmt),
8969 strict_overflow_p, depth);
8970 case GIMPLE_BINARY_RHS:
8971 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8972 type,
8973 gimple_assign_rhs1 (stmt),
8974 gimple_assign_rhs2 (stmt),
8975 strict_overflow_p, depth);
8976 case GIMPLE_TERNARY_RHS:
8977 return false;
8978 case GIMPLE_SINGLE_RHS:
8979 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8980 strict_overflow_p, depth);
8981 case GIMPLE_INVALID_RHS:
8982 break;
8983 }
8984 gcc_unreachable ();
8985 }
8986
8987 /* Return true if return value of call STMT is known to be non-negative.
8988 If the return value is based on the assumption that signed overflow is
8989 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8990 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8991
8992 static bool
gimple_call_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)8993 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8994 int depth)
8995 {
8996 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8997 gimple_call_arg (stmt, 0) : NULL_TREE;
8998 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8999 gimple_call_arg (stmt, 1) : NULL_TREE;
9000 tree lhs = gimple_call_lhs (stmt);
9001 return (lhs
9002 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9003 gimple_call_combined_fn (stmt),
9004 arg0, arg1,
9005 strict_overflow_p, depth));
9006 }
9007
9008 /* Return true if return value of call STMT is known to be non-negative.
9009 If the return value is based on the assumption that signed overflow is
9010 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9011 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9012
9013 static bool
gimple_phi_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)9014 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9015 int depth)
9016 {
9017 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9018 {
9019 tree arg = gimple_phi_arg_def (stmt, i);
9020 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9021 return false;
9022 }
9023 return true;
9024 }
9025
9026 /* Return true if STMT is known to compute a non-negative value.
9027 If the return value is based on the assumption that signed overflow is
9028 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9030
9031 bool
gimple_stmt_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)9032 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9033 int depth)
9034 {
9035 switch (gimple_code (stmt))
9036 {
9037 case GIMPLE_ASSIGN:
9038 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
9039 depth);
9040 case GIMPLE_CALL:
9041 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
9042 depth);
9043 case GIMPLE_PHI:
9044 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
9045 depth);
9046 default:
9047 return false;
9048 }
9049 }
9050
9051 /* Return true if the floating-point value computed by assignment STMT
9052 is known to have an integer value. We also allow +Inf, -Inf and NaN
9053 to be considered integer values. Return false for signaling NaN.
9054
9055 DEPTH is the current nesting depth of the query. */
9056
9057 static bool
gimple_assign_integer_valued_real_p(gimple * stmt,int depth)9058 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9059 {
9060 enum tree_code code = gimple_assign_rhs_code (stmt);
9061 switch (get_gimple_rhs_class (code))
9062 {
9063 case GIMPLE_UNARY_RHS:
9064 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9065 gimple_assign_rhs1 (stmt), depth);
9066 case GIMPLE_BINARY_RHS:
9067 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9068 gimple_assign_rhs1 (stmt),
9069 gimple_assign_rhs2 (stmt), depth);
9070 case GIMPLE_TERNARY_RHS:
9071 return false;
9072 case GIMPLE_SINGLE_RHS:
9073 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9074 case GIMPLE_INVALID_RHS:
9075 break;
9076 }
9077 gcc_unreachable ();
9078 }
9079
9080 /* Return true if the floating-point value computed by call STMT is known
9081 to have an integer value. We also allow +Inf, -Inf and NaN to be
9082 considered integer values. Return false for signaling NaN.
9083
9084 DEPTH is the current nesting depth of the query. */
9085
9086 static bool
gimple_call_integer_valued_real_p(gimple * stmt,int depth)9087 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9088 {
9089 tree arg0 = (gimple_call_num_args (stmt) > 0
9090 ? gimple_call_arg (stmt, 0)
9091 : NULL_TREE);
9092 tree arg1 = (gimple_call_num_args (stmt) > 1
9093 ? gimple_call_arg (stmt, 1)
9094 : NULL_TREE);
9095 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9096 arg0, arg1, depth);
9097 }
9098
9099 /* Return true if the floating-point result of phi STMT is known to have
9100 an integer value. We also allow +Inf, -Inf and NaN to be considered
9101 integer values. Return false for signaling NaN.
9102
9103 DEPTH is the current nesting depth of the query. */
9104
9105 static bool
gimple_phi_integer_valued_real_p(gimple * stmt,int depth)9106 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9107 {
9108 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9109 {
9110 tree arg = gimple_phi_arg_def (stmt, i);
9111 if (!integer_valued_real_single_p (arg, depth + 1))
9112 return false;
9113 }
9114 return true;
9115 }
9116
9117 /* Return true if the floating-point value computed by STMT is known
9118 to have an integer value. We also allow +Inf, -Inf and NaN to be
9119 considered integer values. Return false for signaling NaN.
9120
9121 DEPTH is the current nesting depth of the query. */
9122
9123 bool
gimple_stmt_integer_valued_real_p(gimple * stmt,int depth)9124 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9125 {
9126 switch (gimple_code (stmt))
9127 {
9128 case GIMPLE_ASSIGN:
9129 return gimple_assign_integer_valued_real_p (stmt, depth);
9130 case GIMPLE_CALL:
9131 return gimple_call_integer_valued_real_p (stmt, depth);
9132 case GIMPLE_PHI:
9133 return gimple_phi_integer_valued_real_p (stmt, depth);
9134 default:
9135 return false;
9136 }
9137 }
9138