1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
69
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract declarations exist only for debug info generation and
     never have a symbol that could be referenced.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      /* No symtab entry (or no definition) means the local symbol has
	 already been optimized away.  */
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body inlined into every caller is gone as well.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
169
170 /* Create a temporary for TYPE for a statement STMT. If the current function
171 is in SSA form, a SSA name is created. Otherwise a temporary register
172 is made. */
173
174 tree
create_tmp_reg_or_ssa_name(tree type,gimple * stmt)175 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 {
177 if (gimple_in_ssa_p (cfun))
178 return make_ssa_name (type, stmt);
179 else
180 return create_tmp_reg (type);
181 }
182
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize PTR p+ CST into &MEM[PTR + CST] so the result becomes
     a valid gimple invariant (an ADDR_EXPR of a constant reference).  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the address refers to a symbol this unit is not
	 allowed to reference (see can_refer_decl_in_current_unit_p).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address makes the variable addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid gimple invariants;
     return a copy with the flag cleared.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
244
245 /* If SYM is a constant variable with known value, return the value.
246 NULL_TREE is returned otherwise. */
247
248 tree
get_symbol_constant_value(tree sym)249 get_symbol_constant_value (tree sym)
250 {
251 tree val = ctor_for_folding (sym);
252 if (val != error_mark_node)
253 {
254 if (val)
255 {
256 val = canonicalize_constructor_val (unshare_expr (val), sym);
257 if (val && is_gimple_min_invariant (val))
258 return val;
259 else
260 return NULL_TREE;
261 }
262 /* Variables declared 'const' without an initializer
263 have zero as the initializer if they may not be
264 overridden at link or run time. */
265 if (!val
266 && is_gimple_reg_type (TREE_TYPE (sym)))
267 return build_zero_cst (TREE_TYPE (sym));
268 }
269
270 return NULL_TREE;
271 }
272
273
274
275 /* Subroutine of fold_stmt. We perform several simplifications of the
276 memory reference tree EXPR and make sure to re-gimplify them properly
277 after propagation of constant addresses. IS_LHS is true if the
278 reference is supposed to be an lvalue. */
279
280 static tree
maybe_fold_reference(tree expr,bool is_lhs)281 maybe_fold_reference (tree expr, bool is_lhs)
282 {
283 tree result;
284
285 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
286 || TREE_CODE (expr) == REALPART_EXPR
287 || TREE_CODE (expr) == IMAGPART_EXPR)
288 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
289 return fold_unary_loc (EXPR_LOCATION (expr),
290 TREE_CODE (expr),
291 TREE_TYPE (expr),
292 TREE_OPERAND (expr, 0));
293 else if (TREE_CODE (expr) == BIT_FIELD_REF
294 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
295 return fold_ternary_loc (EXPR_LOCATION (expr),
296 TREE_CODE (expr),
297 TREE_TYPE (expr),
298 TREE_OPERAND (expr, 0),
299 TREE_OPERAND (expr, 1),
300 TREE_OPERAND (expr, 2));
301
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
306
307 return NULL_TREE;
308 }
309
310
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are markers, not values; never fold them.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the target list is known to be
		   complete and contains at most one method.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to a conversion of p itself.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  /* A load from a constant decl folds to its known value.  */
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
450
451
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Only statements that may store need a VDEF: aggregate
	 assignments and calls that are not novops/pure/const/noreturn.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first one seen walking
	     backward) inherits the original statement's VDEF; earlier
	     stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
523
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* The call's result is unused; gimplify EXPR for its side
	 effects only.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR to a value and append an assignment of it to the
	 call's lhs at the end of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice in the sequence, threading the virtual operand chain
     through it.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
579
580
581 /* Replace the call at *GSI with the gimple value VAL. */
582
583 void
replace_call_with_value(gimple_stmt_iterator * gsi,tree val)584 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 {
586 gimple *stmt = gsi_stmt (*gsi);
587 tree lhs = gimple_call_lhs (stmt);
588 gimple *repl;
589 if (lhs)
590 {
591 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
592 val = fold_convert (TREE_TYPE (lhs), val);
593 repl = gimple_build_assign (lhs, val);
594 }
595 else
596 repl = gimple_build_nop ();
597 tree vdef = gimple_vdef (stmt);
598 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 {
600 unlink_stmt_vdef (stmt);
601 release_ssa_name (vdef);
602 }
603 gsi_replace (gsi, repl, false);
604 }
605
606 /* Replace the call at *GSI with the new call REPL and fold that
607 again. */
608
609 static void
replace_call_with_call_and_fold(gimple_stmt_iterator * gsi,gimple * repl)610 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 {
612 gimple *stmt = gsi_stmt (*gsi);
613 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
614 gimple_set_location (repl, gimple_location (stmt));
615 if (gimple_vdef (stmt)
616 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 {
618 gimple_set_vdef (repl, gimple_vdef (stmt));
619 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 }
621 if (gimple_vuse (stmt))
622 gimple_set_vuse (repl, gimple_vuse (stmt));
623 gsi_replace (gsi, repl, false);
624 fold_stmt (gsi);
625 }
626
627 /* Return true if VAR is a VAR_DECL or a component thereof. */
628
629 static bool
var_decl_component_p(tree var)630 var_decl_component_p (tree var)
631 {
632 tree inner = var;
633 while (handled_component_p (inner))
634 inner = TREE_OPERAND (inner, 0);
635 return SSA_VAR_P (inner);
636 }
637
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
641
642 static bool
size_must_be_zero_p(tree size)643 size_must_be_zero_p (tree size)
644 {
645 if (integer_zerop (size))
646 return true;
647
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
650
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
655
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
658
659 wide_int wone = wi::one (prec);
660
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
666 }
667
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
674
675 static bool
gimple_fold_builtin_memory_op(gimple_stmt_iterator * gsi,tree dest,tree src,int endp)676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
678 {
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
684
685 bool nowarn = gimple_no_warning_p (stmt);
686
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
690 {
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
698 {
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
701 }
702 gsi_replace (gsi, repl, false);
703 return true;
704 }
705
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
709 {
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
717 {
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
720 }
721 goto done;
722 }
723 else
724 {
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
728
729 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
730 pointers as wide integer) and also may result in huge function
731 size because of inlined bounds copy. Thus don't inline for
732 functions we want to instrument. */
733 if (flag_check_pointer_bounds
734 && chkp_instrumentable_p (cfun->decl)
735 /* Even if data may contain pointers we can inline if copy
736 less than a pointer size. */
737 && (!tree_fits_uhwi_p (len)
738 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
739 return false;
740
741 /* Build accesses at offset zero with a ref-all character type. */
742 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
743 ptr_mode, true), 0);
744
745 /* If we can perform the copy efficiently with first doing all loads
746 and then all stores inline it that way. Currently efficiently
747 means that we can load all the memory into a single integer
748 register which is what MOVE_MAX gives us. */
749 src_align = get_pointer_alignment (src);
750 dest_align = get_pointer_alignment (dest);
751 if (tree_fits_uhwi_p (len)
752 && compare_tree_int (len, MOVE_MAX) <= 0
753 /* ??? Don't transform copies from strings with known length this
754 confuses the tree-ssa-strlen.c. This doesn't handle
755 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
756 reason. */
757 && !c_strlen (src, 2))
758 {
759 unsigned ilen = tree_to_uhwi (len);
760 if (pow2p_hwi (ilen))
761 {
762 /* Detect invalid bounds and overlapping copies and issue
763 either -Warray-bounds or -Wrestrict. */
764 if (!nowarn
765 && check_bounds_or_overlap (as_a <gcall *>(stmt),
766 dest, src, len, len))
767 gimple_set_no_warning (stmt, true);
768
769 scalar_int_mode mode;
770 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
771 if (type
772 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
773 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
774 /* If the destination pointer is not aligned we must be able
775 to emit an unaligned store. */
776 && (dest_align >= GET_MODE_ALIGNMENT (mode)
777 || !targetm.slow_unaligned_access (mode, dest_align)
778 || (optab_handler (movmisalign_optab, mode)
779 != CODE_FOR_nothing)))
780 {
781 tree srctype = type;
782 tree desttype = type;
783 if (src_align < GET_MODE_ALIGNMENT (mode))
784 srctype = build_aligned_type (type, src_align);
785 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
786 tree tem = fold_const_aggregate_ref (srcmem);
787 if (tem)
788 srcmem = tem;
789 else if (src_align < GET_MODE_ALIGNMENT (mode)
790 && targetm.slow_unaligned_access (mode, src_align)
791 && (optab_handler (movmisalign_optab, mode)
792 == CODE_FOR_nothing))
793 srcmem = NULL_TREE;
794 if (srcmem)
795 {
796 gimple *new_stmt;
797 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
798 {
799 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
800 srcmem
801 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
802 new_stmt);
803 gimple_assign_set_lhs (new_stmt, srcmem);
804 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
805 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
806 }
807 if (dest_align < GET_MODE_ALIGNMENT (mode))
808 desttype = build_aligned_type (type, dest_align);
809 new_stmt
810 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
811 dest, off0),
812 srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
815 if (gimple_vdef (new_stmt)
816 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
817 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
818 if (!lhs)
819 {
820 gsi_replace (gsi, new_stmt, false);
821 return true;
822 }
823 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
824 goto done;
825 }
826 }
827 }
828 }
829
830 if (endp == 3)
831 {
832 /* Both DEST and SRC must be pointer types.
833 ??? This is what old code did. Is the testing for pointer types
834 really mandatory?
835
836 If either SRC is readonly or length is 1, we can use memcpy. */
837 if (!dest_align || !src_align)
838 return false;
839 if (readonly_data_expr (src)
840 || (tree_fits_uhwi_p (len)
841 && (MIN (src_align, dest_align) / BITS_PER_UNIT
842 >= tree_to_uhwi (len))))
843 {
844 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
845 if (!fn)
846 return false;
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
850 fold_stmt (gsi);
851 return true;
852 }
853
854 /* If *src and *dest can't overlap, optimize into memcpy as well. */
855 if (TREE_CODE (src) == ADDR_EXPR
856 && TREE_CODE (dest) == ADDR_EXPR)
857 {
858 tree src_base, dest_base, fn;
859 poly_int64 src_offset = 0, dest_offset = 0;
860 poly_uint64 maxsize;
861
862 srcvar = TREE_OPERAND (src, 0);
863 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
864 if (src_base == NULL)
865 src_base = srcvar;
866 destvar = TREE_OPERAND (dest, 0);
867 dest_base = get_addr_base_and_unit_offset (destvar,
868 &dest_offset);
869 if (dest_base == NULL)
870 dest_base = destvar;
871 if (!poly_int_tree_p (len, &maxsize))
872 maxsize = -1;
873 if (SSA_VAR_P (src_base)
874 && SSA_VAR_P (dest_base))
875 {
876 if (operand_equal_p (src_base, dest_base, 0)
877 && ranges_maybe_overlap_p (src_offset, maxsize,
878 dest_offset, maxsize))
879 return false;
880 }
881 else if (TREE_CODE (src_base) == MEM_REF
882 && TREE_CODE (dest_base) == MEM_REF)
883 {
884 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
885 TREE_OPERAND (dest_base, 0), 0))
886 return false;
887 poly_offset_int full_src_offset
888 = mem_ref_offset (src_base) + src_offset;
889 poly_offset_int full_dest_offset
890 = mem_ref_offset (dest_base) + dest_offset;
891 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
892 full_dest_offset, maxsize))
893 return false;
894 }
895 else
896 return false;
897
898 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
899 if (!fn)
900 return false;
901 gimple_call_set_fndecl (stmt, fn);
902 gimple_call_set_arg (stmt, 0, dest);
903 gimple_call_set_arg (stmt, 1, src);
904 fold_stmt (gsi);
905 return true;
906 }
907
908 /* If the destination and source do not alias optimize into
909 memcpy as well. */
910 if ((is_gimple_min_invariant (dest)
911 || TREE_CODE (dest) == SSA_NAME)
912 && (is_gimple_min_invariant (src)
913 || TREE_CODE (src) == SSA_NAME))
914 {
915 ao_ref destr, srcr;
916 ao_ref_init_from_ptr_and_size (&destr, dest, len);
917 ao_ref_init_from_ptr_and_size (&srcr, src, len);
918 if (!refs_may_alias_p_1 (&destr, &srcr, false))
919 {
920 tree fn;
921 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
922 if (!fn)
923 return false;
924 gimple_call_set_fndecl (stmt, fn);
925 gimple_call_set_arg (stmt, 0, dest);
926 gimple_call_set_arg (stmt, 1, src);
927 fold_stmt (gsi);
928 return true;
929 }
930 }
931
932 return false;
933 }
934
935 if (!tree_fits_shwi_p (len))
936 return false;
937 if (!POINTER_TYPE_P (TREE_TYPE (src))
938 || !POINTER_TYPE_P (TREE_TYPE (dest)))
939 return false;
940 /* In the following try to find a type that is most natural to be
941 used for the memcpy source and destination and that allows
942 the most optimization when memcpy is turned into a plain assignment
943 using that type. In theory we could always use a char[len] type
944 but that only gains us that the destination and source possibly
945 no longer will have their address taken. */
946 srctype = TREE_TYPE (TREE_TYPE (src));
947 if (TREE_CODE (srctype) == ARRAY_TYPE
948 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
949 srctype = TREE_TYPE (srctype);
950 desttype = TREE_TYPE (TREE_TYPE (dest));
951 if (TREE_CODE (desttype) == ARRAY_TYPE
952 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
953 desttype = TREE_TYPE (desttype);
954 if (TREE_ADDRESSABLE (srctype)
955 || TREE_ADDRESSABLE (desttype))
956 return false;
957
958 /* Make sure we are not copying using a floating-point mode or
959 a type whose size possibly does not match its precision. */
960 if (FLOAT_MODE_P (TYPE_MODE (desttype))
961 || TREE_CODE (desttype) == BOOLEAN_TYPE
962 || TREE_CODE (desttype) == ENUMERAL_TYPE)
963 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
964 if (FLOAT_MODE_P (TYPE_MODE (srctype))
965 || TREE_CODE (srctype) == BOOLEAN_TYPE
966 || TREE_CODE (srctype) == ENUMERAL_TYPE)
967 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
968 if (!srctype)
969 srctype = desttype;
970 if (!desttype)
971 desttype = srctype;
972 if (!srctype)
973 return false;
974
975 src_align = get_pointer_alignment (src);
976 dest_align = get_pointer_alignment (dest);
977 if (dest_align < TYPE_ALIGN (desttype)
978 || src_align < TYPE_ALIGN (srctype))
979 return false;
980
981 destvar = NULL_TREE;
982 if (TREE_CODE (dest) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (dest, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
985 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
986
987 srcvar = NULL_TREE;
988 if (TREE_CODE (src) == ADDR_EXPR
989 && var_decl_component_p (TREE_OPERAND (src, 0))
990 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
991 {
992 if (!destvar
993 || src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
995 src, off0);
996 else if (!STRICT_ALIGNMENT)
997 {
998 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
999 src_align);
1000 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1001 }
1002 }
1003
1004 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1005 return false;
1006
1007 if (srcvar == NULL_TREE)
1008 {
1009 if (src_align >= TYPE_ALIGN (desttype))
1010 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1011 else
1012 {
1013 if (STRICT_ALIGNMENT)
1014 return false;
1015 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1016 src_align);
1017 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1018 }
1019 }
1020 else if (destvar == NULL_TREE)
1021 {
1022 if (dest_align >= TYPE_ALIGN (srctype))
1023 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1024 else
1025 {
1026 if (STRICT_ALIGNMENT)
1027 return false;
1028 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1029 dest_align);
1030 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1031 }
1032 }
1033
1034 /* Detect invalid bounds and overlapping copies and issue either
1035 -Warray-bounds or -Wrestrict. */
1036 if (!nowarn)
1037 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1038
1039 gimple *new_stmt;
1040 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1041 {
1042 tree tem = fold_const_aggregate_ref (srcvar);
1043 if (tem)
1044 srcvar = tem;
1045 if (! is_gimple_min_invariant (srcvar))
1046 {
1047 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1048 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1049 new_stmt);
1050 gimple_assign_set_lhs (new_stmt, srcvar);
1051 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1052 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1053 }
1054 new_stmt = gimple_build_assign (destvar, srcvar);
1055 goto set_vop_and_replace;
1056 }
1057
1058 /* We get an aggregate copy. Use an unsigned char[] type to
1059 perform the copying to preserve padding and to avoid any issues
1060 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1061 desttype = build_array_type_nelts (unsigned_char_type_node,
1062 tree_to_uhwi (len));
1063 srctype = desttype;
1064 if (src_align > TYPE_ALIGN (srctype))
1065 srctype = build_aligned_type (srctype, src_align);
1066 if (dest_align > TYPE_ALIGN (desttype))
1067 desttype = build_aligned_type (desttype, dest_align);
1068 new_stmt
1069 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1070 fold_build2 (MEM_REF, srctype, src, off0));
1071 set_vop_and_replace:
1072 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1073 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1074 if (gimple_vdef (new_stmt)
1075 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1076 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1077 if (!lhs)
1078 {
1079 gsi_replace (gsi, new_stmt, false);
1080 return true;
1081 }
1082 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1083 }
1084
1085 done:
1086 gimple_seq stmts = NULL;
1087 if (endp == 0 || endp == 3)
1088 len = NULL_TREE;
1089 else if (endp == 2)
1090 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1091 ssize_int (1));
1092 if (endp == 2 || endp == 1)
1093 {
1094 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1095 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1096 TREE_TYPE (dest), dest, len);
1097 }
1098
1099 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1100 gimple *repl = gimple_build_assign (lhs, dest);
1101 gsi_replace (gsi, repl, false);
1102 return true;
1103 }
1104
1105 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1106 to built-in memcmp (a, b, len). */
1107
1108 static bool
gimple_fold_builtin_bcmp(gimple_stmt_iterator * gsi)1109 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1110 {
1111 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1112
1113 if (!fn)
1114 return false;
1115
1116 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1117
1118 gimple *stmt = gsi_stmt (*gsi);
1119 tree a = gimple_call_arg (stmt, 0);
1120 tree b = gimple_call_arg (stmt, 1);
1121 tree len = gimple_call_arg (stmt, 2);
1122
1123 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1124 replace_call_with_call_and_fold (gsi, repl);
1125
1126 return true;
1127 }
1128
1129 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1130 to built-in memmove (dest, src, len). */
1131
1132 static bool
gimple_fold_builtin_bcopy(gimple_stmt_iterator * gsi)1133 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1134 {
1135 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1136
1137 if (!fn)
1138 return false;
1139
1140 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1141 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1142 len) into memmove (dest, src, len). */
1143
1144 gimple *stmt = gsi_stmt (*gsi);
1145 tree src = gimple_call_arg (stmt, 0);
1146 tree dest = gimple_call_arg (stmt, 1);
1147 tree len = gimple_call_arg (stmt, 2);
1148
1149 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1150 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1151 replace_call_with_call_and_fold (gsi, repl);
1152
1153 return true;
1154 }
1155
1156 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1157 to built-in memset (dest, 0, len). */
1158
1159 static bool
gimple_fold_builtin_bzero(gimple_stmt_iterator * gsi)1160 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1161 {
1162 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1163
1164 if (!fn)
1165 return false;
1166
1167 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1168
1169 gimple *stmt = gsi_stmt (*gsi);
1170 tree dest = gimple_call_arg (stmt, 0);
1171 tree len = gimple_call_arg (stmt, 1);
1172
1173 gimple_seq seq = NULL;
1174 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1175 gimple_seq_add_stmt_without_update (&seq, repl);
1176 gsi_replace_with_seq_vops (gsi, seq);
1177 fold_stmt (gsi);
1178
1179 return true;
1180 }
1181
1182 /* Fold function call to builtin memset or bzero at *GSI setting the
1183 memory of size LEN to VAL. Return whether a simplification was made. */
1184
1185 static bool
gimple_fold_builtin_memset(gimple_stmt_iterator * gsi,tree c,tree len)1186 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1187 {
1188 gimple *stmt = gsi_stmt (*gsi);
1189 tree etype;
1190 unsigned HOST_WIDE_INT length, cval;
1191
1192 /* If the LEN parameter is zero, return DEST. */
1193 if (integer_zerop (len))
1194 {
1195 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1196 return true;
1197 }
1198
1199 if (! tree_fits_uhwi_p (len))
1200 return false;
1201
1202 if (TREE_CODE (c) != INTEGER_CST)
1203 return false;
1204
1205 tree dest = gimple_call_arg (stmt, 0);
1206 tree var = dest;
1207 if (TREE_CODE (var) != ADDR_EXPR)
1208 return false;
1209
1210 var = TREE_OPERAND (var, 0);
1211 if (TREE_THIS_VOLATILE (var))
1212 return false;
1213
1214 etype = TREE_TYPE (var);
1215 if (TREE_CODE (etype) == ARRAY_TYPE)
1216 etype = TREE_TYPE (etype);
1217
1218 if (!INTEGRAL_TYPE_P (etype)
1219 && !POINTER_TYPE_P (etype))
1220 return NULL_TREE;
1221
1222 if (! var_decl_component_p (var))
1223 return NULL_TREE;
1224
1225 length = tree_to_uhwi (len);
1226 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1227 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1228 return NULL_TREE;
1229
1230 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1231 return NULL_TREE;
1232
1233 if (integer_zerop (c))
1234 cval = 0;
1235 else
1236 {
1237 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1238 return NULL_TREE;
1239
1240 cval = TREE_INT_CST_LOW (c);
1241 cval &= 0xff;
1242 cval |= cval << 8;
1243 cval |= cval << 16;
1244 cval |= (cval << 31) << 1;
1245 }
1246
1247 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1248 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1249 gimple_set_vuse (store, gimple_vuse (stmt));
1250 tree vdef = gimple_vdef (stmt);
1251 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1252 {
1253 gimple_set_vdef (store, gimple_vdef (stmt));
1254 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1255 }
1256 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1257 if (gimple_call_lhs (stmt))
1258 {
1259 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1260 gsi_replace (gsi, asgn, false);
1261 }
1262 else
1263 {
1264 gimple_stmt_iterator gsi2 = *gsi;
1265 gsi_prev (gsi);
1266 gsi_remove (&gsi2, true);
1267 }
1268
1269 return true;
1270 }
1271
1272
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &(*ptr)[0]: recurse on the pointer itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail when a constant index exceeds the array's
		 constant upper bound.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value of interest and must
	     be a non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* In fuzzy mode, when the exact length is unknown fall back to
	 the size of a character array ARG may refer to.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Upper bound is the array size less one for the
		 terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Lower *MINLEN when in range mode (TYPE > 0) and VAL is a
	 smaller constant; otherwise only initialize it.  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* In range mode both bounds must be constants to be
		 comparable; grow *MAXLEN as needed.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0 requires every determined length to agree.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp))
	      {
		/* FUZZY == 2: be optimistic and record an unbounded
		   maximum for the arm we cannot analyze.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	    {
	      /* As above: FUZZY == 2 degrades to an unbounded maximum
		 instead of failing outright.  */
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1542
1543 /* Determine the minimum and maximum value or string length that ARG
1544 refers to and store each in the first two elements of MINMAXLEN.
1545 For expressions that point to strings of unknown lengths that are
1546 character arrays, use the upper bound of the array as the maximum
1547 length. For example, given an expression like 'x ? array : "xyz"'
1548 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1549 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1550 stored in array.
1551 Return true if the range of the string lengths has been obtained
1552 from the upper bound of an array at the end of a struct. Such
1553 an array may hold a string that's longer than its upper bound
1554 due to it being used as a poor-man's flexible array member.
1555
1556 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1557 and false if PHIs and COND_EXPRs are to be handled optimistically,
1558 if we can determine string length minimum and maximum; it will use
1559 the minimum from the ones where it can be determined.
1560 STRICT false should be only used for warning code. */
1561
1562 bool
get_range_strlen(tree arg,tree minmaxlen[2],bool strict)1563 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1564 {
1565 bitmap visited = NULL;
1566
1567 minmaxlen[0] = NULL_TREE;
1568 minmaxlen[1] = NULL_TREE;
1569
1570 bool flexarray = false;
1571 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1572 &flexarray))
1573 {
1574 minmaxlen[0] = NULL_TREE;
1575 minmaxlen[1] = NULL_TREE;
1576 }
1577
1578 if (visited)
1579 BITMAP_FREE (visited);
1580
1581 return flexarray;
1582 }
1583
1584 tree
get_maxval_strlen(tree arg,int type)1585 get_maxval_strlen (tree arg, int type)
1586 {
1587 bitmap visited = NULL;
1588 tree len[2] = { NULL_TREE, NULL_TREE };
1589
1590 bool dummy;
1591 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1592 len[1] = NULL_TREE;
1593 if (visited)
1594 BITMAP_FREE (visited);
1595
1596 return len[1];
1597 }
1598
1599
1600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1601 If LEN is not NULL, it represents the length of the string to be
1602 copied. Return NULL_TREE if no simplification can be made. */
1603
1604 static bool
gimple_fold_builtin_strcpy(gimple_stmt_iterator * gsi,tree dest,tree src)1605 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1606 tree dest, tree src)
1607 {
1608 gimple *stmt = gsi_stmt (*gsi);
1609 location_t loc = gimple_location (stmt);
1610 tree fn;
1611
1612 /* If SRC and DEST are the same (and not volatile), return DEST. */
1613 if (operand_equal_p (src, dest, 0))
1614 {
1615 /* Issue -Wrestrict unless the pointers are null (those do
1616 not point to objects and so do not indicate an overlap;
1617 such calls could be the result of sanitization and jump
1618 threading). */
1619 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1620 {
1621 tree func = gimple_call_fndecl (stmt);
1622
1623 warning_at (loc, OPT_Wrestrict,
1624 "%qD source argument is the same as destination",
1625 func);
1626 }
1627
1628 replace_call_with_value (gsi, dest);
1629 return true;
1630 }
1631
1632 if (optimize_function_for_size_p (cfun))
1633 return false;
1634
1635 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1636 if (!fn)
1637 return false;
1638
1639 tree len = get_maxval_strlen (src, 0);
1640 if (!len)
1641 return false;
1642
1643 len = fold_convert_loc (loc, size_type_node, len);
1644 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1645 len = force_gimple_operand_gsi (gsi, len, true,
1646 NULL_TREE, true, GSI_SAME_STMT);
1647 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1648 replace_call_with_call_and_fold (gsi, repl);
1649 return true;
1650 }
1651
/* Fold a call to builtin strncpy at *GSI with arguments DEST, SRC,
   and LEN.  Return true if the call was simplified (replaced by its
   value or turned into a memcpy call), false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring, i.e. is not
     expected to hold a nul-terminated string.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 declared with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  At this point LEN <= SSIZE,
     so reading LEN bytes stays within the source string (including
     its terminating nul).  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1727
/* Fold a call to builtin strchr or strrchr at *GSI; IS_STRRCHR selects
   which.  If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if the call was simplified, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Nothing to fold if the result is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: do the search at compile time using the
     host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* No match: the call's value is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to STR plus the constant offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining folds only apply when searching for the nul byte.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1811
1812 /* Fold function call to builtin strstr.
1813 If both arguments are constant, evaluate and fold the result,
1814 additionally fold strstr (x, "") into x and strstr (x, "c")
1815 into strchr (x, 'c'). */
1816 static bool
gimple_fold_builtin_strstr(gimple_stmt_iterator * gsi)1817 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1818 {
1819 gimple *stmt = gsi_stmt (*gsi);
1820 tree haystack = gimple_call_arg (stmt, 0);
1821 tree needle = gimple_call_arg (stmt, 1);
1822 const char *p, *q;
1823
1824 if (!gimple_call_lhs (stmt))
1825 return false;
1826
1827 q = c_getstr (needle);
1828 if (q == NULL)
1829 return false;
1830
1831 if ((p = c_getstr (haystack)))
1832 {
1833 const char *r = strstr (p, q);
1834
1835 if (r == NULL)
1836 {
1837 replace_call_with_value (gsi, integer_zero_node);
1838 return true;
1839 }
1840
1841 tree len = build_int_cst (size_type_node, r - p);
1842 gimple_seq stmts = NULL;
1843 gimple *new_stmt
1844 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1845 haystack, len);
1846 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1847 gsi_replace_with_seq_vops (gsi, stmts);
1848 return true;
1849 }
1850
1851 /* For strstr (x, "") return x. */
1852 if (q[0] == '\0')
1853 {
1854 replace_call_with_value (gsi, haystack);
1855 return true;
1856 }
1857
1858 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1859 if (q[1] == '\0')
1860 {
1861 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1862 if (strchr_fn)
1863 {
1864 tree c = build_int_cst (integer_type_node, q[0]);
1865 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1866 replace_call_with_call_and_fold (gsi, repl);
1867 return true;
1868 }
1869 }
1870
1871 return false;
1872 }
1873
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   Folds strcat (dst, "") into DST; otherwise, when the source length
   is known and the block is optimized for speed, expands the call
   into strlen (dst) followed by memcpy (dst + strlen (dst), src,
   strlen (src) + 1).

   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The expansion below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; preserve a used result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1965
1966 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1967 are the arguments to the call. */
1968
1969 static bool
gimple_fold_builtin_strcat_chk(gimple_stmt_iterator * gsi)1970 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1971 {
1972 gimple *stmt = gsi_stmt (*gsi);
1973 tree dest = gimple_call_arg (stmt, 0);
1974 tree src = gimple_call_arg (stmt, 1);
1975 tree size = gimple_call_arg (stmt, 2);
1976 tree fn;
1977 const char *p;
1978
1979
1980 p = c_getstr (src);
1981 /* If the SRC parameter is "", return DEST. */
1982 if (p && *p == '\0')
1983 {
1984 replace_call_with_value (gsi, dest);
1985 return true;
1986 }
1987
1988 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1989 return false;
1990
1991 /* If __builtin_strcat_chk is used, assume strcat is available. */
1992 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1993 if (!fn)
1994 return false;
1995
1996 gimple *repl = gimple_build_call (fn, 2, dest, src);
1997 replace_call_with_call_and_fold (gsi, repl);
1998 return true;
1999 }
2000
/* Simplify a call to the strncat builtin at GSI.  The destination,
   source, and bound are operands 0, 1 and 2 of the call.  May also
   issue -Wstringop-overflow diagnostics for suspicious bounds.
   Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* P is non-null only when SRC is a NUL-terminated string constant.  */
  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Everything below needs both a constant bound and constant
     source contents.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  /* Respect a previously set no-warning flag on the statement.  */
  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress duplicate diagnostics from later passes.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat: the bound is known to cover
     the whole source string, so the calls are equivalent.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2087
2088 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2089 LEN, and SIZE. */
2090
2091 static bool
gimple_fold_builtin_strncat_chk(gimple_stmt_iterator * gsi)2092 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2093 {
2094 gimple *stmt = gsi_stmt (*gsi);
2095 tree dest = gimple_call_arg (stmt, 0);
2096 tree src = gimple_call_arg (stmt, 1);
2097 tree len = gimple_call_arg (stmt, 2);
2098 tree size = gimple_call_arg (stmt, 3);
2099 tree fn;
2100 const char *p;
2101
2102 p = c_getstr (src);
2103 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2104 if ((p && *p == '\0')
2105 || integer_zerop (len))
2106 {
2107 replace_call_with_value (gsi, dest);
2108 return true;
2109 }
2110
2111 if (! tree_fits_uhwi_p (size))
2112 return false;
2113
2114 if (! integer_all_onesp (size))
2115 {
2116 tree src_len = c_strlen (src, 1);
2117 if (src_len
2118 && tree_fits_uhwi_p (src_len)
2119 && tree_fits_uhwi_p (len)
2120 && ! tree_int_cst_lt (len, src_len))
2121 {
2122 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2123 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2124 if (!fn)
2125 return false;
2126
2127 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2128 replace_call_with_call_and_fold (gsi, repl);
2129 return true;
2130 }
2131 return false;
2132 }
2133
2134 /* If __builtin_strncat_chk is used, assume strncat is available. */
2135 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2136 if (!fn)
2137 return false;
2138
2139 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2140 replace_call_with_call_and_fold (gsi, repl);
2141 return true;
2142 }
2143
2144 /* Build and append gimple statements to STMTS that would load a first
2145 character of a memory location identified by STR. LOC is location
2146 of the statement. */
2147
2148 static tree
gimple_load_first_char(location_t loc,tree str,gimple_seq * stmts)2149 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2150 {
2151 tree var;
2152
2153 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2154 tree cst_uchar_ptr_node
2155 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2156 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2157
2158 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2159 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2160 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2161
2162 gimple_assign_set_lhs (stmt, var);
2163 gimple_seq_add_stmt_without_update (stmts, stmt);
2164
2165 return var;
2166 }
2167
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  The builtin being folded (strcmp, strncmp, strcasecmp
   or strncasecmp) is identified from the call's fndecl.  Returns
   true if the call was simplified.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Bound for the strn* variants; -1 means no known constant bound.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Non-null only when the argument is a NUL-terminated string
     constant.  */
  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* A case-sensitive comparison yielding 0 implies the
	       case-insensitive result is 0 too; any nonzero result
	       is unknown without target locale information.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen the loaded character to int before negating.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen both characters to int, then subtract.  */
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2339
2340 /* Fold a call to the memchr pointed by GSI iterator. */
2341
2342 static bool
gimple_fold_builtin_memchr(gimple_stmt_iterator * gsi)2343 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2344 {
2345 gimple *stmt = gsi_stmt (*gsi);
2346 tree lhs = gimple_call_lhs (stmt);
2347 tree arg1 = gimple_call_arg (stmt, 0);
2348 tree arg2 = gimple_call_arg (stmt, 1);
2349 tree len = gimple_call_arg (stmt, 2);
2350
2351 /* If the LEN parameter is zero, return zero. */
2352 if (integer_zerop (len))
2353 {
2354 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2355 return true;
2356 }
2357
2358 char c;
2359 if (TREE_CODE (arg2) != INTEGER_CST
2360 || !tree_fits_uhwi_p (len)
2361 || !target_char_cst_p (arg2, &c))
2362 return false;
2363
2364 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2365 unsigned HOST_WIDE_INT string_length;
2366 const char *p1 = c_getstr (arg1, &string_length);
2367
2368 if (p1)
2369 {
2370 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2371 if (r == NULL)
2372 {
2373 if (length <= string_length)
2374 {
2375 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2376 return true;
2377 }
2378 }
2379 else
2380 {
2381 unsigned HOST_WIDE_INT offset = r - p1;
2382 gimple_seq stmts = NULL;
2383 if (lhs != NULL_TREE)
2384 {
2385 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2386 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2387 arg1, offset_cst);
2388 gimple_seq_add_stmt_without_update (&stmts, stmt);
2389 }
2390 else
2391 gimple_seq_add_stmt_without_update (&stmts,
2392 gimple_build_nop ());
2393
2394 gsi_replace_with_seq_vops (gsi, stmts);
2395 return true;
2396 }
2397 }
2398
2399 return false;
2400 }
2401
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the string and
   stream arguments to the call.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  The transformation is only applied when
   the call's return value is unused.  Returns true if the call was
   simplified (folded to fputc/fwrite or removed entirely), false if
   a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2478
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call (for
   __memset_chk, SRC is the fill value).  FCODE is the BUILT_IN_*
   code of the builtin being folded.  Returns true if the call was
   simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's result is unused; this allows folding
     __mempcpy_chk to __memcpy_chk below.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on LEN when LEN is not itself constant.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The operation must provably fit in the destination object.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2579
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin being folded.  Returns true if the
   call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's result is unused; this allows folding
     __stpcpy_chk to __strcpy_chk below.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on strlen (SRC) when its length is not constant.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      /* Copy strlen (SRC) + 1 bytes so the terminating NUL
		 is included.  */
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string (including its NUL) must provably fit in the
	 destination object.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2683
2684 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2685 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2686 length passed as third argument. IGNORE is true if return value can be
2687 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2688
2689 static bool
gimple_fold_builtin_stxncpy_chk(gimple_stmt_iterator * gsi,tree dest,tree src,tree len,tree size,enum built_in_function fcode)2690 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2691 tree dest, tree src,
2692 tree len, tree size,
2693 enum built_in_function fcode)
2694 {
2695 gimple *stmt = gsi_stmt (*gsi);
2696 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2697 tree fn;
2698
2699 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2700 {
2701 /* If return value of __stpncpy_chk is ignored,
2702 optimize into __strncpy_chk. */
2703 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2704 if (fn)
2705 {
2706 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2707 replace_call_with_call_and_fold (gsi, repl);
2708 return true;
2709 }
2710 }
2711
2712 if (! tree_fits_uhwi_p (size))
2713 return false;
2714
2715 tree maxlen = get_maxval_strlen (len, 2);
2716 if (! integer_all_onesp (size))
2717 {
2718 if (! tree_fits_uhwi_p (len))
2719 {
2720 /* If LEN is not constant, try MAXLEN too.
2721 For MAXLEN only allow optimizing into non-_ocs function
2722 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2723 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2724 return false;
2725 }
2726 else
2727 maxlen = len;
2728
2729 if (tree_int_cst_lt (size, maxlen))
2730 return false;
2731 }
2732
2733 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2734 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2735 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2736 if (!fn)
2737 return false;
2738
2739 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2740 replace_call_with_call_and_fold (gsi, repl);
2741 return true;
2742 }
2743
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Returns true if the call was simplified, false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Move the virtual operands from the original call onto the new
     memcpy call, which takes over its memory side effects.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2807
2808 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2809 NULL_TREE if a normal call should be emitted rather than expanding
2810 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2811 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2812 passed as second argument. */
2813
2814 static bool
gimple_fold_builtin_snprintf_chk(gimple_stmt_iterator * gsi,enum built_in_function fcode)2815 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2816 enum built_in_function fcode)
2817 {
2818 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2819 tree dest, size, len, fn, fmt, flag;
2820 const char *fmt_str;
2821
2822 /* Verify the required arguments in the original call. */
2823 if (gimple_call_num_args (stmt) < 5)
2824 return false;
2825
2826 dest = gimple_call_arg (stmt, 0);
2827 len = gimple_call_arg (stmt, 1);
2828 flag = gimple_call_arg (stmt, 2);
2829 size = gimple_call_arg (stmt, 3);
2830 fmt = gimple_call_arg (stmt, 4);
2831
2832 if (! tree_fits_uhwi_p (size))
2833 return false;
2834
2835 if (! integer_all_onesp (size))
2836 {
2837 tree maxlen = get_maxval_strlen (len, 2);
2838 if (! tree_fits_uhwi_p (len))
2839 {
2840 /* If LEN is not constant, try MAXLEN too.
2841 For MAXLEN only allow optimizing into non-_ocs function
2842 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2843 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2844 return false;
2845 }
2846 else
2847 maxlen = len;
2848
2849 if (tree_int_cst_lt (size, maxlen))
2850 return false;
2851 }
2852
2853 if (!init_target_chars ())
2854 return false;
2855
2856 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2857 or if format doesn't contain % chars or is "%s". */
2858 if (! integer_zerop (flag))
2859 {
2860 fmt_str = c_getstr (fmt);
2861 if (fmt_str == NULL)
2862 return false;
2863 if (strchr (fmt_str, target_percent) != NULL
2864 && strcmp (fmt_str, target_percent_s))
2865 return false;
2866 }
2867
2868 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2869 available. */
2870 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2871 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2872 if (!fn)
2873 return false;
2874
2875 /* Replace the called function and the first 5 argument by 3 retaining
2876 trailing varargs. */
2877 gimple_call_set_fndecl (stmt, fn);
2878 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2879 gimple_call_set_arg (stmt, 0, dest);
2880 gimple_call_set_arg (stmt, 1, len);
2881 gimple_call_set_arg (stmt, 2, fmt);
2882 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2883 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2884 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2885 fold_stmt (gsi);
2886 return true;
2887 }
2888
2889 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2890 Return NULL_TREE if a normal call should be emitted rather than
2891 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2892 or BUILT_IN_VSPRINTF_CHK. */
2893
2894 static bool
gimple_fold_builtin_sprintf_chk(gimple_stmt_iterator * gsi,enum built_in_function fcode)2895 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2896 enum built_in_function fcode)
2897 {
2898 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2899 tree dest, size, len, fn, fmt, flag;
2900 const char *fmt_str;
2901 unsigned nargs = gimple_call_num_args (stmt);
2902
2903 /* Verify the required arguments in the original call. */
2904 if (nargs < 4)
2905 return false;
2906 dest = gimple_call_arg (stmt, 0);
2907 flag = gimple_call_arg (stmt, 1);
2908 size = gimple_call_arg (stmt, 2);
2909 fmt = gimple_call_arg (stmt, 3);
2910
2911 if (! tree_fits_uhwi_p (size))
2912 return false;
2913
2914 len = NULL_TREE;
2915
2916 if (!init_target_chars ())
2917 return false;
2918
2919 /* Check whether the format is a literal string constant. */
2920 fmt_str = c_getstr (fmt);
2921 if (fmt_str != NULL)
2922 {
2923 /* If the format doesn't contain % args or %%, we know the size. */
2924 if (strchr (fmt_str, target_percent) == 0)
2925 {
2926 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2927 len = build_int_cstu (size_type_node, strlen (fmt_str));
2928 }
2929 /* If the format is "%s" and first ... argument is a string literal,
2930 we know the size too. */
2931 else if (fcode == BUILT_IN_SPRINTF_CHK
2932 && strcmp (fmt_str, target_percent_s) == 0)
2933 {
2934 tree arg;
2935
2936 if (nargs == 5)
2937 {
2938 arg = gimple_call_arg (stmt, 4);
2939 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2940 {
2941 len = c_strlen (arg, 1);
2942 if (! len || ! tree_fits_uhwi_p (len))
2943 len = NULL_TREE;
2944 }
2945 }
2946 }
2947 }
2948
2949 if (! integer_all_onesp (size))
2950 {
2951 if (! len || ! tree_int_cst_lt (len, size))
2952 return false;
2953 }
2954
2955 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2956 or if format doesn't contain % chars or is "%s". */
2957 if (! integer_zerop (flag))
2958 {
2959 if (fmt_str == NULL)
2960 return false;
2961 if (strchr (fmt_str, target_percent) != NULL
2962 && strcmp (fmt_str, target_percent_s))
2963 return false;
2964 }
2965
2966 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2967 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2968 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2969 if (!fn)
2970 return false;
2971
2972 /* Replace the called function and the first 4 argument by 2 retaining
2973 trailing varargs. */
2974 gimple_call_set_fndecl (stmt, fn);
2975 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2976 gimple_call_set_arg (stmt, 0, dest);
2977 gimple_call_set_arg (stmt, 1, fmt);
2978 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2979 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2980 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2981 fold_stmt (gsi);
2982 return true;
2983 }
2984
2985 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2986 ORIG may be null if this is a 2-argument call. We don't attempt to
2987 simplify calls with more than 3 arguments.
2988
2989 Return true if simplification was possible, otherwise false. */
2990
2991 bool
gimple_fold_builtin_sprintf(gimple_stmt_iterator * gsi)2992 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2993 {
2994 gimple *stmt = gsi_stmt (*gsi);
2995 tree dest = gimple_call_arg (stmt, 0);
2996 tree fmt = gimple_call_arg (stmt, 1);
2997 tree orig = NULL_TREE;
2998 const char *fmt_str = NULL;
2999 
3000 /* Verify the required arguments in the original call. We deal with two
3001 types of sprintf() calls: 'sprintf (str, fmt)' and
3002 'sprintf (dest, "%s", orig)'. */
3003 if (gimple_call_num_args (stmt) > 3)
3004 return false;
3005 
3006 if (gimple_call_num_args (stmt) == 3)
3007 orig = gimple_call_arg (stmt, 2);
3008 
3009 /* Check whether the format is a literal string constant. */
3010 fmt_str = c_getstr (fmt);
3011 if (fmt_str == NULL)
3012 return false;
3013 
3014 if (!init_target_chars ())
3015 return false;
3016 
3017 /* If the format doesn't contain % args or %%, use strcpy. */
3018 if (strchr (fmt_str, target_percent) == NULL)
3019 {
3020 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3021 
3022 if (!fn)
3023 return false;
3024 
3025 /* Don't optimize sprintf (buf, "abc", ptr++). */
3026 if (orig)
3027 return false;
3028 
3029 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3030 'format' is known to contain no % formats. */
3031 gimple_seq stmts = NULL;
3032 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3033 gimple_seq_add_stmt_without_update (&stmts, repl);
/* sprintf returns the number of characters written; with a %-free
   format that is exactly strlen (fmt_str), so materialize it as a
   constant assignment to the original lhs.  */
3034 if (tree lhs = gimple_call_lhs (stmt))
3035 {
3036 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3037 strlen (fmt_str)));
3038 gimple_seq_add_stmt_without_update (&stmts, repl);
3039 gsi_replace_with_seq_vops (gsi, stmts);
3040 /* gsi now points at the assignment to the lhs, get a
3041 stmt iterator to the strcpy call.
3042 ??? We can't use gsi_for_stmt as that doesn't work when the
3043 CFG isn't built yet. */
3044 gimple_stmt_iterator gsi2 = *gsi;
3045 gsi_prev (&gsi2);
3046 fold_stmt (&gsi2);
3047 }
3048 else
3049 {
3050 gsi_replace_with_seq_vops (gsi, stmts);
3051 fold_stmt (gsi);
3052 }
3053 return true;
3054 }
3055 
3056 /* If the format is "%s", use strcpy; when the call's result is used we
also need ORIG's constant length so the return value can be replaced.  */
3057 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3058 {
3059 tree fn;
3060 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3061 
3062 if (!fn)
3063 return false;
3064 
3065 /* Don't crash on sprintf (str1, "%s"). */
3066 if (!orig)
3067 return false;
3068 
/* The length of ORIG is only needed when the return value is used;
   punt if it is used but the length is unknown.  */
3069 tree orig_len = NULL_TREE;
3070 if (gimple_call_lhs (stmt))
3071 {
3072 orig_len = get_maxval_strlen (orig, 0);
3073 if (!orig_len)
3074 return false;
3075 }
3076 
3077 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3078 gimple_seq stmts = NULL;
3079 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3080 gimple_seq_add_stmt_without_update (&stmts, repl);
3081 if (tree lhs = gimple_call_lhs (stmt))
3082 {
3083 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3084 TREE_TYPE (orig_len)))
3085 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3086 repl = gimple_build_assign (lhs, orig_len);
3087 gimple_seq_add_stmt_without_update (&stmts, repl);
3088 gsi_replace_with_seq_vops (gsi, stmts);
3089 /* gsi now points at the assignment to the lhs, get a
3090 stmt iterator to the strcpy call.
3091 ??? We can't use gsi_for_stmt as that doesn't work when the
3092 CFG isn't built yet. */
3093 gimple_stmt_iterator gsi2 = *gsi;
3094 gsi_prev (&gsi2);
3095 fold_stmt (&gsi2);
3096 }
3097 else
3098 {
3099 gsi_replace_with_seq_vops (gsi, stmts);
3100 fold_stmt (gsi);
3101 }
3102 return true;
3103 }
3104 return false;
3105 }
3106
3107 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3108 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3109 attempt to simplify calls with more than 4 arguments.
3110
3111 Return true if simplification was possible, otherwise false. */
3112
3113 bool
gimple_fold_builtin_snprintf(gimple_stmt_iterator * gsi)3114 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3115 {
3116 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3117 tree dest = gimple_call_arg (stmt, 0);
3118 tree destsize = gimple_call_arg (stmt, 1);
3119 tree fmt = gimple_call_arg (stmt, 2);
3120 tree orig = NULL_TREE;
3121 const char *fmt_str = NULL;
3122 
3123 if (gimple_call_num_args (stmt) > 4)
3124 return false;
3125 
3126 if (gimple_call_num_args (stmt) == 4)
3127 orig = gimple_call_arg (stmt, 3);
3128 
/* The destination size must be a known constant so that truncation
   behavior can be ruled out below.  */
3129 if (!tree_fits_uhwi_p (destsize))
3130 return false;
3131 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3132 
3133 /* Check whether the format is a literal string constant. */
3134 fmt_str = c_getstr (fmt);
3135 if (fmt_str == NULL)
3136 return false;
3137 
3138 if (!init_target_chars ())
3139 return false;
3140 
3141 /* If the format doesn't contain % args or %%, use strcpy. */
3142 if (strchr (fmt_str, target_percent) == NULL)
3143 {
3144 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3145 if (!fn)
3146 return false;
3147 
3148 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3149 if (orig)
3150 return false;
3151 
3152 /* We could expand this as
3153 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3154 or to
3155 memcpy (str, fmt_with_nul_at_cstm1, cst);
3156 but in the former case that might increase code size
3157 and in the latter case grow .rodata section too much.
3158 So punt for now. */
3159 size_t len = strlen (fmt_str);
3160 if (len >= destlen)
3161 return false;
3162 
3163 gimple_seq stmts = NULL;
3164 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3165 gimple_seq_add_stmt_without_update (&stmts, repl);
/* snprintf returns the number of characters (excluding the NUL) that
   would have been written; here that is the constant LEN.  */
3166 if (tree lhs = gimple_call_lhs (stmt))
3167 {
3168 repl = gimple_build_assign (lhs,
3169 build_int_cst (TREE_TYPE (lhs), len));
3170 gimple_seq_add_stmt_without_update (&stmts, repl);
3171 gsi_replace_with_seq_vops (gsi, stmts);
3172 /* gsi now points at the assignment to the lhs, get a
3173 stmt iterator to the strcpy call.
3174 ??? We can't use gsi_for_stmt as that doesn't work when the
3175 CFG isn't built yet. */
3176 gimple_stmt_iterator gsi2 = *gsi;
3177 gsi_prev (&gsi2);
3178 fold_stmt (&gsi2);
3179 }
3180 else
3181 {
3182 gsi_replace_with_seq_vops (gsi, stmts);
3183 fold_stmt (gsi);
3184 }
3185 return true;
3186 }
3187 
3188 /* If the format is "%s", use strcpy; this needs a known constant length
for ORIG both to rule out truncation and to replace the return value.  */
3189 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3190 {
3191 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3192 if (!fn)
3193 return false;
3194 
3195 /* Don't crash on snprintf (str1, cst, "%s"). */
3196 if (!orig)
3197 return false;
3198 
3199 tree orig_len = get_maxval_strlen (orig, 0);
3200 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3201 return false;
3202 
3203 /* We could expand this as
3204 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3205 or to
3206 memcpy (str1, str2_with_nul_at_cstm1, cst);
3207 but in the former case that might increase code size
3208 and in the latter case grow .rodata section too much.
3209 So punt for now. */
3210 if (compare_tree_int (orig_len, destlen) >= 0)
3211 return false;
3212 
3213 /* Convert snprintf (str1, cst, "%s", str2) into
3214 strcpy (str1, str2) if strlen (str2) < cst. */
3215 gimple_seq stmts = NULL;
3216 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3217 gimple_seq_add_stmt_without_update (&stmts, repl);
3218 if (tree lhs = gimple_call_lhs (stmt))
3219 {
3220 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3221 TREE_TYPE (orig_len)))
3222 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3223 repl = gimple_build_assign (lhs, orig_len);
3224 gimple_seq_add_stmt_without_update (&stmts, repl);
3225 gsi_replace_with_seq_vops (gsi, stmts);
3226 /* gsi now points at the assignment to the lhs, get a
3227 stmt iterator to the strcpy call.
3228 ??? We can't use gsi_for_stmt as that doesn't work when the
3229 CFG isn't built yet. */
3230 gimple_stmt_iterator gsi2 = *gsi;
3231 gsi_prev (&gsi2);
3232 fold_stmt (&gsi2);
3233 }
3234 else
3235 {
3236 gsi_replace_with_seq_vops (gsi, stmts);
3237 fold_stmt (gsi);
3238 }
3239 return true;
3240 }
3241 return false;
3242 }
3243
3244 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3245 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3246 more than 3 arguments, and ARG may be null in the 2-argument case.
3247
3248 Return true if the call was simplified and the statement replaced,
3249 otherwise false. FCODE is the BUILT_IN_*
3250 code of the function to be simplified. */
3251
3252 static bool
gimple_fold_builtin_fprintf(gimple_stmt_iterator * gsi,tree fp,tree fmt,tree arg,enum built_in_function fcode)3253 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3254 tree fp, tree fmt, tree arg,
3255 enum built_in_function fcode)
3256 {
3257 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3258 tree fn_fputc, fn_fputs;
3259 const char *fmt_str = NULL;
3260 
/* fputs/fputc have different return values than fprintf, so the
   transformations below are only valid when the result is unused.  */
3261 /* If the return value is used, don't do the transformation. */
3262 if (gimple_call_lhs (stmt) != NULL_TREE)
3263 return false;
3264 
3265 /* Check whether the format is a literal string constant. */
3266 fmt_str = c_getstr (fmt);
3267 if (fmt_str == NULL)
3268 return false;
3269 
3270 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3271 {
3272 /* If we're using an unlocked function, assume the other
3273 unlocked functions exist explicitly. */
3274 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3275 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3276 }
3277 else
3278 {
3279 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3280 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3281 }
3282 
3283 if (!init_target_chars ())
3284 return false;
3285 
3286 /* If the format doesn't contain % args or %%, use fputs. */
3287 if (strchr (fmt_str, target_percent) == NULL)
3288 {
/* A trailing argument with a %-free format would be unused by fprintf
   but might have side effects, so punt (except for va_list variants,
   where ARG is the va_list itself).  */
3289 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3290 && arg)
3291 return false;
3292 
3293 /* If the format specifier was "", fprintf does nothing. */
3294 if (fmt_str[0] == '\0')
3295 {
3296 replace_call_with_value (gsi, NULL_TREE);
3297 return true;
3298 }
3299 
3300 /* When "string" doesn't contain %, replace all cases of
3301 fprintf (fp, string) with fputs (string, fp). The fputs
3302 builtin will take care of special cases like length == 1. */
3303 if (fn_fputs)
3304 {
3305 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3306 replace_call_with_call_and_fold (gsi, repl);
3307 return true;
3308 }
3309 }
3310 
3311 /* The other optimizations can be done only on the non-va_list variants. */
3312 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3313 return false;
3314 
3315 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3316 else if (strcmp (fmt_str, target_percent_s) == 0)
3317 {
3318 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3319 return false;
3320 if (fn_fputs)
3321 {
3322 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3323 replace_call_with_call_and_fold (gsi, repl);
3324 return true;
3325 }
3326 }
3327 
3328 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3329 else if (strcmp (fmt_str, target_percent_c) == 0)
3330 {
3331 if (!arg
3332 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3333 return false;
3334 if (fn_fputc)
3335 {
3336 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3337 replace_call_with_call_and_fold (gsi, repl);
3338 return true;
3339 }
3340 }
3341 
3342 return false;
3343 }
3344
3345 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3346 FMT and ARG are the arguments to the call; we don't fold cases with
3347 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3348
3349 Return true if the call was simplified and the statement replaced,
3350 otherwise false. FCODE is the BUILT_IN_*
3351 code of the function to be simplified. */
3352
3353 static bool
gimple_fold_builtin_printf(gimple_stmt_iterator * gsi,tree fmt,tree arg,enum built_in_function fcode)3354 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3355 tree arg, enum built_in_function fcode)
3356 {
3357 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3358 tree fn_putchar, fn_puts, newarg;
3359 const char *fmt_str = NULL;
3360 
/* puts/putchar return values differ from printf's, so the
   transformations below are only valid when the result is unused.  */
3361 /* If the return value is used, don't do the transformation. */
3362 if (gimple_call_lhs (stmt) != NULL_TREE)
3363 return false;
3364 
3365 /* Check whether the format is a literal string constant. */
3366 fmt_str = c_getstr (fmt);
3367 if (fmt_str == NULL)
3368 return false;
3369 
3370 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3371 {
3372 /* If we're using an unlocked function, assume the other
3373 unlocked functions exist explicitly. */
3374 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3375 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3376 }
3377 else
3378 {
3379 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3380 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3381 }
3382 
3383 if (!init_target_chars ())
3384 return false;
3385 
/* Handle both printf ("literal-without-%") and printf ("%s", str):
   in either case STR below is the string that is actually printed.  */
3386 if (strcmp (fmt_str, target_percent_s) == 0
3387 || strchr (fmt_str, target_percent) == NULL)
3388 {
3389 const char *str;
3390 
3391 if (strcmp (fmt_str, target_percent_s) == 0)
3392 {
3393 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3394 return false;
3395 
3396 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3397 return false;
3398 
3399 str = c_getstr (arg);
3400 if (str == NULL)
3401 return false;
3402 }
3403 else
3404 {
3405 /* The format specifier doesn't contain any '%' characters. */
3406 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3407 && arg)
3408 return false;
3409 str = fmt_str;
3410 }
3411 
3412 /* If the string was "", printf does nothing. */
3413 if (str[0] == '\0')
3414 {
3415 replace_call_with_value (gsi, NULL_TREE);
3416 return true;
3417 }
3418 
3419 /* If the string has length of 1, call putchar. */
3420 if (str[1] == '\0')
3421 {
3422 /* Given printf("c"), (where c is any one character,)
3423 convert "c"[0] to an int and pass that to the replacement
3424 function. */
3425 newarg = build_int_cst (integer_type_node, str[0]);
3426 if (fn_putchar)
3427 {
3428 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3429 replace_call_with_call_and_fold (gsi, repl);
3430 return true;
3431 }
3432 }
3433 else
3434 {
3435 /* If the string was "string\n", call puts("string"). */
3436 size_t len = strlen (str);
3437 if ((unsigned char)str[len - 1] == target_newline
3438 && (size_t) (int) len == len
3439 && (int) len > 0)
3440 {
3441 char *newstr;
3442 tree offset_node, string_cst;
3443 
3444 /* Create a NUL-terminated string that's one char shorter
3445 than the original, stripping off the trailing '\n'. */
3446 newarg = build_string_literal (len, str);
3447 string_cst = string_constant (newarg, &offset_node);
3448 gcc_checking_assert (string_cst
3449 && (TREE_STRING_LENGTH (string_cst)
3450 == (int) len)
3451 && integer_zerop (offset_node)
3452 && (unsigned char)
3453 TREE_STRING_POINTER (string_cst)[len - 1]
3454 == target_newline);
3455 /* build_string_literal creates a new STRING_CST,
3456 modify it in place to avoid double copying. */
3457 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3458 newstr[len - 1] = '\0';
3459 if (fn_puts)
3460 {
3461 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3462 replace_call_with_call_and_fold (gsi, repl);
3463 return true;
3464 }
3465 }
3466 else
3467 /* We'd like to arrange to call fputs(string,stdout) here,
3468 but we need stdout and don't have a way to get it yet. */
3469 return false;
3470 }
3471 }
3472 
3473 /* The other optimizations can be done only on the non-va_list variants. */
3474 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3475 return false;
3476 
3477 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3478 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3479 {
3480 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3481 return false;
3482 if (fn_puts)
3483 {
3484 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3485 replace_call_with_call_and_fold (gsi, repl);
3486 return true;
3487 }
3488 }
3489 
3490 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3491 else if (strcmp (fmt_str, target_percent_c) == 0)
3492 {
3493 if (!arg || ! useless_type_conversion_p (integer_type_node,
3494 TREE_TYPE (arg)))
3495 return false;
3496 if (fn_putchar)
3497 {
3498 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3499 replace_call_with_call_and_fold (gsi, repl);
3500 return true;
3501 }
3502 }
3503 
3504 return false;
3505 }
3506
3507
3508
3509 /* Fold a call to __builtin_strlen, replacing it with a constant when
the length is known exactly, or recording a value range on its lhs. */
3510
3511 static bool
gimple_fold_builtin_strlen(gimple_stmt_iterator * gsi)3512 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3513 {
3514 gimple *stmt = gsi_stmt (*gsi);
3515 
3516 wide_int minlen;
3517 wide_int maxlen;
3518 
/* NOTE(review): a false return from get_range_strlen with both range
   bounds constant is treated as a usable range here — confirm against
   get_range_strlen's return-value convention.  */
3519 tree lenrange[2];
3520 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3521 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3522 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3523 {
3524 /* The range of lengths refers to either a single constant
3525 string or to the longest and shortest constant string
3526 referenced by the argument of the strlen() call, or to
3527 the strings that can possibly be stored in the arrays
3528 the argument refers to. */
3529 minlen = wi::to_wide (lenrange[0]);
3530 maxlen = wi::to_wide (lenrange[1]);
3531 }
3532 else
3533 {
/* No usable range: fall back to [0, max object size - 2] (room for at
   least the terminating NUL within a maximal object).  */
3534 unsigned prec = TYPE_PRECISION (sizetype);
3535 
3536 minlen = wi::shwi (0, prec);
3537 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3538 }
3539 
/* An exact length: replace the call with the constant.  MINLEN can
   only equal MAXLEN via the branch above that set lenrange, so
   lenrange[0] is initialized here.  */
3540 if (minlen == maxlen)
3541 {
3542 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3543 true, GSI_SAME_STMT)
3544 replace_call_with_value (gsi, lenrange[0]);
3545 return true;
3546 }
3547 
/* Otherwise keep the call but record the length range on its result
   for later value-range based optimizations.  */
3548 if (tree lhs = gimple_call_lhs (stmt))
3549 if (TREE_CODE (lhs) == SSA_NAME
3550 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3551 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3552 
3553 return false;
3554 }
3555
3556 /* Fold a call to __builtin_acc_on_device. */
3557
3558 static bool
gimple_fold_builtin_acc_on_device(gimple_stmt_iterator * gsi,tree arg0)3559 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3560 {
3561 /* Defer folding until we know which compiler we're in. */
3562 if (symtab->state != EXPANSION)
3563 return false;
3564
3565 unsigned val_host = GOMP_DEVICE_HOST;
3566 unsigned val_dev = GOMP_DEVICE_NONE;
3567
3568 #ifdef ACCEL_COMPILER
3569 val_host = GOMP_DEVICE_NOT_HOST;
3570 val_dev = ACCEL_COMPILER_acc_device;
3571 #endif
3572
3573 location_t loc = gimple_location (gsi_stmt (*gsi));
3574
3575 tree host_eq = make_ssa_name (boolean_type_node);
3576 gimple *host_ass = gimple_build_assign
3577 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3578 gimple_set_location (host_ass, loc);
3579 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3580
3581 tree dev_eq = make_ssa_name (boolean_type_node);
3582 gimple *dev_ass = gimple_build_assign
3583 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3584 gimple_set_location (dev_ass, loc);
3585 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3586
3587 tree result = make_ssa_name (boolean_type_node);
3588 gimple *result_ass = gimple_build_assign
3589 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3590 gimple_set_location (result_ass, loc);
3591 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3592
3593 replace_call_with_value (gsi, result);
3594
3595 return true;
3596 }
3597
3598 /* Fold realloc (0, n) -> malloc (n). */
3599
3600 static bool
gimple_fold_builtin_realloc(gimple_stmt_iterator * gsi)3601 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3602 {
3603 gimple *stmt = gsi_stmt (*gsi);
3604 tree arg = gimple_call_arg (stmt, 0);
3605 tree size = gimple_call_arg (stmt, 1);
3606
3607 if (operand_equal_p (arg, null_pointer_node, 0))
3608 {
3609 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3610 if (fn_malloc)
3611 {
3612 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3613 replace_call_with_call_and_fold (gsi, repl);
3614 return true;
3615 }
3616 }
3617 return false;
3618 }
3619
3620 /* Fold the non-target builtin at *GSI and return whether any simplification
3621 was made. */
3622
3623 static bool
gimple_fold_builtin(gimple_stmt_iterator * gsi)3624 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3625 {
3626 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3627 tree callee = gimple_call_fndecl (stmt);
3628 
3629 /* Give up for always_inline inline builtins until they are
3630 inlined. */
3631 if (avoid_folding_inline_builtin (callee))
3632 return false;
3633 
/* Dispatch to the per-builtin folders; the argument count and indices
   passed to each helper must match that builtin's prototype exactly.  */
3634 unsigned n = gimple_call_num_args (stmt);
3635 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3636 switch (fcode)
3637 {
3638 case BUILT_IN_BCMP:
3639 return gimple_fold_builtin_bcmp (gsi);
3640 case BUILT_IN_BCOPY:
3641 return gimple_fold_builtin_bcopy (gsi);
3642 case BUILT_IN_BZERO:
3643 return gimple_fold_builtin_bzero (gsi);
3644 
3645 case BUILT_IN_MEMSET:
3646 return gimple_fold_builtin_memset (gsi,
3647 gimple_call_arg (stmt, 1),
3648 gimple_call_arg (stmt, 2));
/* The trailing integer selects the memory-op flavor: 0 = memcpy,
   1 = mempcpy, 3 = memmove.  */
3649 case BUILT_IN_MEMCPY:
3650 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3651 gimple_call_arg (stmt, 1), 0);
3652 case BUILT_IN_MEMPCPY:
3653 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3654 gimple_call_arg (stmt, 1), 1);
3655 case BUILT_IN_MEMMOVE:
3656 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3657 gimple_call_arg (stmt, 1), 3);
3658 case BUILT_IN_SPRINTF_CHK:
3659 case BUILT_IN_VSPRINTF_CHK:
3660 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3661 case BUILT_IN_STRCAT_CHK:
3662 return gimple_fold_builtin_strcat_chk (gsi);
3663 case BUILT_IN_STRNCAT_CHK:
3664 return gimple_fold_builtin_strncat_chk (gsi);
3665 case BUILT_IN_STRLEN:
3666 return gimple_fold_builtin_strlen (gsi);
3667 case BUILT_IN_STRCPY:
3668 return gimple_fold_builtin_strcpy (gsi,
3669 gimple_call_arg (stmt, 0),
3670 gimple_call_arg (stmt, 1));
3671 case BUILT_IN_STRNCPY:
3672 return gimple_fold_builtin_strncpy (gsi,
3673 gimple_call_arg (stmt, 0),
3674 gimple_call_arg (stmt, 1),
3675 gimple_call_arg (stmt, 2));
3676 case BUILT_IN_STRCAT:
3677 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3678 gimple_call_arg (stmt, 1));
3679 case BUILT_IN_STRNCAT:
3680 return gimple_fold_builtin_strncat (gsi);
/* index/rindex are the legacy BSD spellings of strchr/strrchr; the
   bool selects forward (false) vs reverse (true) search.  */
3681 case BUILT_IN_INDEX:
3682 case BUILT_IN_STRCHR:
3683 return gimple_fold_builtin_strchr (gsi, false);
3684 case BUILT_IN_RINDEX:
3685 case BUILT_IN_STRRCHR:
3686 return gimple_fold_builtin_strchr (gsi, true);
3687 case BUILT_IN_STRSTR:
3688 return gimple_fold_builtin_strstr (gsi);
3689 case BUILT_IN_STRCMP:
3690 case BUILT_IN_STRCASECMP:
3691 case BUILT_IN_STRNCMP:
3692 case BUILT_IN_STRNCASECMP:
3693 return gimple_fold_builtin_string_compare (gsi);
3694 case BUILT_IN_MEMCHR:
3695 return gimple_fold_builtin_memchr (gsi);
3696 case BUILT_IN_FPUTS:
3697 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3698 gimple_call_arg (stmt, 1), false);
3699 case BUILT_IN_FPUTS_UNLOCKED:
3700 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3701 gimple_call_arg (stmt, 1), true);
3702 case BUILT_IN_MEMCPY_CHK:
3703 case BUILT_IN_MEMPCPY_CHK:
3704 case BUILT_IN_MEMMOVE_CHK:
3705 case BUILT_IN_MEMSET_CHK:
3706 return gimple_fold_builtin_memory_chk (gsi,
3707 gimple_call_arg (stmt, 0),
3708 gimple_call_arg (stmt, 1),
3709 gimple_call_arg (stmt, 2),
3710 gimple_call_arg (stmt, 3),
3711 fcode);
3712 case BUILT_IN_STPCPY:
3713 return gimple_fold_builtin_stpcpy (gsi);
3714 case BUILT_IN_STRCPY_CHK:
3715 case BUILT_IN_STPCPY_CHK:
3716 return gimple_fold_builtin_stxcpy_chk (gsi,
3717 gimple_call_arg (stmt, 0),
3718 gimple_call_arg (stmt, 1),
3719 gimple_call_arg (stmt, 2),
3720 fcode);
3721 case BUILT_IN_STRNCPY_CHK:
3722 case BUILT_IN_STPNCPY_CHK:
3723 return gimple_fold_builtin_stxncpy_chk (gsi,
3724 gimple_call_arg (stmt, 0),
3725 gimple_call_arg (stmt, 1),
3726 gimple_call_arg (stmt, 2),
3727 gimple_call_arg (stmt, 3),
3728 fcode);
3729 case BUILT_IN_SNPRINTF_CHK:
3730 case BUILT_IN_VSNPRINTF_CHK:
3731 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3732 
/* The printf family takes a variable argument count, so each case
   guards on N before extracting the optional trailing argument.  */
3733 case BUILT_IN_FPRINTF:
3734 case BUILT_IN_FPRINTF_UNLOCKED:
3735 case BUILT_IN_VFPRINTF:
3736 if (n == 2 || n == 3)
3737 return gimple_fold_builtin_fprintf (gsi,
3738 gimple_call_arg (stmt, 0),
3739 gimple_call_arg (stmt, 1),
3740 n == 3
3741 ? gimple_call_arg (stmt, 2)
3742 : NULL_TREE,
3743 fcode);
3744 break;
3745 case BUILT_IN_FPRINTF_CHK:
3746 case BUILT_IN_VFPRINTF_CHK:
3747 if (n == 3 || n == 4)
3748 return gimple_fold_builtin_fprintf (gsi,
3749 gimple_call_arg (stmt, 0),
3750 gimple_call_arg (stmt, 2),
3751 n == 4
3752 ? gimple_call_arg (stmt, 3)
3753 : NULL_TREE,
3754 fcode);
3755 break;
3756 case BUILT_IN_PRINTF:
3757 case BUILT_IN_PRINTF_UNLOCKED:
3758 case BUILT_IN_VPRINTF:
3759 if (n == 1 || n == 2)
3760 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3761 n == 2
3762 ? gimple_call_arg (stmt, 1)
3763 : NULL_TREE, fcode);
3764 break;
3765 case BUILT_IN_PRINTF_CHK:
3766 case BUILT_IN_VPRINTF_CHK:
3767 if (n == 2 || n == 3)
3768 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3769 n == 3
3770 ? gimple_call_arg (stmt, 2)
3771 : NULL_TREE, fcode);
3772 break;
3773 case BUILT_IN_ACC_ON_DEVICE:
3774 return gimple_fold_builtin_acc_on_device (gsi,
3775 gimple_call_arg (stmt, 0));
3776 case BUILT_IN_REALLOC:
3777 return gimple_fold_builtin_realloc (gsi);
3778 
3779 default:;
3780 }
3781 
3782 /* Try the generic builtin folder. */
3783 bool ignore = (gimple_call_lhs (stmt) == NULL);
3784 tree result = fold_call_stmt (stmt, ignore);
3785 if (result)
3786 {
3787 if (ignore)
3788 STRIP_NOPS (result);
3789 else
3790 result = fold_convert (gimple_call_return_type (stmt), result);
3791 if (!update_call_from_tree (gsi, result))
3792 gimplify_and_update_call_from_tree (gsi, result);
3793 return true;
3794 }
3795 
3796 return false;
3797 }
3798
3799 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3800 function calls to constants, where possible. */
3801
3802 static tree
fold_internal_goacc_dim(const gimple * call)3803 fold_internal_goacc_dim (const gimple *call)
3804 {
3805 int axis = oacc_get_ifn_dim_arg (call);
3806 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3807 tree result = NULL_TREE;
3808 tree type = TREE_TYPE (gimple_call_lhs (call));
3809
3810 switch (gimple_call_internal_fn (call))
3811 {
3812 case IFN_GOACC_DIM_POS:
3813 /* If the size is 1, we know the answer. */
3814 if (size == 1)
3815 result = build_int_cst (type, 0);
3816 break;
3817 case IFN_GOACC_DIM_SIZE:
3818 /* If the size is not dynamic, we know the answer. */
3819 if (size)
3820 result = build_int_cst (type, size);
3821 break;
3822 default:
3823 break;
3824 }
3825
3826 return result;
3827 }
3828
3829 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3830 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3831 &var where var is only addressable because of such calls. */
3832
3833 bool
optimize_atomic_compare_exchange_p(gimple * stmt)3834 optimize_atomic_compare_exchange_p (gimple *stmt)
3835 {
/* Cheap structural checks first: the builtin takes exactly 6 args and
   the transformation is only profitable/valid when inlining atomics is
   allowed, we are optimizing, no sanitizer instruments the memory, and
   the call has virtual operands to rewire.  */
3836 if (gimple_call_num_args (stmt) != 6
3837 || !flag_inline_atomics
3838 || !optimize
3839 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3840 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3841 || !gimple_vdef (stmt)
3842 || !gimple_vuse (stmt))
3843 return false;
3844 
3845 tree fndecl = gimple_call_fndecl (stmt);
3846 switch (DECL_FUNCTION_CODE (fndecl))
3847 {
3848 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3849 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3850 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3851 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3852 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3853 break;
3854 default:
3855 return false;
3856 }
3857 
/* The "expected" argument must be the address of a local variable so
   that taking it out of memory (de-addressing it) is possible.  */
3858 tree expected = gimple_call_arg (stmt, 1);
3859 if (TREE_CODE (expected) != ADDR_EXPR
3860 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3861 return false;
3862 
3863 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3864 if (!is_gimple_reg_type (etype)
3865 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3866 || TREE_THIS_VOLATILE (etype)
3867 || VECTOR_TYPE_P (etype)
3868 || TREE_CODE (etype) == COMPLEX_TYPE
3869 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3870 might not preserve all the bits. See PR71716. */
3871 || SCALAR_FLOAT_TYPE_P (etype)
3872 || maybe_ne (TYPE_PRECISION (etype),
3873 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3874 return false;
3875 
/* The weak flag must be a literal 0 or 1 so it can be folded into the
   flags operand of the internal function.  */
3876 tree weak = gimple_call_arg (stmt, 3);
3877 if (!integer_zerop (weak) && !integer_onep (weak))
3878 return false;
3879 
/* The target must actually provide a compare-and-swap instruction for
   the access mode (third parameter type of the builtin).  */
3880 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3881 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3882 machine_mode mode = TYPE_MODE (itype);
3883 
3884 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3885 == CODE_FOR_nothing
3886 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3887 return false;
3888 
3889 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3890 return false;
3891 
3892 return true;
3893 }
3894
3895 /* Fold
3896 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3897 into
3898 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3899 i = IMAGPART_EXPR <t>;
3900 r = (_Bool) i;
3901 e = REALPART_EXPR <t>; */
3902
3903 void
fold_builtin_atomic_compare_exchange(gimple_stmt_iterator * gsi)3904 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3905 {
3906 gimple *stmt = gsi_stmt (*gsi);
3907 tree fndecl = gimple_call_fndecl (stmt);
3908 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3909 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3910 tree ctype = build_complex_type (itype);
3911 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3912 bool throws = false;
3913 edge e = NULL;
/* Load the current value of the (formerly addressable) expected
   variable into an SSA name; GSIRET remembers this statement so the
   caller's iterator can be repositioned at the end.  */
3914 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3915 expected);
3916 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3917 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3918 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3919 {
3920 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3921 build1 (VIEW_CONVERT_EXPR, itype,
3922 gimple_assign_lhs (g)));
3923 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3924 }
/* Encode the weak flag and access size into a single flags operand:
   FLAG = weak * 256 + N (see the function comment above).  */
3925 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3926 + int_size_in_bytes (itype);
3927 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3928 gimple_call_arg (stmt, 0),
3929 gimple_assign_lhs (g),
3930 gimple_call_arg (stmt, 2),
3931 build_int_cst (integer_type_node, flag),
3932 gimple_call_arg (stmt, 4),
3933 gimple_call_arg (stmt, 5));
/* The internal fn returns a complex value packing the old memory
   contents (real part) and the success flag (imaginary part);
   carry over the virtual operands from the original call.  */
3934 tree lhs = make_ssa_name (ctype);
3935 gimple_call_set_lhs (g, lhs);
3936 gimple_set_vdef (g, gimple_vdef (stmt));
3937 gimple_set_vuse (g, gimple_vuse (stmt));
3938 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3939 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw, the unpacking statements must be
   placed on the fallthru edge, after the (possibly throwing) call.  */
3940 if (stmt_can_throw_internal (stmt))
3941 {
3942 throws = true;
3943 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3944 }
3945 gimple_call_set_nothrow (as_a <gcall *> (g),
3946 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3947 gimple_call_set_lhs (stmt, NULL_TREE);
3948 gsi_replace (gsi, g, true);
/* Extract the success flag (IMAGPART) and convert it to the original
   boolean result, if that result was used.  */
3949 if (oldlhs)
3950 {
3951 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3952 build1 (IMAGPART_EXPR, itype, lhs));
3953 if (throws)
3954 {
3955 gsi_insert_on_edge_immediate (e, g);
3956 *gsi = gsi_for_stmt (g);
3957 }
3958 else
3959 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3960 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3961 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3962 }
/* Extract the old memory value (REALPART) and store it back into the
   expected variable, converting back to its type if necessary.  */
3963 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3964 build1 (REALPART_EXPR, itype, lhs));
3965 if (throws && oldlhs == NULL_TREE)
3966 {
3967 gsi_insert_on_edge_immediate (e, g);
3968 *gsi = gsi_for_stmt (g);
3969 }
3970 else
3971 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3972 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3973 {
3974 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3975 VIEW_CONVERT_EXPR,
3976 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3977 gimple_assign_lhs (g)));
3978 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3979 }
3980 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3981 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Reposition the caller's iterator at the initial load inserted above.  */
3982 *gsi = gsiret;
3983 }
3984
3985 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3986 doesn't fit into TYPE. The test for overflow should be regardless of
3987 -fwrapv, and even for unsigned types. */
3988
3989 bool
arith_overflowed_p(enum tree_code code,const_tree type,const_tree arg0,const_tree arg1)3990 arith_overflowed_p (enum tree_code code, const_tree type,
3991 const_tree arg0, const_tree arg1)
3992 {
3993 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3994 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3995 widest2_int_cst;
3996 widest2_int warg0 = widest2_int_cst (arg0);
3997 widest2_int warg1 = widest2_int_cst (arg1);
3998 widest2_int wres;
3999 switch (code)
4000 {
4001 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4002 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4003 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4004 default: gcc_unreachable ();
4005 }
4006 signop sign = TYPE_SIGN (type);
4007 if (sign == UNSIGNED && wi::neg_p (wres))
4008 return true;
4009 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4010 }
4011
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   If INPLACE is true, only in-place modifications are allowed (no new
   statements may be inserted and the call may not be replaced).
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     just strip the wrapper.  Warn in the dump file when type
	     inheritance information contradicts this devirtualization.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Speculative devirtualization: if type analysis proves the
	     complete set of possible targets, fold to the single target
	     or to __builtin_unreachable when there is none.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined: assign it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target at all: the call is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* For arithmetic internal functions SUBCODE records the tree code
	 of the operation; CPLX_RESULT is true for the *_OVERFLOW variants
	 whose result is a complex pair (value, overflow flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* An all-ones object size means "unknown"; the check can
	       also go away when the offset provably fits.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the complex variants fold against the element type
	         of the lhs; without a lhs there is nothing to do.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* Overflow: for complex variants record it in the
		     imaginary part, otherwise give up folding.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Check whether the constant itself fits TYPE.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion to TYPE could change the value;
		       don't fold.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Build the complex (value, overflow) pair expected by
	         consumers of the *_OVERFLOW internal functions.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4339
4340
4341 /* Return true whether NAME has a use on STMT. */
4342
4343 static bool
has_use_on_stmt(tree name,gimple * stmt)4344 has_use_on_stmt (tree name, gimple *stmt)
4345 {
4346 imm_use_iterator iter;
4347 use_operand_p use_p;
4348 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4349 if (USE_STMT (use_p) == stmt)
4350 return true;
4351 return false;
4352 }
4353
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can take the simplification directly when it is
	 a comparison, an SSA name or a constant; otherwise the result
	 has to be materialized into a new SSA name first.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Constant condition: make the jump unconditional.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For assignments only replace in-place when the new rhs does not
	 need more operands than the statement currently has room for.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same (internal) function call, only the arguments changed:
	 update them in-place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Generic fallback: materialize the result into the statement's
	 lhs via a fresh sequence replacing the whole statement.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4504
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize in place; return
   true if anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR to the referenced object.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to reach the base memory reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4625
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE maps SSA names to values during
   pattern-based simplification and the GIMPLE_RETURN folding; it may
   decline by returning NULL_TREE.  Returns true if the statement was
   changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize addresses in call arguments and the lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU into the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs are only lvalue-folded when the constraint allows
	       memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address points at and rebuild
		 the ADDR_EXPR around the result.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return statement.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4950
4951 /* Valueziation callback that ends up not following SSA edges. */
4952
tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE for every operand tells the folder not to look
     through any SSA name's defining statement, i.e. fold the statement
     in isolation.  */
  return NULL_TREE;
}
4958
4959 /* Valueization callback that ends up following single-use SSA edges only. */
4960
4961 tree
follow_single_use_edges(tree val)4962 follow_single_use_edges (tree val)
4963 {
4964 if (TREE_CODE (val) == SSA_NAME
4965 && !has_single_use (val))
4966 return NULL_TREE;
4967 return val;
4968 }
4969
4970 /* Fold the statement pointed to by GSI. In some cases, this function may
4971 replace the whole statement with a new one. Returns true iff folding
4972 makes any changes.
4973 The statement pointed to by GSI should be in valid gimple form but may
4974 be in unfolded state as resulting from for example constant propagation
4975 which can produce *&x = 0. */
4976
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without following SSA use-def chains and allow the statement
     to be replaced (inplace = false); see fold_stmt_1.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4982
/* Like the fold_stmt overload above, but valueize SSA operands through
   the VALUEIZE callback instead of not following SSA edges at all.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4988
4989 /* Perform the minimal folding on statement *GSI. Only operations like
4990 *&x created by constant propagation are handled. The statement cannot
4991 be replaced with a new one. Return true if the statement was
4992 changed, false otherwise.
4993 The statement *GSI should be in valid gimple form but may
4994 be in unfolded state as resulting from for example constant propagation
4995 which can produce *&x = 0. */
4996
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Folding in-place (second argument true) must not replace STMT with
     a different statement; the assert below checks that invariant.  */
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
5005
5006 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5007 if EXPR is null or we don't know how.
5008 If non-null, the result always has boolean type. */
5009
5010 static tree
canonicalize_bool(tree expr,bool invert)5011 canonicalize_bool (tree expr, bool invert)
5012 {
5013 if (!expr)
5014 return NULL_TREE;
5015 else if (invert)
5016 {
5017 if (integer_nonzerop (expr))
5018 return boolean_false_node;
5019 else if (integer_zerop (expr))
5020 return boolean_true_node;
5021 else if (TREE_CODE (expr) == SSA_NAME)
5022 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5023 build_int_cst (TREE_TYPE (expr), 0));
5024 else if (COMPARISON_CLASS_P (expr))
5025 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5026 boolean_type_node,
5027 TREE_OPERAND (expr, 0),
5028 TREE_OPERAND (expr, 1));
5029 else
5030 return NULL_TREE;
5031 }
5032 else
5033 {
5034 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5035 return expr;
5036 if (integer_nonzerop (expr))
5037 return boolean_true_node;
5038 else if (integer_zerop (expr))
5039 return boolean_false_node;
5040 else if (TREE_CODE (expr) == SSA_NAME)
5041 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5042 build_int_cst (TREE_TYPE (expr), 0));
5043 else if (COMPARISON_CLASS_P (expr))
5044 return fold_build2 (TREE_CODE (expr),
5045 boolean_type_node,
5046 TREE_OPERAND (expr, 0),
5047 TREE_OPERAND (expr, 1));
5048 else
5049 return NULL_TREE;
5050 }
5051 }
5052
5053 /* Check to see if a boolean expression EXPR is logically equivalent to the
5054 comparison (OP1 CODE OP2). Check for various identities involving
5055 SSA_NAMEs. */
5056
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      /* Or EXPR's defining assignment may compute exactly
	 (op1 CODE op2).  */
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) are equivalent to the defining
	     comparison itself ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... while (name == 0) and (name != 1) are equivalent to its
	     inversion (false: do not honor NaNs here).  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5110
5111 /* Check to see if two boolean expressions OP1 and OP2 are logically
5112 equivalent. */
5113
5114 static bool
same_bool_result_p(const_tree op1,const_tree op2)5115 same_bool_result_p (const_tree op1, const_tree op2)
5116 {
5117 /* Simple cases first. */
5118 if (operand_equal_p (op1, op2, 0))
5119 return true;
5120
5121 /* Check the cases where at least one of the operands is a comparison.
5122 These are a bit smarter than operand_equal_p in that they apply some
5123 identifies on SSA_NAMEs. */
5124 if (COMPARISON_CLASS_P (op2)
5125 && same_bool_comparison_p (op1, TREE_CODE (op2),
5126 TREE_OPERAND (op2, 0),
5127 TREE_OPERAND (op2, 1)))
5128 return true;
5129 if (COMPARISON_CLASS_P (op1)
5130 && same_bool_comparison_p (op2, TREE_CODE (op1),
5131 TREE_OPERAND (op1, 0),
5132 TREE_OPERAND (op1, 1)))
5133 return true;
5134
5135 /* Default case. */
5136 return false;
5137 }
5138
5139 /* Forward declarations for some mutually recursive functions. */
5140
5141 static tree
5142 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5143 enum tree_code code2, tree op2a, tree op2b);
5144 static tree
5145 and_var_with_comparison (tree var, bool invert,
5146 enum tree_code code2, tree op2a, tree op2b);
5147 static tree
5148 and_var_with_comparison_1 (gimple *stmt,
5149 enum tree_code code2, tree op2a, tree op2b);
5150 static tree
5151 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5152 enum tree_code code2, tree op2a, tree op2b);
5153 static tree
5154 or_var_with_comparison (tree var, bool invert,
5155 enum tree_code code2, tree op2a, tree op2b);
5156 static tree
5157 or_var_with_comparison_1 (gimple *stmt,
5158 enum tree_code code2, tree op2a, tree op2b);
5159
5160 /* Helper function for and_comparisons_1: try to simplify the AND of the
5161 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5162 If INVERT is true, invert the value of the VAR before doing the AND.
5163 Return NULL_EXPR if we can't simplify this to a single expression. */
5164
5165 static tree
and_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5166 and_var_with_comparison (tree var, bool invert,
5167 enum tree_code code2, tree op2a, tree op2b)
5168 {
5169 tree t;
5170 gimple *stmt = SSA_NAME_DEF_STMT (var);
5171
5172 /* We can only deal with variables whose definitions are assignments. */
5173 if (!is_gimple_assign (stmt))
5174 return NULL_TREE;
5175
5176 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5177 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5178 Then we only have to consider the simpler non-inverted cases. */
5179 if (invert)
5180 t = or_var_with_comparison_1 (stmt,
5181 invert_tree_comparison (code2, false),
5182 op2a, op2b);
5183 else
5184 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5185 return canonicalize_bool (t, invert);
5186 }
5187
5188 /* Try to simplify the AND of the ssa variable defined by the assignment
5189 STMT with the comparison specified by (OP2A CODE2 OP2B).
5190 Return NULL_EXPR if we can't simplify this to a single expression. */
5191
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is equivalent to VAR itself, TRUE_TEST_VAR is
     set to op2a below; if it is equivalent to !VAR, FALSE_TEST_VAR is.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 AND (op2a code2 op2b)), if any.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
5350
5351 /* Try to simplify the AND of two comparisons defined by
5352 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5353 If this can be done without constructing an intermediate value,
5354 return the resulting tree; otherwise NULL_TREE is returned.
5355 This function is deliberately asymmetric as it recurses on SSA_DEFs
5356 in the first comparison but not the second. */
5357
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is < 0 if op1b < op2b, 0 if they are equal, > 0 if
	 op1b > op2b; all the cases below key off this.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  /* Here VAL means "the != test is implied by the other
	     comparison", so the whole AND reduces to that other one.  */
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) / (NAME != 1), i.e. when the
	 first comparison is equivalent to !NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
5604
5605 /* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5607 If this can be simplified to a single expression (without requiring
5608 introducing more SSA variables to hold intermediate values),
5609 return the resulting tree. Otherwise return NULL_TREE.
5610 If the result expression is non-null, it has boolean type. */
5611
5612 tree
maybe_fold_and_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)5613 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5614 enum tree_code code2, tree op2a, tree op2b)
5615 {
5616 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5617 if (t)
5618 return t;
5619 else
5620 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5621 }
5622
5623 /* Helper function for or_comparisons_1: try to simplify the OR of the
5624 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5625 If INVERT is true, invert the value of VAR before doing the OR.
5626 Return NULL_EXPR if we can't simplify this to a single expression. */
5627
5628 static tree
or_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5629 or_var_with_comparison (tree var, bool invert,
5630 enum tree_code code2, tree op2a, tree op2b)
5631 {
5632 tree t;
5633 gimple *stmt = SSA_NAME_DEF_STMT (var);
5634
5635 /* We can only deal with variables whose definitions are assignments. */
5636 if (!is_gimple_assign (stmt))
5637 return NULL_TREE;
5638
5639 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5640 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5641 Then we only have to consider the simpler non-inverted cases. */
5642 if (invert)
5643 t = and_var_with_comparison_1 (stmt,
5644 invert_tree_comparison (code2, false),
5645 op2a, op2b);
5646 else
5647 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5648 return canonicalize_bool (t, invert);
5649 }
5650
5651 /* Try to simplify the OR of the ssa variable defined by the assignment
5652 STMT with the comparison specified by (OP2A CODE2 OP2B).
5653 Return NULL_EXPR if we can't simplify this to a single expression. */
5654
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is equivalent to VAR itself, TRUE_TEST_VAR is
     set to op2a below; if it is equivalent to !VAR, FALSE_TEST_VAR is.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }
  
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if any.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
      
      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }
	  
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}
      
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }
	  
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else 
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
5814
5815 /* Try to simplify the OR of two comparisons defined by
5816 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5817 If this can be done without constructing an intermediate value,
5818 return the resulting tree; otherwise NULL_TREE is returned.
5819 This function is deliberately asymmetric as it recurses on SSA_DEFs
5820 in the first comparison but not the second. */
5821
5822 static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: negative, zero or positive for
	 op1b <, == or > op2b respectively.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) || (x > C) becomes x != C.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true iff the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6068
6069 /* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6071 If this can be simplified to a single expression (without requiring
6072 introducing more SSA variables to hold intermediate values),
6073 return the resulting tree. Otherwise return NULL_TREE.
6074 If the result expression is non-null, it has boolean type. */
6075
6076 tree
maybe_fold_or_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)6077 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6078 enum tree_code code2, tree op2a, tree op2b)
6079 {
6080 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6081 if (t)
6082 return t;
6083 else
6084 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6085 }
6086
6087
6088 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6089
6090 Either NULL_TREE, a simplified but non-constant or a constant
6091 is returned.
6092
6093 ??? This should go into a gimple-fold-inline.h file to be eventually
6094 privatized with the single valueize function used in the various TUs
6095 to avoid the indirect function call overhead. */
6096
tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* First try match-and-simplify on the whole statement.
     ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Otherwise dispatch on the statement kind and fold by hand.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  poly_int64 offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A fully-initialized vector CONSTRUCTOR whose valueized
                 elements are all constants folds to a VECTOR_CST.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && known_eq (CONSTRUCTOR_NELTS (rhs),
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i, nelts;
                  tree val;

                  nelts = CONSTRUCTOR_NELTS (rhs);
                  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec.quick_push (val);
                      else
                        return NULL_TREE;
                    }

                  return vec.build ();
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      /* Replace a MEM_REF base by its valueized invariant
                         address so the aggregate folder below can see it.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we cant reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put the constant (if any) second.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                  {
                    /* __builtin_expect folds to its first argument when
                       that argument is constant.  */
                    tree arg0 = gimple_call_arg (stmt, 0);
                    tree op0 = (*valueize) (arg0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      return op0;
                    return NULL_TREE;
                  }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            /* For the UBSAN checks only accept a result that provably
               did not overflow.  */
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            /* A builtin call with all-constant valueized arguments may
               fold via the generic builtin folder.  */
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6394
6395 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6396 Returns NULL_TREE if folding to a constant is not possible, otherwise
6397 returns a constant according to is_gimple_min_invariant. */
6398
6399 tree
gimple_fold_stmt_to_constant(gimple * stmt,tree (* valueize)(tree))6400 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6401 {
6402 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6403 if (res && is_gimple_min_invariant (res))
6404 return res;
6405 return NULL_TREE;
6406 }
6407
6408
6409 /* The following set of functions are supposed to fold references using
6410 their constant initializers. */
6411
6412 /* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
6414 base is array[40] and we do know constructor of array), then
6415 BIT_OFFSET is adjusted accordingly.
6416
6417 As a special case, return error_mark_node when constructor
6418 is not explicitly available, but it is known to be zero
6419 such as 'static const int a;'. */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET,
	 giving up if the sum does not fit a HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle references whose extent is exactly known; then
	 accumulate the inner offset and recurse on the inner base.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6487
6488 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6489 SIZE to the memory at bit OFFSET. */
6490
static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
6547
6548 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6549 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6550
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the constructor's fields for one whose bit range covers
     the requested access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
6610
6611 /* CTOR is value initializing memory, fold reference of type TYPE and
6612 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6613
tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the aggregate kind; vectors are laid out like
	 arrays for this purpose.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6674
6675 /* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6677 names using VALUEIZE. Return NULL_TREE otherwise. */
6678
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of the element:
		 (idx - low_bound) * unit_size * BITS_PER_UNIT,
		 sign-extended in the index's precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6788
/* Like fold_const_aggregate_ref_1, but without valueization of
   SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
6794
6795 /* Lookup virtual method with index TOKEN in a virtual table V
6796 at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
6800
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert the byte OFFSET and the slot TOKEN into a bit offset
     into the vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6903
6904 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6905 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6906 KNOWN_BINFO carries the binfo describing the true type of
6907 OBJ_TYPE_REF_OBJECT(REF).
6908 Set CAN_REFER if non-NULL to false if method
6909 is not referable or if the virtual table is ill-formed (such as rewriten
6910 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6911
6912 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)6913 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6914 bool *can_refer)
6915 {
6916 unsigned HOST_WIDE_INT offset;
6917 tree v;
6918
6919 v = BINFO_VTABLE (known_binfo);
6920 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6921 if (!v)
6922 return NULL_TREE;
6923
6924 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6925 {
6926 if (can_refer)
6927 *can_refer = false;
6928 return NULL_TREE;
6929 }
6930 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6931 }
6932
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  /* PTYPE is the pointer type of T; TYPE is the type being dereferenced.  */
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions to the underlying pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Punt on non-pointers and on ref-all pointers where folding could
     drop the alias-everything property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 /* Only a constant lower bound can be used in the ARRAY_REF.  */
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Fold only when the offset selects an element inside the
	     vector's bounds.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The imaginary part lives one element past the real part, so
	     the byte offset must equal the element size.  */
          if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner indirection; if that fails build an
	 explicit INDIRECT_REF to index through.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      /* Only a constant lower bound can be used in the ARRAY_REF.  */
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7053
7054 /* Return true if CODE is an operation that when operating on signed
7055 integer types involves undefined behavior on overflow and the
7056 operation can be expressed with unsigned arithmetic. */
7057
7058 bool
arith_code_with_undefined_signed_overflow(tree_code code)7059 arith_code_with_undefined_signed_overflow (tree_code code)
7060 {
7061 switch (code)
7062 {
7063 case PLUS_EXPR:
7064 case MINUS_EXPR:
7065 case MULT_EXPR:
7066 case NEGATE_EXPR:
7067 case POINTER_PLUS_EXPR:
7068 return true;
7069 default:
7070 return false;
7071 }
7072 }
7073
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The corresponding unsigned type, where overflow wraps and is thus
     well-defined.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1..n-1; op 0 is the lhs) to the unsigned
     type, collecting the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned result; the original LHS is re-set by
     the conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is invalid on unsigned integers; use PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7110
7111
7112 /* The valueization hook we use for the gimple_build API simplification.
7113 This makes us match fold_buildN behavior by only combining with
7114 statements in the sequence(s) we are currently building. */
7115
7116 static tree
gimple_build_valueize(tree op)7117 gimple_build_valueize (tree op)
7118 {
7119 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7120 return op;
7121 return NULL_TREE;
7122 }
7123
7124 /* Build the expression CODE OP0 of type TYPE with location LOC,
7125 simplifying it first if possible. Returns the built
7126 expression value and appends statements possibly defining it
7127 to SEQ. */
7128
7129 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0)7130 gimple_build (gimple_seq *seq, location_t loc,
7131 enum tree_code code, tree type, tree op0)
7132 {
7133 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7134 if (!res)
7135 {
7136 res = create_tmp_reg_or_ssa_name (type);
7137 gimple *stmt;
7138 if (code == REALPART_EXPR
7139 || code == IMAGPART_EXPR
7140 || code == VIEW_CONVERT_EXPR)
7141 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7142 else
7143 stmt = gimple_build_assign (res, code, op0);
7144 gimple_set_location (stmt, loc);
7145 gimple_seq_add_stmt_without_update (seq, stmt);
7146 }
7147 return res;
7148 }
7149
7150 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7151 simplifying it first if possible. Returns the built
7152 expression value and appends statements possibly defining it
7153 to SEQ. */
7154
7155 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1)7156 gimple_build (gimple_seq *seq, location_t loc,
7157 enum tree_code code, tree type, tree op0, tree op1)
7158 {
7159 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7160 if (!res)
7161 {
7162 res = create_tmp_reg_or_ssa_name (type);
7163 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7164 gimple_set_location (stmt, loc);
7165 gimple_seq_add_stmt_without_update (seq, stmt);
7166 }
7167 return res;
7168 }
7169
7170 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7171 simplifying it first if possible. Returns the built
7172 expression value and appends statements possibly defining it
7173 to SEQ. */
7174
7175 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)7176 gimple_build (gimple_seq *seq, location_t loc,
7177 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7178 {
7179 tree res = gimple_simplify (code, type, op0, op1, op2,
7180 seq, gimple_build_valueize);
7181 if (!res)
7182 {
7183 res = create_tmp_reg_or_ssa_name (type);
7184 gimple *stmt;
7185 if (code == BIT_FIELD_REF)
7186 stmt = gimple_build_assign (res, code,
7187 build3 (code, type, op0, op1, op2));
7188 else
7189 stmt = gimple_build_assign (res, code, op0, op1, op2);
7190 gimple_set_location (stmt, loc);
7191 gimple_seq_add_stmt_without_update (seq, stmt);
7192 }
7193 return res;
7194 }
7195
7196 /* Build the call FN (ARG0) with a result of type TYPE
7197 (or no result if TYPE is void) with location LOC,
7198 simplifying it first if possible. Returns the built
7199 expression value (or NULL_TREE if TYPE is void) and appends
7200 statements possibly defining it to SEQ. */
7201
7202 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0)7203 gimple_build (gimple_seq *seq, location_t loc,
7204 enum built_in_function fn, tree type, tree arg0)
7205 {
7206 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7207 if (!res)
7208 {
7209 tree decl = builtin_decl_implicit (fn);
7210 gimple *stmt = gimple_build_call (decl, 1, arg0);
7211 if (!VOID_TYPE_P (type))
7212 {
7213 res = create_tmp_reg_or_ssa_name (type);
7214 gimple_call_set_lhs (stmt, res);
7215 }
7216 gimple_set_location (stmt, loc);
7217 gimple_seq_add_stmt_without_update (seq, stmt);
7218 }
7219 return res;
7220 }
7221
7222 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7223 (or no result if TYPE is void) with location LOC,
7224 simplifying it first if possible. Returns the built
7225 expression value (or NULL_TREE if TYPE is void) and appends
7226 statements possibly defining it to SEQ. */
7227
7228 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0,tree arg1)7229 gimple_build (gimple_seq *seq, location_t loc,
7230 enum built_in_function fn, tree type, tree arg0, tree arg1)
7231 {
7232 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7233 if (!res)
7234 {
7235 tree decl = builtin_decl_implicit (fn);
7236 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7237 if (!VOID_TYPE_P (type))
7238 {
7239 res = create_tmp_reg_or_ssa_name (type);
7240 gimple_call_set_lhs (stmt, res);
7241 }
7242 gimple_set_location (stmt, loc);
7243 gimple_seq_add_stmt_without_update (seq, stmt);
7244 }
7245 return res;
7246 }
7247
7248 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7249 (or no result if TYPE is void) with location LOC,
7250 simplifying it first if possible. Returns the built
7251 expression value (or NULL_TREE if TYPE is void) and appends
7252 statements possibly defining it to SEQ. */
7253
7254 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0,tree arg1,tree arg2)7255 gimple_build (gimple_seq *seq, location_t loc,
7256 enum built_in_function fn, tree type,
7257 tree arg0, tree arg1, tree arg2)
7258 {
7259 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7260 seq, gimple_build_valueize);
7261 if (!res)
7262 {
7263 tree decl = builtin_decl_implicit (fn);
7264 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7265 if (!VOID_TYPE_P (type))
7266 {
7267 res = create_tmp_reg_or_ssa_name (type);
7268 gimple_call_set_lhs (stmt, res);
7269 }
7270 gimple_set_location (stmt, loc);
7271 gimple_seq_add_stmt_without_update (seq, stmt);
7272 }
7273 return res;
7274 }
7275
7276 /* Build the conversion (TYPE) OP with a result of type TYPE
7277 with location LOC if such conversion is neccesary in GIMPLE,
7278 simplifying it first.
7279 Returns the built expression value and appends
7280 statements possibly defining it to SEQ. */
7281
7282 tree
gimple_convert(gimple_seq * seq,location_t loc,tree type,tree op)7283 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7284 {
7285 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7286 return op;
7287 return gimple_build (seq, loc, NOP_EXPR, type, op);
7288 }
7289
7290 /* Build the conversion (ptrofftype) OP with a result of a type
7291 compatible with ptrofftype with location LOC if such conversion
7292 is neccesary in GIMPLE, simplifying it first.
7293 Returns the built expression value and appends
7294 statements possibly defining it to SEQ. */
7295
7296 tree
gimple_convert_to_ptrofftype(gimple_seq * seq,location_t loc,tree op)7297 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7298 {
7299 if (ptrofftype_p (TREE_TYPE (op)))
7300 return op;
7301 return gimple_convert (seq, loc, sizetype, op);
7302 }
7303
7304 /* Build a vector of type TYPE in which each element has the value OP.
7305 Return a gimple value for the result, appending any new statements
7306 to SEQ. */
7307
7308 tree
gimple_build_vector_from_val(gimple_seq * seq,location_t loc,tree type,tree op)7309 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7310 tree op)
7311 {
7312 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7313 && !CONSTANT_CLASS_P (op))
7314 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7315
7316 tree res, vec = build_vector_from_val (type, op);
7317 if (is_gimple_val (vec))
7318 return vec;
7319 if (gimple_in_ssa_p (cfun))
7320 res = make_ssa_name (type);
7321 else
7322 res = create_tmp_reg (type);
7323 gimple *stmt = gimple_build_assign (res, vec);
7324 gimple_set_location (stmt, loc);
7325 gimple_seq_add_stmt_without_update (seq, stmt);
7326 return res;
7327 }
7328
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* A stepped encoding would require arithmetic we do not implement.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Only the encoded elements need checking; the rest repeat them.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* Some element is non-constant: emit a CONSTRUCTOR of all NELTS
	   elements assigned to a fresh register and return that.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant: build the constant directly.  */
  return builder->build ();
}
7366
7367 /* Return true if the result of assignment STMT is known to be non-negative.
7368 If the return value is based on the assumption that signed overflow is
7369 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7370 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7371
7372 static bool
gimple_assign_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7373 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7374 int depth)
7375 {
7376 enum tree_code code = gimple_assign_rhs_code (stmt);
7377 switch (get_gimple_rhs_class (code))
7378 {
7379 case GIMPLE_UNARY_RHS:
7380 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7381 gimple_expr_type (stmt),
7382 gimple_assign_rhs1 (stmt),
7383 strict_overflow_p, depth);
7384 case GIMPLE_BINARY_RHS:
7385 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7386 gimple_expr_type (stmt),
7387 gimple_assign_rhs1 (stmt),
7388 gimple_assign_rhs2 (stmt),
7389 strict_overflow_p, depth);
7390 case GIMPLE_TERNARY_RHS:
7391 return false;
7392 case GIMPLE_SINGLE_RHS:
7393 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7394 strict_overflow_p, depth);
7395 case GIMPLE_INVALID_RHS:
7396 break;
7397 }
7398 gcc_unreachable ();
7399 }
7400
7401 /* Return true if return value of call STMT is known to be non-negative.
7402 If the return value is based on the assumption that signed overflow is
7403 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7404 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7405
7406 static bool
gimple_call_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7407 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7408 int depth)
7409 {
7410 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7411 gimple_call_arg (stmt, 0) : NULL_TREE;
7412 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7413 gimple_call_arg (stmt, 1) : NULL_TREE;
7414
7415 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7416 gimple_call_combined_fn (stmt),
7417 arg0,
7418 arg1,
7419 strict_overflow_p, depth);
7420 }
7421
7422 /* Return true if return value of call STMT is known to be non-negative.
7423 If the return value is based on the assumption that signed overflow is
7424 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7425 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7426
7427 static bool
gimple_phi_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7428 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7429 int depth)
7430 {
7431 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7432 {
7433 tree arg = gimple_phi_arg_def (stmt, i);
7434 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7435 return false;
7436 }
7437 return true;
7438 }
7439
7440 /* Return true if STMT is known to compute a non-negative value.
7441 If the return value is based on the assumption that signed overflow is
7442 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7443 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7444
7445 bool
gimple_stmt_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7446 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7447 int depth)
7448 {
7449 switch (gimple_code (stmt))
7450 {
7451 case GIMPLE_ASSIGN:
7452 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7453 depth);
7454 case GIMPLE_CALL:
7455 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7456 depth);
7457 case GIMPLE_PHI:
7458 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7459 depth);
7460 default:
7461 return false;
7462 }
7463 }
7464
7465 /* Return true if the floating-point value computed by assignment STMT
7466 is known to have an integer value. We also allow +Inf, -Inf and NaN
7467 to be considered integer values. Return false for signaling NaN.
7468
7469 DEPTH is the current nesting depth of the query. */
7470
7471 static bool
gimple_assign_integer_valued_real_p(gimple * stmt,int depth)7472 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7473 {
7474 enum tree_code code = gimple_assign_rhs_code (stmt);
7475 switch (get_gimple_rhs_class (code))
7476 {
7477 case GIMPLE_UNARY_RHS:
7478 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7479 gimple_assign_rhs1 (stmt), depth);
7480 case GIMPLE_BINARY_RHS:
7481 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7482 gimple_assign_rhs1 (stmt),
7483 gimple_assign_rhs2 (stmt), depth);
7484 case GIMPLE_TERNARY_RHS:
7485 return false;
7486 case GIMPLE_SINGLE_RHS:
7487 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7488 case GIMPLE_INVALID_RHS:
7489 break;
7490 }
7491 gcc_unreachable ();
7492 }
7493
7494 /* Return true if the floating-point value computed by call STMT is known
7495 to have an integer value. We also allow +Inf, -Inf and NaN to be
7496 considered integer values. Return false for signaling NaN.
7497
7498 DEPTH is the current nesting depth of the query. */
7499
7500 static bool
gimple_call_integer_valued_real_p(gimple * stmt,int depth)7501 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7502 {
7503 tree arg0 = (gimple_call_num_args (stmt) > 0
7504 ? gimple_call_arg (stmt, 0)
7505 : NULL_TREE);
7506 tree arg1 = (gimple_call_num_args (stmt) > 1
7507 ? gimple_call_arg (stmt, 1)
7508 : NULL_TREE);
7509 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7510 arg0, arg1, depth);
7511 }
7512
7513 /* Return true if the floating-point result of phi STMT is known to have
7514 an integer value. We also allow +Inf, -Inf and NaN to be considered
7515 integer values. Return false for signaling NaN.
7516
7517 DEPTH is the current nesting depth of the query. */
7518
7519 static bool
gimple_phi_integer_valued_real_p(gimple * stmt,int depth)7520 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7521 {
7522 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7523 {
7524 tree arg = gimple_phi_arg_def (stmt, i);
7525 if (!integer_valued_real_single_p (arg, depth + 1))
7526 return false;
7527 }
7528 return true;
7529 }
7530
7531 /* Return true if the floating-point value computed by STMT is known
7532 to have an integer value. We also allow +Inf, -Inf and NaN to be
7533 considered integer values. Return false for signaling NaN.
7534
7535 DEPTH is the current nesting depth of the query. */
7536
7537 bool
gimple_stmt_integer_valued_real_p(gimple * stmt,int depth)7538 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7539 {
7540 switch (gimple_code (stmt))
7541 {
7542 case GIMPLE_ASSIGN:
7543 return gimple_assign_integer_valued_real_p (stmt, depth);
7544 case GIMPLE_CALL:
7545 return gimple_call_integer_valued_real_p (stmt, depth);
7546 case GIMPLE_PHI:
7547 return gimple_phi_integer_valued_real_p (stmt, depth);
7548 default:
7549 return false;
7550 }
7551 }
7552