1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2020 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 #include "varasm.h"
69
/* Ways get_range_strlen may compute a string length; selects how
   conservative the returned range is.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
84
85 static bool
86 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87
88 /* Return true when DECL can be referenced from current unit.
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
91 reasons:
92
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
98 set.
99 2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
103 declaring the body.
104 3) COMDAT functions referred by external vtables that
105 we devirtualize only during final compilation stage.
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
108 directly. */
109
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A fully inlined function body may no longer be output.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
190
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
195 tree
create_tmp_reg_or_ssa_name(tree type,gimple * stmt)196 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 {
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202 }
203
204 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  /* Already a constant in canonical form; nothing to do.  */
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &p + CST into &MEM[p, CST] so the address is a single
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced symbol cannot be used from here
	 (e.g. optimized-out COMDAT or hidden symbol in another DSO).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Nothing we could canonicalize; hand back the original expression.  */
  return orig_cval;
}
275
276 /* If SYM is a constant variable with known value, return the value.
277 NULL_TREE is returned otherwise. */
278
279 tree
get_symbol_constant_value(tree sym)280 get_symbol_constant_value (tree sym)
281 {
282 tree val = ctor_for_folding (sym);
283 if (val != error_mark_node)
284 {
285 if (val)
286 {
287 val = canonicalize_constructor_val (unshare_expr (val), sym);
288 if (val
289 && is_gimple_min_invariant (val)
290 && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
291 return val;
292 else
293 return NULL_TREE;
294 }
295 /* Variables declared 'const' without an initializer
296 have zero as the initializer if they may not be
297 overridden at link or run time. */
298 if (!val
299 && is_gimple_reg_type (TREE_TYPE (sym)))
300 return build_zero_cst (TREE_TYPE (sym));
301 }
302
303 return NULL_TREE;
304 }
305
306
307
308 /* Subroutine of fold_stmt. We perform several simplifications of the
309 memory reference tree EXPR and make sure to re-gimplify them properly
310 after propagation of constant addresses. IS_LHS is true if the
311 reference is supposed to be an lvalue. */
312
313 static tree
maybe_fold_reference(tree expr,bool is_lhs)314 maybe_fold_reference (tree expr, bool is_lhs)
315 {
316 tree result;
317
318 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
319 || TREE_CODE (expr) == REALPART_EXPR
320 || TREE_CODE (expr) == IMAGPART_EXPR)
321 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
322 return fold_unary_loc (EXPR_LOCATION (expr),
323 TREE_CODE (expr),
324 TREE_TYPE (expr),
325 TREE_OPERAND (expr, 0));
326 else if (TREE_CODE (expr) == BIT_FIELD_REF
327 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
328 return fold_ternary_loc (EXPR_LOCATION (expr),
329 TREE_CODE (expr),
330 TREE_TYPE (expr),
331 TREE_OPERAND (expr, 0),
332 TREE_OPERAND (expr, 1),
333 TREE_OPERAND (expr, 2));
334
335 if (!is_lhs
336 && (result = fold_const_aggregate_ref (expr))
337 && is_gimple_min_invariant (result))
338 return result;
339
340 return NULL_TREE;
341 }
342
343
344 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
345 replacement rhs for the statement or NULL_TREE if no simplification
346 could be made. It is assumed that the operands have been previously
347 folded. */
348
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete (final) target list of at most one
		   method, the virtual reference can be resolved.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to RHS type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	/* A register-type decl read may fold to its constant value.  */
	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
483
484
485 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
486 adjusting the replacement stmts location and virtual operands.
487 If the statement has a lhs the last stmt in the sequence is expected
488 to assign to that lhs. */
489
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  /* Propagate the replaced statement's location onto all new stmts.  */
  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Only stores and calls that may clobber memory need a VDEF.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the original VDEF;
	     earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
556
557 /* Convert EXPR into a GIMPLE value suitable for substitution on the
558 RHS of an assignment. Insert the necessary statements before
559 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
561 is replaced by an assignment of the new RHS to the result variable.
562 If the result is to be ignored, then the call is replaced by a
563 GIMPLE_NOP. A proper VDEF chain is retained by making the first
564 VUSE and the last VDEF of the whole sequence be the same as the replaced
565 statement and using new SSA names for stores in between. */
566
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored: gimplify EXPR for its side effects only.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  /* Nothing to insert; the call degenerates to a no-op.  */
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a value and assign it to the call's LHS,
	 appending the assignment after the gimplified sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, rewiring virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
612
613
614 /* Replace the call at *GSI with the gimple value VAL. */
615
616 void
replace_call_with_value(gimple_stmt_iterator * gsi,tree val)617 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
618 {
619 gimple *stmt = gsi_stmt (*gsi);
620 tree lhs = gimple_call_lhs (stmt);
621 gimple *repl;
622 if (lhs)
623 {
624 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
625 val = fold_convert (TREE_TYPE (lhs), val);
626 repl = gimple_build_assign (lhs, val);
627 }
628 else
629 repl = gimple_build_nop ();
630 tree vdef = gimple_vdef (stmt);
631 if (vdef && TREE_CODE (vdef) == SSA_NAME)
632 {
633 unlink_stmt_vdef (stmt);
634 release_ssa_name (vdef);
635 }
636 gsi_replace (gsi, repl, false);
637 }
638
639 /* Replace the call at *GSI with the new call REPL and fold that
640 again. */
641
static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Carry the result, location and virtual operands over to the
     replacement call before swapping it in.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  /* Give the new call a chance to fold further.  */
  fold_stmt (gsi);
}
652
653 /* Return true if VAR is a VAR_DECL or a component thereof. */
654
655 static bool
var_decl_component_p(tree var)656 var_decl_component_p (tree var)
657 {
658 tree inner = var;
659 while (handled_component_p (inner))
660 inner = TREE_OPERAND (inner, 0);
661 return (DECL_P (inner)
662 || (TREE_CODE (inner) == MEM_REF
663 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
664 }
665
666 /* Return TRUE if the SIZE argument, representing the size of an
667 object, is in a range of values of which exactly zero is valid. */
668
static bool
size_must_be_zero_p (tree size)
{
  /* A literal zero is trivially the only value.  */
  if (integer_zerop (size))
    return true;

  /* Range information is only available for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Valid object sizes are [0, SSIZE_MAX]; intersect SIZE's known
     range with it and test whether zero is the only remaining value.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
691
692 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
693 diagnose (otherwise undefined) overlapping copies without preventing
694 folding. When folded, GCC guarantees that overlapping memcpy has
695 the same semantics as memmove. Call to the library memcpy need not
696 provide the same guarantee. Return false if no simplification can
697 be made. */
698
699 static bool
gimple_fold_builtin_memory_op(gimple_stmt_iterator * gsi,tree dest,tree src,enum built_in_function code)700 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
701 tree dest, tree src, enum built_in_function code)
702 {
703 gimple *stmt = gsi_stmt (*gsi);
704 tree lhs = gimple_call_lhs (stmt);
705 tree len = gimple_call_arg (stmt, 2);
706 tree destvar, srcvar;
707 location_t loc = gimple_location (stmt);
708
709 /* If the LEN parameter is a constant zero or in range where
710 the only valid value is zero, return DEST. */
711 if (size_must_be_zero_p (len))
712 {
713 gimple *repl;
714 if (gimple_call_lhs (stmt))
715 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
716 else
717 repl = gimple_build_nop ();
718 tree vdef = gimple_vdef (stmt);
719 if (vdef && TREE_CODE (vdef) == SSA_NAME)
720 {
721 unlink_stmt_vdef (stmt);
722 release_ssa_name (vdef);
723 }
724 gsi_replace (gsi, repl, false);
725 return true;
726 }
727
728 /* If SRC and DEST are the same (and not volatile), return
729 DEST{,+LEN,+LEN-1}. */
730 if (operand_equal_p (src, dest, 0))
731 {
732 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
733 It's safe and may even be emitted by GCC itself (see bug
734 32667). */
735 unlink_stmt_vdef (stmt);
736 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
737 release_ssa_name (gimple_vdef (stmt));
738 if (!lhs)
739 {
740 gsi_replace (gsi, gimple_build_nop (), false);
741 return true;
742 }
743 goto done;
744 }
745 else
746 {
747 tree srctype, desttype;
748 unsigned int src_align, dest_align;
749 tree off0;
750 const char *tmp_str;
751 unsigned HOST_WIDE_INT tmp_len;
752
753 /* Build accesses at offset zero with a ref-all character type. */
754 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
755 ptr_mode, true), 0);
756
757 /* If we can perform the copy efficiently with first doing all loads
758 and then all stores inline it that way. Currently efficiently
759 means that we can load all the memory into a single integer
760 register which is what MOVE_MAX gives us. */
761 src_align = get_pointer_alignment (src);
762 dest_align = get_pointer_alignment (dest);
763 if (tree_fits_uhwi_p (len)
764 && compare_tree_int (len, MOVE_MAX) <= 0
765 /* FIXME: Don't transform copies from strings with known length.
766 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
767 from being handled, and the case was XFAILed for that reason.
768 Now that it is handled and the XFAIL removed, as soon as other
769 strlenopt tests that rely on it for passing are adjusted, this
770 hack can be removed. */
771 && !c_strlen (src, 1)
772 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
773 && memchr (tmp_str, 0, tmp_len) == NULL))
774 {
775 unsigned ilen = tree_to_uhwi (len);
776 if (pow2p_hwi (ilen))
777 {
778 /* Detect out-of-bounds accesses without issuing warnings.
779 Avoid folding out-of-bounds copies but to avoid false
780 positives for unreachable code defer warning until after
781 DCE has worked its magic.
782 -Wrestrict is still diagnosed. */
783 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
784 dest, src, len, len,
785 false, false))
786 if (warning != OPT_Wrestrict)
787 return false;
788
789 scalar_int_mode mode;
790 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
791 if (type
792 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
793 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
794 /* If the destination pointer is not aligned we must be able
795 to emit an unaligned store. */
796 && (dest_align >= GET_MODE_ALIGNMENT (mode)
797 || !targetm.slow_unaligned_access (mode, dest_align)
798 || (optab_handler (movmisalign_optab, mode)
799 != CODE_FOR_nothing)))
800 {
801 tree srctype = type;
802 tree desttype = type;
803 if (src_align < GET_MODE_ALIGNMENT (mode))
804 srctype = build_aligned_type (type, src_align);
805 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
806 tree tem = fold_const_aggregate_ref (srcmem);
807 if (tem)
808 srcmem = tem;
809 else if (src_align < GET_MODE_ALIGNMENT (mode)
810 && targetm.slow_unaligned_access (mode, src_align)
811 && (optab_handler (movmisalign_optab, mode)
812 == CODE_FOR_nothing))
813 srcmem = NULL_TREE;
814 if (srcmem)
815 {
816 gimple *new_stmt;
817 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
818 {
819 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
820 srcmem
821 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
822 new_stmt);
823 gimple_assign_set_lhs (new_stmt, srcmem);
824 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
825 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
826 }
827 if (dest_align < GET_MODE_ALIGNMENT (mode))
828 desttype = build_aligned_type (type, dest_align);
829 new_stmt
830 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
831 dest, off0),
832 srcmem);
833 gimple_move_vops (new_stmt, stmt);
834 if (!lhs)
835 {
836 gsi_replace (gsi, new_stmt, false);
837 return true;
838 }
839 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
840 goto done;
841 }
842 }
843 }
844 }
845
846 if (code == BUILT_IN_MEMMOVE)
847 {
848 /* Both DEST and SRC must be pointer types.
849 ??? This is what old code did. Is the testing for pointer types
850 really mandatory?
851
852 If either SRC is readonly or length is 1, we can use memcpy. */
853 if (!dest_align || !src_align)
854 return false;
855 if (readonly_data_expr (src)
856 || (tree_fits_uhwi_p (len)
857 && (MIN (src_align, dest_align) / BITS_PER_UNIT
858 >= tree_to_uhwi (len))))
859 {
860 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
861 if (!fn)
862 return false;
863 gimple_call_set_fndecl (stmt, fn);
864 gimple_call_set_arg (stmt, 0, dest);
865 gimple_call_set_arg (stmt, 1, src);
866 fold_stmt (gsi);
867 return true;
868 }
869
870 /* If *src and *dest can't overlap, optimize into memcpy as well. */
871 if (TREE_CODE (src) == ADDR_EXPR
872 && TREE_CODE (dest) == ADDR_EXPR)
873 {
874 tree src_base, dest_base, fn;
875 poly_int64 src_offset = 0, dest_offset = 0;
876 poly_uint64 maxsize;
877
878 srcvar = TREE_OPERAND (src, 0);
879 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
880 if (src_base == NULL)
881 src_base = srcvar;
882 destvar = TREE_OPERAND (dest, 0);
883 dest_base = get_addr_base_and_unit_offset (destvar,
884 &dest_offset);
885 if (dest_base == NULL)
886 dest_base = destvar;
887 if (!poly_int_tree_p (len, &maxsize))
888 maxsize = -1;
889 if (SSA_VAR_P (src_base)
890 && SSA_VAR_P (dest_base))
891 {
892 if (operand_equal_p (src_base, dest_base, 0)
893 && ranges_maybe_overlap_p (src_offset, maxsize,
894 dest_offset, maxsize))
895 return false;
896 }
897 else if (TREE_CODE (src_base) == MEM_REF
898 && TREE_CODE (dest_base) == MEM_REF)
899 {
900 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
901 TREE_OPERAND (dest_base, 0), 0))
902 return false;
903 poly_offset_int full_src_offset
904 = mem_ref_offset (src_base) + src_offset;
905 poly_offset_int full_dest_offset
906 = mem_ref_offset (dest_base) + dest_offset;
907 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
908 full_dest_offset, maxsize))
909 return false;
910 }
911 else
912 return false;
913
914 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
915 if (!fn)
916 return false;
917 gimple_call_set_fndecl (stmt, fn);
918 gimple_call_set_arg (stmt, 0, dest);
919 gimple_call_set_arg (stmt, 1, src);
920 fold_stmt (gsi);
921 return true;
922 }
923
924 /* If the destination and source do not alias optimize into
925 memcpy as well. */
926 if ((is_gimple_min_invariant (dest)
927 || TREE_CODE (dest) == SSA_NAME)
928 && (is_gimple_min_invariant (src)
929 || TREE_CODE (src) == SSA_NAME))
930 {
931 ao_ref destr, srcr;
932 ao_ref_init_from_ptr_and_size (&destr, dest, len);
933 ao_ref_init_from_ptr_and_size (&srcr, src, len);
934 if (!refs_may_alias_p_1 (&destr, &srcr, false))
935 {
936 tree fn;
937 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
938 if (!fn)
939 return false;
940 gimple_call_set_fndecl (stmt, fn);
941 gimple_call_set_arg (stmt, 0, dest);
942 gimple_call_set_arg (stmt, 1, src);
943 fold_stmt (gsi);
944 return true;
945 }
946 }
947
948 return false;
949 }
950
951 if (!tree_fits_shwi_p (len))
952 return false;
953 if (!POINTER_TYPE_P (TREE_TYPE (src))
954 || !POINTER_TYPE_P (TREE_TYPE (dest)))
955 return false;
956 /* In the following try to find a type that is most natural to be
957 used for the memcpy source and destination and that allows
958 the most optimization when memcpy is turned into a plain assignment
959 using that type. In theory we could always use a char[len] type
960 but that only gains us that the destination and source possibly
961 no longer will have their address taken. */
962 srctype = TREE_TYPE (TREE_TYPE (src));
963 if (TREE_CODE (srctype) == ARRAY_TYPE
964 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
965 srctype = TREE_TYPE (srctype);
966 desttype = TREE_TYPE (TREE_TYPE (dest));
967 if (TREE_CODE (desttype) == ARRAY_TYPE
968 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
969 desttype = TREE_TYPE (desttype);
970 if (TREE_ADDRESSABLE (srctype)
971 || TREE_ADDRESSABLE (desttype))
972 return false;
973
974 /* Make sure we are not copying using a floating-point mode or
975 a type whose size possibly does not match its precision. */
976 if (FLOAT_MODE_P (TYPE_MODE (desttype))
977 || TREE_CODE (desttype) == BOOLEAN_TYPE
978 || TREE_CODE (desttype) == ENUMERAL_TYPE)
979 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
980 if (FLOAT_MODE_P (TYPE_MODE (srctype))
981 || TREE_CODE (srctype) == BOOLEAN_TYPE
982 || TREE_CODE (srctype) == ENUMERAL_TYPE)
983 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
984 if (!srctype)
985 srctype = desttype;
986 if (!desttype)
987 desttype = srctype;
988 if (!srctype)
989 return false;
990
991 src_align = get_pointer_alignment (src);
992 dest_align = get_pointer_alignment (dest);
993
994 /* Choose between src and destination type for the access based
995 on alignment, whether the access constitutes a register access
996 and whether it may actually expose a declaration for SSA rewrite
997 or SRA decomposition. */
998 destvar = NULL_TREE;
999 srcvar = NULL_TREE;
1000 if (TREE_CODE (dest) == ADDR_EXPR
1001 && var_decl_component_p (TREE_OPERAND (dest, 0))
1002 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1003 && dest_align >= TYPE_ALIGN (desttype)
1004 && (is_gimple_reg_type (desttype)
1005 || src_align >= TYPE_ALIGN (desttype)))
1006 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1007 else if (TREE_CODE (src) == ADDR_EXPR
1008 && var_decl_component_p (TREE_OPERAND (src, 0))
1009 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1010 && src_align >= TYPE_ALIGN (srctype)
1011 && (is_gimple_reg_type (srctype)
1012 || dest_align >= TYPE_ALIGN (srctype)))
1013 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1015 return false;
1016
1017 /* Now that we chose an access type express the other side in
1018 terms of it if the target allows that with respect to alignment
1019 constraints. */
1020 if (srcvar == NULL_TREE)
1021 {
1022 if (src_align >= TYPE_ALIGN (desttype))
1023 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1024 else
1025 {
1026 if (STRICT_ALIGNMENT)
1027 return false;
1028 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1029 src_align);
1030 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1031 }
1032 }
1033 else if (destvar == NULL_TREE)
1034 {
1035 if (dest_align >= TYPE_ALIGN (srctype))
1036 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1037 else
1038 {
1039 if (STRICT_ALIGNMENT)
1040 return false;
1041 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1042 dest_align);
1043 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1044 }
1045 }
1046
1047 /* Same as above, detect out-of-bounds accesses without issuing
1048 warnings. Avoid folding out-of-bounds copies but to avoid
1049 false positives for unreachable code defer warning until
1050 after DCE has worked its magic.
1051 -Wrestrict is still diagnosed. */
1052 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1053 dest, src, len, len,
1054 false, false))
1055 if (warning != OPT_Wrestrict)
1056 return false;
1057
1058 gimple *new_stmt;
1059 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1060 {
1061 tree tem = fold_const_aggregate_ref (srcvar);
1062 if (tem)
1063 srcvar = tem;
1064 if (! is_gimple_min_invariant (srcvar))
1065 {
1066 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1067 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1068 new_stmt);
1069 gimple_assign_set_lhs (new_stmt, srcvar);
1070 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1072 }
1073 new_stmt = gimple_build_assign (destvar, srcvar);
1074 goto set_vop_and_replace;
1075 }
1076
1077 /* We get an aggregate copy. Use an unsigned char[] type to
1078 perform the copying to preserve padding and to avoid any issues
1079 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1080 desttype = build_array_type_nelts (unsigned_char_type_node,
1081 tree_to_uhwi (len));
1082 srctype = desttype;
1083 if (src_align > TYPE_ALIGN (srctype))
1084 srctype = build_aligned_type (srctype, src_align);
1085 if (dest_align > TYPE_ALIGN (desttype))
1086 desttype = build_aligned_type (desttype, dest_align);
1087 new_stmt
1088 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1089 fold_build2 (MEM_REF, srctype, src, off0));
1090 set_vop_and_replace:
1091 gimple_move_vops (new_stmt, stmt);
1092 if (!lhs)
1093 {
1094 gsi_replace (gsi, new_stmt, false);
1095 return true;
1096 }
1097 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 }
1099
1100 done:
1101 gimple_seq stmts = NULL;
1102 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1103 len = NULL_TREE;
1104 else if (code == BUILT_IN_MEMPCPY)
1105 {
1106 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1107 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1108 TREE_TYPE (dest), dest, len);
1109 }
1110 else
1111 gcc_unreachable ();
1112
1113 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1114 gimple *repl = gimple_build_assign (lhs, dest);
1115 gsi_replace (gsi, repl, false);
1116 return true;
1117 }
1118
1119 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1120 to built-in memcmp (a, b, len). */
1121
1122 static bool
gimple_fold_builtin_bcmp(gimple_stmt_iterator * gsi)1123 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1124 {
1125 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1126
1127 if (!fn)
1128 return false;
1129
1130 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1131
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree a = gimple_call_arg (stmt, 0);
1134 tree b = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1136
1137 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1138 replace_call_with_call_and_fold (gsi, repl);
1139
1140 return true;
1141 }
1142
1143 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1144 to built-in memmove (dest, src, len). */
1145
1146 static bool
gimple_fold_builtin_bcopy(gimple_stmt_iterator * gsi)1147 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1148 {
1149 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1150
1151 if (!fn)
1152 return false;
1153
1154 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1155 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1156 len) into memmove (dest, src, len). */
1157
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree src = gimple_call_arg (stmt, 0);
1160 tree dest = gimple_call_arg (stmt, 1);
1161 tree len = gimple_call_arg (stmt, 2);
1162
1163 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1164 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1165 replace_call_with_call_and_fold (gsi, repl);
1166
1167 return true;
1168 }
1169
1170 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1171 to built-in memset (dest, 0, len). */
1172
1173 static bool
gimple_fold_builtin_bzero(gimple_stmt_iterator * gsi)1174 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1175 {
1176 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1177
1178 if (!fn)
1179 return false;
1180
1181 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1182
1183 gimple *stmt = gsi_stmt (*gsi);
1184 tree dest = gimple_call_arg (stmt, 0);
1185 tree len = gimple_call_arg (stmt, 1);
1186
1187 gimple_seq seq = NULL;
1188 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1189 gimple_seq_add_stmt_without_update (&seq, repl);
1190 gsi_replace_with_seq_vops (gsi, seq);
1191 fold_stmt (gsi);
1192
1193 return true;
1194 }
1195
1196 /* Fold function call to builtin memset or bzero at *GSI setting the
1197 memory of size LEN to VAL. Return whether a simplification was made. */
1198
1199 static bool
gimple_fold_builtin_memset(gimple_stmt_iterator * gsi,tree c,tree len)1200 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1201 {
1202 gimple *stmt = gsi_stmt (*gsi);
1203 tree etype;
1204 unsigned HOST_WIDE_INT length, cval;
1205
1206 /* If the LEN parameter is zero, return DEST. */
1207 if (integer_zerop (len))
1208 {
1209 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1210 return true;
1211 }
1212
1213 if (! tree_fits_uhwi_p (len))
1214 return false;
1215
1216 if (TREE_CODE (c) != INTEGER_CST)
1217 return false;
1218
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree var = dest;
1221 if (TREE_CODE (var) != ADDR_EXPR)
1222 return false;
1223
1224 var = TREE_OPERAND (var, 0);
1225 if (TREE_THIS_VOLATILE (var))
1226 return false;
1227
1228 etype = TREE_TYPE (var);
1229 if (TREE_CODE (etype) == ARRAY_TYPE)
1230 etype = TREE_TYPE (etype);
1231
1232 if (!INTEGRAL_TYPE_P (etype)
1233 && !POINTER_TYPE_P (etype))
1234 return NULL_TREE;
1235
1236 if (! var_decl_component_p (var))
1237 return NULL_TREE;
1238
1239 length = tree_to_uhwi (len);
1240 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1241 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1242 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1243 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1244 return NULL_TREE;
1245
1246 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1247 return NULL_TREE;
1248
1249 if (!type_has_mode_precision_p (etype))
1250 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1251 TYPE_UNSIGNED (etype));
1252
1253 if (integer_zerop (c))
1254 cval = 0;
1255 else
1256 {
1257 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1258 return NULL_TREE;
1259
1260 cval = TREE_INT_CST_LOW (c);
1261 cval &= 0xff;
1262 cval |= cval << 8;
1263 cval |= cval << 16;
1264 cval |= (cval << 31) << 1;
1265 }
1266
1267 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1268 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1269 gimple_move_vops (store, stmt);
1270 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1271 if (gimple_call_lhs (stmt))
1272 {
1273 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1274 gsi_replace (gsi, asgn, false);
1275 }
1276 else
1277 {
1278 gimple_stmt_iterator gsi2 = *gsi;
1279 gsi_prev (gsi);
1280 gsi_remove (&gsi2, true);
1281 }
1282
1283 return true;
1284 }
1285
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Try to
   determine the length (or length range) of the string(s) ARG refers
   to and store the result in *PDATA.  VISITED, RKIND, and ELTSIZE are
   as for get_range_strlen.  Return true if *PDATA was updated.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: recurse on the pointer SSA_NAME P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is its size minus
	     one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1595
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* A conditional: the range covers both selected values.  */
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
		{
		  if (rkind != SRK_LENRANGE)
		    return false;
		  /* Set the upper bound to the maximum to prevent
		     it from being adjusted in the next iteration but
		     leave MINLEN and the more conservative MAXBOUND
		     determined so far alone (or leave them null if
		     they haven't been set yet).  That the MINLEN is
		     in fact zero can be determined from MAXLEN being
		     unbounded but the discovered minimum is used for
		     diagnostics.  */
		  pdata->maxlen = build_all_ones_cst (size_type_node);
		}
	    return true;
	  }
        return false;

      case GIMPLE_PHI:
	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	   must have a constant length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
          }
        return true;

      default:
        return false;
    }
}
1708
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null (if
     it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
1750
1751 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1752 For ARG of pointer types, NONSTR indicates if the caller is prepared
1753 to handle unterminated strings. For integer ARG and when RKIND ==
1754 SRK_INT_VALUE, NONSTR must be null.
1755
1756 If an unterminated array is discovered and our caller handles
1757 unterminated arrays, then bubble up the offending DECL and
1758 return the maximum size. Otherwise return NULL. */
1759
1760 static tree
1761 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1762 {
1763 /* A non-null NONSTR is meaningless when determining the maximum
1764 value of an integer ARG. */
1765 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1766 /* ARG must have an integral type when RKIND says so. */
1767 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1768
1769 bitmap visited = NULL;
1770
1771 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1772 is unbounded. */
1773 c_strlen_data lendata = { };
1774 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1775 lendata.maxlen = NULL_TREE;
1776 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1777 lendata.maxlen = NULL_TREE;
1778
1779 if (visited)
1780 BITMAP_FREE (visited);
1781
1782 if (nonstr)
1783 {
1784 /* For callers prepared to handle unterminated arrays set
1785 *NONSTR to point to the declaration of the array and return
1786 the maximum length/size. */
1787 *nonstr = lendata.decl;
1788 return lendata.maxlen;
1789 }
1790
1791 /* Fail if the constant array isn't nul-terminated. */
1792 return lendata.decl ? NULL_TREE : lendata.maxlen;
1793 }
1794
1795
/* Fold a call to the built-in strcpy with arguments DEST and SRC at
   *GSI, either by replacing it with DEST (when SRC == DEST) or by
   transforming it into memcpy (DEST, SRC, strlen (SRC) + 1) when the
   source length is a known constant.  Return true if the call was
   simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy form trades size for speed; skip it at -Os.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Transform into memcpy (dest, src, strlen (src) + 1).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1859
/* Fold a call to the built-in strncpy with arguments DEST, SRC, and
   LEN at *GSI, either by replacing it with DEST (when LEN is zero) or
   by transforming it into memcpy when the constant source length
   (including the terminating nul) is known not to exceed a constant
   LEN.  Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorate with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1934
/* Fold function call to builtin strchr or strrchr (IS_STRRCHR selects
   which).  If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if the call was
   folded, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The transformations below only make sense when the result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both arguments constant: compute the result at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Character found at offset P1 - P: fold to STR + offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise transform str(r)chr (str, 0) into str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2023
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  Return true if the call was folded,
   false otherwise.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* The transformations below only make sense when the result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* All transformations need a constant needle.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings constant: compute the result at compile time.  */
  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  /* Needle not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Needle found at offset R - P: fold to HAYSTACK + offset.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2090
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   When SRC is known to be the empty string, fold the call to DST.
   Otherwise, when optimizing for speed and the length of SRC is known,
   split the call into strlen (DST) followed by a memcpy of SRC (plus
   its terminating nul) to DST + strlen (DST), preserving DST as the
   call's result.

   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion trades size for speed; keep the
     plain strcat call in cold/size-optimized code.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST, so materialize that as the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2182
2183 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2184 are the arguments to the call. */
2185
2186 static bool
gimple_fold_builtin_strcat_chk(gimple_stmt_iterator * gsi)2187 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2188 {
2189 gimple *stmt = gsi_stmt (*gsi);
2190 tree dest = gimple_call_arg (stmt, 0);
2191 tree src = gimple_call_arg (stmt, 1);
2192 tree size = gimple_call_arg (stmt, 2);
2193 tree fn;
2194 const char *p;
2195
2196
2197 p = c_getstr (src);
2198 /* If the SRC parameter is "", return DEST. */
2199 if (p && *p == '\0')
2200 {
2201 replace_call_with_value (gsi, dest);
2202 return true;
2203 }
2204
2205 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2206 return false;
2207
2208 /* If __builtin_strcat_chk is used, assume strcat is available. */
2209 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2210 if (!fn)
2211 return false;
2212
2213 gimple *repl = gimple_build_call (fn, 2, dest, src);
2214 replace_call_with_call_and_fold (gsi, repl);
2215 return true;
2216 }
2217
/* Simplify a call to the strncat builtin.  Fold the call to DST when it
   copies nothing, or to plain strcat when the constant bound is known to
   cover the whole constant source string, diagnosing suspicious bounds
   along the way.  Return true if the call was folded, false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The remaining folding needs both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  /* Respect a previously set no-warning bit on the statement.  */
  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2306
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Fold to DEST, to __strcat_chk, or to plain strncat
   depending on what the constant arguments show.  Return true if the
   call was folded, false otherwise.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The folds below need a constant object size.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE != (size_t)-1 means the object size is known and the check
     is meaningful; only weaken the call to the checked strcat.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2362
2363 /* Build and append gimple statements to STMTS that would load a first
2364 character of a memory location identified by STR. LOC is location
2365 of the statement. */
2366
2367 static tree
gimple_load_first_char(location_t loc,tree str,gimple_seq * stmts)2368 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2369 {
2370 tree var;
2371
2372 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2373 tree cst_uchar_ptr_node
2374 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2375 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2376
2377 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2378 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2379 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2380
2381 gimple_assign_set_lhs (stmt, var);
2382 gimple_seq_add_stmt_without_update (stmts, stmt);
2383
2384 return var;
2385 }
2386
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  Handles strcmp, strncmp, strcasecmp, and strncasecmp
   (including the _EQ variants of the first two).  Folds to a constant
   when both strings are known, and to cheaper forms (single character
   loads, or strcmp) when only one side or the bound constrains the
   result.  Return true if the call was folded, false otherwise.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the constant third argument when known, otherwise -1U
     meaning "no (known) bound", i.e. the str[case]cmp case.  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.   LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = c_getstr (str1, &len1);
  const char *p2 = c_getstr (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* An unterminated array cannot be compared by strcmp.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Give up if the comparison could read past the nul of
	       either (unterminated) array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* strncmp agreeing on 0 implies the case-insensitive
	       comparison is also 0; a nonzero result proves nothing.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared: either the
     bound is a known nonzero constant or the function is unbounded.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2603
/* Fold a call to the memchr builtin pointed to by the GSI iterator.
   Fold to a null pointer when LEN is zero or the character is known
   not to occur, and to ARG1 + offset when the match position is known
   at compile time.  Return true if the call was folded, false
   otherwise.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* The remaining folds need a constant character and a constant
     length representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  /* STRING_LENGTH is the length of the string constant ARG1 points
     into (not necessarily nul-terminated).  */
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only search the part of the constant that is both within the
	 string and within the requested LEN.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  string_constant (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when LEN stays within the underlying
	     array; a larger LEN would read past it.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Match found: fold to ARG1 + offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No lhs: the call is dead; replace it with a no-op that
	       still carries the virtual operands.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2673
/* Fold a call to the fputs builtin.  ARG0 is the string argument and
   ARG1 the stream argument of the call.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  Depending on the known length of
   ARG0 the call is removed, turned into fputc, or turned into fwrite.
   Return true if the call was transformed, false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation
     (fputc/fwrite do not return the same value as fputs).  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2750
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call, and FCODE is
   the BUILT_IN_* code of the builtin being folded.  When the object
   size check is provably redundant (SIZE unknown, or SIZE known to
   cover LEN or its maximum value), fold the call into the corresponding
   unchecked mem* builtin.  Return true if the call was folded, false
   otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* The folds below need a constant object size.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN may take, when that is known.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE == (size_t)-1 means the object size is unknown and the check
     can never fail; otherwise prove SIZE >= LEN (or MAXLEN) first.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2851
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin (BUILT_IN_STRCPY_CHK or
   BUILT_IN_STPCPY_CHK).  Replace the checked call with a cheaper
   equivalent when the copy can be proven not to write more than
   SIZE bytes.  Return true if the statement was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The destination object size must be a known constant to reason
     about overflow.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN, if non-null, is an upper bound on strlen (SRC).  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* A SIZE of all ones means the destination size is unknown
     (__builtin_object_size returned (size_t)-1).  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes so the terminating nul is included.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Punt unless the copy provably fits in the destination.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2956
/* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC,
   LEN, and SIZE are the arguments to the call; FCODE is the BUILT_IN_*
   code of the builtin (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).
   Replace the checked call with plain st{r,p}ncpy when the copy can be
   proven not to write more than SIZE bytes.  Return true if the
   statement was simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The destination object size must be a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN, if non-null, is an upper bound on the value of LEN.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* A SIZE of all ones means the destination size is unknown.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Punt unless the copy provably fits in the destination.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3016
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      /* Suppress repeat warnings on the same statement.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  /* Expanding to memcpy below may grow code size; punt when
     optimizing for size unless the copy is trivially empty.  */
  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* LENP1 = LEN + 1, so the terminating nul is copied as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Carry the virtual operands of the original call over to the
     memcpy call, which now performs the memory effect.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3091
/* Fold a call to __{,v}snprintf_chk at *GSI into plain {,v}snprintf
   when the bound can be proven not to exceed the destination size.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   Return true if the statement was simplified, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  /* Argument layout: (dest, len, flag, size, fmt, ...).  */
  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The destination object size must be a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE of all ones means the destination size is unknown.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the varargs down by two slots (FLAG and SIZE are dropped).  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3172
/* Fold a call to __{,v}sprintf_chk at *GSI into plain {,v}sprintf
   when the output can be proven to fit in the destination.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true
   if the statement was simplified, false if a normal call should be
   emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  /* Argument layout: (dest, flag, size, fmt, ...).  */
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The destination object size must be a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN, if determined below, is the exact output length.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* A SIZE of all ones means the destination size is unknown;
     otherwise the output must provably fit.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the varargs down by two slots (FLAG and SIZE are dropped).  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3268
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is the constant length of the format string.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The return value (strlen of ORIG) is only needed when the
	 call's result is actually used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3402
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant to prove that the
     output is not truncated.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, here the constant format length.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The exact length of ORIG must be known to prove no truncation
	 and to materialize the return value.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3539
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise return true
   after replacing the call with fputs/fputc or removing it.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument is only valid for the va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3640
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise return true
   after replacing the call with puts/putchar or removing it.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", str) and a %-free format string; in
     either case the effective output is a plain string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3792
3793
3794
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument can be determined exactly, replace the call with the
   constant; otherwise record the computed [MIN, MAX] length range on
   the call's LHS.  Return true if the call was replaced.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  /* Only use the range when the argument is nul-terminated
     (lendata.decl is null) and both bounds are constants.  */
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range: fall back to [0, max object size - 2]
	 (one byte for the nul, and strlen must be representable).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
3845
3846 /* Fold a call to __builtin_acc_on_device. */
3847
3848 static bool
gimple_fold_builtin_acc_on_device(gimple_stmt_iterator * gsi,tree arg0)3849 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3850 {
3851 /* Defer folding until we know which compiler we're in. */
3852 if (symtab->state != EXPANSION)
3853 return false;
3854
3855 unsigned val_host = GOMP_DEVICE_HOST;
3856 unsigned val_dev = GOMP_DEVICE_NONE;
3857
3858 #ifdef ACCEL_COMPILER
3859 val_host = GOMP_DEVICE_NOT_HOST;
3860 val_dev = ACCEL_COMPILER_acc_device;
3861 #endif
3862
3863 location_t loc = gimple_location (gsi_stmt (*gsi));
3864
3865 tree host_eq = make_ssa_name (boolean_type_node);
3866 gimple *host_ass = gimple_build_assign
3867 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3868 gimple_set_location (host_ass, loc);
3869 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3870
3871 tree dev_eq = make_ssa_name (boolean_type_node);
3872 gimple *dev_ass = gimple_build_assign
3873 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3874 gimple_set_location (dev_ass, loc);
3875 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3876
3877 tree result = make_ssa_name (boolean_type_node);
3878 gimple *result_ass = gimple_build_assign
3879 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3880 gimple_set_location (result_ass, loc);
3881 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3882
3883 replace_call_with_value (gsi, result);
3884
3885 return true;
3886 }
3887
3888 /* Fold realloc (0, n) -> malloc (n). */
3889
3890 static bool
gimple_fold_builtin_realloc(gimple_stmt_iterator * gsi)3891 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3892 {
3893 gimple *stmt = gsi_stmt (*gsi);
3894 tree arg = gimple_call_arg (stmt, 0);
3895 tree size = gimple_call_arg (stmt, 1);
3896
3897 if (operand_equal_p (arg, null_pointer_node, 0))
3898 {
3899 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3900 if (fn_malloc)
3901 {
3902 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3903 replace_call_with_call_and_fold (gsi, repl);
3904 return true;
3905 }
3906 }
3907 return false;
3908 }
3909
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch on the builtin's function code to the dedicated folders;
     each returns true iff it simplified or replaced the call.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family folders need a specific argument count; the
       format string and optional single argument are passed through
       explicitly.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      /* If the value is unused, conversions don't matter; otherwise
	 make sure the replacement has the call's return type.  */
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
4086
4087 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4088 function calls to constants, where possible. */
4089
4090 static tree
fold_internal_goacc_dim(const gimple * call)4091 fold_internal_goacc_dim (const gimple *call)
4092 {
4093 int axis = oacc_get_ifn_dim_arg (call);
4094 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4095 tree result = NULL_TREE;
4096 tree type = TREE_TYPE (gimple_call_lhs (call));
4097
4098 switch (gimple_call_internal_fn (call))
4099 {
4100 case IFN_GOACC_DIM_POS:
4101 /* If the size is 1, we know the answer. */
4102 if (size == 1)
4103 result = build_int_cst (type, 0);
4104 break;
4105 case IFN_GOACC_DIM_SIZE:
4106 /* If the size is not dynamic, we know the answer. */
4107 if (size)
4108 result = build_int_cst (type, size);
4109 break;
4110 default:
4111 break;
4112 }
4113
4114 return result;
4115 }
4116
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* The builtin has exactly 6 arguments.  Only transform when atomics
     are inlined, when optimizing, when no TSan/ASan instrumentation
     would be confused by it, when the call is the normal builtin and
     when the statement carries virtual operands.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  /* Only the size-suffixed __atomic_compare_exchange_N builtins apply.  */
  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of an SSA variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* Fetch the integral type of the third parameter from the builtin's
     prototype and require the target to support compare-and-swap in
     that mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* The expected variable must have exactly the size of the mode.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
4182
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the uintN_t type of the builtin's third parameter; the
     internal function returns a complex value of that type carrying
     both the old memory contents and the success flag.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* EXPECTED is the variable whose address was the second argument;
     optimize_atomic_compare_exchange_p guaranteed an ADDR_EXPR here.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name so it can be
     passed by value to the internal function.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  /* If EXPECTED's type differs from ITYPE, reinterpret the bits.  */
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and access size into one flag: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw, follow-up statements must be
     placed on the fallthru edge rather than after the call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  /* If the result was used, extract the success flag from the
     imaginary part and convert it to the original boolean lhs.  */
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* The real part carries the value read from memory; store it back
     into EXPECTED, converting the representation if necessary.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *GSI at the load of EXPECTED inserted first above.  */
  *gsi = gsiret;
}
4270
4271 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4272 doesn't fit into TYPE. The test for overflow should be regardless of
4273 -fwrapv, and even for unsigned types. */
4274
4275 bool
arith_overflowed_p(enum tree_code code,const_tree type,const_tree arg0,const_tree arg1)4276 arith_overflowed_p (enum tree_code code, const_tree type,
4277 const_tree arg0, const_tree arg1)
4278 {
4279 widest2_int warg0 = widest2_int_cst (arg0);
4280 widest2_int warg1 = widest2_int_cst (arg1);
4281 widest2_int wres;
4282 switch (code)
4283 {
4284 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4285 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4286 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4287 default: gcc_unreachable ();
4288 }
4289 signop sign = TYPE_SIGN (type);
4290 if (sign == UNSIGNED && wi::neg_p (wres))
4291 return true;
4292 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4293 }
4294
4295 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4296 for the memory it references, otherwise return null. VECTYPE is the
4297 type of the memory vector. */
4298
4299 static tree
gimple_fold_mask_load_store_mem_ref(gcall * call,tree vectype)4300 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4301 {
4302 tree ptr = gimple_call_arg (call, 0);
4303 tree alias_align = gimple_call_arg (call, 1);
4304 tree mask = gimple_call_arg (call, 2);
4305 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4306 return NULL_TREE;
4307
4308 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
4309 if (TYPE_ALIGN (vectype) != align)
4310 vectype = build_aligned_type (vectype, align);
4311 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4312 return fold_build2 (MEM_REF, vectype, ptr, offset);
4313 }
4314
4315 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4316
4317 static bool
gimple_fold_mask_load(gimple_stmt_iterator * gsi,gcall * call)4318 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4319 {
4320 tree lhs = gimple_call_lhs (call);
4321 if (!lhs)
4322 return false;
4323
4324 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4325 {
4326 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4327 gimple_set_location (new_stmt, gimple_location (call));
4328 gimple_move_vops (new_stmt, call);
4329 gsi_replace (gsi, new_stmt, false);
4330 return true;
4331 }
4332 return false;
4333 }
4334
4335 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4336
4337 static bool
gimple_fold_mask_store(gimple_stmt_iterator * gsi,gcall * call)4338 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4339 {
4340 tree rhs = gimple_call_arg (call, 3);
4341 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4342 {
4343 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4344 gimple_set_location (new_stmt, gimple_location (call));
4345 gimple_move_vops (new_stmt, call);
4346 gsi_replace (gsi, new_stmt, false);
4347 return true;
4348 }
4349 return false;
4350 }
4351
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolves to a known function; warn
	     in the dump if that target contradicts the type hierarchy.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if type analysis proves
	     there is at most one possible target, use it (zero targets
	     means the call is unreachable).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call can never execute, so
		     replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the static chain operand itself.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* The remaining transforms may replace the statement entirely, which
     is not allowed when folding in place.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Internal functions: fold sanitizer checks, OpenACC dims,
	 overflow arithmetic and unconditional masked loads/stores.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the object size is unknown (all ones)
	       or the constant offset is provably within bounds.  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow the pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the constant index is known
	       to be within the constant bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  /* Try to fold the arithmetic-with-overflow-check internal
	     functions.  For the *_OVERFLOW variants the lhs is a
	     complex value whose real part is the result and whose
	     imaginary part is the overflow flag.  */
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion to TYPE could change the value, so
		       give up on folding.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack result and overflow flag into a complex value.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4684
4685
4686 /* Return true whether NAME has a use on STMT. */
4687
4688 static bool
has_use_on_stmt(tree name,gimple * stmt)4689 has_use_on_stmt (tree name, gimple *stmt)
4690 {
4691 imm_use_iterator iter;
4692 use_operand_p use_p;
4693 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4694 if (USE_STMT (use_p) == stmt)
4695 return true;
4696 return false;
4697 }
4698
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* A comparison in ops[0] embeds two more operands; apply the same
     abnormal-SSA-name restriction to them.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* The condition simplified to a plain SSA name; test it against
	   zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* A constant condition makes the branch unconditional.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplification as a separate statement and
	     test its result against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments, rewrite the rhs in place when either new
	 statements are allowed or the new rhs is no larger.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, possibly simplified arguments: update the
	 arguments of the existing call in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Materialize the simplification into SEQ, assigning its
	     result to STMT's lhs, and replace STMT with the sequence.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4839
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points to the expression to canonicalize (it may be wrapped in an
   ADDR_EXPR) and is updated in place.  IS_DEBUG is true when the
   expression comes from a debug bind; in that case a failure to
   re-fold an address is tolerated instead of asserted.
   Returns true iff *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  /* Look through a wrapping ADDR_EXPR and canonicalize the object
     whose address is taken.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Turn the (low-bound adjusted) array index into a
		     bit offset into the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  /* Only rewrite accesses fully inside the vector.  */
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components down to the base memory reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* For debug binds a non-constant offset is acceptable and
		 simply means we cannot canonicalize; otherwise it is a
		 bug in the propagator.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  /* Replacing the TMR may have invalidated the invariant flag
	     on an enclosing ADDR_EXPR; recompute it.  */
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
4967
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement may only
   be modified in place, never replaced or grown.  VALUEIZE is used to
   look up (propagated) values of SSA names during simplification; it
   may return NULL_TREE to stop SSA edge following.
   Returns true iff the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember the no-warning flag so overflow warnings deferred below
     can be suppressed for this statement if requested.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Memory references may appear on either side of a copy.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires mirroring
		     the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in arguments and the lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      /* Pass is_debug = true so non-foldable addresses do not
		 ICE but simply stay unchanged.  */
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.
     In-place folding may grow GIMPLE_CALLs, so restrict it to assigns
     and conds which replace_stmt_with_simplification can handle
     without replacing the statement.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    /* The simplification was not usable; throw away any
	       statements built for it.  */
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement, never grow
	   its operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    /* Collect output constraints; the input constraints below
	       may refer back to them (matching constraints).  */
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of and re-wrap
		 the result in an ADDR_EXPR of the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Replace the returned SSA name by its (propagated) value if
	   the valueization callback provides one.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  /* The statement may again have been replaced; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  /* Emit deferred overflow warnings unless the statement had warnings
     suppressed to begin with or nothing changed.  */
  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5291
/* Valueization callback that ends up not following SSA edges.
   Always returns NULL_TREE, which tells gimple_simplify not to look
   at the defining statements of SSA names.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
5299
5300 /* Valueization callback that ends up following single-use SSA edges only. */
5301
5302 tree
follow_single_use_edges(tree val)5303 follow_single_use_edges (tree val)
5304 {
5305 if (TREE_CODE (val) == SSA_NAME
5306 && !has_single_use (val))
5307 return NULL_TREE;
5308 return val;
5309 }
5310
/* Valueization callback that follows all SSA edges.
   Returns its argument unchanged, so gimple_simplify may look at every
   defining statement.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
5318
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Without a valueization callback, SSA edges are not followed.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
5331
/* Like the fold_stmt overload above but use VALUEIZE to look up
   (propagated) values of SSA names during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
5337
5338 /* Perform the minimal folding on statement *GSI. Only operations like
5339 *&x created by constant propagation are handled. The statement cannot
5340 be replaced with a new one. Return true if the statement was
5341 changed, false otherwise.
5342 The statement *GSI should be in valid gimple form but may
5343 be in unfolded state as resulting from for example constant propagation
5344 which can produce *&x = 0. */
5345
5346 bool
fold_stmt_inplace(gimple_stmt_iterator * gsi)5347 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5348 {
5349 gimple *stmt = gsi_stmt (*gsi);
5350 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5351 gcc_assert (gsi_stmt (*gsi) == stmt);
5352 return changed;
5353 }
5354
5355 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5356 if EXPR is null or we don't know how.
5357 If non-null, the result always has boolean type. */
5358
5359 static tree
canonicalize_bool(tree expr,bool invert)5360 canonicalize_bool (tree expr, bool invert)
5361 {
5362 if (!expr)
5363 return NULL_TREE;
5364 else if (invert)
5365 {
5366 if (integer_nonzerop (expr))
5367 return boolean_false_node;
5368 else if (integer_zerop (expr))
5369 return boolean_true_node;
5370 else if (TREE_CODE (expr) == SSA_NAME)
5371 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5372 build_int_cst (TREE_TYPE (expr), 0));
5373 else if (COMPARISON_CLASS_P (expr))
5374 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5375 boolean_type_node,
5376 TREE_OPERAND (expr, 0),
5377 TREE_OPERAND (expr, 1));
5378 else
5379 return NULL_TREE;
5380 }
5381 else
5382 {
5383 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5384 return expr;
5385 if (integer_nonzerop (expr))
5386 return boolean_true_node;
5387 else if (integer_zerop (expr))
5388 return boolean_false_node;
5389 else if (TREE_CODE (expr) == SSA_NAME)
5390 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5391 build_int_cst (TREE_TYPE (expr), 0));
5392 else if (COMPARISON_CLASS_P (expr))
5393 return fold_build2 (TREE_CODE (expr),
5394 boolean_type_node,
5395 TREE_OPERAND (expr, 0),
5396 TREE_OPERAND (expr, 1));
5397 else
5398 return NULL_TREE;
5399 }
5400 }
5401
5402 /* Check to see if a boolean expression EXPR is logically equivalent to the
5403 comparison (OP1 CODE OP2). Check for various identities involving
5404 SSA_NAMEs. */
5405
5406 static bool
same_bool_comparison_p(const_tree expr,enum tree_code code,const_tree op1,const_tree op2)5407 same_bool_comparison_p (const_tree expr, enum tree_code code,
5408 const_tree op1, const_tree op2)
5409 {
5410 gimple *s;
5411
5412 /* The obvious case. */
5413 if (TREE_CODE (expr) == code
5414 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5415 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5416 return true;
5417
5418 /* Check for comparing (name, name != 0) and the case where expr
5419 is an SSA_NAME with a definition matching the comparison. */
5420 if (TREE_CODE (expr) == SSA_NAME
5421 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5422 {
5423 if (operand_equal_p (expr, op1, 0))
5424 return ((code == NE_EXPR && integer_zerop (op2))
5425 || (code == EQ_EXPR && integer_nonzerop (op2)));
5426 s = SSA_NAME_DEF_STMT (expr);
5427 if (is_gimple_assign (s)
5428 && gimple_assign_rhs_code (s) == code
5429 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5430 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5431 return true;
5432 }
5433
5434 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5435 of name is a comparison, recurse. */
5436 if (TREE_CODE (op1) == SSA_NAME
5437 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5438 {
5439 s = SSA_NAME_DEF_STMT (op1);
5440 if (is_gimple_assign (s)
5441 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5442 {
5443 enum tree_code c = gimple_assign_rhs_code (s);
5444 if ((c == NE_EXPR && integer_zerop (op2))
5445 || (c == EQ_EXPR && integer_nonzerop (op2)))
5446 return same_bool_comparison_p (expr, c,
5447 gimple_assign_rhs1 (s),
5448 gimple_assign_rhs2 (s));
5449 if ((c == EQ_EXPR && integer_zerop (op2))
5450 || (c == NE_EXPR && integer_nonzerop (op2)))
5451 return same_bool_comparison_p (expr,
5452 invert_tree_comparison (c, false),
5453 gimple_assign_rhs1 (s),
5454 gimple_assign_rhs2 (s));
5455 }
5456 }
5457 return false;
5458 }
5459
5460 /* Check to see if two boolean expressions OP1 and OP2 are logically
5461 equivalent. */
5462
5463 static bool
same_bool_result_p(const_tree op1,const_tree op2)5464 same_bool_result_p (const_tree op1, const_tree op2)
5465 {
5466 /* Simple cases first. */
5467 if (operand_equal_p (op1, op2, 0))
5468 return true;
5469
5470 /* Check the cases where at least one of the operands is a comparison.
5471 These are a bit smarter than operand_equal_p in that they apply some
5472 identifies on SSA_NAMEs. */
5473 if (COMPARISON_CLASS_P (op2)
5474 && same_bool_comparison_p (op1, TREE_CODE (op2),
5475 TREE_OPERAND (op2, 0),
5476 TREE_OPERAND (op2, 1)))
5477 return true;
5478 if (COMPARISON_CLASS_P (op1)
5479 && same_bool_comparison_p (op2, TREE_CODE (op1),
5480 TREE_OPERAND (op1, 0),
5481 TREE_OPERAND (op1, 1)))
5482 return true;
5483
5484 /* Default case. */
5485 return false;
5486 }
5487
5488 /* Forward declarations for some mutually recursive functions. */
5489
5490 static tree
5491 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5492 enum tree_code code2, tree op2a, tree op2b);
5493 static tree
5494 and_var_with_comparison (tree type, tree var, bool invert,
5495 enum tree_code code2, tree op2a, tree op2b);
5496 static tree
5497 and_var_with_comparison_1 (tree type, gimple *stmt,
5498 enum tree_code code2, tree op2a, tree op2b);
5499 static tree
5500 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5501 enum tree_code code2, tree op2a, tree op2b);
5502 static tree
5503 or_var_with_comparison (tree, tree var, bool invert,
5504 enum tree_code code2, tree op2a, tree op2b);
5505 static tree
5506 or_var_with_comparison_1 (tree, gimple *stmt,
5507 enum tree_code code2, tree op2a, tree op2b);
5508
5509 /* Helper function for and_comparisons_1: try to simplify the AND of the
5510 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5511 If INVERT is true, invert the value of the VAR before doing the AND.
5512 Return NULL_EXPR if we can't simplify this to a single expression. */
5513
5514 static tree
and_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5515 and_var_with_comparison (tree type, tree var, bool invert,
5516 enum tree_code code2, tree op2a, tree op2b)
5517 {
5518 tree t;
5519 gimple *stmt = SSA_NAME_DEF_STMT (var);
5520
5521 /* We can only deal with variables whose definitions are assignments. */
5522 if (!is_gimple_assign (stmt))
5523 return NULL_TREE;
5524
5525 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5526 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5527 Then we only have to consider the simpler non-inverted cases. */
5528 if (invert)
5529 t = or_var_with_comparison_1 (type, stmt,
5530 invert_tree_comparison (code2, false),
5531 op2a, op2b);
5532 else
5533 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5534 return canonicalize_bool (t, invert);
5535 }
5536
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   TYPE is the type of the resulting boolean expression.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set below to OP2A when (OP2A CODE2 OP2B) tests VAR for true
     respectively false; used for the identity checks.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is (op2a != 0) or (op2a == 1), i.e. op2a
	     itself; (var AND var) => var.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is !op2a; (var AND !var) => false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Holds a simplification of (inner1 AND (op2a code2 op2b)) while
	 the second inner operand is tried.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5701
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant arg either forces the AND to false
			 or leaves the second comparison as the result;
			 all args must agree.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5826
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
   try to simplify the AND/OR of the ssa variable VAR with the comparison
   specified by (OP2A CODE2 OP2B) from match.pd.  Return NULL_EXPR if we can't
   simplify this to a single expression.  As we are going to lower the cost
   of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */
5833
static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  It models the defining
     statement lhs1 = op1a code1 op1b without going through the
     (expensive) statement allocator.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack, modeling
     lhs2 = op2a code2 op2b likewise.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  The name is zeroed and
     only given a code, a type and an empty immediate-use list --
     just enough for the simplification machinery below.  */
  tree lhs1 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Ask match.pd to simplify (lhs1 CODE lhs2), following the SSA
     edges into the two fake defining statements built above.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* If the result is one of the fake LHS names, rebuild the
	     corresponding original comparison as a tree; the stack
	     names must not escape this function.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* A comparison still referencing the stack-allocated names
	     is not expressible as a single tree.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE; /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
5903
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
5910
5911 tree
maybe_fold_and_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)5912 maybe_fold_and_comparisons (tree type,
5913 enum tree_code code1, tree op1a, tree op1b,
5914 enum tree_code code2, tree op2a, tree op2b)
5915 {
5916 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5917 return t;
5918
5919 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5920 return t;
5921
5922 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5923 op1a, op1b, code2, op2a,
5924 op2b))
5925 return t;
5926
5927 return NULL_TREE;
5928 }
5929
5930 /* Helper function for or_comparisons_1: try to simplify the OR of the
5931 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5932 If INVERT is true, invert the value of VAR before doing the OR.
5933 Return NULL_EXPR if we can't simplify this to a single expression. */
5934
5935 static tree
or_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5936 or_var_with_comparison (tree type, tree var, bool invert,
5937 enum tree_code code2, tree op2a, tree op2b)
5938 {
5939 tree t;
5940 gimple *stmt = SSA_NAME_DEF_STMT (var);
5941
5942 /* We can only deal with variables whose definitions are assignments. */
5943 if (!is_gimple_assign (stmt))
5944 return NULL_TREE;
5945
5946 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5947 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5948 Then we only have to consider the simpler non-inverted cases. */
5949 if (invert)
5950 t = and_var_with_comparison_1 (type, stmt,
5951 invert_tree_comparison (code2, false),
5952 op2a, op2b);
5953 else
5954 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5955 return canonicalize_bool (t, invert);
5956 }
5957
5958 /* Try to simplify the OR of the ssa variable defined by the assignment
5959 STMT with the comparison specified by (OP2A CODE2 OP2B).
5960 Return NULL_EXPR if we can't simplify this to a single expression. */
5961
static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a code2 op2b) tests OP2A itself for truth...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... or for falsehood; (var OR !var) is always true.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
6123
6124 /* Try to simplify the OR of two comparisons defined by
6125 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6126 If this can be done without constructing an intermediate value,
6127 return the resulting tree; otherwise NULL_TREE is returned.
6128 This function is deliberately asymmetric as it recurses on SSA_DEFs
6129 in the first comparison but not the second. */
6130
static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for
	 falsehood rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant-true argument (after inversion) makes
			 the OR true regardless of the second comparison;
			 otherwise the OR degenerates to the second
			 comparison alone.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6248
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
6255
6256 tree
maybe_fold_or_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)6257 maybe_fold_or_comparisons (tree type,
6258 enum tree_code code1, tree op1a, tree op1b,
6259 enum tree_code code2, tree op2a, tree op2b)
6260 {
6261 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6262 return t;
6263
6264 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6265 return t;
6266
6267 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6268 op1a, op1b, code2, op2a,
6269 op2b))
6270 return t;
6271
6272 return NULL_TREE;
6273 }
6274
6275 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6276
6277 Either NULL_TREE, a simplified but non-constant or a constant
6278 is returned.
6279
6280 ??? This should go into a gimple-fold-inline.h file to be eventually
6281 privatized with the single valueize function used in the various TUs
6282 to avoid the indirect function call overhead. */
6283
tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	/* Let the caller materialize a non-trivial simplification
	   result via its hook, if it installed one.  */
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* gimple_simplify did not help; fall back to code-specific folding.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per element can fold
		 to a VECTOR_CST if every valueized element is constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  /* Unary view/part extraction of a valueized SSA name.  */
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Rewrite a MEM_REF off a constant address into a
			 direct reference before trying aggregate folding.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put a constant, if any, on the RHS.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the overflow-checking internal functions to their
	       plain arithmetic tree code; fold only when the result
	       provably does not overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that is constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* A direct call to a known builtin with matching argument types
	   may fold to a constant via fold_builtin_call_array.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6580
6581 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6582 Returns NULL_TREE if folding to a constant is not possible, otherwise
6583 returns a constant according to is_gimple_min_invariant. */
6584
6585 tree
gimple_fold_stmt_to_constant(gimple * stmt,tree (* valueize)(tree))6586 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6587 {
6588 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6589 if (res && is_gimple_min_invariant (res))
6590 return res;
6591 return NULL_TREE;
6592 }
6593
6594
6595 /* The following set of functions are supposed to fold references using
6596 their constant initializers. */
6597
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into BIT_OFFSET; give up
	 if it does not fit a signed HWI.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      /* Valueize the SSA pointer operand and strip the ADDR_EXPR so
	 we continue with the underlying object.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view conversion does not change the underlying storage.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel one level of reference, accumulating its constant bit
	 offset; a variable-sized access cannot be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6673
6674 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6675 to the memory at bit OFFSET. When non-null, TYPE is the expected
6676 type of the reference; otherwise the type of the referenced element
6677 is used instead. When SIZE is zero, attempt to fold a reference to
6678 the entire element which OFFSET refers to. Increment *SUBOFF by
6679 the bit offset of the accessed element. */
6680
6681 static tree
fold_array_ctor_reference(tree type,tree ctor,unsigned HOST_WIDE_INT offset,unsigned HOST_WIDE_INT size,tree from_decl,unsigned HOST_WIDE_INT * suboff)6682 fold_array_ctor_reference (tree type, tree ctor,
6683 unsigned HOST_WIDE_INT offset,
6684 unsigned HOST_WIDE_INT size,
6685 tree from_decl,
6686 unsigned HOST_WIDE_INT *suboff)
6687 {
6688 offset_int low_bound;
6689 offset_int elt_size;
6690 offset_int access_index;
6691 tree domain_type = NULL_TREE;
6692 HOST_WIDE_INT inner_offset;
6693
6694 /* Compute low bound and elt size. */
6695 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6696 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6697 if (domain_type && TYPE_MIN_VALUE (domain_type))
6698 {
6699 /* Static constructors for variably sized objects make no sense. */
6700 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6701 return NULL_TREE;
6702 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6703 }
6704 else
6705 low_bound = 0;
6706 /* Static constructors for variably sized objects make no sense. */
6707 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6708 return NULL_TREE;
6709 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6710
6711 /* When TYPE is non-null, verify that it specifies a constant-sized
6712 access of a multiple of the array element size. Avoid division
6713 by zero below when ELT_SIZE is zero, such as with the result of
6714 an initializer for a zero-length array or an empty struct. */
6715 if (elt_size == 0
6716 || (type
6717 && (!TYPE_SIZE_UNIT (type)
6718 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6719 return NULL_TREE;
6720
6721 /* Compute the array index we look for. */
6722 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6723 elt_size);
6724 access_index += low_bound;
6725
6726 /* And offset within the access. */
6727 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6728
6729 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
6730 if (size > elt_sz * BITS_PER_UNIT)
6731 {
6732 /* native_encode_expr constraints. */
6733 if (size > MAX_BITSIZE_MODE_ANY_MODE
6734 || size % BITS_PER_UNIT != 0
6735 || inner_offset % BITS_PER_UNIT != 0
6736 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
6737 return NULL_TREE;
6738
6739 unsigned ctor_idx;
6740 tree val = get_array_ctor_element_at_index (ctor, access_index,
6741 &ctor_idx);
6742 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6743 return build_zero_cst (type);
6744
6745 /* native-encode adjacent ctor elements. */
6746 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6747 unsigned bufoff = 0;
6748 offset_int index = 0;
6749 offset_int max_index = access_index;
6750 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6751 if (!val)
6752 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6753 else if (!CONSTANT_CLASS_P (val))
6754 return NULL_TREE;
6755 if (!elt->index)
6756 ;
6757 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6758 {
6759 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6760 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6761 }
6762 else
6763 index = max_index = wi::to_offset (elt->index);
6764 index = wi::umax (index, access_index);
6765 do
6766 {
6767 if (bufoff + elt_sz > sizeof (buf))
6768 elt_sz = sizeof (buf) - bufoff;
6769 int len = native_encode_expr (val, buf + bufoff, elt_sz,
6770 inner_offset / BITS_PER_UNIT);
6771 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
6772 return NULL_TREE;
6773 inner_offset = 0;
6774 bufoff += len;
6775
6776 access_index += 1;
6777 if (wi::cmpu (access_index, index) == 0)
6778 val = elt->value;
6779 else if (wi::cmpu (access_index, max_index) > 0)
6780 {
6781 ctor_idx++;
6782 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6783 {
6784 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6785 ++max_index;
6786 }
6787 else
6788 {
6789 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6790 index = 0;
6791 max_index = access_index;
6792 if (!elt->index)
6793 ;
6794 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6795 {
6796 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6797 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6798 }
6799 else
6800 index = max_index = wi::to_offset (elt->index);
6801 index = wi::umax (index, access_index);
6802 if (wi::cmpu (access_index, index) == 0)
6803 val = elt->value;
6804 else
6805 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6806 }
6807 }
6808 }
6809 while (bufoff < size / BITS_PER_UNIT);
6810 *suboff += size;
6811 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6812 }
6813
6814 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6815 {
6816 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6817 {
6818 /* For the final reference to the entire accessed element
6819 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6820 may be null) in favor of the type of the element, and set
6821 SIZE to the size of the accessed element. */
6822 inner_offset = 0;
6823 type = TREE_TYPE (val);
6824 size = elt_sz * BITS_PER_UNIT;
6825 }
6826 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
6827 && TREE_CODE (val) == CONSTRUCTOR
6828 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
6829 /* If this isn't the last element in the CTOR and a CTOR itself
6830 and it does not cover the whole object we are requesting give up
6831 since we're not set up for combining from multiple CTORs. */
6832 return NULL_TREE;
6833
6834 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
6835 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6836 suboff);
6837 }
6838
6839 /* Memory not explicitly mentioned in constructor is 0 (or
6840 the reference is out of range). */
6841 return type ? build_zero_cst (type) : NULL_TREE;
6842 }
6843
/* CTOR is a CONSTRUCTOR of a non-array aggregate.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the initialized fields looking for the one that overlaps
     the requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends (zero when the size
	 is unknown, i.e. for a flexible array member).  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field.  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's initializer at the adjusted
	     offset.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  /* Memory not explicitly mentioned in the constructor is zero.  */
  return build_zero_cst (type);
}
6946
6947 /* CTOR is value initializing memory. Fold a reference of TYPE and
6948 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6949 is zero, attempt to fold a reference to the entire subobject
6950 which OFFSET refers to. This is used when folding accesses to
6951 string members of aggregates. When non-null, set *SUBOFF to
6952 the bit offset of the accessed subobject. */
6953
6954 tree
fold_ctor_reference(tree type,tree ctor,const poly_uint64 & poly_offset,const poly_uint64 & poly_size,tree from_decl,unsigned HOST_WIDE_INT * suboff)6955 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6956 const poly_uint64 &poly_size, tree from_decl,
6957 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6958 {
6959 tree ret;
6960
6961 /* We found the field with exact match. */
6962 if (type
6963 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6964 && known_eq (poly_offset, 0U))
6965 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6966
6967 /* The remaining optimizations need a constant size and offset. */
6968 unsigned HOST_WIDE_INT size, offset;
6969 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6970 return NULL_TREE;
6971
6972 /* We are at the end of walk, see if we can view convert the
6973 result. */
6974 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6975 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6976 && !compare_tree_int (TYPE_SIZE (type), size)
6977 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6978 {
6979 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6980 if (ret)
6981 {
6982 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6983 if (ret)
6984 STRIP_USELESS_TYPE_CONVERSION (ret);
6985 }
6986 return ret;
6987 }
6988 /* For constants and byte-aligned/sized reads try to go through
6989 native_encode/interpret. */
6990 if (CONSTANT_CLASS_P (ctor)
6991 && BITS_PER_UNIT == 8
6992 && offset % BITS_PER_UNIT == 0
6993 && offset / BITS_PER_UNIT <= INT_MAX
6994 && size % BITS_PER_UNIT == 0
6995 && size <= MAX_BITSIZE_MODE_ANY_MODE
6996 && can_native_interpret_type_p (type))
6997 {
6998 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6999 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7000 offset / BITS_PER_UNIT);
7001 if (len > 0)
7002 return native_interpret_expr (type, buf, len);
7003 }
7004 if (TREE_CODE (ctor) == CONSTRUCTOR)
7005 {
7006 unsigned HOST_WIDE_INT dummy = 0;
7007 if (!suboff)
7008 suboff = &dummy;
7009
7010 tree ret;
7011 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7012 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7013 ret = fold_array_ctor_reference (type, ctor, offset, size,
7014 from_decl, suboff);
7015 else
7016 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7017 from_decl, suboff);
7018
7019 /* Fall back to native_encode_initializer. Needs to be done
7020 only in the outermost fold_ctor_reference call (because it itself
7021 recurses into CONSTRUCTORs) and doesn't update suboff. */
7022 if (ret == NULL_TREE
7023 && suboff == &dummy
7024 && BITS_PER_UNIT == 8
7025 && offset % BITS_PER_UNIT == 0
7026 && offset / BITS_PER_UNIT <= INT_MAX
7027 && size % BITS_PER_UNIT == 0
7028 && size <= MAX_BITSIZE_MODE_ANY_MODE
7029 && can_native_interpret_type_p (type))
7030 {
7031 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7032 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7033 offset / BITS_PER_UNIT);
7034 if (len > 0)
7035 return native_interpret_expr (type, buf, len);
7036 }
7037
7038 return ret;
7039 }
7040
7041 return NULL_TREE;
7042 }
7043
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset = (idx - low_bound) * unit_size * 8,
		 sign-extended to sizetype precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
7157
/* Like fold_const_aggregate_ref_1, but without valueizing SSA names
   (a NULL valueize callback is passed).  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
7163
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as
   rewritten by a non-C++ produced symbol).  Otherwise just return NULL
   in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the entry offset for TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7273
7274 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7275 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7276 KNOWN_BINFO carries the binfo describing the true type of
7277 OBJ_TYPE_REF_OBJECT(REF).
7278 Set CAN_REFER if non-NULL to false if method
7279 is not referable or if the virtual table is ill-formed (such as rewriten
7280 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7281
7282 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)7283 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7284 bool *can_refer)
7285 {
7286 unsigned HOST_WIDE_INT offset;
7287 tree v;
7288
7289 v = BINFO_VTABLE (known_binfo);
7290 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7291 if (!v)
7292 return NULL_TREE;
7293
7294 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7295 {
7296 if (can_refer)
7297 *can_refer = false;
7298 return NULL_TREE;
7299 }
7300 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7301 }
7302
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions of the pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's lower bound, which need not be 0.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element index is within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must name exactly the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7423
7424 /* Return true if CODE is an operation that when operating on signed
7425 integer types involves undefined behavior on overflow and the
7426 operation can be expressed with unsigned arithmetic. */
7427
7428 bool
arith_code_with_undefined_signed_overflow(tree_code code)7429 arith_code_with_undefined_signed_overflow (tree_code code)
7430 {
7431 switch (code)
7432 {
7433 case ABS_EXPR:
7434 case PLUS_EXPR:
7435 case MINUS_EXPR:
7436 case MULT_EXPR:
7437 case NEGATE_EXPR:
7438 case POINTER_PLUS_EXPR:
7439 return true;
7440 default:
7441 return false;
7442 }
7443 }
7444
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the LHS type; the operation is
     carried out in this type.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR becomes ABSU_EXPR, which takes the original operand and
     produces the unsigned result, so the operands are left alone.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    /* Otherwise convert each operand to the unsigned type, emitting
       the conversions into STMTS.  */
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh unsigned LHS; the original LHS is set from it
     by the conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on two integral operands; it
     becomes a plain PLUS_EXPR in the unsigned type.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7485
7486
7487 /* The valueization hook we use for the gimple_build API simplification.
7488 This makes us match fold_buildN behavior by only combining with
7489 statements in the sequence(s) we are currently building. */
7490
7491 static tree
gimple_build_valueize(tree op)7492 gimple_build_valueize (tree op)
7493 {
7494 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7495 return op;
7496 return NULL_TREE;
7497 }
7498
7499 /* Build the expression CODE OP0 of type TYPE with location LOC,
7500 simplifying it first if possible. Returns the built
7501 expression value and appends statements possibly defining it
7502 to SEQ. */
7503
7504 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0)7505 gimple_build (gimple_seq *seq, location_t loc,
7506 enum tree_code code, tree type, tree op0)
7507 {
7508 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7509 if (!res)
7510 {
7511 res = create_tmp_reg_or_ssa_name (type);
7512 gimple *stmt;
7513 if (code == REALPART_EXPR
7514 || code == IMAGPART_EXPR
7515 || code == VIEW_CONVERT_EXPR)
7516 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7517 else
7518 stmt = gimple_build_assign (res, code, op0);
7519 gimple_set_location (stmt, loc);
7520 gimple_seq_add_stmt_without_update (seq, stmt);
7521 }
7522 return res;
7523 }
7524
7525 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7526 simplifying it first if possible. Returns the built
7527 expression value and appends statements possibly defining it
7528 to SEQ. */
7529
7530 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1)7531 gimple_build (gimple_seq *seq, location_t loc,
7532 enum tree_code code, tree type, tree op0, tree op1)
7533 {
7534 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7535 if (!res)
7536 {
7537 res = create_tmp_reg_or_ssa_name (type);
7538 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7539 gimple_set_location (stmt, loc);
7540 gimple_seq_add_stmt_without_update (seq, stmt);
7541 }
7542 return res;
7543 }
7544
7545 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7546 simplifying it first if possible. Returns the built
7547 expression value and appends statements possibly defining it
7548 to SEQ. */
7549
7550 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)7551 gimple_build (gimple_seq *seq, location_t loc,
7552 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7553 {
7554 tree res = gimple_simplify (code, type, op0, op1, op2,
7555 seq, gimple_build_valueize);
7556 if (!res)
7557 {
7558 res = create_tmp_reg_or_ssa_name (type);
7559 gimple *stmt;
7560 if (code == BIT_FIELD_REF)
7561 stmt = gimple_build_assign (res, code,
7562 build3 (code, type, op0, op1, op2));
7563 else
7564 stmt = gimple_build_assign (res, code, op0, op1, op2);
7565 gimple_set_location (stmt, loc);
7566 gimple_seq_add_stmt_without_update (seq, stmt);
7567 }
7568 return res;
7569 }
7570
7571 /* Build the call FN (ARG0) with a result of type TYPE
7572 (or no result if TYPE is void) with location LOC,
7573 simplifying it first if possible. Returns the built
7574 expression value (or NULL_TREE if TYPE is void) and appends
7575 statements possibly defining it to SEQ. */
7576
7577 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0)7578 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7579 tree type, tree arg0)
7580 {
7581 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7582 if (!res)
7583 {
7584 gcall *stmt;
7585 if (internal_fn_p (fn))
7586 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7587 else
7588 {
7589 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7590 stmt = gimple_build_call (decl, 1, arg0);
7591 }
7592 if (!VOID_TYPE_P (type))
7593 {
7594 res = create_tmp_reg_or_ssa_name (type);
7595 gimple_call_set_lhs (stmt, res);
7596 }
7597 gimple_set_location (stmt, loc);
7598 gimple_seq_add_stmt_without_update (seq, stmt);
7599 }
7600 return res;
7601 }
7602
7603 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7604 (or no result if TYPE is void) with location LOC,
7605 simplifying it first if possible. Returns the built
7606 expression value (or NULL_TREE if TYPE is void) and appends
7607 statements possibly defining it to SEQ. */
7608
7609 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1)7610 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7611 tree type, tree arg0, tree arg1)
7612 {
7613 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7614 if (!res)
7615 {
7616 gcall *stmt;
7617 if (internal_fn_p (fn))
7618 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7619 else
7620 {
7621 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7622 stmt = gimple_build_call (decl, 2, arg0, arg1);
7623 }
7624 if (!VOID_TYPE_P (type))
7625 {
7626 res = create_tmp_reg_or_ssa_name (type);
7627 gimple_call_set_lhs (stmt, res);
7628 }
7629 gimple_set_location (stmt, loc);
7630 gimple_seq_add_stmt_without_update (seq, stmt);
7631 }
7632 return res;
7633 }
7634
7635 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7636 (or no result if TYPE is void) with location LOC,
7637 simplifying it first if possible. Returns the built
7638 expression value (or NULL_TREE if TYPE is void) and appends
7639 statements possibly defining it to SEQ. */
7640
7641 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1,tree arg2)7642 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7643 tree type, tree arg0, tree arg1, tree arg2)
7644 {
7645 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7646 seq, gimple_build_valueize);
7647 if (!res)
7648 {
7649 gcall *stmt;
7650 if (internal_fn_p (fn))
7651 stmt = gimple_build_call_internal (as_internal_fn (fn),
7652 3, arg0, arg1, arg2);
7653 else
7654 {
7655 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7656 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7657 }
7658 if (!VOID_TYPE_P (type))
7659 {
7660 res = create_tmp_reg_or_ssa_name (type);
7661 gimple_call_set_lhs (stmt, res);
7662 }
7663 gimple_set_location (stmt, loc);
7664 gimple_seq_add_stmt_without_update (seq, stmt);
7665 }
7666 return res;
7667 }
7668
7669 /* Build the conversion (TYPE) OP with a result of type TYPE
7670 with location LOC if such conversion is neccesary in GIMPLE,
7671 simplifying it first.
7672 Returns the built expression value and appends
7673 statements possibly defining it to SEQ. */
7674
7675 tree
gimple_convert(gimple_seq * seq,location_t loc,tree type,tree op)7676 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7677 {
7678 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7679 return op;
7680 return gimple_build (seq, loc, NOP_EXPR, type, op);
7681 }
7682
7683 /* Build the conversion (ptrofftype) OP with a result of a type
7684 compatible with ptrofftype with location LOC if such conversion
7685 is neccesary in GIMPLE, simplifying it first.
7686 Returns the built expression value and appends
7687 statements possibly defining it to SEQ. */
7688
7689 tree
gimple_convert_to_ptrofftype(gimple_seq * seq,location_t loc,tree op)7690 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7691 {
7692 if (ptrofftype_p (TREE_TYPE (op)))
7693 return op;
7694 return gimple_convert (seq, loc, sizetype, op);
7695 }
7696
7697 /* Build a vector of type TYPE in which each element has the value OP.
7698 Return a gimple value for the result, appending any new statements
7699 to SEQ. */
7700
7701 tree
gimple_build_vector_from_val(gimple_seq * seq,location_t loc,tree type,tree op)7702 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7703 tree op)
7704 {
7705 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7706 && !CONSTANT_CLASS_P (op))
7707 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7708
7709 tree res, vec = build_vector_from_val (type, op);
7710 if (is_gimple_val (vec))
7711 return vec;
7712 if (gimple_in_ssa_p (cfun))
7713 res = make_ssa_name (type);
7714 else
7715 res = create_tmp_reg (type);
7716 gimple *stmt = gimple_build_assign (res, vec);
7717 gimple_set_location (stmt, loc);
7718 gimple_seq_add_stmt_without_update (seq, stmt);
7719 return res;
7720 }
7721
7722 /* Build a vector from BUILDER, handling the case in which some elements
7723 are non-constant. Return a gimple value for the result, appending any
7724 new instructions to SEQ.
7725
7726 BUILDER must not have a stepped encoding on entry. This is because
7727 the function is not geared up to handle the arithmetic that would
7728 be needed in the variable case, and any code building a vector that
7729 is known to be constant should use BUILDER->build () directly. */
7730
7731 tree
gimple_build_vector(gimple_seq * seq,location_t loc,tree_vector_builder * builder)7732 gimple_build_vector (gimple_seq *seq, location_t loc,
7733 tree_vector_builder *builder)
7734 {
7735 gcc_assert (builder->nelts_per_pattern () <= 2);
7736 unsigned int encoded_nelts = builder->encoded_nelts ();
7737 for (unsigned int i = 0; i < encoded_nelts; ++i)
7738 if (!TREE_CONSTANT ((*builder)[i]))
7739 {
7740 tree type = builder->type ();
7741 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7742 vec<constructor_elt, va_gc> *v;
7743 vec_alloc (v, nelts);
7744 for (i = 0; i < nelts; ++i)
7745 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7746
7747 tree res;
7748 if (gimple_in_ssa_p (cfun))
7749 res = make_ssa_name (type);
7750 else
7751 res = create_tmp_reg (type);
7752 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7753 gimple_set_location (stmt, loc);
7754 gimple_seq_add_stmt_without_update (seq, stmt);
7755 return res;
7756 }
7757 return builder->build ();
7758 }
7759
7760 /* Return true if the result of assignment STMT is known to be non-negative.
7761 If the return value is based on the assumption that signed overflow is
7762 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7763 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7764
7765 static bool
gimple_assign_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7766 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7767 int depth)
7768 {
7769 enum tree_code code = gimple_assign_rhs_code (stmt);
7770 switch (get_gimple_rhs_class (code))
7771 {
7772 case GIMPLE_UNARY_RHS:
7773 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7774 gimple_expr_type (stmt),
7775 gimple_assign_rhs1 (stmt),
7776 strict_overflow_p, depth);
7777 case GIMPLE_BINARY_RHS:
7778 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7779 gimple_expr_type (stmt),
7780 gimple_assign_rhs1 (stmt),
7781 gimple_assign_rhs2 (stmt),
7782 strict_overflow_p, depth);
7783 case GIMPLE_TERNARY_RHS:
7784 return false;
7785 case GIMPLE_SINGLE_RHS:
7786 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7787 strict_overflow_p, depth);
7788 case GIMPLE_INVALID_RHS:
7789 break;
7790 }
7791 gcc_unreachable ();
7792 }
7793
7794 /* Return true if return value of call STMT is known to be non-negative.
7795 If the return value is based on the assumption that signed overflow is
7796 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7797 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7798
7799 static bool
gimple_call_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7800 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7801 int depth)
7802 {
7803 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7804 gimple_call_arg (stmt, 0) : NULL_TREE;
7805 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7806 gimple_call_arg (stmt, 1) : NULL_TREE;
7807
7808 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7809 gimple_call_combined_fn (stmt),
7810 arg0,
7811 arg1,
7812 strict_overflow_p, depth);
7813 }
7814
7815 /* Return true if return value of call STMT is known to be non-negative.
7816 If the return value is based on the assumption that signed overflow is
7817 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7818 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7819
7820 static bool
gimple_phi_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7821 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7822 int depth)
7823 {
7824 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7825 {
7826 tree arg = gimple_phi_arg_def (stmt, i);
7827 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7828 return false;
7829 }
7830 return true;
7831 }
7832
7833 /* Return true if STMT is known to compute a non-negative value.
7834 If the return value is based on the assumption that signed overflow is
7835 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7836 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7837
7838 bool
gimple_stmt_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7839 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7840 int depth)
7841 {
7842 switch (gimple_code (stmt))
7843 {
7844 case GIMPLE_ASSIGN:
7845 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7846 depth);
7847 case GIMPLE_CALL:
7848 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7849 depth);
7850 case GIMPLE_PHI:
7851 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7852 depth);
7853 default:
7854 return false;
7855 }
7856 }
7857
7858 /* Return true if the floating-point value computed by assignment STMT
7859 is known to have an integer value. We also allow +Inf, -Inf and NaN
7860 to be considered integer values. Return false for signaling NaN.
7861
7862 DEPTH is the current nesting depth of the query. */
7863
7864 static bool
gimple_assign_integer_valued_real_p(gimple * stmt,int depth)7865 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7866 {
7867 enum tree_code code = gimple_assign_rhs_code (stmt);
7868 switch (get_gimple_rhs_class (code))
7869 {
7870 case GIMPLE_UNARY_RHS:
7871 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7872 gimple_assign_rhs1 (stmt), depth);
7873 case GIMPLE_BINARY_RHS:
7874 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7875 gimple_assign_rhs1 (stmt),
7876 gimple_assign_rhs2 (stmt), depth);
7877 case GIMPLE_TERNARY_RHS:
7878 return false;
7879 case GIMPLE_SINGLE_RHS:
7880 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7881 case GIMPLE_INVALID_RHS:
7882 break;
7883 }
7884 gcc_unreachable ();
7885 }
7886
7887 /* Return true if the floating-point value computed by call STMT is known
7888 to have an integer value. We also allow +Inf, -Inf and NaN to be
7889 considered integer values. Return false for signaling NaN.
7890
7891 DEPTH is the current nesting depth of the query. */
7892
7893 static bool
gimple_call_integer_valued_real_p(gimple * stmt,int depth)7894 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7895 {
7896 tree arg0 = (gimple_call_num_args (stmt) > 0
7897 ? gimple_call_arg (stmt, 0)
7898 : NULL_TREE);
7899 tree arg1 = (gimple_call_num_args (stmt) > 1
7900 ? gimple_call_arg (stmt, 1)
7901 : NULL_TREE);
7902 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7903 arg0, arg1, depth);
7904 }
7905
7906 /* Return true if the floating-point result of phi STMT is known to have
7907 an integer value. We also allow +Inf, -Inf and NaN to be considered
7908 integer values. Return false for signaling NaN.
7909
7910 DEPTH is the current nesting depth of the query. */
7911
7912 static bool
gimple_phi_integer_valued_real_p(gimple * stmt,int depth)7913 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7914 {
7915 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7916 {
7917 tree arg = gimple_phi_arg_def (stmt, i);
7918 if (!integer_valued_real_single_p (arg, depth + 1))
7919 return false;
7920 }
7921 return true;
7922 }
7923
7924 /* Return true if the floating-point value computed by STMT is known
7925 to have an integer value. We also allow +Inf, -Inf and NaN to be
7926 considered integer values. Return false for signaling NaN.
7927
7928 DEPTH is the current nesting depth of the query. */
7929
7930 bool
gimple_stmt_integer_valued_real_p(gimple * stmt,int depth)7931 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7932 {
7933 switch (gimple_code (stmt))
7934 {
7935 case GIMPLE_ASSIGN:
7936 return gimple_assign_integer_valued_real_p (stmt, depth);
7937 case GIMPLE_CALL:
7938 return gimple_call_integer_valued_real_p (stmt, depth);
7939 case GIMPLE_PHI:
7940 return gimple_phi_integer_valued_real_p (stmt, depth);
7941 default:
7942 return false;
7943 }
7944 }
7945