/* Expand builtin functions.
   Copyright (C) 1988-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "demangle.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type and make sure each element's
   decl is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
			       pointer_query *);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}

/* Return the PHI node REF refers to or null if it doesn't.  */

gphi *
access_ref::phi () const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL;

  gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return NULL;

  return as_a <gphi *> (def_stmt);
}

/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize (ARG, OSTYPE) for each
   PHI argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* Reflects the range of offsets of all the PHI arguments that refer
     to the same object (i.e., have the same REF).  */
  access_ref same_ref;
  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  access_ref phi_ref;
  if (pref)
    {
      phi_ref = *pref;
      same_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  /* The offset of the PHI, not reflecting those of its arguments.  */
  const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Add PREF's offset to that of the argument.  */
      phi_arg_ref.add_offset (orng[0], orng[1]);
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				   || phi_arg_ref.sizrng[1] != maxobjsize);

      parmarray |= phi_arg_ref.parmarray;

      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  if (!nullp)
	    same_ref = phi_arg_ref;
	  phi_ref = phi_arg_ref;
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];
	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      if (phi_arg_ref.ref != same_ref.ref)
	same_ref.ref = NULL_TREE;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;

      /* Set SAME_REF.OFFRNG to the maximum range of all arguments.  */
      if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
	same_ref.offrng[0] = phi_arg_ref.offrng[0];
      if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
	same_ref.offrng[1] = phi_arg_ref.offrng[1];
    }

  if (!same_ref.ref && same_ref.offrng[0] != 0)
    /* Clear BASE0 if not all the arguments refer to the same object and
       if not all their offsets are zero-based.  This allows the final
       PHI offset to be out of bounds for some arguments but not for
       others (or negative even if all the arguments are BASE0), which
       is overly permissive.  */
    phi_ref.base0 = false;

  if (same_ref.ref)
    phi_ref = same_ref;
  else
    {
      /* Replace the lower bound of the largest argument with the size
	 of the smallest argument, and set PARMARRAY if any argument
	 was one.  */
      phi_ref.sizrng[0] = minsize;
      phi_ref.parmarray = parmarray;
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}

/* Return the maximum amount of space remaining and if PMIN is non-null,
   set *PMIN to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into the referenced object is zero-based (i.e., it's
	 not referenced by a pointer into the middle of some unknown
	 object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
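
/* Illustrative sketch (hypothetical values, not part of the original
   source): for a zero-based (BASE0) object with SIZRNG = [8, 8] and
   OFFRNG = [3, 5], only the lower offset bound is used, so *PMIN is
   set to 8 - 3 == 5 and 5 is returned.  With OFFRNG = [8, 9] the
   offset starts exactly at the end, so *PMIN is set to -1 (valid,
   just past the end) and zero is returned.  */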

/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
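
/* Illustrative sketch (hypothetical values, not part of the original
   source): for a zero-based 8-byte object with OFFRNG = [0, 0],
   add_offset (2, 4) yields OFFRNG = [2, 4], which remains in bounds.
   Adding an inverted range such as [4, 2] to an offset into an
   unknown (non-BASE0) object instead expands the offset to the
   maximum via add_max_offset ().  */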

/* Set a bit for the PHI in VISITED and return true if it wasn't
   already set.  */

bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* Return false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Clear the bit for SSA_NAME in VISITED set by visit_phi.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Return false if the SSA_NAME chain length counter has reached
   the limit, otherwise increment the counter and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return false to let the caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}

/* If the SSA_NAME has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited.  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
	&& !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}

/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  */
}

/* Return a pointer to the cached access_ref instance for the SSA_NAME
   PTR if it's there or null otherwise.  */

const access_ref *
pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
{
  if (!var_cache)
    {
      ++misses;
      return NULL;
    }

  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);
  if (var_cache->indices.length () <= idx)
    {
      ++misses;
      return NULL;
    }

  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    {
      ++misses;
      return NULL;
    }

  access_ref &cache_ref = var_cache->access_refs[cache_idx];
  if (cache_ref.ref)
    {
      ++hits;
      return &cache_ref;
    }

  ++misses;
  return NULL;
}
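
/* Illustrative sketch of the two-level cache indexing (hypothetical
   values, not part of the original source): the first level INDICES
   is keyed by idx = SSA_NAME_VERSION (PTR) << 1 | (OSTYPE & 1), so
   SSA version 7 with OSTYPE 1 maps to idx 15.  When put_ref stores
   an entry, INDICES[idx] is set to one more than the entry's
   position in the second level ACCESS_REFS.  */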

/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.  */

bool
pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
{
  const unsigned version
    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;

  if (var_cache && version)
    {
      unsigned idx = version << 1 | (ostype & 1);
      if (idx < var_cache->indices.length ())
	{
	  unsigned cache_idx = var_cache->indices[idx] - 1;
	  if (cache_idx < var_cache->access_refs.length ()
	      && var_cache->access_refs[cache_idx].ref)
	    {
	      ++hits;
	      *pref = var_cache->access_refs[cache_idx];
	      return true;
	    }
	}

      ++misses;
    }

  if (!compute_objsize (ptr, ostype, pref, this))
    {
      ++failures;
      return false;
    }

  return true;
}

/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   nonnull.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     Its value minus one is the index into ACCESS_REFS.  Not all
     entries are valid.  */
  if (var_cache->indices.length () <= idx)
    var_cache->indices.safe_grow_cleared (idx + 1);

  if (!var_cache->indices[idx])
    var_cache->indices[idx] = var_cache->access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  All entries except for the last two
     are valid.  Once nonnull, the REF value must stay unchanged.  */
  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    var_cache->access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
  if (cache_ref.ref)
    {
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}

/* Flush the cache if it's nonnull.  */

void
pointer_query::flush_cache ()
{
  if (!var_cache)
    return;
  var_cache->indices.release ();
  var_cache->access_refs.release ();
}

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
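
/* For example (illustrative, not part of the original source):
   is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") return true while
   is_builtin_name ("memcpy") returns false.  */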

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  If these numbers can be determined, store M in
   *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such
   that M divides (&EXP - N) and such that N < M.  If these numbers can
   be determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
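
/* Illustrative example (hypothetical values, not part of the original
   source): if get_object_alignment_1 determines ALIGN == 32 and
   BITPOS == 8, then ptr & 31 == 8 and the address is only known to be
   aligned to least_bit_hwi (8) == 8 bits, i.e. one byte, so 8 is
   returned.  With BITPOS == 0 the full 32-bit alignment is
   returned.  */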

/* For a pointer valued expression EXP compute values M and N such that
   M divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
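
/* For example (illustrative, not part of the original source):
   string_length ("ab\0cd", 1, 5) returns 2, stopping at the embedded
   NUL, and for the 2-byte little-endian representation "a\0b\0\0\0"
   of the wide string L"ab", string_length (ptr, 2, 3) also
   returns 2.  */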

/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the size of the array (exact when EXACT is set), and
   BNDRNG the number of characters in which the NUL is expected.
   Either EXPR or FNAME may be null but not both.  SIZE may be null
   when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, returns false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull a warning is also
   issued.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is false, but not more.  */
  tree size;
  /* True if SRC involves a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
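
/* For example (illustrative, not part of the original source): for
     const char a[4] = "abcd";
   an array with no terminating NUL, unterminated_array on its address
   returns the declaration of A and sets *SIZE to 4 and *EXACT to
   true.  */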

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
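
/* For example (illustrative, not part of the original source): for
   the argument &"foobar"[2] with ELTSIZE 1 the result is
   ssize_int (4), the length of "obar", and for &"foo"[3], an offset
   pointing at the terminating NUL, the result is ssize_int (0).  */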

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p /* = true */)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
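
/* For example (illustrative, not part of the original source): on a
   little-endian target, c_readstr ("abcd", SImode) yields the
   constant 0x64636261, i.e. 'a' in the least significant byte.  */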

/* Cast a target constant CST to target CHAR and if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */
1533 
1534 static int
target_char_cast(tree cst,char * p)1535 target_char_cast (tree cst, char *p)
1536 {
1537   unsigned HOST_WIDE_INT val, hostval;
1538 
1539   if (TREE_CODE (cst) != INTEGER_CST
1540       || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1541     return 1;
1542 
1543   /* We do not care here whether it fits or not.  */
1544   val = TREE_INT_CST_LOW (cst);
1545 
1546   if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
1547     val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
1548 
1549   hostval = val;
1550   if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
1551     hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
1552 
1553   if (val != hostval)
1554     return 1;
1555 
1556   *p = hostval;
1557   return 0;
1558 }
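/* Worked example: with an 8-bit host char, the constant 'A' (0x41)
   passes both maskings unchanged and is stored through P.  On a
   hypothetical target whose CHAR_TYPE_SIZE is 16, the valid target
   char 0x141 would truncate to 0x41 on the host, so VAL != HOSTVAL
   and the function returns 1, forcing the caller onto a non-constant
   path.  */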
1559 
1560 /* Similar to save_expr, but assumes that arbitrary code is not executed
1561    in between the multiple evaluations.  In particular, we assume that a
1562    non-addressable local variable will not be modified.  */
1563 
1564 static tree
1565 builtin_save_expr (tree exp)
1566 {
1567   if (TREE_CODE (exp) == SSA_NAME
1568       || (TREE_ADDRESSABLE (exp) == 0
1569 	  && (TREE_CODE (exp) == PARM_DECL
1570 	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
1571     return exp;
1572 
1573   return save_expr (exp);
1574 }
1575 
1576 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1577    times to get the address of either a higher stack frame, or a return
1578    address located within it (depending on FNDECL_CODE).  */
1579 
1580 static rtx
1581 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1582 {
1583   int i;
1584   rtx tem = INITIAL_FRAME_ADDRESS_RTX;
1585   if (tem == NULL_RTX)
1586     {
1587       /* For a zero count with __builtin_return_address, we don't care what
1588 	 frame address we return, because target-specific definitions will
1589 	 override us.  Therefore frame pointer elimination is OK, and using
1590 	 the soft frame pointer is OK.
1591 
1592 	 For a nonzero count, or a zero count with __builtin_frame_address,
1593 	 we require a stable offset from the current frame pointer to the
1594 	 previous one, so we must use the hard frame pointer, and
1595 	 we must disable frame pointer elimination.  */
1596       if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1597 	tem = frame_pointer_rtx;
1598       else
1599 	{
1600 	  tem = hard_frame_pointer_rtx;
1601 
1602 	  /* Tell reload not to eliminate the frame pointer.  */
1603 	  crtl->accesses_prior_frames = 1;
1604 	}
1605     }
1606 
1607   if (count > 0)
1608     SETUP_FRAME_ADDRESSES ();
1609 
1610   /* On the SPARC, the return address is not in the frame, it is in a
1611      register.  There is no way to access it off of the current frame
1612      pointer, but it can be accessed off the previous frame pointer by
1613      reading the value from the register window save area.  */
1614   if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1615     count--;
1616 
1617   /* Scan back COUNT frames to the specified frame.  */
1618   for (i = 0; i < count; i++)
1619     {
1620       /* Assume the dynamic chain pointer is in the word that the
1621 	 frame address points to, unless otherwise specified.  */
1622       tem = DYNAMIC_CHAIN_ADDRESS (tem);
1623       tem = memory_address (Pmode, tem);
1624       tem = gen_frame_mem (Pmode, tem);
1625       tem = copy_to_reg (tem);
1626     }
1627 
1628   /* For __builtin_frame_address, return what we've got.  But, on
1629      the SPARC for example, we may have to add a bias.  */
1630   if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1631     return FRAME_ADDR_RTX (tem);
1632 
1633   /* For __builtin_return_address, get the return address from that frame.  */
1634 #ifdef RETURN_ADDR_RTX
1635   tem = RETURN_ADDR_RTX (count, tem);
1636 #else
1637   tem = memory_address (Pmode,
1638 			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1639   tem = gen_frame_mem (Pmode, tem);
1640 #endif
1641   return tem;
1642 }
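/* At the source level this expansion backs the documented builtins;
   a minimal user-level sketch:

     void *ra = __builtin_return_address (0);  // caller's return address
     void *fp = __builtin_frame_address (0);   // this function's frame

   Nonzero counts walk the dynamic chain as above and are reliable only
   on targets that actually store the chain in each frame.  */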
1643 
1644 /* Alias set used for setjmp buffer.  */
1645 static alias_set_type setjmp_alias_set = -1;
1646 
1647 /* Construct the leading half of a __builtin_setjmp call.  Control will
1648    return to RECEIVER_LABEL.  This is also called directly by the SJLJ
1649    exception handling code.  */
1650 
1651 void
1652 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1653 {
1654   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1655   rtx stack_save;
1656   rtx mem;
1657 
1658   if (setjmp_alias_set == -1)
1659     setjmp_alias_set = new_alias_set ();
1660 
1661   buf_addr = convert_memory_address (Pmode, buf_addr);
1662 
1663   buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1664 
1665   /* We store the frame pointer and the address of receiver_label in
1666      the buffer and use the rest of it for the stack save area, which
1667      is machine-dependent.  */
1668 
1669   mem = gen_rtx_MEM (Pmode, buf_addr);
1670   set_mem_alias_set (mem, setjmp_alias_set);
1671   emit_move_insn (mem, hard_frame_pointer_rtx);
1672 
1673   mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1674 					   GET_MODE_SIZE (Pmode))),
1675   set_mem_alias_set (mem, setjmp_alias_set);
1676 
1677   emit_move_insn (validize_mem (mem),
1678 		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1679 
1680   stack_save = gen_rtx_MEM (sa_mode,
1681 			    plus_constant (Pmode, buf_addr,
1682 					   2 * GET_MODE_SIZE (Pmode)));
1683   set_mem_alias_set (stack_save, setjmp_alias_set);
1684   emit_stack_save (SAVE_NONLOCAL, &stack_save);
1685 
1686   /* If there is further processing to do, do it.  */
1687   if (targetm.have_builtin_setjmp_setup ())
1688     emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1689 
1690   /* We have a nonlocal label.   */
1691   cfun->has_nonlocal_label = 1;
1692 }
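/* Buffer layout implied by the stores above:

     word 0		saved hard frame pointer
     word 1		address of RECEIVER_LABEL
     word 2 onward	machine-dependent stack save area (SA_MODE)

   expand_builtin_longjmp below reads the words back at the same
   offsets.  */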
1693 
1694 /* Construct the trailing part of a __builtin_setjmp call.  This is
1695    also called directly by the SJLJ exception handling code.
1696    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
1697 
1698 void
1699 expand_builtin_setjmp_receiver (rtx receiver_label)
1700 {
1701   rtx chain;
1702 
1703   /* Mark the FP as used when we get here, so we have to make sure it's
1704      marked as used by this function.  */
1705   emit_use (hard_frame_pointer_rtx);
1706 
1707   /* Mark the static chain as clobbered here so life information
1708      doesn't get messed up for it.  */
1709   chain = rtx_for_static_chain (current_function_decl, true);
1710   if (chain && REG_P (chain))
1711     emit_clobber (chain);
1712 
1713   if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1714     {
1715       /* If the argument pointer can be eliminated in favor of the
1716 	 frame pointer, we don't need to restore it.  We assume here
1717 	 that if such an elimination is present, it can always be used.
1718 	 This is the case on all known machines; if we don't make this
1719 	 assumption, we do unnecessary saving on many machines.  */
1720       size_t i;
1721       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1722 
1723       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1724 	if (elim_regs[i].from == ARG_POINTER_REGNUM
1725 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1726 	  break;
1727 
1728       if (i == ARRAY_SIZE (elim_regs))
1729 	{
1730 	  /* Now restore our arg pointer from the address at which it
1731 	     was saved in our stack frame.  */
1732 	  emit_move_insn (crtl->args.internal_arg_pointer,
1733 			  copy_to_reg (get_arg_pointer_save_area ()));
1734 	}
1735     }
1736 
1737   if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1738     emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1739   else if (targetm.have_nonlocal_goto_receiver ())
1740     emit_insn (targetm.gen_nonlocal_goto_receiver ());
1741   else
1742     { /* Nothing */ }
1743 
1744   /* We must not allow the code we just generated to be reordered by
1745      scheduling.  Specifically, the update of the frame pointer must
1746      happen immediately, not later.  */
1747   emit_insn (gen_blockage ());
1748 }
1749 
1750 /* __builtin_longjmp is passed a pointer to an array of five words (not
1751    all will be used on all machines).  It operates similarly to the C
1752    library function of the same name, but is more efficient.  Much of
1753    the code below is copied from the handling of non-local gotos.  */
1754 
1755 static void
1756 expand_builtin_longjmp (rtx buf_addr, rtx value)
1757 {
1758   rtx fp, lab, stack;
1759   rtx_insn *insn, *last;
1760   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1761 
1762   /* DRAP is needed for stack realign if longjmp is expanded to current
1763      function  */
1764   if (SUPPORTS_STACK_ALIGNMENT)
1765     crtl->need_drap = true;
1766 
1767   if (setjmp_alias_set == -1)
1768     setjmp_alias_set = new_alias_set ();
1769 
1770   buf_addr = convert_memory_address (Pmode, buf_addr);
1771 
1772   buf_addr = force_reg (Pmode, buf_addr);
1773 
1774   /* We require the user to pass a second argument of 1, because
1775      that is what builtin_setjmp will return.  */
1776   gcc_assert (value == const1_rtx);
1777 
1778   last = get_last_insn ();
1779   if (targetm.have_builtin_longjmp ())
1780     emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1781   else
1782     {
1783       fp = gen_rtx_MEM (Pmode, buf_addr);
1784       lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1785 					       GET_MODE_SIZE (Pmode)));
1786 
1787       stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1788 						   2 * GET_MODE_SIZE (Pmode)));
1789       set_mem_alias_set (fp, setjmp_alias_set);
1790       set_mem_alias_set (lab, setjmp_alias_set);
1791       set_mem_alias_set (stack, setjmp_alias_set);
1792 
1793       /* Pick up FP, label, and SP from the block and jump.  This code is
1794 	 from expand_goto in stmt.c; see there for detailed comments.  */
1795       if (targetm.have_nonlocal_goto ())
1796 	/* We have to pass a value to the nonlocal_goto pattern that will
1797 	   get copied into the static_chain pointer, but it does not matter
1798 	   what that value is, because builtin_setjmp does not use it.  */
1799 	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1800       else
1801 	{
1802 	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1803 	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1804 
1805 	  lab = copy_to_reg (lab);
1806 
1807 	  /* Restore the frame pointer and stack pointer.  We must use a
1808 	     temporary since the setjmp buffer may be a local.  */
1809 	  fp = copy_to_reg (fp);
1810 	  emit_stack_restore (SAVE_NONLOCAL, stack);
1811 
1812 	  /* Ensure the frame pointer move is not optimized.  */
1813 	  emit_insn (gen_blockage ());
1814 	  emit_clobber (hard_frame_pointer_rtx);
1815 	  emit_clobber (frame_pointer_rtx);
1816 	  emit_move_insn (hard_frame_pointer_rtx, fp);
1817 
1818 	  emit_use (hard_frame_pointer_rtx);
1819 	  emit_use (stack_pointer_rtx);
1820 	  emit_indirect_jump (lab);
1821 	}
1822     }
1823 
1824   /* Search backwards and mark the jump insn as a non-local goto.
1825      Note that this precludes the use of __builtin_longjmp to a
1826      __builtin_setjmp target in the same function.  However, we've
1827      already cautioned the user that these functions are for
1828      internal exception handling use only.  */
1829   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1830     {
1831       gcc_assert (insn != last);
1832 
1833       if (JUMP_P (insn))
1834 	{
1835 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1836 	  break;
1837 	}
1838       else if (CALL_P (insn))
1839 	break;
1840     }
1841 }
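/* A minimal user-level sketch of the setjmp/longjmp pair (illustration
   only; as noted above these builtins are meant for internal exception
   handling, the jump must cross a function boundary, and the second
   argument to __builtin_longjmp must be 1):

     static intptr_t buf[5];

     static void thrower (void) { __builtin_longjmp (buf, 1); }

     void catcher (void)
     {
       if (__builtin_setjmp (buf) == 0)
	 thrower ();	// control re-enters after the setjmp, returning 1
     }
*/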
1842 
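/* Return true if more arguments remain in the call-expression argument
   iterator ITER.  */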
1843 static inline bool
1844 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1845 {
1846   return (iter->i < iter->n);
1847 }
1848 
1849 /* This function validates the types of a function call argument list
1850    against a specified list of tree_codes.  If the last specifier is a 0,
1851    that represents an ellipsis, otherwise the last specifier must be a
1852    VOID_TYPE.  */
1853 
1854 static bool
1855 validate_arglist (const_tree callexpr, ...)
1856 {
1857   enum tree_code code;
1858   bool res = false;
1859   va_list ap;
1860   const_call_expr_arg_iterator iter;
1861   const_tree arg;
1862 
1863   va_start (ap, callexpr);
1864   init_const_call_expr_arg_iterator (callexpr, &iter);
1865 
1866   /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
1867   tree fn = CALL_EXPR_FN (callexpr);
1868   bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1869 
1870   for (unsigned argno = 1; ; ++argno)
1871     {
1872       code = (enum tree_code) va_arg (ap, int);
1873 
1874       switch (code)
1875 	{
1876 	case 0:
1877 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1878 	  res = true;
1879 	  goto end;
1880 	case VOID_TYPE:
1881 	  /* This signifies an endlink, if no arguments remain, return
1882 	     true, otherwise return false.  */
1883 	  res = !more_const_call_expr_args_p (&iter);
1884 	  goto end;
1885 	case POINTER_TYPE:
1886 	  /* The actual argument must be nonnull when either the whole
1887 	     called function has been declared nonnull, or when the formal
1888 	     argument corresponding to the actual argument has been.  */
1889 	  if (argmap
1890 	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1891 	    {
1892 	      arg = next_const_call_expr_arg (&iter);
1893 	      if (!validate_arg (arg, code) || integer_zerop (arg))
1894 		goto end;
1895 	      break;
1896 	    }
1897 	  /* FALLTHRU */
1898 	default:
1899 	  /* If no parameters remain or the parameter's code does not
1900 	     match the specified code, return false.  Otherwise continue
1901 	     checking any remaining arguments.  */
1902 	  arg = next_const_call_expr_arg (&iter);
1903 	  if (!validate_arg (arg, code))
1904 	    goto end;
1905 	  break;
1906 	}
1907     }
1908 
1909   /* We need gotos here since we can only have one VA_CLOSE in a
1910      function.  */
1911  end: ;
1912   va_end (ap);
1913 
1914   BITMAP_FREE (argmap);
1915 
1916   return res;
1917 }
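/* Two representative uses from this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       accepts exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
       accepts a pointer followed by any number of further arguments.  */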
1918 
1919 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1920    and the address of the save area.  */
1921 
1922 static rtx
1923 expand_builtin_nonlocal_goto (tree exp)
1924 {
1925   tree t_label, t_save_area;
1926   rtx r_label, r_save_area, r_fp, r_sp;
1927   rtx_insn *insn;
1928 
1929   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1930     return NULL_RTX;
1931 
1932   t_label = CALL_EXPR_ARG (exp, 0);
1933   t_save_area = CALL_EXPR_ARG (exp, 1);
1934 
1935   r_label = expand_normal (t_label);
1936   r_label = convert_memory_address (Pmode, r_label);
1937   r_save_area = expand_normal (t_save_area);
1938   r_save_area = convert_memory_address (Pmode, r_save_area);
1939   /* Copy the address of the save location to a register just in case it was
1940      based on the frame pointer.   */
1941   r_save_area = copy_to_reg (r_save_area);
1942   r_fp = gen_rtx_MEM (Pmode, r_save_area);
1943   r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1944 		      plus_constant (Pmode, r_save_area,
1945 				     GET_MODE_SIZE (Pmode)));
1946 
1947   crtl->has_nonlocal_goto = 1;
1948 
1949   /* ??? We no longer need to pass the static chain value, afaik.  */
1950   if (targetm.have_nonlocal_goto ())
1951     emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1952   else
1953     {
1954       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1955       emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1956 
1957       r_label = copy_to_reg (r_label);
1958 
1959       /* Restore the frame pointer and stack pointer.  We must use a
1960 	 temporary since the setjmp buffer may be a local.  */
1961       r_fp = copy_to_reg (r_fp);
1962       emit_stack_restore (SAVE_NONLOCAL, r_sp);
1963 
1964       /* Ensure the frame pointer move is not optimized.  */
1965       emit_insn (gen_blockage ());
1966       emit_clobber (hard_frame_pointer_rtx);
1967       emit_clobber (frame_pointer_rtx);
1968       emit_move_insn (hard_frame_pointer_rtx, r_fp);
1969 
1970       /* USE of hard_frame_pointer_rtx added for consistency;
1971 	 not clear if really needed.  */
1972       emit_use (hard_frame_pointer_rtx);
1973       emit_use (stack_pointer_rtx);
1974 
1975       /* If the architecture is using a GP register, we must
1976 	 conservatively assume that the target function makes use of it.
1977 	 The prologue of functions with nonlocal gotos must therefore
1978 	 initialize the GP register to the appropriate value, and we
1979 	 must then make sure that this value is live at the point
1980 	 of the jump.  (Note that this doesn't necessarily apply
1981 	 to targets with a nonlocal_goto pattern; they are free
1982 	 to implement it in their own way.  Note also that this is
1983 	 a no-op if the GP register is a global invariant.)  */
1984       unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1985       if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1986 	emit_use (pic_offset_table_rtx);
1987 
1988       emit_indirect_jump (r_label);
1989     }
1990 
1991   /* Search backwards to the jump insn and mark it as a
1992      non-local goto.  */
1993   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1994     {
1995       if (JUMP_P (insn))
1996 	{
1997 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1998 	  break;
1999 	}
2000       else if (CALL_P (insn))
2001 	break;
2002     }
2003 
2004   return const0_rtx;
2005 }
2006 
2007 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2008    (not all will be used on all machines) that was passed to __builtin_setjmp.
2009    It updates the stack pointer in that block to the current value.  This is
2010    also called directly by the SJLJ exception handling code.  */
2011 
2012 void
2013 expand_builtin_update_setjmp_buf (rtx buf_addr)
2014 {
2015   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2016   buf_addr = convert_memory_address (Pmode, buf_addr);
2017   rtx stack_save
2018     = gen_rtx_MEM (sa_mode,
2019 		   memory_address
2020 		   (sa_mode,
2021 		    plus_constant (Pmode, buf_addr,
2022 				   2 * GET_MODE_SIZE (Pmode))));
2023 
2024   emit_stack_save (SAVE_NONLOCAL, &stack_save);
2025 }
2026 
2027 /* Expand a call to __builtin_prefetch.  For a target that does not support
2028    data prefetch, evaluate the memory address argument in case it has side
2029    effects.  */
2030 
2031 static void
2032 expand_builtin_prefetch (tree exp)
2033 {
2034   tree arg0, arg1, arg2;
2035   int nargs;
2036   rtx op0, op1, op2;
2037 
2038   if (!validate_arglist (exp, POINTER_TYPE, 0))
2039     return;
2040 
2041   arg0 = CALL_EXPR_ARG (exp, 0);
2042 
2043   /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2044      zero (read) and argument 2 (locality) defaults to 3 (high degree of
2045      locality).  */
2046   nargs = call_expr_nargs (exp);
2047   if (nargs > 1)
2048     arg1 = CALL_EXPR_ARG (exp, 1);
2049   else
2050     arg1 = integer_zero_node;
2051   if (nargs > 2)
2052     arg2 = CALL_EXPR_ARG (exp, 2);
2053   else
2054     arg2 = integer_three_node;
2055 
2056   /* Argument 0 is an address.  */
2057   op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2058 
2059   /* Argument 1 (read/write flag) must be a compile-time constant int.  */
2060   if (TREE_CODE (arg1) != INTEGER_CST)
2061     {
2062       error ("second argument to %<__builtin_prefetch%> must be a constant");
2063       arg1 = integer_zero_node;
2064     }
2065   op1 = expand_normal (arg1);
2066   /* Argument 1 must be either zero or one.  */
2067   if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2068     {
2069       warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2070 	       " using zero");
2071       op1 = const0_rtx;
2072     }
2073 
2074   /* Argument 2 (locality) must be a compile-time constant int.  */
2075   if (TREE_CODE (arg2) != INTEGER_CST)
2076     {
2077       error ("third argument to %<__builtin_prefetch%> must be a constant");
2078       arg2 = integer_zero_node;
2079     }
2080   op2 = expand_normal (arg2);
2081   /* Argument 2 must be 0, 1, 2, or 3.  */
2082   if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2083     {
2084       warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2085       op2 = const0_rtx;
2086     }
2087 
2088   if (targetm.have_prefetch ())
2089     {
2090       class expand_operand ops[3];
2091 
2092       create_address_operand (&ops[0], op0);
2093       create_integer_operand (&ops[1], INTVAL (op1));
2094       create_integer_operand (&ops[2], INTVAL (op2));
2095       if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2096 	return;
2097     }
2098 
2099   /* Don't do anything with direct references to volatile memory, but
2100      generate code to handle other side effects.  */
2101   if (!MEM_P (op0) && side_effects_p (op0))
2102     emit_insn (op0);
2103 }
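/* User-level illustration of the documented builtin (A and I are
   hypothetical):

     __builtin_prefetch (&a[i + 8], 0, 3);  // prefetch for read, high locality

   On targets without a prefetch insn this expands to nothing beyond
   evaluating the address for side effects, as handled above.  */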
2104 
2105 /* Get a MEM rtx for expression EXP which is the address of an operand
2106    to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
2107    the maximum length of the block of memory that might be accessed or
2108    NULL if unknown.  */
2109 
2110 static rtx
2111 get_memory_rtx (tree exp, tree len)
2112 {
2113   tree orig_exp = exp;
2114   rtx addr, mem;
2115 
2116   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
2117      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
2118   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2119     exp = TREE_OPERAND (exp, 0);
2120 
2121   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2122   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2123 
2124   /* Get an expression we can use to find the attributes to assign to MEM.
2125      First remove any nops.  */
2126   while (CONVERT_EXPR_P (exp)
2127 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2128     exp = TREE_OPERAND (exp, 0);
2129 
2130   /* Build a MEM_REF representing the whole accessed area as a byte blob
2131      (as builtin stringops may alias with anything).  */
2132   exp = fold_build2 (MEM_REF,
2133 		     build_array_type (char_type_node,
2134 				       build_range_type (sizetype,
2135 							 size_one_node, len)),
2136 		     exp, build_int_cst (ptr_type_node, 0));
2137 
2138   /* If the MEM_REF has no acceptable address, try to get the base object
2139      from the original address we got, and build an all-aliasing
2140      unknown-sized access to that one.  */
2141   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2142     set_mem_attributes (mem, exp, 0);
2143   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2144 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2145 						     0))))
2146     {
2147       exp = build_fold_addr_expr (exp);
2148       exp = fold_build2 (MEM_REF,
2149 			 build_array_type (char_type_node,
2150 					   build_range_type (sizetype,
2151 							     size_zero_node,
2152 							     NULL)),
2153 			 exp, build_int_cst (ptr_type_node, 0));
2154       set_mem_attributes (mem, exp, 0);
2155     }
2156   set_mem_alias_set (mem, 0);
2157   return mem;
2158 }
2159 
2160 /* Built-in functions to perform an untyped call and return.  */
2161 
2162 #define apply_args_mode \
2163   (this_target_builtins->x_apply_args_mode)
2164 #define apply_result_mode \
2165   (this_target_builtins->x_apply_result_mode)
2166 
2167 /* Return the size required for the block returned by __builtin_apply_args,
2168    and initialize apply_args_mode.  */
2169 
2170 static int
2171 apply_args_size (void)
2172 {
2173   static int size = -1;
2174   int align;
2175   unsigned int regno;
2176 
2177   /* The values computed by this function never change.  */
2178   if (size < 0)
2179     {
2180       /* The first value is the incoming arg-pointer.  */
2181       size = GET_MODE_SIZE (Pmode);
2182 
2183       /* The second value is the structure value address unless this is
2184 	 passed as an "invisible" first argument.  */
2185       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2186 	size += GET_MODE_SIZE (Pmode);
2187 
2188       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 	if (FUNCTION_ARG_REGNO_P (regno))
2190 	  {
2191 	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2192 
2193 	    gcc_assert (mode != VOIDmode);
2194 
2195 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2196 	    if (size % align != 0)
2197 	      size = CEIL (size, align) * align;
2198 	    size += GET_MODE_SIZE (mode);
2199 	    apply_args_mode[regno] = mode;
2200 	  }
2201 	else
2202 	  {
2203 	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2204 	  }
2205     }
2206   return size;
2207 }
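/* Worked example of the rounding above: if SIZE is 4 and the next
   argument register uses an 8-byte mode with 8-byte alignment, then
   4 % 8 != 0, so SIZE becomes CEIL (4, 8) * 8 == 8 before the mode's
   8 bytes are added, leaving SIZE == 16.  */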
2208 
2209 /* Return the size required for the block returned by __builtin_apply,
2210    and initialize apply_result_mode.  */
2211 
2212 static int
2213 apply_result_size (void)
2214 {
2215   static int size = -1;
2216   int align, regno;
2217 
2218   /* The values computed by this function never change.  */
2219   if (size < 0)
2220     {
2221       size = 0;
2222 
2223       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2224 	if (targetm.calls.function_value_regno_p (regno))
2225 	  {
2226 	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2227 
2228 	    gcc_assert (mode != VOIDmode);
2229 
2230 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2231 	    if (size % align != 0)
2232 	      size = CEIL (size, align) * align;
2233 	    size += GET_MODE_SIZE (mode);
2234 	    apply_result_mode[regno] = mode;
2235 	  }
2236 	else
2237 	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2238 
2239       /* Allow targets that use untyped_call and untyped_return to override
2240 	 the size so that machine-specific information can be stored here.  */
2241 #ifdef APPLY_RESULT_SIZE
2242       size = APPLY_RESULT_SIZE;
2243 #endif
2244     }
2245   return size;
2246 }
2247 
2248 /* Create a vector describing the result block RESULT.  If SAVEP is true,
2249    the result block is used to save the values; otherwise it is used to
2250    restore the values.  */
2251 
2252 static rtx
2253 result_vector (int savep, rtx result)
2254 {
2255   int regno, size, align, nelts;
2256   fixed_size_mode mode;
2257   rtx reg, mem;
2258   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2259 
2260   size = nelts = 0;
2261   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2262     if ((mode = apply_result_mode[regno]) != VOIDmode)
2263       {
2264 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2265 	if (size % align != 0)
2266 	  size = CEIL (size, align) * align;
2267 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2268 	mem = adjust_address (result, mode, size);
2269 	savevec[nelts++] = (savep
2270 			    ? gen_rtx_SET (mem, reg)
2271 			    : gen_rtx_SET (reg, mem));
2272 	size += GET_MODE_SIZE (mode);
2273       }
2274   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2275 }
2276 
2277 /* Save the state required to perform an untyped call with the same
2278    arguments as were passed to the current function.  */
2279 
2280 static rtx
2281 expand_builtin_apply_args_1 (void)
2282 {
2283   rtx registers, tem;
2284   int size, align, regno;
2285   fixed_size_mode mode;
2286   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2287 
2288   /* Create a block where the arg-pointer, structure value address,
2289      and argument registers can be saved.  */
2290   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2291 
2292   /* Walk past the arg-pointer and structure value address.  */
2293   size = GET_MODE_SIZE (Pmode);
2294   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2295     size += GET_MODE_SIZE (Pmode);
2296 
2297   /* Save each register used in calling a function to the block.  */
2298   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2299     if ((mode = apply_args_mode[regno]) != VOIDmode)
2300       {
2301 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2302 	if (size % align != 0)
2303 	  size = CEIL (size, align) * align;
2304 
2305 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2306 
2307 	emit_move_insn (adjust_address (registers, mode, size), tem);
2308 	size += GET_MODE_SIZE (mode);
2309       }
2310 
2311   /* Save the arg pointer to the block.  */
2312   tem = copy_to_reg (crtl->args.internal_arg_pointer);
2313   /* We need the pointer as the caller actually passed the args to us, not
2314      as we might have pretended they were passed.  Make sure it's a valid
2315      operand, as emit_move_insn isn't expected to handle a PLUS.  */
2316   if (STACK_GROWS_DOWNWARD)
2317     tem
2318       = force_operand (plus_constant (Pmode, tem,
2319 				      crtl->args.pretend_args_size),
2320 		       NULL_RTX);
2321   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2322 
2323   size = GET_MODE_SIZE (Pmode);
2324 
2325   /* Save the structure value address unless this is passed as an
2326      "invisible" first argument.  */
2327   if (struct_incoming_value)
2328     emit_move_insn (adjust_address (registers, Pmode, size),
2329 		    copy_to_reg (struct_incoming_value));
2330 
2331   /* Return the address of the block.  */
2332   return copy_addr_to_reg (XEXP (registers, 0));
2333 }
2334 
2335 /* __builtin_apply_args returns block of memory allocated on
2336    the stack into which is stored the arg pointer, structure
2337    value address, static chain, and all the registers that might
2338    possibly be used in performing a function call.  The code is
2339    moved to the start of the function so the incoming values are
2340    saved.  */
2341 
2342 static rtx
2343 expand_builtin_apply_args (void)
2344 {
2345   /* Don't do __builtin_apply_args more than once in a function.
2346      Save the result of the first call and reuse it.  */
2347   if (apply_args_value != 0)
2348     return apply_args_value;
2349   {
2350     /* When this function is called, it means that registers must be
2351        saved on entry to this function.  So we migrate the
2352        call to the first insn of this function.  */
2353     rtx temp;
2354 
2355     start_sequence ();
2356     temp = expand_builtin_apply_args_1 ();
2357     rtx_insn *seq = get_insns ();
2358     end_sequence ();
2359 
2360     apply_args_value = temp;
2361 
2362     /* Put the insns after the NOTE that starts the function.
2363        If this is inside a start_sequence, make the outer-level insn
2364        chain current, so the code is placed at the start of the
2365        function.  If internal_arg_pointer is a non-virtual pseudo,
2366        it needs to be placed after the function that initializes
2367        that pseudo.  */
2368     push_topmost_sequence ();
2369     if (REG_P (crtl->args.internal_arg_pointer)
2370 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2371       emit_insn_before (seq, parm_birth_insn);
2372     else
2373       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2374     pop_topmost_sequence ();
2375     return temp;
2376   }
2377 }
2378 
2379 /* Perform an untyped call and save the state required to perform an
2380    untyped return of whatever value was returned by the given function.  */
2381 
2382 static rtx
2383 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2384 {
2385   int size, align, regno;
2386   fixed_size_mode mode;
2387   rtx incoming_args, result, reg, dest, src;
2388   rtx_call_insn *call_insn;
2389   rtx old_stack_level = 0;
2390   rtx call_fusage = 0;
2391   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2392 
2393   arguments = convert_memory_address (Pmode, arguments);
2394 
2395   /* Create a block where the return registers can be saved.  */
2396   result = assign_stack_local (BLKmode, apply_result_size (), -1);
2397 
2398   /* Fetch the arg pointer from the ARGUMENTS block.  */
2399   incoming_args = gen_reg_rtx (Pmode);
2400   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2401   if (!STACK_GROWS_DOWNWARD)
2402     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2403 					 incoming_args, 0, OPTAB_LIB_WIDEN);
2404 
2405   /* Push a new argument block and copy the arguments.  Do not allow
2406      the (potential) memcpy call below to interfere with our stack
2407      manipulations.  */
2408   do_pending_stack_adjust ();
2409   NO_DEFER_POP;
2410 
2411   /* Save the stack with nonlocal if available.  */
2412   if (targetm.have_save_stack_nonlocal ())
2413     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2414   else
2415     emit_stack_save (SAVE_BLOCK, &old_stack_level);
2416 
2417   /* Allocate a block of memory onto the stack and copy the memory
2418      arguments to the outgoing arguments address.  We can pass TRUE
2419      as the 4th argument because we just saved the stack pointer
2420      and will restore it right after the call.  */
2421   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2422 
2423   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2424      may have already set current_function_calls_alloca to true.
2425      current_function_calls_alloca won't be set if argsize is zero,
2426      so we have to guarantee need_drap is true here.  */
2427   if (SUPPORTS_STACK_ALIGNMENT)
2428     crtl->need_drap = true;
2429 
2430   dest = virtual_outgoing_args_rtx;
2431   if (!STACK_GROWS_DOWNWARD)
2432     {
2433       if (CONST_INT_P (argsize))
2434 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2435       else
2436 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2437     }
2438   dest = gen_rtx_MEM (BLKmode, dest);
2439   set_mem_align (dest, PARM_BOUNDARY);
2440   src = gen_rtx_MEM (BLKmode, incoming_args);
2441   set_mem_align (src, PARM_BOUNDARY);
2442   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2443 
2444   /* Refer to the argument block.  */
2445   apply_args_size ();
2446   arguments = gen_rtx_MEM (BLKmode, arguments);
2447   set_mem_align (arguments, PARM_BOUNDARY);
2448 
2449   /* Walk past the arg-pointer and structure value address.  */
2450   size = GET_MODE_SIZE (Pmode);
2451   if (struct_value)
2452     size += GET_MODE_SIZE (Pmode);
2453 
2454   /* Restore each of the registers previously saved.  Make USE insns
2455      for each of these registers for use in making the call.  */
2456   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2457     if ((mode = apply_args_mode[regno]) != VOIDmode)
2458       {
2459 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2460 	if (size % align != 0)
2461 	  size = CEIL (size, align) * align;
2462 	reg = gen_rtx_REG (mode, regno);
2463 	emit_move_insn (reg, adjust_address (arguments, mode, size));
2464 	use_reg (&call_fusage, reg);
2465 	size += GET_MODE_SIZE (mode);
2466       }
2467 
2468   /* Restore the structure value address unless this is passed as an
2469      "invisible" first argument.  */
2470   size = GET_MODE_SIZE (Pmode);
2471   if (struct_value)
2472     {
2473       rtx value = gen_reg_rtx (Pmode);
2474       emit_move_insn (value, adjust_address (arguments, Pmode, size));
2475       emit_move_insn (struct_value, value);
2476       if (REG_P (struct_value))
2477 	use_reg (&call_fusage, struct_value);
2478     }
2479 
2480   /* All arguments and registers used for the call are set up by now!  */
2481   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2482 
2483   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
2484      and we don't want to load it into a register as an optimization,
2485      because prepare_call_address already did it if it should be done.  */
2486   if (GET_CODE (function) != SYMBOL_REF)
2487     function = memory_address (FUNCTION_MODE, function);
2488 
2489   /* Generate the actual call instruction and save the return value.  */
2490   if (targetm.have_untyped_call ())
2491     {
2492       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2493       rtx_insn *seq = targetm.gen_untyped_call (mem, result,
2494 						result_vector (1, result));
2495       for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
2496 	if (CALL_P (insn))
2497 	  add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
2498       emit_insn (seq);
2499     }
2500   else if (targetm.have_call_value ())
2501     {
2502       rtx valreg = 0;
2503 
2504       /* Locate the unique return register.  It is not possible to
2505 	 express a call that sets more than one return register using
2506 	 call_value; use untyped_call for that.  In fact, untyped_call
2507 	 only needs to save the return registers in the given block.  */
2508       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2509 	if ((mode = apply_result_mode[regno]) != VOIDmode)
2510 	  {
2511 	    gcc_assert (!valreg); /* have_untyped_call required.  */
2512 
2513 	    valreg = gen_rtx_REG (mode, regno);
2514 	  }
2515 
2516       emit_insn (targetm.gen_call_value (valreg,
2517 					 gen_rtx_MEM (FUNCTION_MODE, function),
2518 					 const0_rtx, NULL_RTX, const0_rtx));
2519 
2520       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2521     }
2522   else
2523     gcc_unreachable ();
2524 
2525   /* Find the CALL insn we just emitted, and attach the register usage
2526      information.  */
2527   call_insn = last_call_insn ();
2528   add_function_usage_to (call_insn, call_fusage);
2529 
2530   /* Restore the stack.  */
2531   if (targetm.have_save_stack_nonlocal ())
2532     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2533   else
2534     emit_stack_restore (SAVE_BLOCK, old_stack_level);
2535   fixup_args_size_notes (call_insn, get_last_insn (), 0);
2536 
2537   OK_DEFER_POP;
2538 
2539   /* Return the address of the result block.  */
2540   result = copy_addr_to_reg (XEXP (result, 0));
2541   return convert_memory_address (ptr_mode, result);
2542 }
2543 
2544 /* Perform an untyped return.  */
2545 
2546 static void
2547 expand_builtin_return (rtx result)
2548 {
2549   int size, align, regno;
2550   fixed_size_mode mode;
2551   rtx reg;
2552   rtx_insn *call_fusage = 0;
2553 
2554   result = convert_memory_address (Pmode, result);
2555 
2556   apply_result_size ();
2557   result = gen_rtx_MEM (BLKmode, result);
2558 
2559   if (targetm.have_untyped_return ())
2560     {
2561       rtx vector = result_vector (0, result);
2562       emit_jump_insn (targetm.gen_untyped_return (result, vector));
2563       emit_barrier ();
2564       return;
2565     }
2566 
2567   /* Restore the return value and note that each value is used.  */
2568   size = 0;
2569   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2570     if ((mode = apply_result_mode[regno]) != VOIDmode)
2571       {
2572 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2573 	if (size % align != 0)
2574 	  size = CEIL (size, align) * align;
2575 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2576 	emit_move_insn (reg, adjust_address (result, mode, size));
2577 
2578 	push_to_sequence (call_fusage);
2579 	emit_use (reg);
2580 	call_fusage = get_insns ();
2581 	end_sequence ();
2582 	size += GET_MODE_SIZE (mode);
2583       }
2584 
2585   /* Put the USE insns before the return.  */
2586   emit_insn (call_fusage);
2587 
2588   /* Return whatever value was restored by jumping directly to the end
2589      of the function.  */
2590   expand_naked_return ();
2591 }
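/* The three builtins cooperate to forward a call without knowing its
   signature; a hedged user-level sketch, where TARGET_FN and the
   argument-block size of 64 bytes are assumptions the programmer must
   supply:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);
*/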
2592 
2593 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
2594 
2595 static enum type_class
2596 type_to_class (tree type)
2597 {
2598   switch (TREE_CODE (type))
2599     {
2600     case VOID_TYPE:	   return void_type_class;
2601     case INTEGER_TYPE:	   return integer_type_class;
2602     case ENUMERAL_TYPE:	   return enumeral_type_class;
2603     case BOOLEAN_TYPE:	   return boolean_type_class;
2604     case POINTER_TYPE:	   return pointer_type_class;
2605     case REFERENCE_TYPE:   return reference_type_class;
2606     case OFFSET_TYPE:	   return offset_type_class;
2607     case REAL_TYPE:	   return real_type_class;
2608     case COMPLEX_TYPE:	   return complex_type_class;
2609     case FUNCTION_TYPE:	   return function_type_class;
2610     case METHOD_TYPE:	   return method_type_class;
2611     case RECORD_TYPE:	   return record_type_class;
2612     case UNION_TYPE:
2613     case QUAL_UNION_TYPE:  return union_type_class;
2614     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
2615 				   ? string_type_class : array_type_class);
2616     case LANG_TYPE:	   return lang_type_class;
2617     case OPAQUE_TYPE:      return opaque_type_class;
2618     default:		   return no_type_class;
2619     }
2620 }
2621 
2622 /* Expand a call EXP to __builtin_classify_type.  */
2623 
2624 static rtx
2625 expand_builtin_classify_type (tree exp)
2626 {
2627   if (call_expr_nargs (exp))
2628     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2629   return GEN_INT (no_type_class);
2630 }
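/* For example, __builtin_classify_type (1.0) folds to real_type_class
   and __builtin_classify_type ((void *) 0) to pointer_type_class, per
   the mapping above; a call with no argument yields no_type_class.  */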
2631 
2632 /* This helper macro, meant to be used in mathfn_built_in below, determines
2633    which among a set of builtin math functions is appropriate for a given type
2634    mode.  The `F' (float) and `L' (long double) are automatically generated
2635    from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
2636    types, there are additional types that are considered with 'F32', 'F64',
2637    'F128', etc. suffixes.  */
2638 #define CASE_MATHFN(MATHFN) \
2639   CASE_CFN_##MATHFN: \
2640   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2641   fcodel = BUILT_IN_##MATHFN##L ; break;
2642 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2643    types.  */
2644 #define CASE_MATHFN_FLOATN(MATHFN) \
2645   CASE_CFN_##MATHFN: \
2646   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2647   fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2648   fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2649   fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2650   fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2651   break;
2652 /* Similar to above, but appends _R after any F/L suffix.  */
2653 #define CASE_MATHFN_REENT(MATHFN) \
2654   case CFN_BUILT_IN_##MATHFN##_R: \
2655   case CFN_BUILT_IN_##MATHFN##F_R: \
2656   case CFN_BUILT_IN_##MATHFN##L_R: \
2657   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2658   fcodel = BUILT_IN_##MATHFN##L_R ; break;
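/* For instance, CASE_MATHFN (ATAN) expands to the case labels for all
   atan variants (via CASE_CFN_ATAN) and records fcode = BUILT_IN_ATAN,
   fcodef = BUILT_IN_ATANF and fcodel = BUILT_IN_ATANL for the type
   dispatch in mathfn_built_in_2 below.  */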
2659 
2660 /* Return a function equivalent to FN but operating on floating-point
2661    values of type TYPE, or END_BUILTINS if no such function exists.
2662    This is purely an operation on function codes; it does not guarantee
2663    that the target actually has an implementation of the function.  */
2664 
2665 static built_in_function
2666 mathfn_built_in_2 (tree type, combined_fn fn)
2667 {
2668   tree mtype;
2669   built_in_function fcode, fcodef, fcodel;
2670   built_in_function fcodef16 = END_BUILTINS;
2671   built_in_function fcodef32 = END_BUILTINS;
2672   built_in_function fcodef64 = END_BUILTINS;
2673   built_in_function fcodef128 = END_BUILTINS;
2674   built_in_function fcodef32x = END_BUILTINS;
2675   built_in_function fcodef64x = END_BUILTINS;
2676   built_in_function fcodef128x = END_BUILTINS;
2677 
2678   switch (fn)
2679     {
2680 #define SEQ_OF_CASE_MATHFN			\
2681     CASE_MATHFN (ACOS)				\
2682     CASE_MATHFN (ACOSH)				\
2683     CASE_MATHFN (ASIN)				\
2684     CASE_MATHFN (ASINH)				\
2685     CASE_MATHFN (ATAN)				\
2686     CASE_MATHFN (ATAN2)				\
2687     CASE_MATHFN (ATANH)				\
2688     CASE_MATHFN (CBRT)				\
2689     CASE_MATHFN_FLOATN (CEIL)			\
2690     CASE_MATHFN (CEXPI)				\
2691     CASE_MATHFN_FLOATN (COPYSIGN)		\
2692     CASE_MATHFN (COS)				\
2693     CASE_MATHFN (COSH)				\
2694     CASE_MATHFN (DREM)				\
2695     CASE_MATHFN (ERF)				\
2696     CASE_MATHFN (ERFC)				\
2697     CASE_MATHFN (EXP)				\
2698     CASE_MATHFN (EXP10)				\
2699     CASE_MATHFN (EXP2)				\
2700     CASE_MATHFN (EXPM1)				\
2701     CASE_MATHFN (FABS)				\
2702     CASE_MATHFN (FDIM)				\
2703     CASE_MATHFN_FLOATN (FLOOR)			\
2704     CASE_MATHFN_FLOATN (FMA)			\
2705     CASE_MATHFN_FLOATN (FMAX)			\
2706     CASE_MATHFN_FLOATN (FMIN)			\
2707     CASE_MATHFN (FMOD)				\
2708     CASE_MATHFN (FREXP)				\
2709     CASE_MATHFN (GAMMA)				\
2710     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */	\
2711     CASE_MATHFN (HUGE_VAL)			\
2712     CASE_MATHFN (HYPOT)				\
2713     CASE_MATHFN (ILOGB)				\
2714     CASE_MATHFN (ICEIL)				\
2715     CASE_MATHFN (IFLOOR)			\
2716     CASE_MATHFN (INF)				\
2717     CASE_MATHFN (IRINT)				\
2718     CASE_MATHFN (IROUND)			\
2719     CASE_MATHFN (ISINF)				\
2720     CASE_MATHFN (J0)				\
2721     CASE_MATHFN (J1)				\
2722     CASE_MATHFN (JN)				\
2723     CASE_MATHFN (LCEIL)				\
2724     CASE_MATHFN (LDEXP)				\
2725     CASE_MATHFN (LFLOOR)			\
2726     CASE_MATHFN (LGAMMA)			\
2727     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */	\
2728     CASE_MATHFN (LLCEIL)			\
2729     CASE_MATHFN (LLFLOOR)			\
2730     CASE_MATHFN (LLRINT)			\
2731     CASE_MATHFN (LLROUND)			\
2732     CASE_MATHFN (LOG)				\
2733     CASE_MATHFN (LOG10)				\
2734     CASE_MATHFN (LOG1P)				\
2735     CASE_MATHFN (LOG2)				\
2736     CASE_MATHFN (LOGB)				\
2737     CASE_MATHFN (LRINT)				\
2738     CASE_MATHFN (LROUND)			\
2739     CASE_MATHFN (MODF)				\
2740     CASE_MATHFN (NAN)				\
2741     CASE_MATHFN (NANS)				\
2742     CASE_MATHFN_FLOATN (NEARBYINT)		\
2743     CASE_MATHFN (NEXTAFTER)			\
2744     CASE_MATHFN (NEXTTOWARD)			\
2745     CASE_MATHFN (POW)				\
2746     CASE_MATHFN (POWI)				\
2747     CASE_MATHFN (POW10)				\
2748     CASE_MATHFN (REMAINDER)			\
2749     CASE_MATHFN (REMQUO)			\
2750     CASE_MATHFN_FLOATN (RINT)			\
2751     CASE_MATHFN_FLOATN (ROUND)			\
2752     CASE_MATHFN_FLOATN (ROUNDEVEN)		\
2753     CASE_MATHFN (SCALB)				\
2754     CASE_MATHFN (SCALBLN)			\
2755     CASE_MATHFN (SCALBN)			\
2756     CASE_MATHFN (SIGNBIT)			\
2757     CASE_MATHFN (SIGNIFICAND)			\
2758     CASE_MATHFN (SIN)				\
2759     CASE_MATHFN (SINCOS)			\
2760     CASE_MATHFN (SINH)				\
2761     CASE_MATHFN_FLOATN (SQRT)			\
2762     CASE_MATHFN (TAN)				\
2763     CASE_MATHFN (TANH)				\
2764     CASE_MATHFN (TGAMMA)			\
2765     CASE_MATHFN_FLOATN (TRUNC)			\
2766     CASE_MATHFN (Y0)				\
2767     CASE_MATHFN (Y1)				\
2768     CASE_MATHFN (YN)
2769 
2770     SEQ_OF_CASE_MATHFN
2771 
2772     default:
2773       return END_BUILTINS;
2774     }
2775 
2776   mtype = TYPE_MAIN_VARIANT (type);
2777   if (mtype == double_type_node)
2778     return fcode;
2779   else if (mtype == float_type_node)
2780     return fcodef;
2781   else if (mtype == long_double_type_node)
2782     return fcodel;
2783   else if (mtype == float16_type_node)
2784     return fcodef16;
2785   else if (mtype == float32_type_node)
2786     return fcodef32;
2787   else if (mtype == float64_type_node)
2788     return fcodef64;
2789   else if (mtype == float128_type_node)
2790     return fcodef128;
2791   else if (mtype == float32x_type_node)
2792     return fcodef32x;
2793   else if (mtype == float64x_type_node)
2794     return fcodef64x;
2795   else if (mtype == float128x_type_node)
2796     return fcodef128x;
2797   else
2798     return END_BUILTINS;
2799 }
2800 
2801 #undef CASE_MATHFN
2802 #undef CASE_MATHFN_FLOATN
2803 #undef CASE_MATHFN_REENT
2804 
2805 /* Return the math function equivalent to FN but operating directly on TYPE,
2806    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
2807    otherwise use the explicit declaration.  If we can't do the conversion,
2808    return null.  */
2809 
2810 static tree
2811 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2812 {
2813   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2814   if (fcode2 == END_BUILTINS)
2815     return NULL_TREE;
2816 
2817   if (implicit_p && !builtin_decl_implicit_p (fcode2))
2818     return NULL_TREE;
2819 
2820   return builtin_decl_explicit (fcode2);
2821 }
2822 
2823 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2824 
2825 tree
2826 mathfn_built_in (tree type, combined_fn fn)
2827 {
2828   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2829 }
2830 
2831 /* Like mathfn_built_in_1, but take a built_in_function and
2832    always use the implicit builtin declarations.  */
2833 
2834 tree
2835 mathfn_built_in (tree type, enum built_in_function fn)
2836 {
2837   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2838 }
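/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns
   the declaration of sinf when that builtin is implicitly available,
   and NULL_TREE otherwise.  */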
2839 
2840 /* Return the type associated with a built in function, i.e., the one
2841    to be passed to mathfn_built_in to get the type-specific
2842    function.  */
2843 
2844 tree
2845 mathfn_built_in_type (combined_fn fn)
2846 {
2847 #define CASE_MATHFN(MATHFN)			\
2848   case CFN_BUILT_IN_##MATHFN:			\
2849     return double_type_node;			\
2850   case CFN_BUILT_IN_##MATHFN##F:		\
2851     return float_type_node;			\
2852   case CFN_BUILT_IN_##MATHFN##L:		\
2853     return long_double_type_node;
2854 
2855 #define CASE_MATHFN_FLOATN(MATHFN)		\
2856   CASE_MATHFN(MATHFN)				\
2857   case CFN_BUILT_IN_##MATHFN##F16:		\
2858     return float16_type_node;			\
2859   case CFN_BUILT_IN_##MATHFN##F32:		\
2860     return float32_type_node;			\
2861   case CFN_BUILT_IN_##MATHFN##F64:		\
2862     return float64_type_node;			\
2863   case CFN_BUILT_IN_##MATHFN##F128:		\
2864     return float128_type_node;			\
2865   case CFN_BUILT_IN_##MATHFN##F32X:		\
2866     return float32x_type_node;			\
2867   case CFN_BUILT_IN_##MATHFN##F64X:		\
2868     return float64x_type_node;			\
2869   case CFN_BUILT_IN_##MATHFN##F128X:		\
2870     return float128x_type_node;
2871 
2872 /* Similar to above, but appends _R after any F/L suffix.  */
2873 #define CASE_MATHFN_REENT(MATHFN) \
2874   case CFN_BUILT_IN_##MATHFN##_R:		\
2875     return double_type_node;			\
2876   case CFN_BUILT_IN_##MATHFN##F_R:		\
2877     return float_type_node;			\
2878   case CFN_BUILT_IN_##MATHFN##L_R:		\
2879     return long_double_type_node;
2880 
2881   switch (fn)
2882     {
2883     SEQ_OF_CASE_MATHFN
2884 
2885     default:
2886       return NULL_TREE;
2887     }
2888 
2889 #undef CASE_MATHFN
2890 #undef CASE_MATHFN_FLOATN
2891 #undef CASE_MATHFN_REENT
2892 #undef SEQ_OF_CASE_MATHFN
2893 }
2894 
2895 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2896    return its code, otherwise return IFN_LAST.  Note that this function
2897    only tests whether the function is defined in internal-fn.def, not whether
2898    it is actually available on the target.  */
2899 
2900 internal_fn
2901 associated_internal_fn (tree fndecl)
2902 {
2903   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2904   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2905   switch (DECL_FUNCTION_CODE (fndecl))
2906     {
2907 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2908     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2909 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2910     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2911     CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2912 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2913     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2914 #include "internal-fn.def"
2915 
2916     CASE_FLT_FN (BUILT_IN_POW10):
2917       return IFN_EXP10;
2918 
2919     CASE_FLT_FN (BUILT_IN_DREM):
2920       return IFN_REMAINDER;
2921 
2922     CASE_FLT_FN (BUILT_IN_SCALBN):
2923     CASE_FLT_FN (BUILT_IN_SCALBLN):
2924       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2925 	return IFN_LDEXP;
2926       return IFN_LAST;
2927 
2928     default:
2929       return IFN_LAST;
2930     }
2931 }
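/* Examples of the mapping: BUILT_IN_SQRT and BUILT_IN_SQRTF both yield
   IFN_SQRT, BUILT_IN_DREM yields IFN_REMAINDER, and BUILT_IN_SCALBN
   yields IFN_LDEXP only when the return type's radix is 2.  */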
2932 
2933 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2934    on the current target by a call to an internal function, return the
2935    code of that internal function, otherwise return IFN_LAST.  The caller
2936    is responsible for ensuring that any side-effects of the built-in
2937    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2938    must decide that the errno result isn't needed or make it available
2939    in some other way.  */
2940 
2941 internal_fn
2942 replacement_internal_fn (gcall *call)
2943 {
2944   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2945     {
2946       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2947       if (ifn != IFN_LAST)
2948 	{
2949 	  tree_pair types = direct_internal_fn_types (ifn, call);
2950 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2951 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2952 	    return ifn;
2953 	}
2954     }
2955   return IFN_LAST;
2956 }
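/* For instance, given a GIMPLE call x = __builtin_sqrtf (y), if the
   target supports IFN_SQRT for SFmode under the enclosing block's
   optimization mode, a caller may rewrite it as the internal call
   x = .SQRT (y), provided it has determined that any errno effect of
   sqrtf is unneeded.  */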
2957 
2958 /* Expand a call to the builtin ternary math functions (fma).
2959    Return NULL_RTX if a normal call should be emitted rather than expanding the
2960    function in-line.  EXP is the expression that is a call to the builtin
2961    function; if convenient, the result should be placed in TARGET.
2962    SUBTARGET may be used as the target for computing one of EXP's
2963    operands.  */
2964 
2965 static rtx
2966 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2967 {
2968   optab builtin_optab;
2969   rtx op0, op1, op2, result;
2970   rtx_insn *insns;
2971   tree fndecl = get_callee_fndecl (exp);
2972   tree arg0, arg1, arg2;
2973   machine_mode mode;
2974 
2975   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2976     return NULL_RTX;
2977 
2978   arg0 = CALL_EXPR_ARG (exp, 0);
2979   arg1 = CALL_EXPR_ARG (exp, 1);
2980   arg2 = CALL_EXPR_ARG (exp, 2);
2981 
2982   switch (DECL_FUNCTION_CODE (fndecl))
2983     {
2984     CASE_FLT_FN (BUILT_IN_FMA):
2985     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2986       builtin_optab = fma_optab; break;
2987     default:
2988       gcc_unreachable ();
2989     }
2990 
2991   /* Make a suitable register to place result in.  */
2992   mode = TYPE_MODE (TREE_TYPE (exp));
2993 
2994   /* Before working hard, check whether the instruction is available.  */
2995   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2996     return NULL_RTX;
2997 
2998   result = gen_reg_rtx (mode);
2999 
3000   /* Always stabilize the argument list.  */
3001   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
3002   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
3003   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3004 
3005   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3006   op1 = expand_normal (arg1);
3007   op2 = expand_normal (arg2);
3008 
3009   start_sequence ();
3010 
3011   /* Compute into RESULT.
3012      Set RESULT to wherever the result comes back.  */
3013   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3014 			      result, 0);
3015 
3016   /* If we were unable to expand via the builtin, stop the sequence
3017      (without outputting the insns) and call to the library function
3018      with the stabilized argument list.  */
3019   if (result == 0)
3020     {
3021       end_sequence ();
3022       return expand_call (exp, target, target == const0_rtx);
3023     }
3024 
3025   /* Output the entire sequence.  */
3026   insns = get_insns ();
3027   end_sequence ();
3028   emit_insn (insns);
3029 
3030   return result;
3031 }
3032 
3033 /* Expand a call to the builtin sin and cos math functions.
3034    Return NULL_RTX if a normal call should be emitted rather than expanding the
3035    function in-line.  EXP is the expression that is a call to the builtin
3036    function; if convenient, the result should be placed in TARGET.
3037    SUBTARGET may be used as the target for computing one of EXP's
3038    operands.  */
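/* Illustrative behaviour:
     s = __builtin_sin (x);
   first tries the combined sincos optab (the unused cosine value is
   simply discarded), then the plain sin optab, and finally falls back
   to a library call when the target provides neither.  */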
3039 
3040 static rtx
3041 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3042 {
3043   optab builtin_optab;
3044   rtx op0;
3045   rtx_insn *insns;
3046   tree fndecl = get_callee_fndecl (exp);
3047   machine_mode mode;
3048   tree arg;
3049 
3050   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3051     return NULL_RTX;
3052 
3053   arg = CALL_EXPR_ARG (exp, 0);
3054 
3055   switch (DECL_FUNCTION_CODE (fndecl))
3056     {
3057     CASE_FLT_FN (BUILT_IN_SIN):
3058     CASE_FLT_FN (BUILT_IN_COS):
3059       builtin_optab = sincos_optab; break;
3060     default:
3061       gcc_unreachable ();
3062     }
3063 
3064   /* Make a suitable register to place result in.  */
3065   mode = TYPE_MODE (TREE_TYPE (exp));
3066 
3067   /* Check if the sincos insn is available; otherwise fall back
3068      to the sin or cos insn.  */
3069   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3070     switch (DECL_FUNCTION_CODE (fndecl))
3071       {
3072       CASE_FLT_FN (BUILT_IN_SIN):
3073 	builtin_optab = sin_optab; break;
3074       CASE_FLT_FN (BUILT_IN_COS):
3075 	builtin_optab = cos_optab; break;
3076       default:
3077 	gcc_unreachable ();
3078       }
3079 
3080   /* Before working hard, check whether the instruction is available.  */
3081   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3082     {
3083       rtx result = gen_reg_rtx (mode);
3084 
3085       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3086 	 need to expand the argument again.  This way, we will not perform
3087 	 side-effects more than once.  */
3088       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3089 
3090       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3091 
3092       start_sequence ();
3093 
3094       /* Compute into RESULT.
3095 	 Set RESULT to wherever the result comes back.  */
3096       if (builtin_optab == sincos_optab)
3097 	{
3098 	  int ok;
3099 
3100 	  switch (DECL_FUNCTION_CODE (fndecl))
3101 	    {
3102 	    CASE_FLT_FN (BUILT_IN_SIN):
3103 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3104 	      break;
3105 	    CASE_FLT_FN (BUILT_IN_COS):
3106 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3107 	      break;
3108 	    default:
3109 	      gcc_unreachable ();
3110 	    }
3111 	  gcc_assert (ok);
3112 	}
3113       else
3114 	result = expand_unop (mode, builtin_optab, op0, result, 0);
3115 
3116       if (result != 0)
3117 	{
3118 	  /* Output the entire sequence.  */
3119 	  insns = get_insns ();
3120 	  end_sequence ();
3121 	  emit_insn (insns);
3122 	  return result;
3123 	}
3124 
3125       /* If we were unable to expand via the builtin, stop the sequence
3126 	 (without outputting the insns) and call the library function
3127 	 with the stabilized argument list.  */
3128       end_sequence ();
3129     }
3130 
3131   return expand_call (exp, target, target == const0_rtx);
3132 }
3133 
3134 /* Given an interclass math builtin decl FNDECL and its argument ARG,
3135    return an RTL instruction code that implements the functionality.
3136    If that isn't possible or available return CODE_FOR_nothing.  */
3137 
3138 static enum insn_code
3139 interclass_mathfn_icode (tree arg, tree fndecl)
3140 {
3141   bool errno_set = false;
3142   optab builtin_optab = unknown_optab;
3143   machine_mode mode;
3144 
3145   switch (DECL_FUNCTION_CODE (fndecl))
3146     {
3147     CASE_FLT_FN (BUILT_IN_ILOGB):
3148       errno_set = true; builtin_optab = ilogb_optab; break;
3149     CASE_FLT_FN (BUILT_IN_ISINF):
3150       builtin_optab = isinf_optab; break;
3151     case BUILT_IN_ISNORMAL:
3152     case BUILT_IN_ISFINITE:
3153     CASE_FLT_FN (BUILT_IN_FINITE):
3154     case BUILT_IN_FINITED32:
3155     case BUILT_IN_FINITED64:
3156     case BUILT_IN_FINITED128:
3157     case BUILT_IN_ISINFD32:
3158     case BUILT_IN_ISINFD64:
3159     case BUILT_IN_ISINFD128:
3160       /* These builtins have no optabs (yet).  */
3161       break;
3162     default:
3163       gcc_unreachable ();
3164     }
3165 
3166   /* There's no easy way to detect the case where we need to set EDOM.  */
3167   if (flag_errno_math && errno_set)
3168     return CODE_FOR_nothing;
3169 
3170   /* Optab mode depends on the mode of the input argument.  */
3171   mode = TYPE_MODE (TREE_TYPE (arg));
3172 
3173   if (builtin_optab)
3174     return optab_handler (builtin_optab, mode);
3175   return CODE_FOR_nothing;
3176 }
3177 
3178 /* Expand a call to one of the builtin math functions that operate on
3179    a floating-point argument and produce an integer result (ilogb,
3180    isinf, isnan, etc.).
3181    Return 0 if a normal call should be emitted rather than expanding the
3182    function in-line.  EXP is the expression that is a call to the builtin
3183    function; if convenient, the result should be placed in TARGET.  */
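/* For example, on a target with an isinf pattern for the argument's
   mode,
     r = __builtin_isinf (x);
   expands to that insn; with no usable pattern (or when errno handling
   rules out ilogb), NULL_RTX is returned and a normal call is made.  */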
3184 
3185 static rtx
3186 expand_builtin_interclass_mathfn (tree exp, rtx target)
3187 {
3188   enum insn_code icode = CODE_FOR_nothing;
3189   rtx op0;
3190   tree fndecl = get_callee_fndecl (exp);
3191   machine_mode mode;
3192   tree arg;
3193 
3194   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3195     return NULL_RTX;
3196 
3197   arg = CALL_EXPR_ARG (exp, 0);
3198   icode = interclass_mathfn_icode (arg, fndecl);
3199   mode = TYPE_MODE (TREE_TYPE (arg));
3200 
3201   if (icode != CODE_FOR_nothing)
3202     {
3203       class expand_operand ops[1];
3204       rtx_insn *last = get_last_insn ();
3205       tree orig_arg = arg;
3206 
3207       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3208 	 need to expand the argument again.  This way, we will not perform
3209 	 side-effects more than once.  */
3210       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3211 
3212       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3213 
3214       if (mode != GET_MODE (op0))
3215 	op0 = convert_to_mode (mode, op0, 0);
3216 
3217       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3218       if (maybe_legitimize_operands (icode, 0, 1, ops)
3219 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3220 	return ops[0].value;
3221 
3222       delete_insns_since (last);
3223       CALL_EXPR_ARG (exp, 0) = orig_arg;
3224     }
3225 
3226   return NULL_RTX;
3227 }
3228 
3229 /* Expand a call to the builtin sincos math function.
3230    Return NULL_RTX if a normal call should be emitted rather than expanding the
3231    function in-line.  EXP is the expression that is a call to the builtin
3232    function.  */
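/* Sketch of the expansion on a target with a sincos insn:
     sincos (x, &s, &c);
   becomes a single two-output insn computing both values in registers,
   followed by two stores through the user-supplied pointers.  */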
3233 
3234 static rtx
3235 expand_builtin_sincos (tree exp)
3236 {
3237   rtx op0, op1, op2, target1, target2;
3238   machine_mode mode;
3239   tree arg, sinp, cosp;
3240   int result;
3241   location_t loc = EXPR_LOCATION (exp);
3242   tree alias_type, alias_off;
3243 
3244   if (!validate_arglist (exp, REAL_TYPE,
3245  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3246     return NULL_RTX;
3247 
3248   arg = CALL_EXPR_ARG (exp, 0);
3249   sinp = CALL_EXPR_ARG (exp, 1);
3250   cosp = CALL_EXPR_ARG (exp, 2);
3251 
3252   /* Make a suitable register to place result in.  */
3253   mode = TYPE_MODE (TREE_TYPE (arg));
3254 
3255   /* Check if sincos insn is available, otherwise emit the call.  */
3256   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3257     return NULL_RTX;
3258 
3259   target1 = gen_reg_rtx (mode);
3260   target2 = gen_reg_rtx (mode);
3261 
3262   op0 = expand_normal (arg);
3263   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3264   alias_off = build_int_cst (alias_type, 0);
3265   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3266 					sinp, alias_off));
3267   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3268 					cosp, alias_off));
3269 
3270   /* Compute into target1 and target2.
3271      Set TARGET to wherever the result comes back.  */
3272   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3273   gcc_assert (result);
3274 
3275   /* Move target1 and target2 to the memory locations indicated
3276      by op1 and op2.  */
3277   emit_move_insn (op1, target1);
3278   emit_move_insn (op2, target2);
3279 
3280   return const0_rtx;
3281 }
3282 
3283 /* Expand a call to the internal cexpi builtin to the sincos math function.
3284    EXP is the expression that is a call to the builtin function; if convenient,
3285    the result should be placed in TARGET.  */
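/* __builtin_cexpi (x) computes cos (x) + i*sin (x).  Roughly, the
   strategies below are tried in this order of preference:
     sincos insn:  both parts computed in registers;
     libc sincos:  sincos (x, &s, &c) through stack temporaries;
     cexp:         cexp (0.0 + i*x), building a decl if necessary.  */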
3286 
3287 static rtx
3288 expand_builtin_cexpi (tree exp, rtx target)
3289 {
3290   tree fndecl = get_callee_fndecl (exp);
3291   tree arg, type;
3292   machine_mode mode;
3293   rtx op0, op1, op2;
3294   location_t loc = EXPR_LOCATION (exp);
3295 
3296   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3297     return NULL_RTX;
3298 
3299   arg = CALL_EXPR_ARG (exp, 0);
3300   type = TREE_TYPE (arg);
3301   mode = TYPE_MODE (TREE_TYPE (arg));
3302 
3303   /* Try expanding via a sincos optab; fall back to emitting a libcall
3304      to sincos or cexp.  One of those surely exists, since cexpi is
3305      only generated when folding sincos or cexp or when one is available.  */
3306   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3307     {
3308       op1 = gen_reg_rtx (mode);
3309       op2 = gen_reg_rtx (mode);
3310 
3311       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3312 
3313       /* Compute into op1 and op2.  */
3314       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3315     }
3316   else if (targetm.libc_has_function (function_sincos, type))
3317     {
3318       tree call, fn = NULL_TREE;
3319       tree top1, top2;
3320       rtx op1a, op2a;
3321 
3322       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3323 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3324       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3325 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3326       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3327 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3328       else
3329 	gcc_unreachable ();
3330 
3331       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3332       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3333       op1a = copy_addr_to_reg (XEXP (op1, 0));
3334       op2a = copy_addr_to_reg (XEXP (op2, 0));
3335       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3336       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3337 
3338       /* Make sure not to fold the sincos call again.  */
3339       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3340       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3341 				      call, 3, arg, top1, top2));
3342     }
3343   else
3344     {
3345       tree call, fn = NULL_TREE, narg;
3346       tree ctype = build_complex_type (type);
3347 
3348       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3349 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3350       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3351 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
3352       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3353 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3354       else
3355 	gcc_unreachable ();
3356 
3357       /* If we don't have a decl for cexp, create one.  This is the
3358 	 friendliest fallback if the user calls __builtin_cexpi
3359 	 on a target without full C99 function support.  */
3360       if (fn == NULL_TREE)
3361 	{
3362 	  tree fntype;
3363 	  const char *name = NULL;
3364 
3365 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3366 	    name = "cexpf";
3367 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3368 	    name = "cexp";
3369 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3370 	    name = "cexpl";
3371 
3372 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3373 	  fn = build_fn_decl (name, fntype);
3374 	}
3375 
3376       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3377 			  build_real (type, dconst0), arg);
3378 
3379       /* Make sure not to fold the cexp call again.  */
3380       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3381       return expand_expr (build_call_nary (ctype, call, 1, narg),
3382 			  target, VOIDmode, EXPAND_NORMAL);
3383     }
3384 
3385   /* Now build the proper return type.  */
3386   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3387 			      make_tree (TREE_TYPE (arg), op2),
3388 			      make_tree (TREE_TYPE (arg), op1)),
3389 		      target, VOIDmode, EXPAND_NORMAL);
3390 }
3391 
3392 /* Conveniently construct a function call expression.  FNDECL names the
3393    function to be called, N is the number of arguments, and the "..."
3394    parameters are the argument expressions.  Unlike build_call_expr
3395    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
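/* Typical use in this file, e.g. when lowering a rounding builtin to
   its floating-point fallback:
     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);
   The resulting bare CALL_EXPR can be expanded without the risk of
   being folded back into the builtin currently being lowered.  */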
3396 
3397 static tree
3398 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3399 {
3400   va_list ap;
3401   tree fntype = TREE_TYPE (fndecl);
3402   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3403 
3404   va_start (ap, n);
3405   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3406   va_end (ap);
3407   SET_EXPR_LOCATION (fn, loc);
3408   return fn;
3409 }
3410 
3411 /* Expand a call to one of the builtin rounding functions gcc defines
3412    as an extension (lfloor and lceil).  As these are gcc extensions we
3413    do not need to worry about setting errno to EDOM.
3414    If expanding via optab fails, lower the expression to (int)(floor(x)).
3415    EXP is the expression that is a call to the builtin function;
3416    if convenient, the result should be placed in TARGET.  */
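/* Conceptually, when the lceil/lfloor optab is unavailable,
     l = __builtin_lfloor (x);
   is lowered to the equivalent of
     l = (long) floor (x);
   with floor itself expanded inline or called, as the target allows.  */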
3417 
3418 static rtx
3419 expand_builtin_int_roundingfn (tree exp, rtx target)
3420 {
3421   convert_optab builtin_optab;
3422   rtx op0, tmp;
3423   rtx_insn *insns;
3424   tree fndecl = get_callee_fndecl (exp);
3425   enum built_in_function fallback_fn;
3426   tree fallback_fndecl;
3427   machine_mode mode;
3428   tree arg;
3429 
3430   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3431     return NULL_RTX;
3432 
3433   arg = CALL_EXPR_ARG (exp, 0);
3434 
3435   switch (DECL_FUNCTION_CODE (fndecl))
3436     {
3437     CASE_FLT_FN (BUILT_IN_ICEIL):
3438     CASE_FLT_FN (BUILT_IN_LCEIL):
3439     CASE_FLT_FN (BUILT_IN_LLCEIL):
3440       builtin_optab = lceil_optab;
3441       fallback_fn = BUILT_IN_CEIL;
3442       break;
3443 
3444     CASE_FLT_FN (BUILT_IN_IFLOOR):
3445     CASE_FLT_FN (BUILT_IN_LFLOOR):
3446     CASE_FLT_FN (BUILT_IN_LLFLOOR):
3447       builtin_optab = lfloor_optab;
3448       fallback_fn = BUILT_IN_FLOOR;
3449       break;
3450 
3451     default:
3452       gcc_unreachable ();
3453     }
3454 
3455   /* Make a suitable register to place result in.  */
3456   mode = TYPE_MODE (TREE_TYPE (exp));
3457 
3458   target = gen_reg_rtx (mode);
3459 
3460   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3461      need to expand the argument again.  This way, we will not perform
3462      side-effects more than once.  */
3463   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3464 
3465   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3466 
3467   start_sequence ();
3468 
3469   /* Compute into TARGET.  */
3470   if (expand_sfix_optab (target, op0, builtin_optab))
3471     {
3472       /* Output the entire sequence.  */
3473       insns = get_insns ();
3474       end_sequence ();
3475       emit_insn (insns);
3476       return target;
3477     }
3478 
3479   /* If we were unable to expand via the builtin, stop the sequence
3480      (without outputting the insns).  */
3481   end_sequence ();
3482 
3483   /* Fall back to floating point rounding optab.  */
3484   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3485 
3486   /* For non-C99 targets we may end up without a fallback fndecl here
3487      if the user called __builtin_lfloor directly.  In this case emit
3488      a call to the floor/ceil variants nevertheless.  This should give
3489      the best user experience on targets lacking full C99 support.  */
3490   if (fallback_fndecl == NULL_TREE)
3491     {
3492       tree fntype;
3493       const char *name = NULL;
3494 
3495       switch (DECL_FUNCTION_CODE (fndecl))
3496 	{
3497 	case BUILT_IN_ICEIL:
3498 	case BUILT_IN_LCEIL:
3499 	case BUILT_IN_LLCEIL:
3500 	  name = "ceil";
3501 	  break;
3502 	case BUILT_IN_ICEILF:
3503 	case BUILT_IN_LCEILF:
3504 	case BUILT_IN_LLCEILF:
3505 	  name = "ceilf";
3506 	  break;
3507 	case BUILT_IN_ICEILL:
3508 	case BUILT_IN_LCEILL:
3509 	case BUILT_IN_LLCEILL:
3510 	  name = "ceill";
3511 	  break;
3512 	case BUILT_IN_IFLOOR:
3513 	case BUILT_IN_LFLOOR:
3514 	case BUILT_IN_LLFLOOR:
3515 	  name = "floor";
3516 	  break;
3517 	case BUILT_IN_IFLOORF:
3518 	case BUILT_IN_LFLOORF:
3519 	case BUILT_IN_LLFLOORF:
3520 	  name = "floorf";
3521 	  break;
3522 	case BUILT_IN_IFLOORL:
3523 	case BUILT_IN_LFLOORL:
3524 	case BUILT_IN_LLFLOORL:
3525 	  name = "floorl";
3526 	  break;
3527 	default:
3528 	  gcc_unreachable ();
3529 	}
3530 
3531       fntype = build_function_type_list (TREE_TYPE (arg),
3532 					 TREE_TYPE (arg), NULL_TREE);
3533       fallback_fndecl = build_fn_decl (name, fntype);
3534     }
3535 
3536   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3537 
3538   tmp = expand_normal (exp);
3539   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3540 
3541   /* Truncate the result of floating point optab to integer
3542      via expand_fix ().  */
3543   target = gen_reg_rtx (mode);
3544   expand_fix (target, tmp, 0);
3545 
3546   return target;
3547 }
3548 
3549 /* Expand a call to one of the builtin math functions doing integer
3550    conversion (lrint).
3551    Return 0 if a normal call should be emitted rather than expanding the
3552    function in-line.  EXP is the expression that is a call to the builtin
3553    function; if convenient, the result should be placed in TARGET.  */
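/* For instance, with -fno-math-errno and an lrint pattern for the
   mode,
     l = __builtin_lrint (x);
   expands to that insn; for __builtin_iround with no pattern, a call
   to lround is emitted and its result converted to int (see the
   fallback code below).  */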
3554 
3555 static rtx
3556 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3557 {
3558   convert_optab builtin_optab;
3559   rtx op0;
3560   rtx_insn *insns;
3561   tree fndecl = get_callee_fndecl (exp);
3562   tree arg;
3563   machine_mode mode;
3564   enum built_in_function fallback_fn = BUILT_IN_NONE;
3565 
3566   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3567     return NULL_RTX;
3568 
3569   arg = CALL_EXPR_ARG (exp, 0);
3570 
3571   switch (DECL_FUNCTION_CODE (fndecl))
3572     {
3573     CASE_FLT_FN (BUILT_IN_IRINT):
3574       fallback_fn = BUILT_IN_LRINT;
3575       gcc_fallthrough ();
3576     CASE_FLT_FN (BUILT_IN_LRINT):
3577     CASE_FLT_FN (BUILT_IN_LLRINT):
3578       builtin_optab = lrint_optab;
3579       break;
3580 
3581     CASE_FLT_FN (BUILT_IN_IROUND):
3582       fallback_fn = BUILT_IN_LROUND;
3583       gcc_fallthrough ();
3584     CASE_FLT_FN (BUILT_IN_LROUND):
3585     CASE_FLT_FN (BUILT_IN_LLROUND):
3586       builtin_optab = lround_optab;
3587       break;
3588 
3589     default:
3590       gcc_unreachable ();
3591     }
3592 
3593   /* There's no easy way to detect the case where we need to set EDOM.  */
3594   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3595     return NULL_RTX;
3596 
3597   /* Make a suitable register to place result in.  */
3598   mode = TYPE_MODE (TREE_TYPE (exp));
3599 
3600   /* Since we can't detect the EDOM case, only expand inline without errno.  */
3601   if (!flag_errno_math)
3602     {
3603       rtx result = gen_reg_rtx (mode);
3604 
3605       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3606 	 need to expand the argument again.  This way, we will not perform
3607 	 side-effects more than once.  */
3608       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3609 
3610       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3611 
3612       start_sequence ();
3613 
3614       if (expand_sfix_optab (result, op0, builtin_optab))
3615 	{
3616 	  /* Output the entire sequence.  */
3617 	  insns = get_insns ();
3618 	  end_sequence ();
3619 	  emit_insn (insns);
3620 	  return result;
3621 	}
3622 
3623       /* If we were unable to expand via the builtin, stop the sequence
3624 	 (without outputting the insns) and call the library function
3625 	 with the stabilized argument list.  */
3626       end_sequence ();
3627     }
3628 
3629   if (fallback_fn != BUILT_IN_NONE)
3630     {
3631       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
3632 	 targets, (int) round (x) should never be transformed into
3633 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3634 	 a call to lround in the hope that the target provides at least some
3635 	 C99 functions.  This should give the best user experience on
3636 	 targets lacking full C99 support.  */
3637       tree fallback_fndecl = mathfn_built_in_1
3638 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3639 
3640       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3641 				   fallback_fndecl, 1, arg);
3642 
3643       target = expand_call (exp, NULL_RTX, target == const0_rtx);
3644       target = maybe_emit_group_store (target, TREE_TYPE (exp));
3645       return convert_to_mode (mode, target, 0);
3646     }
3647 
3648   return expand_call (exp, target, target == const0_rtx);
3649 }
3650 
3651 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
3652    a normal call should be emitted rather than expanding the function
3653    in-line.  EXP is the expression that is a call to the builtin
3654    function; if convenient, the result should be placed in TARGET.  */
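/* No insn pattern is attempted here; the expansion always emits the
   libgcc libcall, e.g. for a double argument roughly
     r = __powidf2 (x, n);
   where n is converted to int mode as enforced below.  */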
3655 
3656 static rtx
3657 expand_builtin_powi (tree exp, rtx target)
3658 {
3659   tree arg0, arg1;
3660   rtx op0, op1;
3661   machine_mode mode;
3662   machine_mode mode2;
3663 
3664   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3665     return NULL_RTX;
3666 
3667   arg0 = CALL_EXPR_ARG (exp, 0);
3668   arg1 = CALL_EXPR_ARG (exp, 1);
3669   mode = TYPE_MODE (TREE_TYPE (exp));
3670 
3671   /* Emit a libcall to libgcc.  */
3672 
3673   /* Mode of the 2nd argument must match that of an int.  */
3674   mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3675 
3676   if (target == NULL_RTX)
3677     target = gen_reg_rtx (mode);
3678 
3679   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3680   if (GET_MODE (op0) != mode)
3681     op0 = convert_to_mode (mode, op0, 0);
3682   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3683   if (GET_MODE (op1) != mode2)
3684     op1 = convert_to_mode (mode2, op1, 0);
3685 
3686   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3687 				    target, LCT_CONST, mode,
3688 				    op0, mode, op1, mode2);
3689 
3690   return target;
3691 }
3692 
3693 /* Expand expression EXP which is a call to the strlen builtin.  Return
3694    NULL_RTX if we failed and the caller should emit a normal call, otherwise
3695    try to get the result in TARGET, if convenient.  */
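/* For instance, n = strlen ("hello") is folded to the constant 5
   below without emitting any insns.  Only when no compile-time length
   is known does the expansion try the target's strlen insn pattern
   (operands: result, string memory, search character, alignment).  */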
3696 
3697 static rtx
3698 expand_builtin_strlen (tree exp, rtx target,
3699 		       machine_mode target_mode)
3700 {
3701   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3702     return NULL_RTX;
3703 
3704   tree src = CALL_EXPR_ARG (exp, 0);
3705   if (!check_read_access (exp, src))
3706     return NULL_RTX;
3707 
3708   /* If the length can be computed at compile-time, return it.  */
3709   if (tree len = c_strlen (src, 0))
3710     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3711 
3712   /* If the length can be computed at compile-time and is constant
3713      integer, but there are side-effects in src, evaluate
3714      src for side-effects, then return len.
3715      E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3716      can be optimized into: i++; x = 3;  */
3717   tree len = c_strlen (src, 1);
3718   if (len && TREE_CODE (len) == INTEGER_CST)
3719     {
3720       expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3721       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3722     }
3723 
3724   unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3725 
3726   /* If SRC is not a pointer type, don't do this operation inline.  */
3727   if (align == 0)
3728     return NULL_RTX;
3729 
3730   /* Bail out if we can't compute strlen in the right mode.  */
3731   machine_mode insn_mode;
3732   enum insn_code icode = CODE_FOR_nothing;
3733   FOR_EACH_MODE_FROM (insn_mode, target_mode)
3734     {
3735       icode = optab_handler (strlen_optab, insn_mode);
3736       if (icode != CODE_FOR_nothing)
3737 	break;
3738     }
3739   if (insn_mode == VOIDmode)
3740     return NULL_RTX;
3741 
3742   /* Make a place to hold the source address.  We will not expand
3743      the actual source until we are sure that the expansion will
3744      not fail -- there are trees that cannot be expanded twice.  */
3745   rtx src_reg = gen_reg_rtx (Pmode);
3746 
3747   /* Mark the beginning of the strlen sequence so we can emit the
3748      source operand later.  */
3749   rtx_insn *before_strlen = get_last_insn ();
3750 
3751   class expand_operand ops[4];
3752   create_output_operand (&ops[0], target, insn_mode);
3753   create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3754   create_integer_operand (&ops[2], 0);
3755   create_integer_operand (&ops[3], align);
3756   if (!maybe_expand_insn (icode, 4, ops))
3757     return NULL_RTX;
3758 
3759   /* Check to see if the argument was declared attribute nonstring
3760      and if so, issue a warning since at this point it's not known
3761      to be nul-terminated.  */
3762   maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3763 
3764   /* Now that we are assured of success, expand the source.  */
3765   start_sequence ();
3766   rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3767   if (pat != src_reg)
3768     {
3769 #ifdef POINTERS_EXTEND_UNSIGNED
3770       if (GET_MODE (pat) != Pmode)
3771 	pat = convert_to_mode (Pmode, pat,
3772 			       POINTERS_EXTEND_UNSIGNED);
3773 #endif
3774       emit_move_insn (src_reg, pat);
3775     }
3776   pat = get_insns ();
3777   end_sequence ();
3778 
3779   if (before_strlen)
3780     emit_insn_after (pat, before_strlen);
3781   else
3782     emit_insn_before (pat, get_insns ());
3783 
3784   /* Return the value in the proper mode for this function.  */
3785   if (GET_MODE (ops[0].value) == target_mode)
3786     target = ops[0].value;
3787   else if (target != 0)
3788     convert_move (target, ops[0].value, 0);
3789   else
3790     target = convert_to_mode (target_mode, ops[0].value, 0);
3791 
3792   return target;
3793 }
3794 
3795 /* Expand call EXP to the strnlen built-in, returning the result
3796    in TARGET if convenient, or NULL_RTX on failure.  */
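/* E.g. strnlen ("hello", 3) expands to the constant 3, the MIN of
   the string length 5 and the bound; a non-constant bound is handled
   only when its value range is known, as below.  */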
3797 
3798 static rtx
3799 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3800 {
3801   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3802     return NULL_RTX;
3803 
3804   tree src = CALL_EXPR_ARG (exp, 0);
3805   tree bound = CALL_EXPR_ARG (exp, 1);
3806 
3807   if (!bound)
3808     return NULL_RTX;
3809 
3810   check_read_access (exp, src, bound);
3811 
3812   location_t loc = UNKNOWN_LOCATION;
3813   if (EXPR_HAS_LOCATION (exp))
3814     loc = EXPR_LOCATION (exp);
3815 
3816   /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3817      so these conversions aren't necessary.  */
3818   c_strlen_data lendata = { };
3819   tree len = c_strlen (src, 0, &lendata, 1);
3820   if (len)
3821     len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3822 
3823   if (TREE_CODE (bound) == INTEGER_CST)
3824     {
3825       if (!len)
3826 	return NULL_RTX;
3827 
3828       len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3829       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3830     }
3831 
3832   if (TREE_CODE (bound) != SSA_NAME)
3833     return NULL_RTX;
3834 
3835   wide_int min, max;
3836   enum value_range_kind rng = get_range_info (bound, &min, &max);
3837   if (rng != VR_RANGE)
3838     return NULL_RTX;
3839 
3840   if (!len || TREE_CODE (len) != INTEGER_CST)
3841     {
3842       bool exact;
3843       lendata.decl = unterminated_array (src, &len, &exact);
3844       if (!lendata.decl)
3845 	return NULL_RTX;
3846     }
3847 
3848   if (lendata.decl)
3849     return NULL_RTX;
3850 
3851   if (wi::gtu_p (min, wi::to_wide (len)))
3852     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3853 
3854   len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3855   return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3856 }
3857 
3858 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3859    bytes from the buffer at DATA + OFFSET and return them reinterpreted
3860    as a target constant.  */
3861 
3862 static rtx
3863 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3864 			 scalar_int_mode mode)
3865 {
3866   /* The REPresentation pointed to by DATA need not be a nul-terminated
3867      string but the caller guarantees it's large enough for MODE.  */
3868   const char *rep = (const char *) data;
3869 
3870   return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3871 }
3872 
3873 /* LEN specifies the length of the block for a memcpy/memset operation.
3874    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3875    In some cases we can make a very likely guess at the maximum size,
3876    which we then store in PROBABLE_MAX_SIZE.  */
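/* For instance, for memcpy (d, s, n) where value ranges record n in
   [32, 4096], *MIN_SIZE and *MAX_SIZE become 32 and 4096 and the
   expansion can choose a copy strategy accordingly; a constant N
   collapses all three outputs to that single value.  */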
3877 
3878 static void
3879 determine_block_size (tree len, rtx len_rtx,
3880 		      unsigned HOST_WIDE_INT *min_size,
3881 		      unsigned HOST_WIDE_INT *max_size,
3882 		      unsigned HOST_WIDE_INT *probable_max_size)
3883 {
3884   if (CONST_INT_P (len_rtx))
3885     {
3886       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3887       return;
3888     }
3889   else
3890     {
3891       wide_int min, max;
3892       enum value_range_kind range_type = VR_UNDEFINED;
3893 
3894       /* Determine bounds from the type.  */
3895       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3896 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3897       else
3898 	*min_size = 0;
3899       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3900 	*probable_max_size = *max_size
3901 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3902       else
3903 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3904 
3905       if (TREE_CODE (len) == SSA_NAME)
3906 	range_type = get_range_info (len, &min, &max);
3907       if (range_type == VR_RANGE)
3908 	{
3909 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3910 	    *min_size = min.to_uhwi ();
3911 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3912 	    *probable_max_size = *max_size = max.to_uhwi ();
3913 	}
3914       else if (range_type == VR_ANTI_RANGE)
3915 	{
3916 	  /* Code like
3917 
3918 	     int n;
3919 	     if (n < 100)
3920 	       memcpy (a, b, n)
3921 
3922 	     produces an anti-range allowing negative values of N.  We can
3923 	     still use that information to guess that N is not negative.  */
3925 	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3926 	    *probable_max_size = min.to_uhwi () - 1;
3927 	}
3928     }
3929   gcc_checking_assert (*max_size <=
3930 		       (unsigned HOST_WIDE_INT)
3931 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3932 }
3933 
3934 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3935    accessing an object of SIZE bytes.  */
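/* Example diagnostic (wording abbreviated) for
     char a[4];
     n = strnlen (a, 8);
   "specified bound 8 exceeds source size 4" under
   -Wstringop-overread.  */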
3936 
3937 static bool
3938 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3939 		      tree bndrng[2], tree size, const access_data *pad = NULL)
3940 {
3941   if (!bndrng[0] || TREE_NO_WARNING (exp))
3942     return false;
3943 
3944   tree maxobjsize = max_object_size ();
3945 
3946   bool warned = false;
3947 
3948   if (opt == OPT_Wstringop_overread)
3949     {
3950       bool maybe = pad && pad->src.phi ();
3951 
3952       if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3953 	{
3954 	  if (bndrng[0] == bndrng[1])
3955 	    warned = (func
3956 		      ? warning_at (loc, opt,
3957 				    (maybe
3958 				     ? G_("%K%qD specified bound %E may "
3959 					  "exceed maximum object size %E")
3960 				     : G_("%K%qD specified bound %E "
3961 					  "exceeds maximum object size %E")),
3962 				    exp, func, bndrng[0], maxobjsize)
3963 		      : warning_at (loc, opt,
3964 				    (maybe
3965 				     ? G_("%Kspecified bound %E may "
3966 					  "exceed maximum object size %E")
3967 				     : G_("%Kspecified bound %E "
3968 					  "exceeds maximum object size %E")),
3969 				    exp, bndrng[0], maxobjsize));
3970 	  else
3971 	    warned = (func
3972 		      ? warning_at (loc, opt,
3973 				    (maybe
3974 				     ? G_("%K%qD specified bound [%E, %E] may "
3975 					  "exceed maximum object size %E")
3976 				     : G_("%K%qD specified bound [%E, %E] "
3977 					  "exceeds maximum object size %E")),
3978 				    exp, func,
3979 				    bndrng[0], bndrng[1], maxobjsize)
3980 		      : warning_at (loc, opt,
3981 				    (maybe
3982 				     ? G_("%Kspecified bound [%E, %E] may "
3983 					  "exceed maximum object size %E")
3984 				     : G_("%Kspecified bound [%E, %E] "
3985 					  "exceeds maximum object size %E")),
3986 				    exp, bndrng[0], bndrng[1], maxobjsize));
3987 	}
3988       else if (!size || tree_int_cst_le (bndrng[0], size))
3989 	return false;
3990       else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3991 	warned = (func
3992 		  ? warning_at (loc, opt,
3993 				(maybe
3994 				 ? G_("%K%qD specified bound %E may exceed "
3995 				      "source size %E")
3996 				 : G_("%K%qD specified bound %E exceeds "
3997 				      "source size %E")),
3998 				exp, func, bndrng[0], size)
3999 		  : warning_at (loc, opt,
4000 				(maybe
4001 				 ? G_("%Kspecified bound %E may exceed "
4002 				      "source size %E")
4003 				 : G_("%Kspecified bound %E exceeds "
4004 				      "source size %E")),
4005 				exp, bndrng[0], size));
4006       else
4007 	warned = (func
4008 		  ? warning_at (loc, opt,
4009 				(maybe
4010 				 ? G_("%K%qD specified bound [%E, %E] may "
4011 				      "exceed source size %E")
4012 				 : G_("%K%qD specified bound [%E, %E] exceeds "
4013 				      "source size %E")),
4014 				exp, func, bndrng[0], bndrng[1], size)
4015 		  : warning_at (loc, opt,
4016 				(maybe
4017 				 ? G_("%Kspecified bound [%E, %E] may exceed "
4018 				      "source size %E")
4019 				 : G_("%Kspecified bound [%E, %E] exceeds "
4020 				      "source size %E")),
4021 				exp, bndrng[0], bndrng[1], size));
4022       if (warned)
4023 	{
4024 	  if (pad && pad->src.ref)
4025 	    {
4026 	      if (DECL_P (pad->src.ref))
4027 		inform (DECL_SOURCE_LOCATION (pad->src.ref),
4028 			"source object declared here");
4029 	      else if (EXPR_HAS_LOCATION (pad->src.ref))
4030 		inform (EXPR_LOCATION (pad->src.ref),
4031 			"source object allocated here");
4032 	    }
4033 	  TREE_NO_WARNING (exp) = true;
4034 	}
4035 
4036       return warned;
4037     }
4038 
4039   bool maybe = pad && pad->dst.phi ();
4040   if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4041     {
4042       if (bndrng[0] == bndrng[1])
4043 	warned = (func
4044 		  ? warning_at (loc, opt,
4045 				(maybe
4046 				 ? G_("%K%qD specified size %E may "
4047 				      "exceed maximum object size %E")
4048 				 : G_("%K%qD specified size %E "
4049 				      "exceeds maximum object size %E")),
4050 				exp, func, bndrng[0], maxobjsize)
4051 		  : warning_at (loc, opt,
4052 				(maybe
4053 				 ? G_("%Kspecified size %E may exceed "
4054 				      "maximum object size %E")
4055 				 : G_("%Kspecified size %E exceeds "
4056 				      "maximum object size %E")),
4057 				exp, bndrng[0], maxobjsize));
4058       else
4059 	warned = (func
4060 		  ? warning_at (loc, opt,
4061 				(maybe
4062 				 ? G_("%K%qD specified size between %E and %E "
4063 				      "may exceed maximum object size %E")
4064 				 : G_("%K%qD specified size between %E and %E "
4065 				      "exceeds maximum object size %E")),
4066 				exp, func,
4067 				bndrng[0], bndrng[1], maxobjsize)
4068 		  : warning_at (loc, opt,
4069 				(maybe
4070 				 ? G_("%Kspecified size between %E and %E "
4071 				      "may exceed maximum object size %E")
4072 				 : G_("%Kspecified size between %E and %E "
4073 				      "exceeds maximum object size %E")),
4074 				exp, bndrng[0], bndrng[1], maxobjsize));
4075     }
4076   else if (!size || tree_int_cst_le (bndrng[0], size))
4077     return false;
4078   else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4079     warned = (func
4080 	      ? warning_at (loc, OPT_Wstringop_overflow_,
4081 			    (maybe
4082 			     ? G_("%K%qD specified bound %E may exceed "
4083 				  "destination size %E")
4084 			     : G_("%K%qD specified bound %E exceeds "
4085 				  "destination size %E")),
4086 			    exp, func, bndrng[0], size)
4087 	      : warning_at (loc, OPT_Wstringop_overflow_,
4088 			    (maybe
4089 			     ? G_("%Kspecified bound %E may exceed "
4090 				  "destination size %E")
4091 			     : G_("%Kspecified bound %E exceeds "
4092 				  "destination size %E")),
4093 			    exp, bndrng[0], size));
4094   else
4095     warned = (func
4096 	      ? warning_at (loc, OPT_Wstringop_overflow_,
4097 			    (maybe
4098 			     ? G_("%K%qD specified bound [%E, %E] may exceed "
4099 				  "destination size %E")
4100 			     : G_("%K%qD specified bound [%E, %E] exceeds "
4101 				  "destination size %E")),
4102 			    exp, func, bndrng[0], bndrng[1], size)
4103 	      : warning_at (loc, OPT_Wstringop_overflow_,
4104 			    (maybe
4105 			     ? G_("%Kspecified bound [%E, %E] may exceed "
4106 				  "destination size %E")
4107 			     : G_("%Kspecified bound [%E, %E] exceeds "
4108 				  "destination size %E")),
4109 			    exp, bndrng[0], bndrng[1], size));
4110 
4111   if (warned)
4112     {
4113       if (pad && pad->dst.ref)
4114 	{
4115 	  if (DECL_P (pad->dst.ref))
4116 	    inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4117 		    "destination object declared here");
4118 	  else if (EXPR_HAS_LOCATION (pad->dst.ref))
4119 	    inform (EXPR_LOCATION (pad->dst.ref),
4120 		    "destination object allocated here");
4121 	}
4122       TREE_NO_WARNING (exp) = true;
4123     }
4124 
4125   return warned;
4126 }
4127 
4128 /* For an expression EXP issue an access warning controlled by option OPT
4129    for an access whose size is in RANGE to a region of SIZE bytes.
4130    WRITE is true for a write access, READ for a read access, and neither
4131    for a call that may or may not perform an access but for which the
4132    range is expected to be valid.
4133    Returns true when a warning has been issued.  */
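/* Example diagnostic from the write branch below:
     char d[4];
     memcpy (d, s, 8);
   "'memcpy' writing 8 bytes into a region of size 4 overflows the
   destination" under -Wstringop-overflow.  */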
4134 
4135 static bool
4136 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4137 		 tree size, bool write, bool read, bool maybe)
4138 {
4139   bool warned = false;
4140 
4141   if (write && read)
4142     {
4143       if (tree_int_cst_equal (range[0], range[1]))
4144 	warned = (func
4145 		  ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4146 			       (maybe
4147 				? G_("%K%qD may access %E byte in a region "
4148 				     "of size %E")
4149 				: G_("%K%qD accessing %E byte in a region "
4150 				     "of size %E")),
4151 				(maybe
4152 				 ? G_ ("%K%qD may access %E bytes in a region "
4153 				       "of size %E")
4154 				 : G_ ("%K%qD accessing %E bytes in a region "
4155 				       "of size %E")),
4156 			       exp, func, range[0], size)
4157 		  : warning_n (loc, opt, tree_to_uhwi (range[0]),
4158 			       (maybe
4159 				? G_("%Kmay access %E byte in a region "
4160 				     "of size %E")
4161 				: G_("%Kaccessing %E byte in a region "
4162 				     "of size %E")),
4163 			       (maybe
4164 				? G_("%Kmay access %E bytes in a region "
4165 				     "of size %E")
4166 				: G_("%Kaccessing %E bytes in a region "
4167 				     "of size %E")),
4168 			       exp, range[0], size));
4169       else if (tree_int_cst_sign_bit (range[1]))
4170 	{
4171 	  /* Avoid printing the upper bound if it's invalid.  */
4172 	  warned = (func
4173 		    ? warning_at (loc, opt,
4174 				  (maybe
4175 				   ? G_("%K%qD may access %E or more bytes "
4176 					"in a region of size %E")
4177 				   : G_("%K%qD accessing %E or more bytes "
4178 					"in a region of size %E")),
4179 				  exp, func, range[0], size)
4180 		    : warning_at (loc, opt,
4181 				  (maybe
4182 				   ? G_("%Kmay access %E or more bytes "
4183 					"in a region of size %E")
4184 				   : G_("%Kaccessing %E or more bytes "
4185 					"in a region of size %E")),
4186 				  exp, range[0], size));
4187 	}
4188       else
4189 	warned = (func
4190 		  ? warning_at (loc, opt,
4191 				(maybe
4192 				 ? G_("%K%qD may access between %E and %E "
4193 				      "bytes in a region of size %E")
4194 				 : G_("%K%qD accessing between %E and %E "
4195 				      "bytes in a region of size %E")),
4196 				exp, func, range[0], range[1],
4197 				size)
4198 		  : warning_at (loc, opt,
4199 				(maybe
4200 				 ? G_("%Kmay access between %E and %E bytes "
4201 				      "in a region of size %E")
4202 				 : G_("%Kaccessing between %E and %E bytes "
4203 				      "in a region of size %E")),
4204 				exp, range[0], range[1],
4205 				size));
4206       return warned;
4207     }
4208 
4209   if (write)
4210     {
4211       if (tree_int_cst_equal (range[0], range[1]))
4212 	warned = (func
4213 		  ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4214 			       (maybe
4215 				? G_("%K%qD may write %E byte into a region "
4216 				     "of size %E")
4217 				: G_("%K%qD writing %E byte into a region "
4218 				     "of size %E overflows the destination")),
4219 			       (maybe
4220 				? G_("%K%qD may write %E bytes into a region "
4221 				     "of size %E")
4222 				: G_("%K%qD writing %E bytes into a region "
4223 				     "of size %E overflows the destination")),
4224 			       exp, func, range[0], size)
4225 		  : warning_n (loc, opt, tree_to_uhwi (range[0]),
4226 			       (maybe
4227 				? G_("%Kmay write %E byte into a region "
4228 				     "of size %E")
4229 				: G_("%Kwriting %E byte into a region "
4230 				     "of size %E overflows the destination")),
4231 			       (maybe
4232 				? G_("%Kmay write %E bytes into a region "
4233 				     "of size %E")
4234 				: G_("%Kwriting %E bytes into a region "
4235 				     "of size %E overflows the destination")),
4236 			       exp, range[0], size));
4237       else if (tree_int_cst_sign_bit (range[1]))
4238 	{
4239 	  /* Avoid printing the upper bound if it's invalid.  */
4240 	  warned = (func
4241 		    ? warning_at (loc, opt,
4242 				  (maybe
4243 				   ? G_("%K%qD may write %E or more bytes "
4244 					"into a region of size %E")
4245 				   : G_("%K%qD writing %E or more bytes "
4246 					"into a region of size %E overflows "
4247 					"the destination")),
4248 				  exp, func, range[0], size)
4249 		    : warning_at (loc, opt,
4250 				  (maybe
4251 				   ? G_("%Kmay write %E or more bytes into "
4252 					"a region of size %E")
4253 				   : G_("%Kwriting %E or more bytes into "
4254 					"a region of size %E overflows "
4255 					"the destination")),
4256 				  exp, range[0], size));
4257 	}
4258       else
4259 	warned = (func
4260 		  ? warning_at (loc, opt,
4261 				(maybe
4262 				 ? G_("%K%qD may write between %E and %E bytes "
4263 				      "into a region of size %E")
4264 				 : G_("%K%qD writing between %E and %E bytes "
4265 				      "into a region of size %E overflows "
4266 				      "the destination")),
4267 				exp, func, range[0], range[1],
4268 				size)
4269 		  : warning_at (loc, opt,
4270 				(maybe
4271 				 ? G_("%Kmay write between %E and %E bytes "
4272 				      "into a region of size %E")
4273 				 : G_("%Kwriting between %E and %E bytes "
4274 				      "into a region of size %E overflows "
4275 				      "the destination")),
4276 				exp, range[0], range[1],
4277 				size));
4278       return warned;
4279     }
4280 
4281   if (read)
4282     {
4283       if (tree_int_cst_equal (range[0], range[1]))
4284 	warned = (func
4285 		  ? warning_n (loc, OPT_Wstringop_overread,
4286 			       tree_to_uhwi (range[0]),
4287 			       (maybe
4288 				? G_("%K%qD may read %E byte from a region "
4289 				     "of size %E")
4290 				: G_("%K%qD reading %E byte from a region "
4291 				     "of size %E")),
4292 			       (maybe
4293 				? G_("%K%qD may read %E bytes from a region "
4294 				     "of size %E")
4295 				: G_("%K%qD reading %E bytes from a region "
4296 				     "of size %E")),
4297 			       exp, func, range[0], size)
4298 		  : warning_n (loc, OPT_Wstringop_overread,
4299 			       tree_to_uhwi (range[0]),
4300 			       (maybe
4301 				? G_("%Kmay read %E byte from a region "
4302 				     "of size %E")
4303 				: G_("%Kreading %E byte from a region "
4304 				     "of size %E")),
4305 			       (maybe
4306 				? G_("%Kmay read %E bytes from a region "
4307 				     "of size %E")
4308 				: G_("%Kreading %E bytes from a region "
4309 				     "of size %E")),
4310 			       exp, range[0], size));
4311       else if (tree_int_cst_sign_bit (range[1]))
4312 	{
4313 	  /* Avoid printing the upper bound if it's invalid.  */
4314 	  warned = (func
4315 		    ? warning_at (loc, OPT_Wstringop_overread,
4316 				  (maybe
4317 				   ? G_("%K%qD may read %E or more bytes "
4318 					"from a region of size %E")
4319 				   : G_("%K%qD reading %E or more bytes "
4320 					"from a region of size %E")),
4321 				  exp, func, range[0], size)
4322 		    : warning_at (loc, OPT_Wstringop_overread,
4323 				  (maybe
4324 				   ? G_("%Kmay read %E or more bytes "
4325 					"from a region of size %E")
4326 				   : G_("%Kreading %E or more bytes "
4327 					"from a region of size %E")),
4328 				  exp, range[0], size));
4329 	}
4330       else
4331 	warned = (func
4332 		  ? warning_at (loc, OPT_Wstringop_overread,
4333 				(maybe
4334 				 ? G_("%K%qD may read between %E and %E bytes "
4335 				      "from a region of size %E")
4336 				 : G_("%K%qD reading between %E and %E bytes "
4337 				      "from a region of size %E")),
4338 				exp, func, range[0], range[1], size)
4339 		  : warning_at (loc, OPT_Wstringop_overread,
4340 				(maybe
4341 				 ? G_("%Kmay read between %E and %E bytes "
4342 				      "from a region of size %E")
4343 				 : G_("%Kreading between %E and %E bytes "
4344 				      "from a region of size %E")),
4345 				exp, range[0], range[1], size));
4346 
4347       if (warned)
4348 	TREE_NO_WARNING (exp) = true;
4349 
4350       return warned;
4351     }
4352 
4353   if (tree_int_cst_equal (range[0], range[1]))
4355     warned = (func
4356 	      ? warning_n (loc, OPT_Wstringop_overread,
4357 			   tree_to_uhwi (range[0]),
4358 			   "%K%qD expecting %E byte in a region of size %E",
4359 			   "%K%qD expecting %E bytes in a region of size %E",
4360 			   exp, func, range[0], size)
4361 	      : warning_n (loc, OPT_Wstringop_overread,
4362 			   tree_to_uhwi (range[0]),
4363 			   "%Kexpecting %E byte in a region of size %E",
4364 			   "%Kexpecting %E bytes in a region of size %E",
4365 			   exp, range[0], size));
4366   else if (tree_int_cst_sign_bit (range[1]))
4367     {
4368       /* Avoid printing the upper bound if it's invalid.  */
4369       warned = (func
4370 		? warning_at (loc, OPT_Wstringop_overread,
4371 			      "%K%qD expecting %E or more bytes in a region "
4372 			      "of size %E",
4373 			      exp, func, range[0], size)
4374 		: warning_at (loc, OPT_Wstringop_overread,
4375 			      "%Kexpecting %E or more bytes in a region "
4376 			      "of size %E",
4377 			      exp, range[0], size));
4378     }
4379   else
4380     warned = (func
4381 	      ? warning_at (loc, OPT_Wstringop_overread,
4382 			    "%K%qD expecting between %E and %E bytes in "
4383 			    "a region of size %E",
4384 			    exp, func, range[0], range[1], size)
4385 	      : warning_at (loc, OPT_Wstringop_overread,
4386 			    "%Kexpecting between %E and %E bytes in "
4387 			    "a region of size %E",
4388 			    exp, range[0], range[1], size));
4389 
4390   if (warned)
4391     TREE_NO_WARNING (exp) = true;
4392 
4393   return warned;
4394 }
4395 
4396 /* Issue one inform message describing each target of an access REF.
4397    MODE identifies the kind of access (read, write, or both).  */
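/* The notes produced here accompany the warnings above, e.g.
     "at offset 12 into destination object 'buf' of size 8"
   pointing at the declaration or allocation site of the object.  */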
4398 
4399 void
4400 access_ref::inform_access (access_mode mode) const
4401 {
4402   const access_ref &aref = *this;
4403   if (!aref.ref)
4404     return;
4405 
4406   if (aref.phi ())
4407     {
4408       /* Set MAXREF to refer to the largest object and fill ALL_REFS
4409 	 with data for all objects referenced by the PHI arguments.  */
4410       access_ref maxref;
4411       auto_vec<access_ref> all_refs;
4412       if (!get_ref (&all_refs, &maxref))
4413 	return;
4414 
4415       /* Except for MAXREF, the rest of the arguments' offsets need not
4416 	 reflect one added to the PHI itself.  Determine the latter from
4417 	 MAXREF on which the result is based.  */
4418       const offset_int orng[] =
4419 	{
4420 	  offrng[0] - maxref.offrng[0],
4421 	  wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4422 	};
4423 
4424       /* Add the final PHI's offset to that of each of the arguments
4425 	 and recurse to issue an inform message for it.  */
4426       for (unsigned i = 0; i != all_refs.length (); ++i)
4427 	{
4428 	  /* Skip any PHIs; those could lead to infinite recursion.  */
4429 	  if (all_refs[i].phi ())
4430 	    continue;
4431 
4432 	  all_refs[i].add_offset (orng[0], orng[1]);
4433 	  all_refs[i].inform_access (mode);
4434 	}
4435       return;
4436     }
4437 
4438   /* Convert offset range and avoid including a zero range since it
4439      isn't necessarily meaningful.  */
4440   HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4441   HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4442   HOST_WIDE_INT minoff;
4443   HOST_WIDE_INT maxoff = diff_max;
4444   if (wi::fits_shwi_p (aref.offrng[0]))
4445     minoff = aref.offrng[0].to_shwi ();
4446   else
4447     minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4448 
4449   if (wi::fits_shwi_p (aref.offrng[1]))
4450     maxoff = aref.offrng[1].to_shwi ();
4451 
4452   if (maxoff <= diff_min || maxoff >= diff_max)
4453     /* Avoid mentioning an upper bound that's equal to or in excess
4454        of the maximum of ptrdiff_t.  */
4455     maxoff = minoff;
4456 
4457   /* Convert size range and always include it since all sizes are
4458      meaningful. */
4459   unsigned long long minsize = 0, maxsize = 0;
4460   if (wi::fits_shwi_p (aref.sizrng[0])
4461       && wi::fits_shwi_p (aref.sizrng[1]))
4462     {
4463       minsize = aref.sizrng[0].to_shwi ();
4464       maxsize = aref.sizrng[1].to_shwi ();
4465     }
4466 
4467   /* SIZRNG doesn't necessarily have the same range as the allocation
4468      size determined by gimple_call_alloc_size ().  */
4469   char sizestr[80];
4470   if (minsize == maxsize)
4471     sprintf (sizestr, "%llu", minsize);
4472   else
4473     sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4474 
4475   char offstr[80];
4476   if (minoff == 0
4477       && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4478     offstr[0] = '\0';
4479   else if (minoff == maxoff)
4480     sprintf (offstr, "%lli", (long long) minoff);
4481   else
4482     sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4483 
4484   location_t loc = UNKNOWN_LOCATION;
4485 
4486   tree ref = this->ref;
4487   tree allocfn = NULL_TREE;
4488   if (TREE_CODE (ref) == SSA_NAME)
4489     {
4490       gimple *stmt = SSA_NAME_DEF_STMT (ref);
4491       if (is_gimple_call (stmt))
4492 	{
4493 	  loc = gimple_location (stmt);
4494 	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4495 	    {
4496 	      /* Strip the SSA_NAME suffix from the variable name and
4497 		 recreate an identifier with the VLA's original name.  */
4498 	      ref = gimple_call_lhs (stmt);
4499 	      if (SSA_NAME_IDENTIFIER (ref))
4500 		{
4501 		  ref = SSA_NAME_IDENTIFIER (ref);
4502 		  const char *id = IDENTIFIER_POINTER (ref);
4503 		  size_t len = strcspn (id, ".$");
4504 		  if (!len)
4505 		    len = strlen (id);
4506 		  ref = get_identifier_with_length (id, len);
4507 		}
4508 	    }
4509 	  else
4510 	    {
4511 	      /* Except for VLAs, retrieve the allocation function.  */
4512 	      allocfn = gimple_call_fndecl (stmt);
4513 	      if (!allocfn)
4514 		allocfn = gimple_call_fn (stmt);
4515 	      if (TREE_CODE (allocfn) == SSA_NAME)
4516 		{
4517 		  /* For an ALLOC_CALL via a function pointer make a small
4518 		     effort to determine the destination of the pointer.  */
4519 		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
4520 		  if (gimple_assign_single_p (def))
4521 		    {
4522 		      tree rhs = gimple_assign_rhs1 (def);
4523 		      if (DECL_P (rhs))
4524 			allocfn = rhs;
4525 		      else if (TREE_CODE (rhs) == COMPONENT_REF)
4526 			allocfn = TREE_OPERAND (rhs, 1);
4527 		    }
4528 		}
4529 	    }
4530 	}
4531       else if (gimple_nop_p (stmt))
4532 	/* Handle the PARM_DECL case below.  */
4533 	ref = SSA_NAME_VAR (ref);
4534     }
4535 
4536   if (DECL_P (ref))
4537     loc = DECL_SOURCE_LOCATION (ref);
4538   else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4539     loc = EXPR_LOCATION (ref);
4540   else if (TREE_CODE (ref) != IDENTIFIER_NODE
4541 	   && TREE_CODE (ref) != SSA_NAME)
4542     return;
4543 
4544   if (mode == access_read_write || mode == access_write_only)
4545     {
4546       if (allocfn == NULL_TREE)
4547 	{
4548 	  if (*offstr)
4549 	    inform (loc, "at offset %s into destination object %qE of size %s",
4550 		    offstr, ref, sizestr);
4551 	  else
4552 	    inform (loc, "destination object %qE of size %s", ref, sizestr);
4553 	  return;
4554 	}
4555 
4556       if (*offstr)
4557 	inform (loc,
4558 		"at offset %s into destination object of size %s "
4559 		"allocated by %qE", offstr, sizestr, allocfn);
4560       else
4561 	inform (loc, "destination object of size %s allocated by %qE",
4562 		sizestr, allocfn);
4563       return;
4564     }
4565 
4566   if (allocfn == NULL_TREE)
4567     {
4568       if (*offstr)
4569 	inform (loc, "at offset %s into source object %qE of size %s",
4570 		offstr, ref, sizestr);
4571       else
4572 	inform (loc, "source object %qE of size %s", ref, sizestr);
4573 
4574       return;
4575     }
4576 
4577   if (*offstr)
4578     inform (loc,
4579 	    "at offset %s into source object of size %s allocated by %qE",
4580 	    offstr, sizestr, allocfn);
4581   else
4582     inform (loc, "source object of size %s allocated by %qE",
4583 	    sizestr, allocfn);
4584 }
4585 
4586 /* Helper to set RANGE to the range of BOUND if BOUND is nonnull,
4587    clamped by BNDRNG if that is nonnull and valid.  */
4588 
4589 static void
4590 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4591 {
4592   if (bound)
4593     get_size_range (bound, range);
4594 
4595   if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4596     return;
4597 
4598   if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4599     {
4600       offset_int r[] =
4601 	{ wi::to_offset (range[0]), wi::to_offset (range[1]) };
4602       if (r[0] < bndrng[0])
4603 	range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4604       if (bndrng[1] < r[1])
4605 	range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4606     }
4607   else
4608     {
4609       range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4610       range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4611     }
4612 }
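
/* A hypothetical illustration of the bounding above (not from the
   sources): if BOUND is an SSA_NAME with the value range [0, 100] and
   BNDRNG is [8, 32] (e.g., from an attribute access bound), RANGE is
   tightened to [8, 32]; when BNDRNG is null or invalid the BOUND
   range [0, 100] is returned unchanged.  */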
4613 
4614 /* Try to verify that the sizes and lengths of the arguments to a string
4615    manipulation function given by EXP are within valid bounds and that
4616    the operation does not lead to buffer overflow or read past the end.
4617    Arguments other than EXP may be null.  When non-null, the arguments
4618    have the following meaning:
4619    DST is the destination of a copy call or NULL otherwise.
4620    SRC is the source of a copy call or NULL otherwise.
4621    DSTWRITE is the number of bytes written into the destination obtained
4622    from the user-supplied size argument to the function (such as in
4623    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4624    MAXREAD is the user-supplied bound on the length of the source sequence
4625    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
4626    of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
4627    SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4628    expression EXP is a string function call (as opposed to a memory call
4629    like memcpy).  As an exception, SRCSTR can also be an integer denoting
4630    the precomputed size of the source string or object (for functions like
4631    memcpy).
4632    DSTSIZE is the size of the destination object.
4633 
4634    When DSTWRITE is null, the source length is checked to verify that it
4635    doesn't exceed SIZE_MAX.
4636 
4637    WRITE is true for write accesses, READ is true for reads.  Both are
4638    false for simple size checks in calls to functions that neither read
4639    from nor write to the region.
4640 
4641    When nonnull, PAD points to a more detailed description of the access.
4642 
4643    If the call is successfully verified as safe return true, otherwise
4644    return false.  */
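
/* As a purely illustrative example of the parameters above (D and S
   are hypothetical), a call such as

     char d[4];
     strncpy (d, s, 8);

   is checked with DST == D, SRC == S, DSTWRITE == 8, MAXREAD == null,
   SRCSTR == S, and DSTSIZE == 4, and is diagnosed because 8 bytes
   are written into the 4-byte destination.  */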
4645 
4646 bool
4647 check_access (tree exp, tree dstwrite,
4648 	      tree maxread, tree srcstr, tree dstsize,
4649 	      access_mode mode, const access_data *pad /* = NULL */)
4650 {
4651   /* The size of the largest object is half the address space, or
4652      PTRDIFF_MAX.  (This is way too permissive.)  */
4653   tree maxobjsize = max_object_size ();
4654 
4655   /* Either the approximate/minimum length of the source string for
4656      string functions or the size of the source object for raw memory
4657      functions.  */
4658   tree slen = NULL_TREE;
4659 
4660   /* The range of the access in bytes; first set to the write access
4661      for functions that write and then read for those that also (or
4662      just) read.  */
4663   tree range[2] = { NULL_TREE, NULL_TREE };
4664 
4665   /* Set to true when the exact number of bytes written by a string
4666      function like strcpy is not known and the only thing that is
4667      known is that it must be at least one (for the terminating nul).  */
4668   bool at_least_one = false;
4669   if (srcstr)
4670     {
4671       /* SRCSTR is normally a pointer to a string but as a special case
4672 	 it can be an integer denoting the length of a string.  */
4673       if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4674 	{
4675 	  if (!check_nul_terminated_array (exp, srcstr, maxread))
4676 	    return false;
4677 	  /* Try to determine the range of lengths the source string
4678 	     refers to.  If it can be determined and is less than
4679 	     the upper bound given by MAXREAD add one to it for
4680 	     the terminating nul.  Otherwise, set it to one for
4681 	     the same reason, or to MAXREAD as appropriate.  */
4682 	  c_strlen_data lendata = { };
4683 	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4684 	  range[0] = lendata.minlen;
4685 	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4686 	  if (range[0]
4687 	      && TREE_CODE (range[0]) == INTEGER_CST
4688 	      && TREE_CODE (range[1]) == INTEGER_CST
4689 	      && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4690 	    {
4691 	      if (maxread && tree_int_cst_le (maxread, range[0]))
4692 		range[0] = range[1] = maxread;
4693 	      else
4694 		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4695 					range[0], size_one_node);
4696 
4697 	      if (maxread && tree_int_cst_le (maxread, range[1]))
4698 		range[1] = maxread;
4699 	      else if (!integer_all_onesp (range[1]))
4700 		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4701 					range[1], size_one_node);
4702 
4703 	      slen = range[0];
4704 	    }
4705 	  else
4706 	    {
4707 	      at_least_one = true;
4708 	      slen = size_one_node;
4709 	    }
4710 	}
4711       else
4712 	slen = srcstr;
4713     }
4714 
4715   if (!dstwrite && !maxread)
4716     {
4717       /* When the only available piece of data is the object size
4718 	 there is nothing to do.  */
4719       if (!slen)
4720 	return true;
4721 
4722       /* Otherwise, when the length of the source sequence is known
4723 	 (as with strlen), set DSTWRITE to it.  */
4724       if (!range[0])
4725 	dstwrite = slen;
4726     }
4727 
4728   if (!dstsize)
4729     dstsize = maxobjsize;
4730 
4731   /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4732      if valid.  */
4733   get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4734 
4735   tree func = get_callee_fndecl (exp);
4736   /* Read vs write access by built-ins can be determined from the const
4737      qualifiers on the pointer argument.  In the absence of attribute
4738      access, non-const qualified pointer arguments to user-defined
4739      functions are assumed to both read and write the objects.  */
4740   const bool builtin = func ? fndecl_built_in_p (func) : false;
4741 
4742   /* First check the number of bytes to be written against the maximum
4743      object size.  */
4744   if (range[0]
4745       && TREE_CODE (range[0]) == INTEGER_CST
4746       && tree_int_cst_lt (maxobjsize, range[0]))
4747     {
4748       location_t loc = tree_inlined_location (exp);
4749       maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4750 			    NULL_TREE, pad);
4751       return false;
4752     }
4753 
4754   /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4755      constant, and in range of unsigned HOST_WIDE_INT.  */
4756   bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4757 
4758   /* Next check the number of bytes to be written against the destination
4759      object size.  */
4760   if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4761     {
4762       if (range[0]
4763 	  && TREE_CODE (range[0]) == INTEGER_CST
4764 	  && ((tree_fits_uhwi_p (dstsize)
4765 	       && tree_int_cst_lt (dstsize, range[0]))
4766 	      || (dstwrite
4767 		  && tree_fits_uhwi_p (dstwrite)
4768 		  && tree_int_cst_lt (dstwrite, range[0]))))
4769 	{
4770 	  if (TREE_NO_WARNING (exp)
4771 	      || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4772 	    return false;
4773 
4774 	  location_t loc = tree_inlined_location (exp);
4775 	  bool warned = false;
4776 	  if (dstwrite == slen && at_least_one)
4777 	    {
4778 	      /* This is a call to strcpy with a destination of 0 size
4779 		 and a source of unknown length.  The call will write
4780 		 at least one byte past the end of the destination.  */
4781 	      warned = (func
4782 			? warning_at (loc, OPT_Wstringop_overflow_,
4783 				      "%K%qD writing %E or more bytes into "
4784 				      "a region of size %E overflows "
4785 				      "the destination",
4786 				      exp, func, range[0], dstsize)
4787 			: warning_at (loc, OPT_Wstringop_overflow_,
4788 				      "%Kwriting %E or more bytes into "
4789 				      "a region of size %E overflows "
4790 				      "the destination",
4791 				      exp, range[0], dstsize));
4792 	    }
4793 	  else
4794 	    {
4795 	      const bool read
4796 		= mode == access_read_only || mode == access_read_write;
4797 	      const bool write
4798 		= mode == access_write_only || mode == access_read_write;
4799 	      const bool maybe = pad && pad->dst.parmarray;
4800 	      warned = warn_for_access (loc, func, exp,
4801 					OPT_Wstringop_overflow_,
4802 					range, dstsize,
4803 					write, read && !builtin, maybe);
4804 	    }
4805 
4806 	  if (warned)
4807 	    {
4808 	      TREE_NO_WARNING (exp) = true;
4809 	      if (pad)
4810 		pad->dst.inform_access (pad->mode);
4811 	    }
4812 
4813 	  /* Return error when an overflow has been detected.  */
4814 	  return false;
4815 	}
4816     }
4817 
4818   /* Check the maximum length of the source sequence against the size
4819      of the destination object if known, or against the maximum size
4820      of an object.  */
4821   if (maxread)
4822     {
4823       /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4824 	 PAD is nonnull and BNDRNG is valid.  */
4825       get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4826 
4827       location_t loc = tree_inlined_location (exp);
4828       tree size = dstsize;
4829       if (pad && pad->mode == access_read_only)
4830 	size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4831 
4832       if (range[0] && maxread && tree_fits_uhwi_p (size))
4833 	{
4834 	  if (tree_int_cst_lt (maxobjsize, range[0]))
4835 	    {
4836 	      maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4837 				    range, size, pad);
4838 	      return false;
4839 	    }
4840 
4841 	  if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4842 	    {
4843 	      int opt = (dstwrite || mode != access_read_only
4844 			 ? OPT_Wstringop_overflow_
4845 			 : OPT_Wstringop_overread);
4846 	      maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4847 	      return false;
4848 	    }
4849 	}
4850 
4851       maybe_warn_nonstring_arg (func, exp);
4852     }
4853 
4854   /* Check for reading past the end of SRC.  */
4855   bool overread = (slen
4856 		   && slen == srcstr
4857 		   && dstwrite
4858 		   && range[0]
4859 		   && TREE_CODE (slen) == INTEGER_CST
4860 		   && tree_int_cst_lt (slen, range[0]));
4861   /* If none is determined try to get a better answer based on the details
4862      in PAD.  */
4863   if (!overread
4864       && pad
4865       && pad->src.sizrng[1] >= 0
4866       && pad->src.offrng[0] >= 0
4867       && (pad->src.offrng[1] < 0
4868 	  || pad->src.offrng[0] <= pad->src.offrng[1]))
4869     {
4870       /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4871 	 PAD is nonnull and BNDRNG is valid.  */
4872       get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4873       /* Set OVERREAD for reads starting just past the end of an object.  */
4874       overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4875       range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4876       slen = size_zero_node;
4877     }
4878 
4879   if (overread)
4880     {
4881       if (TREE_NO_WARNING (exp)
4882 	  || (srcstr && TREE_NO_WARNING (srcstr))
4883 	  || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4884 	return false;
4885 
4886       location_t loc = tree_inlined_location (exp);
4887       const bool read
4888 	= mode == access_read_only || mode == access_read_write;
4889       const bool maybe = pad && pad->dst.parmarray;
4890       if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4891 			   slen, false, read, maybe))
4892 	{
4893 	  TREE_NO_WARNING (exp) = true;
4894 	  if (pad)
4895 	    pad->src.inform_access (access_read_only);
4896 	}
4897       return false;
4898     }
4899 
4900   return true;
4901 }
4902 
4903 /* A convenience wrapper for check_access above to check access
4904    by a read-only function like puts.  */
4905 
4906 static bool
4907 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4908 		   int ost /* = 1 */)
4909 {
4910   if (!warn_stringop_overread)
4911     return true;
4912 
4913   if (bound && !useless_type_conversion_p (size_type_node, TREE_TYPE (bound)))
4914     bound = fold_convert (size_type_node, bound);
4915   access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4916   compute_objsize (src, ost, &data.src);
4917   return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4918 		       /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4919 		       &data);
4920 }
4921 
4922 /* If STMT is a call to an allocation function, returns the constant
4923    maximum size of the object allocated by the call represented as
4924    sizetype.  If nonnull, sets RNG1[] to the range of the size.
4925    When nonnull, uses RVALS for range information, otherwise calls
4926    get_range_info to get it.
4927    Returns null when STMT is not a call to a valid allocation function.  */
4928 
4929 tree
4930 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4931 			range_query * /* = NULL */)
4932 {
4933   if (!stmt || !is_gimple_call (stmt))
4934     return NULL_TREE;
4935 
4936   tree allocfntype;
4937   if (tree fndecl = gimple_call_fndecl (stmt))
4938     allocfntype = TREE_TYPE (fndecl);
4939   else
4940     allocfntype = gimple_call_fntype (stmt);
4941 
4942   if (!allocfntype)
4943     return NULL_TREE;
4944 
4945   unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4946   tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4947   if (!at)
4948     {
4949       if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4950 	return NULL_TREE;
4951 
4952       argidx1 = 0;
4953     }
4954 
4955   unsigned nargs = gimple_call_num_args (stmt);
4956 
4957   if (argidx1 == UINT_MAX)
4958     {
4959       tree atval = TREE_VALUE (at);
4960       if (!atval)
4961 	return NULL_TREE;
4962 
4963       argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4964       if (nargs <= argidx1)
4965 	return NULL_TREE;
4966 
4967       atval = TREE_CHAIN (atval);
4968       if (atval)
4969 	{
4970 	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4971 	  if (nargs <= argidx2)
4972 	    return NULL_TREE;
4973 	}
4974     }
4975 
4976   tree size = gimple_call_arg (stmt, argidx1);
4977 
4978   wide_int rng1_buf[2];
4979   /* If RNG1 is not set, use the buffer.  */
4980   if (!rng1)
4981     rng1 = rng1_buf;
4982 
4983   /* Use maximum precision to avoid overflow below.  */
4984   const int prec = ADDR_MAX_PRECISION;
4985 
4986   {
4987     tree r[2];
4988     /* Determine the largest valid range size, including zero.  */
4989     if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4990       return NULL_TREE;
4991     rng1[0] = wi::to_wide (r[0], prec);
4992     rng1[1] = wi::to_wide (r[1], prec);
4993   }
4994 
4995   if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4996     return fold_convert (sizetype, size);
4997 
4998   /* To handle ranges do the math in wide_int and return the product
4999      of the upper bounds as a constant.  Ignore anti-ranges.  */
5000   tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
5001   wide_int rng2[2];
5002   {
5003     tree r[2];
5004     /* As above, use the full non-negative range on failure.  */
5005     if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5006       return NULL_TREE;
5007     rng2[0] = wi::to_wide (r[0], prec);
5008     rng2[1] = wi::to_wide (r[1], prec);
5009   }
5010 
5011   /* Compute products of both bounds for the caller but return the lesser
5012      of SIZE_MAX and the product of the upper bounds as a constant.  */
5013   rng1[0] = rng1[0] * rng2[0];
5014   rng1[1] = rng1[1] * rng2[1];
5015 
5016   const tree size_max = TYPE_MAX_VALUE (sizetype);
5017   if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5018     {
5019       rng1[1] = wi::to_wide (size_max, prec);
5020       return size_max;
5021     }
5022 
5023   return wide_int_to_tree (sizetype, rng1[1]);
5024 }
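
/* A minimal sketch of the above (MY_CALLOC is a hypothetical
   user-defined allocator, not a GCC API):

     __attribute__ ((alloc_size (1, 2)))
     void *my_calloc (size_t, size_t);
     void *p = my_calloc (n, 4);   // N's range known to be [2, 8]

   ARGIDX1 and ARGIDX2 come from the attribute, RNG1 is set to the
   product of the bounds, [8, 32], and 32 is returned as a sizetype
   constant.  */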
5025 
5026 /* For an access to an object referenced by the function parameter PTR
5027    of pointer type, set RNG[] to the range of sizes of the object
5028    obtained from the attribute access specification for the current function.
5029    Set STATIC_ARRAY if the array parameter has been declared [static].
5030    Return the function parameter on success and null otherwise.  */
5031 
5032 tree
5033 gimple_parm_array_size (tree ptr, wide_int rng[2],
5034 			bool *static_array /* = NULL */)
5035 {
5036   /* For a function argument try to determine the byte size of the array
5037      from the current function declaration (e.g., attribute access or
5038      related).  */
5039   tree var = SSA_NAME_VAR (ptr);
5040   if (TREE_CODE (var) != PARM_DECL)
5041     return NULL_TREE;
5042 
5043   const unsigned prec = TYPE_PRECISION (sizetype);
5044 
5045   rdwr_map rdwr_idx;
5046   attr_access *access = get_parm_access (rdwr_idx, var);
5047   if (!access)
5048     return NULL_TREE;
5049 
5050   if (access->sizarg != UINT_MAX)
5051     {
5052       /* TODO: Try to extract the range from the argument based on
5053 	 those of subsequent assertions or based on known calls to
5054 	 the current function.  */
5055       return NULL_TREE;
5056     }
5057 
5058   if (!access->minsize)
5059     return NULL_TREE;
5060 
5061   /* Only consider ordinary array bound at level 2 (or above if it's
5062      ever added).  */
5063   if (warn_array_parameter < 2 && !access->static_p)
5064     return NULL_TREE;
5065 
5066   if (static_array)
5067     *static_array = access->static_p;
5068 
5069   rng[0] = wi::zero (prec);
5070   rng[1] = wi::uhwi (access->minsize, prec);
5071   /* Multiply the array bound encoded in the attribute by the size
5072      of what the pointer argument to which it decays points to.  */
5073   tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5074   tree size = TYPE_SIZE_UNIT (eltype);
5075   if (!size || TREE_CODE (size) != INTEGER_CST)
5076     return NULL_TREE;
5077 
5078   rng[1] *= wi::to_wide (size, prec);
5079   return var;
5080 }
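
/* For example (an illustration assuming 4-byte int, not taken from
   the sources): for the parameter A in

     void f (int a[static 8]);

   the function sets RNG to [0, 32] (the bound 8 scaled by
   sizeof (int)), sets *STATIC_ARRAY, and returns the PARM_DECL
   for A.  */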
5081 
5082 /* Wrapper around the wide_int overload of get_range that accepts
5083    offset_int instead.  For middle end expressions it returns the same
5084    result.  For a subset of nonconstant expressions emitted by the front
5085    end it determines a more precise range than would be possible otherwise.  */
5086 
5087 static bool
5088 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5089 {
5090   offset_int add = 0;
5091   if (TREE_CODE (x) == PLUS_EXPR)
5092     {
5093       /* Handle constant offsets in pointer addition expressions seen
5094 	 in the front end IL.  */
5095       tree op = TREE_OPERAND (x, 1);
5096       if (TREE_CODE (op) == INTEGER_CST)
5097 	{
5098 	  op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5099 	  add = wi::to_offset (op);
5100 	  x = TREE_OPERAND (x, 0);
5101 	}
5102     }
5103 
5104   if (TREE_CODE (x) == NOP_EXPR)
5105     /* Also handle conversions to sizetype seen in the front end IL.  */
5106     x = TREE_OPERAND (x, 0);
5107 
5108   tree type = TREE_TYPE (x);
5109   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5110     return false;
5111 
5112   if (TREE_CODE (x) != INTEGER_CST
5113       && TREE_CODE (x) != SSA_NAME)
5114     {
5115       if (TYPE_UNSIGNED (type)
5116 	  && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5117 	type = signed_type_for (type);
5118 
5119       r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5120       r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5121       return true;
5122     }
5123 
5124   wide_int wr[2];
5125   if (!get_range (x, stmt, wr, rvals))
5126     return false;
5127 
5128   signop sgn = SIGNED;
5129   /* Only convert signed integers or unsigned sizetype to a signed
5130      offset and avoid converting large positive values in narrower
5131      types to negative offsets.  */
5132   if (TYPE_UNSIGNED (type)
5133       && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5134     sgn = UNSIGNED;
5135 
5136   r[0] = offset_int::from (wr[0], sgn);
5137   r[1] = offset_int::from (wr[1], sgn);
5138   return true;
5139 }
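
/* As a hypothetical example of the front end IL handling above: for
   the expression (sizetype) i + 8, with I a signed int and no range
   information available, R is set to [INT_MIN + 8, INT_MAX + 8]
   instead of the call failing outright.  */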
5140 
5141 /* Return the argument that the call STMT to a built-in function returns
5142    or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
5143    from the argument reflected in the value returned by the built-in if it
5144    can be determined, otherwise to 0 and HWI_M1U respectively.  */
5145 
5146 static tree
5147 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5148 			  range_query *rvals)
5149 {
5150   {
5151     /* Check for attribute fn spec to see if the function returns one
5152        of its arguments.  */
5153     attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
5154     unsigned int argno;
5155     if (fnspec.returns_arg (&argno))
5156       {
5157 	offrng[0] = offrng[1] = 0;
5158 	return gimple_call_arg (stmt, argno);
5159       }
5160   }
5161 
5162   if (gimple_call_num_args (stmt) < 1)
5163     return NULL_TREE;
5164 
5165   tree fn = gimple_call_fndecl (stmt);
5166   if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5167     {
5168       /* See if this is a call to placement new.  */
5169       if (!fn
5170 	  || !DECL_IS_OPERATOR_NEW_P (fn)
5171 	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
5172 	return NULL_TREE;
5173 
5174       /* Check the mangling, keeping in mind that operator new takes
5175 	 a size_t which could be unsigned int or unsigned long.  */
5176       tree fname = DECL_ASSEMBLER_NAME (fn);
5177       if (!id_equal (fname, "_ZnwjPv")       // ordinary form
5178 	  && !id_equal (fname, "_ZnwmPv")    // ordinary form
5179 	  && !id_equal (fname, "_ZnajPv")    // array form
5180 	  && !id_equal (fname, "_ZnamPv"))   // array form
5181 	return NULL_TREE;
5182 
5183       if (gimple_call_num_args (stmt) != 2)
5184 	return NULL_TREE;
5185 
5186       offrng[0] = offrng[1] = 0;
5187       return gimple_call_arg (stmt, 1);
5188     }
5189 
5190   switch (DECL_FUNCTION_CODE (fn))
5191     {
5192     case BUILT_IN_MEMCPY:
5193     case BUILT_IN_MEMCPY_CHK:
5194     case BUILT_IN_MEMMOVE:
5195     case BUILT_IN_MEMMOVE_CHK:
5196     case BUILT_IN_MEMSET:
5197     case BUILT_IN_STPCPY:
5198     case BUILT_IN_STPCPY_CHK:
5199     case BUILT_IN_STPNCPY:
5200     case BUILT_IN_STPNCPY_CHK:
5201     case BUILT_IN_STRCAT:
5202     case BUILT_IN_STRCAT_CHK:
5203     case BUILT_IN_STRCPY:
5204     case BUILT_IN_STRCPY_CHK:
5205     case BUILT_IN_STRNCAT:
5206     case BUILT_IN_STRNCAT_CHK:
5207     case BUILT_IN_STRNCPY:
5208     case BUILT_IN_STRNCPY_CHK:
5209       offrng[0] = offrng[1] = 0;
5210       return gimple_call_arg (stmt, 0);
5211 
5212     case BUILT_IN_MEMPCPY:
5213     case BUILT_IN_MEMPCPY_CHK:
5214       {
5215 	tree off = gimple_call_arg (stmt, 2);
5216 	if (!get_offset_range (off, stmt, offrng, rvals))
5217 	  {
5218 	    offrng[0] = 0;
5219 	    offrng[1] = HOST_WIDE_INT_M1U;
5220 	  }
5221 	return gimple_call_arg (stmt, 0);
5222       }
5223 
5224     case BUILT_IN_MEMCHR:
5225       {
5226 	tree off = gimple_call_arg (stmt, 2);
5227 	if (get_offset_range (off, stmt, offrng, rvals))
5228 	  offrng[0] = 0;
5229 	else
5230 	  {
5231 	    offrng[0] = 0;
5232 	    offrng[1] = HOST_WIDE_INT_M1U;
5233 	  }
5234 	return gimple_call_arg (stmt, 0);
5235       }
5236 
5237     case BUILT_IN_STRCHR:
5238     case BUILT_IN_STRRCHR:
5239     case BUILT_IN_STRSTR:
5240       {
5241 	offrng[0] = 0;
5242 	offrng[1] = HOST_WIDE_INT_M1U;
5243       }
5244       return gimple_call_arg (stmt, 0);
5245 
5246     default:
5247       break;
5248     }
5249 
5250   return NULL_TREE;
5251 }
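
/* For instance (illustrative only), for a statement such as

     p_1 = memchr (q_2, c_3, n_4);

   the function returns Q_2 and sets OFFRNG to [0, N], N being the
   upper bound of N_4's range, since memchr may return a pointer to
   any of the first N bytes of its argument.  */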
5252 
5253 /* A helper of compute_objsize_r() to determine the size from an assignment
5254    statement STMT whose RHS is either a MIN_EXPR or a MAX_EXPR.  */
5255 
5256 static bool
5257 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5258 		     ssa_name_limit_t &snlim, pointer_query *qry)
5259 {
5260   tree_code code = gimple_assign_rhs_code (stmt);
5261 
5262   tree ptr = gimple_assign_rhs1 (stmt);
5263 
5264   /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5265      Determine the size/offset of each and use the one with more or less
5266      space remaining, respectively.  If either fails, use the information
5267      determined from the other instead, adjusted up or down as appropriate
5268      for the expression.  */
5269   access_ref aref[2] = { *pref, *pref };
5270   if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5271     {
5272       aref[0].base0 = false;
5273       aref[0].offrng[0] = aref[0].offrng[1] = 0;
5274       aref[0].add_max_offset ();
5275       aref[0].set_max_size_range ();
5276     }
5277 
5278   ptr = gimple_assign_rhs2 (stmt);
5279   if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5280     {
5281       aref[1].base0 = false;
5282       aref[1].offrng[0] = aref[1].offrng[1] = 0;
5283       aref[1].add_max_offset ();
5284       aref[1].set_max_size_range ();
5285     }
5286 
5287   if (!aref[0].ref && !aref[1].ref)
5288     /* Fail if the identity of neither argument could be determined.  */
5289     return false;
5290 
5291   bool i0 = false;
5292   if (aref[0].ref && aref[0].base0)
5293     {
5294       if (aref[1].ref && aref[1].base0)
5295 	{
5296 	  /* If the object referenced by both arguments has been determined
5297 	     set *PREF to the one with more or less space remaining, whichever
5298 	     is appropriate for CODE.
5299 	     TODO: Indicate when the objects are distinct so it can be
5300 	     diagnosed.  */
5301 	  i0 = code == MAX_EXPR;
5302 	  const bool i1 = !i0;
5303 
5304 	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5305 	    *pref = aref[i1];
5306 	  else
5307 	    *pref = aref[i0];
5308 	  return true;
5309 	}
5310 
5311       /* If only the object referenced by one of the arguments could be
5312 	 determined, use it and...  */
5313       *pref = aref[0];
5314       i0 = true;
5315     }
5316   else
5317     *pref = aref[1];
5318 
5319   const bool i1 = !i0;
5320   /* ...see if the offset obtained from the other pointer can be used
5321      to tighten up the bound on the offset obtained from the first.  */
5322   if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5323       || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5324     {
5325       pref->offrng[0] = aref[i0].offrng[0];
5326       pref->offrng[1] = aref[i0].offrng[1];
5327     }
5328   return true;
5329 }
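
/* A small illustration of the logic above (hypothetical GIMPLE):
   given char a[16] and

     q_1 = MIN_EXPR <&a[4], &a[8]>;

   both operands refer to A, so for MIN_EXPR *PREF is set to the
   operand with more space remaining, i.e., offset 4 with 12 bytes
   left.  */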
5330 
5331 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
5332    AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
5333    on success and false on failure.  */
5334 
5335 static bool
5336 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
5337 		  ssa_name_limit_t &snlim, pointer_query *qry)
5338 {
5339   gcc_assert (TREE_CODE (aref) == ARRAY_REF);
5340 
5341   ++pref->deref;
5342 
5343   tree arefop = TREE_OPERAND (aref, 0);
5344   tree reftype = TREE_TYPE (arefop);
5345   if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5346     /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
5347        of known bound.  */
5348     return false;
5349 
5350   if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
5351     return false;
5352 
5353   offset_int orng[2];
5354   tree off = pref->eval (TREE_OPERAND (aref, 1));
5355   range_query *const rvals = qry ? qry->rvals : NULL;
5356   if (!get_offset_range (off, NULL, orng, rvals))
5357     {
5358       /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
5359       orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5360       orng[0] = -orng[1] - 1;
5361     }
5362 
5363   /* Convert the array index range determined above to a byte
5364      offset.  */
5365   tree lowbnd = array_ref_low_bound (aref);
5366   if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5367     {
5368       /* Adjust the index by the low bound of the array domain
5369 	 (normally zero but 1 in Fortran).  */
5370       unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5371       orng[0] -= lb;
5372       orng[1] -= lb;
5373     }
5374 
5375   tree eltype = TREE_TYPE (aref);
5376   tree tpsize = TYPE_SIZE_UNIT (eltype);
5377   if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5378     {
5379       pref->add_max_offset ();
5380       return true;
5381     }
5382 
5383   offset_int sz = wi::to_offset (tpsize);
5384   orng[0] *= sz;
5385   orng[1] *= sz;
5386 
5387   if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5388     {
5389       /* Except for the permissive raw memory functions which use
5390 	 the size of the whole object determined above, use the size
5391 	 of the referenced array.  Because the overall offset is from
5392 	 the beginning of the complete array object, add this overall
5393 	 offset to the size of the array.  */
5394       offset_int sizrng[2] =
5395 	{
5396 	 pref->offrng[0] + orng[0] + sz,
5397 	 pref->offrng[1] + orng[1] + sz
5398 	};
5399       if (sizrng[1] < sizrng[0])
5400 	std::swap (sizrng[0], sizrng[1]);
5401       if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5402 	pref->sizrng[0] = sizrng[0];
5403       if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5404 	pref->sizrng[1] = sizrng[1];
5405     }
5406 
5407   pref->add_offset (orng[0], orng[1]);
5408   return true;
5409 }
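
/* For example (assuming 4-byte int; purely illustrative): for
   int a[4] and the ARRAY_REF a[i] with I's range [1, 2], ORNG
   becomes the byte offset range [4, 8] (the index range scaled by
   the element size) and is added to *PREF's offset.  */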
5410 
5411 /* A helper of compute_objsize_r() to determine the size from MEM_REF
5412    MREF.  Return true on success and false on failure.  */
5413 
5414 static bool
5415 handle_mem_ref (tree mref, int ostype, access_ref *pref,
5416 		ssa_name_limit_t &snlim, pointer_query *qry)
5417 {
5418   gcc_assert (TREE_CODE (mref) == MEM_REF);
5419 
5420   ++pref->deref;
5421 
5422   if (VECTOR_TYPE_P (TREE_TYPE (mref)))
5423     {
5424       /* Hack: Give up for MEM_REFs of vector types; those may be
5425 	 synthesized from multiple assignments to consecutive data
5426 	 members (see PR 93200 and 96963).
5427 	 FIXME: Vectorized assignments should only be present after
5428 	 vectorization so this hack is only necessary after it has
5429 	 run and could be avoided in calls from prior passes (e.g.,
5430 	 tree-ssa-strlen.c).
5431 	 FIXME: Deal with this more generally, e.g., by marking up
5432 	 such MEM_REFs at the time they're created.  */
5433       return false;
5434     }
5435 
5436   tree mrefop = TREE_OPERAND (mref, 0);
5437   if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
5438     return false;
5439 
5440   offset_int orng[2];
5441   tree off = pref->eval (TREE_OPERAND (mref, 1));
5442   range_query *const rvals = qry ? qry->rvals : NULL;
5443   if (!get_offset_range (off, NULL, orng, rvals))
5444     {
5445       /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
5446       orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5447       orng[0] = -orng[1] - 1;
5448     }
5449 
5450   pref->add_offset (orng[0], orng[1]);
5451   return true;
5452 }
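
/* As a trivial illustration (the notation is informal, not valid
   GIMPLE): for a dereference such as MEM[(char *)p_1 + 16B] the
   constant offset range [16, 16] is added to the *PREF computed
   for P_1.  */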
5453 
5454 /* Helper to compute the size of the object referenced by the PTR
5455    expression which must have pointer type, using Object Size type
5456    OSTYPE (only the least significant 2 bits are used).
5457    On success, sets PREF->REF to the DECL of the referenced object
5458    if it's unique, otherwise to null, PREF->OFFRNG to the range of
5459    offsets into it, and PREF->SIZRNG to the range of sizes of
5460    the object(s).
5461    SNLIM is used to avoid visiting the same PHI operand multiple
5462    times, and, when nonnull, RVALS to determine range information.
5463    Returns true on success, false when a meaningful size (or range)
5464    cannot be determined.
5465 
5466    The function is intended for diagnostics and should not be used
5467    to influence code generation or optimization.  */
5468 
5469 static bool
5470 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5471 		   ssa_name_limit_t &snlim, pointer_query *qry)
5472 {
5473   STRIP_NOPS (ptr);
5474 
5475   const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5476   if (addr)
5477     {
5478       --pref->deref;
5479       ptr = TREE_OPERAND (ptr, 0);
5480     }
5481 
5482   if (DECL_P (ptr))
5483     {
5484       pref->ref = ptr;
5485 
5486       if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5487 	{
5488 	  /* Set the maximum size if the reference is to the pointer
5489 	     itself (as opposed to what it points to), and clear
5490 	     BASE0 since the offset isn't necessarily zero-based.  */
5491 	  pref->set_max_size_range ();
5492 	  pref->base0 = false;
5493 	  return true;
5494 	}
5495 
5496       if (tree size = decl_init_size (ptr, false))
5497 	if (TREE_CODE (size) == INTEGER_CST)
5498 	  {
5499 	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5500 	    return true;
5501 	  }
5502 
5503       pref->set_max_size_range ();
5504       return true;
5505     }
5506 
5507   const tree_code code = TREE_CODE (ptr);
5508   range_query *const rvals = qry ? qry->rvals : NULL;
5509 
5510   if (code == BIT_FIELD_REF)
5511     {
5512       tree ref = TREE_OPERAND (ptr, 0);
5513       if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5514 	return false;
5515 
5516       offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5517       pref->add_offset (off / BITS_PER_UNIT);
5518       return true;
5519     }
5520 
5521   if (code == COMPONENT_REF)
5522     {
5523       tree ref = TREE_OPERAND (ptr, 0);
5524       if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5525 	/* In accesses through union types consider the entire unions
5526 	   rather than just their members.  */
5527 	ostype = 0;
5528       tree field = TREE_OPERAND (ptr, 1);
5529 
5530       if (ostype == 0)
5531 	{
5532 	  /* In OSTYPE zero (for raw memory functions like memcpy), use
5533 	     the maximum size instead if the identity of the enclosing
5534 	     object cannot be determined.  */
5535 	  if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5536 	    return false;
5537 
5538 	  /* Otherwise, use the size of the enclosing object and add
5539 	     the offset of the member to the offset computed so far.  */
5540 	  tree offset = byte_position (field);
5541 	  if (TREE_CODE (offset) == INTEGER_CST)
5542 	    pref->add_offset (wi::to_offset (offset));
5543 	  else
5544 	    pref->add_max_offset ();
5545 
5546 	  if (!pref->ref)
5547 	    /* REF may have been already set to an SSA_NAME earlier
5548 	       to provide better context for diagnostics.  In that case,
5549 	       leave it unchanged.  */
5550 	    pref->ref = ref;
5551 	  return true;
5552 	}
5553 
5554       pref->ref = field;
5555 
5556       if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5557 	{
5558 	  /* Set maximum size if the reference is to the pointer member
5559 	     itself (as opposed to what it points to).  */
5560 	  pref->set_max_size_range ();
5561 	  return true;
5562 	}
5563 
5564       /* SAM is set for array members that might need special treatment.  */
5565       special_array_member sam;
5566       tree size = component_ref_size (ptr, &sam);
5567       if (sam == special_array_member::int_0)
5568 	pref->sizrng[0] = pref->sizrng[1] = 0;
5569       else if (!pref->trail1special && sam == special_array_member::trail_1)
5570 	pref->sizrng[0] = pref->sizrng[1] = 1;
5571       else if (size && TREE_CODE (size) == INTEGER_CST)
5572 	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5573       else
5574 	{
5575 	  /* When the size of the member is unknown it's either a flexible
5576 	     array member or a trailing special array member (either zero
5577 	     length or one-element).  Set the size to the maximum minus
5578 	     the constant size of the type.  */
5579 	  pref->sizrng[0] = 0;
5580 	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5581 	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5582 	    if (TREE_CODE (recsize) == INTEGER_CST)
5583 	      pref->sizrng[1] -= wi::to_offset (recsize);
5584 	}
5585       return true;
5586     }
5587 
5588   if (code == ARRAY_REF)
5589     return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
5590 
5591   if (code == MEM_REF)
5592     return handle_mem_ref (ptr, ostype, pref, snlim, qry);
5593 
5594   if (code == TARGET_MEM_REF)
5595     {
5596       tree ref = TREE_OPERAND (ptr, 0);
5597       if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5598 	return false;
5599 
5600       /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
5601       pref->ref = ptr;
5602       pref->add_max_offset ();
5603       return true;
5604     }
5605 
5606   if (code == INTEGER_CST)
5607     {
5608       /* Pointer constants other than null are most likely the result
5609 	 of erroneous null pointer addition/subtraction.  Set size to
5610 	 zero.  For null pointers, set size to the maximum for now
5611 	 since those may be the result of jump threading.  */
5612       if (integer_zerop (ptr))
5613 	pref->set_max_size_range ();
5614       else
5615 	pref->sizrng[0] = pref->sizrng[1] = 0;
5616       pref->ref = ptr;
5617 
5618       return true;
5619     }
5620 
5621   if (code == STRING_CST)
5622     {
5623       pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5624       pref->ref = ptr;
5625       return true;
5626     }
5627 
5628   if (code == POINTER_PLUS_EXPR)
5629     {
5630       tree ref = TREE_OPERAND (ptr, 0);
5631       if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5632 	return false;
5633 
5634       /* Clear DEREF since the offset is being applied to the target
5635 	 of the dereference.  */
5636       pref->deref = 0;
5637 
5638       offset_int orng[2];
5639       tree off = pref->eval (TREE_OPERAND (ptr, 1));
5640       if (get_offset_range (off, NULL, orng, rvals))
5641 	pref->add_offset (orng[0], orng[1]);
5642       else
5643 	pref->add_max_offset ();
5644       return true;
5645     }
5646 
5647   if (code == VIEW_CONVERT_EXPR)
5648     {
5649       ptr = TREE_OPERAND (ptr, 0);
5650       return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5651     }
5652 
5653   if (code == SSA_NAME)
5654     {
5655       if (!snlim.next ())
5656 	return false;
5657 
5658       /* Only process an SSA_NAME if the recursion limit has not yet
5659 	 been reached.  */
5660       if (qry)
5661 	{
5662 	  if (++qry->depth > qry->max_depth)
5663 	    qry->max_depth = qry->depth;
5664 	  if (const access_ref *cache_ref = qry->get_ref (ptr))
5665 	    {
5666 	      /* If the pointer is in the cache set *PREF to what it refers
5667 		 to and return success.  */
5668 	      *pref = *cache_ref;
5669 	      return true;
5670 	    }
5671 	}
5672 
5673       gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5674       if (is_gimple_call (stmt))
5675 	{
5676 	  /* If STMT is a call to an allocation function get the size
5677 	     from its argument(s).  If successful, also set *PREF->REF
5678 	     to PTR for the caller to include in diagnostics.  */
5679 	  wide_int wr[2];
5680 	  if (gimple_call_alloc_size (stmt, wr, rvals))
5681 	    {
5682 	      pref->ref = ptr;
5683 	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5684 	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5685 	      /* Constrain both bounds to a valid size.  */
5686 	      offset_int maxsize = wi::to_offset (max_object_size ());
5687 	      if (pref->sizrng[0] > maxsize)
5688 		pref->sizrng[0] = maxsize;
5689 	      if (pref->sizrng[1] > maxsize)
5690 		pref->sizrng[1] = maxsize;
5691 	    }
5692 	  else
5693 	    {
5694 	      /* For functions known to return one of their pointer arguments
5695 		 try to determine what the returned pointer points to, and on
5696 		 success add OFFRNG which was set to the offset added by
5697 		 the function (e.g., memchr) to the overall offset.  */
5698 	      offset_int offrng[2];
5699 	      if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5700 		{
5701 		  if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5702 		    return false;
5703 
5704 		  /* Cap OFFRNG[1] to at most the remaining size of
5705 		     the object.  */
5706 		  offset_int remrng[2];
5707 		  remrng[1] = pref->size_remaining (remrng);
5708 		  if (remrng[1] < offrng[1])
5709 		    offrng[1] = remrng[1];
5710 		  pref->add_offset (offrng[0], offrng[1]);
5711 		}
5712 	      else
5713 		{
5714 		  /* For other calls that might return arbitrary pointers
5715 		     including into the middle of objects set the size
5716 		     range to maximum, clear PREF->BASE0, and also set
5717 		     PREF->REF to include in diagnostics.  */
5718 		  pref->set_max_size_range ();
5719 		  pref->base0 = false;
5720 		  pref->ref = ptr;
5721 		}
5722 	    }
5723 	  qry->put_ref (ptr, *pref);
5724 	  return true;
5725 	}
5726 
5727       if (gimple_nop_p (stmt))
5728 	{
5729 	  /* For a function argument try to determine the byte size
5730 	     of the array from the current function declaration
5731 	     (e.g., attribute access or related).  */
5732 	  wide_int wr[2];
5733 	  bool static_array = false;
5734 	  if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5735 	    {
5736 	      pref->parmarray = !static_array;
5737 	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5738 	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5739 	      pref->ref = ref;
5740 	      qry->put_ref (ptr, *pref);
5741 	      return true;
5742 	    }
5743 
5744 	  pref->set_max_size_range ();
5745 	  pref->base0 = false;
5746 	  pref->ref = ptr;
5747 	  qry->put_ref (ptr, *pref);
5748 	  return true;
5749 	}
5750 
5751       if (gimple_code (stmt) == GIMPLE_PHI)
5752 	{
5753 	  pref->ref = ptr;
5754 	  access_ref phi_ref = *pref;
5755 	  if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5756 	    return false;
5757 	  *pref = phi_ref;
5758 	  pref->ref = ptr;
5759 	  qry->put_ref (ptr, *pref);
5760 	  return true;
5761 	}
5762 
5763       if (!is_gimple_assign (stmt))
5764 	{
5765 	  /* Clear BASE0 since the assigned pointer might point into
5766 	     the middle of the object, set the maximum size range and,
5767 	     if the SSA_NAME refers to a function argument, set
5768 	     PREF->REF to it.  */
5769 	  pref->base0 = false;
5770 	  pref->set_max_size_range ();
5771 	  pref->ref = ptr;
5772 	  return true;
5773 	}
5774 
5775       tree_code code = gimple_assign_rhs_code (stmt);
5776 
5777       if (code == MAX_EXPR || code == MIN_EXPR)
5778 	{
5779 	  if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5780 	    return false;
5781 	  qry->put_ref (ptr, *pref);
5782 	  return true;
5783 	}
5784 
5785       tree rhs = gimple_assign_rhs1 (stmt);
5786 
5787       if (code == POINTER_PLUS_EXPR
5788 	  && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5789 	{
5790 	  /* Compute the size of the object first. */
5791 	  if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5792 	    return false;
5793 
5794 	  offset_int orng[2];
5795 	  tree off = gimple_assign_rhs2 (stmt);
5796 	  if (get_offset_range (off, stmt, orng, rvals))
5797 	    pref->add_offset (orng[0], orng[1]);
5798 	  else
5799 	    pref->add_max_offset ();
5800 	  qry->put_ref (ptr, *pref);
5801 	  return true;
5802 	}
5803 
5804       if (code == ADDR_EXPR
5805 	  || code == SSA_NAME)
5806 	return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5807 
5808       /* (This could also be an assignment from a nonlocal pointer.)  Save
5809 	 PTR to mention in diagnostics but otherwise treat it as a pointer
5810 	 to an unknown object.  */
5811       pref->ref = rhs;
5812       pref->base0 = false;
5813       pref->set_max_size_range ();
5814       return true;
5815     }
5816 
5817   /* Assume all other expressions point into an unknown object
5818      of the maximum valid size.  */
5819   pref->ref = ptr;
5820   pref->base0 = false;
5821   pref->set_max_size_range ();
5822   if (TREE_CODE (ptr) == SSA_NAME)
5823     qry->put_ref (ptr, *pref);
5824   return true;
5825 }
5826 
5827 /* A "public" wrapper around the above.  Clients should use this overload
5828    instead.  */
5829 
5830 tree
5831 compute_objsize (tree ptr, int ostype, access_ref *pref,
5832 		 range_query *rvals /* = NULL */)
5833 {
5834   pointer_query qry;
5835   qry.rvals = rvals;
5836   ssa_name_limit_t snlim;
5837   if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5838     return NULL_TREE;
5839 
5840   offset_int maxsize = pref->size_remaining ();
5841   if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5842     pref->offrng[0] = 0;
5843   return wide_int_to_tree (sizetype, maxsize);
5844 }
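
/* A minimal usage sketch (hypothetical caller):

     access_ref aref;
     if (tree size = compute_objsize (ptr, 1, &aref))
       {
	 // SIZE is the constant upper bound on the space remaining
	 // at PTR; AREF describes the referenced object and offsets.
       }
   */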
5845 
5846 /* Transitional wrapper.  The function should be removed once callers
5847    transition to the pointer_query API.  */
5848 
5849 tree
5850 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5851 {
5852   pointer_query qry;
5853   if (ptr_qry)
5854     ptr_qry->depth = 0;
5855   else
5856     ptr_qry = &qry;
5857 
5858   ssa_name_limit_t snlim;
5859   if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5860     return NULL_TREE;
5861 
5862   offset_int maxsize = pref->size_remaining ();
5863   if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5864     pref->offrng[0] = 0;
5865   return wide_int_to_tree (sizetype, maxsize);
5866 }
5867 
5868 /* Legacy wrapper around the above.  The function should be removed
5869    once callers transition to one of the two above.  */
5870 
5871 tree
5872 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5873 		 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5874 {
5875   /* Set the initial offsets to zero and size to negative to indicate
5876      none has been computed yet.  */
5877   access_ref ref;
5878   tree size = compute_objsize (ptr, ostype, &ref, rvals);
5879   if (!size || !ref.base0)
5880     return NULL_TREE;
5881 
5882   if (pdecl)
5883     *pdecl = ref.ref;
5884 
5885   if (poff)
5886     *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5887 
5888   return size;
5889 }
5890 
5891 /* Helper to determine and check the sizes of the source and the destination
5892    of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
5893    call expression, DEST is the destination argument, SRC is the source
5894    argument or null, and LEN is the number of bytes.  Use Object Size type-0
5895    regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
5896    (no overflow or invalid sizes), false otherwise.  */
5897 
5898 static bool
5899 check_memop_access (tree exp, tree dest, tree src, tree size)
5900 {
5901   /* For functions like memset and memcpy that operate on raw memory
5902      try to determine the size of the largest source and destination
5903      object using type-0 Object Size regardless of the object size
5904      type specified by the option.  */
5905   access_data data (exp, access_read_write);
5906   tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5907   tree dstsize = compute_objsize (dest, 0, &data.dst);
5908 
5909   return check_access (exp, size, /*maxread=*/NULL_TREE,
5910 		       srcsize, dstsize, data.mode, &data);
5911 }
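
/* For example (illustrative only), a call such as

     char d[4];
     memcpy (d, s, 8);

   fails the check: DSTSIZE is 4 while SIZE is 8, so check_access
   diagnoses the write and false is returned.  */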
5912 
5913 /* Validate memchr arguments without performing any expansion.
5914    Return NULL_RTX.  */
5915 
5916 static rtx
5917 expand_builtin_memchr (tree exp, rtx)
5918 {
5919   if (!validate_arglist (exp,
5920  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5921     return NULL_RTX;
5922 
5923   tree arg1 = CALL_EXPR_ARG (exp, 0);
5924   tree len = CALL_EXPR_ARG (exp, 2);
5925 
5926   check_read_access (exp, arg1, len, 0);
5927 
5928   return NULL_RTX;
5929 }
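
/* For instance (illustrative only):

     char a[4];
     memchr (a, 'x', 8);

   is diagnosed by the check above because the bound 8 exceeds the
   size of A; the call is then expanded as a normal library call.  */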
5930 
5931 /* Expand a call EXP to the memcpy builtin.
5932    Return NULL_RTX if we failed; the caller should emit a normal call,
5933    otherwise try to get the result in TARGET, if convenient (and in
5934    mode MODE if that's convenient).  */
5935 
5936 static rtx
5937 expand_builtin_memcpy (tree exp, rtx target)
5938 {
5939   if (!validate_arglist (exp,
5940  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5941     return NULL_RTX;
5942 
5943   tree dest = CALL_EXPR_ARG (exp, 0);
5944   tree src = CALL_EXPR_ARG (exp, 1);
5945   tree len = CALL_EXPR_ARG (exp, 2);
5946 
5947   check_memop_access (exp, dest, src, len);
5948 
5949   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5950 					  /*retmode=*/ RETURN_BEGIN, false);
5951 }
5952 
5953 /* Check a call EXP to the memmove built-in for validity.
5954    Return NULL_RTX on both success and failure.  */
5955 
5956 static rtx
5957 expand_builtin_memmove (tree exp, rtx target)
5958 {
5959   if (!validate_arglist (exp,
5960  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5961     return NULL_RTX;
5962 
5963   tree dest = CALL_EXPR_ARG (exp, 0);
5964   tree src = CALL_EXPR_ARG (exp, 1);
5965   tree len = CALL_EXPR_ARG (exp, 2);
5966 
5967   check_memop_access (exp, dest, src, len);
5968 
5969   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5970 					  /*retmode=*/ RETURN_BEGIN, true);
5971 }
5972 
5973 /* Expand a call EXP to the mempcpy builtin.
5974    Return NULL_RTX if we failed; the caller should emit a normal call,
5975    otherwise try to get the result in TARGET, if convenient (and in
5976    mode MODE if that's convenient).  */
5977 
5978 static rtx
5979 expand_builtin_mempcpy (tree exp, rtx target)
5980 {
5981   if (!validate_arglist (exp,
5982  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983     return NULL_RTX;
5984 
5985   tree dest = CALL_EXPR_ARG (exp, 0);
5986   tree src = CALL_EXPR_ARG (exp, 1);
5987   tree len = CALL_EXPR_ARG (exp, 2);
5988 
5989   /* Policy does not generally allow using compute_objsize (which
5990      is used internally by check_memop_access) to change code generation
5991      or drive optimization decisions.
5992 
5993      In this instance it is safe because the code we generate has
5994      the same semantics regardless of the return value of
5995      check_memop_access.  Exactly the same amount of data is copied
5996      and the return value is exactly the same in both cases.
5997 
5998      Furthermore, check_memop_access always uses mode 0 for the call to
5999      compute_objsize, so the imprecise nature of compute_objsize is
6000      avoided.  */
6001 
6002   /* Avoid expanding mempcpy into memcpy when the call is determined
6003      to overflow the buffer.  This also prevents the same overflow
6004      from being diagnosed again when expanding memcpy.  */
6005   if (!check_memop_access (exp, dest, src, len))
6006     return NULL_RTX;
6007 
6008   return expand_builtin_mempcpy_args (dest, src, len,
6009 				      target, exp, /*retmode=*/ RETURN_END);
6010 }
6011 
6012 /* Helper function to do the actual work for expand of memory copy family
6013    functions (memcpy, mempcpy, stpcpy).  The expansion copies LEN bytes
6014    of memory from SRC to DEST and assigns the result to TARGET if
6015    convenient.  The return value is based on the RETMODE argument.  */
6016 
6017 static rtx
6018 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
6019 				 rtx target, tree exp, memop_ret retmode,
6020 				 bool might_overlap)
6021 {
6022   unsigned int src_align = get_pointer_alignment (src);
6023   unsigned int dest_align = get_pointer_alignment (dest);
6024   rtx dest_mem, src_mem, dest_addr, len_rtx;
6025   HOST_WIDE_INT expected_size = -1;
6026   unsigned int expected_align = 0;
6027   unsigned HOST_WIDE_INT min_size;
6028   unsigned HOST_WIDE_INT max_size;
6029   unsigned HOST_WIDE_INT probable_max_size;
6030 
6031   bool is_move_done;
6032 
6033   /* If DEST is not a pointer type, call the normal function.  */
6034   if (dest_align == 0)
6035     return NULL_RTX;
6036 
6037   /* If SRC is not a pointer type, don't do this
6038      operation in-line.  */
6039   if (src_align == 0)
6040     return NULL_RTX;
6041 
6042   if (currently_expanding_gimple_stmt)
6043     stringop_block_profile (currently_expanding_gimple_stmt,
6044 			    &expected_align, &expected_size);
6045 
6046   if (expected_align < dest_align)
6047     expected_align = dest_align;
6048   dest_mem = get_memory_rtx (dest, len);
6049   set_mem_align (dest_mem, dest_align);
6050   len_rtx = expand_normal (len);
6051   determine_block_size (len, len_rtx, &min_size, &max_size,
6052 			&probable_max_size);
6053 
6054   /* Try to get the byte representation of the constant SRC points to,
6055      with its byte size in NBYTES.  */
6056   unsigned HOST_WIDE_INT nbytes;
6057   const char *rep = getbyterep (src, &nbytes);
6058 
6059   /* If the function's constant bound LEN_RTX is less than or equal
6060      to the byte size of the representation of the constant argument,
6061      and if block move would be done by pieces, we can avoid loading
6062      the bytes from memory and only store the computed constant.
6063      This works in the overlap (memmove) case as well because
6064      store_by_pieces just generates a series of stores of constants
6065      from the representation returned by getbyterep().  */
6066   if (rep
6067       && CONST_INT_P (len_rtx)
6068       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
6069       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
6070 			      CONST_CAST (char *, rep),
6071 			      dest_align, false))
6072     {
6073       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
6074 				  builtin_memcpy_read_str,
6075 				  CONST_CAST (char *, rep),
6076 				  dest_align, false, retmode);
6077       dest_mem = force_operand (XEXP (dest_mem, 0), target);
6078       dest_mem = convert_memory_address (ptr_mode, dest_mem);
6079       return dest_mem;
6080     }
6081 
6082   src_mem = get_memory_rtx (src, len);
6083   set_mem_align (src_mem, src_align);
6084 
6085   /* Copy word part most expediently.  */
6086   enum block_op_methods method = BLOCK_OP_NORMAL;
6087   if (CALL_EXPR_TAILCALL (exp)
6088       && (retmode == RETURN_BEGIN || target == const0_rtx))
6089     method = BLOCK_OP_TAILCALL;
6090   bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6091 			   && retmode == RETURN_END
6092 			   && !might_overlap
6093 			   && target != const0_rtx);
6094   if (use_mempcpy_call)
6095     method = BLOCK_OP_NO_LIBCALL_RET;
6096   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6097 				     expected_align, expected_size,
6098 				     min_size, max_size, probable_max_size,
6099 				     use_mempcpy_call, &is_move_done,
6100 				     might_overlap);
6101 
6102   /* Bail out when a mempcpy call would be expanded as a libcall and
6103      the target provides a fast implementation of the mempcpy
6104      routine.  */
6105   if (!is_move_done)
6106     return NULL_RTX;
6107 
6108   if (dest_addr == pc_rtx)
6109     return NULL_RTX;
6110 
6111   if (dest_addr == 0)
6112     {
6113       dest_addr = force_operand (XEXP (dest_mem, 0), target);
6114       dest_addr = convert_memory_address (ptr_mode, dest_addr);
6115     }
6116 
6117   if (retmode != RETURN_BEGIN && target != const0_rtx)
6118     {
6119       dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6120       /* stpcpy pointer to last byte.  */
6121       if (retmode == RETURN_END_MINUS_ONE)
6122 	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6123     }
6124 
6125   return dest_addr;
6126 }
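
/* Illustration only (not part of GCC): with a constant source and a
   constant bound no larger than that source, the store_by_pieces path
   above turns

     char buf[16];
     memcpy (buf, "abcdefgh", 8);

   into direct stores of constant word-sized chunks, never emitting a
   load from the string literal.  */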

/* Helper for expand_builtin_mempcpy: expand a mempcpy-like copy of LEN
   bytes from SRC to DEST whose return value is determined by RETMODE,
   using ORIG_EXP as the original CALL_EXPR for diagnostics.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
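
/* Illustration only (not part of GCC): mempcpy differs from memcpy only
   in its return value, which points one past the last byte written:

     char dst[8];
     char *end = mempcpy (dst, "abc", 3);    end == dst + 3

   hence the RETURN_END retmode used when expanding it.  */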

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX
   if we failed; the caller should emit a normal call.  Otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
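
/* Illustration only (not part of GCC): the RETURN_END adjustment above
   bridges the gap between the movstr pattern's result and mempcpy's
   contract.  For a source equal to "abc":

     stpcpy (dst, "abc")       returns dst + 3, the nul
				 (RETURN_END_MINUS_ONE)
     mempcpy (dst, "abc", 4)   returns dst + 4, one past the nul
				 (RETURN_END)

   so the movstr result needs + 1 to stand in for the latter.  */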

/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */
  access_data data (exp, access_read_write, NULL_TREE, true,
		    NULL_TREE, true);
  const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
  compute_objsize (src, ost, &data.src);
  tree destsize = compute_objsize (dest, ost, &data.dst);

  check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
		src, destsize, data.mode, &data);

  return NULL_RTX;
}
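
/* Illustration only (not part of GCC): the check above flags appends
   whose source alone cannot fit in the destination, e.g.

     char d[4] = "a";
     strcat (d, "0123456789");    source exceeds the destination object

   It cannot account for what D already holds, so shorter overflows go
   undiagnosed here.  */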

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write, NULL_TREE, true,
			NULL_TREE, true);
      const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
      compute_objsize (src, ost, &data.src);
      tree dstsize = compute_objsize (dest, ost, &data.dst);
      check_access (exp, /*dstwrite=*/ NULL_TREE,
		    /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
		    dstsize, data.mode, &data);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
  /* Detect strcpy calls with unterminated arrays.  */
  tree size;
  bool exact;
  if (tree nonstr = unterminated_array (src, &size, &exact))
    {
      /* NONSTR refers to the non-nul terminated constant array.  */
      warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
			  size, exact);
      return NULL_RTX;
    }

  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
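
/* Illustration only (not part of GCC): unterminated_array catches
   constant sources with no nul to stop at, e.g.

     const char a[3] = "abc";    no room for the terminating nul
     strcpy (d, a);              undefined: the copy has no end

   The expander warns and punts to the library call rather than
   inline-expanding the undefined read.  */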

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write);
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
				       &data.dst);
      check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize, data.mode, &data);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      if (lendata.decl)
	warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
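
/* Illustration only (not part of GCC): the code above amounts to these
   source-level rewrites.  With the result unused:

     stpcpy (d, s);           becomes   strcpy (d, s);

   With a source of known constant length:

     p = stpcpy (d, "abc");   copies strlen ("abc") + 1 == 4 bytes the
			      way mempcpy would, then returns d + 3, a
			      pointer to the nul (RETURN_END_MINUS_ONE).  */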

/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call ()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}

/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);
  access_data data (exp, access_read_write);
  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
  check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
  return NULL_RTX;
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}

/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_inlined_location (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
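
/* Illustration only (not part of GCC): because strncat always writes a
   nul after at most MAXREAD copied bytes, a bound equal to the
   destination size leaves no room for the terminator:

     char d[8] = "";
     strncat (d, s, sizeof d);                     diagnosed above
     strncat (d, s, sizeof d - strlen (d) - 1);    correct idiom

   which is exactly the off-by-one the warning targets.  */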

/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Detect unterminated source (only).  */
  if (!check_nul_terminated_array (exp, src, maxread))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below.  */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  access_data data (exp, access_read_write);
  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);

  /* Add one for the terminating nul.  */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_inlined_location (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		destsize, data.mode, &data);
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write, len, true, len, true);
      const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
      compute_objsize (src, ost, &data.src);
      tree dstsize = compute_objsize (dest, ost, &data.dst);
      /* The number of bytes to write is LEN but check_access will also
	 check SLEN if LEN's value isn't known.  */
      check_access (exp, /*dstwrite=*/len,
		    /*maxread=*/len, src, dstsize, data.mode, &data);
    }

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces; if that fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
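
/* Illustration only (not part of GCC): with a constant source and bound,
   the padding requirement folds into the by-pieces stores.  E.g.

     char buf[8];
     strncpy (buf, "abc", 8);

   stores 'a' 'b' 'c' followed by five zero bytes; for offsets past the
   end of the string, builtin_strncpy_read_str simply supplies
   const0_rtx.  */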

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
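
/* Illustration only (not part of GCC): multiplying the byte value by a
   mode-sized constant of all-one bytes replicates it into every byte
   position.  For a 4-byte mode and value 0xAB:

     0xAB * 0x01010101 == 0xABABABAB

   which is the word store_by_pieces then stores repeatedly.  */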

/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
	 the coefficients by pieces (in the required modes).
	 We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
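
/* Illustration only (not part of GCC): expanding bzero through the
   memset helper treats

     bzero (p, n);     as     memset (p, 0, n);

   while keeping ORIG_EXP as the bzero call, so the do_libcall fallback
   above re-emits bzero, not memset, when inlining fails.  */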

/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (!check_read_access (exp, arg1, len, 0)
      || !check_read_access (exp, arg2, len, 0))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_bytecmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* Try to get the byte representation of the constant ARG2 (or, only
     when the function's result is used for equality to zero, ARG1)
     points to, with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (arg2, &nbytes);
  if (result_eq && rep == NULL)
    {
      /* For equality to zero the arguments are interchangeable.  */
      rep = getbyterep (arg1, &nbytes);
      if (rep != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant result.  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, rep));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
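
/* Illustration only (not part of GCC): RESULT_EQ reflects how the call's
   value is consumed.  In

     if (memcmp (a, b, 16) == 0) ...

   only zero vs. nonzero matters, so the expansion may return any nonzero
   value on mismatch instead of computing the signed byte difference a
   full memcmp result requires.  */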

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  if (!check_read_access (exp, arg1)
      || !check_read_access (exp, arg2))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  if (!check_nul_terminated_array (exp, arg1, arg3)
      || !check_nul_terminated_array (exp, arg2, arg3))
    return NULL_RTX;

  location_t loc = tree_inlined_location (exp);
  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  if (!len1 || !len2)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
	  && !len1 && !len2)
	{
	  /* A strncmp read is constrained not just by the bound but
	     also by the length of the shorter string.  Specifying
	     a bound that's larger than the size of either array makes
	     no sense and is likely a bug.  When the length of neither
	     of the two strings is known but the sizes of both of
	     the arrays they are stored in are, issue a warning if
	     the bound is larger than the size of the larger
	     of the two arrays.  */

	  access_ref ref1 (arg3, true);
	  access_ref ref2 (arg3, true);

	  tree bndrng[2] = { NULL_TREE, NULL_TREE };
	  get_size_range (arg3, bndrng, ref1.bndrng);

	  tree size1 = compute_objsize (arg1, 1, &ref1);
	  tree size2 = compute_objsize (arg2, 1, &ref2);
	  tree func = get_callee_fndecl (exp);

	  if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
	    {
	      offset_int rem1 = ref1.size_remaining ();
	      offset_int rem2 = ref2.size_remaining ();
	      if (rem1 == 0 || rem2 == 0)
		maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				      bndrng, integer_zero_node);
	      else
		{
		  offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
		  if (maxrem < wi::to_offset (bndrng[0]))
		    maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
					  func, bndrng,
					  wide_int_to_tree (sizetype, maxrem));
		}
	    }
	  else if (bndrng[0]
		   && !integer_zerop (bndrng[0])
		   && ((size1 && integer_zerop (size1))
		       || (size2 && integer_zerop (size2))))
	    maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				  bndrng, integer_zero_node);
	}
    }

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  if (TREE_NO_WARNING (exp))
    TREE_NO_WARNING (call) = true;
  gcc_assert (TREE_CODE (call) == CALL_EXPR);
  CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
  return expand_call (call, target, target == const0_rtx);
}
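
/* Illustration only (not part of GCC): once one string's length is a
   known constant, the comparison can never read past its nul, so

     strncmp (s, "abc", 100)

   is expanded as a cmpstrn comparison of MIN (strlen ("abc") + 1, 100)
   == 4 bytes; the large caller-supplied bound never reaches the insn.  */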

/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}

/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
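
/* Illustration only (not part of GCC): the two branches above mirror the
   two common va_list flavors.  Roughly, at the source level:

     typedef void *va_list;           va_copy is   dst = src;
     typedef struct tag va_list[1];   va_copy is   memcpy (dst, src,
							   sizeof (va_list));

   The x86-64 SysV ABI uses the array form, hence the block move.  */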

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
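
/* Illustration only (not part of GCC): only a zero argument is reliably
   portable, e.g.

     void *ra = __builtin_return_address (0);   current frame, safe
     void *fp = __builtin_frame_address (2);    -Wframe-address; may crash

   since, as warned above, nothing verifies that two caller frames
   actually exist.  */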

/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  /* Dynamic allocations for variables are recorded during gimplification.  */
  if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
    record_dynamic_alloc (exp);

  return result;
}
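
/* Illustrative use (a sketch): for __builtin_alloca_with_align the
   alignment argument is a bit count, which is why the expander reads
   it with TREE_INT_CST_LOW directly; e.g.

     char *buf = __builtin_alloca_with_align (n, 128);

   requests 128-bit (16-byte) alignment, while plain __builtin_alloca
   gets BIGGEST_ALIGNMENT.  */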

/* Emit the __asan_allocas_unpoison call in EXP.  Add to its second
   argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See the motivation for this in the
   comment before handle_builtin_stack_restore.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}

/* Expand a call to a bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
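
/* For example, __builtin_bswap32 (0x12345678) evaluates to 0x78563412;
   when bswap_optab has a handler for the mode, the expander above
   emits a single byte-swap insn for it.  */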

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
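
/* Builtins funneled through this helper include __builtin_clz,
   __builtin_ctz, __builtin_popcount, __builtin_parity, __builtin_ffs
   and __builtin_clrsb.  For instance, with a 32-bit int,
   __builtin_popcount (0xF0) is 4 and __builtin_clz (1) is 31.  */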

/* Expand a call to __builtin_expect.  We just return our argument
   as the semantics of __builtin_expect have already been applied by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}

/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the semantics of the builtin have already been applied by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
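
/* By the time we get here the hint has already been consumed by the
   branch prediction pass, so e.g. (a sketch)

     if (__builtin_expect (err != 0, 0))
       abort ();

   expands to a plain test of ERR != 0; only the edge probabilities
   recorded earlier distinguish it from an unannotated branch.  */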

/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the semantics of __builtin_assume_aligned have already
   been applied by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
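
/* Likewise the alignment promise was consumed earlier by CCP, so
   e.g. (a sketch)

     q = __builtin_assume_aligned (p, 64);

   expands to the value of P; the assertion above merely checks that
   dropping the extra arguments loses no side effects.  */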

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
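
/* For example, __builtin_copysign (3.0, -0.5) is -3.0 and
   __builtin_copysign (-3.0, 2.0) is 3.0; expand_copysign emits this
   sign transfer with a dedicated insn or by bit manipulation of the
   representation, as available.  */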

/* Emit a call to __builtin___clear_cache.  */

void
default_emit_call_builtin___clear_cache (rtx begin, rtx end)
{
  rtx callee = gen_rtx_SYMBOL_REF (Pmode,
				   BUILTIN_ASM_NAME_PTR
				   (BUILT_IN_CLEAR_CACHE));

  emit_library_call (callee,
		     LCT_NORMAL, VOIDmode,
		     convert_memory_address (ptr_mode, begin), ptr_mode,
		     convert_memory_address (ptr_mode, end), ptr_mode);
}

/* Emit a call to __builtin___clear_cache, unless the target specifies
   it as do-nothing.  This function can be used by trampoline
   finalizers to duplicate the effects of expanding a call to the
   clear_cache builtin.  */

void
maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
{
  if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
      || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return;
    }

  if (targetm.have_clear_cache ())
    {
      /* We have a "clear_cache" insn, and it will handle everything.  */
      class expand_operand ops[2];

      create_address_operand (&ops[0], begin);
      create_address_operand (&ops[1], end);

      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return;
    }
  else
    {
#ifndef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return;
#endif /* CLEAR_INSN_CACHE */
    }

  targetm.calls.emit_call_builtin___clear_cache (begin, end);
}

/* Expand a call to __builtin___clear_cache.  */

static void
expand_builtin___clear_cache (tree exp)
{
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return;
    }

  begin = CALL_EXPR_ARG (exp, 0);
  begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

  end = CALL_EXPR_ARG (exp, 1);
  end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

  maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
}
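
/* Typical user-level use (a sketch), e.g. in a JIT after writing N
   bytes of code into BUF:

     __builtin___clear_cache (buf, buf + n);

   On targets with a coherent instruction cache and no CLEAR_INSN_CACHE
   definition this expands to no code at all, as handled above.  */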

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
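
/* The two binops above implement the usual round-up idiom
   TRAMP = (TRAMP + ALIGN - 1) & -ALIGN with ALIGN in bytes; e.g. for
   a 16-byte trampoline alignment, an address of 0x1003 becomes
   (0x1003 + 15) & -16 == 0x1010.  */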

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}

/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of two pointers: one to the static chain
   and one to the code entry, in that order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;
  set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
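
/* Viewed from C, the descriptor initialized above amounts to (a
   sketch, with hypothetical field names)

     struct descriptor
     {
       void *static_chain;	-- at offset 0, stored first
       void *entry_point;	-- at offset POINTER_SIZE / BITS_PER_UNIT
     };

   matching the layout described before expand_builtin_init_descriptor.  */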

/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
			 targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}

/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding an RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
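
/* Note the fallback comparison with zero is only correct for formats
   without a sign bit: in general __builtin_signbit (-0.0) is nonzero
   even though -0.0 < 0.0 is false, so the bit-extraction paths above
   must be used whenever the format has a signbit_ro position.  */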

/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
    {
      tree path = CALL_EXPR_ARG (exp, 0);
      /* Detect unterminated path.  */
      if (!check_read_access (exp, path))
	return NULL_RTX;

      /* Also detect unterminated first argument.  */
      switch (DECL_FUNCTION_CODE (fn))
	{
	case BUILT_IN_EXECL:
	case BUILT_IN_EXECLE:
	case BUILT_IN_EXECLP:
	  if (!check_read_access (exp, path))
	    return NULL_RTX;
	default:
	  break;
	}
    }

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}

/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
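
/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 minus
   BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, and BITS_PER_UNIT << 2 is 32,
   so the call above yields the 32-bit integer mode (SImode on most
   targets).  */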

/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  if (TREE_CODE (exp) == SSA_NAME
      && TYPE_MODE (TREE_TYPE (exp)) != mode)
    {
      /* Undo argument promotion if possible, as combine might not
	 be able to do it later due to MEM_VOLATILE_P uses in the
	 patterns.  */
      gimple *g = get_gimple_for_ssa_name (exp);
      if (g && gimple_assign_cast_p (g))
	{
	  tree rhs = gimple_assign_rhs1 (g);
	  tree_code code = gimple_assign_rhs_code (g);
	  if (CONVERT_EXPR_CODE_P (code)
	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
	      && (TYPE_PRECISION (TREE_TYPE (exp))
		  > TYPE_PRECISION (TREE_TYPE (rhs))))
	    exp = rhs;
	}
    }

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}

/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}

/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
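
/* The two user-level forms handled here are (a sketch)

     old = __sync_val_compare_and_swap (&word, expected, desired);
     ok = __sync_bool_compare_and_swap (&word, expected, desired);

   Both expand to the same compare-and-swap; the PBOOL/POVAL selection
   above only decides whether the success flag or the prior value is
   kept.  */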

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
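
/* Together with __sync_lock_test_and_set this supports the classic
   spinlock idiom (a sketch):

     static int lock;

     while (__sync_lock_test_and_set (&lock, 1))
       ;					-- spin until acquired
     critical_section ();
     __sync_lock_release (&lock);		-- atomic store of 0, release

   where critical_section is a hypothetical stand-in.  */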

/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* We should never see a user-supplied explicit SYNC memory model,
     so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
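
/* The user-visible constants checked here are __ATOMIC_RELAXED (0),
   __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3),
   __ATOMIC_ACQ_REL (4) and __ATOMIC_SEQ_CST (5); target-specific bits
   outside MEMMODEL_MASK are only accepted when a memmodel_check hook
   vouches for them.  */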

/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}

/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
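
/* A typical compare-exchange loop feeding this expander (a sketch):

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
					  false, __ATOMIC_SEQ_CST,
					  __ATOMIC_SEQ_CST))
       ;

   On failure the builtin writes the observed value back through
   &expected, which is exactly the conditional store emitted above.  */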

/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}

/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}

/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}

/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the rtx code for the operation: PLUS, MINUS, AND, XOR, IOR,
   or NOT (which stands for NAND).
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
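
/* For instance, if __atomic_nand_fetch cannot be inlined, the library
   __atomic_fetch_nand routine supplied as EXT_CALL returns the old
   value, and the correction above computes NEW = ~(OLD & VAL); for
   the other codes a single binop such as OLD + VAL suffices.  */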

/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}

/* Expand an atomic clear operation.
	void __atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}

/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
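
/* For example, on a typical 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here at compile time, while a 16-byte request only
   does so when a 128-bit compare-and-swap pattern and atomic load
   both exist.  */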

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
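
/* Note the asymmetry with the fold above: an object may still be lock free
   at run time even when that cannot be proven at compile time, so the
   folder only commits to boolean_true_node and otherwise defers to the
   library.  For example (an assumed snippet), "__atomic_is_lock_free (8, p)"
   on a target without a DImode compare-and-swap is left as a libatomic
   call rather than being folded to false.  */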

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
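
/* A hedged source-level sketch of how the three fences above differ
   (assumed user code):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);   // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // same-thread only
     __sync_synchronize ();                      // full SEQ_CST barrier

   the signal fence only orders the thread with respect to a signal
   handler running on that same thread, so expand_mem_signal_fence
   typically constrains the compiler without emitting a machine
   barrier.  */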

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("%<__builtin_thread_pointer%> is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}
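
/* Example use of the pair above (assumed user code, not from GCC):

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   each expands to a single target insn when the corresponding optab is
   implemented; otherwise the error paths above reject the builtin.  */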


/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
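
/* The two expanders above implement __builtin_stack_save and
   __builtin_stack_restore, which the gimplifier pairs around
   variable-length-array scopes, roughly (a sketch):

     rtx sp = expand_stack_save ();   // on entering the VLA scope
     ...
     expand_stack_restore (var);      // VAR holds the saved pointer

   record_new_stack_level keeps the unwinder's notion of the stack
   level in sync after the restore.  */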

/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
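
/* For instance (assumed OpenACC user code),

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);

   expands to the target's oacc_dim_pos insn inside offloaded code, and
   falls back to the constant 0 (or 1 for the _size variant) on targets
   without dimension support.  */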

/* Expand a string compare operation using a sequence of char comparisons
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   The comparison expands to: (assume const_str_n is 2, i.e., arg2 is a
   constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
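
/* As a concrete sketch, "strcmp (s, "ab")" with length 3 (including the
   terminating nul) is emitted by the routine above roughly as

     result = (int) (unsigned char) s[0] - 'a';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) s[1] - 'b';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) s[2] - 0;
   ne_label:

   with each subtraction widened to the mode of the call's result.  */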

/* Inline expansion of a call to str(n)cmp and memcmp, with result going
   to TARGET if that's convenient.
   If the call is not inlined, return NULL_RTX.  */

static rtx
inline_expand_builtin_bytecmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has the same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* Get the object representation of the initializers of ARG1 and ARG2
     as strings, provided they refer to constant objects, with their byte
     sizes in LEN1 and LEN2, respectively.  */
  const char *bytes1 = getbyterep (arg1, &len1);
  const char *bytes2 = getbyterep (arg2, &len2);

  /* Fail if neither argument refers to an initialized constant.  */
  if (!bytes1 && !bytes2)
    return NULL_RTX;

  if (is_ncmp)
    {
      /* Fail if the memcmp/strncmp bound is not a constant.  */
      if (!tree_fits_uhwi_p (len3_tree))
	return NULL_RTX;

      len3 = tree_to_uhwi (len3_tree);

      if (fcode == BUILT_IN_MEMCMP)
	{
	  /* Fail if the memcmp bound is greater than the size of either
	     of the two constant objects.  */
	  if ((bytes1 && len1 < len3)
	      || (bytes2 && len2 < len3))
	    return NULL_RTX;
	}
    }

  if (fcode != BUILT_IN_MEMCMP)
    {
      /* For string functions (i.e., strcmp and strncmp) reduce LEN1
	 and LEN2 to the length of the nul-terminated string stored
	 in each.  */
      if (bytes1 != NULL)
	len1 = strnlen (bytes1, len1) + 1;
      if (bytes2 != NULL)
	len2 = strnlen (bytes2, len2) + 1;
    }

  /* See inline_string_cmp.  */
  int const_str_n;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  /* For strncmp only, compute the new bound as the smallest of
     the lengths of the two strings (plus 1) and the bound provided
     to the function.  */
  unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
  if (is_ncmp && len3 < bound)
    bound = len3;

  /* If the bound of the comparison is larger than the threshold,
     do nothing.  */
  if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now start the inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? bytes1 : bytes2, bound,
			    const_str_n, mode);
}
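
/* Worked example of the bound computation above (assumed values): for
   "strncmp (s, "hello", 10)" the constant string contributes
   len2 == 6 (five chars plus the nul), the bound becomes min (6, 10) == 6,
   and the call is inlined whenever 6 does not exceed
   param_builtin_string_cmp_inline_length.  */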

/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */
static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
			       bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
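
/* Example (assumed user code) of the builtin handled above:

     size_t idx = __builtin_speculation_safe_value (untrusted_idx);
     val = array[idx];

   on targets implementing targetm.speculation_safe_value, IDX is forced
   to the failsafe value (0 here, since no second argument is given) on
   mis-speculated paths.  */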

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that a possible overflow will be detected
     by ASan.  */

  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_CLEAR_CACHE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
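  /* For instance, in "(void) __builtin_labs (x)" with a non-volatile X
     (an assumed example), nothing needs to be emitted for the call; the
     loop below merely evaluates the arguments for their side effects.  */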
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is the address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      expand_builtin___clear_cache (exp);
      return const0_rtx;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

    /* Returns the address of the area where the structure is returned.
       0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
    case BUILT_IN_BSWAP128:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_GETTEXT:
    case BUILT_IN_PUTS:
    case BUILT_IN_PUTS_UNLOCKED:
    case BUILT_IN_STRDUP:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_STRNDUP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
      break;

    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSTR:
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  check_read_access (exp, CALL_EXPR_ARG (exp, 0));
	  check_read_access (exp, CALL_EXPR_ARG (exp, 1));
	}
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STPNCPY:
      target = expand_builtin_stpncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCHR:
      target = expand_builtin_memchr (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
       back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
       when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
      /* FALLTHROUGH */

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
       back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHROUGH */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
	return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
	{
	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
	}
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
          and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
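      /* The external form assumed here is libatomic's generic entry
	 point, roughly:

	   bool __atomic_compare_exchange (size_t size, void *obj,
					   void *expected, void *desired,
					   int success, int failure);

	 which, unlike the _N builtins, has no weak parameter, hence the
	 rebuild of the call without argument 3 above.  */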
10476 
10477     case BUILT_IN_ATOMIC_LOAD_1:
10478     case BUILT_IN_ATOMIC_LOAD_2:
10479     case BUILT_IN_ATOMIC_LOAD_4:
10480     case BUILT_IN_ATOMIC_LOAD_8:
10481     case BUILT_IN_ATOMIC_LOAD_16:
10482       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10483       target = expand_builtin_atomic_load (mode, exp, target);
10484       if (target)
10485 	return target;
10486       break;
10487 
10488     case BUILT_IN_ATOMIC_STORE_1:
10489     case BUILT_IN_ATOMIC_STORE_2:
10490     case BUILT_IN_ATOMIC_STORE_4:
10491     case BUILT_IN_ATOMIC_STORE_8:
10492     case BUILT_IN_ATOMIC_STORE_16:
10493       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10494       target = expand_builtin_atomic_store (mode, exp);
10495       if (target)
10496 	return const0_rtx;
10497       break;
10498 
10499     case BUILT_IN_ATOMIC_ADD_FETCH_1:
10500     case BUILT_IN_ATOMIC_ADD_FETCH_2:
10501     case BUILT_IN_ATOMIC_ADD_FETCH_4:
10502     case BUILT_IN_ATOMIC_ADD_FETCH_8:
10503     case BUILT_IN_ATOMIC_ADD_FETCH_16:
10504       {
10505 	enum built_in_function lib;
10506 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10507 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10508 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10509 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10510 						 ignore, lib);
10511 	if (target)
10512 	  return target;
10513 	break;
10514       }
10515     case BUILT_IN_ATOMIC_SUB_FETCH_1:
10516     case BUILT_IN_ATOMIC_SUB_FETCH_2:
10517     case BUILT_IN_ATOMIC_SUB_FETCH_4:
10518     case BUILT_IN_ATOMIC_SUB_FETCH_8:
10519     case BUILT_IN_ATOMIC_SUB_FETCH_16:
10520       {
10521 	enum built_in_function lib;
10522 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10523 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10524 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10525 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10526 						 ignore, lib);
10527 	if (target)
10528 	  return target;
10529 	break;
10530       }
10531     case BUILT_IN_ATOMIC_AND_FETCH_1:
10532     case BUILT_IN_ATOMIC_AND_FETCH_2:
10533     case BUILT_IN_ATOMIC_AND_FETCH_4:
10534     case BUILT_IN_ATOMIC_AND_FETCH_8:
10535     case BUILT_IN_ATOMIC_AND_FETCH_16:
10536       {
10537 	enum built_in_function lib;
10538 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10539 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10540 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10541 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10542 						 ignore, lib);
10543 	if (target)
10544 	  return target;
10545 	break;
10546       }
10547     case BUILT_IN_ATOMIC_NAND_FETCH_1:
10548     case BUILT_IN_ATOMIC_NAND_FETCH_2:
10549     case BUILT_IN_ATOMIC_NAND_FETCH_4:
10550     case BUILT_IN_ATOMIC_NAND_FETCH_8:
10551     case BUILT_IN_ATOMIC_NAND_FETCH_16:
10552       {
10553 	enum built_in_function lib;
10554 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10555 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10556 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10557 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10558 						 ignore, lib);
10559 	if (target)
10560 	  return target;
10561 	break;
10562       }
10563     case BUILT_IN_ATOMIC_XOR_FETCH_1:
10564     case BUILT_IN_ATOMIC_XOR_FETCH_2:
10565     case BUILT_IN_ATOMIC_XOR_FETCH_4:
10566     case BUILT_IN_ATOMIC_XOR_FETCH_8:
10567     case BUILT_IN_ATOMIC_XOR_FETCH_16:
10568       {
10569 	enum built_in_function lib;
10570 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10571 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10572 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10573 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10574 						 ignore, lib);
10575 	if (target)
10576 	  return target;
10577 	break;
10578       }
10579     case BUILT_IN_ATOMIC_OR_FETCH_1:
10580     case BUILT_IN_ATOMIC_OR_FETCH_2:
10581     case BUILT_IN_ATOMIC_OR_FETCH_4:
10582     case BUILT_IN_ATOMIC_OR_FETCH_8:
10583     case BUILT_IN_ATOMIC_OR_FETCH_16:
10584       {
10585 	enum built_in_function lib;
10586 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10587 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10588 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10589 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10590 						 ignore, lib);
10591 	if (target)
10592 	  return target;
10593 	break;
10594       }
10595     case BUILT_IN_ATOMIC_FETCH_ADD_1:
10596     case BUILT_IN_ATOMIC_FETCH_ADD_2:
10597     case BUILT_IN_ATOMIC_FETCH_ADD_4:
10598     case BUILT_IN_ATOMIC_FETCH_ADD_8:
10599     case BUILT_IN_ATOMIC_FETCH_ADD_16:
10600       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10601       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10602 					       ignore, BUILT_IN_NONE);
10603       if (target)
10604 	return target;
10605       break;
10606 
10607     case BUILT_IN_ATOMIC_FETCH_SUB_1:
10608     case BUILT_IN_ATOMIC_FETCH_SUB_2:
10609     case BUILT_IN_ATOMIC_FETCH_SUB_4:
10610     case BUILT_IN_ATOMIC_FETCH_SUB_8:
10611     case BUILT_IN_ATOMIC_FETCH_SUB_16:
10612       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10613       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10614 					       ignore, BUILT_IN_NONE);
10615       if (target)
10616 	return target;
10617       break;
10618 
10619     case BUILT_IN_ATOMIC_FETCH_AND_1:
10620     case BUILT_IN_ATOMIC_FETCH_AND_2:
10621     case BUILT_IN_ATOMIC_FETCH_AND_4:
10622     case BUILT_IN_ATOMIC_FETCH_AND_8:
10623     case BUILT_IN_ATOMIC_FETCH_AND_16:
10624       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10625       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10626 					       ignore, BUILT_IN_NONE);
10627       if (target)
10628 	return target;
10629       break;
10630 
10631     case BUILT_IN_ATOMIC_FETCH_NAND_1:
10632     case BUILT_IN_ATOMIC_FETCH_NAND_2:
10633     case BUILT_IN_ATOMIC_FETCH_NAND_4:
10634     case BUILT_IN_ATOMIC_FETCH_NAND_8:
10635     case BUILT_IN_ATOMIC_FETCH_NAND_16:
10636       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10637       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10638 					       ignore, BUILT_IN_NONE);
10639       if (target)
10640 	return target;
10641       break;
10642 
10643     case BUILT_IN_ATOMIC_FETCH_XOR_1:
10644     case BUILT_IN_ATOMIC_FETCH_XOR_2:
10645     case BUILT_IN_ATOMIC_FETCH_XOR_4:
10646     case BUILT_IN_ATOMIC_FETCH_XOR_8:
10647     case BUILT_IN_ATOMIC_FETCH_XOR_16:
10648       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10649       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10650 					       ignore, BUILT_IN_NONE);
10651       if (target)
10652 	return target;
10653       break;
10654 
10655     case BUILT_IN_ATOMIC_FETCH_OR_1:
10656     case BUILT_IN_ATOMIC_FETCH_OR_2:
10657     case BUILT_IN_ATOMIC_FETCH_OR_4:
10658     case BUILT_IN_ATOMIC_FETCH_OR_8:
10659     case BUILT_IN_ATOMIC_FETCH_OR_16:
10660       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10661       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10662 					       ignore, BUILT_IN_NONE);
10663       if (target)
10664 	return target;
10665       break;
10666 
10667     case BUILT_IN_ATOMIC_TEST_AND_SET:
10668       return expand_builtin_atomic_test_and_set (exp, target);
10669 
10670     case BUILT_IN_ATOMIC_CLEAR:
10671       return expand_builtin_atomic_clear (exp);
10672 
10673     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10674       return expand_builtin_atomic_always_lock_free (exp);
10675 
10676     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10677       target = expand_builtin_atomic_is_lock_free (exp);
10678       if (target)
10679         return target;
10680       break;
10681 
10682     case BUILT_IN_ATOMIC_THREAD_FENCE:
10683       expand_builtin_atomic_thread_fence (exp);
10684       return const0_rtx;
10685 
10686     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10687       expand_builtin_atomic_signal_fence (exp);
10688       return const0_rtx;
10689 
10690     case BUILT_IN_OBJECT_SIZE:
10691       return expand_builtin_object_size (exp);
10692 
10693     case BUILT_IN_MEMCPY_CHK:
10694     case BUILT_IN_MEMPCPY_CHK:
10695     case BUILT_IN_MEMMOVE_CHK:
10696     case BUILT_IN_MEMSET_CHK:
10697       target = expand_builtin_memory_chk (exp, target, mode, fcode);
10698       if (target)
10699 	return target;
10700       break;
10701 
10702     case BUILT_IN_STRCPY_CHK:
10703     case BUILT_IN_STPCPY_CHK:
10704     case BUILT_IN_STRNCPY_CHK:
10705     case BUILT_IN_STPNCPY_CHK:
10706     case BUILT_IN_STRCAT_CHK:
10707     case BUILT_IN_STRNCAT_CHK:
10708     case BUILT_IN_SNPRINTF_CHK:
10709     case BUILT_IN_VSNPRINTF_CHK:
10710       maybe_emit_chk_warning (exp, fcode);
10711       break;
10712 
10713     case BUILT_IN_SPRINTF_CHK:
10714     case BUILT_IN_VSPRINTF_CHK:
10715       maybe_emit_sprintf_chk_warning (exp, fcode);
10716       break;
10717 
10718     case BUILT_IN_THREAD_POINTER:
10719       return expand_builtin_thread_pointer (exp, target);
10720 
10721     case BUILT_IN_SET_THREAD_POINTER:
10722       expand_builtin_set_thread_pointer (exp);
10723       return const0_rtx;
10724 
10725     case BUILT_IN_ACC_ON_DEVICE:
10726       /* Fall back to a library call if we failed to expand the builtin
10727 	 when folding.  */
10728       break;
10729 
10730     case BUILT_IN_GOACC_PARLEVEL_ID:
10731     case BUILT_IN_GOACC_PARLEVEL_SIZE:
10732       return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10733 
10734     case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10735       return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10736 
10737     case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10738     case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10739     case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10740     case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10741     case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10742       mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10743       return expand_speculation_safe_value (mode, exp, target, ignore);
10744 
10745     default:	/* Just emit a library call for an unknown builtin.  */
10746       break;
10747     }
10748 
10749   /* The switch statement above can fall through to this point, causing
10750      the builtin to be expanded as an ordinary library call.  */
10751   return expand_call (exp, target, ignore);
10752 }
10753 
10754 /* Determine whether a tree node represents a call to a built-in
10755    function.  If the tree T is a call to a built-in function with
10756    the right number of arguments of the appropriate types, return
10757    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10758    Otherwise the return value is END_BUILTINS.  */
10759 
10760 enum built_in_function
10761 builtin_mathfn_code (const_tree t)
10762 {
10763   const_tree fndecl, arg, parmlist;
10764   const_tree argtype, parmtype;
10765   const_call_expr_arg_iterator iter;
10766 
10767   if (TREE_CODE (t) != CALL_EXPR)
10768     return END_BUILTINS;
10769 
10770   fndecl = get_callee_fndecl (t);
10771   if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10772       return END_BUILTINS;
10773 
10774   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
10775   init_const_call_expr_arg_iterator (t, &iter);
10776   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
10777     {
10778       /* If a function doesn't take a variable number of arguments,
10779 	 the last element in the list will have type `void'.  */
10780       parmtype = TREE_VALUE (parmlist);
10781       if (VOID_TYPE_P (parmtype))
10782 	{
10783 	  if (more_const_call_expr_args_p (&iter))
10784 	    return END_BUILTINS;
10785 	  return DECL_FUNCTION_CODE (fndecl);
10786 	}
10787 
10788       if (! more_const_call_expr_args_p (&iter))
10789 	return END_BUILTINS;
10790 
10791       arg = next_const_call_expr_arg (&iter);
10792       argtype = TREE_TYPE (arg);
10793 
10794       if (SCALAR_FLOAT_TYPE_P (parmtype))
10795 	{
10796 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
10797 	    return END_BUILTINS;
10798 	}
10799       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10800 	{
10801 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
10802 	    return END_BUILTINS;
10803 	}
10804       else if (POINTER_TYPE_P (parmtype))
10805 	{
10806 	  if (! POINTER_TYPE_P (argtype))
10807 	    return END_BUILTINS;
10808 	}
10809       else if (INTEGRAL_TYPE_P (parmtype))
10810 	{
10811 	  if (! INTEGRAL_TYPE_P (argtype))
10812 	    return END_BUILTINS;
10813 	}
10814       else
10815 	return END_BUILTINS;
10816     }
10817 
10818   /* Variable-length argument list.  */
10819   return DECL_FUNCTION_CODE (fndecl);
10820 }
10821 
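/* A usage sketch (hypothetical caller): for a CALL_EXPR T whose callee is
   the normal builtin sqrt and whose argument is a real value,

     enum built_in_function fc = builtin_mathfn_code (t);

   yields BUILT_IN_SQRT, while any mismatch between the argument types and
   the builtin's parameter list yields END_BUILTINS.  */
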
10822 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
10823    evaluate to a constant.  */
10824 
10825 static tree
10826 fold_builtin_constant_p (tree arg)
10827 {
10828   /* We return 1 for a numeric type that's known to be a constant
10829      value at compile-time or for an aggregate type that's a
10830      literal constant.  */
10831   STRIP_NOPS (arg);
10832 
10833   /* If we know this is a constant, return the constant one.  */
10834   if (CONSTANT_CLASS_P (arg)
10835       || (TREE_CODE (arg) == CONSTRUCTOR
10836 	  && TREE_CONSTANT (arg)))
10837     return integer_one_node;
10838   if (TREE_CODE (arg) == ADDR_EXPR)
10839     {
10840        tree op = TREE_OPERAND (arg, 0);
10841        if (TREE_CODE (op) == STRING_CST
10842 	   || (TREE_CODE (op) == ARRAY_REF
10843 	       && integer_zerop (TREE_OPERAND (op, 1))
10844 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10845 	 return integer_one_node;
10846     }
10847 
10848   /* If this expression has side effects, show we don't know it to be a
10849      constant.  Likewise if it's a pointer or aggregate type, since in
10850      those cases we only want literals, which are only optimized
10851      when generating RTL, not later.
10852      And finally, if we are compiling an initializer, not code, we
10853      need to return a definite result now; there's not going to be any
10854      more optimization done.  */
10855   if (TREE_SIDE_EFFECTS (arg)
10856       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10857       || POINTER_TYPE_P (TREE_TYPE (arg))
10858       || cfun == 0
10859       || folding_initializer
10860       || force_folding_builtin_constant_p)
10861     return integer_zero_node;
10862 
10863   return NULL_TREE;
10864 }
10865 
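/* Folding sketch, assuming the rules above:

     __builtin_constant_p (42)      -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")   -> 1  (address of a STRING_CST)
     __builtin_constant_p (f ())    -> 0  (side effects)
     __builtin_constant_p (x)       -> NULL_TREE, i.e. deferred, unless we
				       must answer now (initializers,
				       cfun == 0, or forced folding).  */
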
10866 /* Create a call to builtin_expect or builtin_expect_with_probability
10867    with PRED and EXPECTED as its arguments and return it as a truthvalue.
10868    The Fortran FE can also produce builtin_expect with PREDICTOR as its
10869    third argument; builtin_expect_with_probability instead uses its third
10870    argument as the PROBABILITY value.  */
10871 
10872 static tree
10873 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10874 				tree predictor, tree probability)
10875 {
10876   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10877 
10878   fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10879 			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
10880   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10881   ret_type = TREE_TYPE (TREE_TYPE (fn));
10882   pred_type = TREE_VALUE (arg_types);
10883   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10884 
10885   pred = fold_convert_loc (loc, pred_type, pred);
10886   expected = fold_convert_loc (loc, expected_type, expected);
10887 
10888   if (probability)
10889     call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10890   else
10891     call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10892 				     predictor);
10893 
10894   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10895 		 build_int_cst (ret_type, 0));
10896 }
10897 
10898 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
10899    NULL_TREE if no simplification is possible.  */
10900 
10901 tree
10902 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10903 		     tree arg3)
10904 {
10905   tree inner, fndecl, inner_arg0;
10906   enum tree_code code;
10907 
10908   /* Distribute the expected value over short-circuiting operators.
10909      See through the cast from truthvalue_type_node to long.  */
10910   inner_arg0 = arg0;
10911   while (CONVERT_EXPR_P (inner_arg0)
10912 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10913 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10914     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10915 
10916   /* If this is a builtin_expect within a builtin_expect keep the
10917      inner one.  See through a comparison against a constant.  It
10918      might have been added to create a truthvalue.  */
10919   inner = inner_arg0;
10920 
10921   if (COMPARISON_CLASS_P (inner)
10922       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10923     inner = TREE_OPERAND (inner, 0);
10924 
10925   if (TREE_CODE (inner) == CALL_EXPR
10926       && (fndecl = get_callee_fndecl (inner))
10927       && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10928 	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10929     return arg0;
10930 
10931   inner = inner_arg0;
10932   code = TREE_CODE (inner);
10933   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10934     {
10935       tree op0 = TREE_OPERAND (inner, 0);
10936       tree op1 = TREE_OPERAND (inner, 1);
10937       arg1 = save_expr (arg1);
10938 
10939       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10940       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10941       inner = build2 (code, TREE_TYPE (inner), op0, op1);
10942 
10943       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10944     }
10945 
10946   /* If the argument isn't invariant then there's nothing else we can do.  */
10947   if (!TREE_CONSTANT (inner_arg0))
10948     return NULL_TREE;
10949 
10950   /* If we expect that a comparison against the argument will fold to
10951      a constant return the constant.  In practice, this means a true
10952      constant or the address of a non-weak symbol.  */
10953   inner = inner_arg0;
10954   STRIP_NOPS (inner);
10955   if (TREE_CODE (inner) == ADDR_EXPR)
10956     {
10957       do
10958 	{
10959 	  inner = TREE_OPERAND (inner, 0);
10960 	}
10961       while (TREE_CODE (inner) == COMPONENT_REF
10962 	     || TREE_CODE (inner) == ARRAY_REF);
10963       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10964 	return NULL_TREE;
10965     }
10966 
10967   /* Otherwise, ARG0 already has the proper type for the return value.  */
10968   return arg0;
10969 }
10970 
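/* Distribution sketch (source-level view of the fold above):

     __builtin_expect (a && b, 1)

   is rewritten as if the user had written

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so the expectation reaches each short-circuited sub-predicate via
   build_builtin_expect_predicate.  */
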
10971 /* Fold a call to __builtin_classify_type with argument ARG.  */
10972 
10973 static tree
10974 fold_builtin_classify_type (tree arg)
10975 {
10976   if (arg == 0)
10977     return build_int_cst (integer_type_node, no_type_class);
10978 
10979   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10980 }
10981 
10982 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10983    ARG.  */
10984 
10985 static tree
10986 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10987 {
10988   if (!validate_arg (arg, POINTER_TYPE))
10989     return NULL_TREE;
10990   else
10991     {
10992       c_strlen_data lendata = { };
10993       tree len = c_strlen (arg, 0, &lendata);
10994 
10995       if (len)
10996 	return fold_convert_loc (loc, type, len);
10997 
10998       if (!lendata.decl)
10999 	c_strlen (arg, 1, &lendata);
11000 
11001       if (lendata.decl)
11002 	{
11003 	  if (EXPR_HAS_LOCATION (arg))
11004 	    loc = EXPR_LOCATION (arg);
11005 	  else if (loc == UNKNOWN_LOCATION)
11006 	    loc = input_location;
11007 	  warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
11008 	}
11009 
11010       return NULL_TREE;
11011     }
11012 }
11013 
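/* For example, a call such as

     size_t n = __builtin_strlen ("abcd");

   folds to the constant 4 through c_strlen, while an argument that is a
   character array with no terminating NUL is left unfolded and only
   prompts the warn_string_no_nul diagnostic above.  */
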
11014 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
11015 
11016 static tree
11017 fold_builtin_inf (location_t loc, tree type, int warn)
11018 {
11019   REAL_VALUE_TYPE real;
11020 
11021   /* __builtin_inff is intended to be usable to define INFINITY on all
11022      targets.  If an infinity is not available, INFINITY expands "to a
11023      positive constant of type float that overflows at translation
11024      time", footnote "In this case, using INFINITY will violate the
11025      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
11026      Thus we pedwarn to ensure this constraint violation is
11027      diagnosed.  */
11028   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
11029     pedwarn (loc, 0, "target format does not support infinity");
11030 
11031   real_inf (&real);
11032   return build_real (type, real);
11033 }
11034 
11035 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
11036    NULL_TREE if no simplification can be made.  */
11037 
11038 static tree
11039 fold_builtin_sincos (location_t loc,
11040 		     tree arg0, tree arg1, tree arg2)
11041 {
11042   tree type;
11043   tree fndecl, call = NULL_TREE;
11044 
11045   if (!validate_arg (arg0, REAL_TYPE)
11046       || !validate_arg (arg1, POINTER_TYPE)
11047       || !validate_arg (arg2, POINTER_TYPE))
11048     return NULL_TREE;
11049 
11050   type = TREE_TYPE (arg0);
11051 
11052   /* Find the cexpi variant matching the argument's type.  */
11053   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
11054   if (fn == END_BUILTINS)
11055     return NULL_TREE;
11056 
11057   /* Calculate the result when the argument is a constant.  */
11058   if (TREE_CODE (arg0) == REAL_CST)
11059     {
11060       tree complex_type = build_complex_type (type);
11061       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
11062     }
11063   if (!call)
11064     {
11065       if (!targetm.libc_has_function (function_c99_math_complex, type)
11066 	  || !builtin_decl_implicit_p (fn))
11067 	return NULL_TREE;
11068       fndecl = builtin_decl_explicit (fn);
11069       call = build_call_expr_loc (loc, fndecl, 1, arg0);
11070       call = builtin_save_expr (call);
11071     }
11072 
11073   tree ptype = build_pointer_type (type);
11074   arg1 = fold_convert (ptype, arg1);
11075   arg2 = fold_convert (ptype, arg2);
11076   return build2 (COMPOUND_EXPR, void_type_node,
11077 		 build2 (MODIFY_EXPR, void_type_node,
11078 			 build_fold_indirect_ref_loc (loc, arg1),
11079 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
11080 		 build2 (MODIFY_EXPR, void_type_node,
11081 			 build_fold_indirect_ref_loc (loc, arg2),
11082 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
11083 }
11084 
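/* Canonicalization sketch: with __c standing for the saved cexpi call
   (a hypothetical temporary),

     sincos (x, &s, &c);

   becomes approximately

     s = __imag__ __c, c = __real__ __c;

   so several sin/cos/sincos uses of the same operand can later share one
   cexpi computation.  */
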
11085 /* Fold a call to the builtin memcmp with arguments ARG1, ARG2 and LEN.
11086    Return NULL_TREE if no simplification can be made.  */
11087 
11088 static tree
11089 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11090 {
11091   if (!validate_arg (arg1, POINTER_TYPE)
11092       || !validate_arg (arg2, POINTER_TYPE)
11093       || !validate_arg (len, INTEGER_TYPE))
11094     return NULL_TREE;
11095 
11096   /* If the LEN parameter is zero, return zero.  */
11097   if (integer_zerop (len))
11098     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11099 			      arg1, arg2);
11100 
11101   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
11102   if (operand_equal_p (arg1, arg2, 0))
11103     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11104 
11105   /* If the LEN parameter is one, return an expression corresponding to
11106      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
11107   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11108     {
11109       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11110       tree cst_uchar_ptr_node
11111 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11112 
11113       tree ind1
11114 	= fold_convert_loc (loc, integer_type_node,
11115 			    build1 (INDIRECT_REF, cst_uchar_node,
11116 				    fold_convert_loc (loc,
11117 						      cst_uchar_ptr_node,
11118 						      arg1)));
11119       tree ind2
11120 	= fold_convert_loc (loc, integer_type_node,
11121 			    build1 (INDIRECT_REF, cst_uchar_node,
11122 				    fold_convert_loc (loc,
11123 						      cst_uchar_ptr_node,
11124 						      arg2)));
11125       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11126     }
11127 
11128   return NULL_TREE;
11129 }
11130 
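/* Resulting folds, schematically:

     memcmp (p, q, 0)  ->  0
     memcmp (p, p, n)  ->  0
     memcmp (p, q, 1)  ->  *(const unsigned char *) p
			   - *(const unsigned char *) q  */
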
11131 /* Fold a call to builtin isascii with argument ARG.  */
11132 
11133 static tree
11134 fold_builtin_isascii (location_t loc, tree arg)
11135 {
11136   if (!validate_arg (arg, INTEGER_TYPE))
11137     return NULL_TREE;
11138   else
11139     {
11140       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
11141       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11142 			 build_int_cst (integer_type_node,
11143 					~ (unsigned HOST_WIDE_INT) 0x7f));
11144       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11145 			      arg, integer_zero_node);
11146     }
11147 }
11148 
11149 /* Fold a call to builtin toascii with argument ARG.  */
11150 
11151 static tree
11152 fold_builtin_toascii (location_t loc, tree arg)
11153 {
11154   if (!validate_arg (arg, INTEGER_TYPE))
11155     return NULL_TREE;
11156 
11157   /* Transform toascii(c) -> (c & 0x7f).  */
11158   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11159 			  build_int_cst (integer_type_node, 0x7f));
11160 }
11161 
11162 /* Fold a call to builtin isdigit with argument ARG.  */
11163 
11164 static tree
11165 fold_builtin_isdigit (location_t loc, tree arg)
11166 {
11167   if (!validate_arg (arg, INTEGER_TYPE))
11168     return NULL_TREE;
11169   else
11170     {
11171       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
11172       /* According to the C standard, isdigit is unaffected by locale.
11173 	 However, it definitely is affected by the target character set.  */
11174       unsigned HOST_WIDE_INT target_digit0
11175 	= lang_hooks.to_target_charset ('0');
11176 
11177       if (target_digit0 == 0)
11178 	return NULL_TREE;
11179 
11180       arg = fold_convert_loc (loc, unsigned_type_node, arg);
11181       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11182 			 build_int_cst (unsigned_type_node, target_digit0));
11183       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11184 			  build_int_cst (unsigned_type_node, 9));
11185     }
11186 }
11187 
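/* E.g. assuming an execution character set where '0' is 0x30, the fold
   turns

     isdigit (c)

   into the single unsigned comparison

     (unsigned) c - 0x30 <= 9

   with no libc call and no locale dependence.  */
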
11188 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
11189 
11190 static tree
11191 fold_builtin_fabs (location_t loc, tree arg, tree type)
11192 {
11193   if (!validate_arg (arg, REAL_TYPE))
11194     return NULL_TREE;
11195 
11196   arg = fold_convert_loc (loc, type, arg);
11197   return fold_build1_loc (loc, ABS_EXPR, type, arg);
11198 }
11199 
11200 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
11201 
11202 static tree
11203 fold_builtin_abs (location_t loc, tree arg, tree type)
11204 {
11205   if (!validate_arg (arg, INTEGER_TYPE))
11206     return NULL_TREE;
11207 
11208   arg = fold_convert_loc (loc, type, arg);
11209   return fold_build1_loc (loc, ABS_EXPR, type, arg);
11210 }
11211 
11212 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
11213 
11214 static tree
11215 fold_builtin_carg (location_t loc, tree arg, tree type)
11216 {
11217   if (validate_arg (arg, COMPLEX_TYPE)
11218       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11219     {
11220       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11221 
11222       if (atan2_fn)
11223         {
11224   	  tree new_arg = builtin_save_expr (arg);
11225 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11226 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11227 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11228 	}
11229     }
11230 
11231   return NULL_TREE;
11232 }
11233 
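/* Sketch: for a complex double Z the fold produces

     carg (z)  ->  atan2 (__imag__ z, __real__ z)

   with Z wrapped by builtin_save_expr so both parts come from a single
   evaluation of the argument.  */
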
11234 /* Fold a call to builtin frexp; we can assume the base is 2.  */
11235 
11236 static tree
11237 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11238 {
11239   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11240     return NULL_TREE;
11241 
11242   STRIP_NOPS (arg0);
11243 
11244   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11245     return NULL_TREE;
11246 
11247   arg1 = build_fold_indirect_ref_loc (loc, arg1);
11248 
11249   /* Proceed if a valid pointer type was passed in.  */
11250   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11251     {
11252       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11253       tree frac, exp;
11254 
11255       switch (value->cl)
11256       {
11257       case rvc_zero:
11258 	/* For +-0, return (*exp = 0, +-0).  */
11259 	exp = integer_zero_node;
11260 	frac = arg0;
11261 	break;
11262       case rvc_nan:
11263       case rvc_inf:
11264 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
11265 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
11266       case rvc_normal:
11267 	{
11268 	  /* Since the frexp function always expects base 2, and in
11269 	     GCC normalized significands are already in the range
11270 	     [0.5, 1.0), we have exactly what frexp wants.  */
11271 	  REAL_VALUE_TYPE frac_rvt = *value;
11272 	  SET_REAL_EXP (&frac_rvt, 0);
11273 	  frac = build_real (rettype, frac_rvt);
11274 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
11275 	}
11276 	break;
11277       default:
11278 	gcc_unreachable ();
11279       }
11280 
11281       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
11282       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11283       TREE_SIDE_EFFECTS (arg1) = 1;
11284       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11285     }
11286 
11287   return NULL_TREE;
11288 }
11289 
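/* Constant-folding examples for IEEE double (8.0 == 0.5 * 2**4):

     frexp (8.0, &e)  ->  (*e = 4, 0.5)
     frexp (0.0, &e)  ->  (*e = 0, 0.0)
     frexp (NAN, &e)  ->  NAN, *e left unspecified  */
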
11290 /* Fold a call to builtin modf.  */
11291 
11292 static tree
11293 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11294 {
11295   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11296     return NULL_TREE;
11297 
11298   STRIP_NOPS (arg0);
11299 
11300   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11301     return NULL_TREE;
11302 
11303   arg1 = build_fold_indirect_ref_loc (loc, arg1);
11304 
11305   /* Proceed if a valid pointer type was passed in.  */
11306   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11307     {
11308       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11309       REAL_VALUE_TYPE trunc, frac;
11310 
11311       switch (value->cl)
11312       {
11313       case rvc_nan:
11314       case rvc_zero:
11315 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
11316 	trunc = frac = *value;
11317 	break;
11318       case rvc_inf:
11319 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
11320 	frac = dconst0;
11321 	frac.sign = value->sign;
11322 	trunc = *value;
11323 	break;
11324       case rvc_normal:
11325 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
11326 	real_trunc (&trunc, VOIDmode, value);
11327 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11328 	/* If the original number was negative and already
11329 	   integral, then the fractional part is -0.0.  */
11330 	if (value->sign && frac.cl == rvc_zero)
11331 	  frac.sign = value->sign;
11332 	break;
11333       }
11334 
11335       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11336       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11337 			  build_real (rettype, trunc));
11338       TREE_SIDE_EFFECTS (arg1) = 1;
11339       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11340 			  build_real (rettype, frac));
11341     }
11342 
11343   return NULL_TREE;
11344 }
11345 
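/* Constant-folding examples for IEEE double:

     modf (3.25, &i)      ->  (*i = 3.0, 0.25)
     modf (-2.0, &i)      ->  (*i = -2.0, -0.0)   (sign preserved)
     modf (INFINITY, &i)  ->  (*i = INFINITY, 0.0)  */
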
11346 /* Given a location LOC, an interclass builtin function decl FNDECL
11347    and its single argument ARG, return a folded expression computing
11348    the same, or NULL_TREE if we either couldn't or didn't want to fold
11349    (the latter happens if there's an RTL instruction available).  */
11350 
11351 static tree
11352 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11353 {
11354   machine_mode mode;
11355 
11356   if (!validate_arg (arg, REAL_TYPE))
11357     return NULL_TREE;
11358 
11359   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11360     return NULL_TREE;
11361 
11362   mode = TYPE_MODE (TREE_TYPE (arg));
11363 
11364   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11365 
11366   /* If there is no optab, try generic code.  */
11367   switch (DECL_FUNCTION_CODE (fndecl))
11368     {
11369       tree result;
11370 
11371     CASE_FLT_FN (BUILT_IN_ISINF):
11372       {
11373 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
11374 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11375 	tree type = TREE_TYPE (arg);
11376 	REAL_VALUE_TYPE r;
11377 	char buf[128];
11378 
11379 	if (is_ibm_extended)
11380 	  {
11381 	    /* NaN and Inf are encoded in the high-order double value
11382 	       only.  The low-order value is not significant.  */
11383 	    type = double_type_node;
11384 	    mode = DFmode;
11385 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11386 	  }
11387 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11388 	real_from_string (&r, buf);
11389 	result = build_call_expr (isgr_fn, 2,
11390 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
11391 				  build_real (type, r));
11392 	return result;
11393       }
11394     CASE_FLT_FN (BUILT_IN_FINITE):
11395     case BUILT_IN_ISFINITE:
11396       {
11397 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
11398 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11399 	tree type = TREE_TYPE (arg);
11400 	REAL_VALUE_TYPE r;
11401 	char buf[128];
11402 
11403 	if (is_ibm_extended)
11404 	  {
11405 	    /* NaN and Inf are encoded in the high-order double value
11406 	       only.  The low-order value is not significant.  */
11407 	    type = double_type_node;
11408 	    mode = DFmode;
11409 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11410 	  }
11411 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11412 	real_from_string (&r, buf);
11413 	result = build_call_expr (isle_fn, 2,
11414 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
11415 				  build_real (type, r));
11416 	/*result = fold_build2_loc (loc, UNGT_EXPR,
11417 				  TREE_TYPE (TREE_TYPE (fndecl)),
11418 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
11419 				  build_real (type, r));
11420 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11421 				  TREE_TYPE (TREE_TYPE (fndecl)),
11422 				  result);*/
11423 	return result;
11424       }
11425     case BUILT_IN_ISNORMAL:
11426       {
11427 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11428 	   islessequal(fabs(x),DBL_MAX).  */
11429 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11430 	tree type = TREE_TYPE (arg);
11431 	tree orig_arg, max_exp, min_exp;
11432 	machine_mode orig_mode = mode;
11433 	REAL_VALUE_TYPE rmax, rmin;
11434 	char buf[128];
11435 
11436 	orig_arg = arg = builtin_save_expr (arg);
11437 	if (is_ibm_extended)
11438 	  {
11439 	    /* Use double to test the normal range of IBM extended
11440 	       precision.  Emin for IBM extended precision is
11441 	       different to emin for IEEE double, being 53 higher
11442 	       since the low double exponent is at least 53 lower
11443 	       than the high double exponent.  */
11444 	    type = double_type_node;
11445 	    mode = DFmode;
11446 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11447 	  }
11448 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11449 
11450 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11451 	real_from_string (&rmax, buf);
11452 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11453 	real_from_string (&rmin, buf);
11454 	max_exp = build_real (type, rmax);
11455 	min_exp = build_real (type, rmin);
11456 
11457 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11458 	if (is_ibm_extended)
11459 	  {
11460 	    /* Testing the high end of the range is done just using
11461 	       the high double, using the same test as isfinite().
11462 	       For the subnormal end of the range we first test the
11463 	       high double, then if its magnitude is equal to the
11464 	       limit of 0x1p-969, we test whether the low double is
11465 	       non-zero and opposite sign to the high double.  */
11466 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11467 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11468 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11469 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11470 				       arg, min_exp);
11471 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
11472 				      complex_double_type_node, orig_arg);
11473 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11474 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11475 	    tree zero = build_real (type, dconst0);
11476 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11477 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11478 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11479 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11480 				      fold_build3 (COND_EXPR,
11481 						   integer_type_node,
11482 						   hilt, logt, lolt));
11483 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11484 				  eq_min, ok_lo);
11485 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11486 				   gt_min, eq_min);
11487 	  }
11488 	else
11489 	  {
11490 	    tree const isge_fn
11491 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11492 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11493 	  }
11494 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11495 			      max_exp, min_exp);
11496 	return result;
11497       }
11498     default:
11499       break;
11500     }
11501 
11502   return NULL_TREE;
11503 }
11504 
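/* Generic-code sketch: with no usable optab, isfinite for IEEE double is
   rewritten as

     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)

   where the DBL_MAX constant comes from get_max_float; a NaN operand
   makes the quiet islessequal comparison false, giving 0 without raising
   spurious exceptions.  */
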
11505 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11506    ARG is the argument for the call.  */
11507 
11508 static tree
11509 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11510 {
11511   tree type = TREE_TYPE (TREE_TYPE (fndecl));
11512 
11513   if (!validate_arg (arg, REAL_TYPE))
11514     return NULL_TREE;
11515 
11516   switch (builtin_index)
11517     {
11518     case BUILT_IN_ISINF:
11519       if (tree_expr_infinite_p (arg))
11520 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
11521       if (!tree_expr_maybe_infinite_p (arg))
11522 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11523       return NULL_TREE;
11524 
11525     case BUILT_IN_ISINF_SIGN:
11526       {
11527 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11528 	/* In a boolean context, GCC will fold the inner COND_EXPR to
11529 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
11530 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11531 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11532 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11533 	tree tmp = NULL_TREE;
11534 
11535 	arg = builtin_save_expr (arg);
11536 
11537 	if (signbit_fn && isinf_fn)
11538 	  {
11539 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11540 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11541 
11542 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11543 					signbit_call, integer_zero_node);
11544 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11545 				      isinf_call, integer_zero_node);
11546 
11547 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11548 			       integer_minus_one_node, integer_one_node);
11549 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11550 			       isinf_call, tmp,
11551 			       integer_zero_node);
11552 	  }
11553 
11554 	return tmp;
11555       }
11556 
11557     case BUILT_IN_ISFINITE:
11558       if (tree_expr_finite_p (arg))
11559 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
11560       if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11561 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11562       return NULL_TREE;
11563 
11564     case BUILT_IN_ISNAN:
11565       if (tree_expr_nan_p (arg))
11566 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
11567       if (!tree_expr_maybe_nan_p (arg))
11568 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11569 
11570       {
11571 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11572 	if (is_ibm_extended)
11573 	  {
11574 	    /* NaN and Inf are encoded in the high-order double value
11575 	       only.  The low-order value is not significant.  */
11576 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11577 	  }
11578       }
11579       arg = builtin_save_expr (arg);
11580       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11581 
11582     default:
11583       gcc_unreachable ();
11584     }
11585 }
11586 
11587 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11588    This builtin will generate code to return the appropriate floating
11589    point classification depending on the value of the floating point
11590    number passed in.  The possible return values must be supplied as
11591    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11592    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis stands for exactly
11593    one floating point argument which is "type generic".  */
11594 
11595 static tree
11596 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11597 {
11598   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11599     arg, type, res, tmp;
11600   machine_mode mode;
11601   REAL_VALUE_TYPE r;
11602   char buf[128];
11603 
11604   /* Verify the required arguments in the original call.  */
11605   if (nargs != 6
11606       || !validate_arg (args[0], INTEGER_TYPE)
11607       || !validate_arg (args[1], INTEGER_TYPE)
11608       || !validate_arg (args[2], INTEGER_TYPE)
11609       || !validate_arg (args[3], INTEGER_TYPE)
11610       || !validate_arg (args[4], INTEGER_TYPE)
11611       || !validate_arg (args[5], REAL_TYPE))
11612     return NULL_TREE;
11613 
11614   fp_nan = args[0];
11615   fp_infinite = args[1];
11616   fp_normal = args[2];
11617   fp_subnormal = args[3];
11618   fp_zero = args[4];
11619   arg = args[5];
11620   type = TREE_TYPE (arg);
11621   mode = TYPE_MODE (type);
11622   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11623 
11624   /* fpclassify(x) ->
11625        isnan(x) ? FP_NAN :
11626          (fabs(x) == Inf ? FP_INFINITE :
11627 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
11628 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
11629 
11630   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11631 		     build_real (type, dconst0));
11632   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11633 		     tmp, fp_zero, fp_subnormal);
11634 
11635   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11636   real_from_string (&r, buf);
11637   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11638 		     arg, build_real (type, r));
11639   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11640 
11641   if (tree_expr_maybe_infinite_p (arg))
11642     {
11643       real_inf (&r);
11644       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11645 			 build_real (type, r));
11646       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11647 			 fp_infinite, res);
11648     }
11649 
11650   if (tree_expr_maybe_nan_p (arg))
11651     {
11652       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11653       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11654     }
11655 
11656   return res;
11657 }
11658 
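/* Note on the threshold used above: for IEEE double, emin is -1021 in
   GCC's significand convention, so the normal/subnormal boundary is

     0x1p-1022 == DBL_MIN == 2.2250738585072014e-308

   and the GE_EXPR test on the saved fabs (x) is exactly the FP_NORMAL
   check from the schematic comment inside the function.  */
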
11659 /* Fold a call to an unordered comparison function such as
11660    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
11661    being called and ARG0 and ARG1 are the arguments for the call.
11662    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11663    the opposite of the desired result.  UNORDERED_CODE is used
11664    for modes that can hold NaNs and ORDERED_CODE is used for
11665    the rest.  */
11666 
11667 static tree
11668 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11669 			    enum tree_code unordered_code,
11670 			    enum tree_code ordered_code)
11671 {
11672   tree type = TREE_TYPE (TREE_TYPE (fndecl));
11673   enum tree_code code;
11674   tree type0, type1;
11675   enum tree_code code0, code1;
11676   tree cmp_type = NULL_TREE;
11677 
11678   type0 = TREE_TYPE (arg0);
11679   type1 = TREE_TYPE (arg1);
11680 
11681   code0 = TREE_CODE (type0);
11682   code1 = TREE_CODE (type1);
11683 
11684   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11685     /* Choose the wider of two real types.  */
11686     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11687       ? type0 : type1;
11688   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11689     cmp_type = type0;
11690   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11691     cmp_type = type1;
11692 
11693   arg0 = fold_convert_loc (loc, cmp_type, arg0);
11694   arg1 = fold_convert_loc (loc, cmp_type, arg1);
11695 
11696   if (unordered_code == UNORDERED_EXPR)
11697     {
11698       if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11699 	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11700       if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11701 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11702       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11703     }
11704 
11705   code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11706 	 ? unordered_code : ordered_code;
11707   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11708 		      fold_build2_loc (loc, code, type, arg0, arg1));
11709 }
11710 
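/* E.g. isgreater (x, y) is folded to

     !(x UNLE y)

   when either operand may be a NaN (the unordered case makes UNLE_EXPR
   true, so the negation yields 0 as required), and to the plain

     !(x <= y)

   when neither operand can be a NaN.  */
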
11711 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11712    arithmetic if it can never overflow, or into internal functions that
11713    return both the result of the arithmetic and an overflowed boolean
11714    flag in a complex integer result, or some other check for overflow.
11715    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11716    checking part of that.  */
11717 
11718 static tree
11719 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11720 			     tree arg0, tree arg1, tree arg2)
11721 {
11722   enum internal_fn ifn = IFN_LAST;
11723   /* The code of the expression corresponding to the built-in.  */
11724   enum tree_code opcode = ERROR_MARK;
11725   bool ovf_only = false;
11726 
11727   switch (fcode)
11728     {
11729     case BUILT_IN_ADD_OVERFLOW_P:
11730       ovf_only = true;
11731       /* FALLTHRU */
11732     case BUILT_IN_ADD_OVERFLOW:
11733     case BUILT_IN_SADD_OVERFLOW:
11734     case BUILT_IN_SADDL_OVERFLOW:
11735     case BUILT_IN_SADDLL_OVERFLOW:
11736     case BUILT_IN_UADD_OVERFLOW:
11737     case BUILT_IN_UADDL_OVERFLOW:
11738     case BUILT_IN_UADDLL_OVERFLOW:
11739       opcode = PLUS_EXPR;
11740       ifn = IFN_ADD_OVERFLOW;
11741       break;
11742     case BUILT_IN_SUB_OVERFLOW_P:
11743       ovf_only = true;
11744       /* FALLTHRU */
11745     case BUILT_IN_SUB_OVERFLOW:
11746     case BUILT_IN_SSUB_OVERFLOW:
11747     case BUILT_IN_SSUBL_OVERFLOW:
11748     case BUILT_IN_SSUBLL_OVERFLOW:
11749     case BUILT_IN_USUB_OVERFLOW:
11750     case BUILT_IN_USUBL_OVERFLOW:
11751     case BUILT_IN_USUBLL_OVERFLOW:
11752       opcode = MINUS_EXPR;
11753       ifn = IFN_SUB_OVERFLOW;
11754       break;
11755     case BUILT_IN_MUL_OVERFLOW_P:
11756       ovf_only = true;
11757       /* FALLTHRU */
11758     case BUILT_IN_MUL_OVERFLOW:
11759     case BUILT_IN_SMUL_OVERFLOW:
11760     case BUILT_IN_SMULL_OVERFLOW:
11761     case BUILT_IN_SMULLL_OVERFLOW:
11762     case BUILT_IN_UMUL_OVERFLOW:
11763     case BUILT_IN_UMULL_OVERFLOW:
11764     case BUILT_IN_UMULLL_OVERFLOW:
11765       opcode = MULT_EXPR;
11766       ifn = IFN_MUL_OVERFLOW;
11767       break;
11768     default:
11769       gcc_unreachable ();
11770     }
11771 
11772   /* For the "generic" overloads, the first two arguments can have different
11773      types and the last argument determines the target type to use to check
11774      for overflow.  The arguments of the other overloads all have the same
11775      type.  */
11776   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
11777 
11778   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
11779      arguments are constant, attempt to fold the built-in call into a constant
11780      expression indicating whether or not it detected an overflow.  */
11781   if (ovf_only
11782       && TREE_CODE (arg0) == INTEGER_CST
11783       && TREE_CODE (arg1) == INTEGER_CST)
11784     /* Perform the computation in the target type and check for overflow.  */
11785     return omit_one_operand_loc (loc, boolean_type_node,
11786 				 arith_overflowed_p (opcode, type, arg0, arg1)
11787 				 ? boolean_true_node : boolean_false_node,
11788 				 arg2);
11789 
11790   tree intres, ovfres;
11791   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
11792     {
11793       intres = fold_binary_loc (loc, opcode, type,
11794 				fold_convert_loc (loc, type, arg0),
11795 				fold_convert_loc (loc, type, arg1));
11796       if (TREE_OVERFLOW (intres))
11797 	intres = drop_tree_overflow (intres);
11798       ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
11799 		? boolean_true_node : boolean_false_node);
11800     }
11801   else
11802     {
11803       tree ctype = build_complex_type (type);
11804       tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
11805 						arg0, arg1);
11806       tree tgt = save_expr (call);
11807       intres = build1_loc (loc, REALPART_EXPR, type, tgt);
11808       ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
11809       ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
11810     }
11811 
11812   if (ovf_only)
11813     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
11814 
11815   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
11816   tree store
11817     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
11818   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
11819 }
11820 
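/* Source-level sketch of the non-constant case (hypothetical names):

     bool ovf = __builtin_add_overflow (a, b, &res);

   is folded to roughly

     __complex__ t = .ADD_OVERFLOW (a, b);
     res = __real__ t, ovf = (_Bool) __imag__ t;

   while __builtin_add_overflow_p with two INTEGER_CST operands folds
   straight to true/false via arith_overflowed_p.  */
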
11821 /* Fold a call to __builtin_FILE to a constant string.  */
11822 
11823 static inline tree
11824 fold_builtin_FILE (location_t loc)
11825 {
11826   if (const char *fname = LOCATION_FILE (loc))
11827     {
11828       /* The documentation says this builtin is equivalent to the preprocessor
11829 	 __FILE__ macro so it appears appropriate to use the same file prefix
11830 	 mappings.  */
11831       fname = remap_macro_filename (fname);
11832       return build_string_literal (strlen (fname) + 1, fname);
11833     }
11834 
11835   return build_string_literal (1, "");
11836 }
11837 
11838 /* Fold a call to __builtin_FUNCTION to a constant string.  */
11839 
11840 static inline tree
11841 fold_builtin_FUNCTION ()
11842 {
11843   const char *name = "";
11844 
11845   if (current_function_decl)
11846     name = lang_hooks.decl_printable_name (current_function_decl, 0);
11847 
11848   return build_string_literal (strlen (name) + 1, name);
11849 }
11850 
11851 /* Fold a call to __builtin_LINE to an integer constant.  */
11852 
11853 static inline tree
11854 fold_builtin_LINE (location_t loc, tree type)
11855 {
11856   return build_int_cst (type, LOCATION_LINE (loc));
11857 }
11858 
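/* Usage sketch: in a file "t.c" these all fold to constants taken from
   the expansion location:

     const char *f = __builtin_FILE ();      -> "t.c" (after remapping)
     const char *fn = __builtin_FUNCTION (); -> enclosing function name,
						"" at file scope
     int l = __builtin_LINE ();              -> the source line number  */
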
11859 /* Fold a call to built-in function FNDECL with 0 arguments.
11860    This function returns NULL_TREE if no simplification was possible.  */
11861 
11862 static tree
11863 fold_builtin_0 (location_t loc, tree fndecl)
11864 {
11865   tree type = TREE_TYPE (TREE_TYPE (fndecl));
11866   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11867   switch (fcode)
11868     {
11869     case BUILT_IN_FILE:
11870       return fold_builtin_FILE (loc);
11871 
11872     case BUILT_IN_FUNCTION:
11873       return fold_builtin_FUNCTION ();
11874 
11875     case BUILT_IN_LINE:
11876       return fold_builtin_LINE (loc, type);
11877 
11878     CASE_FLT_FN (BUILT_IN_INF):
11879     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11880     case BUILT_IN_INFD32:
11881     case BUILT_IN_INFD64:
11882     case BUILT_IN_INFD128:
11883       return fold_builtin_inf (loc, type, true);
11884 
11885     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11886     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11887       return fold_builtin_inf (loc, type, false);
11888 
11889     case BUILT_IN_CLASSIFY_TYPE:
11890       return fold_builtin_classify_type (NULL_TREE);
11891 
11892     default:
11893       break;
11894     }
11895   return NULL_TREE;
11896 }
11897 
11898 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11899    This function returns NULL_TREE if no simplification was possible.  */
11900 
11901 static tree
11902 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11903 {
11904   tree type = TREE_TYPE (TREE_TYPE (fndecl));
11905   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11906 
11907   if (TREE_CODE (arg0) == ERROR_MARK)
11908     return NULL_TREE;
11909 
11910   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11911     return ret;
11912 
11913   switch (fcode)
11914     {
11915     case BUILT_IN_CONSTANT_P:
11916       {
11917 	tree val = fold_builtin_constant_p (arg0);
11918 
11919 	/* Gimplification will pull the CALL_EXPR for the builtin out of
11920 	   an if condition.  When not optimizing, we'll not CSE it back.
11921 	   To avoid link-error regressions, return false now.  */
11922 	if (!val && !optimize)
11923 	  val = integer_zero_node;
11924 
11925 	return val;
11926       }
11927 
11928     case BUILT_IN_CLASSIFY_TYPE:
11929       return fold_builtin_classify_type (arg0);
11930 
11931     case BUILT_IN_STRLEN:
11932       return fold_builtin_strlen (loc, expr, type, arg0);
11933 
11934     CASE_FLT_FN (BUILT_IN_FABS):
11935     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11936     case BUILT_IN_FABSD32:
11937     case BUILT_IN_FABSD64:
11938     case BUILT_IN_FABSD128:
11939       return fold_builtin_fabs (loc, arg0, type);
11940 
11941     case BUILT_IN_ABS:
11942     case BUILT_IN_LABS:
11943     case BUILT_IN_LLABS:
11944     case BUILT_IN_IMAXABS:
11945       return fold_builtin_abs (loc, arg0, type);
11946 
11947     CASE_FLT_FN (BUILT_IN_CONJ):
11948       if (validate_arg (arg0, COMPLEX_TYPE)
11949 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11950 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11951     break;
11952 
11953     CASE_FLT_FN (BUILT_IN_CREAL):
11954       if (validate_arg (arg0, COMPLEX_TYPE)
11955 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11956 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11957     break;
11958 
11959     CASE_FLT_FN (BUILT_IN_CIMAG):
11960       if (validate_arg (arg0, COMPLEX_TYPE)
11961 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11962 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11963     break;
11964 
11965     CASE_FLT_FN (BUILT_IN_CARG):
11966       return fold_builtin_carg (loc, arg0, type);
11967 
11968     case BUILT_IN_ISASCII:
11969       return fold_builtin_isascii (loc, arg0);
11970 
11971     case BUILT_IN_TOASCII:
11972       return fold_builtin_toascii (loc, arg0);
11973 
11974     case BUILT_IN_ISDIGIT:
11975       return fold_builtin_isdigit (loc, arg0);
11976 
11977     CASE_FLT_FN (BUILT_IN_FINITE):
11978     case BUILT_IN_FINITED32:
11979     case BUILT_IN_FINITED64:
11980     case BUILT_IN_FINITED128:
11981     case BUILT_IN_ISFINITE:
11982       {
11983 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11984 	if (ret)
11985 	  return ret;
11986 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11987       }
11988 
11989     CASE_FLT_FN (BUILT_IN_ISINF):
11990     case BUILT_IN_ISINFD32:
11991     case BUILT_IN_ISINFD64:
11992     case BUILT_IN_ISINFD128:
11993       {
11994 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11995 	if (ret)
11996 	  return ret;
11997 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11998       }
11999 
12000     case BUILT_IN_ISNORMAL:
12001       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12002 
12003     case BUILT_IN_ISINF_SIGN:
12004       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
12005 
12006     CASE_FLT_FN (BUILT_IN_ISNAN):
12007     case BUILT_IN_ISNAND32:
12008     case BUILT_IN_ISNAND64:
12009     case BUILT_IN_ISNAND128:
12010       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
12011 
12012     case BUILT_IN_FREE:
12013       if (integer_zerop (arg0))
12014 	return build_empty_stmt (loc);
12015       break;
12016 
12017     default:
12018       break;
12019     }
12020 
12021   return NULL_TREE;
12023 }
12024 
12025 /* Folds a call EXPR (which may be null) to built-in function FNDECL
12026    with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
12027    if no simplification was possible.  */
12028 
12029 static tree
12030 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
12031 {
12032   tree type = TREE_TYPE (TREE_TYPE (fndecl));
12033   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12034 
12035   if (TREE_CODE (arg0) == ERROR_MARK
12036       || TREE_CODE (arg1) == ERROR_MARK)
12037     return NULL_TREE;
12038 
12039   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
12040     return ret;
12041 
12042   switch (fcode)
12043     {
12044     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
12045     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
12046       if (validate_arg (arg0, REAL_TYPE)
12047 	  && validate_arg (arg1, POINTER_TYPE))
12048 	return do_mpfr_lgamma_r (arg0, arg1, type);
12049     break;
12050 
12051     CASE_FLT_FN (BUILT_IN_FREXP):
12052       return fold_builtin_frexp (loc, arg0, arg1, type);
12053 
12054     CASE_FLT_FN (BUILT_IN_MODF):
12055       return fold_builtin_modf (loc, arg0, arg1, type);
12056 
12057     case BUILT_IN_STRSPN:
12058       return fold_builtin_strspn (loc, expr, arg0, arg1);
12059 
12060     case BUILT_IN_STRCSPN:
12061       return fold_builtin_strcspn (loc, expr, arg0, arg1);
12062 
12063     case BUILT_IN_STRPBRK:
12064       return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
12065 
12066     case BUILT_IN_EXPECT:
12067       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
12068 
12069     case BUILT_IN_ISGREATER:
12070       return fold_builtin_unordered_cmp (loc, fndecl,
12071 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
12072     case BUILT_IN_ISGREATEREQUAL:
12073       return fold_builtin_unordered_cmp (loc, fndecl,
12074 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
12075     case BUILT_IN_ISLESS:
12076       return fold_builtin_unordered_cmp (loc, fndecl,
12077 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
12078     case BUILT_IN_ISLESSEQUAL:
12079       return fold_builtin_unordered_cmp (loc, fndecl,
12080 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
12081     case BUILT_IN_ISLESSGREATER:
12082       return fold_builtin_unordered_cmp (loc, fndecl,
12083 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
12084     case BUILT_IN_ISUNORDERED:
12085       return fold_builtin_unordered_cmp (loc, fndecl,
12086 					 arg0, arg1, UNORDERED_EXPR,
12087 					 NOP_EXPR);
12088 
12089       /* We do the folding for va_start in the expander.  */
12090     case BUILT_IN_VA_START:
12091       break;
12092 
12093     case BUILT_IN_OBJECT_SIZE:
12094       return fold_builtin_object_size (arg0, arg1);
12095 
12096     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12097       return fold_builtin_atomic_always_lock_free (arg0, arg1);
12098 
12099     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12100       return fold_builtin_atomic_is_lock_free (arg0, arg1);
12101 
12102     default:
12103       break;
12104     }
12105   return NULL_TREE;
12106 }
12107 
12108 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12109    and ARG2.
12110    This function returns NULL_TREE if no simplification was possible.  */
12111 
12112 static tree
12113 fold_builtin_3 (location_t loc, tree fndecl,
12114 		tree arg0, tree arg1, tree arg2)
12115 {
12116   tree type = TREE_TYPE (TREE_TYPE (fndecl));
12117   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12118 
12119   if (TREE_CODE (arg0) == ERROR_MARK
12120       || TREE_CODE (arg1) == ERROR_MARK
12121       || TREE_CODE (arg2) == ERROR_MARK)
12122     return NULL_TREE;
12123 
12124   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12125 				  arg0, arg1, arg2))
12126     return ret;
12127 
12128   switch (fcode)
12129     {
12130 
12131     CASE_FLT_FN (BUILT_IN_SINCOS):
12132       return fold_builtin_sincos (loc, arg0, arg1, arg2);
12133 
12134     CASE_FLT_FN (BUILT_IN_REMQUO):
12135       if (validate_arg (arg0, REAL_TYPE)
12136 	  && validate_arg (arg1, REAL_TYPE)
12137 	  && validate_arg (arg2, POINTER_TYPE))
12138 	return do_mpfr_remquo (arg0, arg1, arg2);
12139     break;
12140 
12141     case BUILT_IN_MEMCMP:
12142       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12143 
12144     case BUILT_IN_EXPECT:
12145       return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12146 
12147     case BUILT_IN_EXPECT_WITH_PROBABILITY:
12148       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12149 
12150     case BUILT_IN_ADD_OVERFLOW:
12151     case BUILT_IN_SUB_OVERFLOW:
12152     case BUILT_IN_MUL_OVERFLOW:
12153     case BUILT_IN_ADD_OVERFLOW_P:
12154     case BUILT_IN_SUB_OVERFLOW_P:
12155     case BUILT_IN_MUL_OVERFLOW_P:
12156     case BUILT_IN_SADD_OVERFLOW:
12157     case BUILT_IN_SADDL_OVERFLOW:
12158     case BUILT_IN_SADDLL_OVERFLOW:
12159     case BUILT_IN_SSUB_OVERFLOW:
12160     case BUILT_IN_SSUBL_OVERFLOW:
12161     case BUILT_IN_SSUBLL_OVERFLOW:
12162     case BUILT_IN_SMUL_OVERFLOW:
12163     case BUILT_IN_SMULL_OVERFLOW:
12164     case BUILT_IN_SMULLL_OVERFLOW:
12165     case BUILT_IN_UADD_OVERFLOW:
12166     case BUILT_IN_UADDL_OVERFLOW:
12167     case BUILT_IN_UADDLL_OVERFLOW:
12168     case BUILT_IN_USUB_OVERFLOW:
12169     case BUILT_IN_USUBL_OVERFLOW:
12170     case BUILT_IN_USUBLL_OVERFLOW:
12171     case BUILT_IN_UMUL_OVERFLOW:
12172     case BUILT_IN_UMULL_OVERFLOW:
12173     case BUILT_IN_UMULLL_OVERFLOW:
12174       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12175 
12176     default:
12177       break;
12178     }
12179   return NULL_TREE;
12180 }
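
/* Illustrative only: the overflow builtins folded above have the
   user-level form

     int r;
     if (__builtin_add_overflow (a, b, &r))  // true iff a + b overflowed
       handle_overflow ();                   // hypothetical handler

   and the _P variants (__builtin_add_overflow_p etc.) test for
   overflow without storing a result.  */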
12181 
12182 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12183    ARGS is an array of NARGS arguments.  IGNORE is true if the result
12184    of the function call is ignored.  This function returns NULL_TREE
12185    if no simplification was possible.  */
12186 
12187 static tree
12188 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12189 		int nargs, bool)
12190 {
12191   tree ret = NULL_TREE;
12192 
12193   switch (nargs)
12194     {
12195     case 0:
12196       ret = fold_builtin_0 (loc, fndecl);
12197       break;
12198     case 1:
12199       ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12200       break;
12201     case 2:
12202       ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12203       break;
12204     case 3:
12205       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12206       break;
12207     default:
12208       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12209       break;
12210     }
12211   if (ret)
12212     {
12213       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12214       SET_EXPR_LOCATION (ret, loc);
12215       return ret;
12216     }
12217   return NULL_TREE;
12218 }
12219 
12220 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12221    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
12222    of arguments in ARGS to be omitted.  OLDNARGS is the number of
12223    elements in ARGS.  */
12224 
12225 static tree
12226 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12227 			  int skip, tree fndecl, int n, va_list newargs)
12228 {
12229   int nargs = oldnargs - skip + n;
12230   tree *buffer;
12231 
12232   if (n > 0)
12233     {
12234       int i, j;
12235 
12236       buffer = XALLOCAVEC (tree, nargs);
12237       for (i = 0; i < n; i++)
12238 	buffer[i] = va_arg (newargs, tree);
12239       for (j = skip; j < oldnargs; j++, i++)
12240 	buffer[i] = args[j];
12241     }
12242   else
12243     buffer = args + skip;
12244 
12245   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12246 }
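
/* Illustrative: with OLDNARGS == 3, SKIP == 1 and N == 1 the buffer
   built above holds the one new argument followed by the two
   surviving originals, i.e. a call foo (a, b, c) is rewritten as
   bar (x, b, c).  */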
12247 
12248 /* Return true if FNDECL shouldn't be folded right now.
12249    If a built-in function has an inline attribute always_inline
12250    wrapper, defer folding it after always_inline functions have
12251    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12252    might not be performed.  */
12253 
12254 bool
12255 avoid_folding_inline_builtin (tree fndecl)
12256 {
12257   return (DECL_DECLARED_INLINE_P (fndecl)
12258 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12259 	  && cfun
12260 	  && !cfun->always_inline_functions_inlined
12261 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12262 }
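
/* Illustrative (a hedged sketch of a fortified wrapper, not a
   verbatim glibc declaration):

     extern inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding the strcpy call before this wrapper is inlined would bypass
   the object-size check, which is why folding is deferred above.  */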
12263 
12264 /* A wrapper function for builtin folding that prevents warnings for
12265    "statement without effect" and the like, caused by removing the
12266    call node earlier than the warning is generated.  */
12267 
12268 tree
12269 fold_call_expr (location_t loc, tree exp, bool ignore)
12270 {
12271   tree ret = NULL_TREE;
12272   tree fndecl = get_callee_fndecl (exp);
12273   if (fndecl && fndecl_built_in_p (fndecl)
12274       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12275 	 yet.  Defer folding until we see all the arguments
12276 	 (after inlining).  */
12277       && !CALL_EXPR_VA_ARG_PACK (exp))
12278     {
12279       int nargs = call_expr_nargs (exp);
12280 
12281       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12282 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
12283 	 even in that case, until arguments are finalized.  */
12284       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12285 	{
12286 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12287 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12288 	    return NULL_TREE;
12289 	}
12290 
12291       if (avoid_folding_inline_builtin (fndecl))
12292 	return NULL_TREE;
12293 
12294       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12295         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12296 				     CALL_EXPR_ARGP (exp), ignore);
12297       else
12298 	{
12299 	  tree *args = CALL_EXPR_ARGP (exp);
12300 	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12301 	  if (ret)
12302 	    return ret;
12303 	}
12304     }
12305   return NULL_TREE;
12306 }
12307 
12308 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12309    N arguments are passed in the array ARGARRAY.  Return a folded
12310    expression or NULL_TREE if no simplification was possible.  */
12311 
12312 tree
12313 fold_builtin_call_array (location_t loc, tree,
12314 			 tree fn,
12315 			 int n,
12316 			 tree *argarray)
12317 {
12318   if (TREE_CODE (fn) != ADDR_EXPR)
12319     return NULL_TREE;
12320 
12321   tree fndecl = TREE_OPERAND (fn, 0);
12322   if (TREE_CODE (fndecl) == FUNCTION_DECL
12323       && fndecl_built_in_p (fndecl))
12324     {
12325       /* If last argument is __builtin_va_arg_pack (), arguments to this
12326 	 function are not finalized yet.  Defer folding until they are.  */
12327       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12328 	{
12329 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12330 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12331 	    return NULL_TREE;
12332 	}
12333       if (avoid_folding_inline_builtin (fndecl))
12334 	return NULL_TREE;
12335       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12336 	return targetm.fold_builtin (fndecl, n, argarray, false);
12337       else
12338 	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12339     }
12340 
12341   return NULL_TREE;
12342 }
12343 
12344 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12345    along with N new arguments specified as the "..." parameters.  SKIP
12346    is the number of arguments in EXP to be omitted.  This function is used
12347    to do varargs-to-varargs transformations.  */
12348 
12349 static tree
12350 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12351 {
12352   va_list ap;
12353   tree t;
12354 
12355   va_start (ap, n);
12356   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12357 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12358   va_end (ap);
12359 
12360   return t;
12361 }
12362 
12363 /* Validate a single argument ARG against a tree code CODE representing
12364    a type.  Return true when the argument is valid.  */
12365 
12366 static bool
12367 validate_arg (const_tree arg, enum tree_code code)
12368 {
12369   if (!arg)
12370     return false;
12371   else if (code == POINTER_TYPE)
12372     return POINTER_TYPE_P (TREE_TYPE (arg));
12373   else if (code == INTEGER_TYPE)
12374     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12375   return code == TREE_CODE (TREE_TYPE (arg));
12376 }
12377 
12378 /* This function validates the types of a function call argument list
12379    against a specified list of tree_codes.  If the last specifier is a 0,
12380    that represents an ellipsis, otherwise the last specifier must be a
12381    VOID_TYPE.
12382 
12383    This is the GIMPLE version of validate_arglist.  Eventually we want to
12384    completely convert builtins.c to work from GIMPLEs and the tree based
12385    validate_arglist will then be removed.  */
12386 
12387 bool
12388 validate_gimple_arglist (const gcall *call, ...)
12389 {
12390   enum tree_code code;
12391   bool res = false;
12392   va_list ap;
12393   const_tree arg;
12394   size_t i;
12395 
12396   va_start (ap, call);
12397   i = 0;
12398 
12399   do
12400     {
12401       code = (enum tree_code) va_arg (ap, int);
12402       switch (code)
12403 	{
12404 	case 0:
12405 	  /* This signifies an ellipsis; any further arguments are all OK.  */
12406 	  res = true;
12407 	  goto end;
12408 	case VOID_TYPE:
12409 	  /* This signifies an endlink; if no arguments remain, return
12410 	     true, otherwise return false.  */
12411 	  res = (i == gimple_call_num_args (call));
12412 	  goto end;
12413 	default:
12414 	  /* If no parameters remain or the parameter's code does not
12415 	     match the specified code, return false.  Otherwise continue
12416 	     checking any remaining arguments.  */
12417 	  arg = gimple_call_arg (call, i++);
12418 	  if (!validate_arg (arg, code))
12419 	    goto end;
12420 	  break;
12421 	}
12422     }
12423   while (1);
12424 
12425   /* We need gotos here since we can only have one VA_CLOSE in a
12426      function.  */
12427  end: ;
12428   va_end (ap);
12429 
12430   return res;
12431 }
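
/* Illustrative use from a hypothetical caller:

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   VOID_TYPE))
       return false;   // require exactly (pointer, integer)

   Passing a trailing 0 instead of VOID_TYPE accepts any further
   arguments, mirroring a "..." prototype.  */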
12432 
12433 /* Default target-specific builtin expander that does nothing.  */
12434 
12435 rtx
12436 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12437 			rtx target ATTRIBUTE_UNUSED,
12438 			rtx subtarget ATTRIBUTE_UNUSED,
12439 			machine_mode mode ATTRIBUTE_UNUSED,
12440 			int ignore ATTRIBUTE_UNUSED)
12441 {
12442   return NULL_RTX;
12443 }
12444 
12445 /* Return true if EXP represents data that would potentially reside
12446    in a readonly section.  */
12447 
12448 bool
12449 readonly_data_expr (tree exp)
12450 {
12451   STRIP_NOPS (exp);
12452 
12453   if (TREE_CODE (exp) != ADDR_EXPR)
12454     return false;
12455 
12456   exp = get_base_address (TREE_OPERAND (exp, 0));
12457   if (!exp)
12458     return false;
12459 
12460   /* Make sure we call decl_readonly_section only for trees it
12461      can handle (since it returns true for everything it doesn't
12462      understand).  */
12463   if (TREE_CODE (exp) == STRING_CST
12464       || TREE_CODE (exp) == CONSTRUCTOR
12465       || (VAR_P (exp) && TREE_STATIC (exp)))
12466     return decl_readonly_section (exp, 0);
12467   else
12468     return false;
12469 }
12470 
12471 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
12472    to the call, and TYPE is its return type.
12473 
12474    Return NULL_TREE if no simplification was possible, otherwise return the
12475    simplified form of the call as a tree.
12476 
12477    The simplified form may be a constant or other expression which
12478    computes the same value, but in a more efficient manner (including
12479    calls to other builtin functions).
12480 
12481    The call may contain arguments which need to be evaluated, but
12482    which are not useful to determine the result of the call.  In
12483    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
12484    COMPOUND_EXPR will be an argument which must be evaluated.
12485    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
12486    COMPOUND_EXPR in the chain will contain the tree for the simplified
12487    form of the builtin function call.  */
12488 
12489 static tree
12490 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12491 {
12492   if (!validate_arg (s1, POINTER_TYPE)
12493       || !validate_arg (s2, POINTER_TYPE))
12494     return NULL_TREE;
12495 
12496   tree fn;
12497   const char *p1, *p2;
12498 
12499   p2 = c_getstr (s2);
12500   if (p2 == NULL)
12501     return NULL_TREE;
12502 
12503   p1 = c_getstr (s1);
12504   if (p1 != NULL)
12505     {
12506       const char *r = strpbrk (p1, p2);
12507       tree tem;
12508 
12509       if (r == NULL)
12510 	return build_int_cst (TREE_TYPE (s1), 0);
12511 
12512       /* Return an offset into the constant string argument.  */
12513       tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12514       return fold_convert_loc (loc, type, tem);
12515     }
12516 
12517   if (p2[0] == '\0')
12518     /* strpbrk(x, "") == NULL.
12519        Evaluate and ignore s1 in case it had side-effects.  */
12520     return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12521 
12522   if (p2[1] != '\0')
12523     return NULL_TREE;  /* Really call strpbrk.  */
12524 
12525   fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12526   if (!fn)
12527     return NULL_TREE;
12528 
12529   /* New argument list transforming strpbrk(s1, s2) to
12530      strchr(s1, s2[0]).  */
12531   return build_call_expr_loc (loc, fn, 2, s1,
12532 			      build_int_cst (integer_type_node, p2[0]));
12533 }
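
/* Illustrative summary of the strpbrk transformations above:

     strpbrk ("hello", "lo")  =>  "hello" + 2       constant fold
     strpbrk (s, "")          =>  NULL, with s still evaluated
     strpbrk (s, "w")         =>  strchr (s, 'w')   single-char set  */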
12534 
12535 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
12536    to the call.
12537 
12538    Return NULL_TREE if no simplification was possible, otherwise return the
12539    simplified form of the call as a tree.
12540 
12541    The simplified form may be a constant or other expression which
12542    computes the same value, but in a more efficient manner (including
12543    calls to other builtin functions).
12544 
12545    The call may contain arguments which need to be evaluated, but
12546    which are not useful to determine the result of the call.  In
12547    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
12548    COMPOUND_EXPR will be an argument which must be evaluated.
12549    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
12550    COMPOUND_EXPR in the chain will contain the tree for the simplified
12551    form of the builtin function call.  */
12552 
12553 static tree
12554 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12555 {
12556   if (!validate_arg (s1, POINTER_TYPE)
12557       || !validate_arg (s2, POINTER_TYPE))
12558     return NULL_TREE;
12559 
12560   if (!check_nul_terminated_array (expr, s1)
12561       || !check_nul_terminated_array (expr, s2))
12562     return NULL_TREE;
12563 
12564   const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12565 
12566   /* If either argument is "", the result is zero.  */
12567   if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12568     /* Evaluate and ignore both arguments in case either one has
12569        side-effects.  */
12570     return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12571 				  s1, s2);
12572   return NULL_TREE;
12573 }
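
/* Illustrative: strspn ("", s2) and strspn (s1, "") both fold to
   (size_t) 0 while still evaluating the arguments, so a call such as
   strspn (f (), "") keeps the call to f ().  */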
12574 
12575 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
12576    to the call.
12577 
12578    Return NULL_TREE if no simplification was possible, otherwise return the
12579    simplified form of the call as a tree.
12580 
12581    The simplified form may be a constant or other expression which
12582    computes the same value, but in a more efficient manner (including
12583    calls to other builtin functions).
12584 
12585    The call may contain arguments which need to be evaluated, but
12586    which are not useful to determine the result of the call.  In
12587    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
12588    COMPOUND_EXPR will be an argument which must be evaluated.
12589    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
12590    COMPOUND_EXPR in the chain will contain the tree for the simplified
12591    form of the builtin function call.  */
12592 
12593 static tree
12594 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12595 {
12596   if (!validate_arg (s1, POINTER_TYPE)
12597       || !validate_arg (s2, POINTER_TYPE))
12598     return NULL_TREE;
12599 
12600   if (!check_nul_terminated_array (expr, s1)
12601       || !check_nul_terminated_array (expr, s2))
12602     return NULL_TREE;
12603 
12604   /* If the first argument is "", the result is zero.  */
12605   const char *p1 = c_getstr (s1);
12606   if (p1 && *p1 == '\0')
12607     {
12608       /* Evaluate and ignore argument s2 in case it has
12609 	 side-effects.  */
12610       return omit_one_operand_loc (loc, size_type_node,
12611 				   size_zero_node, s2);
12612     }
12613 
12614   /* If the second argument is "", return __builtin_strlen(s1).  */
12615   const char *p2 = c_getstr (s2);
12616   if (p2 && *p2 == '\0')
12617     {
12618       tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12619 
12620       /* If the replacement _DECL isn't initialized, don't do the
12621 	 transformation.  */
12622       if (!fn)
12623 	return NULL_TREE;
12624 
12625       return build_call_expr_loc (loc, fn, 1, s1);
12626     }
12627   return NULL_TREE;
12628 }
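
/* Illustrative summary of the strcspn transformations above:

     strcspn ("", s2)  =>  (size_t) 0, with s2 still evaluated
     strcspn (s1, "")  =>  strlen (s1)  */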
12629 
12630 /* Fold the next_arg or va_start call EXP.  Return true if an error was
12631    produced, false otherwise.  This is done so that we don't output the
12632    error or warning two or three times.  */
12633 
12634 bool
12635 fold_builtin_next_arg (tree exp, bool va_start_p)
12636 {
12637   tree fntype = TREE_TYPE (current_function_decl);
12638   int nargs = call_expr_nargs (exp);
12639   tree arg;
12640   /* There is a good chance the current input_location points inside the
12641      definition of the va_start macro (perhaps on the token for
12642      builtin) in a system header, so warnings will not be emitted.
12643      Use the location in real source code.  */
12644   location_t current_location =
12645     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12646 					      NULL);
12647 
12648   if (!stdarg_p (fntype))
12649     {
12650       error ("%<va_start%> used in function with fixed arguments");
12651       return true;
12652     }
12653 
12654   if (va_start_p)
12655     {
12656       if (nargs != 2)
12657 	{
12658 	  error ("wrong number of arguments to function %<va_start%>");
12659 	  return true;
12660 	}
12661       arg = CALL_EXPR_ARG (exp, 1);
12662     }
12663   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12664      once we have checked the arguments and, if needed, issued a warning.  */
12665   else
12666     {
12667       if (nargs == 0)
12668 	{
12669 	  /* Evidently an out of date version of <stdarg.h>; can't validate
12670 	     va_start's second argument, but can still work as intended.  */
12671 	  warning_at (current_location,
12672 		      OPT_Wvarargs,
12673 		   "%<__builtin_next_arg%> called without an argument");
12674 	  return true;
12675 	}
12676       else if (nargs > 1)
12677 	{
12678 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
12679 	  return true;
12680 	}
12681       arg = CALL_EXPR_ARG (exp, 0);
12682     }
12683 
12684   if (TREE_CODE (arg) == SSA_NAME
12685       && SSA_NAME_VAR (arg))
12686     arg = SSA_NAME_VAR (arg);
12687 
12688   /* We destructively modify the call to be __builtin_va_start (ap, 0)
12689      or __builtin_next_arg (0) the first time we see it, after checking
12690      the arguments and if needed issuing a warning.  */
12691   if (!integer_zerop (arg))
12692     {
12693       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12694 
12695       /* Strip off all nops for the sake of the comparison.  This
12696 	 is not quite the same as STRIP_NOPS.  It does more.
12697 	 We must also strip off INDIRECT_EXPR for C++ reference
12698 	 parameters.  */
12699       while (CONVERT_EXPR_P (arg)
12700 	     || TREE_CODE (arg) == INDIRECT_REF)
12701 	arg = TREE_OPERAND (arg, 0);
12702       if (arg != last_parm)
12703 	{
12704 	  /* FIXME: Sometimes the tree optimizers can give us something
12705 	     other than the last argument even though the user wrote the
12706 	     last one.  We just warn and set the arg to be the last
12707 	     argument so that we will not generate wrong code because
12708 	     of it.  */
12709 	  warning_at (current_location,
12710 		      OPT_Wvarargs,
12711 		      "second parameter of %<va_start%> not last named argument");
12712 	}
12713 
12714       /* Undefined by C99 7.15.1.4p4 (va_start):
12715          "If the parameter parmN is declared with the register storage
12716          class, with a function or array type, or with a type that is
12717          not compatible with the type that results after application of
12718          the default argument promotions, the behavior is undefined."
12719       */
12720       else if (DECL_REGISTER (arg))
12721 	{
12722 	  warning_at (current_location,
12723 		      OPT_Wvarargs,
12724 		      "undefined behavior when second parameter of "
12725 		      "%<va_start%> is declared with %<register%> storage");
12726 	}
12727 
12728       /* We want to verify the second parameter just once before the tree
12729 	 optimizers are run and then avoid keeping it in the tree,
12730 	 as otherwise we could warn even for correct code like:
12731 	 void foo (int i, ...)
12732 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
12733       if (va_start_p)
12734 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12735       else
12736 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12737     }
12738   return false;
12739 }
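
/* Illustrative: the checks above accept a correct use such as

     void f (int last, ...)
     { va_list ap; va_start (ap, last); va_end (ap); }

   zeroing the second argument afterwards, and diagnose under
   -Wvarargs e.g. va_start (ap, not_last) or a parmN declared with
   register storage.  */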
12740 
12741 
12742 /* Expand a call EXP to __builtin_object_size.  */
12743 
12744 static rtx
12745 expand_builtin_object_size (tree exp)
12746 {
12747   tree ost;
12748   int object_size_type;
12749   tree fndecl = get_callee_fndecl (exp);
12750 
12751   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12752     {
12753       error ("%Kfirst argument of %qD must be a pointer, second integer constant",
12754 	     exp, fndecl);
12755       expand_builtin_trap ();
12756       return const0_rtx;
12757     }
12758 
12759   ost = CALL_EXPR_ARG (exp, 1);
12760   STRIP_NOPS (ost);
12761 
12762   if (TREE_CODE (ost) != INTEGER_CST
12763       || tree_int_cst_sgn (ost) < 0
12764       || compare_tree_int (ost, 3) > 0)
12765     {
12766       error ("%Klast argument of %qD is not integer constant between 0 and 3",
12767 	     exp, fndecl);
12768       expand_builtin_trap ();
12769       return const0_rtx;
12770     }
12771 
12772   object_size_type = tree_to_shwi (ost);
12773 
12774   return object_size_type < 2 ? constm1_rtx : const0_rtx;
12775 }
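
/* Illustrative: at this point no more size information can appear,
   so the expansion above returns the documented "unknown" values:

     __builtin_object_size (p, 0)  =>  (size_t) -1   types 0 and 1
     __builtin_object_size (p, 2)  =>  (size_t) 0    types 2 and 3  */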
12776 
12777 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12778    FCODE is the BUILT_IN_* to use.
12779    Return NULL_RTX if we failed; the caller should emit a normal call,
12780    otherwise try to get the result in TARGET, if convenient (and in
12781    mode MODE if that's convenient).  */
12782 
12783 static rtx
12784 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
12785 			   enum built_in_function fcode)
12786 {
12787   if (!validate_arglist (exp,
12788 			 POINTER_TYPE,
12789 			 fcode == BUILT_IN_MEMSET_CHK
12790 			 ? INTEGER_TYPE : POINTER_TYPE,
12791 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12792     return NULL_RTX;
12793 
12794   tree dest = CALL_EXPR_ARG (exp, 0);
12795   tree src = CALL_EXPR_ARG (exp, 1);
12796   tree len = CALL_EXPR_ARG (exp, 2);
12797   tree size = CALL_EXPR_ARG (exp, 3);
12798 
12799   /* FIXME: Set access mode to write only for memset et al.  */
12800   bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12801 				/*srcstr=*/NULL_TREE, size, access_read_write);
12802 
12803   if (!tree_fits_uhwi_p (size))
12804     return NULL_RTX;
12805 
12806   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12807     {
12808       /* Avoid transforming the checking call to an ordinary one when
12809 	 an overflow has been detected or when the call couldn't be
12810 	 validated because the size is not constant.  */
12811       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12812 	return NULL_RTX;
12813 
12814       tree fn = NULL_TREE;
12815       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12816 	 mem{cpy,pcpy,move,set} is available.  */
12817       switch (fcode)
12818 	{
12819 	case BUILT_IN_MEMCPY_CHK:
12820 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12821 	  break;
12822 	case BUILT_IN_MEMPCPY_CHK:
12823 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12824 	  break;
12825 	case BUILT_IN_MEMMOVE_CHK:
12826 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12827 	  break;
12828 	case BUILT_IN_MEMSET_CHK:
12829 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12830 	  break;
12831 	default:
12832 	  break;
12833 	}
12834 
12835       if (! fn)
12836 	return NULL_RTX;
12837 
12838       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12839       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12840       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12841       return expand_expr (fn, target, mode, EXPAND_NORMAL);
12842     }
12843   else if (fcode == BUILT_IN_MEMSET_CHK)
12844     return NULL_RTX;
12845   else
12846     {
12847       unsigned int dest_align = get_pointer_alignment (dest);
12848 
12849       /* If DEST is not a pointer type, call the normal function.  */
12850       if (dest_align == 0)
12851 	return NULL_RTX;
12852 
12853       /* If SRC and DEST are the same (and not volatile), do nothing.  */
12854       if (operand_equal_p (src, dest, 0))
12855 	{
12856 	  tree expr;
12857 
12858 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
12859 	    {
12860 	      /* Evaluate and ignore LEN in case it has side-effects.  */
12861 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12862 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
12863 	    }
12864 
12865 	  expr = fold_build_pointer_plus (dest, len);
12866 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
12867 	}
12868 
12869       /* __memmove_chk special case.  */
12870       if (fcode == BUILT_IN_MEMMOVE_CHK)
12871 	{
12872 	  unsigned int src_align = get_pointer_alignment (src);
12873 
12874 	  if (src_align == 0)
12875 	    return NULL_RTX;
12876 
12877 	  /* If src is categorized for a readonly section we can use
12878 	     normal __memcpy_chk.  */
12879 	  if (readonly_data_expr (src))
12880 	    {
12881 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12882 	      if (!fn)
12883 		return NULL_RTX;
12884 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12885 					  dest, src, len, size);
12886 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12887 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12888 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
12889 	    }
12890 	}
12891       return NULL_RTX;
12892     }
12893 }
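
/* Illustrative: with a known-constant length that fits the
   destination, a checking call such as

     __builtin___memcpy_chk (d, s, 8, __builtin_object_size (d, 0))

   is expanded as a plain memcpy (d, s, 8); a detected overflow keeps
   the _chk form so the runtime check and diagnostics still fire.  */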
12894 
12895 /* Emit warning if a buffer overflow is detected at compile time.  */
12896 
12897 static void
12898 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12899 {
12900   /* The source string.  */
12901   tree srcstr = NULL_TREE;
12902   /* The size of the destination object returned by __builtin_object_size.  */
12903   tree objsize = NULL_TREE;
12904   /* The string that is being concatenated with (as in __strcat_chk)
12905      or null if it isn't.  */
12906   tree catstr = NULL_TREE;
12907   /* The maximum length of the source sequence in a bounded operation
12908      (such as __strncat_chk) or null if the operation isn't bounded
12909      (such as __strcat_chk).  */
12910   tree maxread = NULL_TREE;
12911   /* The exact size of the access (such as in __strncpy_chk).  */
12912   tree size = NULL_TREE;
12913   /* The access by the function that's checked.  Except for snprintf
12914      both writing and reading are checked.  */
12915   access_mode mode = access_read_write;
12916 
12917   switch (fcode)
12918     {
12919     case BUILT_IN_STRCPY_CHK:
12920     case BUILT_IN_STPCPY_CHK:
12921       srcstr = CALL_EXPR_ARG (exp, 1);
12922       objsize = CALL_EXPR_ARG (exp, 2);
12923       break;
12924 
12925     case BUILT_IN_STRCAT_CHK:
12926       /* For __strcat_chk the warning will be emitted only if overflowing
12927 	 by at least strlen (dest) + 1 bytes.  */
12928       catstr = CALL_EXPR_ARG (exp, 0);
12929       srcstr = CALL_EXPR_ARG (exp, 1);
12930       objsize = CALL_EXPR_ARG (exp, 2);
12931       break;
12932 
12933     case BUILT_IN_STRNCAT_CHK:
12934       catstr = CALL_EXPR_ARG (exp, 0);
12935       srcstr = CALL_EXPR_ARG (exp, 1);
12936       maxread = CALL_EXPR_ARG (exp, 2);
12937       objsize = CALL_EXPR_ARG (exp, 3);
12938       break;
12939 
12940     case BUILT_IN_STRNCPY_CHK:
12941     case BUILT_IN_STPNCPY_CHK:
12942       srcstr = CALL_EXPR_ARG (exp, 1);
12943       size = CALL_EXPR_ARG (exp, 2);
12944       objsize = CALL_EXPR_ARG (exp, 3);
12945       break;
12946 
12947     case BUILT_IN_SNPRINTF_CHK:
12948     case BUILT_IN_VSNPRINTF_CHK:
12949       maxread = CALL_EXPR_ARG (exp, 1);
12950       objsize = CALL_EXPR_ARG (exp, 3);
12951       /* The only checked access is the write to the destination.  */
12952       mode = access_write_only;
12953       break;
12954     default:
12955       gcc_unreachable ();
12956     }
12957 
12958   if (catstr && maxread)
12959     {
12960       /* Check __strncat_chk.  There is no way to determine the length
12961 	 of the string to which the source string is being appended so
12962 	 just warn when the length of the source string is not known.  */
12963       check_strncat_sizes (exp, objsize);
12964       return;
12965     }
12966 
12967   check_access (exp, size, maxread, srcstr, objsize, mode);
12968 }
12969 
12970 /* Emit warning if a buffer overflow is detected at compile time
12971    in __sprintf_chk/__vsprintf_chk calls.  */
12972 
12973 static void
12974 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12975 {
12976   tree size, len, fmt;
12977   const char *fmt_str;
12978   int nargs = call_expr_nargs (exp);
12979 
12980   /* Verify the required arguments in the original call.  */
12981 
12982   if (nargs < 4)
12983     return;
12984   size = CALL_EXPR_ARG (exp, 2);
12985   fmt = CALL_EXPR_ARG (exp, 3);
12986 
12987   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12988     return;
12989 
12990   /* Check whether the format is a literal string constant.  */
12991   fmt_str = c_getstr (fmt);
12992   if (fmt_str == NULL)
12993     return;
12994 
12995   if (!init_target_chars ())
12996     return;
12997 
12998   /* If the format doesn't contain % args or %%, we know its size.  */
12999   if (strchr (fmt_str, target_percent) == 0)
13000     len = build_int_cstu (size_type_node, strlen (fmt_str));
13001   /* If the format is "%s" and first ... argument is a string literal,
13002      we know it too.  */
13003   else if (fcode == BUILT_IN_SPRINTF_CHK
13004 	   && strcmp (fmt_str, target_percent_s) == 0)
13005     {
13006       tree arg;
13007 
13008       if (nargs < 5)
13009 	return;
13010       arg = CALL_EXPR_ARG (exp, 4);
13011       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
13012 	return;
13013 
13014       len = c_strlen (arg, 1);
13015       if (!len || ! tree_fits_uhwi_p (len))
13016 	return;
13017     }
13018   else
13019     return;
13020 
13021   /* Add one for the terminating nul.  */
13022   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
13023 
13024   check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
13025 		access_write_only);
13026 }
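
/* Illustrative: for

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			      "%s", "abcd");

   the analysis above computes LEN == 5 including the terminating nul
   and check_access diagnoses the overflow of the 4-byte object.  */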
13027 
13028 /* Return true if FNDECL is the declaration of an allocation function.
13029    Unless ALL_ALLOC is set, consider only functions that return
13030    dynamically allocated objects.  Otherwise return true even for all
13031    forms of alloca (including VLA).  */
13032 
13033 static bool
13034 fndecl_alloc_p (tree fndecl, bool all_alloc)
13035 {
13036   if (!fndecl)
13037     return false;
13038 
13039   /* A call to operator new isn't recognized as one to a built-in.  */
13040   if (DECL_IS_OPERATOR_NEW_P (fndecl))
13041     return true;
13042 
13043   if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13044     {
13045       switch (DECL_FUNCTION_CODE (fndecl))
13046 	{
13047 	case BUILT_IN_ALLOCA:
13048 	case BUILT_IN_ALLOCA_WITH_ALIGN:
13049 	  return all_alloc;
13050 	case BUILT_IN_ALIGNED_ALLOC:
13051 	case BUILT_IN_CALLOC:
13052 	case BUILT_IN_GOMP_ALLOC:
13053 	case BUILT_IN_MALLOC:
13054 	case BUILT_IN_REALLOC:
13055 	case BUILT_IN_STRDUP:
13056 	case BUILT_IN_STRNDUP:
13057 	  return true;
13058 	default:
13059 	  break;
13060 	}
13061     }
13062 
13063   /* A function is considered an allocation function if it's declared
13064      with attribute malloc with an argument naming its associated
13065      deallocation function.  */
13066   tree attrs = DECL_ATTRIBUTES (fndecl);
13067   if (!attrs)
13068     return false;
13069 
13070   for (tree allocs = attrs;
13071        (allocs = lookup_attribute ("malloc", allocs));
13072        allocs = TREE_CHAIN (allocs))
13073     {
13074       tree args = TREE_VALUE (allocs);
13075       if (!args)
13076 	continue;
13077 
13078       if (TREE_VALUE (args))
13079 	return true;
13080     }
13081 
13082   return false;
13083 }
13084 
13085 /* Return true if STMT is a call to an allocation function.  A wrapper
13086    around fndecl_alloc_p.  */
13087 
13088 static bool
13089 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13090 {
13091   return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13092 }
13093 
13094 /* Return the zero-based number corresponding to the argument being
13095    deallocated if EXP is a call to a deallocation function or UINT_MAX
13096    if it isn't.  */
13097 
13098 static unsigned
13099 call_dealloc_argno (tree exp)
13100 {
13101   tree fndecl = get_callee_fndecl (exp);
13102   if (!fndecl)
13103     return UINT_MAX;
13104 
13105   return fndecl_dealloc_argno (fndecl);
13106 }
13107 
13108 /* Return the zero-based number corresponding to the argument being
13109    deallocated if FNDECL is a deallocation function or UINT_MAX
13110    if it isn't.  */
13111 
13112 unsigned
13113 fndecl_dealloc_argno (tree fndecl)
13114 {
13115   /* A call to operator delete isn't recognized as one to a built-in.  */
13116   if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13117     {
13118       if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
13119 	return 0;
13120 
13121       /* Avoid placement delete that hasn't been inlined.  */
13122       tree fname = DECL_ASSEMBLER_NAME (fndecl);
13123       if (id_equal (fname, "_ZdlPvS_")       // ordinary form
13124 	  || id_equal (fname, "_ZdaPvS_"))   // array form
13125 	return UINT_MAX;
13126       return 0;
13127     }
13128 
13129   /* TODO: Handle user-defined functions with attribute malloc?  Handle
13130      known non-built-ins like fopen?  */
13131   if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13132     {
13133       switch (DECL_FUNCTION_CODE (fndecl))
13134 	{
13135 	case BUILT_IN_FREE:
13136 	case BUILT_IN_REALLOC:
13137 	  return 0;
13138 	default:
13139 	  break;
13140 	}
13141       return UINT_MAX;
13142     }
13143 
13144   tree attrs = DECL_ATTRIBUTES (fndecl);
13145   if (!attrs)
13146     return UINT_MAX;
13147 
13148   for (tree atfree = attrs;
13149        (atfree = lookup_attribute ("*dealloc", atfree));
13150        atfree = TREE_CHAIN (atfree))
13151     {
13152       tree alloc = TREE_VALUE (atfree);
13153       if (!alloc)
13154 	continue;
13155 
13156       tree pos = TREE_CHAIN (alloc);
13157       if (!pos)
13158 	return 0;
13159 
13160       pos = TREE_VALUE (pos);
13161       return TREE_INT_CST_LOW (pos) - 1;
13162     }
13163 
13164   return UINT_MAX;
13165 }
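
/* Illustrative (hypothetical names): given

     void  my_free (int pool, void *ptr);
     void *my_alloc (size_t)
       __attribute__ ((malloc, malloc (my_free, 2)));

   the 1-based position 2 recorded for my_free is mapped by the loop
   above to the zero-based argument number 1.  */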
13166 
13167 /* Return true if DELC doesn't refer to an operator delete that's
13168    suitable to call with a pointer returned from the operator new
13169    described by NEWC.  */
13170 
13171 static bool
13172 new_delete_mismatch_p (const demangle_component &newc,
13173 		       const demangle_component &delc)
13174 {
13175   if (newc.type != delc.type)
13176     return true;
13177 
13178   switch (newc.type)
13179     {
13180     case DEMANGLE_COMPONENT_NAME:
13181       {
13182 	int len = newc.u.s_name.len;
13183 	const char *news = newc.u.s_name.s;
13184 	const char *dels = delc.u.s_name.s;
13185 	if (len != delc.u.s_name.len || memcmp (news, dels, len))
13186 	  return true;
13187 
13188 	if (news[len] == 'n')
13189 	  {
13190 	    if (news[len + 1] == 'a')
13191 	      return dels[len] != 'd' || dels[len + 1] != 'a';
13192 	    if (news[len + 1] == 'w')
13193 	      return dels[len] != 'd' || dels[len + 1] != 'l';
13194 	  }
13195 	return false;
13196       }
13197 
13198     case DEMANGLE_COMPONENT_OPERATOR:
13199       /* Operator mismatches are handled above.  */
13200       return false;
13201 
13202     case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13203       if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13204 	return true;
13205       return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13206 				    *delc.u.s_extended_operator.name);
13207 
13208     case DEMANGLE_COMPONENT_FIXED_TYPE:
13209       if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13210 	  || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13211 	return true;
13212       return new_delete_mismatch_p (*newc.u.s_fixed.length,
13213 				    *delc.u.s_fixed.length);
13214 
13215     case DEMANGLE_COMPONENT_CTOR:
13216       if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13217 	return true;
13218       return new_delete_mismatch_p (*newc.u.s_ctor.name,
13219 				    *delc.u.s_ctor.name);
13220 
13221     case DEMANGLE_COMPONENT_DTOR:
13222       if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13223 	return true;
13224       return new_delete_mismatch_p (*newc.u.s_dtor.name,
13225 				    *delc.u.s_dtor.name);
13226 
13227     case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13228       {
13229 	/* The demangler API provides no better way to compare built-in
13230 	   types than by comparing their demangled names.  */
13231 	size_t nsz, dsz;
13232 	demangle_component *pnc = const_cast<demangle_component *>(&newc);
13233 	demangle_component *pdc = const_cast<demangle_component *>(&delc);
13234 	char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13235 	char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13236 	if (!nts != !dts)
13237 	  return true;
13238 	bool mismatch = nts && strcmp (nts, dts) != 0;  /* Both null => match.  */
13239 	free (nts);
13240 	free (dts);
13241 	return mismatch;
13242       }
13243 
13244     case DEMANGLE_COMPONENT_SUB_STD:
13245       if (newc.u.s_string.len != delc.u.s_string.len)
13246 	return true;
13247       return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13248 		     newc.u.s_string.len);
13249 
13250     case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13251     case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13252       return newc.u.s_number.number != delc.u.s_number.number;
13253 
13254     case DEMANGLE_COMPONENT_CHARACTER:
13255       return newc.u.s_character.character != delc.u.s_character.character;
13256 
13257     case DEMANGLE_COMPONENT_DEFAULT_ARG:
13258     case DEMANGLE_COMPONENT_LAMBDA:
13259       if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13260 	return true;
13261       return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13262 				    *delc.u.s_unary_num.sub);
13263     default:
13264       break;
13265     }
13266 
13267   if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13268     return true;
13269 
13270   if (!newc.u.s_binary.left)
13271     return false;
13272 
13273   if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13274       || !newc.u.s_binary.right != !delc.u.s_binary.right)
13275     return true;
13276 
13277   if (newc.u.s_binary.right)
13278     return new_delete_mismatch_p (*newc.u.s_binary.right,
13279 				  *delc.u.s_binary.right);
13280   return false;
13281 }
13282 
13283 /* Return true if DELETE_DECL is an operator delete that's not suitable
13284    to call with a pointer returned from NEW_DECL.  */
13285 
13286 static bool
13287 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13288 {
13289   tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13290   tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13291 
13292   /* valid_new_delete_pair_p() returns a conservative result (currently
13293      it only handles global operators).  A true result is reliable but
13294      a false result doesn't necessarily mean the operators don't match.  */
13295   if (valid_new_delete_pair_p (new_name, delete_name))
13296     return false;
13297 
13298   /* For anything not handled by valid_new_delete_pair_p() such as member
13299      operators compare the individual demangled components of the mangled
13300      name.  */
13301   const char *new_str = IDENTIFIER_POINTER (new_name);
13302   const char *del_str = IDENTIFIER_POINTER (delete_name);
13303 
13304   void *np = NULL, *dp = NULL;
13305   demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13306   demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13307   bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13308   free (np);
13309   free (dp);
13310   return mismatch;
13311 }
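
/* Illustrative: the mismatch detected here is what drives e.g.

     int *p = new int[4];
     delete p;            // -Wmismatched-new-delete

   Member operators that valid_new_delete_pair_p cannot decide are
   compared component-wise through the demangler as above.  */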
13312 
13313 /* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation
13314    functions.  Return true if the latter is suitable to deallocate objects
13315    allocated by calls to the former.  */
13316 
13317 static bool
13318 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13319 {
13320   /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13321      a built-in deallocator.  */
13322   enum class alloc_kind_t { none, builtin, user }
13323   alloc_dealloc_kind = alloc_kind_t::none;
13324 
13325   if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13326     {
13327       if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13328 	/* Return true iff both functions are of the same array or
13329 	   singleton form and false otherwise.  */
13330 	return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13331 
13332       /* Return false for deallocation functions that are known not
13333 	 to match.  */
13334       if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13335 	  || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13336 	return false;
13337       /* Otherwise proceed below to check the deallocation function's
13338 	 "*dealloc" attributes to look for one that mentions this operator
13339 	 new.  */
13340     }
13341   else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13342     {
13343       switch (DECL_FUNCTION_CODE (alloc_decl))
13344 	{
13345 	case BUILT_IN_ALLOCA:
13346 	case BUILT_IN_ALLOCA_WITH_ALIGN:
13347 	  return false;
13348 
13349 	case BUILT_IN_ALIGNED_ALLOC:
13350 	case BUILT_IN_CALLOC:
13351 	case BUILT_IN_GOMP_ALLOC:
13352 	case BUILT_IN_MALLOC:
13353 	case BUILT_IN_REALLOC:
13354 	case BUILT_IN_STRDUP:
13355 	case BUILT_IN_STRNDUP:
13356 	  if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13357 	    return false;
13358 
13359 	  if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13360 	      || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13361 	    return true;
13362 
13363 	  alloc_dealloc_kind = alloc_kind_t::builtin;
13364 	  break;
13365 
13366 	default:
13367 	  break;
13368 	}
13369     }
13370 
13371   /* Set if DEALLOC_DECL both allocates and deallocates.  */
13372   alloc_kind_t realloc_kind = alloc_kind_t::none;
13373 
13374   if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13375     {
13376       built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13377       if (dealloc_code == BUILT_IN_REALLOC)
13378 	realloc_kind = alloc_kind_t::builtin;
13379 
13380       for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13381 	   (amats = lookup_attribute ("malloc", amats));
13382 	   amats = TREE_CHAIN (amats))
13383 	{
13384 	  tree args = TREE_VALUE (amats);
13385 	  if (!args)
13386 	    continue;
13387 
13388 	  tree fndecl = TREE_VALUE (args);
13389 	  if (!fndecl || !DECL_P (fndecl))
13390 	    continue;
13391 
13392 	  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13393 	      && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13394 	    return true;
13395 	}
13396     }
13397 
13398   const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13399   alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13400 
13401   /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13402      of its associated allocation functions for ALLOC_DECL.
13403      If the corresponding ALLOC_DECL is found they're a matching pair,
13404      otherwise they're not.
13405      With DDATS set to the Deallocator's *Dealloc ATtributes...  */
13406   for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13407        (ddats = lookup_attribute ("*dealloc", ddats));
13408        ddats = TREE_CHAIN (ddats))
13409     {
13410       tree args = TREE_VALUE (ddats);
13411       if (!args)
13412 	continue;
13413 
13414       tree alloc = TREE_VALUE (args);
13415       if (!alloc)
13416 	continue;
13417 
13418       if (alloc == DECL_NAME (dealloc_decl))
13419 	realloc_kind = alloc_kind_t::user;
13420 
13421       if (DECL_P (alloc))
13422 	{
13423 	  gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13424 
13425 	  switch (DECL_FUNCTION_CODE (alloc))
13426 	    {
13427 	    case BUILT_IN_ALIGNED_ALLOC:
13428 	    case BUILT_IN_CALLOC:
13429 	    case BUILT_IN_GOMP_ALLOC:
13430 	    case BUILT_IN_MALLOC:
13431 	    case BUILT_IN_REALLOC:
13432 	    case BUILT_IN_STRDUP:
13433 	    case BUILT_IN_STRNDUP:
13434 	      realloc_dealloc_kind = alloc_kind_t::builtin;
13435 	      break;
13436 	    default:
13437 	      break;
13438 	    }
13439 
13440 	  if (!alloc_builtin)
13441 	    continue;
13442 
13443 	  if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13444 	    continue;
13445 
13446 	  return true;
13447 	}
13448 
13449       if (alloc == DECL_NAME (alloc_decl))
13450 	return true;
13451     }
13452 
13453   if (realloc_kind == alloc_kind_t::none)
13454     return false;
13455 
13456   hash_set<tree> common_deallocs;
13457   /* Special handling for deallocators.  Iterate over both the allocator's
13458      and the reallocator's associated deallocator functions looking for
13459      the first one in common.  If one is found, the de/reallocator is
13460      a match for the allocator even though the latter isn't directly
13461      associated with the former.  This simplifies declarations in system
13462      headers.
13463      With AMATS set to the Allocator's Malloc ATtributes,
13464      and  RMATS set to Reallocator's Malloc ATtributes...  */
13465   for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13466 	 rmats = DECL_ATTRIBUTES (dealloc_decl);
13467        (amats = lookup_attribute ("malloc", amats))
13468 	 || (rmats = lookup_attribute ("malloc", rmats));
13469        amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13470 	 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13471     {
13472       if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13473 	if (tree adealloc = TREE_VALUE (args))
13474 	  {
13475 	    if (DECL_P (adealloc)
13476 		&& fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13477 	      {
13478 		built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13479 		if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13480 		  {
13481 		    if (realloc_kind == alloc_kind_t::builtin)
13482 		      return true;
13483 		    alloc_dealloc_kind = alloc_kind_t::builtin;
13484 		  }
13485 		continue;
13486 	      }
13487 
13488 	    common_deallocs.add (adealloc);
13489 	  }
13490 
13491       if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13492 	if (tree ddealloc = TREE_VALUE (args))
13493 	  {
13494 	    if (DECL_P (ddealloc)
13495 		&& fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13496 	      {
13497 		built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13498 		if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13499 		  {
13500 		    if (alloc_dealloc_kind == alloc_kind_t::builtin)
13501 		      return true;
13502 		    realloc_dealloc_kind = alloc_kind_t::builtin;
13503 		  }
13504 		continue;
13505 	      }
13506 
13507 	    if (common_deallocs.add (ddealloc))
13508 	      return true;
13509 	  }
13510     }
13511 
13512   /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13513      a built-in deallocator.  */
13514   return  (alloc_dealloc_kind == alloc_kind_t::builtin
13515 	   && realloc_dealloc_kind == alloc_kind_t::builtin);
13516 }
13517 
13518 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13519    objects allocated by the ALLOC call.  */
13520 
13521 static bool
13522 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13523 {
13524   tree alloc_decl = gimple_call_fndecl (alloc);
13525   if (!alloc_decl)
13526     return true;
13527 
13528   return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13529 }
13530 
13531 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13532    includes a nonzero offset.  Such a pointer cannot refer to the beginning
13533    of an allocated object.  A negative offset may refer to it only if
13534    the target pointer is unknown.  */
13535 
13536 static bool
13537 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13538 {
13539   if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13540     return false;
13541 
13542   tree dealloc_decl = get_callee_fndecl (exp);
13543   if (!dealloc_decl)
13544     return false;
13545 
13546   if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13547       && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13548     {
13549       /* A call to a user-defined operator delete with a pointer plus offset
13550 	 may be valid if it's returned from an unknown function (i.e., one
13551 	 that's not operator new).  */
13552       if (TREE_CODE (aref.ref) == SSA_NAME)
13553 	{
13554 	  gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13555 	  if (is_gimple_call (def_stmt))
13556 	    {
13557 	      tree alloc_decl = gimple_call_fndecl (def_stmt);
13558 	      if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13559 		return false;
13560 	    }
13561 	}
13562     }
13563 
13564   char offstr[80];
13565   offstr[0] = '\0';
13566   if (wi::fits_shwi_p (aref.offrng[0]))
13567     {
13568       if (aref.offrng[0] == aref.offrng[1]
13569 	  || !wi::fits_shwi_p (aref.offrng[1]))
13570 	sprintf (offstr, " %lli",
13571 		 (long long)aref.offrng[0].to_shwi ());
13572       else
13573 	sprintf (offstr, " [%lli, %lli]",
13574 		 (long long)aref.offrng[0].to_shwi (),
13575 		 (long long)aref.offrng[1].to_shwi ());
13576     }
13577 
13578   if (!warning_at (loc, OPT_Wfree_nonheap_object,
13579 		   "%K%qD called on pointer %qE with nonzero offset%s",
13580 		   exp, dealloc_decl, aref.ref, offstr))
13581     return false;
13582 
13583   if (DECL_P (aref.ref))
13584     inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13585   else if (TREE_CODE (aref.ref) == SSA_NAME)
13586     {
13587       gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13588       if (is_gimple_call (def_stmt))
13589 	{
13590 	  location_t def_loc = gimple_location (def_stmt);
13591 	  tree alloc_decl = gimple_call_fndecl (def_stmt);
13592 	  if (alloc_decl)
13593 	    inform (def_loc,
13594 		    "returned from %qD", alloc_decl);
13595 	  else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13596 	    inform (def_loc,
13597 		    "returned from %qT", alloc_fntype);
13598 	  else
13599 	    inform (def_loc,  "obtained here");
13600 	}
13601     }
13602 
13603   return true;
13604 }
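
/* Illustrative: the offset warning above fires for e.g.

     char *p = malloc (8);
     free (p + 1);        // -Wfree-nonheap-object, nonzero offset 1  */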
13605 
13606 /* Issue a warning if a deallocation function such as free, realloc,
13607    or C++ operator delete is called with an argument not returned by
13608    a matching allocation function such as malloc or the corresponding
13609    form of C++ operator new.  */
13610 
13611 void
13612 maybe_emit_free_warning (tree exp)
13613 {
  tree fndecl = get_callee_fndecl (exp);
  if (!fndecl)
    return;

  unsigned argno = call_dealloc_argno (exp);
  if ((unsigned) call_expr_nargs (exp) <= argno)
    return;

  tree ptr = CALL_EXPR_ARG (exp, argno);
  if (integer_zerop (ptr))
    return;

  access_ref aref;
  if (!compute_objsize (ptr, 0, &aref))
    return;

  tree ref = aref.ref;
  if (integer_zerop (ref))
    return;

  tree dealloc_decl = get_callee_fndecl (exp);
  location_t loc = tree_inlined_location (exp);

  if (DECL_P (ref) || EXPR_P (ref))
    {
      /* Diagnose freeing a declared object.  */
      if (aref.ref_declared ()
	  && warning_at (loc, OPT_Wfree_nonheap_object,
			 "%K%qD called on unallocated object %qD",
			 exp, dealloc_decl, ref))
	{
	  loc = (DECL_P (ref)
		 ? DECL_SOURCE_LOCATION (ref)
		 : EXPR_LOCATION (ref));
	  inform (loc, "declared here");
	  return;
	}

      /* Diagnose freeing a pointer that includes a positive offset.
	 Such a pointer cannot refer to the beginning of an allocated
	 object.  A negative offset may refer to it.  */
      if (aref.sizrng[0] != aref.sizrng[1]
	  && warn_dealloc_offset (loc, exp, aref))
	return;
    }
  else if (CONSTANT_CLASS_P (ref))
    {
      if (warning_at (loc, OPT_Wfree_nonheap_object,
		      "%K%qD called on a pointer to an unallocated "
		      "object %qE", exp, dealloc_decl, ref))
	{
	  if (TREE_CODE (ptr) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
	      if (is_gimple_assign (def_stmt))
		{
		  location_t loc = gimple_location (def_stmt);
		  inform (loc, "assigned here");
		}
	    }
	  return;
	}
    }
  else if (TREE_CODE (ref) == SSA_NAME)
    {
      /* Also warn if the pointer argument refers to the result
	 of an allocation call like alloca or VLA.  */
      gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
      if (is_gimple_call (def_stmt))
	{
	  bool warned = false;
	  if (gimple_call_alloc_p (def_stmt))
	    {
	      if (matching_alloc_calls_p (def_stmt, dealloc_decl))
		{
		  if (warn_dealloc_offset (loc, exp, aref))
		    return;
		}
	      else
		{
		  tree alloc_decl = gimple_call_fndecl (def_stmt);
		  int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
			     || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
			     ? OPT_Wmismatched_new_delete
			     : OPT_Wmismatched_dealloc);
		  warned = warning_at (loc, opt,
				       "%K%qD called on pointer returned "
				       "from a mismatched allocation "
				       "function", exp, dealloc_decl);
		}
	    }
	  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
		   || gimple_call_builtin_p (def_stmt,
					     BUILT_IN_ALLOCA_WITH_ALIGN))
	    warned = warning_at (loc, OPT_Wfree_nonheap_object,
				 "%K%qD called on pointer to "
				 "an unallocated object",
				 exp, dealloc_decl);
	  else if (warn_dealloc_offset (loc, exp, aref))
	    return;

	  if (warned)
	    {
	      tree fndecl = gimple_call_fndecl (def_stmt);
	      inform (gimple_location (def_stmt),
		      "returned from %qD", fndecl);
	      return;
	    }
	}
      else if (gimple_nop_p (def_stmt))
	{
	  ref = SSA_NAME_VAR (ref);
	  /* Diagnose freeing a pointer that includes a positive offset.  */
	  if (TREE_CODE (ref) == PARM_DECL
	      && !aref.deref
	      && aref.sizrng[0] != aref.sizrng[1]
	      && aref.offrng[0] > 0 && aref.offrng[1] > 0
	      && warn_dealloc_offset (loc, exp, aref))
	    return;
	}
    }
}
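
/* Illustrative C++ examples of calls the above diagnoses (a sketch
   with made-up names, not from the GCC testsuite):

     char buf[8];
     void f (void)
     {
       free (buf);		// -Wfree-nonheap-object: freeing
				// a declared object
       char *q = new char[8];
       free (q);		// -Wmismatched-new-delete: freed with
				// a mismatched deallocation function
     }  */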

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determine it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
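
/* As a concrete example of the folding above, given

     char a[10];

   __builtin_object_size (&a[4], 0) folds to (size_t) 6, the space
   remaining between the pointer and the end of the object, while
   a pointer argument with side effects (say a call f ()) folds to
   (size_t) -1 for modes 0 and 1 and to 0 for modes 2 and 3, as the
   comment in the function notes.  */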

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of its arguments, and NARGS their number.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
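
/* For example, with a constant classified argument a call such as

     __builtin_fpclassify (fp_nan, fp_inf, fp_norm, fp_subnorm, fp_zero,
			   1.0)

   folds to fp_norm, the caller-supplied "normal" class value (the
   names here are hypothetical placeholders for the <math.h> FP_*
   constants the caller passes in).  */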

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that the caller cleared the MPFR flags and then
   calculated M, so that any flag set on entry reflects that
   calculation.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
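
/* The expected calling sequence, as a sketch (do_mpfr_remquo below is
   a real caller; mpfr_sin merely stands in for any MPFR computation):

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, MPFR_RNDN);
     mpfr_clear_flags ();	// must precede the computation
     int inexact = mpfr_sin (m, m, rnd);
     tree res = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);  */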

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that the caller cleared the MPFR flags and
   then calculated M, so that any flag set on entry reflects that
   calculation.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
   is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
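
/* For example, with constant arguments

     int quo;
     double rem = __builtin_remquo (5.0, 3.0, &quo);

   the call folds to the pair (quo = 2, -1.0): 5/3 rounds to 2 under
   round-to-nearest, and 5 - 2*3 == -1.  */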

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
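
/* For example, __builtin_lgamma_r (-0.5, &sg) folds to a constant
   close to 1.2655 (log (2*sqrt (pi))), with *sg set to -1 because
   tgamma (-0.5) == -2*sqrt (pi) is negative.  */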

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
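
/* For instance, a caller can fold a two-argument complex builtin such
   as __builtin_cpow with constant operands by passing mpc_pow as FUNC
   (a sketch; mpc_add, mpc_mul and friends share the same signature):

     tree res = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);
					 // do_nonfinite == 0  */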

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
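
/* This is what gives effect to a user redeclaration with an asm name,
   e.g. (with a made-up assembler name):

     extern int ffs (int) __asm__ ("custom_ffs");

   Both the explicit builtin decl and, when int is narrower than a
   word, the ffs optab libcall are redirected to "custom_ffs".  */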

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the converted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that DECL is not
   implemented in one (the list of handled builtins below may be
   incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
    {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
    }
  return false;
}

/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}

/* If CALLEE has known side effects, return the fnspec string
   describing them.  See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by the first
	 argument.
	 strcat/strncat additionally read memory pointed to by the first
	 argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
	return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
	return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
	return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
	return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
	return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
	return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
	return ".cO313";
      case BUILT_IN_BCOPY:
	return ".c23O3";
      case BUILT_IN_BZERO:
	return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
	return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
	return ".cR ";
      case BUILT_IN_STRNLEN:
	return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
	return "mCR ";
      case BUILT_IN_STRNDUP:
	return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
	return "mC";
      CASE_BUILT_IN_ALLOCA:
	return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
	return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
	return ".co ";
      case BUILT_IN_VA_END:
	return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
	return ".Cw ";
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	return ".cO ";
      case BUILT_IN_STACK_SAVE:
	return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
	return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
	return ".cOt";

      default:
	return "";
    }
}
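
/* A rough reading of the memcpy string above, "1cO313" (attr-fnspec.h
   has the authoritative encoding): '1' - the call returns its first
   argument; 'c' - it is const aside from the described argument
   effects; "O3" - argument 1 is written but not read, with the access
   size given by argument 3; "13" - argument 2 is read with the same
   size bound and its contents are copied to argument 1.  */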