/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
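
/* For example, on a target whose ABI requires 16-byte stack alignment,
   such as x86-64 by default, PREFERRED_STACK_BOUNDARY is 128 and
   BITS_PER_UNIT is 8, so STACK_BYTES evaluates to 16.  */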

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
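
/* Note the interplay with the watermark: when the upper bound is not a
   compile-time constant (as for variable-sized arguments), no individual
   bytes of stack_usage_map can be set; instead stack_usage_watermark is
   lowered so that stack_region_maybe_used_p conservatively treats every
   byte from CONST_LOWER upward as in use.  */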

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch to very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer,
         which is of ptr_mode.  In this case, it should be converted into
         address mode to be a valid address for a memory rtx pattern.  See
         PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}
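
/* A sketch of the descriptor convention handled above: with
   targetm.calls.custom_function_descriptors equal to 1, a function
   "pointer" with its low bit set does not point at code but just past
   the start of a two-word descriptor, so for such a value P

     word at P - 1:                                  the static chain
     word at P - 1 + POINTER_SIZE / BITS_PER_UNIT:   the code address

   while an even P is a plain code address, which is what the branch
   to CALL_LAB calls directly.  */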

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED,
             tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations:
         one compilation may garbage-collect an entry and then add a
         different (but equivalent) one, while the other doesn't run
         the garbage collector at the same spot and then shares the
         mem_attrs with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, note that too.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
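
/* For example, file-scope extern declarations such as

     extern int setjmp (jmp_buf);
     extern pid_t vfork (void);

   are recognized here purely by name, so calls to them receive
   ECF_RETURNS_TWICE even without a returns_twice attribute.  */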

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
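
/* As the switch above shows, only the first character of the internal
   "fn spec" string matters here: '1' through '4' say the function
   returns one of its first four arguments unchanged (as memcpy returns
   its first argument), 'm' says the result is a fresh, non-aliased
   pointer (as for malloc), and anything else yields no ERF_ flags.  */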

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
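
/* For example, a declaration such as

     extern int lookup (int) __attribute__ ((const, nothrow));

   yields ECF_CONST | ECF_NOTHROW here.  Adding noreturn to the same
   declaration would further set ECF_NORETURN and, since a const
   function that never returns can only be one that loops forever,
   ECF_LOOPING_CONST_OR_PURE as well.  */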

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
                                          type, named_arg);
}
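
/* For instance, a C++ class with a user-defined copy constructor is
   TREE_ADDRESSABLE and so is always passed by invisible reference, as
   is a C99 variable length array, whose TYPE_SIZE is not an
   INTEGER_CST; all remaining cases are decided by the target hook.  */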

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
                                      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p
                           (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock,
                          int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, we must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock,
                             int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}
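
/* So with -Walloc-size-larger-than=65536 the limit is 65536 bytes,
   while without the option it defaults to PTRDIFF_MAX (2^63 - 1 on an
   LP64 target); the resulting tree is cached for subsequent calls.  */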

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
        {
          /* Use the full range of the type of the expression when
             no value range information is available.  */
          range[0] = TYPE_MIN_VALUE (exptype);
          range[1] = TYPE_MAX_VALUE (exptype);
          return true;
        }

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
        {
          if (wi::les_p (max, 0))
            {
              /* EXP is not in a strictly negative range.  That means
                 it must be in some (not necessarily strictly) positive
                 range which includes zero.  Since in signed to unsigned
                 conversions negative values end up converted to large
                 positive values, and otherwise they are not valid sizes,
                 the resulting range is in both cases [0, TYPE_MAX].  */
              min = wi::zero (expprec);
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else if (wi::les_p (min - 1, 0))
            {
              /* EXP is not in a negative-positive range.  That means EXP
                 is either negative, or greater than max.  Since negative
                 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else
            {
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
      else if (wi::eq_p (0, min - 1))
        {
          /* EXP is unsigned and not in the range [1, MAX].  That means
             it's either zero or greater than MAX.  Even though 0 would
             normally be detected by -Walloc-zero, unless ALLOW_ZERO
             is true, set the range to [MAX, TYPE_MAX] so that when MAX
             is greater than the limit the whole range is diagnosed.  */
          if (allow_zero)
            min = max = wi::zero (expprec);
          else
            {
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
        }
      else
        {
          max = min - 1;
          min = wi::zero (expprec);
        }
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
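
/* To trace one case through the anti-range logic above: for a signed
   int EXP known to be in the anti-range ~[-7, 7] (i.e. EXP < -7 or
   EXP > 7), MAX is 7 so the first test fails, MIN - 1 is -8 <= 0 so
   the second branch fires, and the returned range is [8, INT_MAX]:
   the negative half is discarded because negative sizes are never
   valid.  */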

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  built_in_function fncode = fn ? DECL_FUNCTION_CODE (fn) : BUILT_IN_NONE;
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
        {
          argrange[i][0] = args[i];
          argrange[i][1] = args[i];

          if (tree_int_cst_lt (args[i], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE is negative",
                                   exp, idx[i] + 1, args[i]);
            }
          else if (integer_zerop (args[i]))
            {
              /* Avoid issuing -Walloc-zero for allocation functions other
                 than __builtin_alloca that are declared with attribute
                 returns_nonnull because there's no portability risk.  This
                 avoids warning for such calls to libiberty's xmalloc and
                 friends.
                 Also avoid issuing the warning for calls to function named
                 "alloca".  */
              if ((fncode == BUILT_IN_ALLOCA
                   && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
                  || (fncode != BUILT_IN_ALLOCA
                      && !lookup_attribute ("returns_nonnull",
                                            TYPE_ATTRIBUTES (fntype))))
                warned = warning_at (loc, OPT_Walloc_zero,
                                     "%Kargument %i value is zero",
                                     exp, idx[i] + 1);
            }
          else if (tree_int_cst_lt (maxobjsize, args[i]))
            {
              /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
                 mode and with -fno-exceptions as a way to indicate array
                 size overflow.  There's no good way to detect C++98 here
                 so avoid diagnosing these calls for all C++ modes.  */
              if (i == 0
                  && fn
                  && !args[1]
                  && lang_GNU_CXX ()
                  && DECL_IS_OPERATOR_NEW (fn)
                  && integer_all_onesp (args[i]))
                continue;

              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1, args[i], maxobjsize);
            }
        }
      else if (TREE_CODE (args[i]) == SSA_NAME
               && get_size_range (args[i], argrange[i]))
        {
          /* Verify that the argument's range is not negative (including
             upper bound of zero).  */
          if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
              && tree_int_cst_le (argrange[i][1], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] is negative",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1]);
            }
          else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1],
                                   maxobjsize);
            }
        }
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
         attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds %<SIZE_MAX%>",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds maximum object size %E",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1,
                             maxobjsize);

      if (warned)
        {
          /* Print the full range of each of the two arguments to make
             it clear when it is, in fact, in a range and not constant.  */
          if (argrange[0][0] != argrange[0][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[0] + 1, argrange[0][0], argrange[0][1]);
          if (argrange[1][0] != argrange[1][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[1] + 1, argrange[1][0], argrange[1][1]);
        }
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
        inform (loc,
                "in a call to built-in allocation function %qD", fn);
      else
        inform (fnloc,
                "in a call to allocation function %qD declared here", fn);
    }
}
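
/* For example, given an illustrative declaration such as

     void *my_calloc (size_t, size_t)
       __attribute__ ((alloc_size (1, 2)));

   a call whose two size arguments are known (or known to be in ranges)
   that multiply past SIZE_MAX or past the -Walloc-size-larger-than=
   limit is diagnosed by the product check above.  */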

/* If EXPR refers to a character array or pointer declared attribute
   nonstring return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise returns
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == ADDR_EXPR
              || code == COMPONENT_REF
              || code == VAR_DECL)
            decl = gimple_assign_rhs1 (def);
        }
      else
        var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
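
/* For example, given

     char id[8] __attribute__ ((nonstring));

   this function returns the decl for ID whether EXPR names the array
   itself, an element of it, or (via the COMPONENT_REF case) a member
   array declared nonstring inside an enclosing structure.  */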
1545
1546 /* Warn about passing a non-string array/pointer to a function that
1547 expects a nul-terminated string argument. */
1548
1549 void
maybe_warn_nonstring_arg(tree fndecl,tree exp)1550 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1551 {
1552 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1553 return;
1554
1555 if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
1556 return;
1557
1558 /* Avoid clearly invalid calls (more checking done below). */
1559 unsigned nargs = call_expr_nargs (exp);
1560 if (!nargs)
1561 return;
1562
1563 /* The bound argument to a bounded string function like strncpy. */
1564 tree bound = NULL_TREE;
1565
1566 /* The longest known or possible string argument to one of the comparison
1567 functions. If the length is less than the bound it is used instead.
1568 Since the length is only used for warning and not for code generation
1569 disable strict mode in the calls to get_range_strlen below. */
1570 tree maxlen = NULL_TREE;
1571
1572 /* It's safe to call "bounded" string functions with a non-string
1573 argument since the functions provide an explicit bound for this
1574 purpose. The exception is strncat where the bound may refer to
1575 either the destination or the source. */
1576 int fncode = DECL_FUNCTION_CODE (fndecl);
1577 switch (fncode)
1578 {
1579 case BUILT_IN_STRCMP:
1580 case BUILT_IN_STRNCMP:
1581 case BUILT_IN_STRNCASECMP:
1582 {
1583 /* For these, if one argument refers to one or more of a set
1584 of string constants or arrays of known size, determine
1585 the range of their known or possible lengths and use it
1586 conservatively as the bound for the unbounded function,
1587 and to adjust the range of the bound of the bounded ones. */
1588 for (unsigned argno = 0;
1589 argno < MIN (nargs, 2)
1590 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1591 {
1592 tree arg = CALL_EXPR_ARG (exp, argno);
1593 if (!get_attr_nonstring_decl (arg))
1594 {
1595 c_strlen_data lendata = { };
1596 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1597 maxlen = lendata.maxbound;
1598 }
1599 }
1600 }
1601 /* Fall through. */
1602
1603 case BUILT_IN_STRNCAT:
1604 case BUILT_IN_STPNCPY:
1605 case BUILT_IN_STRNCPY:
1606 if (nargs > 2)
1607 bound = CALL_EXPR_ARG (exp, 2);
1608 break;
1609
1610 case BUILT_IN_STRNDUP:
1611 if (nargs > 1)
1612 bound = CALL_EXPR_ARG (exp, 1);
1613 break;
1614
1615 case BUILT_IN_STRNLEN:
1616 {
1617 tree arg = CALL_EXPR_ARG (exp, 0);
1618 if (!get_attr_nonstring_decl (arg))
1619 {
1620 c_strlen_data lendata = { };
1621 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1622 maxlen = lendata.maxbound;
1623 }
1624 if (nargs > 1)
1625 bound = CALL_EXPR_ARG (exp, 1);
1626 break;
1627 }
1628
1629 default:
1630 break;
1631 }
1632
1633 /* Determine the range of the bound argument (if specified). */
1634 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1635 if (bound)
1636 {
1637 STRIP_NOPS (bound);
1638 get_size_range (bound, bndrng);
1639 }
1640
1641 location_t loc = EXPR_LOCATION (exp);
1642
1643 if (bndrng[0])
1644 {
1645 /* Diagnose excessive bound prior the adjustment below and
1646 regardless of attribute nonstring. */
1647 tree maxobjsize = max_object_size ();
1648 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1649 {
1650 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1651 warning_at (loc, OPT_Wstringop_overflow_,
1652 "%K%qD specified bound %E "
1653 "exceeds maximum object size %E",
1654 exp, fndecl, bndrng[0], maxobjsize);
1655 else
1656 warning_at (loc, OPT_Wstringop_overflow_,
1657 "%K%qD specified bound [%E, %E] "
1658 "exceeds maximum object size %E",
1659 exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
1660 return;
1661 }
1662 }
1663
1664 if (maxlen && !integer_all_onesp (maxlen))
1665 {
1666 /* Add one for the nul. */
1667 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1668 size_one_node);
1669
1670 if (!bndrng[0])
1671 {
1672 /* Conservatively use the upper bound of the lengths for
1673 both the lower and the upper bound of the operation. */
1674 bndrng[0] = maxlen;
1675 bndrng[1] = maxlen;
1676 bound = void_type_node;
1677 }
1678 else if (maxlen)
1679 {
1680 /* Replace the bound on the operation with the upper bound
1681 of the length of the string if the latter is smaller. */
1682 if (tree_int_cst_lt (maxlen, bndrng[0]))
1683 bndrng[0] = maxlen;
1684 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1685 bndrng[1] = maxlen;
1686 }
1687 }
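
/* A hypothetical example of the adjustment above: for
strncmp (s, "ab", 10), MAXLEN is the string length 2 plus 1 for the
nul, i.e. 3, and since 3 is less than the lower bound 10, BNDRNG
becomes [3, 10]. */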
1688
1689 /* Iterate over the built-in function's formal arguments and check
1690 each const char* against the actual argument. If the actual
1691 argument is declared with attribute nonstring, issue a warning unless
1692 the argument's maximum length is bounded. */
1693 function_args_iterator it;
1694 function_args_iter_init (&it, TREE_TYPE (fndecl));
1695
1696 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1697 {
1698 /* Avoid iterating past the declared argument in a call
1699 to a function declared without a prototype. */
1700 if (argno >= nargs)
1701 break;
1702
1703 tree argtype = function_args_iter_cond (&it);
1704 if (!argtype)
1705 break;
1706
1707 if (TREE_CODE (argtype) != POINTER_TYPE)
1708 continue;
1709
1710 argtype = TREE_TYPE (argtype);
1711
1712 if (TREE_CODE (argtype) != INTEGER_TYPE
1713 || !TYPE_READONLY (argtype))
1714 continue;
1715
1716 argtype = TYPE_MAIN_VARIANT (argtype);
1717 if (argtype != char_type_node)
1718 continue;
1719
1720 tree callarg = CALL_EXPR_ARG (exp, argno);
1721 if (TREE_CODE (callarg) == ADDR_EXPR)
1722 callarg = TREE_OPERAND (callarg, 0);
1723
1724 /* See if the destination is declared with attribute "nonstring". */
1725 tree decl = get_attr_nonstring_decl (callarg);
1726 if (!decl)
1727 continue;
1728
1729 /* The maximum number of array elements accessed. */
1730 offset_int wibnd = 0;
1731
1732 if (argno && fncode == BUILT_IN_STRNCAT)
1733 {
1734 /* See if the bound in strncat is derived from the length
1735 of the destination (as it's expected to be).
1736 If so, reset BOUND and FNCODE to trigger a warning. */
1737 tree dstarg = CALL_EXPR_ARG (exp, 0);
1738 if (is_strlen_related_p (dstarg, bound))
1739 {
1740 /* The bound applies to the destination, not to the source,
1741 so reset these to trigger a warning without mentioning
1742 the bound. */
1743 bound = NULL;
1744 fncode = 0;
1745 }
1746 else if (bndrng[1])
1747 /* Use the upper bound of the range for strncat. */
1748 wibnd = wi::to_offset (bndrng[1]);
1749 }
1750 else if (bndrng[0])
1751 /* Use the lower bound of the range for functions other than
1752 strncat. */
1753 wibnd = wi::to_offset (bndrng[0]);
1754
1755 /* Determine the size of the argument array if it is one. */
1756 offset_int asize = wibnd;
1757 bool known_size = false;
1758 tree type = TREE_TYPE (decl);
1759
1760 /* Determine the array size. For arrays of unknown bound and
1761 pointers reset BOUND to trigger the appropriate warning. */
1762 if (TREE_CODE (type) == ARRAY_TYPE)
1763 {
1764 if (tree arrbnd = TYPE_DOMAIN (type))
1765 {
1766 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1767 {
1768 asize = wi::to_offset (arrbnd) + 1;
1769 known_size = true;
1770 }
1771 }
1772 else if (bound == void_type_node)
1773 bound = NULL_TREE;
1774 }
1775 else if (bound == void_type_node)
1776 bound = NULL_TREE;
1777
1778 /* In a call to strncat with a bound in a range whose lower but
1779 not upper bound is less than the array size, reset ASIZE to
1780 be the same as the bound and the other variable to trigger
1781 the appropriate warning below. */
1782 if (fncode == BUILT_IN_STRNCAT
1783 && bndrng[0] != bndrng[1]
1784 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1785 && (!known_size
1786 || wi::ltu_p (asize, wibnd)))
1787 {
1788 asize = wibnd;
1789 bound = NULL_TREE;
1790 fncode = 0;
1791 }
1792
1793 bool warned = false;
1794
1795 auto_diagnostic_group d;
1796 if (wi::ltu_p (asize, wibnd))
1797 {
1798 if (bndrng[0] == bndrng[1])
1799 warned = warning_at (loc, OPT_Wstringop_overflow_,
1800 "%qD argument %i declared attribute "
1801 "%<nonstring%> is smaller than the specified "
1802 "bound %wu",
1803 fndecl, argno + 1, wibnd.to_uhwi ());
1804 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1805 warned = warning_at (loc, OPT_Wstringop_overflow_,
1806 "%qD argument %i declared attribute "
1807 "%<nonstring%> is smaller than "
1808 "the specified bound [%E, %E]",
1809 fndecl, argno + 1, bndrng[0], bndrng[1]);
1810 else
1811 warned = warning_at (loc, OPT_Wstringop_overflow_,
1812 "%qD argument %i declared attribute "
1813 "%<nonstring%> may be smaller than "
1814 "the specified bound [%E, %E]",
1815 fndecl, argno + 1, bndrng[0], bndrng[1]);
1816 }
1817 else if (fncode == BUILT_IN_STRNCAT)
1818 ; /* Avoid warning for calls to strncat() when the bound
1819 is equal to the size of the non-string argument. */
1820 else if (!bound)
1821 warned = warning_at (loc, OPT_Wstringop_overflow_,
1822 "%qD argument %i declared attribute %<nonstring%>",
1823 fndecl, argno + 1);
1824
1825 if (warned)
1826 inform (DECL_SOURCE_LOCATION (decl),
1827 "argument %qD declared here", decl);
1828 }
1829 }
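
/* An illustrative example (assumed declarations, not from this file):
given
char s[3] __attribute__ ((nonstring));
strncpy (d, s, 4);
ASIZE is 3 and WIBND is 4 for the source argument, so the code above
issues
warning: 'strncpy' argument 2 declared attribute 'nonstring' is
smaller than the specified bound 4
followed by a note pointing at the declaration of S. */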
1830
1831 /* Issue an error if CALL_EXPR was flagged as requiring
1832 tail-call optimization. */
1833
1834 static void
1835 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1836 {
1837 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1838 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1839 return;
1840
1841 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1842 }
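
/* For example, when a call flagged CALL_EXPR_MUST_TAIL_CALL targets
a function declared with attribute returns_twice, the sibcall checks
below pass "callee returns twice" as REASON and the user sees
error: cannot tail-call: callee returns twice
at the location of the call expression. */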
1843
1844 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1845 CALL_EXPR EXP.
1846
1847 NUM_ACTUALS is the total number of parameters.
1848
1849 N_NAMED_ARGS is the total number of named arguments.
1850
1851 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1852 value, or null.
1853
1854 FNDECL is the tree node for the target of this call (if known).
1855
1856 ARGS_SO_FAR holds state needed by the target to know where to place
1857 the next argument.
1858
1859 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1860 for arguments which are passed in registers.
1861
1862 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1863 and may be modified by this routine.
1864
1865 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1866 flags which may be modified by this routine.
1867
1868 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1869 that requires allocation of stack space.
1870
1871 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1872 the thunked-to function. */
1873
1874 static void
1875 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1876 struct arg_data *args,
1877 struct args_size *args_size,
1878 int n_named_args ATTRIBUTE_UNUSED,
1879 tree exp, tree struct_value_addr_value,
1880 tree fndecl, tree fntype,
1881 cumulative_args_t args_so_far,
1882 int reg_parm_stack_space,
1883 rtx *old_stack_level,
1884 poly_int64_pod *old_pending_adj,
1885 int *must_preallocate, int *ecf_flags,
1886 bool *may_tailcall, bool call_from_thunk_p)
1887 {
1888 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1889 location_t loc = EXPR_LOCATION (exp);
1890
1891 /* Count arg position in order args appear. */
1892 int argpos;
1893
1894 int i;
1895
1896 args_size->constant = 0;
1897 args_size->var = 0;
1898
1899 bitmap_obstack_initialize (NULL);
1900
1901 /* In this loop, we consider args in the order they are written.
1902 We fill up ARGS from the back. */
1903
1904 i = num_actuals - 1;
1905 {
1906 int j = i;
1907 call_expr_arg_iterator iter;
1908 tree arg;
1909 bitmap slots = NULL;
1910
1911 if (struct_value_addr_value)
1912 {
1913 args[j].tree_value = struct_value_addr_value;
1914 j--;
1915 }
1916 argpos = 0;
1917 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1918 {
1919 tree argtype = TREE_TYPE (arg);
1920
1921 if (targetm.calls.split_complex_arg
1922 && argtype
1923 && TREE_CODE (argtype) == COMPLEX_TYPE
1924 && targetm.calls.split_complex_arg (argtype))
1925 {
1926 tree subtype = TREE_TYPE (argtype);
1927 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1928 j--;
1929 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1930 }
1931 else
1932 args[j].tree_value = arg;
1933 j--;
1934 argpos++;
1935 }
1936
1937 if (slots)
1938 BITMAP_FREE (slots);
1939 }
1940
1941 bitmap_obstack_release (NULL);
1942
1943 /* Extract attribute alloc_size from the type of the called expression
1944 (which could be a function or a function pointer) and if set, store
1945 the indices of the corresponding arguments in ALLOC_IDX, and then
1946 the actual argument(s) at those indices in ALLOC_ARGS. */
1947 int alloc_idx[2] = { -1, -1 };
1948 if (tree alloc_size = lookup_attribute ("alloc_size",
1949 TYPE_ATTRIBUTES (fntype)))
1950 {
1951 tree args = TREE_VALUE (alloc_size);
1952 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1953 if (TREE_CHAIN (args))
1954 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1955 }
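
/* For example (an assumed declaration, not from this file), for
void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
the attribute operands are 1-based, so ALLOC_IDX becomes { 0, 1 }
and both size arguments are collected in ALLOC_ARGS below. */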
1956
1957 /* Array for up to the two attribute alloc_size arguments. */
1958 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1959
1960 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1961 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1962 {
1963 tree type = TREE_TYPE (args[i].tree_value);
1964 int unsignedp;
1965 machine_mode mode;
1966
1967 /* Replace erroneous argument with constant zero. */
1968 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1969 args[i].tree_value = integer_zero_node, type = integer_type_node;
1970
1971 /* If TYPE is a transparent union or record, pass things the way
1972 we would pass the first field of the union or record. We have
1973 already verified that the modes are the same. */
1974 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
1975 type = TREE_TYPE (first_field (type));
1976
1977 /* Decide where to pass this arg.
1978
1979 args[i].reg is nonzero if all or part is passed in registers.
1980
1981 args[i].partial is nonzero if part but not all is passed in registers,
1982 and the exact value says how many bytes are passed in registers.
1983
1984 args[i].pass_on_stack is nonzero if the argument must at least be
1985 computed on the stack. It may then be loaded back into registers
1986 if args[i].reg is nonzero.
1987
1988 These decisions are driven by the FUNCTION_... macros and must agree
1989 with those made by function.c. */
1990
1991 /* See if this argument should be passed by invisible reference. */
1992 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1993 type, argpos < n_named_args))
1994 {
1995 bool callee_copies;
1996 tree base = NULL_TREE;
1997
1998 callee_copies
1999 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
2000 type, argpos < n_named_args);
2001
2002 /* If we're compiling a thunk, pass through invisible references
2003 instead of making a copy. */
2004 if (call_from_thunk_p
2005 || (callee_copies
2006 && !TREE_ADDRESSABLE (type)
2007 && (base = get_base_address (args[i].tree_value))
2008 && TREE_CODE (base) != SSA_NAME
2009 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2010 {
2011 /* We may have turned the parameter value into an SSA name.
2012 Go back to the original parameter so we can take the
2013 address. */
2014 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2015 {
2016 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2017 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2018 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2019 }
2020 /* Argument setup code may have copied the value to a register. We
2021 revert that optimization now because the tail call code must
2022 use the original location. */
2023 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2024 && !MEM_P (DECL_RTL (args[i].tree_value))
2025 && DECL_INCOMING_RTL (args[i].tree_value)
2026 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2027 set_decl_rtl (args[i].tree_value,
2028 DECL_INCOMING_RTL (args[i].tree_value));
2029
2030 mark_addressable (args[i].tree_value);
2031
2032 /* We can't use sibcalls if a callee-copied argument is
2033 stored in the current function's frame. */
2034 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2035 {
2036 *may_tailcall = false;
2037 maybe_complain_about_tail_call (exp,
2038 "a callee-copied argument is"
2039 " stored in the current"
2040 " function's frame");
2041 }
2042
2043 args[i].tree_value = build_fold_addr_expr_loc (loc,
2044 args[i].tree_value);
2045 type = TREE_TYPE (args[i].tree_value);
2046
2047 if (*ecf_flags & ECF_CONST)
2048 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2049 }
2050 else
2051 {
2052 /* We make a copy of the object and pass the address to the
2053 function being called. */
2054 rtx copy;
2055
2056 if (!COMPLETE_TYPE_P (type)
2057 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2058 || (flag_stack_check == GENERIC_STACK_CHECK
2059 && compare_tree_int (TYPE_SIZE_UNIT (type),
2060 STACK_CHECK_MAX_VAR_SIZE) > 0))
2061 {
2062 /* This is a variable-sized object. Make space on the stack
2063 for it. */
2064 rtx size_rtx = expr_size (args[i].tree_value);
2065
2066 if (*old_stack_level == 0)
2067 {
2068 emit_stack_save (SAVE_BLOCK, old_stack_level);
2069 *old_pending_adj = pending_stack_adjust;
2070 pending_stack_adjust = 0;
2071 }
2072
2073 /* We can pass TRUE as the 4th argument because we just
2074 saved the stack pointer and will restore it right after
2075 the call. */
2076 copy = allocate_dynamic_stack_space (size_rtx,
2077 TYPE_ALIGN (type),
2078 TYPE_ALIGN (type),
2079 max_int_size_in_bytes
2080 (type),
2081 true);
2082 copy = gen_rtx_MEM (BLKmode, copy);
2083 set_mem_attributes (copy, type, 1);
2084 }
2085 else
2086 copy = assign_temp (type, 1, 0);
2087
2088 store_expr (args[i].tree_value, copy, 0, false, false);
2089
2090 /* Just change the const function to pure and then let
2091 the next test clear the pure based on
2092 callee_copies. */
2093 if (*ecf_flags & ECF_CONST)
2094 {
2095 *ecf_flags &= ~ECF_CONST;
2096 *ecf_flags |= ECF_PURE;
2097 }
2098
2099 if (!callee_copies && *ecf_flags & ECF_PURE)
2100 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2101
2102 args[i].tree_value
2103 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2104 type = TREE_TYPE (args[i].tree_value);
2105 *may_tailcall = false;
2106 maybe_complain_about_tail_call (exp,
2107 "argument must be passed"
2108 " by copying");
2109 }
2110 }
2111
2112 unsignedp = TYPE_UNSIGNED (type);
2113 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2114 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2115
2116 args[i].unsignedp = unsignedp;
2117 args[i].mode = mode;
2118
2119 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2120
2121 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2122 argpos < n_named_args);
2123
2124 if (args[i].reg && CONST_INT_P (args[i].reg))
2125 args[i].reg = NULL;
2126
2127 /* If this is a sibling call and the machine has register windows, the
2128 register window has to be unwound before calling the routine, so
2129 arguments have to go into the incoming registers. */
2130 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2131 args[i].tail_call_reg
2132 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2133 argpos < n_named_args);
2134 else
2135 args[i].tail_call_reg = args[i].reg;
2136
2137 if (args[i].reg)
2138 args[i].partial
2139 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2140 argpos < n_named_args);
2141
2142 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2143
2144 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2145 it means that we are to pass this arg in the register(s) designated
2146 by the PARALLEL, but also to pass it in the stack. */
2147 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2148 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2149 args[i].pass_on_stack = 1;
2150
2151 /* If this is an addressable type, we must preallocate the stack
2152 since we must evaluate the object into its final location.
2153
2154 If this is to be passed in both registers and the stack, it is simpler
2155 to preallocate. */
2156 if (TREE_ADDRESSABLE (type)
2157 || (args[i].pass_on_stack && args[i].reg != 0))
2158 *must_preallocate = 1;
2159
2160 /* Compute the stack-size of this argument. */
2161 if (args[i].reg == 0 || args[i].partial != 0
2162 || reg_parm_stack_space > 0
2163 || args[i].pass_on_stack)
2164 locate_and_pad_parm (mode, type,
2165 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2166 1,
2167 #else
2168 args[i].reg != 0,
2169 #endif
2170 reg_parm_stack_space,
2171 args[i].pass_on_stack ? 0 : args[i].partial,
2172 fndecl, args_size, &args[i].locate);
2173 #ifdef BLOCK_REG_PADDING
2174 else
2175 /* The argument is passed entirely in registers. See at which
2176 end it should be padded. */
2177 args[i].locate.where_pad =
2178 BLOCK_REG_PADDING (mode, type,
2179 int_size_in_bytes (type) <= UNITS_PER_WORD);
2180 #endif
2181
2182 /* Update ARGS_SIZE, the total stack space for args so far. */
2183
2184 args_size->constant += args[i].locate.size.constant;
2185 if (args[i].locate.size.var)
2186 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2187
2188 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2189 have been used, etc. */
2190
2191 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2192 type, argpos < n_named_args);
2193
2194 /* Store argument values for functions decorated with attribute
2195 alloc_size. */
2196 if (argpos == alloc_idx[0])
2197 alloc_args[0] = args[i].tree_value;
2198 else if (argpos == alloc_idx[1])
2199 alloc_args[1] = args[i].tree_value;
2200 }
2201
2202 if (alloc_args[0])
2203 {
2204 /* Check the arguments of functions decorated with attribute
2205 alloc_size. */
2206 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2207 }
2208
2209 /* Detect passing non-string arguments to functions expecting
2210 nul-terminated strings. */
2211 maybe_warn_nonstring_arg (fndecl, exp);
2212 }
2213
2214 /* Update ARGS_SIZE to contain the total size for the argument block.
2215 Return the original constant component of the argument block's size.
2216
2217 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2218 for arguments passed in registers. */
2219
2220 static poly_int64
2221 compute_argument_block_size (int reg_parm_stack_space,
2222 struct args_size *args_size,
2223 tree fndecl ATTRIBUTE_UNUSED,
2224 tree fntype ATTRIBUTE_UNUSED,
2225 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2226 {
2227 poly_int64 unadjusted_args_size = args_size->constant;
2228
2229 /* For accumulate outgoing args mode we don't need to align, since the frame
2230 will already be aligned. Align to STACK_BOUNDARY in order to prevent
2231 backends from generating misaligned frame sizes. */
2232 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2233 preferred_stack_boundary = STACK_BOUNDARY;
2234
2235 /* Compute the actual size of the argument block required. The variable
2236 and constant sizes must be combined, the size may have to be rounded,
2237 and there may be a minimum required size. */
2238
2239 if (args_size->var)
2240 {
2241 args_size->var = ARGS_SIZE_TREE (*args_size);
2242 args_size->constant = 0;
2243
2244 preferred_stack_boundary /= BITS_PER_UNIT;
2245 if (preferred_stack_boundary > 1)
2246 {
2247 /* We don't handle this case yet. To handle it correctly we have
2248 to add the delta, round and subtract the delta.
2249 Currently no machine description requires this support. */
2250 gcc_assert (multiple_p (stack_pointer_delta,
2251 preferred_stack_boundary));
2252 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2253 }
2254
2255 if (reg_parm_stack_space > 0)
2256 {
2257 args_size->var
2258 = size_binop (MAX_EXPR, args_size->var,
2259 ssize_int (reg_parm_stack_space));
2260
2261 /* The area corresponding to register parameters is not to be counted in
2262 the size of the block we need, so make the adjustment. */
2263 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2264 args_size->var
2265 = size_binop (MINUS_EXPR, args_size->var,
2266 ssize_int (reg_parm_stack_space));
2267 }
2268 }
2269 else
2270 {
2271 preferred_stack_boundary /= BITS_PER_UNIT;
2272 if (preferred_stack_boundary < 1)
2273 preferred_stack_boundary = 1;
2274 args_size->constant = (aligned_upper_bound (args_size->constant
2275 + stack_pointer_delta,
2276 preferred_stack_boundary)
2277 - stack_pointer_delta);
2278
2279 args_size->constant = upper_bound (args_size->constant,
2280 reg_parm_stack_space);
2281
2282 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2283 args_size->constant -= reg_parm_stack_space;
2284 }
2285 return unadjusted_args_size;
2286 }
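
/* A worked example of the constant case above, with assumed values:
given a preferred stack boundary of 16 bytes, STACK_POINTER_DELTA of 0,
REG_PARM_STACK_SPACE of 0 and 20 bytes of arguments, the block size is
rounded up to 32 bytes while the returned unadjusted size remains 20. */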
2287
2288 /* Precompute parameters as needed for a function call.
2289
2290 FLAGS is mask of ECF_* constants.
2291
2292 NUM_ACTUALS is the number of arguments.
2293
2294 ARGS is an array containing information for each argument; this
2295 routine fills in the INITIAL_VALUE and VALUE fields for each
2296 precomputed argument. */
2297
2298 static void
2299 precompute_arguments (int num_actuals, struct arg_data *args)
2300 {
2301 int i;
2302
2303 /* If this is a libcall, then precompute all arguments so that we do not
2304 get extraneous instructions emitted as part of the libcall sequence. */
2305
2306 /* If we preallocated the stack space, and some arguments must be passed
2307 on the stack, then we must precompute any parameter which contains a
2308 function call which will store arguments on the stack.
2309 Otherwise, evaluating the parameter may clobber previous parameters
2310 which have already been stored into the stack. (we have code to avoid
2311 such a case by saving the outgoing stack arguments, but it results in
2312 worse code) */
2313 if (!ACCUMULATE_OUTGOING_ARGS)
2314 return;
2315
2316 for (i = 0; i < num_actuals; i++)
2317 {
2318 tree type;
2319 machine_mode mode;
2320
2321 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2322 continue;
2323
2324 /* If this is an addressable type, we cannot pre-evaluate it. */
2325 type = TREE_TYPE (args[i].tree_value);
2326 gcc_assert (!TREE_ADDRESSABLE (type));
2327
2328 args[i].initial_value = args[i].value
2329 = expand_normal (args[i].tree_value);
2330
2331 mode = TYPE_MODE (type);
2332 if (mode != args[i].mode)
2333 {
2334 int unsignedp = args[i].unsignedp;
2335 args[i].value
2336 = convert_modes (args[i].mode, mode,
2337 args[i].value, args[i].unsignedp);
2338
2339 /* CSE will replace this only if it contains args[i].value
2340 pseudo, so convert it down to the declared mode using
2341 a SUBREG. */
2342 if (REG_P (args[i].value)
2343 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2344 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2345 {
2346 args[i].initial_value
2347 = gen_lowpart_SUBREG (mode, args[i].value);
2348 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2349 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2350 }
2351 }
2352 }
2353 }
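
/* As an illustration of the conversion above (assumed target behavior):
on a target that promotes HImode arguments to SImode, a precomputed
HImode value is widened here, and INITIAL_VALUE keeps a promoted SUBREG
of it so that CSE can still recognize the original HImode quantity. */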
2354
2355 /* Given the current state of MUST_PREALLOCATE and information about
2356 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2357 compute and return the final value for MUST_PREALLOCATE. */
2358
2359 static int
2360 finalize_must_preallocate (int must_preallocate, int num_actuals,
2361 struct arg_data *args, struct args_size *args_size)
2362 {
2363 /* See if we have or want to preallocate stack space.
2364
2365 If we would have to push a partially-in-regs parm
2366 before other stack parms, preallocate stack space instead.
2367
2368 If the size of some parm is not a multiple of the required stack
2369 alignment, we must preallocate.
2370
2371 If the total size of arguments that would otherwise create a copy in
2372 a temporary (such as a CALL) is more than half the total argument list
2373 size, preallocation is faster.
2374
2375 Another reason to preallocate is if we have a machine (like the m88k)
2376 where stack alignment is required to be maintained between every
2377 pair of insns, not just when the call is made. However, we assume here
2378 that such machines either do not have push insns (and hence preallocation
2379 would occur anyway) or the problem is taken care of with
2380 PUSH_ROUNDING. */
2381
2382 if (! must_preallocate)
2383 {
2384 int partial_seen = 0;
2385 poly_int64 copy_to_evaluate_size = 0;
2386 int i;
2387
2388 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2389 {
2390 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2391 partial_seen = 1;
2392 else if (partial_seen && args[i].reg == 0)
2393 must_preallocate = 1;
2394
2395 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2396 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2397 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2398 || TREE_CODE (args[i].tree_value) == COND_EXPR
2399 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2400 copy_to_evaluate_size
2401 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2402 }
2403
2404 if (maybe_ne (args_size->constant, 0)
2405 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2406 must_preallocate = 1;
2407 }
2408 return must_preallocate;
2409 }
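
/* For example, if a single BLKmode argument is the value of another
CALL_EXPR and its size is at least half of ARGS_SIZE->CONSTANT, the
loop above forces preallocation rather than saving and restoring the
outgoing argument area around the nested call. */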
2410
2411 /* If we preallocated stack space, compute the address of each argument
2412 and store it into the ARGS array.
2413
2414 We need not ensure it is a valid memory address here; it will be
2415 validized when it is used.
2416
2417 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2418
2419 static void
2420 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2421 {
2422 if (argblock)
2423 {
2424 rtx arg_reg = argblock;
2425 int i;
2426 poly_int64 arg_offset = 0;
2427
2428 if (GET_CODE (argblock) == PLUS)
2429 {
2430 arg_reg = XEXP (argblock, 0);
2431 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2432 }
2433
2434 for (i = 0; i < num_actuals; i++)
2435 {
2436 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2437 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2438 rtx addr;
2439 unsigned int align, boundary;
2440 poly_uint64 units_on_stack = 0;
2441 machine_mode partial_mode = VOIDmode;
2442
2443 /* Skip this parm if it will not be passed on the stack. */
2444 if (! args[i].pass_on_stack
2445 && args[i].reg != 0
2446 && args[i].partial == 0)
2447 continue;
2448
2449 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2450 continue;
2451
2452 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2453 addr = plus_constant (Pmode, addr, arg_offset);
2454
2455 if (args[i].partial != 0)
2456 {
2457 /* Only part of the parameter is being passed on the stack.
2458 Generate a simple memory reference of the correct size. */
2459 units_on_stack = args[i].locate.size.constant;
2460 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2461 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2462 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2463 set_mem_size (args[i].stack, units_on_stack);
2464 }
2465 else
2466 {
2467 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2468 set_mem_attributes (args[i].stack,
2469 TREE_TYPE (args[i].tree_value), 1);
2470 }
2471 align = BITS_PER_UNIT;
2472 boundary = args[i].locate.boundary;
2473 poly_int64 offset_val;
2474 if (args[i].locate.where_pad != PAD_DOWNWARD)
2475 align = boundary;
2476 else if (poly_int_rtx_p (offset, &offset_val))
2477 {
2478 align = least_bit_hwi (boundary);
2479 unsigned int offset_align
2480 = known_alignment (offset_val) * BITS_PER_UNIT;
2481 if (offset_align != 0)
2482 align = MIN (align, offset_align);
2483 }
2484 set_mem_align (args[i].stack, align);
2485
2486 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2487 addr = plus_constant (Pmode, addr, arg_offset);
2488
2489 if (args[i].partial != 0)
2490 {
2491 /* Only part of the parameter is being passed on the stack.
2492 Generate a simple memory reference of the correct size.
2493 */
2494 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2495 set_mem_size (args[i].stack_slot, units_on_stack);
2496 }
2497 else
2498 {
2499 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2500 set_mem_attributes (args[i].stack_slot,
2501 TREE_TYPE (args[i].tree_value), 1);
2502 }
2503 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2504
2505 /* Function incoming arguments may overlap with sibling call
2506 outgoing arguments and we cannot allow reordering of reads
2507 from function arguments with stores to outgoing arguments
2508 of sibling calls. */
2509 set_mem_alias_set (args[i].stack, 0);
2510 set_mem_alias_set (args[i].stack_slot, 0);
2511 }
2512 }
2513 }
2514
2515 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2516 in a call instruction.
2517
2518 FNDECL is the tree node for the target function. For an indirect call
2519 FNDECL will be NULL_TREE.
2520
2521 ADDR is the operand 0 of CALL_EXPR for this call. */
2522
2523 static rtx
2524 rtx_for_function_call (tree fndecl, tree addr)
2525 {
2526 rtx funexp;
2527
2528 /* Get the function to call, in the form of RTL. */
2529 if (fndecl)
2530 {
2531 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2532 TREE_USED (fndecl) = 1;
2533
2534 /* Get a SYMBOL_REF rtx for the function address. */
2535 funexp = XEXP (DECL_RTL (fndecl), 0);
2536 }
2537 else
2538 /* Generate an rtx (probably a pseudo-register) for the address. */
2539 {
2540 push_temp_slots ();
2541 funexp = expand_normal (addr);
2542 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2543 }
2544 return funexp;
2545 }
2546
2547 /* Return the static chain for this function, if any. */
2548
2549 rtx
2550 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2551 {
2552 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2553 return NULL;
2554
2555 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2556 }
2557
2558 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2559 static struct
2560 {
2561 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2562 or NULL_RTX if none has been scanned yet. */
2563 rtx_insn *scan_start;
2564 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2565 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2566 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2567 with a fixed offset, or PC if the offset is variable or unknown. */
2568 vec<rtx> cache;
2569 } internal_arg_pointer_exp_state;
2570
2571 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2572
2573 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2574 the tail call sequence, starting with the first insn that hasn't been
2575 scanned yet, and note for each pseudo on the LHS whether it is based
2576 on crtl->args.internal_arg_pointer or not, and what offset from
2577 that pointer it has. */
2578
2579 static void
2580 internal_arg_pointer_based_exp_scan (void)
2581 {
2582 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2583
2584 if (scan_start == NULL_RTX)
2585 insn = get_insns ();
2586 else
2587 insn = NEXT_INSN (scan_start);
2588
2589 while (insn)
2590 {
2591 rtx set = single_set (insn);
2592 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2593 {
2594 rtx val = NULL_RTX;
2595 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2596 /* Punt on pseudos set multiple times. */
2597 if (idx < internal_arg_pointer_exp_state.cache.length ()
2598 && (internal_arg_pointer_exp_state.cache[idx]
2599 != NULL_RTX))
2600 val = pc_rtx;
2601 else
2602 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2603 if (val != NULL_RTX)
2604 {
2605 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2606 internal_arg_pointer_exp_state.cache
2607 .safe_grow_cleared (idx + 1);
2608 internal_arg_pointer_exp_state.cache[idx] = val;
2609 }
2610 }
2611 if (NEXT_INSN (insn) == NULL_RTX)
2612 scan_start = insn;
2613 insn = NEXT_INSN (insn);
2614 }
2615
2616 internal_arg_pointer_exp_state.scan_start = scan_start;
2617 }
2618
2619 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2620 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2621 it with a fixed offset, or PC if the offset is variable or unknown.
2622 TOPLEVEL is true if the function is invoked at the topmost level. */
2623
2624 static rtx
2625 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2626 {
2627 if (CONSTANT_P (rtl))
2628 return NULL_RTX;
2629
2630 if (rtl == crtl->args.internal_arg_pointer)
2631 return const0_rtx;
2632
2633 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2634 return NULL_RTX;
2635
2636 poly_int64 offset;
2637 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2638 {
2639 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2640 if (val == NULL_RTX || val == pc_rtx)
2641 return val;
2642 return plus_constant (Pmode, val, offset);
2643 }
2644
2645 /* When called at the topmost level, scan pseudo assignments in between the
2646 last scanned instruction in the tail call sequence and the latest insn
2647 in that sequence. */
2648 if (toplevel)
2649 internal_arg_pointer_based_exp_scan ();
2650
2651 if (REG_P (rtl))
2652 {
2653 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2654 if (idx < internal_arg_pointer_exp_state.cache.length ())
2655 return internal_arg_pointer_exp_state.cache[idx];
2656
2657 return NULL_RTX;
2658 }
2659
2660 subrtx_iterator::array_type array;
2661 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2662 {
2663 const_rtx x = *iter;
2664 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2665 return pc_rtx;
2666 if (MEM_P (x))
2667 iter.skip_subrtxes ();
2668 }
2669
2670 return NULL_RTX;
2671 }
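
/* For instance, if a pseudo was set exactly once by
(set (reg 100) (plus (reg internal_arg_pointer) (const_int 8)))
then internal_arg_pointer_based_exp returns (const_int 8) for
(reg 100), while a pseudo set more than once is cached as PC,
i.e. an unknown offset. */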
2672
2673 /* Return true if SIZE bytes starting from address ADDR might overlap an
2674 already-clobbered argument area. This function is used to determine
2675 if we should give up a sibcall. */
2676
2677 static bool
2678 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2679 {
2680 poly_int64 i;
2681 unsigned HOST_WIDE_INT start, end;
2682 rtx val;
2683
2684 if (bitmap_empty_p (stored_args_map)
2685 && stored_args_watermark == HOST_WIDE_INT_M1U)
2686 return false;
2687 val = internal_arg_pointer_based_exp (addr, true);
2688 if (val == NULL_RTX)
2689 return false;
2690 else if (!poly_int_rtx_p (val, &i))
2691 return true;
2692
2693 if (known_eq (size, 0U))
2694 return false;
2695
2696 if (STACK_GROWS_DOWNWARD)
2697 i -= crtl->args.pretend_args_size;
2698 else
2699 i += crtl->args.pretend_args_size;
2700
2701 if (ARGS_GROW_DOWNWARD)
2702 i = -i - size;
2703
2704 /* We can ignore any references to the function's pretend args,
2705 which at this point would manifest as negative values of I. */
2706 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2707 return false;
2708
2709 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2710 if (!(i + size).is_constant (&end))
2711 end = HOST_WIDE_INT_M1U;
2712
2713 if (end > stored_args_watermark)
2714 return true;
2715
2716 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2717 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2718 if (bitmap_bit_p (stored_args_map, k))
2719 return true;
2720
2721 return false;
2722 }
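
/* For example, assuming no pretend arguments and !ARGS_GROW_DOWNWARD
on a STACK_GROWS_DOWNWARD target, an ADDR that resolves to the
internal arg pointer plus 8 with SIZE 4 overlaps a clobbered slot iff
the range end 12 exceeds STORED_ARGS_WATERMARK or one of bits 8..11
is set in STORED_ARGS_MAP. */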
2723
2724 /* Do the register loads required for any wholly-register parms or any
2725 parms which are passed both on the stack and in a register. Their
2726 expressions were already evaluated.
2727
2728 Mark all register-parms as living through the call, putting these USE
2729 insns in the CALL_INSN_FUNCTION_USAGE field.
2730
2731 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2732 checking, setting *SIBCALL_FAILURE if appropriate. */
2733
2734 static void
2735 load_register_parameters (struct arg_data *args, int num_actuals,
2736 rtx *call_fusage, int flags, int is_sibcall,
2737 int *sibcall_failure)
2738 {
2739 int i, j;
2740
2741 for (i = 0; i < num_actuals; i++)
2742 {
2743 rtx reg = ((flags & ECF_SIBCALL)
2744 ? args[i].tail_call_reg : args[i].reg);
2745 if (reg)
2746 {
2747 int partial = args[i].partial;
2748 int nregs;
2749 poly_int64 size = 0;
2750 HOST_WIDE_INT const_size = 0;
2751 rtx_insn *before_arg = get_last_insn ();
2752 tree type = TREE_TYPE (args[i].tree_value);
2753 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2754 type = TREE_TYPE (first_field (type));
2755 /* Set non-negative if we must move a word at a time, even if
2756 just one word (e.g., partial == 4 && mode == DFmode). Set
2757 to -1 if we just use a normal move insn. This value can be
2758 zero if the argument is a zero size structure. */
2759 nregs = -1;
2760 if (GET_CODE (reg) == PARALLEL)
2761 ;
2762 else if (partial)
2763 {
2764 gcc_assert (partial % UNITS_PER_WORD == 0);
2765 nregs = partial / UNITS_PER_WORD;
2766 }
2767 else if (TYPE_MODE (type) == BLKmode)
2768 {
2769 /* Variable-sized parameters should be described by a
2770 PARALLEL instead. */
2771 const_size = int_size_in_bytes (type);
2772 gcc_assert (const_size >= 0);
2773 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2774 size = const_size;
2775 }
2776 else
2777 size = GET_MODE_SIZE (args[i].mode);
2778
2779 /* Handle calls that pass values in multiple non-contiguous
2780 locations. The Irix 6 ABI has examples of this. */
2781
2782 if (GET_CODE (reg) == PARALLEL)
2783 emit_group_move (reg, args[i].parallel_value);
2784
2785 /* If simple case, just do move. If normal partial, store_one_arg
2786 has already loaded the register for us. In all other cases,
2787 load the register(s) from memory. */
2788
2789 else if (nregs == -1)
2790 {
2791 emit_move_insn (reg, args[i].value);
2792 #ifdef BLOCK_REG_PADDING
2793 /* Handle the case where we have a value that needs shifting
2794 up to the msb, e.g. a QImode value when we're padding
2795 upward on a BYTES_BIG_ENDIAN machine. */
2796 if (args[i].locate.where_pad
2797 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2798 {
2799 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2800 if (maybe_lt (size, UNITS_PER_WORD))
2801 {
2802 rtx x;
2803 poly_int64 shift
2804 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2805
2806 /* Assigning REG here rather than a temp makes
2807 CALL_FUSAGE report the whole reg as used.
2808 Strictly speaking, the call only uses SIZE
2809 bytes at the msb end, but it doesn't seem worth
2810 generating rtl to say that. */
2811 reg = gen_rtx_REG (word_mode, REGNO (reg));
2812 x = expand_shift (LSHIFT_EXPR, word_mode,
2813 reg, shift, reg, 1);
2814 if (x != reg)
2815 emit_move_insn (reg, x);
2816 }
2817 }
2818 #endif
2819 }
2820
2821 /* If we have pre-computed the values to put in the registers in
2822 the case of non-aligned structures, copy them in now. */
2823
2824 else if (args[i].n_aligned_regs != 0)
2825 for (j = 0; j < args[i].n_aligned_regs; j++)
2826 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2827 args[i].aligned_regs[j]);
2828
2829 else if (partial == 0 || args[i].pass_on_stack)
2830 {
2831 /* SIZE and CONST_SIZE are 0 for partial arguments and
2832 the size of a BLKmode type otherwise. */
2833 gcc_checking_assert (known_eq (size, const_size));
2834 rtx mem = validize_mem (copy_rtx (args[i].value));
2835
2836 /* Check for overlap with the already-clobbered argument area,
2837 provided that this has a non-zero size. */
2838 if (is_sibcall
2839 && const_size != 0
2840 && (mem_might_overlap_already_clobbered_arg_p
2841 (XEXP (args[i].value, 0), const_size)))
2842 *sibcall_failure = 1;
2843
2844 if (const_size % UNITS_PER_WORD == 0
2845 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2846 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2847 else
2848 {
2849 if (nregs > 1)
2850 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2851 args[i].mode);
2852 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2853 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2854 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2855 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2856 word_mode, word_mode, false,
2857 NULL);
2858 if (BYTES_BIG_ENDIAN)
2859 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2860 BITS_PER_WORD - bitsize, dest, 1);
2861 if (x != dest)
2862 emit_move_insn (dest, x);
2863 }
2864
2865 /* Handle a BLKmode that needs shifting. */
2866 if (nregs == 1 && const_size < UNITS_PER_WORD
2867 #ifdef BLOCK_REG_PADDING
2868 && args[i].locate.where_pad == PAD_DOWNWARD
2869 #else
2870 && BYTES_BIG_ENDIAN
2871 #endif
2872 )
2873 {
2874 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2875 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2876 enum tree_code dir = (BYTES_BIG_ENDIAN
2877 ? RSHIFT_EXPR : LSHIFT_EXPR);
2878 rtx x;
2879
2880 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2881 if (x != dest)
2882 emit_move_insn (dest, x);
2883 }
2884 }
2885
2886 /* When a parameter is a block, and perhaps in other cases, it is
2887 possible that it did a load from an argument slot that was
2888 already clobbered. */
2889 if (is_sibcall
2890 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2891 *sibcall_failure = 1;
2892
2893 /* Handle calls that pass values in multiple non-contiguous
2894 locations. The Irix 6 ABI has examples of this. */
2895 if (GET_CODE (reg) == PARALLEL)
2896 use_group_regs (call_fusage, reg);
2897 else if (nregs == -1)
2898 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
2899 else if (nregs > 0)
2900 use_regs (call_fusage, REGNO (reg), nregs);
2901 }
2902 }
2903 }
2904
2905 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2906 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2907 bytes, then we would need to push some additional bytes to pad the
2908 arguments. So, we try to compute an adjustment to the stack pointer for an
2909 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2910 bytes. Then, when the arguments are pushed the stack will be perfectly
2911 aligned.
2912
2913 Return true if this optimization is possible, storing the adjustment
2914 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2915 bytes that should be popped after the call. */
2916
2917 static bool
2918 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2919 poly_int64 unadjusted_args_size,
2920 struct args_size *args_size,
2921 unsigned int preferred_unit_stack_boundary)
2922 {
2923 /* The number of bytes to pop so that the stack will be
2924 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2925 poly_int64 adjustment;
2926 /* The alignment of the stack after the arguments are pushed, if we
2927 just pushed the arguments without adjusting the stack here. */
2928 unsigned HOST_WIDE_INT unadjusted_alignment;
2929
2930 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2931 preferred_unit_stack_boundary,
2932 &unadjusted_alignment))
2933 return false;
2934
2935 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2936 as possible -- leaving just enough left to cancel out the
2937 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2938 PENDING_STACK_ADJUST is non-negative, and congruent to
2939 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2940
2941 /* Begin by trying to pop all the bytes. */
2942 unsigned HOST_WIDE_INT tmp_misalignment;
2943 if (!known_misalignment (pending_stack_adjust,
2944 preferred_unit_stack_boundary,
2945 &tmp_misalignment))
2946 return false;
2947 unadjusted_alignment -= tmp_misalignment;
2948 adjustment = pending_stack_adjust;
2949 /* Push enough additional bytes that the stack will be aligned
2950 after the arguments are pushed. */
2951 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2952 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2953
2954 /* We need to know whether the adjusted argument size
2955 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2956 or a deallocation. */
2957 if (!ordered_p (adjustment, unadjusted_args_size))
2958 return false;
2959
2960 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2961 bytes after the call. The right number is the entire
2962 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2963 by the arguments in the first place. */
2964 args_size->constant
2965 = pending_stack_adjust - adjustment + unadjusted_args_size;
2966
2967 *adjustment_out = adjustment;
2968 return true;
2969 }
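
/* A worked example with assumed values: given a 16-byte preferred unit
boundary, STACK_POINTER_DELTA of 0, PENDING_STACK_ADJUST of 64 and
UNADJUSTED_ARGS_SIZE of 4, the unadjusted misalignment is 4, so
ADJUSTMENT becomes 64 - (16 - 4) = 52 and ARGS_SIZE->CONSTANT becomes
64 - 52 + 4 = 16: popping 52 bytes and then pushing the 4 bytes of
arguments leaves the stack 16-byte aligned. */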
2970
2971 /* Scan expression X to see whether it dereferences any argument slots
2972 we have already clobbered with tail call arguments (as noted in the
2973 stored_args_map bitmap).
2974 Return nonzero if X dereferences such an argument slot,
2975 zero otherwise. */
2976
2977 static int
2978 check_sibcall_argument_overlap_1 (rtx x)
2979 {
2980 RTX_CODE code;
2981 int i, j;
2982 const char *fmt;
2983
2984 if (x == NULL_RTX)
2985 return 0;
2986
2987 code = GET_CODE (x);
2988
2989 /* We need not check the operands of the CALL expression itself. */
2990 if (code == CALL)
2991 return 0;
2992
2993 if (code == MEM)
2994 return (mem_might_overlap_already_clobbered_arg_p
2995 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2996
2997 /* Scan all subexpressions. */
2998 fmt = GET_RTX_FORMAT (code);
2999 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3000 {
3001 if (*fmt == 'e')
3002 {
3003 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3004 return 1;
3005 }
3006 else if (*fmt == 'E')
3007 {
3008 for (j = 0; j < XVECLEN (x, i); j++)
3009 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3010 return 1;
3011 }
3012 }
3013 return 0;
3014 }
3015
3016 /* Scan the sequence after INSN to see whether it dereferences any argument
3017 slots we have already clobbered with tail call arguments (as noted in the
3018 stored_args_map bitmap). If MARK_STORED_ARGS_MAP is set, afterwards add
3019 ARG's stack slots to the stored_args_map bitmap (when ARG is a register,
3020 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after
3021 INSN dereferences such argument slots, zero otherwise. */
3022
3023 static int
3024 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3025 int mark_stored_args_map)
3026 {
3027 poly_uint64 low, high;
3028 unsigned HOST_WIDE_INT const_low, const_high;
3029
3030 if (insn == NULL_RTX)
3031 insn = get_insns ();
3032 else
3033 insn = NEXT_INSN (insn);
3034
3035 for (; insn; insn = NEXT_INSN (insn))
3036 if (INSN_P (insn)
3037 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3038 break;
3039
3040 if (mark_stored_args_map)
3041 {
3042 if (ARGS_GROW_DOWNWARD)
3043 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3044 else
3045 low = arg->locate.slot_offset.constant;
3046 high = low + arg->locate.size.constant;
3047
3048 const_low = constant_lower_bound (low);
3049 if (high.is_constant (&const_high))
3050 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3051 bitmap_set_bit (stored_args_map, i);
3052 else
3053 stored_args_watermark = MIN (stored_args_watermark, const_low);
3054 }
3055 return insn != NULL_RTX;
3056 }
3057
3058 /* Given that a function returns a value of mode MODE at the most
3059 significant end of hard register VALUE, shift VALUE left or right
3060 as specified by LEFT_P. Return true if some action was needed. */
3061
3062 bool
3063 shift_return_value (machine_mode mode, bool left_p, rtx value)
3064 {
3065 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3066 machine_mode value_mode = GET_MODE (value);
3067 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3068
3069 if (known_eq (shift, 0))
3070 return false;
3071
3072 /* Use ashr rather than lshr for right shifts. This is for the benefit
3073 of the MIPS port, which requires SImode values to be sign-extended
3074 when stored in 64-bit registers. */
3075 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3076 value, gen_int_shift_amount (value_mode, shift),
3077 value, 1, OPTAB_WIDEN))
3078 gcc_unreachable ();
3079 return true;
3080 }
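
/* For example, if a function returns an SImode value in the most
significant end of a 64-bit register, SHIFT is 32 and the value is
shifted by 32 bits in the direction given by LEFT_P; ashr rather than
lshr is used for right shifts to preserve the MIPS sign-extension
invariant mentioned above. */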
3081
3082 /* If X is a likely-spilled register value, copy it to a pseudo
3083 register and return that register. Return X otherwise. */
3084
3085 static rtx
3086 avoid_likely_spilled_reg (rtx x)
3087 {
3088 rtx new_rtx;
3089
3090 if (REG_P (x)
3091 && HARD_REGISTER_P (x)
3092 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3093 {
3094 /* Make sure that we generate a REG rather than a CONCAT.
3095 Moves into CONCATs can need nontrivial instructions,
3096 and the whole point of this function is to avoid
3097 using the hard register directly in such a situation. */
3098 generating_concat_p = 0;
3099 new_rtx = gen_reg_rtx (GET_MODE (x));
3100 generating_concat_p = 1;
3101 emit_move_insn (new_rtx, x);
3102 return new_rtx;
3103 }
3104 return x;
3105 }
3106
3107 /* Helper function for expand_call.
3108 Return false if EXP is not implementable as a sibling call. */
3109
3110 static bool
3111 can_implement_as_sibling_call_p (tree exp,
3112 rtx structure_value_addr,
3113 tree funtype,
3114 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3115 tree fndecl,
3116 int flags,
3117 tree addr,
3118 const args_size &args_size)
3119 {
3120 if (!targetm.have_sibcall_epilogue ())
3121 {
3122 maybe_complain_about_tail_call
3123 (exp,
3124 "machine description does not have"
3125 " a sibcall_epilogue instruction pattern");
3126 return false;
3127 }
3128
3129 /* Doing sibling call optimization needs some work, since
3130 structure_value_addr can be allocated on the stack.
3131 It does not seem worth the effort since few optimizable
3132 sibling calls will return a structure. */
3133 if (structure_value_addr != NULL_RTX)
3134 {
3135 maybe_complain_about_tail_call (exp, "callee returns a structure");
3136 return false;
3137 }
3138
3139 #ifdef REG_PARM_STACK_SPACE
3140 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3141 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3142 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3143 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3144 {
3145 maybe_complain_about_tail_call (exp,
3146 "inconsistent size of stack space"
3147 " allocated for arguments which are"
3148 " passed in registers");
3149 return false;
3150 }
3151 #endif
3152
3153 /* Check whether the target is able to optimize the call
3154 into a sibcall. */
3155 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3156 {
3157 maybe_complain_about_tail_call (exp,
3158 "target is not able to optimize the"
3159 " call into a sibling call");
3160 return false;
3161 }
3162
3163 /* Functions that do not return exactly once may not be sibcall
3164 optimized. */
3165 if (flags & ECF_RETURNS_TWICE)
3166 {
3167 maybe_complain_about_tail_call (exp, "callee returns twice");
3168 return false;
3169 }
3170 if (flags & ECF_NORETURN)
3171 {
3172 maybe_complain_about_tail_call (exp, "callee does not return");
3173 return false;
3174 }
3175
3176 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3177 {
3178 maybe_complain_about_tail_call (exp, "volatile function type");
3179 return false;
3180 }
3181
3182 /* If the called function is nested in the current one, it might access
3183 some of the caller's arguments, but could clobber them beforehand if
3184 the argument areas are shared. */
3185 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3186 {
3187 maybe_complain_about_tail_call (exp, "nested function");
3188 return false;
3189 }
3190
3191 /* If this function requires more stack slots than the current
3192 function, we cannot change it into a sibling call.
3193 crtl->args.pretend_args_size is not part of the
3194 stack allocated by our caller. */
3195 if (maybe_gt (args_size.constant,
3196 crtl->args.size - crtl->args.pretend_args_size))
3197 {
3198 maybe_complain_about_tail_call (exp,
3199 "callee required more stack slots"
3200 " than the caller");
3201 return false;
3202 }
3203
3204 /* If the callee pops its own arguments, then it must pop exactly
3205 the same number of arguments as the current function. */
3206 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3207 args_size.constant),
3208 targetm.calls.return_pops_args (current_function_decl,
3209 TREE_TYPE
3210 (current_function_decl),
3211 crtl->args.size)))
3212 {
3213 maybe_complain_about_tail_call (exp,
3214 "inconsistent number of"
3215 " popped arguments");
3216 return false;
3217 }
3218
3219 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3220 {
3221 maybe_complain_about_tail_call (exp, "frontend does not support"
3222 " sibling call");
3223 return false;
3224 }
3225
3226 /* All checks passed. */
3227 return true;
3228 }
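
/* For instance, a call to a function declared with attribute
returns_twice (such as setjmp) carries ECF_RETURNS_TWICE and is
rejected above, as is any call whose outgoing argument block would be
larger than the stack area allocated by our own caller. */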
3229
3230 /* Generate all the code for a CALL_EXPR exp
3231 and return an rtx for its value.
3232 Store the value in TARGET (specified as an rtx) if convenient.
3233 If the value is stored in TARGET then TARGET is returned.
3234 If IGNORE is nonzero, then we ignore the value of the function call. */
3235
3236 rtx
3237 expand_call (tree exp, rtx target, int ignore)
3238 {
3239 /* Nonzero if we are currently expanding a call. */
3240 static int currently_expanding_call = 0;
3241
3242 /* RTX for the function to be called. */
3243 rtx funexp;
3244 /* Sequence of insns to perform a normal "call". */
3245 rtx_insn *normal_call_insns = NULL;
3246 /* Sequence of insns to perform a tail "call". */
3247 rtx_insn *tail_call_insns = NULL;
3248 /* Data type of the function. */
3249 tree funtype;
3250 tree type_arg_types;
3251 tree rettype;
3252 /* Declaration of the function being called,
3253 or 0 if the function is computed (not known by name). */
3254 tree fndecl = 0;
3255 /* The type of the function being called. */
3256 tree fntype;
3257 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3258 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3259 int pass;
3260
3261 /* Register in which non-BLKmode value will be returned,
3262 or 0 if no value or if value is BLKmode. */
3263 rtx valreg;
3264 /* Address where we should return a BLKmode value;
3265 0 if value not BLKmode. */
3266 rtx structure_value_addr = 0;
3267 /* Nonzero if that address is being passed by treating it as
3268 an extra, implicit first parameter. Otherwise,
3269 it is passed by being copied directly into struct_value_rtx. */
3270 int structure_value_addr_parm = 0;
3271 /* Holds the value of implicit argument for the struct value. */
3272 tree structure_value_addr_value = NULL_TREE;
3273 /* Size of aggregate value wanted, or zero if none wanted
3274 or if we are using the non-reentrant PCC calling convention
3275 or expecting the value in registers. */
3276 poly_int64 struct_value_size = 0;
3277 /* Nonzero if called function returns an aggregate in memory PCC style,
3278 by returning the address of where to find it. */
3279 int pcc_struct_value = 0;
3280 rtx struct_value = 0;
3281
3282 /* Number of actual parameters in this call, including struct value addr. */
3283 int num_actuals;
3284 /* Number of named args. Args after this are anonymous ones
3285 and they must all go on the stack. */
3286 int n_named_args;
3287 /* Number of complex actual arguments that need to be split. */
3288 int num_complex_actuals = 0;
3289
3290 /* Vector of information about each argument.
3291 Arguments are numbered in the order they will be pushed,
3292 not the order they are written. */
3293 struct arg_data *args;
3294
3295 /* Total size in bytes of all the stack-parms scanned so far. */
3296 struct args_size args_size;
3297 struct args_size adjusted_args_size;
3298 /* Size of arguments before any adjustments (such as rounding). */
3299 poly_int64 unadjusted_args_size;
3300 /* Data on reg parms scanned so far. */
3301 CUMULATIVE_ARGS args_so_far_v;
3302 cumulative_args_t args_so_far;
3303 /* Nonzero if a reg parm has been scanned. */
3304 int reg_parm_seen;
3305
3306
3307 /* Nonzero if we must avoid push-insns in the args for this call.
3308 If stack space is allocated for register parameters, but not by the
3309 caller, then it is preallocated in the fixed part of the stack frame.
3310 So the entire argument block must then be preallocated (i.e., we
3311 ignore PUSH_ROUNDING in that case). */
3312
3313 int must_preallocate = !PUSH_ARGS;
3314
3315 /* Size of the stack reserved for parameter registers. */
3316 int reg_parm_stack_space = 0;
3317
3318 /* Address of space preallocated for stack parms
3319 (on machines that lack push insns), or 0 if space not preallocated. */
3320 rtx argblock = 0;
3321
3322 /* Mask of ECF_ and ERF_ flags. */
3323 int flags = 0;
3324 int return_flags = 0;
3325 #ifdef REG_PARM_STACK_SPACE
3326 /* Define the boundary of the register parm stack space that needs to be
3327 saved, if any. */
3328 int low_to_save, high_to_save;
3329 rtx save_area = 0; /* Place that it is saved */
3330 #endif
3331
3332 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3333 char *initial_stack_usage_map = stack_usage_map;
3334 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3335 char *stack_usage_map_buf = NULL;
3336
3337 poly_int64 old_stack_allocated;
3338
3339 /* State variables to track stack modifications. */
3340 rtx old_stack_level = 0;
3341 int old_stack_arg_under_construction = 0;
3342 poly_int64 old_pending_adj = 0;
3343 int old_inhibit_defer_pop = inhibit_defer_pop;
3344
3345 /* Some stack pointer alterations we make are performed via
3346 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3347 which we then also need to save/restore along the way. */
3348 poly_int64 old_stack_pointer_delta = 0;
3349
3350 rtx call_fusage;
3351 tree addr = CALL_EXPR_FN (exp);
3352 int i;
3353 /* The alignment of the stack, in bits. */
3354 unsigned HOST_WIDE_INT preferred_stack_boundary;
3355 /* The alignment of the stack, in bytes. */
3356 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3357 /* The static chain value to use for this call. */
3358 rtx static_chain_value;
3359 /* See if this is a "nothrow" function call. */
3360 if (TREE_NOTHROW (exp))
3361 flags |= ECF_NOTHROW;
3362
3363 /* See if we can find a DECL-node for the actual function, and get the
3364 function attributes (flags) from the function decl or type node. */
3365 fndecl = get_callee_fndecl (exp);
3366 if (fndecl)
3367 {
3368 fntype = TREE_TYPE (fndecl);
3369 flags |= flags_from_decl_or_type (fndecl);
3370 return_flags |= decl_return_flags (fndecl);
3371 }
3372 else
3373 {
3374 fntype = TREE_TYPE (TREE_TYPE (addr));
3375 flags |= flags_from_decl_or_type (fntype);
3376 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3377 flags |= ECF_BY_DESCRIPTOR;
3378 }
3379 rettype = TREE_TYPE (exp);
3380
3381 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3382
3383 /* Warn if this value is an aggregate type,
3384 regardless of which calling convention we are using for it. */
3385 if (AGGREGATE_TYPE_P (rettype))
3386 warning (OPT_Waggregate_return, "function call has aggregate value");
3387
3388 /* If the result of a non-looping pure or const function call is
3389 ignored (or void), and none of its arguments are volatile, we can
3390 avoid expanding the call and just evaluate the arguments for
3391 side-effects. */
3392 if ((flags & (ECF_CONST | ECF_PURE))
3393 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3394 && (ignore || target == const0_rtx
3395 || TYPE_MODE (rettype) == VOIDmode))
3396 {
3397 bool volatilep = false;
3398 tree arg;
3399 call_expr_arg_iterator iter;
3400
3401 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3402 if (TREE_THIS_VOLATILE (arg))
3403 {
3404 volatilep = true;
3405 break;
3406 }
3407
3408 if (! volatilep)
3409 {
3410 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3411 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3412 return const0_rtx;
3413 }
3414 }
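/* For example (illustrative, not from the sources): if PF is pure,
   "(void) PF (a)" is not expanded as a call at all -- only "a" is
   evaluated above.  With "volatile int v; (void) PF (v);" the
   argument is volatile, so we fall through and expand the call
   normally rather than lose the load of V.  */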
3415
3416 #ifdef REG_PARM_STACK_SPACE
3417 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3418 #endif
3419
3420 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3421 && reg_parm_stack_space > 0 && PUSH_ARGS)
3422 must_preallocate = 1;
3423
3424 /* Set up a place to return a structure. */
3425
3426 /* Cater to broken compilers. */
3427 if (aggregate_value_p (exp, fntype))
3428 {
3429 /* This call returns a big structure. */
3430 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3431
3432 #ifdef PCC_STATIC_STRUCT_RETURN
3433 {
3434 pcc_struct_value = 1;
3435 }
3436 #else /* not PCC_STATIC_STRUCT_RETURN */
3437 {
3438 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3439 struct_value_size = -1;
3440
3441 /* Even if it is semantically safe to use the target as the return
3442 slot, it may not be sufficiently aligned for the return type. */
3443 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3444 && target
3445 && MEM_P (target)
3446 /* If rettype is addressable, we may not create a temporary.
3447 If target is properly aligned at runtime and the compiler
3448 just doesn't know about it, it will work fine, otherwise it
3449 will be UB. */
3450 && (TREE_ADDRESSABLE (rettype)
3451 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3452 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3453 MEM_ALIGN (target)))))
3454 structure_value_addr = XEXP (target, 0);
3455 else
3456 {
3457 /* For variable-sized objects, we must be called with a target
3458 specified. If we were to allocate space on the stack here,
3459 we would have no way of knowing when to free it. */
3460 rtx d = assign_temp (rettype, 1, 1);
3461 structure_value_addr = XEXP (d, 0);
3462 target = 0;
3463 }
3464 }
3465 #endif /* not PCC_STATIC_STRUCT_RETURN */
3466 }
3467
3468 /* Figure out the amount to which the stack should be aligned. */
3469 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3470 if (fndecl)
3471 {
3472 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3473 /* Without automatic stack alignment, we can't increase preferred
3474 stack boundary. With automatic stack alignment, it is
3475 unnecessary since unless we can guarantee that all callers will
3476 align the outgoing stack properly, callee has to align its
3477 stack anyway. */
3478 if (i
3479 && i->preferred_incoming_stack_boundary
3480 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3481 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3482 }
3483
3484 /* Operand 0 is a pointer-to-function; get the type of the function. */
3485 funtype = TREE_TYPE (addr);
3486 gcc_assert (POINTER_TYPE_P (funtype));
3487 funtype = TREE_TYPE (funtype);
3488
3489 /* Count whether there are actual complex arguments that need to be split
3490 into their real and imaginary parts. Munge the type_arg_types
3491 appropriately here as well. */
3492 if (targetm.calls.split_complex_arg)
3493 {
3494 call_expr_arg_iterator iter;
3495 tree arg;
3496 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3497 {
3498 tree type = TREE_TYPE (arg);
3499 if (type && TREE_CODE (type) == COMPLEX_TYPE
3500 && targetm.calls.split_complex_arg (type))
3501 num_complex_actuals++;
3502 }
3503 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3504 }
3505 else
3506 type_arg_types = TYPE_ARG_TYPES (funtype);
3507
3508 if (flags & ECF_MAY_BE_ALLOCA)
3509 cfun->calls_alloca = 1;
3510
3511 /* If struct_value_rtx is 0, it means pass the address
3512 as if it were an extra parameter. Put the argument expression
3513 in structure_value_addr_value. */
3514 if (structure_value_addr && struct_value == 0)
3515 {
3516 /* If structure_value_addr is a REG other than
3517 virtual_outgoing_args_rtx, we can always use it. If it
3518 is not a REG, we must always copy it into a register.
3519 If it is virtual_outgoing_args_rtx, we must copy it to another
3520 register in some cases. */
3521 rtx temp = (!REG_P (structure_value_addr)
3522 || (ACCUMULATE_OUTGOING_ARGS
3523 && stack_arg_under_construction
3524 && structure_value_addr == virtual_outgoing_args_rtx)
3525 ? copy_addr_to_reg (convert_memory_address
3526 (Pmode, structure_value_addr))
3527 : structure_value_addr);
3528
3529 structure_value_addr_value =
3530 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3531 structure_value_addr_parm = 1;
3532 }
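/* Sketch of the effect (illustrative): on a target whose
   struct_value_rtx is null, "struct S x = f ();" is expanded much
   as if it were "f (&x);" -- the address computed above becomes an
   extra leading argument when the ARGS array is built below.  */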
3533
3534 /* Count the arguments and set NUM_ACTUALS. */
3535 num_actuals =
3536 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3537
3538 /* Compute number of named args.
3539 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3540
3541 if (type_arg_types != 0)
3542 n_named_args
3543 = (list_length (type_arg_types)
3544 /* Count the struct value address, if it is passed as a parm. */
3545 + structure_value_addr_parm);
3546 else
3547 /* If we know nothing, treat all args as named. */
3548 n_named_args = num_actuals;
3549
3550 /* Start updating where the next arg would go.
3551
3552 On some machines (such as the PA) indirect calls have a different
3553 calling convention than normal calls. The fourth argument in
3554 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3555 or not. */
3556 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3557 args_so_far = pack_cumulative_args (&args_so_far_v);
3558
3559 /* Now possibly adjust the number of named args.
3560 Normally, don't include the last named arg if anonymous args follow.
3561 We do include the last named arg if
3562 targetm.calls.strict_argument_naming() returns nonzero.
3563 (If no anonymous args follow, the result of list_length is actually
3564 one too large. This is harmless.)
3565
3566 If targetm.calls.pretend_outgoing_varargs_named() returns
3567 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3568 this machine will be able to place unnamed args that were passed
3569 in registers into the stack. So treat all args as named. This
3570 allows the insns emitted for a specific argument list to be
3571 independent of the function declaration.
3572
3573 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3574 we do not have any reliable way to pass unnamed args in
3575 registers, so we must force them into memory. */
3576
3577 if (type_arg_types != 0
3578 && targetm.calls.strict_argument_naming (args_so_far))
3579 ;
3580 else if (type_arg_types != 0
3581 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3582 /* Don't include the last named arg. */
3583 --n_named_args;
3584 else
3585 /* Treat all args as named. */
3586 n_named_args = num_actuals;
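/* Worked example (illustrative): for "int f (int a, ...);" called
   as "f (1, 2, 3)", TYPE_ARG_TYPES has length 1, so n_named_args
   starts at 1.  Depending on the two hooks above it then either
   stays 1, drops to 0 (last named arg excluded), or is raised to
   num_actuals (3) so that every actual is treated as named.  */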
3587
3588 /* Make a vector to hold all the information about each arg. */
3589 args = XCNEWVEC (struct arg_data, num_actuals);
3590
3591 /* Build up entries in the ARGS array, compute the size of the
3592 arguments into ARGS_SIZE, etc. */
3593 initialize_argument_information (num_actuals, args, &args_size,
3594 n_named_args, exp,
3595 structure_value_addr_value, fndecl, fntype,
3596 args_so_far, reg_parm_stack_space,
3597 &old_stack_level, &old_pending_adj,
3598 &must_preallocate, &flags,
3599 &try_tail_call, CALL_FROM_THUNK_P (exp));
3600
3601 if (args_size.var)
3602 must_preallocate = 1;
3603
3604 /* Now make final decision about preallocating stack space. */
3605 must_preallocate = finalize_must_preallocate (must_preallocate,
3606 num_actuals, args,
3607 &args_size);
3608
3609 /* If the structure value address will reference the stack pointer, we
3610 must stabilize it. We don't need to do this if we know that we are
3611 not going to adjust the stack pointer in processing this call. */
3612
3613 if (structure_value_addr
3614 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3615 || reg_mentioned_p (virtual_outgoing_args_rtx,
3616 structure_value_addr))
3617 && (args_size.var
3618 || (!ACCUMULATE_OUTGOING_ARGS
3619 && maybe_ne (args_size.constant, 0))))
3620 structure_value_addr = copy_to_reg (structure_value_addr);
3621
3622 /* Tail calls can make things harder to debug, and we've traditionally
3623 pushed these optimizations into -O2. Don't try if we're already
3624 expanding a call, as that means we're an argument. Don't try if
3625 there are cleanups, as we know there's code to follow the call. */
3626 if (currently_expanding_call++ != 0
3627 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
3628 || args_size.var
3629 || dbg_cnt (tail_call) == false)
3630 try_tail_call = 0;
3631
3632 /* Workaround buggy C/C++ wrappers around Fortran routines with
3633 character(len=constant) arguments if the hidden string length arguments
3634 are passed on the stack; if the callers forget to pass those arguments,
3635 attempting to tail call in such routines leads to stack corruption.
3636 Avoid tail calls in functions where at least one such hidden string
3637 length argument is passed (partially or fully) on the stack in the
3638 caller and the callee needs to pass any arguments on the stack.
3639 See PR90329. */
3640 if (try_tail_call && maybe_ne (args_size.constant, 0))
3641 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3642 arg; arg = DECL_CHAIN (arg))
3643 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3644 {
3645 subrtx_iterator::array_type array;
3646 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3647 if (MEM_P (*iter))
3648 {
3649 try_tail_call = 0;
3650 break;
3651 }
3652 }
3653
3654 /* If the user has marked the function as requiring tail-call
3655 optimization, attempt it. */
3656 if (must_tail_call)
3657 try_tail_call = 1;
3658
3659 /* Check the remaining reasons tail call optimization can fail. */
3660 if (try_tail_call)
3661 try_tail_call = can_implement_as_sibling_call_p (exp,
3662 structure_value_addr,
3663 funtype,
3664 reg_parm_stack_space,
3665 fndecl,
3666 flags, addr, args_size);
3667
3668 /* Check if caller and callee disagree in promotion of function
3669 return value. */
3670 if (try_tail_call)
3671 {
3672 machine_mode caller_mode, caller_promoted_mode;
3673 machine_mode callee_mode, callee_promoted_mode;
3674 int caller_unsignedp, callee_unsignedp;
3675 tree caller_res = DECL_RESULT (current_function_decl);
3676
3677 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3678 caller_mode = DECL_MODE (caller_res);
3679 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3680 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3681 caller_promoted_mode
3682 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3683 &caller_unsignedp,
3684 TREE_TYPE (current_function_decl), 1);
3685 callee_promoted_mode
3686 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3687 &callee_unsignedp,
3688 funtype, 1);
3689 if (caller_mode != VOIDmode
3690 && (caller_promoted_mode != callee_promoted_mode
3691 || ((caller_mode != caller_promoted_mode
3692 || callee_mode != callee_promoted_mode)
3693 && (caller_unsignedp != callee_unsignedp
3694 || partial_subreg_p (caller_mode, callee_mode)))))
3695 {
3696 try_tail_call = 0;
3697 maybe_complain_about_tail_call (exp,
3698 "caller and callee disagree in"
3699 " promotion of function"
3700 " return value");
3701 }
3702 }
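/* Illustrative rejected case: the caller returns "short" and the
   callee "unsigned short" on a target promoting both to SImode;
   the promoted modes match but the signedness does not, so reusing
   the caller's return sequence would extend the wrong way and the
   sibcall is refused.  */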
3703
3704 /* Ensure current function's preferred stack boundary is at least
3705 what we need. Stack alignment may also increase preferred stack
3706 boundary. */
3707 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3708 crtl->preferred_stack_boundary = preferred_stack_boundary;
3709 else
3710 preferred_stack_boundary = crtl->preferred_stack_boundary;
3711
3712 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
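/* E.g. a PREFERRED_STACK_BOUNDARY of 128 bits gives a 16-byte
   preferred_unit_stack_boundary.  */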
3713
3714 /* We want to make two insn chains; one for a sibling call, the other
3715 for a normal call. We will select one of the two chains after
3716 initial RTL generation is complete. */
3717 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3718 {
3719 int sibcall_failure = 0;
3720 /* We want to emit any pending stack adjustments before the tail
3721 recursion "call". That way we know any adjustment after the tail
3722 recursion call can be ignored if we indeed use the tail
3723 call expansion. */
3724 saved_pending_stack_adjust save;
3725 rtx_insn *insns, *before_call, *after_args;
3726 rtx next_arg_reg;
3727
3728 if (pass == 0)
3729 {
3730 /* State variables we need to save and restore between
3731 iterations. */
3732 save_pending_stack_adjust (&save);
3733 }
3734 if (pass)
3735 flags &= ~ECF_SIBCALL;
3736 else
3737 flags |= ECF_SIBCALL;
3738
3739 /* Other state variables that we must reinitialize each time
3740 through the loop (that are not initialized by the loop itself). */
3741 argblock = 0;
3742 call_fusage = 0;
3743
3744 /* Start a new sequence for the normal call case.
3745
3746 From this point on, if the sibling call fails, we want to set
3747 sibcall_failure instead of continuing the loop. */
3748 start_sequence ();
3749
3750 /* Don't let pending stack adjusts add up to too much.
3751 Also, do all pending adjustments now if there is any chance
3752 this might be a call to alloca or if we are expanding a sibling
3753 call sequence.
3754 Also do the adjustments before a throwing call, otherwise
3755 exception handling can fail; PR 19225. */
3756 if (maybe_ge (pending_stack_adjust, 32)
3757 || (maybe_ne (pending_stack_adjust, 0)
3758 && (flags & ECF_MAY_BE_ALLOCA))
3759 || (maybe_ne (pending_stack_adjust, 0)
3760 && flag_exceptions && !(flags & ECF_NOTHROW))
3761 || pass == 0)
3762 do_pending_stack_adjust ();
3763
3764 /* Precompute any arguments as needed. */
3765 if (pass)
3766 precompute_arguments (num_actuals, args);
3767
3768 /* Now we are about to start emitting insns that can be deleted
3769 if a libcall is deleted. */
3770 if (pass && (flags & ECF_MALLOC))
3771 start_sequence ();
3772
3773 if (pass == 0
3774 && crtl->stack_protect_guard
3775 && targetm.stack_protect_runtime_enabled_p ())
3776 stack_protect_epilogue ();
3777
3778 adjusted_args_size = args_size;
3779 /* Compute the actual size of the argument block required. The variable
3780 and constant sizes must be combined, the size may have to be rounded,
3781 and there may be a minimum required size. When generating a sibcall
3782 pattern, do not round up, since we'll be re-using whatever space our
3783 caller provided. */
3784 unadjusted_args_size
3785 = compute_argument_block_size (reg_parm_stack_space,
3786 &adjusted_args_size,
3787 fndecl, fntype,
3788 (pass == 0 ? 0
3789 : preferred_stack_boundary));
3790
3791 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3792
3793 /* The argument block when performing a sibling call is the
3794 incoming argument block. */
3795 if (pass == 0)
3796 {
3797 argblock = crtl->args.internal_arg_pointer;
3798 if (STACK_GROWS_DOWNWARD)
3799 argblock
3800 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3801 else
3802 argblock
3803 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3804
3805 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3806 stored_args_map = sbitmap_alloc (map_size);
3807 bitmap_clear (stored_args_map);
3808 stored_args_watermark = HOST_WIDE_INT_M1U;
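/* STORED_ARGS_MAP records which bytes of the incoming argument
   area have already been overwritten while setting up this
   sibcall; check_sibcall_argument_overlap consults it so the tail
   call can be abandoned if a later argument still needs to read
   one of those bytes.  */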
3809 }
3810
3811 /* If we have no actual push instructions, or shouldn't use them,
3812 make space for all args right now. */
3813 else if (adjusted_args_size.var != 0)
3814 {
3815 if (old_stack_level == 0)
3816 {
3817 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3818 old_stack_pointer_delta = stack_pointer_delta;
3819 old_pending_adj = pending_stack_adjust;
3820 pending_stack_adjust = 0;
3821 /* stack_arg_under_construction says whether a stack arg is
3822 being constructed at the old stack level. Pushing the stack
3823 gets a clean outgoing argument block. */
3824 old_stack_arg_under_construction = stack_arg_under_construction;
3825 stack_arg_under_construction = 0;
3826 }
3827 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3828 if (flag_stack_usage_info)
3829 current_function_has_unbounded_dynamic_stack_size = 1;
3830 }
3831 else
3832 {
3833 /* Note that we must go through the motions of allocating an argument
3834 block even if the size is zero because we may be storing args
3835 in the area reserved for register arguments, which may be part of
3836 the stack frame. */
3837
3838 poly_int64 needed = adjusted_args_size.constant;
3839
3840 /* Store the maximum argument space used. It will be pushed by
3841 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3842 checking). */
3843
3844 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3845 needed);
3846
3847 if (must_preallocate)
3848 {
3849 if (ACCUMULATE_OUTGOING_ARGS)
3850 {
3851 /* Since the stack pointer will never be pushed, it is
3852 possible for the evaluation of a parm to clobber
3853 something we have already written to the stack.
3854 Since most function calls on RISC machines do not use
3855 the stack, this is uncommon, but must work correctly.
3856
3857 Therefore, we save any area of the stack that was already
3858 written and that we are using. Here we set up to do this
3859 by making a new stack usage map from the old one. The
3860 actual save will be done by store_one_arg.
3861
3862 Another approach might be to try to reorder the argument
3863 evaluations to avoid this conflicting stack usage. */
3864
3865 /* Since we will be writing into the entire argument area,
3866 the map must be allocated for its entire size, not just
3867 the part that is the responsibility of the caller. */
3868 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3869 needed += reg_parm_stack_space;
3870
3871 poly_int64 limit = needed;
3872 if (ARGS_GROW_DOWNWARD)
3873 limit += 1;
3874
3875 /* For polynomial sizes, this is the maximum possible
3876 size needed for arguments with a constant size
3877 and offset. */
3878 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3879 highest_outgoing_arg_in_use
3880 = MAX (initial_highest_arg_in_use, const_limit);
3881
3882 free (stack_usage_map_buf);
3883 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3884 stack_usage_map = stack_usage_map_buf;
3885
3886 if (initial_highest_arg_in_use)
3887 memcpy (stack_usage_map, initial_stack_usage_map,
3888 initial_highest_arg_in_use);
3889
3890 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3891 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3892 (highest_outgoing_arg_in_use
3893 - initial_highest_arg_in_use));
3894 needed = 0;
3895
3896 /* The address of the outgoing argument list must not be
3897 copied to a register here, because argblock would be left
3898 pointing to the wrong place after the call to
3899 allocate_dynamic_stack_space below. */
3900
3901 argblock = virtual_outgoing_args_rtx;
3902 }
3903 else
3904 {
3905 /* Try to reuse some or all of the pending_stack_adjust
3906 to get this space. */
3907 if (inhibit_defer_pop == 0
3908 && (combine_pending_stack_adjustment_and_call
3909 (&needed,
3910 unadjusted_args_size,
3911 &adjusted_args_size,
3912 preferred_unit_stack_boundary)))
3913 {
3914 /* combine_pending_stack_adjustment_and_call computes
3915 an adjustment before the arguments are allocated.
3916 Account for them and see whether or not the stack
3917 needs to go up or down. */
3918 needed = unadjusted_args_size - needed;
3919
3920 /* Checked by
3921 combine_pending_stack_adjustment_and_call. */
3922 gcc_checking_assert (ordered_p (needed, 0));
3923 if (maybe_lt (needed, 0))
3924 {
3925 /* We're releasing stack space. */
3926 /* ??? We can avoid any adjustment at all if we're
3927 already aligned. FIXME. */
3928 pending_stack_adjust = -needed;
3929 do_pending_stack_adjust ();
3930 needed = 0;
3931 }
3932 else
3933 /* We need to allocate space. We'll do that in
3934 push_block below. */
3935 pending_stack_adjust = 0;
3936 }
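/* Illustrative numbers: if UNADJUSTED_ARGS_SIZE is 16 and the
   combined adjustment computed above is 24, NEEDED becomes
   16 - 24 = -8 and we pop 8 bytes here; had it come out positive,
   push_block below would allocate that amount instead.  */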
3937
3938 /* Special case this because overhead of `push_block' in
3939 this case is non-trivial. */
3940 if (known_eq (needed, 0))
3941 argblock = virtual_outgoing_args_rtx;
3942 else
3943 {
3944 rtx needed_rtx = gen_int_mode (needed, Pmode);
3945 argblock = push_block (needed_rtx, 0, 0);
3946 if (ARGS_GROW_DOWNWARD)
3947 argblock = plus_constant (Pmode, argblock, needed);
3948 }
3949
3950 /* We only really need to call `copy_to_reg' in the case
3951 where push insns are going to be used to pass ARGBLOCK
3952 to a function call in ARGS. In that case, the stack
3953 pointer changes value from the allocation point to the
3954 call point, and hence the value of
3955 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3956 as well always do it. */
3957 argblock = copy_to_reg (argblock);
3958 }
3959 }
3960 }
3961
3962 if (ACCUMULATE_OUTGOING_ARGS)
3963 {
3964 /* The save/restore code in store_one_arg handles all
3965 cases except one: a constructor call (including a C
3966 function returning a BLKmode struct) to initialize
3967 an argument. */
3968 if (stack_arg_under_construction)
3969 {
3970 rtx push_size
3971 = (gen_int_mode
3972 (adjusted_args_size.constant
3973 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3974 : TREE_TYPE (fndecl))
3975 ? 0 : reg_parm_stack_space), Pmode));
3976 if (old_stack_level == 0)
3977 {
3978 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3979 old_stack_pointer_delta = stack_pointer_delta;
3980 old_pending_adj = pending_stack_adjust;
3981 pending_stack_adjust = 0;
3982 /* stack_arg_under_construction says whether a stack
3983 arg is being constructed at the old stack level.
3984 Pushing the stack gets a clean outgoing argument
3985 block. */
3986 old_stack_arg_under_construction
3987 = stack_arg_under_construction;
3988 stack_arg_under_construction = 0;
3989 /* Make a new map for the new argument list. */
3990 free (stack_usage_map_buf);
3991 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3992 stack_usage_map = stack_usage_map_buf;
3993 highest_outgoing_arg_in_use = 0;
3994 stack_usage_watermark = HOST_WIDE_INT_M1U;
3995 }
3996 /* We can pass TRUE as the 4th argument because we just
3997 saved the stack pointer and will restore it right after
3998 the call. */
3999 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4000 -1, true);
4001 }
4002
4003 /* If argument evaluation might modify the stack pointer,
4004 copy the address of the argument list to a register. */
4005 for (i = 0; i < num_actuals; i++)
4006 if (args[i].pass_on_stack)
4007 {
4008 argblock = copy_addr_to_reg (argblock);
4009 break;
4010 }
4011 }
4012
4013 compute_argument_addresses (args, argblock, num_actuals);
4014
4015 /* Stack is properly aligned, pops can't safely be deferred during
4016 the evaluation of the arguments. */
4017 NO_DEFER_POP;
4018
4019 /* Precompute all register parameters. It isn't safe to compute
4020 anything once we have started filling any specific hard regs.
4021 TLS symbols sometimes need a call to resolve. Precompute
4022 register parameters before any stack pointer manipulation
4023 to avoid unaligned stack in the called function. */
4024 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4025
4026 OK_DEFER_POP;
4027
4028 /* Perform stack alignment before the first push (the last arg). */
4029 if (argblock == 0
4030 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4031 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4032 {
4033 /* When the stack adjustment is pending, we get better code
4034 by combining the adjustments. */
4035 if (maybe_ne (pending_stack_adjust, 0)
4036 && ! inhibit_defer_pop
4037 && (combine_pending_stack_adjustment_and_call
4038 (&pending_stack_adjust,
4039 unadjusted_args_size,
4040 &adjusted_args_size,
4041 preferred_unit_stack_boundary)))
4042 do_pending_stack_adjust ();
4043 else if (argblock == 0)
4044 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4045 - unadjusted_args_size,
4046 Pmode));
4047 }
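/* For instance (illustrative): with a 16-byte boundary, an
   unadjusted size of 24 rounds up to 32, so 8 bytes of padding are
   pushed first and the stack is aligned once the 24 bytes of
   arguments follow.  */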
4048 /* Now that the stack is properly aligned, pops can't safely
4049 be deferred during the evaluation of the arguments. */
4050 NO_DEFER_POP;
4051
4052 /* Record the maximum pushed stack space size. We need to delay
4053 doing it this far to take into account the optimization done
4054 by combine_pending_stack_adjustment_and_call. */
4055 if (flag_stack_usage_info
4056 && !ACCUMULATE_OUTGOING_ARGS
4057 && pass
4058 && adjusted_args_size.var == 0)
4059 {
4060 poly_int64 pushed = (adjusted_args_size.constant
4061 + pending_stack_adjust);
4062 current_function_pushed_stack_size
4063 = upper_bound (current_function_pushed_stack_size, pushed);
4064 }
4065
4066 funexp = rtx_for_function_call (fndecl, addr);
4067
4068 if (CALL_EXPR_STATIC_CHAIN (exp))
4069 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4070 else
4071 static_chain_value = 0;
4072
4073 #ifdef REG_PARM_STACK_SPACE
4074 /* Save the fixed argument area if it's part of the caller's frame and
4075 is clobbered by argument setup for this call. */
4076 if (ACCUMULATE_OUTGOING_ARGS && pass)
4077 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4078 &low_to_save, &high_to_save);
4079 #endif
4080
4081 /* Now store (and compute if necessary) all non-register parms.
4082 These come before register parms, since they can require block-moves,
4083 which could clobber the registers used for register parms.
4084 Parms which have partial registers are not stored here,
4085 but we do preallocate space here if they want that. */
4086
4087 for (i = 0; i < num_actuals; i++)
4088 {
4089 if (args[i].reg == 0 || args[i].pass_on_stack)
4090 {
4091 rtx_insn *before_arg = get_last_insn ();
4092
4093 /* We don't allow passing huge (> 2^30 B) arguments
4094 by value. It would cause an overflow later on. */
4095 if (constant_lower_bound (adjusted_args_size.constant)
4096 >= (1 << (HOST_BITS_PER_INT - 2)))
4097 {
4098 sorry ("passing too large argument on stack");
4099 continue;
4100 }
4101
4102 if (store_one_arg (&args[i], argblock, flags,
4103 adjusted_args_size.var != 0,
4104 reg_parm_stack_space)
4105 || (pass == 0
4106 && check_sibcall_argument_overlap (before_arg,
4107 &args[i], 1)))
4108 sibcall_failure = 1;
4109 }
4110
4111 if (args[i].stack)
4112 call_fusage
4113 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4114 gen_rtx_USE (VOIDmode, args[i].stack),
4115 call_fusage);
4116 }
4117
4118 /* If we have a parm that is passed in registers but not in memory
4119 and whose alignment does not permit a direct copy into registers,
4120 make a group of pseudos that correspond to each register that we
4121 will later fill. */
4122 if (STRICT_ALIGNMENT)
4123 store_unaligned_arguments_into_pseudos (args, num_actuals);
4124
4125 /* Now store any partially-in-registers parm.
4126 This is the last place a block-move can happen. */
4127 if (reg_parm_seen)
4128 for (i = 0; i < num_actuals; i++)
4129 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4130 {
4131 rtx_insn *before_arg = get_last_insn ();
4132
4133 /* On targets with weird calling conventions (e.g. PA) it's
4134 hard to ensure that all cases of argument overlap between
4135 stack and registers work. Play it safe and bail out. */
4136 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4137 {
4138 sibcall_failure = 1;
4139 break;
4140 }
4141
4142 if (store_one_arg (&args[i], argblock, flags,
4143 adjusted_args_size.var != 0,
4144 reg_parm_stack_space)
4145 || (pass == 0
4146 && check_sibcall_argument_overlap (before_arg,
4147 &args[i], 1)))
4148 sibcall_failure = 1;
4149 }
4150
4151 bool any_regs = false;
4152 for (i = 0; i < num_actuals; i++)
4153 if (args[i].reg != NULL_RTX)
4154 {
4155 any_regs = true;
4156 targetm.calls.call_args (args[i].reg, funtype);
4157 }
4158 if (!any_regs)
4159 targetm.calls.call_args (pc_rtx, funtype);
4160
4161 /* Figure out the register where the value, if any, will come back. */
4162 valreg = 0;
4163 if (TYPE_MODE (rettype) != VOIDmode
4164 && ! structure_value_addr)
4165 {
4166 if (pcc_struct_value)
4167 valreg = hard_function_value (build_pointer_type (rettype),
4168 fndecl, NULL, (pass == 0));
4169 else
4170 valreg = hard_function_value (rettype, fndecl, fntype,
4171 (pass == 0));
4172
4173 /* If VALREG is a PARALLEL whose first member has a zero
4174 offset, use that. This is for targets such as m68k that
4175 return the same value in multiple places. */
4176 if (GET_CODE (valreg) == PARALLEL)
4177 {
4178 rtx elem = XVECEXP (valreg, 0, 0);
4179 rtx where = XEXP (elem, 0);
4180 rtx offset = XEXP (elem, 1);
4181 if (offset == const0_rtx
4182 && GET_MODE (where) == GET_MODE (valreg))
4183 valreg = where;
4184 }
4185 }
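/* Such a PARALLEL looks roughly like this m68k-style sketch,
   returning a pointer in both %d0 and %a0:
     (parallel [(expr_list (reg:SI %d0) (const_int 0))
                (expr_list (reg:SI %a0) (const_int 0))])
   where the first zero-offset element, %d0, is used directly.  */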
4186
4187 /* If register arguments require space on the stack and stack space
4188 was not preallocated, allocate stack space here for arguments
4189 passed in registers. */
4190 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4191 && !ACCUMULATE_OUTGOING_ARGS
4192 && must_preallocate == 0 && reg_parm_stack_space > 0)
4193 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4194
4195 /* Pass the function the address in which to return a
4196 structure value. */
4197 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4198 {
4199 structure_value_addr
4200 = convert_memory_address (Pmode, structure_value_addr);
4201 emit_move_insn (struct_value,
4202 force_reg (Pmode,
4203 force_operand (structure_value_addr,
4204 NULL_RTX)));
4205
4206 if (REG_P (struct_value))
4207 use_reg (&call_fusage, struct_value);
4208 }
4209
4210 after_args = get_last_insn ();
4211 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4212 static_chain_value, &call_fusage,
4213 reg_parm_seen, flags);
4214
4215 load_register_parameters (args, num_actuals, &call_fusage, flags,
4216 pass == 0, &sibcall_failure);
4217
4218 /* Save a pointer to the last insn before the call, so that we can
4219 later safely search backwards to find the CALL_INSN. */
4220 before_call = get_last_insn ();
4221
4222 /* Set up next argument register. For sibling calls on machines
4223 with register windows this should be the incoming register. */
4224 if (pass == 0)
4225 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
4226 VOIDmode,
4227 void_type_node,
4228 true);
4229 else
4230 next_arg_reg = targetm.calls.function_arg (args_so_far,
4231 VOIDmode, void_type_node,
4232 true);
4233
4234 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4235 {
4236 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4237 arg_nr = num_actuals - arg_nr - 1;
4238 if (arg_nr >= 0
4239 && arg_nr < num_actuals
4240 && args[arg_nr].reg
4241 && valreg
4242 && REG_P (valreg)
4243 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4244 call_fusage
4245 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4246 gen_rtx_SET (valreg, args[arg_nr].reg),
4247 call_fusage);
4248 }
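/* ERF_RETURNS_ARG comes from the "fn spec" attribute and marks
   functions that return one of their arguments, e.g. memcpy
   returning its first argument.  The index flip above is needed
   because ARGS is in push order, the reverse of the order the
   arguments are written.  */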
4249 /* All arguments and registers used for the call must be set up by
4250 now! */
4251
4252 /* Stack must be properly aligned now. */
4253 gcc_assert (!pass
4254 || multiple_p (stack_pointer_delta,
4255 preferred_unit_stack_boundary));
4256
4257 /* Generate the actual call instruction. */
4258 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4259 adjusted_args_size.constant, struct_value_size,
4260 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4261 flags, args_so_far);
4262
4263 if (flag_ipa_ra)
4264 {
4265 rtx_call_insn *last;
4266 rtx datum = NULL_RTX;
4267 if (fndecl != NULL_TREE)
4268 {
4269 datum = XEXP (DECL_RTL (fndecl), 0);
4270 gcc_assert (datum != NULL_RTX
4271 && GET_CODE (datum) == SYMBOL_REF);
4272 }
4273 last = last_call_insn ();
4274 add_reg_note (last, REG_CALL_DECL, datum);
4275 }
4276
4277 /* If the call setup or the call itself overlaps with anything
4278 of the argument setup we probably clobbered our call address.
4279 In that case we can't do sibcalls. */
4280 if (pass == 0
4281 && check_sibcall_argument_overlap (after_args, 0, 0))
4282 sibcall_failure = 1;
4283
4284 /* If a non-BLKmode value is returned at the most significant end
4285 of a register, shift the register right by the appropriate amount
4286 and update VALREG accordingly. BLKmode values are handled by the
4287 group load/store machinery below. */
4288 if (!structure_value_addr
4289 && !pcc_struct_value
4290 && TYPE_MODE (rettype) != VOIDmode
4291 && TYPE_MODE (rettype) != BLKmode
4292 && REG_P (valreg)
4293 && targetm.calls.return_in_msb (rettype))
4294 {
4295 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4296 sibcall_failure = 1;
4297 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4298 }
4299
4300 if (pass && (flags & ECF_MALLOC))
4301 {
4302 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4303 rtx_insn *last, *insns;
4304
4305 /* The return value from a malloc-like function is a pointer. */
4306 if (TREE_CODE (rettype) == POINTER_TYPE)
4307 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4308
4309 emit_move_insn (temp, valreg);
4310
4311 /* The return value from a malloc-like function cannot alias
4312 anything else. */
4313 last = get_last_insn ();
4314 add_reg_note (last, REG_NOALIAS, temp);
4315
4316 /* Write out the sequence. */
4317 insns = get_insns ();
4318 end_sequence ();
4319 emit_insn (insns);
4320 valreg = temp;
4321 }
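/* The REG_NOALIAS note lets alias analysis treat the value now in
   TEMP like freshly malloc'd storage: a new object nothing else
   can point to yet.  */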
4322
4323 /* For calls to `setjmp', etc., inform
4324 function.c:setjmp_warnings that it should complain if
4325 nonvolatile values are live. For functions that cannot
4326 return, inform flow that control does not fall through. */
4327
4328 if ((flags & ECF_NORETURN) || pass == 0)
4329 {
4330 /* The barrier must be emitted
4331 immediately after the CALL_INSN. Some ports emit more
4332 than just a CALL_INSN above, so we must search for it here. */
4333
4334 rtx_insn *last = get_last_insn ();
4335 while (!CALL_P (last))
4336 {
4337 last = PREV_INSN (last);
4338 /* There was no CALL_INSN? */
4339 gcc_assert (last != before_call);
4340 }
4341
4342 emit_barrier_after (last);
4343
4344 /* Stack adjustments after a noreturn call are dead code.
4345 However when NO_DEFER_POP is in effect, we must preserve
4346 stack_pointer_delta. */
4347 if (inhibit_defer_pop == 0)
4348 {
4349 stack_pointer_delta = old_stack_allocated;
4350 pending_stack_adjust = 0;
4351 }
4352 }
4353
4354 /* If value type not void, return an rtx for the value. */
4355
4356 if (TYPE_MODE (rettype) == VOIDmode
4357 || ignore)
4358 target = const0_rtx;
4359 else if (structure_value_addr)
4360 {
4361 if (target == 0 || !MEM_P (target))
4362 {
4363 target
4364 = gen_rtx_MEM (TYPE_MODE (rettype),
4365 memory_address (TYPE_MODE (rettype),
4366 structure_value_addr));
4367 set_mem_attributes (target, rettype, 1);
4368 }
4369 }
4370 else if (pcc_struct_value)
4371 {
4372 /* This is the special C++ case where we need to
4373 know what the true target was. We take care to
4374 never use this value more than once in one expression. */
4375 target = gen_rtx_MEM (TYPE_MODE (rettype),
4376 copy_to_reg (valreg));
4377 set_mem_attributes (target, rettype, 1);
4378 }
4379 /* Handle calls that return values in multiple non-contiguous locations.
4380 The Irix 6 ABI has examples of this. */
4381 else if (GET_CODE (valreg) == PARALLEL)
4382 {
4383 if (target == 0)
4384 target = emit_group_move_into_temps (valreg);
4385 else if (rtx_equal_p (target, valreg))
4386 ;
4387 else if (GET_CODE (target) == PARALLEL)
4388 /* Handle the result of an emit_group_move_into_temps
4389 call in the previous pass. */
4390 emit_group_move (target, valreg);
4391 else
4392 emit_group_store (target, valreg, rettype,
4393 int_size_in_bytes (rettype));
4394 }
4395 else if (target
4396 && GET_MODE (target) == TYPE_MODE (rettype)
4397 && GET_MODE (target) == GET_MODE (valreg))
4398 {
4399 bool may_overlap = false;
4400
4401 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4402 reg to a plain register. */
4403 if (!REG_P (target) || HARD_REGISTER_P (target))
4404 valreg = avoid_likely_spilled_reg (valreg);
4405
4406 /* If TARGET is a MEM in the argument area, and we have
4407 saved part of the argument area, then we can't store
4408 directly into TARGET as it may get overwritten when we
4409 restore the argument save area below. Don't work too
4410 hard though and simply force TARGET to a register if it
4411 is a MEM; the optimizer is quite likely to sort it out. */
4412 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4413 for (i = 0; i < num_actuals; i++)
4414 if (args[i].save_area)
4415 {
4416 may_overlap = true;
4417 break;
4418 }
4419
4420 if (may_overlap)
4421 target = copy_to_reg (valreg);
4422 else
4423 {
4424 /* TARGET and VALREG cannot be equal at this point
4425 because the latter would not have
4426 REG_FUNCTION_VALUE_P true, while the former would if
4427 it were referring to the same register.
4428
4429 If they refer to the same register, this move will be
4430 a no-op, except when function inlining is being
4431 done. */
4432 emit_move_insn (target, valreg);
4433
4434 /* If we are setting a MEM, this code must be executed.
4435 Since it is emitted after the call insn, sibcall
4436 optimization cannot be performed in that case. */
4437 if (MEM_P (target))
4438 sibcall_failure = 1;
4439 }
4440 }
4441 else
4442 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4443
4444 /* If we promoted this return value, make the proper SUBREG.
4445 TARGET might be const0_rtx here, so be careful. */
4446 if (REG_P (target)
4447 && TYPE_MODE (rettype) != BLKmode
4448 && GET_MODE (target) != TYPE_MODE (rettype))
4449 {
4450 tree type = rettype;
4451 int unsignedp = TYPE_UNSIGNED (type);
4452 machine_mode pmode;
4453
4454 /* Ensure we promote as expected, and get the new unsignedness. */
4455 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4456 funtype, 1);
4457 gcc_assert (GET_MODE (target) == pmode);
4458
4459 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4460 GET_MODE (target));
4461 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4462 SUBREG_PROMOTED_VAR_P (target) = 1;
4463 SUBREG_PROMOTED_SET (target, unsignedp);
4464 }
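/* Illustrative case: a function returning "short" on a target that
   promotes return values to word_mode comes back in, say, an
   SImode register; TARGET is then rewrapped as something like
   (subreg:HI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P set, so
   later code knows the upper bits are already extended.  */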
4465
4466 /* If size of args is variable or this was a constructor call for a stack
4467 argument, restore saved stack-pointer value. */
4468
4469 if (old_stack_level)
4470 {
4471 rtx_insn *prev = get_last_insn ();
4472
4473 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4474 stack_pointer_delta = old_stack_pointer_delta;
4475
4476 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4477
4478 pending_stack_adjust = old_pending_adj;
4479 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4480 stack_arg_under_construction = old_stack_arg_under_construction;
4481 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4482 stack_usage_map = initial_stack_usage_map;
4483 stack_usage_watermark = initial_stack_usage_watermark;
4484 sibcall_failure = 1;
4485 }
4486 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4487 {
4488 #ifdef REG_PARM_STACK_SPACE
4489 if (save_area)
4490 restore_fixed_argument_area (save_area, argblock,
4491 high_to_save, low_to_save);
4492 #endif
4493
4494 /* If we saved any argument areas, restore them. */
4495 for (i = 0; i < num_actuals; i++)
4496 if (args[i].save_area)
4497 {
4498 machine_mode save_mode = GET_MODE (args[i].save_area);
4499 rtx stack_area
4500 = gen_rtx_MEM (save_mode,
4501 memory_address (save_mode,
4502 XEXP (args[i].stack_slot, 0)));
4503
4504 if (save_mode != BLKmode)
4505 emit_move_insn (stack_area, args[i].save_area);
4506 else
4507 emit_block_move (stack_area, args[i].save_area,
4508 (gen_int_mode
4509 (args[i].locate.size.constant, Pmode)),
4510 BLOCK_OP_CALL_PARM);
4511 }
4512
4513 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4514 stack_usage_map = initial_stack_usage_map;
4515 stack_usage_watermark = initial_stack_usage_watermark;
4516 }
4517
4518 /* If this was alloca, record the new stack level. */
4519 if (flags & ECF_MAY_BE_ALLOCA)
4520 record_new_stack_level ();
4521
4522 /* Free up storage we no longer need. */
4523 for (i = 0; i < num_actuals; ++i)
4524 free (args[i].aligned_regs);
4525
4526 targetm.calls.end_call_args ();
4527
4528 insns = get_insns ();
4529 end_sequence ();
4530
4531 if (pass == 0)
4532 {
4533 tail_call_insns = insns;
4534
4535 /* Restore the pending stack adjustment now that we have
4536 finished generating the sibling call sequence. */
4537
4538 restore_pending_stack_adjust (&save);
4539
4540 /* Prepare arg structure for next iteration. */
4541 for (i = 0; i < num_actuals; i++)
4542 {
4543 args[i].value = 0;
4544 args[i].aligned_regs = 0;
4545 args[i].stack = 0;
4546 }
4547
4548 sbitmap_free (stored_args_map);
4549 internal_arg_pointer_exp_state.scan_start = NULL;
4550 internal_arg_pointer_exp_state.cache.release ();
4551 }
4552 else
4553 {
4554 normal_call_insns = insns;
4555
4556 /* Verify that we've deallocated all the stack we used. */
4557 gcc_assert ((flags & ECF_NORETURN)
4558 || known_eq (old_stack_allocated,
4559 stack_pointer_delta
4560 - pending_stack_adjust));
4561 }
4562
4563 /* If something prevents making this a sibling call,
4564 zero out the sequence. */
4565 if (sibcall_failure)
4566 tail_call_insns = NULL;
4567 else
4568 break;
4569 }
4570
4571 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4572 arguments too, as argument area is now clobbered by the call. */
4573 if (tail_call_insns)
4574 {
4575 emit_insn (tail_call_insns);
4576 crtl->tail_call_emit = true;
4577 }
4578 else
4579 {
4580 emit_insn (normal_call_insns);
4581 if (try_tail_call)
4582 /* Ideally we'd emit a message for all of the ways that it could
4583 have failed. */
4584 maybe_complain_about_tail_call (exp, "tail call production failed");
4585 }
4586
4587 currently_expanding_call--;
4588
4589 free (stack_usage_map_buf);
4590 free (args);
4591 return target;
4592 }
4593
4594 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4595 this function's incoming arguments.
4596
4597 At the start of RTL generation we know the only REG_EQUIV notes
4598 in the rtl chain are those for incoming arguments, so we can look
4599 for REG_EQUIV notes between the start of the function and the
4600 NOTE_INSN_FUNCTION_BEG.
4601
4602 This is (slight) overkill. We could keep track of the highest
4603 argument we clobber and be more selective in removing notes, but it
4604 does not seem to be worth the effort. */
4605
4606 void
4607 fixup_tail_calls (void)
4608 {
4609 rtx_insn *insn;
4610
4611 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4612 {
4613 rtx note;
4614
4615 /* There are never REG_EQUIV notes for the incoming arguments
4616 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4617 if (NOTE_P (insn)
4618 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4619 break;
4620
4621 note = find_reg_note (insn, REG_EQUIV, 0);
4622 if (note)
4623 remove_note (insn, note);
4624 note = find_reg_note (insn, REG_EQUIV, 0);
4625 gcc_assert (!note);
4626 }
4627 }
4628
4629 /* Traverse a list of TYPES and expand all complex types into their
4630 components. */
4631 static tree
4632 split_complex_types (tree types)
4633 {
4634 tree p;
4635
4636 /* Before allocating memory, check for the common case of no complex. */
4637 for (p = types; p; p = TREE_CHAIN (p))
4638 {
4639 tree type = TREE_VALUE (p);
4640 if (TREE_CODE (type) == COMPLEX_TYPE
4641 && targetm.calls.split_complex_arg (type))
4642 goto found;
4643 }
4644 return types;
4645
4646 found:
4647 types = copy_list (types);
4648
4649 for (p = types; p; p = TREE_CHAIN (p))
4650 {
4651 tree complex_type = TREE_VALUE (p);
4652
4653 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4654 && targetm.calls.split_complex_arg (complex_type))
4655 {
4656 tree next, imag;
4657
4658 /* Rewrite complex type with component type. */
4659 TREE_VALUE (p) = TREE_TYPE (complex_type);
4660 next = TREE_CHAIN (p);
4661
4662 /* Add another component type for the imaginary part. */
4663 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4664 TREE_CHAIN (p) = imag;
4665 TREE_CHAIN (imag) = next;
4666
4667 /* Skip the newly created node. */
4668 p = TREE_CHAIN (p);
4669 }
4670 }
4671
4672 return types;
4673 }
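/* E.g. an argument list of (complex double, int) becomes
   (double, double, int): each complex entry is rewritten to its
   component type and a second entry is spliced in for the
   imaginary part.  */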
4674
4675 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4676 for a value of mode OUTMODE,
4677 with NARGS different arguments, passed as ARGS.
4678 Store the return value if RETVAL is nonzero: store it in VALUE if
4679 VALUE is nonnull, otherwise pick a convenient location. In either
4680 case return the location of the stored value.
4681
4682 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4683 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4684 other types of library calls. */
4685
4686 rtx
4687 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4688 enum libcall_type fn_type,
4689 machine_mode outmode, int nargs, rtx_mode_t *args)
4690 {
4691 /* Total size in bytes of all the stack-parms scanned so far. */
4692 struct args_size args_size;
4693 /* Size of arguments before any adjustments (such as rounding). */
4694 struct args_size original_args_size;
4695 int argnum;
4696 rtx fun;
4697 /* TODO: choose the correct decl type of orgfun. Sadly this information
4698 isn't present here, so we default to the native calling ABI. */
4699 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
4700 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
4701 int count;
4702 rtx argblock = 0;
4703 CUMULATIVE_ARGS args_so_far_v;
4704 cumulative_args_t args_so_far;
4705 struct arg
4706 {
4707 rtx value;
4708 machine_mode mode;
4709 rtx reg;
4710 int partial;
4711 struct locate_and_pad_arg_data locate;
4712 rtx save_area;
4713 };
4714 struct arg *argvec;
4715 int old_inhibit_defer_pop = inhibit_defer_pop;
4716 rtx call_fusage = 0;
4717 rtx mem_value = 0;
4718 rtx valreg;
4719 int pcc_struct_value = 0;
4720 poly_int64 struct_value_size = 0;
4721 int flags;
4722 int reg_parm_stack_space = 0;
4723 poly_int64 needed;
4724 rtx_insn *before_call;
4725 bool have_push_fusage;
4726 tree tfom; /* type_for_mode (outmode, 0) */
4727
4728 #ifdef REG_PARM_STACK_SPACE
4729 /* Define the boundary of the register parm stack space that needs to be
4730 saved, if any. */
4731 int low_to_save = 0, high_to_save = 0;
4732 rtx save_area = 0; /* Place that it is saved. */
4733 #endif
4734
4735 /* Size of the stack reserved for parameter registers. */
4736 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4737 char *initial_stack_usage_map = stack_usage_map;
4738 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4739 char *stack_usage_map_buf = NULL;
4740
4741 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4742
4743 #ifdef REG_PARM_STACK_SPACE
4744 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4745 #endif
4746
4747 /* By default, library functions cannot throw. */
4748 flags = ECF_NOTHROW;
4749
4750 switch (fn_type)
4751 {
4752 case LCT_NORMAL:
4753 break;
4754 case LCT_CONST:
4755 flags |= ECF_CONST;
4756 break;
4757 case LCT_PURE:
4758 flags |= ECF_PURE;
4759 break;
4760 case LCT_NORETURN:
4761 flags |= ECF_NORETURN;
4762 break;
4763 case LCT_THROW:
4764 flags &= ~ECF_NOTHROW;
4765 break;
4766 case LCT_RETURNS_TWICE:
4767 flags = ECF_RETURNS_TWICE;
4768 break;
4769 }
4770 fun = orgfun;
4771
4772 /* Ensure current function's preferred stack boundary is at least
4773 what we need. */
4774 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4775 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4776
4777 /* If this kind of value comes back in memory,
4778 decide where in memory it should come back. */
4779 if (outmode != VOIDmode)
4780 {
4781 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4782 if (aggregate_value_p (tfom, 0))
4783 {
4784 #ifdef PCC_STATIC_STRUCT_RETURN
4785 rtx pointer_reg
4786 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4787 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4788 pcc_struct_value = 1;
4789 if (value == 0)
4790 value = gen_reg_rtx (outmode);
4791 #else /* not PCC_STATIC_STRUCT_RETURN */
4792 struct_value_size = GET_MODE_SIZE (outmode);
4793 if (value != 0 && MEM_P (value))
4794 mem_value = value;
4795 else
4796 mem_value = assign_temp (tfom, 1, 1);
4797 #endif
4798 /* This call returns a big structure. */
4799 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4800 }
4801 }
4802 else
4803 tfom = void_type_node;
4804
4805 /* ??? Unfinished: must pass the memory address as an argument. */
4806
4807 /* Copy all the libcall-arguments out of the varargs data
4808 and into a vector ARGVEC.
4809
4810 Compute how to pass each argument. We only support a very small subset
4811 of the full argument passing conventions to limit complexity here since
4812 library functions shouldn't have many args. */
4813
4814 argvec = XALLOCAVEC (struct arg, nargs + 1);
4815 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4816
4817 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4818 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4819 #else
4820 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4821 #endif
4822 args_so_far = pack_cumulative_args (&args_so_far_v);
4823
4824 args_size.constant = 0;
4825 args_size.var = 0;
4826
4827 count = 0;
4828
4829 push_temp_slots ();
4830
4831 /* If there's a structure value address to be passed,
4832 either pass it in the special place, or pass it as an extra argument. */
4833 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4834 {
4835 rtx addr = XEXP (mem_value, 0);
4836
4837 nargs++;
4838
4839 /* Make sure it is a reasonable operand for a move or push insn. */
4840 if (!REG_P (addr) && !MEM_P (addr)
4841 && !(CONSTANT_P (addr)
4842 && targetm.legitimate_constant_p (Pmode, addr)))
4843 addr = force_operand (addr, NULL_RTX);
4844
4845 argvec[count].value = addr;
4846 argvec[count].mode = Pmode;
4847 argvec[count].partial = 0;
4848
4849 argvec[count].reg = targetm.calls.function_arg (args_so_far,
4850 Pmode, NULL_TREE, true);
4851 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4852 NULL_TREE, 1) == 0);
4853
4854 locate_and_pad_parm (Pmode, NULL_TREE,
4855 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4856 1,
4857 #else
4858 argvec[count].reg != 0,
4859 #endif
4860 reg_parm_stack_space, 0,
4861 NULL_TREE, &args_size, &argvec[count].locate);
4862
4863 if (argvec[count].reg == 0 || argvec[count].partial != 0
4864 || reg_parm_stack_space > 0)
4865 args_size.constant += argvec[count].locate.size.constant;
4866
4867 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4868
4869 count++;
4870 }
4871
4872 for (unsigned int i = 0; count < nargs; i++, count++)
4873 {
4874 rtx val = args[i].first;
4875 machine_mode mode = args[i].second;
4876 int unsigned_p = 0;
4877
4878 /* We cannot convert the arg value to the mode the library wants here;
4879 must do it earlier where we know the signedness of the arg. */
4880 gcc_assert (mode != BLKmode
4881 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4882
4883 /* Make sure it is a reasonable operand for a move or push insn. */
4884 if (!REG_P (val) && !MEM_P (val)
4885 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4886 val = force_operand (val, NULL_RTX);
4887
4888 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4889 {
4890 rtx slot;
4891 int must_copy
4892 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4893
4894 /* If this was a CONST function, it is now PURE since it now
4895 reads memory. */
4896 if (flags & ECF_CONST)
4897 {
4898 flags &= ~ECF_CONST;
4899 flags |= ECF_PURE;
4900 }
4901
4902 if (MEM_P (val) && !must_copy)
4903 {
4904 tree val_expr = MEM_EXPR (val);
4905 if (val_expr)
4906 mark_addressable (val_expr);
4907 slot = val;
4908 }
4909 else
4910 {
4911 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4912 1, 1);
4913 emit_move_insn (slot, val);
4914 }
4915
4916 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4917 gen_rtx_USE (VOIDmode, slot),
4918 call_fusage);
4919 if (must_copy)
4920 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4921 gen_rtx_CLOBBER (VOIDmode,
4922 slot),
4923 call_fusage);
4924
4925 mode = Pmode;
4926 val = force_operand (XEXP (slot, 0), NULL_RTX);
4927 }
4928
4929 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4930 argvec[count].mode = mode;
4931 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4932 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4933 NULL_TREE, true);
4934
4935 argvec[count].partial
4936 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4937
4938 if (argvec[count].reg == 0
4939 || argvec[count].partial != 0
4940 || reg_parm_stack_space > 0)
4941 {
4942 locate_and_pad_parm (mode, NULL_TREE,
4943 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4944 1,
4945 #else
4946 argvec[count].reg != 0,
4947 #endif
4948 reg_parm_stack_space, argvec[count].partial,
4949 NULL_TREE, &args_size, &argvec[count].locate);
4950 args_size.constant += argvec[count].locate.size.constant;
4951 gcc_assert (!argvec[count].locate.size.var);
4952 }
4953 #ifdef BLOCK_REG_PADDING
4954 else
4955 /* The argument is passed entirely in registers. See at which
4956 end it should be padded. */
4957 argvec[count].locate.where_pad =
4958 BLOCK_REG_PADDING (mode, NULL_TREE,
4959 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4960 #endif
4961
4962 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
4963 }
4964
4965 /* If this machine requires an external definition for library
4966 functions, write one out. */
4967 assemble_external_libcall (fun);
4968
4969 original_args_size = args_size;
4970 args_size.constant = (aligned_upper_bound (args_size.constant
4971 + stack_pointer_delta,
4972 STACK_BYTES)
4973 - stack_pointer_delta);
4974
4975 args_size.constant = upper_bound (args_size.constant,
4976 reg_parm_stack_space);
4977
4978 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4979 args_size.constant -= reg_parm_stack_space;
4980
4981 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4982 args_size.constant);
4983
4984 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4985 {
4986 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4987 current_function_pushed_stack_size
4988 = upper_bound (current_function_pushed_stack_size, pushed);
4989 }
4990
4991 if (ACCUMULATE_OUTGOING_ARGS)
4992 {
4993 /* Since the stack pointer will never be pushed, it is possible for
4994 the evaluation of a parm to clobber something we have already
4995 written to the stack. Since most function calls on RISC machines
4996 do not use the stack, this is uncommon, but must work correctly.
4997
4998 Therefore, we save any area of the stack that was already written
4999 and that we are using. Here we set up to do this by making a new
5000 stack usage map from the old one.
5001
5002 Another approach might be to try to reorder the argument
5003 evaluations to avoid this conflicting stack usage. */
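/* Conceptually, stack_usage_map is one char per byte of the outgoing
   argument area, nonzero when that byte has already been written.  A
   minimal sketch of the test this enables (mirroring the code further
   down in this function):

     if (stack_region_maybe_used_p (lower_bound, upper_bound,
				    reg_parm_stack_space))
       ...save the live bytes to a temporary before overwriting them...
*/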
5004
5005 needed = args_size.constant;
5006
5007 /* Since we will be writing into the entire argument area, the
5008 map must be allocated for its entire size, not just the part that
5009 is the responsibility of the caller. */
5010 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5011 needed += reg_parm_stack_space;
5012
5013 poly_int64 limit = needed;
5014 if (ARGS_GROW_DOWNWARD)
5015 limit += 1;
5016
5017 /* For polynomial sizes, this is the maximum possible size needed
5018 for arguments with a constant size and offset. */
5019 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5020 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5021 const_limit);
5022
5023 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5024 stack_usage_map = stack_usage_map_buf;
5025
5026 if (initial_highest_arg_in_use)
5027 memcpy (stack_usage_map, initial_stack_usage_map,
5028 initial_highest_arg_in_use);
5029
5030 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5031 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5032 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5033 needed = 0;
5034
5035 /* We must be careful to use virtual regs before they're instantiated,
5036 and real regs afterwards. Loop optimization, for example, can create
5037 new libcalls after we've instantiated the virtual regs, and if we
5038 use virtuals anyway, they won't match the rtl patterns. */
5039
5040 if (virtuals_instantiated)
5041 argblock = plus_constant (Pmode, stack_pointer_rtx,
5042 STACK_POINTER_OFFSET);
5043 else
5044 argblock = virtual_outgoing_args_rtx;
5045 }
5046 else
5047 {
5048 if (!PUSH_ARGS)
5049 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5050 }
5051
5052 /* Since we push the args individually in reverse order, perform stack
5053 alignment before the first push (which is for the last arg). */
5054 if (argblock == 0)
5055 anti_adjust_stack (gen_int_mode (args_size.constant
5056 - original_args_size.constant,
5057 Pmode));
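/* Worked example with illustrative numbers: if STACK_BYTES is 16,
   stack_pointer_delta is 4 and original_args_size.constant is 20, the
   rounding above yields args_size.constant == 28, so 8 bytes of padding
   are pushed here and the 20 argument bytes later, leaving the stack
   pointer 16-byte aligned at the call. */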
5058
5059 argnum = nargs - 1;
5060
5061 #ifdef REG_PARM_STACK_SPACE
5062 if (ACCUMULATE_OUTGOING_ARGS)
5063 {
5064 /* The argument list is the property of the called routine, which
5065 may clobber it. If the fixed area has been used for previous
5066 parameters, we must save and restore it. */
5067 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5068 &low_to_save, &high_to_save);
5069 }
5070 #endif
5071
5072 /* When expanding a normal call, args are stored in push order,
5073 which is the reverse of what we have here. */
5074 bool any_regs = false;
5075 for (int i = nargs; i-- > 0; )
5076 if (argvec[i].reg != NULL_RTX)
5077 {
5078 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5079 any_regs = true;
5080 }
5081 if (!any_regs)
5082 targetm.calls.call_args (pc_rtx, NULL_TREE);
5083
5084 /* Push the args that need to be pushed. */
5085
5086 have_push_fusage = false;
5087
5088 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5089 are to be pushed. */
5090 for (count = 0; count < nargs; count++, argnum--)
5091 {
5092 machine_mode mode = argvec[argnum].mode;
5093 rtx val = argvec[argnum].value;
5094 rtx reg = argvec[argnum].reg;
5095 int partial = argvec[argnum].partial;
5096 unsigned int parm_align = argvec[argnum].locate.boundary;
5097 poly_int64 lower_bound = 0, upper_bound = 0;
5098
5099 if (! (reg != 0 && partial == 0))
5100 {
5101 rtx use;
5102
5103 if (ACCUMULATE_OUTGOING_ARGS)
5104 {
5105 /* If this is being stored into a pre-allocated, fixed-size
5106 stack area, save any previous data at that location. */
5107
5108 if (ARGS_GROW_DOWNWARD)
5109 {
5110 /* stack_slot is negative, but we want to index stack_usage_map
5111 with positive values. */
5112 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5113 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5114 }
5115 else
5116 {
5117 lower_bound = argvec[argnum].locate.slot_offset.constant;
5118 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5119 }
5120
5121 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5122 reg_parm_stack_space))
5123 {
5124 /* We need to make a save area. */
5125 poly_uint64 size
5126 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5127 machine_mode save_mode
5128 = int_mode_for_size (size, 1).else_blk ();
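/* For example, a 4-byte region maps to SImode and is saved in a pseudo
   register below, whereas a 6-byte region has no integer mode on most
   targets and falls back to BLKmode, which is saved via a stack
   temporary instead. */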
5129 rtx adr
5130 = plus_constant (Pmode, argblock,
5131 argvec[argnum].locate.offset.constant);
5132 rtx stack_area
5133 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5134
5135 if (save_mode == BLKmode)
5136 {
5137 argvec[argnum].save_area
5138 = assign_stack_temp (BLKmode,
5139 argvec[argnum].locate.size.constant);
5141
5142 emit_block_move (validize_mem
5143 (copy_rtx (argvec[argnum].save_area)),
5144 stack_area,
5145 (gen_int_mode
5146 (argvec[argnum].locate.size.constant,
5147 Pmode)),
5148 BLOCK_OP_CALL_PARM);
5149 }
5150 else
5151 {
5152 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5153
5154 emit_move_insn (argvec[argnum].save_area, stack_area);
5155 }
5156 }
5157 }
5158
5159 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5160 partial, reg, 0, argblock,
5161 (gen_int_mode
5162 (argvec[argnum].locate.offset.constant, Pmode)),
5163 reg_parm_stack_space,
5164 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5165
5166 /* Now mark the segment we just used. */
5167 if (ACCUMULATE_OUTGOING_ARGS)
5168 mark_stack_region_used (lower_bound, upper_bound);
5169
5170 NO_DEFER_POP;
5171
5172 /* Indicate argument access so that alias.c knows that these
5173 values are live. */
5174 if (argblock)
5175 use = plus_constant (Pmode, argblock,
5176 argvec[argnum].locate.offset.constant);
5177 else if (have_push_fusage)
5178 continue;
5179 else
5180 {
5181 /* When arguments are pushed, trying to tell alias.c where
5182 exactly this argument is won't work, because the
5183 auto-increment causes confusion. So we merely indicate
5184 that we access something with a known mode somewhere on
5185 the stack. */
5186 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5187 gen_rtx_SCRATCH (Pmode));
5188 have_push_fusage = true;
5189 }
5190 use = gen_rtx_MEM (argvec[argnum].mode, use);
5191 use = gen_rtx_USE (VOIDmode, use);
5192 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5193 }
5194 }
5195
5196 argnum = nargs - 1;
5197
5198 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5199
5200 /* Now load any reg parms into their regs. */
5201
5202 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5203 are to be pushed. */
5204 for (count = 0; count < nargs; count++, argnum--)
5205 {
5206 machine_mode mode = argvec[argnum].mode;
5207 rtx val = argvec[argnum].value;
5208 rtx reg = argvec[argnum].reg;
5209 int partial = argvec[argnum].partial;
5210
5211 /* Handle calls that pass values in multiple non-contiguous
5212 locations. The PA64 has examples of this for library calls. */
5213 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5214 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5215 else if (reg != 0 && partial == 0)
5216 {
5217 emit_move_insn (reg, val);
5218 #ifdef BLOCK_REG_PADDING
5219 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5220
5221 /* Copied from load_register_parameters. */
5222
5223 /* Handle the case where we have a value that needs shifting up to
5224 the MSB, e.g. a QImode value being padded upward on a
5225 BYTES_BIG_ENDIAN machine. */
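/* Illustrative numbers, assuming UNITS_PER_WORD == 8: a QImode value
   (size 1) padded upward on a big-endian target is shifted left by
   (8 - 1) * BITS_PER_UNIT == 56 bits, moving the payload into the
   most significant byte of the word-sized register. */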
5226 if (known_lt (size, UNITS_PER_WORD)
5227 && (argvec[argnum].locate.where_pad
5228 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5229 {
5230 rtx x;
5231 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5232
5233 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5234 report the whole reg as used. Strictly speaking, the
5235 call only uses SIZE bytes at the msb end, but it doesn't
5236 seem worth generating rtl to say that. */
5237 reg = gen_rtx_REG (word_mode, REGNO (reg));
5238 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5239 if (x != reg)
5240 emit_move_insn (reg, x);
5241 }
5242 #endif
5243 }
5244
5245 NO_DEFER_POP;
5246 }
5247
5248 /* Any regs containing parms remain in use through the call. */
5249 for (count = 0; count < nargs; count++)
5250 {
5251 rtx reg = argvec[count].reg;
5252 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5253 use_group_regs (&call_fusage, reg);
5254 else if (reg != 0)
5255 {
5256 int partial = argvec[count].partial;
5257 if (partial)
5258 {
5259 int nregs;
5260 gcc_assert (partial % UNITS_PER_WORD == 0);
5261 nregs = partial / UNITS_PER_WORD;
5262 use_regs (&call_fusage, REGNO (reg), nregs);
5263 }
5264 else
5265 use_reg (&call_fusage, reg);
5266 }
5267 }
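/* At this point CALL_FUSAGE is a chain of USE (and possibly CLOBBER)
   expressions, for example:

     (expr_list (use (reg:SI 4))
	(expr_list (use (mem:SI (plus (reg sp) (const_int 8)))) ...))

   emit_call_1 attaches it as CALL_INSN_FUNCTION_USAGE so that dataflow
   knows the call reads these locations.  (The shape shown is a sketch,
   not target-accurate RTL.) */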
5268
5269 /* Pass the function the address in which to return a structure value. */
5270 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5271 {
5272 emit_move_insn (struct_value,
5273 force_reg (Pmode,
5274 force_operand (XEXP (mem_value, 0),
5275 NULL_RTX)));
5276 if (REG_P (struct_value))
5277 use_reg (&call_fusage, struct_value);
5278 }
5279
5280 /* Don't allow popping to be deferred, since then
5281 cse'ing of library calls could delete a call and leave the pop. */
5282 NO_DEFER_POP;
5283 valreg = (mem_value == 0 && outmode != VOIDmode
5284 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5285
5286 /* Stack must be properly aligned now. */
5287 gcc_assert (multiple_p (stack_pointer_delta,
5288 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5289
5290 before_call = get_last_insn ();
5291
5292 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5293 will set inhibit_defer_pop to that value. */
5294 /* The return type is needed to decide how many bytes the function pops.
5295 Signedness plays no role in that, so for simplicity, we pretend it's
5296 always signed. We also assume that the list of arguments passed has
5297 no impact, so we pretend it is unknown. */
5298
5299 emit_call_1 (fun, NULL,
5300 get_identifier (XSTR (orgfun, 0)),
5301 build_function_type (tfom, NULL_TREE),
5302 original_args_size.constant, args_size.constant,
5303 struct_value_size,
5304 targetm.calls.function_arg (args_so_far,
5305 VOIDmode, void_type_node, true),
5306 valreg,
5307 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5308
5309 if (flag_ipa_ra)
5310 {
5311 rtx datum = orgfun;
5312 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5313 rtx_call_insn *last = last_call_insn ();
5314 add_reg_note (last, REG_CALL_DECL, datum);
5315 }
5316
5317 /* Right-shift returned value if necessary. */
5318 if (!pcc_struct_value
5319 && TYPE_MODE (tfom) != BLKmode
5320 && targetm.calls.return_in_msb (tfom))
5321 {
5322 shift_return_value (TYPE_MODE (tfom), false, valreg);
5323 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5324 }
5325
5326 targetm.calls.end_call_args ();
5327
5328 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5329 that it should complain if nonvolatile values are live. For
5330 functions that cannot return, inform flow that control does not
5331 fall through. */
5332 if (flags & ECF_NORETURN)
5333 {
5334 /* The barrier must be emitted
5335 immediately after the CALL_INSN. Some ports emit more than
5336 just a CALL_INSN above, so we must search for it here. */
5337 rtx_insn *last = get_last_insn ();
5338 while (!CALL_P (last))
5339 {
5340 last = PREV_INSN (last);
5341 /* There was no CALL_INSN? */
5342 gcc_assert (last != before_call);
5343 }
5344
5345 emit_barrier_after (last);
5346 }
5347
5348 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5349 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5350 if (flags & ECF_NOTHROW)
5351 {
5352 rtx_insn *last = get_last_insn ();
5353 while (!CALL_P (last))
5354 {
5355 last = PREV_INSN (last);
5356 /* There was no CALL_INSN? */
5357 gcc_assert (last != before_call);
5358 }
5359
5360 make_reg_eh_region_note_nothrow_nononlocal (last);
5361 }
5362
5363 /* Now restore inhibit_defer_pop to its actual original value. */
5364 OK_DEFER_POP;
5365
5366 pop_temp_slots ();
5367
5368 /* Copy the value to the right place. */
5369 if (outmode != VOIDmode && retval)
5370 {
5371 if (mem_value)
5372 {
5373 if (value == 0)
5374 value = mem_value;
5375 if (value != mem_value)
5376 emit_move_insn (value, mem_value);
5377 }
5378 else if (GET_CODE (valreg) == PARALLEL)
5379 {
5380 if (value == 0)
5381 value = gen_reg_rtx (outmode);
5382 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5383 }
5384 else
5385 {
5386 /* Convert to the proper mode if a promotion has been active. */
5387 if (GET_MODE (valreg) != outmode)
5388 {
5389 int unsignedp = TYPE_UNSIGNED (tfom);
5390
5391 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5392 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5393 == GET_MODE (valreg));
5394 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5395 }
5396
5397 if (value != 0)
5398 emit_move_insn (value, valreg);
5399 else
5400 value = valreg;
5401 }
5402 }
5403
5404 if (ACCUMULATE_OUTGOING_ARGS)
5405 {
5406 #ifdef REG_PARM_STACK_SPACE
5407 if (save_area)
5408 restore_fixed_argument_area (save_area, argblock,
5409 high_to_save, low_to_save);
5410 #endif
5411
5412 /* If we saved any argument areas, restore them. */
5413 for (count = 0; count < nargs; count++)
5414 if (argvec[count].save_area)
5415 {
5416 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5417 rtx adr = plus_constant (Pmode, argblock,
5418 argvec[count].locate.offset.constant);
5419 rtx stack_area = gen_rtx_MEM (save_mode,
5420 memory_address (save_mode, adr));
5421
5422 if (save_mode == BLKmode)
5423 emit_block_move (stack_area,
5424 validize_mem
5425 (copy_rtx (argvec[count].save_area)),
5426 (gen_int_mode
5427 (argvec[count].locate.size.constant, Pmode)),
5428 BLOCK_OP_CALL_PARM);
5429 else
5430 emit_move_insn (stack_area, argvec[count].save_area);
5431 }
5432
5433 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5434 stack_usage_map = initial_stack_usage_map;
5435 stack_usage_watermark = initial_stack_usage_watermark;
5436 }
5437
5438 free (stack_usage_map_buf);
5439
5440 return value;
5442 }
5443
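/* A hedged usage sketch (compiled out; not part of GCC itself): how a
   caller might reach the routine above through the public wrappers
   declared in rtl.h.  The optab lookup and the wrapper signature are
   the standard ones, but this exact helper is hypothetical.  */
#if 0
static rtx
example_emit_umod_libcall (rtx op0, rtx op1)
{
  /* Look up the SImode unsigned-modulus libfunc, e.g. "__umodsi3".  */
  rtx libfunc = optab_libfunc (umod_optab, SImode);
  /* Emit the call; the result comes back in the libcall's
     return-value register.  */
  return emit_library_call_value (libfunc, NULL_RTX, LCT_CONST, SImode,
				  op0, SImode, op1, SImode);
}
#endif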
5444
5445 /* Store a single argument for a function call
5446 into the register or memory area where it must be passed.
5447 *ARG describes the argument value and where to pass it.
5448
5449 ARGBLOCK is the address of the stack-block for all the arguments,
5450 or 0 on a machine where arguments are pushed individually.
5451
5452 MAY_BE_ALLOCA nonzero (the ECF_MAY_BE_ALLOCA bit in FLAGS) says this
5453 could be a call to `alloca', so we must be careful about how the
5454 stack is used.
5455
5456 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5457 argument area. With ACCUMULATE_OUTGOING_ARGS it indicates that we need not worry about saving and restoring the stack.
5458
5459 FNDECL is the declaration of the function we are calling.
5460
5461 Return nonzero if this arg should cause sibcall failure,
5462 zero otherwise. */
5463
5464 static int
5465 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5466 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5467 {
5468 tree pval = arg->tree_value;
5469 rtx reg = 0;
5470 int partial = 0;
5471 poly_int64 used = 0;
5472 poly_int64 lower_bound = 0, upper_bound = 0;
5473 int sibcall_failure = 0;
5474
5475 if (TREE_CODE (pval) == ERROR_MARK)
5476 return 1;
5477
5478 /* Push a new temporary level for any temporaries we make for
5479 this argument. */
5480 push_temp_slots ();
5481
5482 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5483 {
5484 /* If this is being stored into a pre-allocated, fixed-size stack area,
5485 save any previous data at that location. */
5486 if (argblock && ! variable_size && arg->stack)
5487 {
5488 if (ARGS_GROW_DOWNWARD)
5489 {
5490 /* stack_slot is negative, but we want to index stack_usage_map
5491 with positive values. */
5492 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5493 {
5494 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5495 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5496 }
5497 else
5498 upper_bound = 0;
5499
5500 lower_bound = upper_bound - arg->locate.size.constant;
5501 }
5502 else
5503 {
5504 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5505 {
5506 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5507 lower_bound = rtx_to_poly_int64 (offset);
5508 }
5509 else
5510 lower_bound = 0;
5511
5512 upper_bound = lower_bound + arg->locate.size.constant;
5513 }
5514
5515 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5516 reg_parm_stack_space))
5517 {
5518 /* We need to make a save area. */
5519 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5520 machine_mode save_mode
5521 = int_mode_for_size (size, 1).else_blk ();
5522 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5523 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5524
5525 if (save_mode == BLKmode)
5526 {
5527 arg->save_area
5528 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5529 preserve_temp_slots (arg->save_area);
5530 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5531 stack_area,
5532 (gen_int_mode
5533 (arg->locate.size.constant, Pmode)),
5534 BLOCK_OP_CALL_PARM);
5535 }
5536 else
5537 {
5538 arg->save_area = gen_reg_rtx (save_mode);
5539 emit_move_insn (arg->save_area, stack_area);
5540 }
5541 }
5542 }
5543 }
5544
5545 /* If this isn't going to be placed on both the stack and in registers,
5546 set up the register and number of words. */
5547 if (! arg->pass_on_stack)
5548 {
5549 if (flags & ECF_SIBCALL)
5550 reg = arg->tail_call_reg;
5551 else
5552 reg = arg->reg;
5553 partial = arg->partial;
5554 }
5555
5556 /* An argument passed entirely in a register should never reach this
5557 point; the assertion below enforces it. */
5558 gcc_assert (reg == 0 || partial != 0);
5559
5560 /* If this arg needs special alignment, don't load the registers
5561 here. */
5562 if (arg->n_aligned_regs != 0)
5563 reg = 0;
5564
5565 /* If this is being passed partially in a register, we can't evaluate
5566 it directly into its stack slot. Otherwise, we can. */
5567 if (arg->value == 0)
5568 {
5569 /* stack_arg_under_construction is nonzero if a function argument is
5570 being evaluated directly into the outgoing argument list and
5571 expand_call must take special action to preserve the argument list
5572 if it is called recursively.
5573
5574 For scalar function arguments stack_usage_map is sufficient to
5575 determine which stack slots must be saved and restored. Scalar
5576 arguments in general have pass_on_stack == 0.
5577
5578 If this argument is initialized by a function which takes the
5579 address of the argument (a C++ constructor or a C function
5580 returning a BLKmode structure), then stack_usage_map is
5581 insufficient and expand_call must push the stack around the
5582 function call. Such arguments have pass_on_stack == 1.
5583
5584 Note that it is always safe to set stack_arg_under_construction,
5585 but this generates suboptimal code if set when not needed. */
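/* A concrete instance of the hazard described above (the types are
   hypothetical):

     struct big f (void);	// returns a BLKmode aggregate
     void g (struct big);
     ... g (f ()) ...

   f's result may be constructed directly into g's outgoing argument
   slot, so the slots already written for g must be protected around
   the nested call to f; pass_on_stack == 1 arguments take the
   stack-saving path for exactly this reason. */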
5586
5587 if (arg->pass_on_stack)
5588 stack_arg_under_construction++;
5589
5590 arg->value = expand_expr (pval,
5591 (partial
5592 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5593 ? NULL_RTX : arg->stack,
5594 VOIDmode, EXPAND_STACK_PARM);
5595
5596 /* If the mode doesn't agree (because we are promoting the object,
5597 or for any other reason), convert to the mode we need. */
5598
5599 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5600 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5601 arg->value, arg->unsignedp);
5602
5603 if (arg->pass_on_stack)
5604 stack_arg_under_construction--;
5605 }
5606
5607 /* Check for overlap with already clobbered argument area. */
5608 if ((flags & ECF_SIBCALL)
5609 && MEM_P (arg->value)
5610 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5611 arg->locate.size.constant))
5612 sibcall_failure = 1;
5613
5614 /* Don't allow anything to be left on the stack from the computation
5615 of an argument to alloca. */
5616 if (flags & ECF_MAY_BE_ALLOCA)
5617 do_pending_stack_adjust ();
5618
5619 if (arg->value == arg->stack)
5620 /* If the value is already in the stack slot, we are done. */
5621 ;
5622 else if (arg->mode != BLKmode)
5623 {
5624 unsigned int parm_align;
5625
5626 /* Argument is a scalar, not entirely passed in registers.
5627 (If part is passed in registers, arg->partial says how much
5628 and emit_push_insn will take care of putting it there.)
5629
5630 Push it, and if its size is less than the
5631 amount of space allocated to it,
5632 also bump stack pointer by the additional space.
5633 Note that in C the default argument promotions
5634 will prevent such mismatches. */
5635
5636 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5637 ? 0 : GET_MODE_SIZE (arg->mode));
5638
5639 /* Compute how much space the push instruction will push.
5640 On many machines, pushing a byte will advance the stack
5641 pointer by a halfword. */
5642 #ifdef PUSH_ROUNDING
5643 size = PUSH_ROUNDING (size);
5644 #endif
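/* For instance, a target whose push instructions always move the stack
   pointer by two bytes might define PUSH_ROUNDING (BYTES) as
   (((BYTES) + 1) & ~1), so pushing a single QImode byte still consumes
   two bytes of stack.  (Hypothetical definition; each target supplies
   its own.)  */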
5645 used = size;
5646
5647 /* Compute how much space the argument should get:
5648 round up to a multiple of the alignment for arguments. */
5649 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5650 != PAD_NONE)
5651 /* At the moment we don't (need to) support ABIs for which the
5652 padding isn't known at compile time. In principle it should
5653 be easy to add though. */
5654 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5655
5656 /* Compute the alignment of the pushed argument. */
5657 parm_align = arg->locate.boundary;
5658 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5659 == PAD_DOWNWARD)
5660 {
5661 poly_int64 pad = used - size;
5662 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5663 if (pad_align != 0)
5664 parm_align = MIN (parm_align, pad_align);
5665 }
5666
5667 /* This isn't already where we want it on the stack, so put it there.
5668 This can either be done with push or copy insns. */
5669 if (maybe_ne (used, 0)
5670 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5671 NULL_RTX, parm_align, partial, reg, used - size,
5672 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5673 reg_parm_stack_space,
5674 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5675 sibcall_failure = 1;
5676
5677 /* Unless this is a partially-in-register argument, the argument is now
5678 in the stack. */
5679 if (partial == 0)
5680 arg->value = arg->stack;
5681 }
5682 else
5683 {
5684 /* BLKmode, at least partly to be pushed. */
5685
5686 unsigned int parm_align;
5687 poly_int64 excess;
5688 rtx size_rtx;
5689
5690 /* Pushing a nonscalar.
5691 If part is passed in registers, PARTIAL says how much
5692 and emit_push_insn will take care of putting it there. */
5693
5694 /* Round its size up to a multiple
5695 of the allocation unit for arguments. */
5696
5697 if (arg->locate.size.var != 0)
5698 {
5699 excess = 0;
5700 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5701 }
5702 else
5703 {
5704 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5705 for BLKmode is careful to avoid it. */
5706 excess = (arg->locate.size.constant
5707 - arg_int_size_in_bytes (TREE_TYPE (pval))
5708 + partial);
5709 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5710 NULL_RTX, TYPE_MODE (sizetype),
5711 EXPAND_NORMAL);
5712 }
5713
5714 parm_align = arg->locate.boundary;
5715
5716 /* When an argument is padded down, the block is aligned to
5717 PARM_BOUNDARY, but the actual argument isn't. */
5718 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5719 == PAD_DOWNWARD)
5720 {
5721 if (arg->locate.size.var)
5722 parm_align = BITS_PER_UNIT;
5723 else
5724 {
5725 unsigned int excess_align
5726 = known_alignment (excess) * BITS_PER_UNIT;
5727 if (excess_align != 0)
5728 parm_align = MIN (parm_align, excess_align);
5729 }
5730 }
5731
5732 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5733 {
5734 /* emit_push_insn might not work properly if arg->value and
5735 argblock + arg->locate.offset areas overlap. */
5736 rtx x = arg->value;
5737 poly_int64 i = 0;
5738
5739 if (strip_offset (XEXP (x, 0), &i)
5740 == crtl->args.internal_arg_pointer)
5741 {
5742 /* arg.locate doesn't contain the pretend_args_size offset,
5743 it's part of argblock. Ensure we don't count it in I. */
5744 if (STACK_GROWS_DOWNWARD)
5745 i -= crtl->args.pretend_args_size;
5746 else
5747 i += crtl->args.pretend_args_size;
5748
5749 /* expand_call should ensure this. */
5750 gcc_assert (!arg->locate.offset.var
5751 && arg->locate.size.var == 0);
5752 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5753
5754 if (known_eq (arg->locate.offset.constant, i))
5755 {
5756 /* Even though they appear to be at the same location,
5757 if part of the outgoing argument is in registers,
5758 they aren't really at the same location. Check for
5759 this by making sure that the incoming size is the
5760 same as the outgoing size. */
5761 if (maybe_ne (arg->locate.size.constant, size_val))
5762 sibcall_failure = 1;
5763 }
5764 else if (maybe_in_range_p (arg->locate.offset.constant,
5765 i, size_val))
5766 sibcall_failure = 1;
5767 /* Use arg->locate.size.constant instead of size_rtx
5768 because we only care about the part of the argument
5769 on the stack. */
5770 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5771 arg->locate.size.constant))
5772 sibcall_failure = 1;
5773 }
5774 }
5775
5776 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5777 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5778 parm_align, partial, reg, excess, argblock,
5779 ARGS_SIZE_RTX (arg->locate.offset),
5780 reg_parm_stack_space,
5781 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5782
5783 /* Unless this is a partially-in-register argument, the argument is now
5784 in the stack.
5785
5786 ??? Unlike the case above, in which we want the actual
5787 address of the data, so that we can load it directly into a
5788 register, here we want the address of the stack slot, so that
5789 it's properly aligned for word-by-word copying or something
5790 like that. It's not clear that this is always correct. */
5791 if (partial == 0)
5792 arg->value = arg->stack_slot;
5793 }
5794
5795 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5796 {
5797 tree type = TREE_TYPE (arg->tree_value);
5798 arg->parallel_value
5799 = emit_group_load_into_temps (arg->reg, arg->value, type,
5800 int_size_in_bytes (type));
5801 }
5802
5803 /* Mark all slots this store used. */
5804 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5805 && argblock && ! variable_size && arg->stack)
5806 mark_stack_region_used (lower_bound, upper_bound);
5807
5808 /* Once we have pushed something, pops can't safely
5809 be deferred during the rest of the arguments. */
5810 NO_DEFER_POP;
5811
5812 /* Free any temporary slots made in processing this argument. */
5813 pop_temp_slots ();
5814
5815 return sibcall_failure;
5816 }
5817
5818 /* Nonzero if we do not know how to pass TYPE solely in registers. */
5819
5820 bool
5821 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5822 const_tree type)
5823 {
5824 if (!type)
5825 return false;
5826
5827 /* If the type has variable size... */
5828 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5829 return true;
5830
5831 /* If the type is marked as addressable (it is required
5832 to be constructed into the stack)... */
5833 if (TREE_ADDRESSABLE (type))
5834 return true;
5835
5836 return false;
5837 }
5838
5839 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5840 takes trailing padding of a structure into account. */
5841 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5842
5843 bool
5844 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5845 {
5846 if (!type)
5847 return false;
5848
5849 /* If the type has variable size... */
5850 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5851 return true;
5852
5853 /* If the type is marked as addressable (it is required
5854 to be constructed into the stack)... */
5855 if (TREE_ADDRESSABLE (type))
5856 return true;
5857
5858 if (TYPE_EMPTY_P (type))
5859 return false;
5860
5861 /* If the padding and mode of the type are such that a copy into
5862 a register would put it into the wrong part of the register... */
5863 if (mode == BLKmode
5864 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5865 && (targetm.calls.function_arg_padding (mode, type)
5866 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5867 return true;
5868
5869 return false;
5870 }
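/* Worked example for the final check above, with hypothetical target
   parameters PARM_BOUNDARY == 32 and BYTES_BIG_ENDIAN == 1: a 6-byte
   BLKmode structure has 6 % 4 != 0, and PAD_UPWARD would leave its
   bytes at the wrong end of a register copy, so it must be passed on
   the stack. */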
5871
5872 /* Tell the garbage collector about GTY markers in this source file. */
5873 #include "gt-calls.h"
5874