1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2    Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "target.h"
25 #include "function.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "expmed.h"
30 #include "optabs.h"
31 #include "emit-rtl.h"
32 #include "recog.h"
33 #include "diagnostic-core.h"
34 #include "stor-layout.h"
35 #include "except.h"
36 #include "dojump.h"
37 #include "explow.h"
38 #include "expr.h"
39 #include "common/common-target.h"
40 #include "output.h"
41 
42 static rtx break_out_memory_refs (rtx);
43 
44 
45 /* Truncate and perhaps sign-extend C as appropriate for MODE.  */
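/* An illustrative sketch, not part of the original source: assuming a
   64-bit HOST_WIDE_INT and a 32-bit MODE, a C of 0xfffffffff is first
   masked down to 0xffffffff and then sign-extended, so -1 is returned,
   while 0x7fffffff comes back unchanged.  */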
46 
47 HOST_WIDE_INT
48 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
49 {
50   int width = GET_MODE_PRECISION (mode);
51 
52   /* You want to truncate to a _what_?  */
53   gcc_assert (SCALAR_INT_MODE_P (mode)
54 	      || POINTER_BOUNDS_MODE_P (mode));
55 
56   /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
57   if (mode == BImode)
58     return c & 1 ? STORE_FLAG_VALUE : 0;
59 
60   /* Sign-extend for the requested mode.  */
61 
62   if (width < HOST_BITS_PER_WIDE_INT)
63     {
64       HOST_WIDE_INT sign = 1;
65       sign <<= width - 1;
66       c &= (sign << 1) - 1;
67       c ^= sign;
68       c -= sign;
69     }
70 
71   return c;
72 }
73 
74 /* Return an rtx for the sum of X and the integer C, given that X has
75    mode MODE.  INPLACE is true if X can be modified in place or false
76    if it must be treated as immutable.  */
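/* A hedged usage sketch, not part of the original source: a caller
   holding an address REG in Pmode that wants REG+4 without modifying
   REG might write

     rtx addr = plus_constant (Pmode, reg, 4);

   which folds C into an existing constant term when it can and
   otherwise wraps X in a new PLUS.  */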
77 
78 rtx
79 plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
80 	       bool inplace)
81 {
82   RTX_CODE code;
83   rtx y;
84   rtx tem;
85   int all_constant = 0;
86 
87   gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
88 
89   if (c == 0)
90     return x;
91 
92  restart:
93 
94   code = GET_CODE (x);
95   y = x;
96 
97   switch (code)
98     {
99     CASE_CONST_SCALAR_INT:
100       return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
101 				   mode);
102     case MEM:
103       /* If this is a reference to the constant pool, try replacing it with
104 	 a reference to a new constant.  If the resulting address isn't
105 	 valid, don't return it because we have no way to validize it.  */
106       if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
107 	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
108 	{
109 	  rtx cst = get_pool_constant (XEXP (x, 0));
110 
111 	  if (GET_CODE (cst) == CONST_VECTOR
112 	      && GET_MODE_INNER (GET_MODE (cst)) == mode)
113 	    {
114 	      cst = gen_lowpart (mode, cst);
115 	      gcc_assert (cst);
116 	    }
117 	  tem = plus_constant (mode, cst, c);
118 	  tem = force_const_mem (GET_MODE (x), tem);
119 	  /* Targets may disallow some constants in the constant pool, thus
120 	     force_const_mem may return NULL_RTX.  */
121 	  if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
122 	    return tem;
123 	}
124       break;
125 
126     case CONST:
127       /* If adding to something entirely constant, set a flag
128 	 so that we can add a CONST around the result.  */
129       if (inplace && shared_const_p (x))
130 	inplace = false;
131       x = XEXP (x, 0);
132       all_constant = 1;
133       goto restart;
134 
135     case SYMBOL_REF:
136     case LABEL_REF:
137       all_constant = 1;
138       break;
139 
140     case PLUS:
141       /* The interesting case is adding the integer to a sum.  Look
142 	 for a constant term in the sum and combine it with C.  For an
143 	 integer constant term or a constant term that is not an
144 	 explicit integer, we combine or group them together anyway.
145 
146 	 We may not immediately return from the recursive call here, lest
147 	 all_constant gets lost.  */
148 
149       if (CONSTANT_P (XEXP (x, 1)))
150 	{
151 	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
152 	  if (term == const0_rtx)
153 	    x = XEXP (x, 0);
154 	  else if (inplace)
155 	    XEXP (x, 1) = term;
156 	  else
157 	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
158 	  c = 0;
159 	}
160       else if (rtx *const_loc = find_constant_term_loc (&y))
161 	{
162 	  if (!inplace)
163 	    {
164 	      /* We need to be careful since X may be shared and we can't
165 		 modify it in place.  */
166 	      x = copy_rtx (x);
167 	      const_loc = find_constant_term_loc (&x);
168 	    }
169 	  *const_loc = plus_constant (mode, *const_loc, c, true);
170 	  c = 0;
171 	}
172       break;
173 
174     default:
175       break;
176     }
177 
178   if (c != 0)
179     x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
180 
181   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
182     return x;
183   else if (all_constant)
184     return gen_rtx_CONST (mode, x);
185   else
186     return x;
187 }
188 
189 /* If X is a sum, return a new sum like X but lacking any constant terms.
190    Add all the removed constant terms into *CONSTPTR.
191    X itself is not altered.  The result != X if and only if
192    it is not isomorphic to X.  */
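/* An assumed illustration, not from the original source: given
   X = (plus (plus (reg) (const_int 8)) (const_int -3)) and *CONSTPTR
   initially const0_rtx, the recursion strips both integer terms,
   returning (reg) and leaving *CONSTPTR as (const_int 5).  */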
193 
194 rtx
195 eliminate_constant_term (rtx x, rtx *constptr)
196 {
197   rtx x0, x1;
198   rtx tem;
199 
200   if (GET_CODE (x) != PLUS)
201     return x;
202 
203   /* First handle constants appearing at this level explicitly.  */
204   if (CONST_INT_P (XEXP (x, 1))
205       && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
206 						XEXP (x, 1)))
207       && CONST_INT_P (tem))
208     {
209       *constptr = tem;
210       return eliminate_constant_term (XEXP (x, 0), constptr);
211     }
212 
213   tem = const0_rtx;
214   x0 = eliminate_constant_term (XEXP (x, 0), &tem);
215   x1 = eliminate_constant_term (XEXP (x, 1), &tem);
216   if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
217       && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
218 						*constptr, tem))
219       && CONST_INT_P (tem))
220     {
221       *constptr = tem;
222       return gen_rtx_PLUS (GET_MODE (x), x0, x1);
223     }
224 
225   return x;
226 }
227 
228 
229 /* Return a copy of X in which all memory references
230    and all constants that involve symbol refs
231    have been replaced with new temporary registers.
232    Also emit code to load the memory locations and constants
233    into those registers.
234 
235    If X contains no such constants or memory references,
236    X itself (not a copy) is returned.
237 
238    If a constant is found in the address that is not a legitimate constant
239    in an insn, it is left alone in the hope that it might be valid in the
240    address.
241 
242    X may contain no arithmetic except addition, subtraction and multiplication.
243    Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
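/* A hedged illustration, not from the original source: for an address
   such as (plus (mem (reg)) (symbol_ref "x")), both the MEM and the
   symbolic constant are loaded into fresh pseudos and a PLUS of the two
   new registers is returned, while a plain (reg) comes back untouched.  */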
244 
245 static rtx
246 break_out_memory_refs (rtx x)
247 {
248   if (MEM_P (x)
249       || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
250 	  && GET_MODE (x) != VOIDmode))
251     x = force_reg (GET_MODE (x), x);
252   else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
253 	   || GET_CODE (x) == MULT)
254     {
255       rtx op0 = break_out_memory_refs (XEXP (x, 0));
256       rtx op1 = break_out_memory_refs (XEXP (x, 1));
257 
258       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
259 	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
260     }
261 
262   return x;
263 }
264 
265 /* Given X, a memory address in address space AS' pointer mode, convert it to
266    an address in the address space's address mode, or vice versa (TO_MODE says
267    which way).  We take advantage of the fact that pointers are not allowed to
268    overflow by commuting arithmetic operations over conversions so that address
269    arithmetic insns can be used. IN_CONST is true if this conversion is inside
270    a CONST. NO_EMIT is true if no insns should be emitted, and instead
271    it should return NULL if it can't be simplified without emitting insns.  */
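/* A hedged illustration under assumed target properties, not from the
   original source: on a target whose pointer mode is 32 bits but whose
   address mode is 64 bits with POINTERS_EXTEND_UNSIGNED == 1, converting
   (plus (symbol_ref "x") (const_int 4)) to the wider mode rewrites the
   SYMBOL_REF in the 64-bit mode and keeps the constant term, instead of
   emitting an explicit extension of the whole sum.  */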
272 
273 rtx
274 convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
275 				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
276 				     bool in_const ATTRIBUTE_UNUSED,
277 				     bool no_emit ATTRIBUTE_UNUSED)
278 {
279 #ifndef POINTERS_EXTEND_UNSIGNED
280   gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
281   return x;
282 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
283   machine_mode pointer_mode, address_mode, from_mode;
284   rtx temp;
285   enum rtx_code code;
286 
287   /* If X already has the right mode, just return it.  */
288   if (GET_MODE (x) == to_mode)
289     return x;
290 
291   pointer_mode = targetm.addr_space.pointer_mode (as);
292   address_mode = targetm.addr_space.address_mode (as);
293   from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
294 
295   /* Here we handle some special cases.  If none of them apply, fall through
296      to the default case.  */
297   switch (GET_CODE (x))
298     {
299     CASE_CONST_SCALAR_INT:
300       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
301 	code = TRUNCATE;
302       else if (POINTERS_EXTEND_UNSIGNED < 0)
303 	break;
304       else if (POINTERS_EXTEND_UNSIGNED > 0)
305 	code = ZERO_EXTEND;
306       else
307 	code = SIGN_EXTEND;
308       temp = simplify_unary_operation (code, to_mode, x, from_mode);
309       if (temp)
310 	return temp;
311       break;
312 
313     case SUBREG:
314       if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
315 	  && GET_MODE (SUBREG_REG (x)) == to_mode)
316 	return SUBREG_REG (x);
317       break;
318 
319     case LABEL_REF:
320       temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
321       LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
322       return temp;
323 
324     case SYMBOL_REF:
325       temp = shallow_copy_rtx (x);
326       PUT_MODE (temp, to_mode);
327       return temp;
328 
329     case CONST:
330       temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
331 						  true, no_emit);
332       return temp ? gen_rtx_CONST (to_mode, temp) : temp;
333 
334     case PLUS:
335     case MULT:
336       /* For addition we can safely permute the conversion and addition
337 	 operation if one operand is a constant and converting the constant
338 	 does not change it or if one operand is a constant and we are
339 	 using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
340 	 We can always safely permute them if we are making the address
341 	 narrower. Inside a CONST RTL, this is safe for both pointers
342 	 zero or sign extended as pointers cannot wrap. */
343       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
344 	  || (GET_CODE (x) == PLUS
345 	      && CONST_INT_P (XEXP (x, 1))
346 	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
347 		  || XEXP (x, 1) == convert_memory_address_addr_space_1
348 				     (to_mode, XEXP (x, 1), as, in_const,
349 				      no_emit)
350                   || POINTERS_EXTEND_UNSIGNED < 0)))
351 	{
352 	  temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
353 						      as, in_const, no_emit);
354 	  return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
355 					 temp, XEXP (x, 1))
356 		       : temp);
357 	}
358       break;
359 
360     default:
361       break;
362     }
363 
364   if (no_emit)
365     return NULL_RTX;
366 
367   return convert_modes (to_mode, from_mode,
368 			x, POINTERS_EXTEND_UNSIGNED);
369 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
370 }
371 
372 /* Given X, a memory address in address space AS' pointer mode, convert it to
373    an address in the address space's address mode, or vice versa (TO_MODE says
374    which way).  We take advantage of the fact that pointers are not allowed to
375    overflow by commuting arithmetic operations over conversions so that address
376    arithmetic insns can be used.  */
377 
378 rtx
379 convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
380 {
381   return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
382 }
383 
384 
385 /* Return something equivalent to X but valid as a memory address for something
386    of mode MODE in the named address space AS.  When X is not itself valid,
387    this works by copying X or subexpressions of it into registers.  */
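/* A hedged usage sketch, not part of the original source: expanders that
   build a MEM by hand typically funnel the raw address through this
   routine first, along the lines of

     addr = memory_address_addr_space (SImode, addr, ADDR_SPACE_GENERIC);
     mem = gen_rtx_MEM (SImode, addr);

   so that anything the target cannot accept directly is copied into
   registers here instead of failing later in recog.  */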
388 
389 rtx
390 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
391 {
392   rtx oldx = x;
393   machine_mode address_mode = targetm.addr_space.address_mode (as);
394 
395   x = convert_memory_address_addr_space (address_mode, x, as);
396 
397   /* By passing constant addresses through registers
398      we get a chance to cse them.  */
399   if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
400     x = force_reg (address_mode, x);
401 
402   /* We get better cse by rejecting indirect addressing at this stage.
403      Let the combiner create indirect addresses where appropriate.
404      For now, generate the code so that the subexpressions useful to share
405      are visible.  But not if cse won't be done!  */
406   else
407     {
408       if (! cse_not_expected && !REG_P (x))
409 	x = break_out_memory_refs (x);
410 
411       /* At this point, any valid address is accepted.  */
412       if (memory_address_addr_space_p (mode, x, as))
413 	goto done;
414 
415       /* If it was valid before but breaking out memory refs invalidated it,
416 	 use it the old way.  */
417       if (memory_address_addr_space_p (mode, oldx, as))
418 	{
419 	  x = oldx;
420 	  goto done;
421 	}
422 
423       /* Perform machine-dependent transformations on X
424 	 in certain cases.  This is not necessary since the code
425 	 below can handle all possible cases, but machine-dependent
426 	 transformations can make better code.  */
427       {
428 	rtx orig_x = x;
429 	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
430 	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
431 	  goto done;
432       }
433 
434       /* PLUS and MULT can appear in special ways
435 	 as the result of attempts to make an address usable for indexing.
436 	 Usually they are dealt with by calling force_operand, below.
437 	 But a sum containing constant terms is special
438 	 if removing them makes the sum a valid address:
439 	 then we generate that address in a register
440 	 and index off of it.  We do this because it often makes
441 	 shorter code, and because the addresses thus generated
442 	 in registers often become common subexpressions.  */
443       if (GET_CODE (x) == PLUS)
444 	{
445 	  rtx constant_term = const0_rtx;
446 	  rtx y = eliminate_constant_term (x, &constant_term);
447 	  if (constant_term == const0_rtx
448 	      || ! memory_address_addr_space_p (mode, y, as))
449 	    x = force_operand (x, NULL_RTX);
450 	  else
451 	    {
452 	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
453 	      if (! memory_address_addr_space_p (mode, y, as))
454 		x = force_operand (x, NULL_RTX);
455 	      else
456 		x = y;
457 	    }
458 	}
459 
460       else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
461 	x = force_operand (x, NULL_RTX);
462 
463       /* If we have a register that's an invalid address,
464 	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
465       else if (REG_P (x))
466 	x = copy_to_reg (x);
467 
468       /* Last resort: copy the value to a register, since
469 	 the register is a valid address.  */
470       else
471 	x = force_reg (address_mode, x);
472     }
473 
474  done:
475 
476   gcc_assert (memory_address_addr_space_p (mode, x, as));
477   /* If we didn't change the address, we are done.  Otherwise, mark
478      a reg as a pointer if we have REG or REG + CONST_INT.  */
479   if (oldx == x)
480     return x;
481   else if (REG_P (x))
482     mark_reg_pointer (x, BITS_PER_UNIT);
483   else if (GET_CODE (x) == PLUS
484 	   && REG_P (XEXP (x, 0))
485 	   && CONST_INT_P (XEXP (x, 1)))
486     mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
487 
488   /* OLDX may have been the address on a temporary.  Update the address
489      to indicate that X is now used.  */
490   update_temp_slot_address (oldx, x);
491 
492   return x;
493 }
494 
495 /* Convert a mem ref into one with a valid memory address.
496    Pass through anything else unchanged.  */
497 
498 rtx
499 validize_mem (rtx ref)
500 {
501   if (!MEM_P (ref))
502     return ref;
503   ref = use_anchored_address (ref);
504   if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
505 				   MEM_ADDR_SPACE (ref)))
506     return ref;
507 
508   /* Don't alter REF itself, since that is probably a stack slot.  */
509   return replace_equiv_address (ref, XEXP (ref, 0));
510 }
511 
512 /* If X is a memory reference to a member of an object block, try rewriting
513    it to use an anchor instead.  Return the new memory reference on success
514    and the old one on failure.  */
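/* An assumed illustration, not from the original source: with
   -fsection-anchors, a reference such as (mem (symbol_ref "b")) where
   "b" sits 12 bytes into its object block may come back as
   (mem (plus (reg ANCHOR) (const_int 12))), ANCHOR being a pseudo
   holding the block's section anchor; when no anchor applies, the
   original MEM is returned unchanged.  */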
515 
516 rtx
517 use_anchored_address (rtx x)
518 {
519   rtx base;
520   HOST_WIDE_INT offset;
521   machine_mode mode;
522 
523   if (!flag_section_anchors)
524     return x;
525 
526   if (!MEM_P (x))
527     return x;
528 
529   /* Split the address into a base and offset.  */
530   base = XEXP (x, 0);
531   offset = 0;
532   if (GET_CODE (base) == CONST
533       && GET_CODE (XEXP (base, 0)) == PLUS
534       && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
535     {
536       offset += INTVAL (XEXP (XEXP (base, 0), 1));
537       base = XEXP (XEXP (base, 0), 0);
538     }
539 
540   /* Check whether BASE is suitable for anchors.  */
541   if (GET_CODE (base) != SYMBOL_REF
542       || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
543       || SYMBOL_REF_ANCHOR_P (base)
544       || SYMBOL_REF_BLOCK (base) == NULL
545       || !targetm.use_anchors_for_symbol_p (base))
546     return x;
547 
548   /* Decide where BASE is going to be.  */
549   place_block_symbol (base);
550 
551   /* Get the anchor we need to use.  */
552   offset += SYMBOL_REF_BLOCK_OFFSET (base);
553   base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
554 			     SYMBOL_REF_TLS_MODEL (base));
555 
556   /* Work out the offset from the anchor.  */
557   offset -= SYMBOL_REF_BLOCK_OFFSET (base);
558 
559   /* If we're going to run a CSE pass, force the anchor into a register.
560      We will then be able to reuse registers for several accesses, if the
561      target costs say that that's worthwhile.  */
562   mode = GET_MODE (base);
563   if (!cse_not_expected)
564     base = force_reg (mode, base);
565 
566   return replace_equiv_address (x, plus_constant (mode, base, offset));
567 }
568 
569 /* Copy the value or contents of X to a new temp reg and return that reg.  */
570 
571 rtx
572 copy_to_reg (rtx x)
573 {
574   rtx temp = gen_reg_rtx (GET_MODE (x));
575 
576   /* If not an operand, must be an address with PLUS and MULT so
577      do the computation.  */
578   if (! general_operand (x, VOIDmode))
579     x = force_operand (x, temp);
580 
581   if (x != temp)
582     emit_move_insn (temp, x);
583 
584   return temp;
585 }
586 
587 /* Like copy_to_reg but always give the new register mode Pmode
588    in case X is a constant.  */
589 
590 rtx
591 copy_addr_to_reg (rtx x)
592 {
593   return copy_to_mode_reg (Pmode, x);
594 }
595 
596 /* Like copy_to_reg but always give the new register mode MODE
597    in case X is a constant.  */
598 
599 rtx
600 copy_to_mode_reg (machine_mode mode, rtx x)
601 {
602   rtx temp = gen_reg_rtx (mode);
603 
604   /* If not an operand, must be an address with PLUS and MULT so
605      do the computation.  */
606   if (! general_operand (x, VOIDmode))
607     x = force_operand (x, temp);
608 
609   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
610   if (x != temp)
611     emit_move_insn (temp, x);
612   return temp;
613 }
614 
615 /* Load X into a register if it is not already one.
616    Use mode MODE for the register.
617    X should be valid for mode MODE, but it may be a constant which
618    is valid for all integer modes; that's why caller must specify MODE.
619 
620    The caller must not alter the value in the register we return,
621    since we mark it as a "constant" register.  */
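/* A hedged usage sketch, not part of the original source:

     rtx r = force_reg (Pmode, gen_rtx_SYMBOL_REF (Pmode, "x"));

   emits a move of the symbol into a fresh pseudo and marks that pseudo
   as a pointer, using DECL_ALIGN of the symbol's decl when one is
   attached; a REG_EQUAL note is only added when the emitted set's source
   ends up different from the original constant.  */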
622 
623 rtx
624 force_reg (machine_mode mode, rtx x)
625 {
626   rtx temp, set;
627   rtx_insn *insn;
628 
629   if (REG_P (x))
630     return x;
631 
632   if (general_operand (x, mode))
633     {
634       temp = gen_reg_rtx (mode);
635       insn = emit_move_insn (temp, x);
636     }
637   else
638     {
639       temp = force_operand (x, NULL_RTX);
640       if (REG_P (temp))
641 	insn = get_last_insn ();
642       else
643 	{
644 	  rtx temp2 = gen_reg_rtx (mode);
645 	  insn = emit_move_insn (temp2, temp);
646 	  temp = temp2;
647 	}
648     }
649 
650   /* Let optimizers know that TEMP's value never changes
651      and that X can be substituted for it.  Don't get confused
652      if INSN set something else (such as a SUBREG of TEMP).  */
653   if (CONSTANT_P (x)
654       && (set = single_set (insn)) != 0
655       && SET_DEST (set) == temp
656       && ! rtx_equal_p (x, SET_SRC (set)))
657     set_unique_reg_note (insn, REG_EQUAL, x);
658 
659   /* Let optimizers know that TEMP is a pointer, and if so, the
660      known alignment of that pointer.  */
661   {
662     unsigned align = 0;
663     if (GET_CODE (x) == SYMBOL_REF)
664       {
665         align = BITS_PER_UNIT;
666 	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
667 	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
668       }
669     else if (GET_CODE (x) == LABEL_REF)
670       align = BITS_PER_UNIT;
671     else if (GET_CODE (x) == CONST
672 	     && GET_CODE (XEXP (x, 0)) == PLUS
673 	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
674 	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
675       {
676 	rtx s = XEXP (XEXP (x, 0), 0);
677 	rtx c = XEXP (XEXP (x, 0), 1);
678 	unsigned sa, ca;
679 
680 	sa = BITS_PER_UNIT;
681 	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
682 	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
683 
684 	if (INTVAL (c) == 0)
685 	  align = sa;
686 	else
687 	  {
688 	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
689 	    align = MIN (sa, ca);
690 	  }
691       }
692 
693     if (align || (MEM_P (x) && MEM_POINTER (x)))
694       mark_reg_pointer (temp, align);
695   }
696 
697   return temp;
698 }
699 
700 /* If X is a memory ref, copy its contents to a new temp reg and return
701    that reg.  Otherwise, return X.  */
702 
703 rtx
704 force_not_mem (rtx x)
705 {
706   rtx temp;
707 
708   if (!MEM_P (x) || GET_MODE (x) == BLKmode)
709     return x;
710 
711   temp = gen_reg_rtx (GET_MODE (x));
712 
713   if (MEM_POINTER (x))
714     REG_POINTER (temp) = 1;
715 
716   emit_move_insn (temp, x);
717   return temp;
718 }
719 
720 /* Copy X to TARGET (if it's nonzero and a reg)
721    or to a new temp reg and return that reg.
722    MODE is the mode to use for X in case it is a constant.  */
723 
724 rtx
725 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
726 {
727   rtx temp;
728 
729   if (target && REG_P (target))
730     temp = target;
731   else
732     temp = gen_reg_rtx (mode);
733 
734   emit_move_insn (temp, x);
735   return temp;
736 }
737 
738 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
739    PUNSIGNEDP points to the signedness of the type and may be adjusted
740    to show what signedness to use on extension operations.
741 
742    FOR_RETURN is nonzero if the caller is promoting the return value
743    of FNDECL, else it is for promoting args.  */
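/* A hedged illustration under assumed ABI behavior, not from the
   original source: on a 64-bit target whose calling convention widens
   small integer arguments, promoting a 'short int' parameter would
   typically come back as a wider integer mode with *PUNSIGNEDP left 0,
   whereas a type the target does not promote returns MODE unchanged.  */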
744 
745 machine_mode
746 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
747 		       const_tree funtype, int for_return)
748 {
749   /* Called without a type node for a libcall.  */
750   if (type == NULL_TREE)
751     {
752       if (INTEGRAL_MODE_P (mode))
753 	return targetm.calls.promote_function_mode (NULL_TREE, mode,
754 						    punsignedp, funtype,
755 						    for_return);
756       else
757 	return mode;
758     }
759 
760   switch (TREE_CODE (type))
761     {
762     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
763     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
764     case POINTER_TYPE:   case REFERENCE_TYPE:
765       return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
766 						  for_return);
767 
768     default:
769       return mode;
770     }
771 }
772 /* Return the mode to use to store a scalar of TYPE and MODE.
773    PUNSIGNEDP points to the signedness of the type and may be adjusted
774    to show what signedness to use on extension operations.  */
775 
776 machine_mode
777 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
778 	      int *punsignedp ATTRIBUTE_UNUSED)
779 {
780 #ifdef PROMOTE_MODE
781   enum tree_code code;
782   int unsignedp;
783 #endif
784 
785   /* For libcalls this is invoked without TYPE from the backend's
786      TARGET_PROMOTE_FUNCTION_MODE hook.  Don't do anything in that
787      case.  */
788   if (type == NULL_TREE)
789     return mode;
790 
791   /* FIXME: this is the same logic that was there until GCC 4.4, but we
792      probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
793      is not defined.  The affected targets are M32C, S390, SPARC.  */
794 #ifdef PROMOTE_MODE
795   code = TREE_CODE (type);
796   unsignedp = *punsignedp;
797 
798   switch (code)
799     {
800     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
801     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
802       PROMOTE_MODE (mode, unsignedp, type);
803       *punsignedp = unsignedp;
804       return mode;
805       break;
806 
807 #ifdef POINTERS_EXTEND_UNSIGNED
808     case REFERENCE_TYPE:
809     case POINTER_TYPE:
810       *punsignedp = POINTERS_EXTEND_UNSIGNED;
811       return targetm.addr_space.address_mode
812 	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
813       break;
814 #endif
815 
816     default:
817       return mode;
818     }
819 #else
820   return mode;
821 #endif
822 }
823 
824 
825 /* Use one of promote_mode or promote_function_mode to find the promoted
826    mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
827    of DECL after promotion.  */
828 
829 machine_mode
830 promote_decl_mode (const_tree decl, int *punsignedp)
831 {
832   tree type = TREE_TYPE (decl);
833   int unsignedp = TYPE_UNSIGNED (type);
834   machine_mode mode = DECL_MODE (decl);
835   machine_mode pmode;
836 
837   if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
838     pmode = promote_function_mode (type, mode, &unsignedp,
839                                    TREE_TYPE (current_function_decl), 1);
840   else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
841     pmode = promote_function_mode (type, mode, &unsignedp,
842                                    TREE_TYPE (current_function_decl), 2);
843   else
844     pmode = promote_mode (type, mode, &unsignedp);
845 
846   if (punsignedp)
847     *punsignedp = unsignedp;
848   return pmode;
849 }
850 
851 /* Return the promoted mode for name.  If it is a named SSA_NAME, it
852    is the same as promote_decl_mode.  Otherwise, it is the promoted
853    mode of a temp decl of same type as the SSA_NAME, if we had created
854    one.  */
855 
856 machine_mode
857 promote_ssa_mode (const_tree name, int *punsignedp)
858 {
859   gcc_assert (TREE_CODE (name) == SSA_NAME);
860 
861   /* Partitions holding parms and results must be promoted as expected
862      by function.c.  */
863   if (SSA_NAME_VAR (name)
864       && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
865 	  || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
866     {
867       machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
868       if (mode != BLKmode)
869 	return mode;
870     }
871 
872   tree type = TREE_TYPE (name);
873   int unsignedp = TYPE_UNSIGNED (type);
874   machine_mode mode = TYPE_MODE (type);
875 
876   /* Bypass TYPE_MODE when it maps vector modes to BLKmode.  */
877   if (mode == BLKmode)
878     {
879       gcc_assert (VECTOR_TYPE_P (type));
880       mode = type->type_common.mode;
881     }
882 
883   machine_mode pmode = promote_mode (type, mode, &unsignedp);
884   if (punsignedp)
885     *punsignedp = unsignedp;
886 
887   return pmode;
888 }
889 
890 
891 
892 /* Controls the behavior of {anti_,}adjust_stack.  */
893 static bool suppress_reg_args_size;
894 
895 /* A helper for adjust_stack and anti_adjust_stack.  */
896 
897 static void
898 adjust_stack_1 (rtx adjust, bool anti_p)
899 {
900   rtx temp;
901   rtx_insn *insn;
902 
903   /* Hereafter anti_p means subtract_p.  */
904   if (!STACK_GROWS_DOWNWARD)
905     anti_p = !anti_p;
906 
907   temp = expand_binop (Pmode,
908 		       anti_p ? sub_optab : add_optab,
909 		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
910 		       OPTAB_LIB_WIDEN);
911 
912   if (temp != stack_pointer_rtx)
913     insn = emit_move_insn (stack_pointer_rtx, temp);
914   else
915     {
916       insn = get_last_insn ();
917       temp = single_set (insn);
918       gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
919     }
920 
921   if (!suppress_reg_args_size)
922     add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
923 }
924 
925 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
926    This pops when ADJUST is positive.  ADJUST need not be constant.  */
927 
928 void
929 adjust_stack (rtx adjust)
930 {
931   if (adjust == const0_rtx)
932     return;
933 
934   /* We expect all variable-sized adjustments to be a multiple of
935      PREFERRED_STACK_BOUNDARY.  */
936   if (CONST_INT_P (adjust))
937     stack_pointer_delta -= INTVAL (adjust);
938 
939   adjust_stack_1 (adjust, false);
940 }
941 
942 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
943    This pushes when ADJUST is positive.  ADJUST need not be constant.  */
944 
945 void
946 anti_adjust_stack (rtx adjust)
947 {
948   if (adjust == const0_rtx)
949     return;
950 
951   /* We expect all variable-sized adjustments to be a multiple of
952      PREFERRED_STACK_BOUNDARY.  */
953   if (CONST_INT_P (adjust))
954     stack_pointer_delta += INTVAL (adjust);
955 
956   adjust_stack_1 (adjust, true);
957 }
958 
959 /* Round the size of a block to be pushed up to the boundary required
960    by this machine.  SIZE is the desired size, which need not be constant.  */
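/* A worked example under assumed values, not from the original source:
   with a preferred stack boundary of 128 bits, ALIGN below is 16, so a
   constant SIZE of 20 is rounded to (20 + 15) / 16 * 16 = 32; for a
   non-constant SIZE the same add/divide/multiply sequence is emitted as
   insns instead.  */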
961 
962 static rtx
963 round_push (rtx size)
964 {
965   rtx align_rtx, alignm1_rtx;
966 
967   if (!SUPPORTS_STACK_ALIGNMENT
968       || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
969     {
970       int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
971 
972       if (align == 1)
973 	return size;
974 
975       if (CONST_INT_P (size))
976 	{
977 	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
978 
979 	  if (INTVAL (size) != new_size)
980 	    size = GEN_INT (new_size);
981 	  return size;
982 	}
983 
984       align_rtx = GEN_INT (align);
985       alignm1_rtx = GEN_INT (align - 1);
986     }
987   else
988     {
989       /* If crtl->preferred_stack_boundary might still grow, use
990 	 virtual_preferred_stack_boundary_rtx instead.  This will be
991 	 substituted by the right value in vregs pass and optimized
992 	 during combine.  */
993       align_rtx = virtual_preferred_stack_boundary_rtx;
994       alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
995 				   NULL_RTX);
996     }
997 
998   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
999      but we know it can't.  So do the addition ourselves and then use
1000      TRUNC_DIV_EXPR.  */
1001   size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
1002 		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1003   size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
1004 			NULL_RTX, 1);
1005   size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
1006 
1007   return size;
1008 }
1009 
1010 /* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
1011    to a previously-created save area.  If no save area has been allocated,
1012    this function will allocate one.  If a save area is specified, it
1013    must be of the proper mode.  */
1014 
1015 void
1016 emit_stack_save (enum save_level save_level, rtx *psave)
1017 {
1018   rtx sa = *psave;
1019   /* The default is that we use a move insn and save in a Pmode object.  */
1020   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1021   machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1022 
1023   /* See if this machine has anything special to do for this kind of save.  */
1024   switch (save_level)
1025     {
1026     case SAVE_BLOCK:
1027       if (targetm.have_save_stack_block ())
1028 	fcn = targetm.gen_save_stack_block;
1029       break;
1030     case SAVE_FUNCTION:
1031       if (targetm.have_save_stack_function ())
1032 	fcn = targetm.gen_save_stack_function;
1033       break;
1034     case SAVE_NONLOCAL:
1035       if (targetm.have_save_stack_nonlocal ())
1036 	fcn = targetm.gen_save_stack_nonlocal;
1037       break;
1038     default:
1039       break;
1040     }
1041 
1042   /* If there is no save area and we have to allocate one, do so.  Otherwise
1043      verify the save area is the proper mode.  */
1044 
1045   if (sa == 0)
1046     {
1047       if (mode != VOIDmode)
1048 	{
1049 	  if (save_level == SAVE_NONLOCAL)
1050 	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1051 	  else
1052 	    *psave = sa = gen_reg_rtx (mode);
1053 	}
1054     }
1055 
1056   do_pending_stack_adjust ();
1057   if (sa != 0)
1058     sa = validize_mem (sa);
1059   emit_insn (fcn (sa, stack_pointer_rtx));
1060 }
1061 
1062 /* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
1063    area made by emit_stack_save.  If it is zero, we have nothing to do.  */
1064 
1065 void
1066 emit_stack_restore (enum save_level save_level, rtx sa)
1067 {
1068   /* The default is that we use a move insn.  */
1069   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1070 
1071   /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1072      STACK_POINTER and HARD_FRAME_POINTER.
1073      If stack_realign_fp, the x86 backend emits a prologue that aligns only
1074      STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1075      aligned variables, which is reflected in ix86_can_eliminate.
1076      We normally still have the realigned STACK_POINTER that we can use.
1077      But if there is a stack restore still present at reload, it can trigger
1078      mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1079      FRAME_POINTER into a hard reg.
1080      To prevent this situation, we force need_drap if we emit a stack
1081      restore.  */
1082   if (SUPPORTS_STACK_ALIGNMENT)
1083     crtl->need_drap = true;
1084 
1085   /* See if this machine has anything special to do for this kind of save.  */
1086   switch (save_level)
1087     {
1088     case SAVE_BLOCK:
1089       if (targetm.have_restore_stack_block ())
1090 	fcn = targetm.gen_restore_stack_block;
1091       break;
1092     case SAVE_FUNCTION:
1093       if (targetm.have_restore_stack_function ())
1094 	fcn = targetm.gen_restore_stack_function;
1095       break;
1096     case SAVE_NONLOCAL:
1097       if (targetm.have_restore_stack_nonlocal ())
1098 	fcn = targetm.gen_restore_stack_nonlocal;
1099       break;
1100     default:
1101       break;
1102     }
1103 
1104   if (sa != 0)
1105     {
1106       sa = validize_mem (sa);
1107       /* These clobbers prevent the scheduler from moving
1108 	 references to variable arrays below the code
1109 	 that deletes (pops) the arrays.  */
1110       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1111       emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1112     }
1113 
1114   discard_pending_stack_adjust ();
1115 
1116   emit_insn (fcn (stack_pointer_rtx, sa));
1117 }
1118 
1119 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1120    function.  This should be called whenever we allocate or deallocate
1121    dynamic stack space.  */
1122 
1123 void
1124 update_nonlocal_goto_save_area (void)
1125 {
1126   tree t_save;
1127   rtx r_save;
1128 
1129   /* The nonlocal_goto_save_area object is an array of N pointers.  The
1130      first one is used for the frame pointer save; the rest are sized by
1131      STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
1132      of the stack save area slots.  */
1133   t_save = build4 (ARRAY_REF,
1134 		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1135 		   cfun->nonlocal_goto_save_area,
1136 		   integer_one_node, NULL_TREE, NULL_TREE);
1137   r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1138 
1139   emit_stack_save (SAVE_NONLOCAL, &r_save);
1140 }
1141 
1142 /* Record a new stack level for the current function.  This should be called
1143    whenever we allocate or deallocate dynamic stack space.  */
1144 
1145 void
1146 record_new_stack_level (void)
1147 {
1148   /* Record the new stack level for nonlocal gotos.  */
1149   if (cfun->nonlocal_goto_save_area)
1150     update_nonlocal_goto_save_area ();
1151 
1152   /* Record the new stack level for SJLJ exceptions.  */
1153   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1154     update_sjlj_context ();
1155 }
1156 
1157 /* Return an rtx representing the address of an area of memory dynamically
1158    pushed on the stack.
1159 
1160    Any required stack pointer alignment is preserved.
1161 
1162    SIZE is an rtx representing the size of the area.
1163 
1164    SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
1165    parameter may be zero.  If so, a proper value will be extracted
1166    from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1167 
1168    REQUIRED_ALIGN is the alignment (in bits) required for the region
1169    of memory.
1170 
1171    If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1172    stack space allocated by the generated code cannot be added with itself
1173    in the course of the execution of the function.  It is always safe to
1174    pass FALSE here and the following criterion is sufficient in order to
1175    pass TRUE: every path in the CFG that starts at the allocation point and
1176    loops to it executes the associated deallocation code.  */
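/* A hedged usage sketch, not part of the original source: alloca-style
   expanders end up here with something along the lines of

     rtx addr = allocate_dynamic_stack_space (size_rtx, 0,
                                              BIGGEST_ALIGNMENT, true);

   where a SIZE_ALIGN of 0 lets the alignment be deduced from a constant
   size and a CANNOT_ACCUMULATE of true promises that the allocation
   cannot pile up on itself around a loop.  */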
1177 
1178 rtx
1179 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1180 			      unsigned required_align, bool cannot_accumulate)
1181 {
1182   HOST_WIDE_INT stack_usage_size = -1;
1183   rtx_code_label *final_label;
1184   rtx final_target, target;
1185   unsigned extra_align = 0;
1186   bool must_align;
1187 
1188   /* If we're asking for zero bytes, it doesn't matter what we point
1189      to since we can't dereference it.  But return a reasonable
1190      address anyway.  */
1191   if (size == const0_rtx)
1192     return virtual_stack_dynamic_rtx;
1193 
1194   /* Otherwise, show we're calling alloca or equivalent.  */
1195   cfun->calls_alloca = 1;
1196 
1197   /* If stack usage info is requested, look into the size we are passed.
1198      We need to do so this early to avoid the obfuscation that may be
1199      introduced later by the various alignment operations.  */
1200   if (flag_stack_usage_info)
1201     {
1202       if (CONST_INT_P (size))
1203 	stack_usage_size = INTVAL (size);
1204       else if (REG_P (size))
1205         {
1206 	  /* Look into the last emitted insn and see if we can deduce
1207 	     something for the register.  */
1208 	  rtx_insn *insn;
1209 	  rtx set, note;
1210 	  insn = get_last_insn ();
1211 	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1212 	    {
1213 	      if (CONST_INT_P (SET_SRC (set)))
1214 		stack_usage_size = INTVAL (SET_SRC (set));
1215 	      else if ((note = find_reg_equal_equiv_note (insn))
1216 		       && CONST_INT_P (XEXP (note, 0)))
1217 		stack_usage_size = INTVAL (XEXP (note, 0));
1218 	    }
1219 	}
1220 
1221       /* If the size is not constant, we can't say anything.  */
1222       if (stack_usage_size == -1)
1223 	{
1224 	  current_function_has_unbounded_dynamic_stack_size = 1;
1225 	  stack_usage_size = 0;
1226 	}
1227     }
1228 
1229   /* Ensure the size is in the proper mode.  */
1230   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1231     size = convert_to_mode (Pmode, size, 1);
1232 
1233   /* Adjust SIZE_ALIGN, if needed.  */
1234   if (CONST_INT_P (size))
1235     {
1236       unsigned HOST_WIDE_INT lsb;
1237 
1238       lsb = INTVAL (size);
1239       lsb &= -lsb;
1240 
1241       /* Watch out for overflow truncating to "unsigned".  */
1242       if (lsb > UINT_MAX / BITS_PER_UNIT)
1243 	size_align = 1u << (HOST_BITS_PER_INT - 1);
1244       else
1245 	size_align = (unsigned)lsb * BITS_PER_UNIT;
1246     }
1247   else if (size_align < BITS_PER_UNIT)
1248     size_align = BITS_PER_UNIT;
1249 
1250   /* We can't attempt to minimize alignment necessary, because we don't
1251      know the final value of preferred_stack_boundary yet while executing
1252      this code.  */
1253   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1254     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1255 
1256   /* We will need to ensure that the address we return is aligned to
1257      REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
1258      always know its final value at this point in the compilation (it
1259      might depend on the size of the outgoing parameter lists, for
1260      example), so we must align the value to be returned in that case.
1261      (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1262      STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1263      We must also do an alignment operation on the returned value if
1264      the stack pointer alignment is less strict than REQUIRED_ALIGN.
1265 
1266      If we have to align, we must leave space in SIZE for the hole
1267      that might result from the alignment operation.  */
1268 
1269   must_align = (crtl->preferred_stack_boundary < required_align);
1270   if (must_align)
1271     {
1272       if (required_align > PREFERRED_STACK_BOUNDARY)
1273 	extra_align = PREFERRED_STACK_BOUNDARY;
1274       else if (required_align > STACK_BOUNDARY)
1275 	extra_align = STACK_BOUNDARY;
1276       else
1277 	extra_align = BITS_PER_UNIT;
1278     }
1279 
1280   /* ??? STACK_POINTER_OFFSET is always defined now.  */
1281 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1282   must_align = true;
1283   extra_align = BITS_PER_UNIT;
1284 #endif
1285 
1286   if (must_align)
1287     {
1288       unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
1289 
1290       size = plus_constant (Pmode, size, extra);
1291       size = force_operand (size, NULL_RTX);
1292 
1293       if (flag_stack_usage_info)
1294 	stack_usage_size += extra;
1295 
1296       if (extra && size_align > extra_align)
1297 	size_align = extra_align;
1298     }
1299 
1300   /* Round the size to a multiple of the required stack alignment.
1301      Since the stack is presumed to be rounded before this allocation,
1302      this will maintain the required alignment.
1303 
1304      If the stack grows downward, we could save an insn by subtracting
1305      SIZE from the stack pointer and then aligning the stack pointer.
1306      The problem with this is that the stack pointer may be unaligned
1307      between the execution of the subtraction and alignment insns and
1308      some machines do not allow this.  Even on those that do, some
1309      signal handlers malfunction if a signal should occur between those
1310      insns.  Since this is an extremely rare event, we have no reliable
1311      way of knowing which systems have this problem.  So we avoid even
1312      momentarily mis-aligning the stack.  */
1313   if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1314     {
1315       size = round_push (size);
1316 
1317       if (flag_stack_usage_info)
1318 	{
1319 	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1320 	  stack_usage_size = (stack_usage_size + align - 1) / align * align;
1321 	}
1322     }
1323 
1324   target = gen_reg_rtx (Pmode);
1325 
1326   /* The size is supposed to be fully adjusted at this point so record it
1327      if stack usage info is requested.  */
1328   if (flag_stack_usage_info)
1329     {
1330       current_function_dynamic_stack_size += stack_usage_size;
1331 
1332       /* ??? This is gross but the only safe stance in the absence
1333 	 of stack usage oriented flow analysis.  */
1334       if (!cannot_accumulate)
1335 	current_function_has_unbounded_dynamic_stack_size = 1;
1336     }
1337 
1338   final_label = NULL;
1339   final_target = NULL_RTX;
1340 
1341   /* If we are splitting the stack, we need to ask the backend whether
1342      there is enough room on the current stack.  If there isn't, or if
1343      the backend doesn't know how to tell us, then we need to call a
1344      function to allocate memory in some other way.  This memory will
1345      be released when we release the current stack segment.  The
1346      effect is that stack allocation becomes less efficient, but at
1347      least it doesn't cause a stack overflow.  */
1348   if (flag_split_stack)
1349     {
1350       rtx_code_label *available_label;
1351       rtx ask, space, func;
1352 
1353       available_label = NULL;
1354 
1355       if (targetm.have_split_stack_space_check ())
1356 	{
1357 	  available_label = gen_label_rtx ();
1358 
1359 	  /* This instruction will branch to AVAILABLE_LABEL if there
1360 	     are SIZE bytes available on the stack.  */
1361 	  emit_insn (targetm.gen_split_stack_space_check
1362 		     (size, available_label));
1363 	}
1364 
1365       /* The __morestack_allocate_stack_space function will allocate
1366 	 memory using malloc.  If the alignment of the memory returned
1367 	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1368 	 make sure we allocate enough space.  */
1369       if (MALLOC_ABI_ALIGNMENT >= required_align)
1370 	ask = size;
1371       else
1372 	{
1373 	  ask = expand_binop (Pmode, add_optab, size,
1374 			      gen_int_mode (required_align / BITS_PER_UNIT - 1,
1375 					    Pmode),
1376 			      NULL_RTX, 1, OPTAB_LIB_WIDEN);
1377 	  must_align = true;
1378 	}
1379 
1380       func = init_one_libfunc ("__morestack_allocate_stack_space");
1381 
1382       space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1383 				       1, ask, Pmode);
1384 
1385       if (available_label == NULL_RTX)
1386 	return space;
1387 
1388       final_target = gen_reg_rtx (Pmode);
1389 
1390       emit_move_insn (final_target, space);
1391 
1392       final_label = gen_label_rtx ();
1393       emit_jump (final_label);
1394 
1395       emit_label (available_label);
1396     }
1397 
1398   do_pending_stack_adjust ();
1399 
1400  /* We ought always to be called at the top level, and the stack ought to be
1401     aligned properly.  */
1402   gcc_assert (!(stack_pointer_delta
1403 		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
1404 
1405   /* If needed, check that we have the required amount of stack.  Take into
1406      account what has already been checked.  */
1407   if (STACK_CHECK_MOVING_SP)
1408     ;
1409   else if (flag_stack_check == GENERIC_STACK_CHECK)
1410     probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1411 		       size);
1412   else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1413     probe_stack_range (STACK_CHECK_PROTECT, size);
1414 
1415   /* Don't let anti_adjust_stack emit notes.  */
1416   suppress_reg_args_size = true;
1417 
1418   /* Perform the required allocation from the stack.  Some systems do
1419      this differently than simply incrementing/decrementing from the
1420      stack pointer, such as acquiring the space by calling malloc().  */
1421   if (targetm.have_allocate_stack ())
1422     {
1423       struct expand_operand ops[2];
1424       /* We don't have to check against the predicate for operand 0 since
1425 	 TARGET is known to be a pseudo of the proper mode, which must
1426 	 be valid for the operand.  */
1427       create_fixed_operand (&ops[0], target);
1428       create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1429       expand_insn (targetm.code_for_allocate_stack, 2, ops);
1430     }
1431   else
1432     {
1433       int saved_stack_pointer_delta;
1434 
1435       if (!STACK_GROWS_DOWNWARD)
1436 	emit_move_insn (target, virtual_stack_dynamic_rtx);
1437 
1438       /* Check stack bounds if necessary.  */
1439       if (crtl->limit_stack)
1440 	{
1441 	  rtx available;
1442 	  rtx_code_label *space_available = gen_label_rtx ();
1443 	  if (STACK_GROWS_DOWNWARD)
1444 	    available = expand_binop (Pmode, sub_optab,
1445 				      stack_pointer_rtx, stack_limit_rtx,
1446 				      NULL_RTX, 1, OPTAB_WIDEN);
1447 	  else
1448 	    available = expand_binop (Pmode, sub_optab,
1449 				      stack_limit_rtx, stack_pointer_rtx,
1450 				      NULL_RTX, 1, OPTAB_WIDEN);
1451 
1452 	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1453 				   space_available);
1454 	  if (targetm.have_trap ())
1455 	    emit_insn (targetm.gen_trap ());
1456 	  else
1457 	    error ("stack limits not supported on this target");
1458 	  emit_barrier ();
1459 	  emit_label (space_available);
1460 	}
1461 
1462       saved_stack_pointer_delta = stack_pointer_delta;
1463 
1464       if (flag_stack_check && STACK_CHECK_MOVING_SP)
1465 	anti_adjust_stack_and_probe (size, false);
1466       else
1467 	anti_adjust_stack (size);
1468 
1469       /* Even if size is constant, don't modify stack_pointer_delta.
1470 	 The constant size alloca should preserve
1471 	 crtl->preferred_stack_boundary alignment.  */
1472       stack_pointer_delta = saved_stack_pointer_delta;
1473 
1474       if (STACK_GROWS_DOWNWARD)
1475 	emit_move_insn (target, virtual_stack_dynamic_rtx);
1476     }
1477 
1478   suppress_reg_args_size = false;
1479 
1480   /* Finish up the split stack handling.  */
1481   if (final_label != NULL_RTX)
1482     {
1483       gcc_assert (flag_split_stack);
1484       emit_move_insn (final_target, target);
1485       emit_label (final_label);
1486       target = final_target;
1487     }
1488 
1489   if (must_align)
1490     {
1491       /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1492 	 but we know it can't.  So do the addition ourselves and then use
1493 	 TRUNC_DIV_EXPR.  */
1494       target = expand_binop (Pmode, add_optab, target,
1495 			     gen_int_mode (required_align / BITS_PER_UNIT - 1,
1496 					   Pmode),
1497 			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
1498       target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1499 			      gen_int_mode (required_align / BITS_PER_UNIT,
1500 					    Pmode),
1501 			      NULL_RTX, 1);
1502       target = expand_mult (Pmode, target,
1503 			    gen_int_mode (required_align / BITS_PER_UNIT,
1504 					  Pmode),
1505 			    NULL_RTX, 1);
1506     }
1507 
1508   /* Now that we've committed to a return value, mark its alignment.  */
1509   mark_reg_pointer (target, required_align);
1510 
1511   /* Record the new stack level.  */
1512   record_new_stack_level ();
1513 
1514   return target;
1515 }
1516 
1517 /* A front end may want to override GCC's stack checking by providing a
1518    run-time routine to call to check the stack, so provide a mechanism for
1519    calling that routine.  */
1520 
1521 static GTY(()) rtx stack_check_libfunc;
1522 
1523 void
1524 set_stack_check_libfunc (const char *libfunc_name)
1525 {
1526   gcc_assert (stack_check_libfunc == NULL_RTX);
1527   stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1528 }
1529 
1530 /* Emit one stack probe at ADDRESS, an address within the stack.  */
1531 
1532 void
1533 emit_stack_probe (rtx address)
1534 {
1535   if (targetm.have_probe_stack_address ())
1536     emit_insn (targetm.gen_probe_stack_address (address));
1537   else
1538     {
1539       rtx memref = gen_rtx_MEM (word_mode, address);
1540 
1541       MEM_VOLATILE_P (memref) = 1;
1542 
1543       /* See if we have an insn to probe the stack.  */
1544       if (targetm.have_probe_stack ())
1545         emit_insn (targetm.gen_probe_stack (memref));
1546       else
1547         emit_move_insn (memref, const0_rtx);
1548     }
1549 }
1550 
1551 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1552    FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
1553    the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
1554    or subtract them from the stack pointer.  */
1555 
1556 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1557 
1558 #if STACK_GROWS_DOWNWARD
1559 #define STACK_GROW_OP MINUS
1560 #define STACK_GROW_OPTAB sub_optab
1561 #define STACK_GROW_OFF(off) -(off)
1562 #else
1563 #define STACK_GROW_OP PLUS
1564 #define STACK_GROW_OPTAB add_optab
1565 #define STACK_GROW_OFF(off) (off)
1566 #endif
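/* A worked example under an assumed 4096-byte probe interval, not from
   the original source: with FIRST = 0 and a constant SIZE of 10000, the
   small-constant case below emits probes at offsets 4096, 8192 and
   finally 10000 from the stack pointer, in the direction selected by
   STACK_GROW_OFF.  */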
1567 
1568 void
1569 probe_stack_range (HOST_WIDE_INT first, rtx size)
1570 {
1571   /* First ensure SIZE is Pmode.  */
1572   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1573     size = convert_to_mode (Pmode, size, 1);
1574 
1575   /* Next see if we have a function to check the stack.  */
1576   if (stack_check_libfunc)
1577     {
1578       rtx addr = memory_address (Pmode,
1579 				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1580 					         stack_pointer_rtx,
1581 					         plus_constant (Pmode,
1582 								size, first)));
1583       emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode, 1, addr,
1584 			 Pmode);
1585     }
1586 
1587   /* Next see if we have an insn to check the stack.  */
1588   else if (targetm.have_check_stack ())
1589     {
1590       struct expand_operand ops[1];
1591       rtx addr = memory_address (Pmode,
1592 				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1593 					         stack_pointer_rtx,
1594 					         plus_constant (Pmode,
1595 								size, first)));
1596       bool success;
1597       create_input_operand (&ops[0], addr, Pmode);
1598       success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
1599       gcc_assert (success);
1600     }
1601 
1602   /* Otherwise we have to generate explicit probes.  If we have a constant
1603      small number of them to generate, that's the easy case.  */
1604   else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1605     {
1606       HOST_WIDE_INT isize = INTVAL (size), i;
1607       rtx addr;
1608 
1609       /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1610 	 it exceeds SIZE.  If only one probe is needed, this will not
1611 	 generate any code.  Then probe at FIRST + SIZE.  */
1612       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1613 	{
1614 	  addr = memory_address (Pmode,
1615 				 plus_constant (Pmode, stack_pointer_rtx,
1616 				 		STACK_GROW_OFF (first + i)));
1617 	  emit_stack_probe (addr);
1618 	}
1619 
1620       addr = memory_address (Pmode,
1621 			     plus_constant (Pmode, stack_pointer_rtx,
1622 					    STACK_GROW_OFF (first + isize)));
1623       emit_stack_probe (addr);
1624     }
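      /* A worked example, assuming PROBE_INTERVAL is 4096: with FIRST = 0
	 and SIZE = 10000 the loop above emits probes at offsets 4096 and
	 8192 from the stack pointer, and the trailing probe covers offset
	 10000.  */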
1625 
1626   /* In the variable case, do the same as above, but in a loop.  Note that we
1627      must be extra careful with variables wrapping around because we might be
1628      at the very top (or the very bottom) of the address space and we have to
1629      be able to handle this case properly; in particular, we use an equality
1630      test for the loop condition.  */
1631   else
1632     {
1633       rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1634       rtx_code_label *loop_lab = gen_label_rtx ();
1635       rtx_code_label *end_lab = gen_label_rtx ();
1636 
1637       /* Step 1: round SIZE to the previous multiple of the interval.  */
1638 
1639       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
1640       rounded_size
1641 	= simplify_gen_binary (AND, Pmode, size,
1642 			       gen_int_mode (-PROBE_INTERVAL, Pmode));
1643       rounded_size_op = force_operand (rounded_size, NULL_RTX);
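      /* For instance, assuming PROBE_INTERVAL is 4096, a SIZE of 10000
	 yields ROUNDED_SIZE = 10000 & -4096 = 8192, i.e. SIZE rounded
	 down to a multiple of the probing interval.  */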
1644 
1645 
1646       /* Step 2: compute initial and final value of the loop counter.  */
1647 
1648       /* TEST_ADDR = SP + FIRST.  */
1649       test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1650 					 	 stack_pointer_rtx,
1651 						 gen_int_mode (first, Pmode)),
1652 				 NULL_RTX);
1653 
1654       /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
1655       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1656 						 test_addr,
1657 						 rounded_size_op), NULL_RTX);
1658 
1659 
1660       /* Step 3: the loop
1661 
1662 	 while (TEST_ADDR != LAST_ADDR)
1663 	   {
1664 	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1665 	     probe at TEST_ADDR
1666 	   }
1667 
1668 	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1669 	 until it is equal to ROUNDED_SIZE.  */
1670 
1671       emit_label (loop_lab);
1672 
1673       /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
1674       emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1675 			       end_lab);
1676 
1677       /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
1678       temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1679 			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1680 			   1, OPTAB_WIDEN);
1681 
1682       gcc_assert (temp == test_addr);
1683 
1684       /* Probe at TEST_ADDR.  */
1685       emit_stack_probe (test_addr);
1686 
1687       emit_jump (loop_lab);
1688 
1689       emit_label (end_lab);
1690 
1691 
1692       /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1693 	 that SIZE is equal to ROUNDED_SIZE.  */
1694 
1695       /* TEMP = SIZE - ROUNDED_SIZE.  */
1696       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1697       if (temp != const0_rtx)
1698 	{
1699 	  rtx addr;
1700 
1701 	  if (CONST_INT_P (temp))
1702 	    {
1703 	      /* Use [base + disp] addressing mode if supported.  */
1704 	      HOST_WIDE_INT offset = INTVAL (temp);
1705 	      addr = memory_address (Pmode,
1706 				     plus_constant (Pmode, last_addr,
1707 						    STACK_GROW_OFF (offset)));
1708 	    }
1709 	  else
1710 	    {
1711 	      /* Manual CSE if the difference is not known at compile-time.  */
1712 	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1713 	      addr = memory_address (Pmode,
1714 				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1715 						     last_addr, temp));
1716 	    }
1717 
1718 	  emit_stack_probe (addr);
1719 	}
1720     }
1721 
1722   /* Make sure nothing is scheduled before we are done.  */
1723   emit_insn (gen_blockage ());
1724 }
1725 
1726 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1727    while probing it.  This pushes when SIZE is positive.  SIZE need not
1728    be constant.  If ADJUST_BACK is true, adjust back the stack pointer
1729    by plus SIZE at the end.  */
1730 
1731 void
1732 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
1733 {
1734   /* We skip the probe for the first interval + a small dope of 4 words and
1735      probe that many bytes past the specified size to maintain a protection
1736      area at the bottom of the stack.  */
1737   const int dope = 4 * UNITS_PER_WORD;
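  /* E.g. on a 64-bit target where UNITS_PER_WORD is 8, dope is 32 bytes.  */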
1738 
1739   /* First ensure SIZE is Pmode.  */
1740   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1741     size = convert_to_mode (Pmode, size, 1);
1742 
1743   /* If we have a constant small number of probes to generate, that's the
1744      easy case.  */
1745   if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1746     {
1747       HOST_WIDE_INT isize = INTVAL (size), i;
1748       bool first_probe = true;
1749 
1750       /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
1751 	 values of N from 1 until it exceeds SIZE.  If only one probe is
1752 	 needed, this will not generate any code.  Then adjust and probe
1753 	 to PROBE_INTERVAL + SIZE.  */
1754       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1755 	{
1756 	  if (first_probe)
1757 	    {
1758 	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1759 	      first_probe = false;
1760 	    }
1761 	  else
1762 	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1763 	  emit_stack_probe (stack_pointer_rtx);
1764 	}
1765 
1766       if (first_probe)
1767 	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1768       else
1769 	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
1770       emit_stack_probe (stack_pointer_rtx);
1771     }
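      /* A worked example, assuming PROBE_INTERVAL is 4096 and dope = 32:
	 for SIZE = 10000 the code above decrements the stack pointer by
	 8224, then 4096, then 1808 bytes (14128 = SIZE + PROBE_INTERVAL
	 + dope in total), probing after each adjustment.  */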
1772 
1773   /* In the variable case, do the same as above, but in a loop.  Note that we
1774      must be extra careful with variables wrapping around because we might be
1775      at the very top (or the very bottom) of the address space and we have to
1776      be able to handle this case properly; in particular, we use an equality
1777      test for the loop condition.  */
1778   else
1779     {
1780       rtx rounded_size, rounded_size_op, last_addr, temp;
1781       rtx_code_label *loop_lab = gen_label_rtx ();
1782       rtx_code_label *end_lab = gen_label_rtx ();
1783 
1784 
1785       /* Step 1: round SIZE to the previous multiple of the interval.  */
1786 
1787       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
1788       rounded_size
1789 	= simplify_gen_binary (AND, Pmode, size,
1790 			       gen_int_mode (-PROBE_INTERVAL, Pmode));
1791       rounded_size_op = force_operand (rounded_size, NULL_RTX);
1792 
1793 
1794       /* Step 2: compute initial and final value of the loop counter.  */
1795 
1796       /* SP = SP_0 + PROBE_INTERVAL.  */
1797       anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1798 
1799       /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
1800       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1801 						 stack_pointer_rtx,
1802 						 rounded_size_op), NULL_RTX);
1803 
1804 
1805       /* Step 3: the loop
1806 
1807 	 while (SP != LAST_ADDR)
1808 	   {
1809 	     SP = SP + PROBE_INTERVAL
1810 	     probe at SP
1811 	   }
1812 
1813 	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
1814 	 values of N from 1 until it is equal to ROUNDED_SIZE.  */
1815 
1816       emit_label (loop_lab);
1817 
1818       /* Jump to END_LAB if SP == LAST_ADDR.  */
1819       emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1820 			       Pmode, 1, end_lab);
1821 
1822       /* SP = SP + PROBE_INTERVAL and probe at SP.  */
1823       anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1824       emit_stack_probe (stack_pointer_rtx);
1825 
1826       emit_jump (loop_lab);
1827 
1828       emit_label (end_lab);
1829 
1830 
1831       /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
1832 	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */
1833 
1834       /* TEMP = SIZE - ROUNDED_SIZE.  */
1835       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1836       if (temp != const0_rtx)
1837 	{
1838 	  /* Manual CSE if the difference is not known at compile-time.  */
1839 	  if (GET_CODE (temp) != CONST_INT)
1840 	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1841 	  anti_adjust_stack (temp);
1842 	  emit_stack_probe (stack_pointer_rtx);
1843 	}
1844     }
1845 
1846   /* Adjust back and account for the additional first interval.  */
1847   if (adjust_back)
1848     adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1849   else
1850     adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1851 }
1852 
1853 /* Return an rtx representing the register or memory location
1854    in which a scalar value of data type VALTYPE
1855    was returned by a function call to function FUNC.
1856    FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1857    function is known, otherwise 0.
1858    OUTGOING is 1 if on a machine with register windows this function
1859    should return the register in which the function will put its result
1860    and 0 otherwise.  */
1861 
1862 rtx
1863 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
1864 		     int outgoing ATTRIBUTE_UNUSED)
1865 {
1866   rtx val;
1867 
1868   val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
1869 
1870   if (REG_P (val)
1871       && GET_MODE (val) == BLKmode)
1872     {
1873       unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1874       machine_mode tmpmode;
1875 
1876       /* int_size_in_bytes can return -1.  We don't need a check here
1877 	 since the value of bytes will then be large enough that no
1878 	 mode will match anyway.  */
1879 
1880       for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1881 	   tmpmode != VOIDmode;
1882 	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1883 	{
1884 	  /* Have we found a large enough mode?  */
1885 	  if (GET_MODE_SIZE (tmpmode) >= bytes)
1886 	    break;
1887 	}
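      /* Purely for illustration: on a typical target whose integer modes
	 are QImode, HImode, SImode, DImode, ..., a 6-byte BLKmode return
	 value makes this loop select DImode, the narrowest integer mode of
	 at least 6 bytes.  */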
1888 
1889       /* No suitable mode found.  */
1890       gcc_assert (tmpmode != VOIDmode);
1891 
1892       PUT_MODE (val, tmpmode);
1893     }
1894   return val;
1895 }
1896 
1897 /* Return an rtx representing the register or memory location
1898    in which a scalar value of mode MODE was returned by a library call.  */
1899 
1900 rtx
1901 hard_libcall_value (machine_mode mode, rtx fun)
1902 {
1903   return targetm.calls.libcall_value (mode, fun);
1904 }
1905 
1906 /* Look up the tree code for a given rtx code
1907    to provide the arithmetic operation for real_arithmetic.
1908    The function returns an int because the caller may not know
1909    what `enum tree_code' means.  */
1910 
1911 int
1912 rtx_to_tree_code (enum rtx_code code)
1913 {
1914   enum tree_code tcode;
1915 
1916   switch (code)
1917     {
1918     case PLUS:
1919       tcode = PLUS_EXPR;
1920       break;
1921     case MINUS:
1922       tcode = MINUS_EXPR;
1923       break;
1924     case MULT:
1925       tcode = MULT_EXPR;
1926       break;
1927     case DIV:
1928       tcode = RDIV_EXPR;
1929       break;
1930     case SMIN:
1931       tcode = MIN_EXPR;
1932       break;
1933     case SMAX:
1934       tcode = MAX_EXPR;
1935       break;
1936     default:
1937       tcode = LAST_AND_UNUSED_TREE_CODE;
1938       break;
1939     }
1940   return ((int) tcode);
1941 }
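/* For example, rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while a
   code with no listed counterpart, say UMIN, maps to
   LAST_AND_UNUSED_TREE_CODE.  */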
1942 
1943 #include "gt-explow.h"
1944