1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "tm_p.h"
30 #include "flags.h"
31 #include "except.h"
32 #include "function.h"
33 #include "expr.h"
34 #include "optabs.h"
35 #include "libfuncs.h"
36 #include "hard-reg-set.h"
37 #include "insn-config.h"
38 #include "ggc.h"
39 #include "recog.h"
40 #include "langhooks.h"
41 #include "target.h"
42 #include "common/common-target.h"
43 #include "output.h"
44 
45 static rtx break_out_memory_refs (rtx);
46 
47 
48 /* Truncate and perhaps sign-extend C as appropriate for MODE.  */
49 
50 HOST_WIDE_INT
51 trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
52 {
53   int width = GET_MODE_PRECISION (mode);
54 
55   /* You want to truncate to a _what_?  */
56   gcc_assert (SCALAR_INT_MODE_P (mode));
57 
58   /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
59   if (mode == BImode)
60     return c & 1 ? STORE_FLAG_VALUE : 0;
61 
62   /* Sign-extend for the requested mode.  */
63 
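  /* The bit trick below masks C to WIDTH bits and then flips and subtracts
     the sign bit, which sign-extends the WIDTH-bit value.  For example,
     with WIDTH == 8, C == 0xff becomes -1 while C == 0x7f is returned
     unchanged.  */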
64   if (width < HOST_BITS_PER_WIDE_INT)
65     {
66       HOST_WIDE_INT sign = 1;
67       sign <<= width - 1;
68       c &= (sign << 1) - 1;
69       c ^= sign;
70       c -= sign;
71     }
72 
73   return c;
74 }
75 
76 /* Return an rtx for the sum of X and the integer C, given that X has
77    mode MODE.  */
78 
79 rtx
80 plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)
81 {
82   RTX_CODE code;
83   rtx y;
84   rtx tem;
85   int all_constant = 0;
86 
87   gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
88 
89   if (c == 0)
90     return x;
91 
92  restart:
93 
94   code = GET_CODE (x);
95   y = x;
96 
97   switch (code)
98     {
99     case CONST_INT:
100       if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
101 	{
102 	  double_int di_x = double_int::from_shwi (INTVAL (x));
103 	  double_int di_c = double_int::from_shwi (c);
104 
105 	  bool overflow;
106 	  double_int v = di_x.add_with_sign (di_c, false, &overflow);
107 	  if (overflow)
108 	    gcc_unreachable ();
109 
110 	  return immed_double_int_const (v, mode);
111 	}
112 
113       return gen_int_mode (UINTVAL (x) + c, mode);
114 
115     case CONST_DOUBLE:
116       {
117 	double_int di_x = double_int::from_pair (CONST_DOUBLE_HIGH (x),
118 						 CONST_DOUBLE_LOW (x));
119 	double_int di_c = double_int::from_shwi (c);
120 
121 	bool overflow;
122 	double_int v = di_x.add_with_sign (di_c, false, &overflow);
123 	if (overflow)
124 	  /* Sorry, we have no way to represent overflows this wide.
125 	     To fix, add constant support wider than CONST_DOUBLE.  */
126 	  gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);
127 
128 	return immed_double_int_const (v, mode);
129       }
130 
131     case MEM:
132       /* If this is a reference to the constant pool, try replacing it with
133 	 a reference to a new constant.  If the resulting address isn't
134 	 valid, don't return it because we have no way to validize it.  */
135       if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
136 	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
137 	{
138 	  tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
139 	  tem = force_const_mem (GET_MODE (x), tem);
140 	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
141 	    return tem;
142 	}
143       break;
144 
145     case CONST:
146       /* If adding to something entirely constant, set a flag
147 	 so that we can add a CONST around the result.  */
148       x = XEXP (x, 0);
149       all_constant = 1;
150       goto restart;
151 
152     case SYMBOL_REF:
153     case LABEL_REF:
154       all_constant = 1;
155       break;
156 
157     case PLUS:
158       /* The interesting case is adding the integer to a sum.  Look
159 	 for constant term in the sum and combine with C.  For an
160 	 integer constant term or a constant term that is not an
161 	 explicit integer, we combine or group them together anyway.
162 
163 	 We may not immediately return from the recursive call here, lest
164 	 all_constant gets lost.  */
165 
166       if (CONSTANT_P (XEXP (x, 1)))
167 	{
168 	  x = gen_rtx_PLUS (mode, XEXP (x, 0),
169 			    plus_constant (mode, XEXP (x, 1), c));
170 	  c = 0;
171 	}
172       else if (find_constant_term_loc (&y))
173 	{
174 	  /* We need to be careful since X may be shared and we can't
175 	     modify it in place.  */
176 	  rtx copy = copy_rtx (x);
177 	  rtx *const_loc = find_constant_term_loc (&copy);
178 
179 	  *const_loc = plus_constant (mode, *const_loc, c);
180 	  x = copy;
181 	  c = 0;
182 	}
183       break;
184 
185     default:
186       break;
187     }
188 
189   if (c != 0)
190     x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
191 
192   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
193     return x;
194   else if (all_constant)
195     return gen_rtx_CONST (mode, x);
196   else
197     return x;
198 }
199 
200 /* If X is a sum, return a new sum like X but lacking any constant terms.
201    Add all the removed constant terms into *CONSTPTR.
202    X itself is not altered.  The result != X if and only if
203    it is not isomorphic to X.  */
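/* For example, given (plus (reg) (const_int 4)) and *CONSTPTR == const0_rtx,
   the result is (reg) and *CONSTPTR becomes (const_int 4).  */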
204 
205 rtx
206 eliminate_constant_term (rtx x, rtx *constptr)
207 {
208   rtx x0, x1;
209   rtx tem;
210 
211   if (GET_CODE (x) != PLUS)
212     return x;
213 
214   /* First handle constants appearing at this level explicitly.  */
215   if (CONST_INT_P (XEXP (x, 1))
216       && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
217 						XEXP (x, 1)))
218       && CONST_INT_P (tem))
219     {
220       *constptr = tem;
221       return eliminate_constant_term (XEXP (x, 0), constptr);
222     }
223 
224   tem = const0_rtx;
225   x0 = eliminate_constant_term (XEXP (x, 0), &tem);
226   x1 = eliminate_constant_term (XEXP (x, 1), &tem);
227   if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
228       && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
229 						*constptr, tem))
230       && CONST_INT_P (tem))
231     {
232       *constptr = tem;
233       return gen_rtx_PLUS (GET_MODE (x), x0, x1);
234     }
235 
236   return x;
237 }
238 
239 /* Returns a tree for the size of EXP in bytes.  */
240 
241 static tree
242 tree_expr_size (const_tree exp)
243 {
244   if (DECL_P (exp)
245       && DECL_SIZE_UNIT (exp) != 0)
246     return DECL_SIZE_UNIT (exp);
247   else
248     return size_in_bytes (TREE_TYPE (exp));
249 }
250 
251 /* Return an rtx for the size in bytes of the value of EXP.  */
252 
253 rtx
254 expr_size (tree exp)
255 {
256   tree size;
257 
258   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
259     size = TREE_OPERAND (exp, 1);
260   else
261     {
262       size = tree_expr_size (exp);
263       gcc_assert (size);
264       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
265     }
266 
267   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
268 }
269 
270 /* Return a wide integer for the size in bytes of the value of EXP, or -1
271    if the size can vary or is larger than an integer.  */
272 
273 HOST_WIDE_INT
274 int_expr_size (tree exp)
275 {
276   tree size;
277 
278   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
279     size = TREE_OPERAND (exp, 1);
280   else
281     {
282       size = tree_expr_size (exp);
283       gcc_assert (size);
284     }
285 
286   if (size == 0 || !tree_fits_shwi_p (size))
287     return -1;
288 
289   return tree_to_shwi (size);
290 }
291 
292 /* Return a copy of X in which all memory references
293    and all constants that involve symbol refs
294    have been replaced with new temporary registers.
295    Also emit code to load the memory locations and constants
296    into those registers.
297 
298    If X contains no such constants or memory references,
299    X itself (not a copy) is returned.
300 
301    If a constant is found in the address that is not a legitimate constant
302    in an insn, it is left alone in the hope that it might be valid in the
303    address.
304 
305    X may contain no arithmetic except addition, subtraction and multiplication.
306    Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
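/* For example, (plus (mem (reg)) (const_int 4)) becomes
   (plus (reg T) (const_int 4)), where T is a new pseudo loaded from the
   original memory location.  */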
307 
308 static rtx
309 break_out_memory_refs (rtx x)
310 {
311   if (MEM_P (x)
312       || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
313 	  && GET_MODE (x) != VOIDmode))
314     x = force_reg (GET_MODE (x), x);
315   else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
316 	   || GET_CODE (x) == MULT)
317     {
318       rtx op0 = break_out_memory_refs (XEXP (x, 0));
319       rtx op1 = break_out_memory_refs (XEXP (x, 1));
320 
321       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
322 	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
323     }
324 
325   return x;
326 }
327 
328 /* Given X, a memory address in address space AS' pointer mode, convert it to
329    an address in the address space's address mode, or vice versa (TO_MODE says
330    which way).  We take advantage of the fact that pointers are not allowed to
331    overflow by commuting arithmetic operations over conversions so that address
332    arithmetic insns can be used.  */
333 
334 rtx
335 convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
336 				   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
337 {
338 #ifndef POINTERS_EXTEND_UNSIGNED
339   gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
340   return x;
341 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
342   enum machine_mode pointer_mode, address_mode, from_mode;
343   rtx temp;
344   enum rtx_code code;
345 
346   /* If X already has the right mode, just return it.  */
347   if (GET_MODE (x) == to_mode)
348     return x;
349 
350   pointer_mode = targetm.addr_space.pointer_mode (as);
351   address_mode = targetm.addr_space.address_mode (as);
352   from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
353 
354   /* Here we handle some special cases.  If none of them apply, fall through
355      to the default case.  */
356   switch (GET_CODE (x))
357     {
358     CASE_CONST_SCALAR_INT:
359       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
360 	code = TRUNCATE;
361       else if (POINTERS_EXTEND_UNSIGNED < 0)
362 	break;
363       else if (POINTERS_EXTEND_UNSIGNED > 0)
364 	code = ZERO_EXTEND;
365       else
366 	code = SIGN_EXTEND;
367       temp = simplify_unary_operation (code, to_mode, x, from_mode);
368       if (temp)
369 	return temp;
370       break;
371 
372     case SUBREG:
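      /* If the inner register already has mode TO_MODE, and the SUBREG is
	 a promoted variable or the inner register is known to be a pointer,
	 the inner register can be used directly.  */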
373       if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
374 	  && GET_MODE (SUBREG_REG (x)) == to_mode)
375 	return SUBREG_REG (x);
376       break;
377 
378     case LABEL_REF:
379       temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
380       LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
381       return temp;
382       break;
383 
384     case SYMBOL_REF:
385       temp = shallow_copy_rtx (x);
386       PUT_MODE (temp, to_mode);
387       return temp;
388       break;
389 
390     case CONST:
391       return gen_rtx_CONST (to_mode,
392 			    convert_memory_address_addr_space
393 			      (to_mode, XEXP (x, 0), as));
394       break;
395 
396     case PLUS:
397     case MULT:
398       /* FIXME: For addition, we used to permute the conversion and
399 	 addition operation only if one operand is a constant and
400 	 converting the constant does not change it or if one operand
401 	 is a constant and we are using a ptr_extend instruction
402 	 (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
403 	 may overflow/underflow.  We relax the condition to include
404 	 zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
405 	 parts of the compiler depend on it.  See PR 49721.
406 
407 	 We can always safely permute them if we are making the address
408 	 narrower.  */
409       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
410 	  || (GET_CODE (x) == PLUS
411 	      && CONST_INT_P (XEXP (x, 1))
412 	      && (POINTERS_EXTEND_UNSIGNED != 0
413 		  || XEXP (x, 1) == convert_memory_address_addr_space
414 		  			(to_mode, XEXP (x, 1), as))))
415 	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
416 			       convert_memory_address_addr_space
417 				 (to_mode, XEXP (x, 0), as),
418 			       XEXP (x, 1));
419       break;
420 
421     default:
422       break;
423     }
424 
425   return convert_modes (to_mode, from_mode,
426 			x, POINTERS_EXTEND_UNSIGNED);
427 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
428 }
429 
430 /* Return something equivalent to X but valid as a memory address for something
431    of mode MODE in the named address space AS.  When X is not itself valid,
432    this works by copying X or subexpressions of it into registers.  */
433 
434 rtx
435 memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
436 {
437   rtx oldx = x;
438   enum machine_mode address_mode = targetm.addr_space.address_mode (as);
439 
440   x = convert_memory_address_addr_space (address_mode, x, as);
441 
442   /* By passing constant addresses through registers
443      we get a chance to cse them.  */
444   if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
445     x = force_reg (address_mode, x);
446 
447   /* We get better cse by rejecting indirect addressing at this stage.
448      Let the combiner create indirect addresses where appropriate.
449      For now, generate the code so that the subexpressions useful to share
450      are visible.  But not if cse won't be done!  */
451   else
452     {
453       if (! cse_not_expected && !REG_P (x))
454 	x = break_out_memory_refs (x);
455 
456       /* At this point, any valid address is accepted.  */
457       if (memory_address_addr_space_p (mode, x, as))
458 	goto done;
459 
460       /* If it was valid before but breaking out memory refs invalidated it,
461 	 use it the old way.  */
462       if (memory_address_addr_space_p (mode, oldx, as))
463 	{
464 	  x = oldx;
465 	  goto done;
466 	}
467 
468       /* Perform machine-dependent transformations on X
469 	 in certain cases.  This is not necessary since the code
470 	 below can handle all possible cases, but machine-dependent
471 	 transformations can make better code.  */
472       {
473 	rtx orig_x = x;
474 	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
475 	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
476 	  goto done;
477       }
478 
479       /* PLUS and MULT can appear in special ways
480 	 as the result of attempts to make an address usable for indexing.
481 	 Usually they are dealt with by calling force_operand, below.
482 	 But a sum containing constant terms is special
483 	 if removing them makes the sum a valid address:
484 	 then we generate that address in a register
485 	 and index off of it.  We do this because it often makes
486 	 shorter code, and because the addresses thus generated
487 	 in registers often become common subexpressions.  */
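      /* For example, (plus (plus (reg A) (reg B)) (const_int 64)) may be
	 rewritten as (plus (reg T) (const_int 64)), where T is a new pseudo
	 holding A + B, when both the constant-free sum and the new form
	 are valid addresses.  */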
488       if (GET_CODE (x) == PLUS)
489 	{
490 	  rtx constant_term = const0_rtx;
491 	  rtx y = eliminate_constant_term (x, &constant_term);
492 	  if (constant_term == const0_rtx
493 	      || ! memory_address_addr_space_p (mode, y, as))
494 	    x = force_operand (x, NULL_RTX);
495 	  else
496 	    {
497 	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
498 	      if (! memory_address_addr_space_p (mode, y, as))
499 		x = force_operand (x, NULL_RTX);
500 	      else
501 		x = y;
502 	    }
503 	}
504 
505       else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
506 	x = force_operand (x, NULL_RTX);
507 
508       /* If we have a register that's an invalid address,
509 	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
510       else if (REG_P (x))
511 	x = copy_to_reg (x);
512 
513       /* Last resort: copy the value to a register, since
514 	 the register is a valid address.  */
515       else
516 	x = force_reg (address_mode, x);
517     }
518 
519  done:
520 
521   gcc_assert (memory_address_addr_space_p (mode, x, as));
522   /* If we didn't change the address, we are done.  Otherwise, mark
523      a reg as a pointer if we have REG or REG + CONST_INT.  */
524   if (oldx == x)
525     return x;
526   else if (REG_P (x))
527     mark_reg_pointer (x, BITS_PER_UNIT);
528   else if (GET_CODE (x) == PLUS
529 	   && REG_P (XEXP (x, 0))
530 	   && CONST_INT_P (XEXP (x, 1)))
531     mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
532 
533   /* OLDX may have been the address on a temporary.  Update the address
534      to indicate that X is now used.  */
535   update_temp_slot_address (oldx, x);
536 
537   return x;
538 }
539 
540 /* Convert a mem ref into one with a valid memory address.
541    Pass through anything else unchanged.  */
542 
543 rtx
544 validize_mem (rtx ref)
545 {
546   if (!MEM_P (ref))
547     return ref;
548   ref = use_anchored_address (ref);
549   if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
550 				   MEM_ADDR_SPACE (ref)))
551     return ref;
552 
553   /* Don't alter REF itself, since that is probably a stack slot.  */
554   return replace_equiv_address (ref, XEXP (ref, 0));
555 }
556 
557 /* If X is a memory reference to a member of an object block, try rewriting
558    it to use an anchor instead.  Return the new memory reference on success
559    and the old one on failure.  */
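/* For example, a reference to a hypothetical (symbol_ref "foo") that lies
   in an object block may be rewritten to use (plus ANCHOR OFFSET), where
   ANCHOR is the section anchor chosen for the block and OFFSET is the
   distance of "foo" from that anchor.  */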
560 
561 rtx
562 use_anchored_address (rtx x)
563 {
564   rtx base;
565   HOST_WIDE_INT offset;
566   enum machine_mode mode;
567 
568   if (!flag_section_anchors)
569     return x;
570 
571   if (!MEM_P (x))
572     return x;
573 
574   /* Split the address into a base and offset.  */
575   base = XEXP (x, 0);
576   offset = 0;
577   if (GET_CODE (base) == CONST
578       && GET_CODE (XEXP (base, 0)) == PLUS
579       && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
580     {
581       offset += INTVAL (XEXP (XEXP (base, 0), 1));
582       base = XEXP (XEXP (base, 0), 0);
583     }
584 
585   /* Check whether BASE is suitable for anchors.  */
586   if (GET_CODE (base) != SYMBOL_REF
587       || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
588       || SYMBOL_REF_ANCHOR_P (base)
589       || SYMBOL_REF_BLOCK (base) == NULL
590       || !targetm.use_anchors_for_symbol_p (base))
591     return x;
592 
593   /* Decide where BASE is going to be.  */
594   place_block_symbol (base);
595 
596   /* Get the anchor we need to use.  */
597   offset += SYMBOL_REF_BLOCK_OFFSET (base);
598   base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
599 			     SYMBOL_REF_TLS_MODEL (base));
600 
601   /* Work out the offset from the anchor.  */
602   offset -= SYMBOL_REF_BLOCK_OFFSET (base);
603 
604   /* If we're going to run a CSE pass, force the anchor into a register.
605      We will then be able to reuse registers for several accesses, if the
606      target costs say that that's worthwhile.  */
607   mode = GET_MODE (base);
608   if (!cse_not_expected)
609     base = force_reg (mode, base);
610 
611   return replace_equiv_address (x, plus_constant (mode, base, offset));
612 }
613 
614 /* Copy the value or contents of X to a new temp reg and return that reg.  */
615 
616 rtx
617 copy_to_reg (rtx x)
618 {
619   rtx temp = gen_reg_rtx (GET_MODE (x));
620 
621   /* If not an operand, must be an address with PLUS and MULT so
622      do the computation.  */
623   if (! general_operand (x, VOIDmode))
624     x = force_operand (x, temp);
625 
626   if (x != temp)
627     emit_move_insn (temp, x);
628 
629   return temp;
630 }
631 
632 /* Like copy_to_reg but always give the new register mode Pmode
633    in case X is a constant.  */
634 
635 rtx
636 copy_addr_to_reg (rtx x)
637 {
638   return copy_to_mode_reg (Pmode, x);
639 }
640 
641 /* Like copy_to_reg but always give the new register mode MODE
642    in case X is a constant.  */
643 
644 rtx
645 copy_to_mode_reg (enum machine_mode mode, rtx x)
646 {
647   rtx temp = gen_reg_rtx (mode);
648 
649   /* If not an operand, must be an address with PLUS and MULT so
650      do the computation.  */
651   if (! general_operand (x, VOIDmode))
652     x = force_operand (x, temp);
653 
654   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
655   if (x != temp)
656     emit_move_insn (temp, x);
657   return temp;
658 }
659 
660 /* Load X into a register if it is not already one.
661    Use mode MODE for the register.
662    X should be valid for mode MODE, but it may be a constant which
663    is valid for all integer modes; that's why caller must specify MODE.
664 
665    The caller must not alter the value in the register we return,
666    since we mark it as a "constant" register.  */
667 
668 rtx
669 force_reg (enum machine_mode mode, rtx x)
670 {
671   rtx temp, insn, set;
672 
673   if (REG_P (x))
674     return x;
675 
676   if (general_operand (x, mode))
677     {
678       temp = gen_reg_rtx (mode);
679       insn = emit_move_insn (temp, x);
680     }
681   else
682     {
683       temp = force_operand (x, NULL_RTX);
684       if (REG_P (temp))
685 	insn = get_last_insn ();
686       else
687 	{
688 	  rtx temp2 = gen_reg_rtx (mode);
689 	  insn = emit_move_insn (temp2, temp);
690 	  temp = temp2;
691 	}
692     }
693 
694   /* Let optimizers know that TEMP's value never changes
695      and that X can be substituted for it.  Don't get confused
696      if INSN set something else (such as a SUBREG of TEMP).  */
697   if (CONSTANT_P (x)
698       && (set = single_set (insn)) != 0
699       && SET_DEST (set) == temp
700       && ! rtx_equal_p (x, SET_SRC (set)))
701     set_unique_reg_note (insn, REG_EQUAL, x);
702 
703   /* Let optimizers know that TEMP is a pointer, and if so, the
704      known alignment of that pointer.  */
705   {
706     unsigned align = 0;
707     if (GET_CODE (x) == SYMBOL_REF)
708       {
709         align = BITS_PER_UNIT;
710 	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
711 	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
712       }
713     else if (GET_CODE (x) == LABEL_REF)
714       align = BITS_PER_UNIT;
715     else if (GET_CODE (x) == CONST
716 	     && GET_CODE (XEXP (x, 0)) == PLUS
717 	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
718 	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
719       {
720 	rtx s = XEXP (XEXP (x, 0), 0);
721 	rtx c = XEXP (XEXP (x, 0), 1);
722 	unsigned sa, ca;
723 
724 	sa = BITS_PER_UNIT;
725 	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
726 	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
727 
728 	if (INTVAL (c) == 0)
729 	  align = sa;
730 	else
731 	  {
732 	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
733 	    align = MIN (sa, ca);
734 	  }
735       }
736 
737     if (align || (MEM_P (x) && MEM_POINTER (x)))
738       mark_reg_pointer (temp, align);
739   }
740 
741   return temp;
742 }
743 
744 /* If X is a memory ref, copy its contents to a new temp reg and return
745    that reg.  Otherwise, return X.  */
746 
747 rtx
748 force_not_mem (rtx x)
749 {
750   rtx temp;
751 
752   if (!MEM_P (x) || GET_MODE (x) == BLKmode)
753     return x;
754 
755   temp = gen_reg_rtx (GET_MODE (x));
756 
757   if (MEM_POINTER (x))
758     REG_POINTER (temp) = 1;
759 
760   emit_move_insn (temp, x);
761   return temp;
762 }
763 
764 /* Copy X to TARGET (if it's nonzero and a reg)
765    or to a new temp reg and return that reg.
766    MODE is the mode to use for X in case it is a constant.  */
767 
768 rtx
769 copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
770 {
771   rtx temp;
772 
773   if (target && REG_P (target))
774     temp = target;
775   else
776     temp = gen_reg_rtx (mode);
777 
778   emit_move_insn (temp, x);
779   return temp;
780 }
781 
782 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
783    PUNSIGNEDP points to the signedness of the type and may be adjusted
784    to show what signedness to use on extension operations.
785 
786    FOR_RETURN is nonzero if the caller is promoting the return value
787    of FNDECL, else it is for promoting args.  */
788 
789 enum machine_mode
790 promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
791 		       const_tree funtype, int for_return)
792 {
793   /* Called without a type node for a libcall.  */
794   if (type == NULL_TREE)
795     {
796       if (INTEGRAL_MODE_P (mode))
797 	return targetm.calls.promote_function_mode (NULL_TREE, mode,
798 						    punsignedp, funtype,
799 						    for_return);
800       else
801 	return mode;
802     }
803 
804   switch (TREE_CODE (type))
805     {
806     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
807     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
808     case POINTER_TYPE:   case REFERENCE_TYPE:
809       return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
810 						  for_return);
811 
812     default:
813       return mode;
814     }
815 }
816 /* Return the mode to use to store a scalar of TYPE and MODE.
817    PUNSIGNEDP points to the signedness of the type and may be adjusted
818    to show what signedness to use on extension operations.  */
819 
820 enum machine_mode
821 promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
822 	      int *punsignedp ATTRIBUTE_UNUSED)
823 {
824 #ifdef PROMOTE_MODE
825   enum tree_code code;
826   int unsignedp;
827 #endif
828 
829   /* For libcalls this is invoked without TYPE from the backends
830      TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
831      case.  */
832   if (type == NULL_TREE)
833     return mode;
834 
835   /* FIXME: this is the same logic that was there until GCC 4.4, but we
836      probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
837      is not defined.  The affected targets are M32C, S390, SPARC.  */
838 #ifdef PROMOTE_MODE
839   code = TREE_CODE (type);
840   unsignedp = *punsignedp;
841 
842   switch (code)
843     {
844     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
845     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
846       PROMOTE_MODE (mode, unsignedp, type);
847       *punsignedp = unsignedp;
848       return mode;
849       break;
850 
851 #ifdef POINTERS_EXTEND_UNSIGNED
852     case REFERENCE_TYPE:
853     case POINTER_TYPE:
854       *punsignedp = POINTERS_EXTEND_UNSIGNED;
855       return targetm.addr_space.address_mode
856 	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
857       break;
858 #endif
859 
860     default:
861       return mode;
862     }
863 #else
864   return mode;
865 #endif
866 }
867 
868 
869 /* Use one of promote_mode or promote_function_mode to find the promoted
870    mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
871    of DECL after promotion.  */
872 
873 enum machine_mode
874 promote_decl_mode (const_tree decl, int *punsignedp)
875 {
876   tree type = TREE_TYPE (decl);
877   int unsignedp = TYPE_UNSIGNED (type);
878   enum machine_mode mode = DECL_MODE (decl);
879   enum machine_mode pmode;
880 
881   if (TREE_CODE (decl) == RESULT_DECL
882       || TREE_CODE (decl) == PARM_DECL)
883     pmode = promote_function_mode (type, mode, &unsignedp,
884                                    TREE_TYPE (current_function_decl), 2);
885   else
886     pmode = promote_mode (type, mode, &unsignedp);
887 
888   if (punsignedp)
889     *punsignedp = unsignedp;
890   return pmode;
891 }
892 
893 
894 /* Controls the behaviour of {anti_,}adjust_stack.  */
895 static bool suppress_reg_args_size;
896 
897 /* A helper for adjust_stack and anti_adjust_stack.  */
898 
899 static void
900 adjust_stack_1 (rtx adjust, bool anti_p)
901 {
902   rtx temp, insn;
903 
904 #ifndef STACK_GROWS_DOWNWARD
905   /* Hereafter anti_p means subtract_p.  */
906   anti_p = !anti_p;
907 #endif
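  /* On a downward-growing stack, pushing (ANTI_P true) subtracts from the
     stack pointer and popping adds to it; on an upward-growing stack the
     flip above reverses the operation.  */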
908 
909   temp = expand_binop (Pmode,
910 		       anti_p ? sub_optab : add_optab,
911 		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
912 		       OPTAB_LIB_WIDEN);
913 
914   if (temp != stack_pointer_rtx)
915     insn = emit_move_insn (stack_pointer_rtx, temp);
916   else
917     {
918       insn = get_last_insn ();
919       temp = single_set (insn);
920       gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
921     }
922 
923   if (!suppress_reg_args_size)
924     add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
925 }
926 
927 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
928    This pops when ADJUST is positive.  ADJUST need not be constant.  */
929 
930 void
931 adjust_stack (rtx adjust)
932 {
933   if (adjust == const0_rtx)
934     return;
935 
936   /* We expect all variable sized adjustments to be a multiple of
937      PREFERRED_STACK_BOUNDARY.  */
938   if (CONST_INT_P (adjust))
939     stack_pointer_delta -= INTVAL (adjust);
940 
941   adjust_stack_1 (adjust, false);
942 }
943 
944 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
945    This pushes when ADJUST is positive.  ADJUST need not be constant.  */
946 
947 void
948 anti_adjust_stack (rtx adjust)
949 {
950   if (adjust == const0_rtx)
951     return;
952 
953   /* We expect all variable sized adjustments to be a multiple of
954      PREFERRED_STACK_BOUNDARY.  */
955   if (CONST_INT_P (adjust))
956     stack_pointer_delta += INTVAL (adjust);
957 
958   adjust_stack_1 (adjust, true);
959 }
960 
961 /* Round the size of a block to be pushed up to the boundary required
962    by this machine.  SIZE is the desired size, which need not be constant.  */
963 
964 static rtx
965 round_push (rtx size)
966 {
967   rtx align_rtx, alignm1_rtx;
968 
969   if (!SUPPORTS_STACK_ALIGNMENT
970       || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
971     {
972       int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
973 
974       if (align == 1)
975 	return size;
976 
977       if (CONST_INT_P (size))
978 	{
979 	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
980 
981 	  if (INTVAL (size) != new_size)
982 	    size = GEN_INT (new_size);
983 	  return size;
984 	}
985 
986       align_rtx = GEN_INT (align);
987       alignm1_rtx = GEN_INT (align - 1);
988     }
989   else
990     {
991       /* If crtl->preferred_stack_boundary might still grow, use
992 	 virtual_preferred_stack_boundary_rtx instead.  This will be
993 	 substituted by the right value in vregs pass and optimized
994 	 during combine.  */
995       align_rtx = virtual_preferred_stack_boundary_rtx;
996       alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
997 				   NULL_RTX);
998     }
999 
1000   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1001      but we know it can't.  So add ourselves and then do
1002      TRUNC_DIV_EXPR.  */
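  /* For example, with ALIGN == 16 this computes ((SIZE + 15) / 16) * 16,
     so a SIZE of 40 is rounded up to 48.  */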
1003   size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
1004 		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1005   size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
1006 			NULL_RTX, 1);
1007   size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
1008 
1009   return size;
1010 }
1011 
1012 /* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
1013    to a previously-created save area.  If no save area has been allocated,
1014    this function will allocate one.  If a save area is specified, it
1015    must be of the proper mode.  */
1016 
1017 void
1018 emit_stack_save (enum save_level save_level, rtx *psave)
1019 {
1020   rtx sa = *psave;
1021   /* The default is that we use a move insn and save in a Pmode object.  */
1022   rtx (*fcn) (rtx, rtx) = gen_move_insn;
1023   enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1024 
1025   /* See if this machine has anything special to do for this kind of save.  */
1026   switch (save_level)
1027     {
1028 #ifdef HAVE_save_stack_block
1029     case SAVE_BLOCK:
1030       if (HAVE_save_stack_block)
1031 	fcn = gen_save_stack_block;
1032       break;
1033 #endif
1034 #ifdef HAVE_save_stack_function
1035     case SAVE_FUNCTION:
1036       if (HAVE_save_stack_function)
1037 	fcn = gen_save_stack_function;
1038       break;
1039 #endif
1040 #ifdef HAVE_save_stack_nonlocal
1041     case SAVE_NONLOCAL:
1042       if (HAVE_save_stack_nonlocal)
1043 	fcn = gen_save_stack_nonlocal;
1044       break;
1045 #endif
1046     default:
1047       break;
1048     }
1049 
1050   /* If there is no save area and we have to allocate one, do so.  Otherwise
1051      verify the save area is the proper mode.  */
1052 
1053   if (sa == 0)
1054     {
1055       if (mode != VOIDmode)
1056 	{
1057 	  if (save_level == SAVE_NONLOCAL)
1058 	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1059 	  else
1060 	    *psave = sa = gen_reg_rtx (mode);
1061 	}
1062     }
1063 
1064   do_pending_stack_adjust ();
1065   if (sa != 0)
1066     sa = validize_mem (sa);
1067   emit_insn (fcn (sa, stack_pointer_rtx));
1068 }
1069 
1070 /* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
1071    area made by emit_stack_save.  If it is zero, we have nothing to do.  */
1072 
1073 void
1074 emit_stack_restore (enum save_level save_level, rtx sa)
1075 {
1076   /* The default is that we use a move insn.  */
1077   rtx (*fcn) (rtx, rtx) = gen_move_insn;
1078 
1079   /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1080      STACK_POINTER and HARD_FRAME_POINTER.
1081      If stack_realign_fp, the x86 backend emits a prologue that aligns only
1082      STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1083      aligned variables, which is reflected in ix86_can_eliminate.
1084      We normally still have the realigned STACK_POINTER that we can use.
1085      But if there is a stack restore still present at reload, it can trigger
1086      mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1087      FRAME_POINTER into a hard reg.
1088      To prevent this situation, we force need_drap if we emit a stack
1089      restore.  */
1090   if (SUPPORTS_STACK_ALIGNMENT)
1091     crtl->need_drap = true;
1092 
1093   /* See if this machine has anything special to do for this kind of save.  */
1094   switch (save_level)
1095     {
1096 #ifdef HAVE_restore_stack_block
1097     case SAVE_BLOCK:
1098       if (HAVE_restore_stack_block)
1099 	fcn = gen_restore_stack_block;
1100       break;
1101 #endif
1102 #ifdef HAVE_restore_stack_function
1103     case SAVE_FUNCTION:
1104       if (HAVE_restore_stack_function)
1105 	fcn = gen_restore_stack_function;
1106       break;
1107 #endif
1108 #ifdef HAVE_restore_stack_nonlocal
1109     case SAVE_NONLOCAL:
1110       if (HAVE_restore_stack_nonlocal)
1111 	fcn = gen_restore_stack_nonlocal;
1112       break;
1113 #endif
1114     default:
1115       break;
1116     }
1117 
1118   if (sa != 0)
1119     {
1120       sa = validize_mem (sa);
1121       /* These clobbers prevent the scheduler from moving
1122 	 references to variable arrays below the code
1123 	 that deletes (pops) the arrays.  */
1124       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1125       emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1126     }
1127 
1128   discard_pending_stack_adjust ();
1129 
1130   emit_insn (fcn (stack_pointer_rtx, sa));
1131 }
1132 
1133 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1134    function.  This function should be called whenever we allocate or
1135    deallocate dynamic stack space.  */
1136 
1137 void
1138 update_nonlocal_goto_save_area (void)
1139 {
1140   tree t_save;
1141   rtx r_save;
1142 
1143   /* The nonlocal_goto_save_area object is an array of N pointers.  The
1144      first one is used for the frame pointer save; the rest are sized by
1145      STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
1146      of the stack save area slots.  */
1147   t_save = build4 (ARRAY_REF,
1148 		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1149 		   cfun->nonlocal_goto_save_area,
1150 		   integer_one_node, NULL_TREE, NULL_TREE);
1151   r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1152 
1153   emit_stack_save (SAVE_NONLOCAL, &r_save);
1154 }
1155 
1156 /* Return an rtx representing the address of an area of memory dynamically
1157    pushed on the stack.
1158 
1159    Any required stack pointer alignment is preserved.
1160 
1161    SIZE is an rtx representing the size of the area.
1162 
1163    SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
1164    parameter may be zero.  If so, a proper value will be extracted
1165    from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1166 
1167    REQUIRED_ALIGN is the alignment (in bits) required for the region
1168    of memory.
1169 
1170    If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1171    stack space allocated by the generated code cannot be added with itself
1172    in the course of the execution of the function.  It is always safe to
1173    pass FALSE here and the following criterion is sufficient in order to
1174    pass TRUE: every path in the CFG that starts at the allocation point and
1175    loops to it executes the associated deallocation code.  */
1176 
1177 rtx
1178 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1179 			      unsigned required_align, bool cannot_accumulate)
1180 {
1181   HOST_WIDE_INT stack_usage_size = -1;
1182   rtx final_label, final_target, target;
1183   unsigned extra_align = 0;
1184   bool must_align;
1185 
1186   /* If we're asking for zero bytes, it doesn't matter what we point
1187      to since we can't dereference it.  But return a reasonable
1188      address anyway.  */
1189   if (size == const0_rtx)
1190     return virtual_stack_dynamic_rtx;
1191 
1192   /* Otherwise, show we're calling alloca or equivalent.  */
1193   cfun->calls_alloca = 1;
1194 
1195   /* If stack usage info is requested, look into the size we are passed.
1196      We need to do so this early to avoid the obfuscation that may be
1197      introduced later by the various alignment operations.  */
1198   if (flag_stack_usage_info)
1199     {
1200       if (CONST_INT_P (size))
1201 	stack_usage_size = INTVAL (size);
1202       else if (REG_P (size))
1203         {
1204 	  /* Look into the last emitted insn and see if we can deduce
1205 	     something for the register.  */
1206 	  rtx insn, set, note;
1207 	  insn = get_last_insn ();
1208 	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1209 	    {
1210 	      if (CONST_INT_P (SET_SRC (set)))
1211 		stack_usage_size = INTVAL (SET_SRC (set));
1212 	      else if ((note = find_reg_equal_equiv_note (insn))
1213 		       && CONST_INT_P (XEXP (note, 0)))
1214 		stack_usage_size = INTVAL (XEXP (note, 0));
1215 	    }
1216 	}
1217 
1218       /* If the size is not constant, we can't say anything.  */
1219       if (stack_usage_size == -1)
1220 	{
1221 	  current_function_has_unbounded_dynamic_stack_size = 1;
1222 	  stack_usage_size = 0;
1223 	}
1224     }
1225 
1226   /* Ensure the size is in the proper mode.  */
1227   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1228     size = convert_to_mode (Pmode, size, 1);
1229 
1230   /* Adjust SIZE_ALIGN, if needed.  */
1231   if (CONST_INT_P (size))
1232     {
1233       unsigned HOST_WIDE_INT lsb;
1234 
1235       lsb = INTVAL (size);
1236       lsb &= -lsb;
1237 
1238       /* Watch out for overflow truncating to "unsigned".  */
1239       if (lsb > UINT_MAX / BITS_PER_UNIT)
1240 	size_align = 1u << (HOST_BITS_PER_INT - 1);
1241       else
1242 	size_align = (unsigned)lsb * BITS_PER_UNIT;
1243     }
1244   else if (size_align < BITS_PER_UNIT)
1245     size_align = BITS_PER_UNIT;
1246 
1247   /* We can't attempt to minimize alignment necessary, because we don't
1248      know the final value of preferred_stack_boundary yet while executing
1249      this code.  */
1250   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1251     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1252 
1253   /* We will need to ensure that the address we return is aligned to
1254      REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
1255      always know its final value at this point in the compilation (it
1256      might depend on the size of the outgoing parameter lists, for
1257      example), so we must align the value to be returned in that case.
1258      (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1259      STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1260      We must also do an alignment operation on the returned value if
1261      the stack pointer alignment is less strict than REQUIRED_ALIGN.
1262 
1263      If we have to align, we must leave space in SIZE for the hole
1264      that might result from the alignment operation.  */
1265 
1266   must_align = (crtl->preferred_stack_boundary < required_align);
1267   if (must_align)
1268     {
1269       if (required_align > PREFERRED_STACK_BOUNDARY)
1270 	extra_align = PREFERRED_STACK_BOUNDARY;
1271       else if (required_align > STACK_BOUNDARY)
1272 	extra_align = STACK_BOUNDARY;
1273       else
1274 	extra_align = BITS_PER_UNIT;
1275     }
1276 
1277   /* ??? STACK_POINTER_OFFSET is always defined now.  */
1278 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1279   must_align = true;
1280   extra_align = BITS_PER_UNIT;
1281 #endif
1282 
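  /* If alignment is needed, pad SIZE so that the address we eventually
     return can be rounded up to REQUIRED_ALIGN while staying inside the
     allocated block; EXTRA below is the number of padding bytes.  */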
1283   if (must_align)
1284     {
1285       unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
1286 
1287       size = plus_constant (Pmode, size, extra);
1288       size = force_operand (size, NULL_RTX);
1289 
1290       if (flag_stack_usage_info)
1291 	stack_usage_size += extra;
1292 
1293       if (extra && size_align > extra_align)
1294 	size_align = extra_align;
1295     }
1296 
1297   /* Round the size to a multiple of the required stack alignment.
1298      Since the stack is presumed to be rounded before this allocation,
1299      this will maintain the required alignment.
1300 
1301      If the stack grows downward, we could save an insn by subtracting
1302      SIZE from the stack pointer and then aligning the stack pointer.
1303      The problem with this is that the stack pointer may be unaligned
1304      between the execution of the subtraction and alignment insns and
1305      some machines do not allow this.  Even on those that do, some
1306      signal handlers malfunction if a signal should occur between those
1307      insns.  Since this is an extremely rare event, we have no reliable
1308      way of knowing which systems have this problem.  So we avoid even
1309      momentarily mis-aligning the stack.  */
1310   if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1311     {
1312       size = round_push (size);
1313 
1314       if (flag_stack_usage_info)
1315 	{
1316 	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1317 	  stack_usage_size = (stack_usage_size + align - 1) / align * align;
1318 	}
1319     }
1320 
1321   target = gen_reg_rtx (Pmode);
1322 
1323   /* The size is supposed to be fully adjusted at this point so record it
1324      if stack usage info is requested.  */
1325   if (flag_stack_usage_info)
1326     {
1327       current_function_dynamic_stack_size += stack_usage_size;
1328 
1329       /* ??? This is gross but the only safe stance in the absence
1330 	 of stack usage oriented flow analysis.  */
1331       if (!cannot_accumulate)
1332 	current_function_has_unbounded_dynamic_stack_size = 1;
1333     }
1334 
1335   final_label = NULL_RTX;
1336   final_target = NULL_RTX;
1337 
1338   /* If we are splitting the stack, we need to ask the backend whether
1339      there is enough room on the current stack.  If there isn't, or if
1340      the backend doesn't know how to tell us, then we need to call a
1341      function to allocate memory in some other way.  This memory will
1342      be released when we release the current stack segment.  The
1343      effect is that stack allocation becomes less efficient, but at
1344      least it doesn't cause a stack overflow.  */
1345   if (flag_split_stack)
1346     {
1347       rtx available_label, ask, space, func;
1348 
1349       available_label = NULL_RTX;
1350 
1351 #ifdef HAVE_split_stack_space_check
1352       if (HAVE_split_stack_space_check)
1353 	{
1354 	  available_label = gen_label_rtx ();
1355 
1356 	  /* This instruction will branch to AVAILABLE_LABEL if there
1357 	     are SIZE bytes available on the stack.  */
1358 	  emit_insn (gen_split_stack_space_check (size, available_label));
1359 	}
1360 #endif
1361 
1362       /* The __morestack_allocate_stack_space function will allocate
1363 	 memory using malloc.  If the alignment of the memory returned
1364 	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1365 	 make sure we allocate enough space.  */
1366       if (MALLOC_ABI_ALIGNMENT >= required_align)
1367 	ask = size;
1368       else
1369 	{
1370 	  ask = expand_binop (Pmode, add_optab, size,
1371 			      gen_int_mode (required_align / BITS_PER_UNIT - 1,
1372 					    Pmode),
1373 			      NULL_RTX, 1, OPTAB_LIB_WIDEN);
1374 	  must_align = true;
1375 	}
1376 
1377       func = init_one_libfunc ("__morestack_allocate_stack_space");
1378 
1379       space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1380 				       1, ask, Pmode);
1381 
1382       if (available_label == NULL_RTX)
1383 	return space;
1384 
1385       final_target = gen_reg_rtx (Pmode);
1386 
1387       emit_move_insn (final_target, space);
1388 
1389       final_label = gen_label_rtx ();
1390       emit_jump (final_label);
1391 
1392       emit_label (available_label);
1393     }
1394 
1395   do_pending_stack_adjust ();
1396 
1397  /* We ought to be called always on the toplevel and stack ought to be aligned
1398     properly.  */
1399   gcc_assert (!(stack_pointer_delta
1400 		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
1401 
1402   /* If needed, check that we have the required amount of stack.  Take into
1403      account what has already been checked.  */
1404   if (STACK_CHECK_MOVING_SP)
1405     ;
1406   else if (flag_stack_check == GENERIC_STACK_CHECK)
1407     probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1408 		       size);
1409   else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1410     probe_stack_range (STACK_CHECK_PROTECT, size);
1411 
1412   /* Don't let anti_adjust_stack emit notes.  */
1413   suppress_reg_args_size = true;
1414 
1415   /* Perform the required allocation from the stack.  Some systems do
1416      this differently than simply incrementing/decrementing from the
1417      stack pointer, such as acquiring the space by calling malloc().  */
1418 #ifdef HAVE_allocate_stack
1419   if (HAVE_allocate_stack)
1420     {
1421       struct expand_operand ops[2];
1422       /* We don't have to check against the predicate for operand 0 since
1423 	 TARGET is known to be a pseudo of the proper mode, which must
1424 	 be valid for the operand.  */
1425       create_fixed_operand (&ops[0], target);
1426       create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1427       expand_insn (CODE_FOR_allocate_stack, 2, ops);
1428     }
1429   else
1430 #endif
1431     {
1432       int saved_stack_pointer_delta;
1433 
1434 #ifndef STACK_GROWS_DOWNWARD
1435       emit_move_insn (target, virtual_stack_dynamic_rtx);
1436 #endif
1437 
1438       /* Check stack bounds if necessary.  */
1439       if (crtl->limit_stack)
1440 	{
1441 	  rtx available;
1442 	  rtx space_available = gen_label_rtx ();
1443 #ifdef STACK_GROWS_DOWNWARD
1444 	  available = expand_binop (Pmode, sub_optab,
1445 				    stack_pointer_rtx, stack_limit_rtx,
1446 				    NULL_RTX, 1, OPTAB_WIDEN);
1447 #else
1448 	  available = expand_binop (Pmode, sub_optab,
1449 				    stack_limit_rtx, stack_pointer_rtx,
1450 				    NULL_RTX, 1, OPTAB_WIDEN);
1451 #endif
1452 	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1453 				   space_available);
1454 #ifdef HAVE_trap
1455 	  if (HAVE_trap)
1456 	    emit_insn (gen_trap ());
1457 	  else
1458 #endif
1459 	    error ("stack limits not supported on this target");
1460 	  emit_barrier ();
1461 	  emit_label (space_available);
1462 	}
1463 
1464       saved_stack_pointer_delta = stack_pointer_delta;
1465 
1466       if (flag_stack_check && STACK_CHECK_MOVING_SP)
1467 	anti_adjust_stack_and_probe (size, false);
1468       else
1469 	anti_adjust_stack (size);
1470 
1471       /* Even if size is constant, don't modify stack_pointer_delta.
1472 	 The constant size alloca should preserve
1473 	 crtl->preferred_stack_boundary alignment.  */
1474       stack_pointer_delta = saved_stack_pointer_delta;
1475 
1476 #ifdef STACK_GROWS_DOWNWARD
1477       emit_move_insn (target, virtual_stack_dynamic_rtx);
1478 #endif
1479     }
1480 
1481   suppress_reg_args_size = false;
1482 
1483   /* Finish up the split stack handling.  */
1484   if (final_label != NULL_RTX)
1485     {
1486       gcc_assert (flag_split_stack);
1487       emit_move_insn (final_target, target);
1488       emit_label (final_label);
1489       target = final_target;
1490     }
1491 
1492   if (must_align)
1493     {
1494       /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1495 	 but we know it can't.  So add ourselves and then do
1496 	 TRUNC_DIV_EXPR.  */
1497       target = expand_binop (Pmode, add_optab, target,
1498 			     gen_int_mode (required_align / BITS_PER_UNIT - 1,
1499 					   Pmode),
1500 			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
1501       target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1502 			      gen_int_mode (required_align / BITS_PER_UNIT,
1503 					    Pmode),
1504 			      NULL_RTX, 1);
1505       target = expand_mult (Pmode, target,
1506 			    gen_int_mode (required_align / BITS_PER_UNIT,
1507 					  Pmode),
1508 			    NULL_RTX, 1);
1509     }
1510 
1511   /* Now that we've committed to a return value, mark its alignment.  */
1512   mark_reg_pointer (target, required_align);
1513 
1514   /* Record the new stack level for nonlocal gotos.  */
1515   if (cfun->nonlocal_goto_save_area != 0)
1516     update_nonlocal_goto_save_area ();
1517 
1518   return target;
1519 }
1520 
1521 /* A front end may want to override GCC's stack checking by providing a
1522    run-time routine to call to check the stack, so provide a mechanism for
1523    calling that routine.  */
1524 
1525 static GTY(()) rtx stack_check_libfunc;
1526 
1527 void
1528 set_stack_check_libfunc (const char *libfunc_name)
1529 {
1530   gcc_assert (stack_check_libfunc == NULL_RTX);
1531   stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1532 }
1533 
1534 /* Emit one stack probe at ADDRESS, an address within the stack.  */
1535 
1536 void
1537 emit_stack_probe (rtx address)
1538 {
1539 #ifdef HAVE_probe_stack_address
1540   if (HAVE_probe_stack_address)
1541     emit_insn (gen_probe_stack_address (address));
1542   else
1543 #endif
1544     {
1545       rtx memref = gen_rtx_MEM (word_mode, address);
1546 
1547       MEM_VOLATILE_P (memref) = 1;
1548 
1549       /* See if we have an insn to probe the stack.  */
1550 #ifdef HAVE_probe_stack
1551       if (HAVE_probe_stack)
1552         emit_insn (gen_probe_stack (memref));
1553       else
1554 #endif
1555         emit_move_insn (memref, const0_rtx);
1556     }
1557 }
1558 
1559 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1560    FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
1561    the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
1562    or subtract them from the stack pointer.  */
1563 
1564 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1565 
1566 #ifdef STACK_GROWS_DOWNWARD
1567 #define STACK_GROW_OP MINUS
1568 #define STACK_GROW_OPTAB sub_optab
1569 #define STACK_GROW_OFF(off) -(off)
1570 #else
1571 #define STACK_GROW_OP PLUS
1572 #define STACK_GROW_OPTAB add_optab
1573 #define STACK_GROW_OFF(off) (off)
1574 #endif
1575 
1576 void
1577 probe_stack_range (HOST_WIDE_INT first, rtx size)
1578 {
1579   /* First ensure SIZE is Pmode.  */
1580   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1581     size = convert_to_mode (Pmode, size, 1);
1582 
1583   /* Next see if we have a function to check the stack.  */
1584   if (stack_check_libfunc)
1585     {
1586       rtx addr = memory_address (Pmode,
1587 				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1588 					         stack_pointer_rtx,
1589 					         plus_constant (Pmode,
1590 								size, first)));
1591       emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1592 			 Pmode);
1593     }
1594 
1595   /* Next see if we have an insn to check the stack.  */
1596 #ifdef HAVE_check_stack
1597   else if (HAVE_check_stack)
1598     {
1599       struct expand_operand ops[1];
1600       rtx addr = memory_address (Pmode,
1601 				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1602 					         stack_pointer_rtx,
1603 					         plus_constant (Pmode,
1604 								size, first)));
1605       bool success;
1606       create_input_operand (&ops[0], addr, Pmode);
1607       success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
1608       gcc_assert (success);
1609     }
1610 #endif
1611 
1612   /* Otherwise we have to generate explicit probes.  If we have a constant
1613      small number of them to generate, that's the easy case.  */
1614   else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1615     {
1616       HOST_WIDE_INT isize = INTVAL (size), i;
1617       rtx addr;
1618 
1619       /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1620 	 it exceeds SIZE.  If only one probe is needed, this will not
1621 	 generate any code.  Then probe at FIRST + SIZE.  */
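      /* For example, assuming PROBE_INTERVAL is 4096, a SIZE of 10000 with
	 FIRST == 0 results in probes at offsets 4096, 8192 and 10000.  */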
1622       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1623 	{
1624 	  addr = memory_address (Pmode,
1625 				 plus_constant (Pmode, stack_pointer_rtx,
1626 				 		STACK_GROW_OFF (first + i)));
1627 	  emit_stack_probe (addr);
1628 	}
1629 
1630       addr = memory_address (Pmode,
1631 			     plus_constant (Pmode, stack_pointer_rtx,
1632 					    STACK_GROW_OFF (first + isize)));
1633       emit_stack_probe (addr);
1634     }
1635 
1636   /* In the variable case, do the same as above, but in a loop.  Note that we
1637      must be extra careful with variables wrapping around because we might be
1638      at the very top (or the very bottom) of the address space and we have to
1639      be able to handle this case properly; in particular, we use an equality
1640      test for the loop condition.  */
1641   else
1642     {
1643       rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1644       rtx loop_lab = gen_label_rtx ();
1645       rtx end_lab = gen_label_rtx ();
1646 
1647 
1648       /* Step 1: round SIZE to the previous multiple of the interval.  */
1649 
1650       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
1651       rounded_size
1652 	= simplify_gen_binary (AND, Pmode, size,
1653 			       gen_int_mode (-PROBE_INTERVAL, Pmode));
1654       rounded_size_op = force_operand (rounded_size, NULL_RTX);
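      /* Since -PROBE_INTERVAL has the low STACK_CHECK_PROBE_INTERVAL_EXP
	 bits clear, the AND rounds SIZE down to a multiple of the probing
	 interval; e.g. with a 4096-byte interval a SIZE of 12345 gives a
	 ROUNDED_SIZE of 12288.  */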
1655 
1656 
1657       /* Step 2: compute initial and final value of the loop counter.  */
1658 
1659       /* TEST_ADDR = SP + FIRST.  */
1660       test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1661 					 	 stack_pointer_rtx,
1662 						 gen_int_mode (first, Pmode)),
1663 				 NULL_RTX);
1664 
1665       /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
1666       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1667 						 test_addr,
1668 						 rounded_size_op), NULL_RTX);
1669 
1670 
1671       /* Step 3: the loop
1672 
1673 	 while (TEST_ADDR != LAST_ADDR)
1674 	   {
1675 	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1676 	     probe at TEST_ADDR
1677 	   }
1678 
1679 	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1680 	 until it is equal to ROUNDED_SIZE.  */
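
      /* Concretely, with a 4096-byte PROBE_INTERVAL, FIRST == 0 and
	 SIZE == 12345 (hence ROUNDED_SIZE == 12288), the loop probes at
	 offsets 4096, 8192 and 12288 from the initial stack pointer, and
	 step 4 below adds a final probe at offset 12345 for the residual
	 57 bytes.  */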
1681 
1682       emit_label (loop_lab);
1683 
1684       /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
1685       emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1686 			       end_lab);
1687 
1688       /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
1689       temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1690 			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1691 			   1, OPTAB_WIDEN);
1692 
1693       gcc_assert (temp == test_addr);
1694 
1695       /* Probe at TEST_ADDR.  */
1696       emit_stack_probe (test_addr);
1697 
1698       emit_jump (loop_lab);
1699 
1700       emit_label (end_lab);
1701 
1702 
1703       /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1704 	 that SIZE is equal to ROUNDED_SIZE.  */
1705 
1706       /* TEMP = SIZE - ROUNDED_SIZE.  */
1707       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1708       if (temp != const0_rtx)
1709 	{
1710 	  rtx addr;
1711 
1712 	  if (CONST_INT_P (temp))
1713 	    {
1714 	      /* Use [base + disp] addressing mode if supported.  */
1715 	      HOST_WIDE_INT offset = INTVAL (temp);
1716 	      addr = memory_address (Pmode,
1717 				     plus_constant (Pmode, last_addr,
1718 						    STACK_GROW_OFF (offset)));
1719 	    }
1720 	  else
1721 	    {
1722 	      /* Manual CSE if the difference is not known at compile-time.  */
1723 	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1724 	      addr = memory_address (Pmode,
1725 				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1726 						     last_addr, temp));
1727 	    }
1728 
1729 	  emit_stack_probe (addr);
1730 	}
1731     }
1732 
1733   /* Make sure nothing is scheduled before we are done.  */
1734   emit_insn (gen_blockage ());
1735 }
1736 
1737 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1738    while probing it.  This pushes when SIZE is positive.  SIZE need not
1739    be constant.  If ADJUST_BACK is true, adjust the stack pointer back
1740    by plus SIZE at the end.  */
1741 
1742 void
1743 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
1744 {
1745   /* We skip the probe for the first interval + a small dope of 4 words and
1746      probe that many bytes past the specified size to maintain a protection
1747      area at the bottom of the stack.  */
1748   const int dope = 4 * UNITS_PER_WORD;
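  /* For instance, with 8-byte words the dope is 32 bytes; in all cases the
     code below grows the stack by a total of SIZE + PROBE_INTERVAL + dope
     bytes before the final adjustment at the end of the function.  */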
1749 
1750   /* First ensure SIZE is Pmode.  */
1751   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1752     size = convert_to_mode (Pmode, size, 1);
1753 
1754   /* If we have a constant small number of probes to generate, that's the
1755      easy case.  */
1756   if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1757     {
1758       HOST_WIDE_INT isize = INTVAL (size), i;
1759       bool first_probe = true;
1760 
1761       /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
1762 	 values of N from 1 until it exceeds SIZE.  If only one probe is
1763 	 needed, this will not generate any code.  Then adjust and probe
1764 	 to PROBE_INTERVAL + SIZE.  */
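      /* As a worked example, assuming PROBE_INTERVAL == 4096 and
	 dope == 32, a SIZE of 10000 gives: a first adjustment of
	 2 * 4096 + 32 == 8224 followed by a probe, a second adjustment
	 of 4096 followed by a probe, and a final adjustment of
	 10000 + 4096 - 12288 == 1808 followed by the last probe, i.e.
	 SIZE + PROBE_INTERVAL + dope == 14128 bytes in total.  */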
1765       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1766 	{
1767 	  if (first_probe)
1768 	    {
1769 	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1770 	      first_probe = false;
1771 	    }
1772 	  else
1773 	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1774 	  emit_stack_probe (stack_pointer_rtx);
1775 	}
1776 
1777       if (first_probe)
1778 	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1779       else
1780 	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
1781       emit_stack_probe (stack_pointer_rtx);
1782     }
1783 
1784   /* In the variable case, do the same as above, but in a loop.  Note that we
1785      must be extra careful with variables wrapping around because we might be
1786      at the very top (or the very bottom) of the address space and we have to
1787      be able to handle this case properly; in particular, we use an equality
1788      test for the loop condition.  */
1789   else
1790     {
1791       rtx rounded_size, rounded_size_op, last_addr, temp;
1792       rtx loop_lab = gen_label_rtx ();
1793       rtx end_lab = gen_label_rtx ();
1794 
1795 
1796       /* Step 1: round SIZE to the previous multiple of the interval.  */
1797 
1798       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
1799       rounded_size
1800 	= simplify_gen_binary (AND, Pmode, size,
1801 			       gen_int_mode (-PROBE_INTERVAL, Pmode));
1802       rounded_size_op = force_operand (rounded_size, NULL_RTX);
1803 
1804 
1805       /* Step 2: compute initial and final value of the loop counter.  */
1806 
1807       /* SP = SP_0 + PROBE_INTERVAL.  */
1808       anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1809 
1810       /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
1811       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1812 						 stack_pointer_rtx,
1813 						 rounded_size_op), NULL_RTX);
1814 
1815 
1816       /* Step 3: the loop
1817 
1818 	 while (SP != LAST_ADDR)
1819 	   {
1820 	     SP = SP + PROBE_INTERVAL
1821 	     probe at SP
1822 	   }
1823 
1824 	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
1825 	 values of N from 1 until it is equal to ROUNDED_SIZE.  */
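
      /* Concretely, with a 4096-byte PROBE_INTERVAL, a 32-byte dope and
	 SIZE == 12345 (hence ROUNDED_SIZE == 12288): step 2 grows the
	 stack by 4096 + 32 bytes, the loop grows and probes three times
	 by 4096 bytes each, and step 4 below grows by the residual 57
	 bytes, again SIZE + PROBE_INTERVAL + dope in total.  */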
1826 
1827       emit_label (loop_lab);
1828 
1829       /* Jump to END_LAB if SP == LAST_ADDR.  */
1830       emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1831 			       Pmode, 1, end_lab);
1832 
1833       /* SP = SP + PROBE_INTERVAL and probe at SP.  */
1834       anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1835       emit_stack_probe (stack_pointer_rtx);
1836 
1837       emit_jump (loop_lab);
1838 
1839       emit_label (end_lab);
1840 
1841 
1842       /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
1843 	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */
1844 
1845       /* TEMP = SIZE - ROUNDED_SIZE.  */
1846       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1847       if (temp != const0_rtx)
1848 	{
1849 	  /* Manual CSE if the difference is not known at compile-time.  */
1850 	  if (!CONST_INT_P (temp))
1851 	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1852 	  anti_adjust_stack (temp);
1853 	  emit_stack_probe (stack_pointer_rtx);
1854 	}
1855     }
1856 
1857   /* Adjust back and account for the additional first interval.  */
1858   if (adjust_back)
1859     adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1860   else
1861     adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1862 }
1863 
1864 /* Return an rtx representing the register or memory location
1865    in which a scalar value of data type VALTYPE
1866    was returned by a function call to function FUNC.
1867    FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1868    function is known, otherwise 0.
1869    OUTGOING is 1 if, on a machine with register windows, this function
1870    should return the register in which the function will put its result,
1871    and 0 otherwise.  */
1872 
1873 rtx
1874 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
1875 		     int outgoing ATTRIBUTE_UNUSED)
1876 {
1877   rtx val;
1878 
1879   val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
1880 
1881   if (REG_P (val)
1882       && GET_MODE (val) == BLKmode)
1883     {
1884       unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1885       enum machine_mode tmpmode;
1886 
1887       /* int_size_in_bytes can return -1.  We don't need a check here
1888 	 since the value of bytes will then be large enough that no
1889 	 mode will match anyway.  */
1890 
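      /* For example, a 6-byte BLKmode value would be given the narrowest
	 integer mode of at least 6 bytes, typically DImode.  */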
1891       for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1892 	   tmpmode != VOIDmode;
1893 	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1894 	{
1895 	  /* Have we found a large enough mode?  */
1896 	  if (GET_MODE_SIZE (tmpmode) >= bytes)
1897 	    break;
1898 	}
1899 
1900       /* No suitable mode found.  */
1901       gcc_assert (tmpmode != VOIDmode);
1902 
1903       PUT_MODE (val, tmpmode);
1904     }
1905   return val;
1906 }
1907 
1908 /* Return an rtx representing the register or memory location
1909    in which a scalar value of mode MODE was returned by a library call.  */
1910 
1911 rtx
1912 hard_libcall_value (enum machine_mode mode, rtx fun)
1913 {
1914   return targetm.calls.libcall_value (mode, fun);
1915 }
1916 
1917 /* Look up the tree code for a given rtx code
1918    to provide the arithmetic operation for REAL_ARITHMETIC.
1919    The function returns an int because the caller may not know
1920    what `enum tree_code' means.  */
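/* For instance, rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while a
   code with no counterpart below, such as AND, yields
   (int) LAST_AND_UNUSED_TREE_CODE.  */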
1921 
1922 int
1923 rtx_to_tree_code (enum rtx_code code)
1924 {
1925   enum tree_code tcode;
1926 
1927   switch (code)
1928     {
1929     case PLUS:
1930       tcode = PLUS_EXPR;
1931       break;
1932     case MINUS:
1933       tcode = MINUS_EXPR;
1934       break;
1935     case MULT:
1936       tcode = MULT_EXPR;
1937       break;
1938     case DIV:
1939       tcode = RDIV_EXPR;
1940       break;
1941     case SMIN:
1942       tcode = MIN_EXPR;
1943       break;
1944     case SMAX:
1945       tcode = MAX_EXPR;
1946       break;
1947     default:
1948       tcode = LAST_AND_UNUSED_TREE_CODE;
1949       break;
1950     }
1951   return ((int) tcode);
1952 }
1953 
1954 #include "gt-explow.h"
1955