xref: /openbsd/gnu/usr.bin/gcc/gcc/explow.c (revision 06dc6460)
/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"

static rtx break_out_memory_refs	PARAMS ((rtx));
static void emit_stack_probe		PARAMS ((rtx));

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  if (! SCALAR_INT_MODE_P (mode))
    abort ();

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
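
/* For illustration (this example is not part of the original source):
   in QImode, width is 8, so sign is 0x80 and the mask is 0xff.
   trunc_int_for_mode (0x1ff, QImode) first masks to 0xff; the xor/subtract
   pair then sign-extends bit 7, so the result is -1.  */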

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     rtx x;
     HOST_WIDE_INT c;
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0
      && !(flag_propolice_protection && x == virtual_stack_vars_rtx))
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for a constant term in the sum and combine it
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      else if (find_constant_term_loc (&y))
	{
	  /* We need to be careful since X may be shared and we can't
	     modify it in place.  */
	  rtx copy = copy_rtx (x);
	  rtx *const_loc = find_constant_term_loc (&copy);

	  *const_loc = plus_constant (*const_loc, c);
	  x = copy;
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0
      || (flag_propolice_protection && x == virtual_stack_vars_rtx))
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
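
/* Illustrative example (not part of the original source): via the
   plus_constant macro, plus_constant ((plus (reg) (const_int 4)), 3)
   folds the constants through the PLUS case above and returns
   (plus (reg) (const_int 7)).  */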

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
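
/* Illustrative example (not part of the original source): with
   *CONSTPTR initially const0_rtx, applying eliminate_constant_term to
   (plus (reg) (const_int 4)) returns (reg) and leaves (const_int 4)
   in *CONSTPTR.  */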

/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before it is next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	continue;
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (any_uncondjump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = (*lang_hooks.expr_size) (exp);

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (exp)
     tree exp;
{
  tree t = (*lang_hooks.expr_size) (exp);

  if (t == 0
      || TREE_CODE (t) != INTEGER_CST
      || TREE_OVERFLOW (t)
      || TREE_INT_CST_HIGH (t) != 0
      /* If the result would appear negative, it's too big to represent.  */
      || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
    return -1;

  return TREE_INT_CST_LOW (t);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode ATTRIBUTE_UNUSED;
     rtx x;
{
#ifndef POINTERS_EXTEND_UNSIGNED
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it.  We can always safely permute them if we are
	 making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
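
/* Illustrative example (not part of the original source): on a target
   where Pmode is wider than ptr_mode and POINTERS_EXTEND_UNSIGNED is 1,
   converting a ptr_mode address to Pmode zero-extends it; converting the
   other way truncates it.  */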

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
	  )
	x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = copy_all_regs (XEXP (x, 0));
      rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif
  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
	   && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      if (flag_propolice_protection)
	{
#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS				\
			&& XEXP (X, 0) == virtual_stack_vars_rtx	\
			&& GET_CODE (XEXP (X, 1)) == CONST_INT)
	  rtx y;
	  if (FRAMEADDR_P (x))
	    goto win;
	  for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
	    {
	      if (FRAMEADDR_P (XEXP (y, 0)))
		XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
	      if (FRAMEADDR_P (XEXP (y, 1)))
		XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
	    }
	}
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
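
/* Illustrative example (not part of the original source): if X is
   (plus (plus (reg) (reg)) (const_int 4)) and the target rejects that
   form, eliminate_constant_term strips the constant, the inner sum is
   copied to a new pseudo, and the result is (plus (reg) (const_int 4)),
   which often becomes a common subexpression.  */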

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* Given REF, either a MEM or a REG, and T, either the type of REF or
   the expression corresponding to REF, set RTX_UNCHANGING_P if
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
     has the same value.  Currently we simplify this to PARM_DECLs in the
     first case, and decls with TREE_CONSTANT initializers in the second.

     We cannot do this for non-static aggregates, because of the double
     writes that can be generated by store_constructor, depending on the
     contents of the initializer.  Yes, this does eliminate a good fraction
     of the number of uses of RTX_UNCHANGING_P for a language like Ada.
     It also eliminates a good quantity of bugs.  Let this be incentive to
     eliminate RTX_UNCHANGING_P entirely in favour of a more reliable
     solution, perhaps based on alias sets.  */

  if ((TREE_READONLY (t) && DECL_P (t)
       && (TREE_STATIC (t) || ! AGGREGATE_TYPE_P (TREE_TYPE (t)))
       && (TREE_CODE (t) == PARM_DECL
	   || DECL_INITIAL (t) == NULL_TREE
	   || TREE_CONSTANT (DECL_INITIAL (t))))
      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
    RTX_UNCHANGING_P (ref) = 1;
}

/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  if (GET_CODE (x) != MEM
      || ! rtx_unstable_p (XEXP (x, 0)))
    return x;

  return
    replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (GET_CODE (temp) == REG)
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    set_unique_reg_note (insn, REG_EQUAL, x);

  return temp;
}
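
/* Illustrative example (not part of the original source):
   force_reg (SImode, GEN_INT (42)) emits (set (reg:SI N) (const_int 42)),
   attaches a REG_EQUAL note recording the constant, and returns the
   new pseudo.  */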

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  rtx temp;

  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
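
/* Illustrative example (not part of the original source): on a port
   whose PROMOTE_MODE widens sub-word integers, promote_mode of a `short'
   typically returns SImode, with *PUNSIGNEDP updated to say whether sign
   or zero extension is to be used.  */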

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
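
/* Illustrative example (not part of the original source): on a target
   where the stack grows downward, anti_adjust_stack (GEN_INT (32)) emits
   sp := sp - 32 and records the push in stack_pointer_delta;
   adjust_stack (GEN_INT (32)) undoes it.  */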

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
  return size;
}
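
/* Illustrative example (not part of the original source): with a
   preferred stack boundary of 128 bits, ALIGN is 16, so
   round_push (GEN_INT (37)) computes (37 + 15) / 16 * 16 and returns
   (const_int 48).  */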

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
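
/* Illustrative note (not part of the original source): on a target with
   no save_stack_* patterns, every save level defaults to a plain move of
   stack_pointer_rtx into a Pmode object; SAVE_NONLOCAL allocates that
   object as a stack slot, the other levels as a pseudo register.  */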

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx sa;
     rtx after;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode,
			gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now the only supported port whose stack grows upward
		 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort ();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize the alignment necessary, because we
     don't know the final value of preferred_stack_boundary yet while
     executing this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

	/* ??? Code below assumes that the save area needs maximal
	   alignment.  This constraint may be too strong.  */
	if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	  abort ();

	if (GET_CODE (size) == CONST_INT)
	  {
	    HOST_WIDE_INT new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be properly aligned.  */
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
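
/* Illustrative example (not part of the original source): when MUST_ALIGN
   holds and BIGGEST_ALIGNMENT is 64 bits (8 bytes), SIZE is padded by 7
   up front, and the returned address is rounded with
   target = (target + 7) / 8 * 8, so the caller always receives an
   8-byte-aligned block.  */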

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					         stack_pointer_rtx,
					         plus_constant (size, first)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != ptr_mode)
	addr = convert_memory_address (ptr_mode, addr);
#endif

      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
	abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
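
/* Illustrative example (not part of the original source): with FIRST == 0,
   a constant SIZE of 16384, and a probe interval of 4096, the constant
   case above probes at sp-4096, sp-8192 and sp-12288 (on a downward-growing
   stack), then finishes with a probe at sp-16384.  */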

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will be large enough that no mode
	 will match and we will abort later in this function.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}
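
/* Illustrative example (not part of the original source): for a 6-byte
   BLKmode return value, the loop above picks the narrowest MODE_INT mode
   of at least 6 bytes, e.g. DImode on a 32-bit target, and installs it
   on the return register.  */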

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
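
/* Illustrative example (not part of the original source):
   rtx_to_tree_code (MULT) returns (int) MULT_EXPR; any rtx code without
   a mapping, such as AND, yields LAST_AND_UNUSED_TREE_CODE.  */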

#include "gt-explow.h"