xref: /openbsd/gnu/usr.bin/gcc/gcc/rtlanal.c (revision c87b03e5)
1 /* Analyze RTL for C-Compiler
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "basic-block.h"
33 #include "real.h"
34 
35 /* Forward declarations */
36 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
37 static void set_of_1		PARAMS ((rtx, rtx, void *));
38 static void insn_dependent_p_1	PARAMS ((rtx, rtx, void *));
39 static int computed_jump_p_1	PARAMS ((rtx));
40 static void parms_set 		PARAMS ((rtx, rtx, void *));
41 static bool hoist_test_store		PARAMS ((rtx, rtx, regset));
42 static void hoist_update_store		PARAMS ((rtx, rtx *, rtx, rtx));
43 
44 /* Bit flags that specify the machine subtype we are compiling for.
45    Bits are tested using macros TARGET_... defined in the tm.h file
46    and set by `-m...' switches.  Must be defined in rtlanal.c.  */
47 
48 int target_flags;
49 
50 /* Return 1 if the value of X is unstable
51    (would be different at a different point in the program).
52    The frame pointer, arg pointer, etc. are considered stable
53    (within one function) and so is anything marked `unchanging'.  */
54 
55 int
rtx_unstable_p(x)56 rtx_unstable_p (x)
57      rtx x;
58 {
59   RTX_CODE code = GET_CODE (x);
60   int i;
61   const char *fmt;
62 
63   switch (code)
64     {
65     case MEM:
66       return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
67 
68     case QUEUED:
69       return 1;
70 
71     case ADDRESSOF:
72     case CONST:
73     case CONST_INT:
74     case CONST_DOUBLE:
75     case CONST_VECTOR:
76     case SYMBOL_REF:
77     case LABEL_REF:
78       return 0;
79 
80     case REG:
81       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
82       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
83 	  /* The arg pointer varies if it is not a fixed register.  */
84 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
85 	  || RTX_UNCHANGING_P (x))
86 	return 0;
87 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
88       /* ??? When call-clobbered, the value is stable modulo the restore
89 	 that must happen after a call.  This currently screws up local-alloc
90 	 into believing that the restore is not needed.  */
91       if (x == pic_offset_table_rtx)
92 	return 0;
93 #endif
94       return 1;
95 
96     case ASM_OPERANDS:
97       if (MEM_VOLATILE_P (x))
98 	return 1;
99 
100       /* FALLTHROUGH */
101 
102     default:
103       break;
104     }
105 
106   fmt = GET_RTX_FORMAT (code);
107   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
108     if (fmt[i] == 'e')
109       {
110 	if (rtx_unstable_p (XEXP (x, i)))
111 	  return 1;
112       }
113     else if (fmt[i] == 'E')
114       {
115 	int j;
116 	for (j = 0; j < XVECLEN (x, i); j++)
117 	  if (rtx_unstable_p (XVECEXP (x, i, j)))
118 	    return 1;
119       }
120 
121   return 0;
122 }
123 
124 /* Return 1 if X has a value that can vary even between two
125    executions of the program.  0 means X can be compared reliably
126    against certain constants or near-constants.
127    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
128    zero, we are slightly more conservative.
129    The frame pointer and the arg pointer are considered constant.  */
130 
131 int
rtx_varies_p(x,for_alias)132 rtx_varies_p (x, for_alias)
133      rtx x;
134      int for_alias;
135 {
136   RTX_CODE code = GET_CODE (x);
137   int i;
138   const char *fmt;
139 
140   switch (code)
141     {
142     case MEM:
143       return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
144 
145     case QUEUED:
146       return 1;
147 
148     case CONST:
149     case CONST_INT:
150     case CONST_DOUBLE:
151     case CONST_VECTOR:
152     case SYMBOL_REF:
153     case LABEL_REF:
154       return 0;
155 
156     case REG:
157       /* Note that we have to test for the actual rtx used for the frame
158 	 and arg pointers and not just the register number in case we have
159 	 eliminated the frame and/or arg pointer and are using it
160 	 for pseudos.  */
161       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
162 	  /* The arg pointer varies if it is not a fixed register.  */
163 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
164 	return 0;
165       if (x == pic_offset_table_rtx
166 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
167 	  /* ??? When call-clobbered, the value is stable modulo the restore
168 	     that must happen after a call.  This currently screws up
169 	     local-alloc into believing that the restore is not needed, so we
170 	     must return 0 only if we are called from alias analysis.  */
171 	  && for_alias
172 #endif
173 	  )
174 	return 0;
175       return 1;
176 
177     case LO_SUM:
178       /* The operand 0 of a LO_SUM is considered constant
179 	 (in fact it is related specifically to operand 1)
180 	 during alias analysis.  */
181       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
182 	     || rtx_varies_p (XEXP (x, 1), for_alias);
183 
184     case ASM_OPERANDS:
185       if (MEM_VOLATILE_P (x))
186 	return 1;
187 
188       /* FALLTHROUGH */
189 
190     default:
191       break;
192     }
193 
194   fmt = GET_RTX_FORMAT (code);
195   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
196     if (fmt[i] == 'e')
197       {
198 	if (rtx_varies_p (XEXP (x, i), for_alias))
199 	  return 1;
200       }
201     else if (fmt[i] == 'E')
202       {
203 	int j;
204 	for (j = 0; j < XVECLEN (x, i); j++)
205 	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
206 	    return 1;
207       }
208 
209   return 0;
210 }
211 
212 /* Return 0 if the use of X as an address in a MEM can cause a trap.  */
213 
214 int
rtx_addr_can_trap_p(x)215 rtx_addr_can_trap_p (x)
216      rtx x;
217 {
218   enum rtx_code code = GET_CODE (x);
219 
220   switch (code)
221     {
222     case SYMBOL_REF:
223       return SYMBOL_REF_WEAK (x);
224 
225     case LABEL_REF:
226       return 0;
227 
228     case REG:
229       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
230       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
231 	  || x == stack_pointer_rtx
232 	  /* The arg pointer varies if it is not a fixed register.  */
233 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
234 	return 0;
235       /* All of the virtual frame registers are stack references.  */
236       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
237 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
238 	return 0;
239       return 1;
240 
241     case CONST:
242       return rtx_addr_can_trap_p (XEXP (x, 0));
243 
244     case PLUS:
245       /* An address is assumed not to trap if it is an address that can't
246 	 trap plus a constant integer or it is the pic register plus a
247 	 constant.  */
248       return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
249 		 && GET_CODE (XEXP (x, 1)) == CONST_INT)
250 		|| (XEXP (x, 0) == pic_offset_table_rtx
251 		    && CONSTANT_P (XEXP (x, 1))));
252 
253     case LO_SUM:
254     case PRE_MODIFY:
255       return rtx_addr_can_trap_p (XEXP (x, 1));
256 
257     case PRE_DEC:
258     case PRE_INC:
259     case POST_DEC:
260     case POST_INC:
261     case POST_MODIFY:
262       return rtx_addr_can_trap_p (XEXP (x, 0));
263 
264     default:
265       break;
266     }
267 
268   /* If it isn't one of the case above, it can cause a trap.  */
269   return 1;
270 }
271 
272 /* Return 1 if X refers to a memory location whose address
273    cannot be compared reliably with constant addresses,
274    or if X refers to a BLKmode memory object.
275    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
276    zero, we are slightly more conservative.  */
277 
278 int
rtx_addr_varies_p(x,for_alias)279 rtx_addr_varies_p (x, for_alias)
280      rtx x;
281      int for_alias;
282 {
283   enum rtx_code code;
284   int i;
285   const char *fmt;
286 
287   if (x == 0)
288     return 0;
289 
290   code = GET_CODE (x);
291   if (code == MEM)
292     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
293 
294   fmt = GET_RTX_FORMAT (code);
295   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
296     if (fmt[i] == 'e')
297       {
298 	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
299 	  return 1;
300       }
301     else if (fmt[i] == 'E')
302       {
303 	int j;
304 	for (j = 0; j < XVECLEN (x, i); j++)
305 	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
306 	    return 1;
307       }
308   return 0;
309 }
310 
311 /* Return the value of the integer term in X, if one is apparent;
312    otherwise return 0.
313    Only obvious integer terms are detected.
314    This is used in cse.c with the `related_value' field.  */
315 
316 HOST_WIDE_INT
get_integer_term(x)317 get_integer_term (x)
318      rtx x;
319 {
320   if (GET_CODE (x) == CONST)
321     x = XEXP (x, 0);
322 
323   if (GET_CODE (x) == MINUS
324       && GET_CODE (XEXP (x, 1)) == CONST_INT)
325     return - INTVAL (XEXP (x, 1));
326   if (GET_CODE (x) == PLUS
327       && GET_CODE (XEXP (x, 1)) == CONST_INT)
328     return INTVAL (XEXP (x, 1));
329   return 0;
330 }
331 
332 /* If X is a constant, return the value sans apparent integer term;
333    otherwise return 0.
334    Only obvious integer terms are detected.  */
335 
336 rtx
get_related_value(x)337 get_related_value (x)
338      rtx x;
339 {
340   if (GET_CODE (x) != CONST)
341     return 0;
342   x = XEXP (x, 0);
343   if (GET_CODE (x) == PLUS
344       && GET_CODE (XEXP (x, 1)) == CONST_INT)
345     return XEXP (x, 0);
346   else if (GET_CODE (x) == MINUS
347 	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
348     return XEXP (x, 0);
349   return 0;
350 }
351 
352 /* Given a tablejump insn INSN, return the RTL expression for the offset
353    into the jump table.  If the offset cannot be determined, then return
354    NULL_RTX.
355 
356    If EARLIEST is nonzero, it is a pointer to a place where the earliest
357    insn used in locating the offset was found.  */
358 
359 rtx
get_jump_table_offset(insn,earliest)360 get_jump_table_offset (insn, earliest)
361      rtx insn;
362      rtx *earliest;
363 {
364   rtx label;
365   rtx table;
366   rtx set;
367   rtx old_insn;
368   rtx x;
369   rtx old_x;
370   rtx y;
371   rtx old_y;
372   int i;
373 
374   if (GET_CODE (insn) != JUMP_INSN
375       || ! (label = JUMP_LABEL (insn))
376       || ! (table = NEXT_INSN (label))
377       || GET_CODE (table) != JUMP_INSN
378       || (GET_CODE (PATTERN (table)) != ADDR_VEC
379 	  && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
380       || ! (set = single_set (insn)))
381     return NULL_RTX;
382 
383   x = SET_SRC (set);
384 
385   /* Some targets (eg, ARM) emit a tablejump that also
386      contains the out-of-range target.  */
387   if (GET_CODE (x) == IF_THEN_ELSE
388       && GET_CODE (XEXP (x, 2)) == LABEL_REF)
389     x = XEXP (x, 1);
390 
391   /* Search backwards and locate the expression stored in X.  */
392   for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
393        old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
394     ;
395 
396   /* If X is an expression using a relative address then strip
397      off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
398      or the jump table label.  */
399   if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
400       && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
401     {
402       for (i = 0; i < 2; i++)
403 	{
404 	  old_insn = insn;
405 	  y = XEXP (x, i);
406 
407 	  if (y == pc_rtx || y == pic_offset_table_rtx)
408 	    break;
409 
410 	  for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
411 	       old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
412 	    ;
413 
414 	  if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
415 	    break;
416 	}
417 
418       if (i >= 2)
419 	return NULL_RTX;
420 
421       x = XEXP (x, 1 - i);
422 
423       for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
424 	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
425 	;
426     }
427 
428   /* Strip off any sign or zero extension.  */
429   if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
430     {
431       x = XEXP (x, 0);
432 
433       for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
434 	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
435 	;
436     }
437 
438   /* If X isn't a MEM then this isn't a tablejump we understand.  */
439   if (GET_CODE (x) != MEM)
440     return NULL_RTX;
441 
442   /* Strip off the MEM.  */
443   x = XEXP (x, 0);
444 
445   for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
446        old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
447     ;
448 
449   /* If X isn't a PLUS than this isn't a tablejump we understand.  */
450   if (GET_CODE (x) != PLUS)
451     return NULL_RTX;
452 
453   /* At this point we should have an expression representing the jump table
454      plus an offset.  Examine each operand in order to determine which one
455      represents the jump table.  Knowing that tells us that the other operand
456      must represent the offset.  */
457   for (i = 0; i < 2; i++)
458     {
459       old_insn = insn;
460       y = XEXP (x, i);
461 
462       for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
463 	   old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
464 	;
465 
466       if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
467 	  && reg_mentioned_p (label, y))
468 	break;
469     }
470 
471   if (i >= 2)
472     return NULL_RTX;
473 
474   x = XEXP (x, 1 - i);
475 
476   /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
477   if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
478     for (i = 0; i < 2; i++)
479       if (XEXP (x, i) == pic_offset_table_rtx)
480 	{
481 	  x = XEXP (x, 1 - i);
482 	  break;
483 	}
484 
485   if (earliest)
486     *earliest = insn;
487 
488   /* Return the RTL expression representing the offset.  */
489   return x;
490 }
491 
492 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
493    a global register.  */
494 
495 static int
global_reg_mentioned_p_1(loc,data)496 global_reg_mentioned_p_1 (loc, data)
497      rtx *loc;
498      void *data ATTRIBUTE_UNUSED;
499 {
500   int regno;
501   rtx x = *loc;
502 
503   if (! x)
504     return 0;
505 
506   switch (GET_CODE (x))
507     {
508     case SUBREG:
509       if (GET_CODE (SUBREG_REG (x)) == REG)
510 	{
511 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
512 	      && global_regs[subreg_regno (x)])
513 	    return 1;
514 	  return 0;
515 	}
516       break;
517 
518     case REG:
519       regno = REGNO (x);
520       if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
521 	return 1;
522       return 0;
523 
524     case SCRATCH:
525     case PC:
526     case CC0:
527     case CONST_INT:
528     case CONST_DOUBLE:
529     case CONST:
530     case LABEL_REF:
531       return 0;
532 
533     case CALL:
534       /* A non-constant call might use a global register.  */
535       return 1;
536 
537     default:
538       break;
539     }
540 
541   return 0;
542 }
543 
544 /* Returns nonzero if X mentions a global register.  */
545 
546 int
global_reg_mentioned_p(x)547 global_reg_mentioned_p (x)
548      rtx x;
549 {
550 
551   if (INSN_P (x))
552     {
553       if (GET_CODE (x) == CALL_INSN)
554 	{
555 	  if (! CONST_OR_PURE_CALL_P (x))
556 	    return 1;
557 	  x = CALL_INSN_FUNCTION_USAGE (x);
558 	  if (x == 0)
559 	    return 0;
560 	}
561       else
562 	x = PATTERN (x);
563     }
564 
565   return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
566 }
567 
568 /* Return the number of places FIND appears within X.  If COUNT_DEST is
569    zero, we do not count occurrences inside the destination of a SET.  */
570 
571 int
count_occurrences(x,find,count_dest)572 count_occurrences (x, find, count_dest)
573      rtx x, find;
574      int count_dest;
575 {
576   int i, j;
577   enum rtx_code code;
578   const char *format_ptr;
579   int count;
580 
581   if (x == find)
582     return 1;
583 
584   code = GET_CODE (x);
585 
586   switch (code)
587     {
588     case REG:
589     case CONST_INT:
590     case CONST_DOUBLE:
591     case CONST_VECTOR:
592     case SYMBOL_REF:
593     case CODE_LABEL:
594     case PC:
595     case CC0:
596       return 0;
597 
598     case MEM:
599       if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
600 	return 1;
601       break;
602 
603     case SET:
604       if (SET_DEST (x) == find && ! count_dest)
605 	return count_occurrences (SET_SRC (x), find, count_dest);
606       break;
607 
608     default:
609       break;
610     }
611 
612   format_ptr = GET_RTX_FORMAT (code);
613   count = 0;
614 
615   for (i = 0; i < GET_RTX_LENGTH (code); i++)
616     {
617       switch (*format_ptr++)
618 	{
619 	case 'e':
620 	  count += count_occurrences (XEXP (x, i), find, count_dest);
621 	  break;
622 
623 	case 'E':
624 	  for (j = 0; j < XVECLEN (x, i); j++)
625 	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
626 	  break;
627 	}
628     }
629   return count;
630 }
631 
632 /* Nonzero if register REG appears somewhere within IN.
633    Also works if REG is not a register; in this case it checks
634    for a subexpression of IN that is Lisp "equal" to REG.  */
635 
636 int
reg_mentioned_p(reg,in)637 reg_mentioned_p (reg, in)
638      rtx reg, in;
639 {
640   const char *fmt;
641   int i;
642   enum rtx_code code;
643 
644   if (in == 0)
645     return 0;
646 
647   if (reg == in)
648     return 1;
649 
650   if (GET_CODE (in) == LABEL_REF)
651     return reg == XEXP (in, 0);
652 
653   code = GET_CODE (in);
654 
655   switch (code)
656     {
657       /* Compare registers by number.  */
658     case REG:
659       return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
660 
661       /* These codes have no constituent expressions
662 	 and are unique.  */
663     case SCRATCH:
664     case CC0:
665     case PC:
666       return 0;
667 
668     case CONST_INT:
669       return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
670 
671     case CONST_VECTOR:
672     case CONST_DOUBLE:
673       /* These are kept unique for a given value.  */
674       return 0;
675 
676     default:
677       break;
678     }
679 
680   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
681     return 1;
682 
683   fmt = GET_RTX_FORMAT (code);
684 
685   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
686     {
687       if (fmt[i] == 'E')
688 	{
689 	  int j;
690 	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
691 	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
692 	      return 1;
693 	}
694       else if (fmt[i] == 'e'
695 	       && reg_mentioned_p (reg, XEXP (in, i)))
696 	return 1;
697     }
698   return 0;
699 }
700 
701 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
702    no CODE_LABEL insn.  */
703 
704 int
no_labels_between_p(beg,end)705 no_labels_between_p (beg, end)
706      rtx beg, end;
707 {
708   rtx p;
709   if (beg == end)
710     return 0;
711   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
712     if (GET_CODE (p) == CODE_LABEL)
713       return 0;
714   return 1;
715 }
716 
717 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
718    no JUMP_INSN insn.  */
719 
720 int
no_jumps_between_p(beg,end)721 no_jumps_between_p (beg, end)
722      rtx beg, end;
723 {
724   rtx p;
725   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
726     if (GET_CODE (p) == JUMP_INSN)
727       return 0;
728   return 1;
729 }
730 
731 /* Nonzero if register REG is used in an insn between
732    FROM_INSN and TO_INSN (exclusive of those two).  */
733 
734 int
reg_used_between_p(reg,from_insn,to_insn)735 reg_used_between_p (reg, from_insn, to_insn)
736      rtx reg, from_insn, to_insn;
737 {
738   rtx insn;
739 
740   if (from_insn == to_insn)
741     return 0;
742 
743   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
744     if (INSN_P (insn)
745 	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
746 	   || (GET_CODE (insn) == CALL_INSN
747 	      && (find_reg_fusage (insn, USE, reg)
748 		  || find_reg_fusage (insn, CLOBBER, reg)))))
749       return 1;
750   return 0;
751 }
752 
753 /* Nonzero if the old value of X, a register, is referenced in BODY.  If X
754    is entirely replaced by a new value and the only use is as a SET_DEST,
755    we do not consider it a reference.  */
756 
757 int
reg_referenced_p(x,body)758 reg_referenced_p (x, body)
759      rtx x;
760      rtx body;
761 {
762   int i;
763 
764   switch (GET_CODE (body))
765     {
766     case SET:
767       if (reg_overlap_mentioned_p (x, SET_SRC (body)))
768 	return 1;
769 
770       /* If the destination is anything other than CC0, PC, a REG or a SUBREG
771 	 of a REG that occupies all of the REG, the insn references X if
772 	 it is mentioned in the destination.  */
773       if (GET_CODE (SET_DEST (body)) != CC0
774 	  && GET_CODE (SET_DEST (body)) != PC
775 	  && GET_CODE (SET_DEST (body)) != REG
776 	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
777 		&& GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
778 		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
779 		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
780 		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
781 			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
782 	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
783 	return 1;
784       return 0;
785 
786     case ASM_OPERANDS:
787       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
788 	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
789 	  return 1;
790       return 0;
791 
792     case CALL:
793     case USE:
794     case IF_THEN_ELSE:
795       return reg_overlap_mentioned_p (x, body);
796 
797     case TRAP_IF:
798       return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
799 
800     case PREFETCH:
801       return reg_overlap_mentioned_p (x, XEXP (body, 0));
802 
803     case UNSPEC:
804     case UNSPEC_VOLATILE:
805       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
806 	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
807 	  return 1;
808       return 0;
809 
810     case PARALLEL:
811       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
812 	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
813 	  return 1;
814       return 0;
815 
816     case CLOBBER:
817       if (GET_CODE (XEXP (body, 0)) == MEM)
818 	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
819 	  return 1;
820       return 0;
821 
822     case COND_EXEC:
823       if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
824 	return 1;
825       return reg_referenced_p (x, COND_EXEC_CODE (body));
826 
827     default:
828       return 0;
829     }
830 }
831 
832 /* Nonzero if register REG is referenced in an insn between
833    FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
834    not count.  */
835 
836 int
reg_referenced_between_p(reg,from_insn,to_insn)837 reg_referenced_between_p (reg, from_insn, to_insn)
838      rtx reg, from_insn, to_insn;
839 {
840   rtx insn;
841 
842   if (from_insn == to_insn)
843     return 0;
844 
845   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
846     if (INSN_P (insn)
847 	&& (reg_referenced_p (reg, PATTERN (insn))
848 	   || (GET_CODE (insn) == CALL_INSN
849 	      && find_reg_fusage (insn, USE, reg))))
850       return 1;
851   return 0;
852 }
853 
854 /* Nonzero if register REG is set or clobbered in an insn between
855    FROM_INSN and TO_INSN (exclusive of those two).  */
856 
857 int
reg_set_between_p(reg,from_insn,to_insn)858 reg_set_between_p (reg, from_insn, to_insn)
859      rtx reg, from_insn, to_insn;
860 {
861   rtx insn;
862 
863   if (from_insn == to_insn)
864     return 0;
865 
866   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
867     if (INSN_P (insn) && reg_set_p (reg, insn))
868       return 1;
869   return 0;
870 }
871 
872 /* Internals of reg_set_between_p.  */
873 int
reg_set_p(reg,insn)874 reg_set_p (reg, insn)
875      rtx reg, insn;
876 {
877   rtx body = insn;
878 
879   /* We can be passed an insn or part of one.  If we are passed an insn,
880      check if a side-effect of the insn clobbers REG.  */
881   if (INSN_P (insn))
882     {
883       if (FIND_REG_INC_NOTE (insn, reg)
884 	  || (GET_CODE (insn) == CALL_INSN
885 	      /* We'd like to test call_used_regs here, but rtlanal.c can't
886 		 reference that variable due to its use in genattrtab.  So
887 		 we'll just be more conservative.
888 
889 		 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
890 		 information holds all clobbered registers.  */
891 	      && ((GET_CODE (reg) == REG
892 		   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
893 		  || GET_CODE (reg) == MEM
894 		  || find_reg_fusage (insn, CLOBBER, reg))))
895 	return 1;
896 
897       body = PATTERN (insn);
898     }
899 
900   return set_of (reg, insn) != NULL_RTX;
901 }
902 
903 /* Similar to reg_set_between_p, but check all registers in X.  Return 0
904    only if none of them are modified between START and END.  Do not
905    consider non-registers one way or the other.  */
906 
907 int
regs_set_between_p(x,start,end)908 regs_set_between_p (x, start, end)
909      rtx x;
910      rtx start, end;
911 {
912   enum rtx_code code = GET_CODE (x);
913   const char *fmt;
914   int i, j;
915 
916   switch (code)
917     {
918     case CONST_INT:
919     case CONST_DOUBLE:
920     case CONST_VECTOR:
921     case CONST:
922     case SYMBOL_REF:
923     case LABEL_REF:
924     case PC:
925     case CC0:
926       return 0;
927 
928     case REG:
929       return reg_set_between_p (x, start, end);
930 
931     default:
932       break;
933     }
934 
935   fmt = GET_RTX_FORMAT (code);
936   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
937     {
938       if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
939 	return 1;
940 
941       else if (fmt[i] == 'E')
942 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
943 	  if (regs_set_between_p (XVECEXP (x, i, j), start, end))
944 	    return 1;
945     }
946 
947   return 0;
948 }
949 
950 /* Similar to reg_set_between_p, but check all registers in X.  Return 0
951    only if none of them are modified between START and END.  Return 1 if
952    X contains a MEM; this routine does not perform any memory aliasing.  */
953 
954 int
modified_between_p(x,start,end)955 modified_between_p (x, start, end)
956      rtx x;
957      rtx start, end;
958 {
959   enum rtx_code code = GET_CODE (x);
960   const char *fmt;
961   int i, j;
962 
963   switch (code)
964     {
965     case CONST_INT:
966     case CONST_DOUBLE:
967     case CONST_VECTOR:
968     case CONST:
969     case SYMBOL_REF:
970     case LABEL_REF:
971       return 0;
972 
973     case PC:
974     case CC0:
975       return 1;
976 
977     case MEM:
978       /* If the memory is not constant, assume it is modified.  If it is
979 	 constant, we still have to check the address.  */
980       if (! RTX_UNCHANGING_P (x))
981 	return 1;
982       break;
983 
984     case REG:
985       return reg_set_between_p (x, start, end);
986 
987     default:
988       break;
989     }
990 
991   fmt = GET_RTX_FORMAT (code);
992   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
993     {
994       if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
995 	return 1;
996 
997       else if (fmt[i] == 'E')
998 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
999 	  if (modified_between_p (XVECEXP (x, i, j), start, end))
1000 	    return 1;
1001     }
1002 
1003   return 0;
1004 }
1005 
1006 /* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
1007    of them are modified in INSN.  Return 1 if X contains a MEM; this routine
1008    does not perform any memory aliasing.  */
1009 
1010 int
modified_in_p(x,insn)1011 modified_in_p (x, insn)
1012      rtx x;
1013      rtx insn;
1014 {
1015   enum rtx_code code = GET_CODE (x);
1016   const char *fmt;
1017   int i, j;
1018 
1019   switch (code)
1020     {
1021     case CONST_INT:
1022     case CONST_DOUBLE:
1023     case CONST_VECTOR:
1024     case CONST:
1025     case SYMBOL_REF:
1026     case LABEL_REF:
1027       return 0;
1028 
1029     case PC:
1030     case CC0:
1031       return 1;
1032 
1033     case MEM:
1034       /* If the memory is not constant, assume it is modified.  If it is
1035 	 constant, we still have to check the address.  */
1036       if (! RTX_UNCHANGING_P (x))
1037 	return 1;
1038       break;
1039 
1040     case REG:
1041       return reg_set_p (x, insn);
1042 
1043     default:
1044       break;
1045     }
1046 
1047   fmt = GET_RTX_FORMAT (code);
1048   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1049     {
1050       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1051 	return 1;
1052 
1053       else if (fmt[i] == 'E')
1054 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1055 	  if (modified_in_p (XVECEXP (x, i, j), insn))
1056 	    return 1;
1057     }
1058 
1059   return 0;
1060 }
1061 
1062 /* Return true if anything in insn X is (anti,output,true) dependent on
1063    anything in insn Y.  */
1064 
1065 int
insn_dependent_p(x,y)1066 insn_dependent_p (x, y)
1067      rtx x, y;
1068 {
1069   rtx tmp;
1070 
1071   if (! INSN_P (x) || ! INSN_P (y))
1072     abort ();
1073 
1074   tmp = PATTERN (y);
1075   note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1076   if (tmp == NULL_RTX)
1077     return 1;
1078 
1079   tmp = PATTERN (x);
1080   note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1081   if (tmp == NULL_RTX)
1082     return 1;
1083 
1084   return 0;
1085 }
1086 
1087 /* A helper routine for insn_dependent_p called through note_stores.  */
1088 
1089 static void
insn_dependent_p_1(x,pat,data)1090 insn_dependent_p_1 (x, pat, data)
1091      rtx x;
1092      rtx pat ATTRIBUTE_UNUSED;
1093      void *data;
1094 {
1095   rtx * pinsn = (rtx *) data;
1096 
1097   if (*pinsn && reg_mentioned_p (x, *pinsn))
1098     *pinsn = NULL_RTX;
1099 }
1100 
1101 /* Helper function for set_of.  */
1102 struct set_of_data
1103   {
1104     rtx found;
1105     rtx pat;
1106   };
1107 
1108 static void
set_of_1(x,pat,data1)1109 set_of_1 (x, pat, data1)
1110      rtx x;
1111      rtx pat;
1112      void *data1;
1113 {
1114    struct set_of_data *data = (struct set_of_data *) (data1);
1115    if (rtx_equal_p (x, data->pat)
1116        || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1117      data->found = pat;
1118 }
1119 
1120 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1121    (either directly or via STRICT_LOW_PART and similar modifiers).  */
1122 rtx
set_of(pat,insn)1123 set_of (pat, insn)
1124      rtx pat, insn;
1125 {
1126   struct set_of_data data;
1127   data.found = NULL_RTX;
1128   data.pat = pat;
1129   note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1130   return data.found;
1131 }
1132 
1133 /* Given an INSN, return a SET expression if this insn has only a single SET.
1134    It may also have CLOBBERs, USEs, or SET whose output
1135    will not be used, which we ignore.  */
1136 
1137 rtx
single_set_2(insn,pat)1138 single_set_2 (insn, pat)
1139      rtx insn, pat;
1140 {
1141   rtx set = NULL;
1142   int set_verified = 1;
1143   int i;
1144 
1145   if (GET_CODE (pat) == PARALLEL)
1146     {
1147       for (i = 0; i < XVECLEN (pat, 0); i++)
1148 	{
1149 	  rtx sub = XVECEXP (pat, 0, i);
1150 	  switch (GET_CODE (sub))
1151 	    {
1152 	    case USE:
1153 	    case CLOBBER:
1154 	      break;
1155 
1156 	    case SET:
1157 	      /* We can consider insns having multiple sets, where all
1158 		 but one are dead as single set insns.  In common case
1159 		 only single set is present in the pattern so we want
1160 		 to avoid checking for REG_UNUSED notes unless necessary.
1161 
1162 		 When we reach set first time, we just expect this is
1163 		 the single set we are looking for and only when more
1164 		 sets are found in the insn, we check them.  */
1165 	      if (!set_verified)
1166 		{
1167 		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1168 		      && !side_effects_p (set))
1169 		    set = NULL;
1170 		  else
1171 		    set_verified = 1;
1172 		}
1173 	      if (!set)
1174 		set = sub, set_verified = 0;
1175 	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1176 		       || side_effects_p (sub))
1177 		return NULL_RTX;
1178 	      break;
1179 
1180 	    default:
1181 	      return NULL_RTX;
1182 	    }
1183 	}
1184     }
1185   return set;
1186 }
1187 
1188 /* Given an INSN, return nonzero if it has more than one SET, else return
1189    zero.  */
1190 
1191 int
multiple_sets(insn)1192 multiple_sets (insn)
1193      rtx insn;
1194 {
1195   int found;
1196   int i;
1197 
1198   /* INSN must be an insn.  */
1199   if (! INSN_P (insn))
1200     return 0;
1201 
1202   /* Only a PARALLEL can have multiple SETs.  */
1203   if (GET_CODE (PATTERN (insn)) == PARALLEL)
1204     {
1205       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1206 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1207 	  {
1208 	    /* If we have already found a SET, then return now.  */
1209 	    if (found)
1210 	      return 1;
1211 	    else
1212 	      found = 1;
1213 	  }
1214     }
1215 
1216   /* Either zero or one SET.  */
1217   return 0;
1218 }
1219 
1220 /* Return nonzero if the destination of SET equals the source
1221    and there are no side effects.  */
1222 
1223 int
set_noop_p(set)1224 set_noop_p (set)
1225      rtx set;
1226 {
1227   rtx src = SET_SRC (set);
1228   rtx dst = SET_DEST (set);
1229 
1230   if (side_effects_p (src) || side_effects_p (dst))
1231     return 0;
1232 
1233   if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1234     return rtx_equal_p (dst, src);
1235 
1236   if (dst == pc_rtx && src == pc_rtx)
1237     return 1;
1238 
1239   if (GET_CODE (dst) == SIGN_EXTRACT
1240       || GET_CODE (dst) == ZERO_EXTRACT)
1241     return rtx_equal_p (XEXP (dst, 0), src)
1242 	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1243 
1244   if (GET_CODE (dst) == STRICT_LOW_PART)
1245     dst = XEXP (dst, 0);
1246 
1247   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1248     {
1249       if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1250 	return 0;
1251       src = SUBREG_REG (src);
1252       dst = SUBREG_REG (dst);
1253     }
1254 
1255   return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1256 	  && REGNO (src) == REGNO (dst));
1257 }
1258 
1259 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1260    value to itself.  */
1261 
1262 int
noop_move_p(insn)1263 noop_move_p (insn)
1264      rtx insn;
1265 {
1266   rtx pat = PATTERN (insn);
1267 
1268   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1269     return 1;
1270 
1271   /* Insns carrying these notes are useful later on.  */
1272   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1273     return 0;
1274 
1275   /* For now treat an insn with a REG_RETVAL note as a
1276      a special insn which should not be considered a no-op.  */
1277   if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1278     return 0;
1279 
1280   if (GET_CODE (pat) == SET && set_noop_p (pat))
1281     return 1;
1282 
1283   if (GET_CODE (pat) == PARALLEL)
1284     {
1285       int i;
1286       /* If nothing but SETs of registers to themselves,
1287 	 this insn can also be deleted.  */
1288       for (i = 0; i < XVECLEN (pat, 0); i++)
1289 	{
1290 	  rtx tem = XVECEXP (pat, 0, i);
1291 
1292 	  if (GET_CODE (tem) == USE
1293 	      || GET_CODE (tem) == CLOBBER)
1294 	    continue;
1295 
1296 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1297 	    return 0;
1298 	}
1299 
1300       return 1;
1301     }
1302   return 0;
1303 }
1304 
1305 
1306 /* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1307    is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1308    If the object was modified, if we hit a partial assignment to X, or hit a
1309    CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1310    point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1311    be the src.  */
1312 
1313 rtx
find_last_value(x,pinsn,valid_to,allow_hwreg)1314 find_last_value (x, pinsn, valid_to, allow_hwreg)
1315      rtx x;
1316      rtx *pinsn;
1317      rtx valid_to;
1318      int allow_hwreg;
1319 {
1320   rtx p;
1321 
1322   for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1323        p = PREV_INSN (p))
1324     if (INSN_P (p))
1325       {
1326 	rtx set = single_set (p);
1327 	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1328 
1329 	if (set && rtx_equal_p (x, SET_DEST (set)))
1330 	  {
1331 	    rtx src = SET_SRC (set);
1332 
1333 	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1334 	      src = XEXP (note, 0);
1335 
1336 	    if ((valid_to == NULL_RTX
1337 		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1338 		/* Reject hard registers because we don't usually want
1339 		   to use them; we'd rather use a pseudo.  */
1340 		&& (! (GET_CODE (src) == REG
1341 		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1342 	      {
1343 		*pinsn = p;
1344 		return src;
1345 	      }
1346 	  }
1347 
1348 	/* If set in non-simple way, we don't have a value.  */
1349 	if (reg_set_p (x, p))
1350 	  break;
1351       }
1352 
1353   return x;
1354 }
1355 
1356 /* Return nonzero if register in range [REGNO, ENDREGNO)
1357    appears either explicitly or implicitly in X
1358    other than being stored into.
1359 
1360    References contained within the substructure at LOC do not count.
1361    LOC may be zero, meaning don't ignore anything.  */
1362 
1363 int
refers_to_regno_p(regno,endregno,x,loc)1364 refers_to_regno_p (regno, endregno, x, loc)
1365      unsigned int regno, endregno;
1366      rtx x;
1367      rtx *loc;
1368 {
1369   int i;
1370   unsigned int x_regno;
1371   RTX_CODE code;
1372   const char *fmt;
1373 
1374  repeat:
1375   /* The contents of a REG_NONNEG note is always zero, so we must come here
1376      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1377   if (x == 0)
1378     return 0;
1379 
1380   code = GET_CODE (x);
1381 
1382   switch (code)
1383     {
1384     case REG:
1385       x_regno = REGNO (x);
1386 
1387       /* If we modifying the stack, frame, or argument pointer, it will
1388 	 clobber a virtual register.  In fact, we could be more precise,
1389 	 but it isn't worth it.  */
1390       if ((x_regno == STACK_POINTER_REGNUM
1391 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1392 	   || x_regno == ARG_POINTER_REGNUM
1393 #endif
1394 	   || x_regno == FRAME_POINTER_REGNUM)
1395 	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1396 	return 1;
1397 
1398       return (endregno > x_regno
1399 	      && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1400 				    ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1401 			      : 1));
1402 
1403     case SUBREG:
1404       /* If this is a SUBREG of a hard reg, we can see exactly which
1405 	 registers are being modified.  Otherwise, handle normally.  */
1406       if (GET_CODE (SUBREG_REG (x)) == REG
1407 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1408 	{
1409 	  unsigned int inner_regno = subreg_regno (x);
1410 	  unsigned int inner_endregno
1411 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1412 			     ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1413 
1414 	  return endregno > inner_regno && regno < inner_endregno;
1415 	}
1416       break;
1417 
1418     case CLOBBER:
1419     case SET:
1420       if (&SET_DEST (x) != loc
1421 	  /* Note setting a SUBREG counts as referring to the REG it is in for
1422 	     a pseudo but not for hard registers since we can
1423 	     treat each word individually.  */
1424 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1425 	       && loc != &SUBREG_REG (SET_DEST (x))
1426 	       && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1427 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1428 	       && refers_to_regno_p (regno, endregno,
1429 				     SUBREG_REG (SET_DEST (x)), loc))
1430 	      || (GET_CODE (SET_DEST (x)) != REG
1431 		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1432 	return 1;
1433 
1434       if (code == CLOBBER || loc == &SET_SRC (x))
1435 	return 0;
1436       x = SET_SRC (x);
1437       goto repeat;
1438 
1439     default:
1440       break;
1441     }
1442 
1443   /* X does not match, so try its subexpressions.  */
1444 
1445   fmt = GET_RTX_FORMAT (code);
1446   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1447     {
1448       if (fmt[i] == 'e' && loc != &XEXP (x, i))
1449 	{
1450 	  if (i == 0)
1451 	    {
1452 	      x = XEXP (x, 0);
1453 	      goto repeat;
1454 	    }
1455 	  else
1456 	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1457 	      return 1;
1458 	}
1459       else if (fmt[i] == 'E')
1460 	{
1461 	  int j;
1462 	  for (j = XVECLEN (x, i) - 1; j >=0; j--)
1463 	    if (loc != &XVECEXP (x, i, j)
1464 		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1465 	      return 1;
1466 	}
1467     }
1468   return 0;
1469 }
1470 
1471 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1472    we check if any register number in X conflicts with the relevant register
1473    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1474    contains a MEM (we don't bother checking for memory addresses that can't
1475    conflict because we expect this to be a rare case.  */
1476 
1477 int
reg_overlap_mentioned_p(x,in)1478 reg_overlap_mentioned_p (x, in)
1479      rtx x, in;
1480 {
1481   unsigned int regno, endregno;
1482 
1483   /* Overly conservative.  */
1484   if (GET_CODE (x) == STRICT_LOW_PART
1485       || GET_CODE (x) == ZERO_EXTRACT
1486       || GET_CODE (x) == SIGN_EXTRACT)
1487     x = XEXP (x, 0);
1488 
1489   /* If either argument is a constant, then modifying X can not affect IN.  */
1490   if (CONSTANT_P (x) || CONSTANT_P (in))
1491     return 0;
1492 
1493   switch (GET_CODE (x))
1494     {
1495     case SUBREG:
1496       regno = REGNO (SUBREG_REG (x));
1497       if (regno < FIRST_PSEUDO_REGISTER)
1498 	regno = subreg_regno (x);
1499       goto do_reg;
1500 
1501     case REG:
1502       regno = REGNO (x);
1503     do_reg:
1504       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1505 			  ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1506       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1507 
1508     case MEM:
1509       {
1510 	const char *fmt;
1511 	int i;
1512 
1513 	if (GET_CODE (in) == MEM)
1514 	  return 1;
1515 
1516 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1517 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1518 	  if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1519 	    return 1;
1520 
1521 	return 0;
1522       }
1523 
1524     case SCRATCH:
1525     case PC:
1526     case CC0:
1527       return reg_mentioned_p (x, in);
1528 
1529     case PARALLEL:
1530       {
1531 	int i;
1532 
1533 	/* If any register in here refers to it we return true.  */
1534 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1535 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1536 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1537 	      return 1;
1538 	return 0;
1539       }
1540 
1541     default:
1542       break;
1543     }
1544 
1545   abort ();
1546 }
1547 
1548 /* Return the last value to which REG was set prior to INSN.  If we can't
1549    find it easily, return 0.
1550 
1551    We only return a REG, SUBREG, or constant because it is too hard to
1552    check if a MEM remains unchanged.  */
1553 
1554 rtx
reg_set_last(x,insn)1555 reg_set_last (x, insn)
1556      rtx x;
1557      rtx insn;
1558 {
1559   rtx orig_insn = insn;
1560 
1561   /* Scan backwards until reg_set_last_1 changed one of the above flags.
1562      Stop when we reach a label or X is a hard reg and we reach a
1563      CALL_INSN (if reg_set_last_last_regno is a hard reg).
1564 
1565      If we find a set of X, ensure that its SET_SRC remains unchanged.  */
1566 
1567   /* We compare with <= here, because reg_set_last_last_regno
1568      is actually the number of the first reg *not* in X.  */
1569   for (;
1570        insn && GET_CODE (insn) != CODE_LABEL
1571        && ! (GET_CODE (insn) == CALL_INSN
1572 	     && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1573        insn = PREV_INSN (insn))
1574     if (INSN_P (insn))
1575       {
1576 	rtx set = set_of (x, insn);
1577 	/* OK, this function modify our register.  See if we understand it.  */
1578 	if (set)
1579 	  {
1580 	    rtx last_value;
1581 	    if (GET_CODE (set) != SET || SET_DEST (set) != x)
1582 	      return 0;
1583 	    last_value = SET_SRC (x);
1584 	    if (CONSTANT_P (last_value)
1585 		|| ((GET_CODE (last_value) == REG
1586 		     || GET_CODE (last_value) == SUBREG)
1587 		    && ! reg_set_between_p (last_value,
1588 					    insn, orig_insn)))
1589 	      return last_value;
1590 	    else
1591 	      return 0;
1592 	  }
1593       }
1594 
1595   return 0;
1596 }
1597 
1598 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1599    (X would be the pattern of an insn).
1600    FUN receives two arguments:
1601      the REG, MEM, CC0 or PC being stored in or clobbered,
1602      the SET or CLOBBER rtx that does the store.
1603 
1604   If the item being stored in or clobbered is a SUBREG of a hard register,
1605   the SUBREG will be passed.  */
1606 
1607 void
note_stores(x,fun,data)1608 note_stores (x, fun, data)
1609      rtx x;
1610      void (*fun) PARAMS ((rtx, rtx, void *));
1611      void *data;
1612 {
1613   int i;
1614 
1615   if (GET_CODE (x) == COND_EXEC)
1616     x = COND_EXEC_CODE (x);
1617 
1618   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1619     {
1620       rtx dest = SET_DEST (x);
1621 
1622       while ((GET_CODE (dest) == SUBREG
1623 	      && (GET_CODE (SUBREG_REG (dest)) != REG
1624 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1625 	     || GET_CODE (dest) == ZERO_EXTRACT
1626 	     || GET_CODE (dest) == SIGN_EXTRACT
1627 	     || GET_CODE (dest) == STRICT_LOW_PART)
1628 	dest = XEXP (dest, 0);
1629 
1630       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1631 	 each of whose first operand is a register.  */
1632       if (GET_CODE (dest) == PARALLEL)
1633 	{
1634 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1635 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1636 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1637 	}
1638       else
1639 	(*fun) (dest, x, data);
1640     }
1641 
1642   else if (GET_CODE (x) == PARALLEL)
1643     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1644       note_stores (XVECEXP (x, 0, i), fun, data);
1645 }
1646 
1647 /* Like notes_stores, but call FUN for each expression that is being
1648    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1649    FUN for each expression, not any interior subexpressions.  FUN receives a
1650    pointer to the expression and the DATA passed to this function.
1651 
1652    Note that this is not quite the same test as that done in reg_referenced_p
1653    since that considers something as being referenced if it is being
1654    partially set, while we do not.  */
1655 
1656 void
note_uses(pbody,fun,data)1657 note_uses (pbody, fun, data)
1658      rtx *pbody;
1659      void (*fun) PARAMS ((rtx *, void *));
1660      void *data;
1661 {
1662   rtx body = *pbody;
1663   int i;
1664 
1665   switch (GET_CODE (body))
1666     {
1667     case COND_EXEC:
1668       (*fun) (&COND_EXEC_TEST (body), data);
1669       note_uses (&COND_EXEC_CODE (body), fun, data);
1670       return;
1671 
1672     case PARALLEL:
1673       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1674 	note_uses (&XVECEXP (body, 0, i), fun, data);
1675       return;
1676 
1677     case USE:
1678       (*fun) (&XEXP (body, 0), data);
1679       return;
1680 
1681     case ASM_OPERANDS:
1682       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1683 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1684       return;
1685 
1686     case TRAP_IF:
1687       (*fun) (&TRAP_CONDITION (body), data);
1688       return;
1689 
1690     case PREFETCH:
1691       (*fun) (&XEXP (body, 0), data);
1692       return;
1693 
1694     case UNSPEC:
1695     case UNSPEC_VOLATILE:
1696       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1697 	(*fun) (&XVECEXP (body, 0, i), data);
1698       return;
1699 
1700     case CLOBBER:
1701       if (GET_CODE (XEXP (body, 0)) == MEM)
1702 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1703       return;
1704 
1705     case SET:
1706       {
1707 	rtx dest = SET_DEST (body);
1708 
1709 	/* For sets we replace everything in source plus registers in memory
1710 	   expression in store and operands of a ZERO_EXTRACT.  */
1711 	(*fun) (&SET_SRC (body), data);
1712 
1713 	if (GET_CODE (dest) == ZERO_EXTRACT)
1714 	  {
1715 	    (*fun) (&XEXP (dest, 1), data);
1716 	    (*fun) (&XEXP (dest, 2), data);
1717 	  }
1718 
1719 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1720 	  dest = XEXP (dest, 0);
1721 
1722 	if (GET_CODE (dest) == MEM)
1723 	  (*fun) (&XEXP (dest, 0), data);
1724       }
1725       return;
1726 
1727     default:
1728       /* All the other possibilities never store.  */
1729       (*fun) (pbody, data);
1730       return;
1731     }
1732 }
1733 
1734 /* Return nonzero if X's old contents don't survive after INSN.
1735    This will be true if X is (cc0) or if X is a register and
1736    X dies in INSN or because INSN entirely sets X.
1737 
1738    "Entirely set" means set directly and not through a SUBREG,
1739    ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1740    Likewise, REG_INC does not count.
1741 
1742    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1743    but for this use that makes no difference, since regs don't overlap
1744    during their lifetimes.  Therefore, this function may be used
1745    at any time after deaths have been computed (in flow.c).
1746 
1747    If REG is a hard reg that occupies multiple machine registers, this
1748    function will only return 1 if each of those registers will be replaced
1749    by INSN.  */
1750 
1751 int
dead_or_set_p(insn,x)1752 dead_or_set_p (insn, x)
1753      rtx insn;
1754      rtx x;
1755 {
1756   unsigned int regno, last_regno;
1757   unsigned int i;
1758 
1759   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1760   if (GET_CODE (x) == CC0)
1761     return 1;
1762 
1763   if (GET_CODE (x) != REG)
1764     abort ();
1765 
1766   regno = REGNO (x);
1767   last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1768 		: regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1769 
1770   for (i = regno; i <= last_regno; i++)
1771     if (! dead_or_set_regno_p (insn, i))
1772       return 0;
1773 
1774   return 1;
1775 }
1776 
1777 /* Utility function for dead_or_set_p to check an individual register.  Also
1778    called from flow.c.  */
1779 
1780 int
dead_or_set_regno_p(insn,test_regno)1781 dead_or_set_regno_p (insn, test_regno)
1782      rtx insn;
1783      unsigned int test_regno;
1784 {
1785   unsigned int regno, endregno;
1786   rtx pattern;
1787 
1788   /* See if there is a death note for something that includes TEST_REGNO.  */
1789   if (find_regno_note (insn, REG_DEAD, test_regno))
1790     return 1;
1791 
1792   if (GET_CODE (insn) == CALL_INSN
1793       && find_regno_fusage (insn, CLOBBER, test_regno))
1794     return 1;
1795 
1796   pattern = PATTERN (insn);
1797 
1798   if (GET_CODE (pattern) == COND_EXEC)
1799     pattern = COND_EXEC_CODE (pattern);
1800 
1801   if (GET_CODE (pattern) == SET)
1802     {
1803       rtx dest = SET_DEST (pattern);
1804 
1805       /* A value is totally replaced if it is the destination or the
1806 	 destination is a SUBREG of REGNO that does not change the number of
1807 	 words in it.  */
1808       if (GET_CODE (dest) == SUBREG
1809 	  && (((GET_MODE_SIZE (GET_MODE (dest))
1810 		+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1811 	      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1812 		   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1813 	dest = SUBREG_REG (dest);
1814 
1815       if (GET_CODE (dest) != REG)
1816 	return 0;
1817 
1818       regno = REGNO (dest);
1819       endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1820 		  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1821 
1822       return (test_regno >= regno && test_regno < endregno);
1823     }
1824   else if (GET_CODE (pattern) == PARALLEL)
1825     {
1826       int i;
1827 
1828       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1829 	{
1830 	  rtx body = XVECEXP (pattern, 0, i);
1831 
1832 	  if (GET_CODE (body) == COND_EXEC)
1833 	    body = COND_EXEC_CODE (body);
1834 
1835 	  if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1836 	    {
1837 	      rtx dest = SET_DEST (body);
1838 
1839 	      if (GET_CODE (dest) == SUBREG
1840 		  && (((GET_MODE_SIZE (GET_MODE (dest))
1841 			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1842 		      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1843 			   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1844 		dest = SUBREG_REG (dest);
1845 
1846 	      if (GET_CODE (dest) != REG)
1847 		continue;
1848 
1849 	      regno = REGNO (dest);
1850 	      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1851 			  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1852 
1853 	      if (test_regno >= regno && test_regno < endregno)
1854 		return 1;
1855 	    }
1856 	}
1857     }
1858 
1859   return 0;
1860 }
1861 
1862 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1863    If DATUM is nonzero, look for one whose datum is DATUM.  */
1864 
1865 rtx
find_reg_note(insn,kind,datum)1866 find_reg_note (insn, kind, datum)
1867      rtx insn;
1868      enum reg_note kind;
1869      rtx datum;
1870 {
1871   rtx link;
1872 
1873   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1874   if (! INSN_P (insn))
1875     return 0;
1876 
1877   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1878     if (REG_NOTE_KIND (link) == kind
1879 	&& (datum == 0 || datum == XEXP (link, 0)))
1880       return link;
1881   return 0;
1882 }
1883 
1884 /* Return the reg-note of kind KIND in insn INSN which applies to register
1885    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1886    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1887    it might be the case that the note overlaps REGNO.  */
1888 
1889 rtx
find_regno_note(insn,kind,regno)1890 find_regno_note (insn, kind, regno)
1891      rtx insn;
1892      enum reg_note kind;
1893      unsigned int regno;
1894 {
1895   rtx link;
1896 
1897   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1898   if (! INSN_P (insn))
1899     return 0;
1900 
1901   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1902     if (REG_NOTE_KIND (link) == kind
1903 	/* Verify that it is a register, so that scratch and MEM won't cause a
1904 	   problem here.  */
1905 	&& GET_CODE (XEXP (link, 0)) == REG
1906 	&& REGNO (XEXP (link, 0)) <= regno
1907 	&& ((REGNO (XEXP (link, 0))
1908 	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1909 		: HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1910 				    GET_MODE (XEXP (link, 0)))))
1911 	    > regno))
1912       return link;
1913   return 0;
1914 }
1915 
1916 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1917    has such a note.  */
1918 
1919 rtx
find_reg_equal_equiv_note(insn)1920 find_reg_equal_equiv_note (insn)
1921      rtx insn;
1922 {
1923   rtx note;
1924 
1925   if (single_set (insn) == 0)
1926     return 0;
1927   else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1928     return note;
1929   else
1930     return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1931 }
1932 
1933 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1934    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1935 
1936 int
find_reg_fusage(insn,code,datum)1937 find_reg_fusage (insn, code, datum)
1938      rtx insn;
1939      enum rtx_code code;
1940      rtx datum;
1941 {
1942   /* If it's not a CALL_INSN, it can't possibly have a
1943      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1944   if (GET_CODE (insn) != CALL_INSN)
1945     return 0;
1946 
1947   if (! datum)
1948     abort ();
1949 
1950   if (GET_CODE (datum) != REG)
1951     {
1952       rtx link;
1953 
1954       for (link = CALL_INSN_FUNCTION_USAGE (insn);
1955 	   link;
1956 	   link = XEXP (link, 1))
1957 	if (GET_CODE (XEXP (link, 0)) == code
1958 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1959 	  return 1;
1960     }
1961   else
1962     {
1963       unsigned int regno = REGNO (datum);
1964 
1965       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1966 	 to pseudo registers, so don't bother checking.  */
1967 
1968       if (regno < FIRST_PSEUDO_REGISTER)
1969 	{
1970 	  unsigned int end_regno
1971 	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1972 	  unsigned int i;
1973 
1974 	  for (i = regno; i < end_regno; i++)
1975 	    if (find_regno_fusage (insn, code, i))
1976 	      return 1;
1977 	}
1978     }
1979 
1980   return 0;
1981 }
1982 
1983 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1984    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1985 
1986 int
find_regno_fusage(insn,code,regno)1987 find_regno_fusage (insn, code, regno)
1988      rtx insn;
1989      enum rtx_code code;
1990      unsigned int regno;
1991 {
1992   rtx link;
1993 
1994   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1995      to pseudo registers, so don't bother checking.  */
1996 
1997   if (regno >= FIRST_PSEUDO_REGISTER
1998       || GET_CODE (insn) != CALL_INSN )
1999     return 0;
2000 
2001   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2002     {
2003       unsigned int regnote;
2004       rtx op, reg;
2005 
2006       if (GET_CODE (op = XEXP (link, 0)) == code
2007 	  && GET_CODE (reg = XEXP (op, 0)) == REG
2008 	  && (regnote = REGNO (reg)) <= regno
2009 	  && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2010 	return 1;
2011     }
2012 
2013   return 0;
2014 }
2015 
2016 /* Return true if INSN is a call to a pure function.  */
2017 
2018 int
pure_call_p(insn)2019 pure_call_p (insn)
2020      rtx insn;
2021 {
2022   rtx link;
2023 
2024   if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2025     return 0;
2026 
2027   /* Look for the note that differentiates const and pure functions.  */
2028   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2029     {
2030       rtx u, m;
2031 
2032       if (GET_CODE (u = XEXP (link, 0)) == USE
2033 	  && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2034 	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
2035 	return 1;
2036     }
2037 
2038   return 0;
2039 }
2040 
2041 /* Remove register note NOTE from the REG_NOTES of INSN.  */
2042 
2043 void
remove_note(insn,note)2044 remove_note (insn, note)
2045      rtx insn;
2046      rtx note;
2047 {
2048   rtx link;
2049 
2050   if (note == NULL_RTX)
2051     return;
2052 
2053   if (REG_NOTES (insn) == note)
2054     {
2055       REG_NOTES (insn) = XEXP (note, 1);
2056       return;
2057     }
2058 
2059   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2060     if (XEXP (link, 1) == note)
2061       {
2062 	XEXP (link, 1) = XEXP (note, 1);
2063 	return;
2064       }
2065 
2066   abort ();
2067 }
2068 
2069 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2070    return 1 if it is found.  A simple equality test is used to determine if
2071    NODE matches.  */
2072 
2073 int
in_expr_list_p(listp,node)2074 in_expr_list_p (listp, node)
2075      rtx listp;
2076      rtx node;
2077 {
2078   rtx x;
2079 
2080   for (x = listp; x; x = XEXP (x, 1))
2081     if (node == XEXP (x, 0))
2082       return 1;
2083 
2084   return 0;
2085 }
2086 
2087 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2088    remove that entry from the list if it is found.
2089 
2090    A simple equality test is used to determine if NODE matches.  */
2091 
2092 void
remove_node_from_expr_list(node,listp)2093 remove_node_from_expr_list (node, listp)
2094      rtx node;
2095      rtx *listp;
2096 {
2097   rtx temp = *listp;
2098   rtx prev = NULL_RTX;
2099 
2100   while (temp)
2101     {
2102       if (node == XEXP (temp, 0))
2103 	{
2104 	  /* Splice the node out of the list.  */
2105 	  if (prev)
2106 	    XEXP (prev, 1) = XEXP (temp, 1);
2107 	  else
2108 	    *listp = XEXP (temp, 1);
2109 
2110 	  return;
2111 	}
2112 
2113       prev = temp;
2114       temp = XEXP (temp, 1);
2115     }
2116 }
2117 
2118 /* Nonzero if X contains any volatile instructions.  These are instructions
2119    which may cause unpredictable machine state instructions, and thus no
2120    instructions should be moved or combined across them.  This includes
2121    only volatile asms and UNSPEC_VOLATILE instructions.  */
2122 
2123 int
volatile_insn_p(x)2124 volatile_insn_p (x)
2125      rtx x;
2126 {
2127   RTX_CODE code;
2128 
2129   code = GET_CODE (x);
2130   switch (code)
2131     {
2132     case LABEL_REF:
2133     case SYMBOL_REF:
2134     case CONST_INT:
2135     case CONST:
2136     case CONST_DOUBLE:
2137     case CONST_VECTOR:
2138     case CC0:
2139     case PC:
2140     case REG:
2141     case SCRATCH:
2142     case CLOBBER:
2143     case ADDR_VEC:
2144     case ADDR_DIFF_VEC:
2145     case CALL:
2146     case MEM:
2147       return 0;
2148 
2149     case UNSPEC_VOLATILE:
2150  /* case TRAP_IF: This isn't clear yet.  */
2151       return 1;
2152 
2153     case ASM_INPUT:
2154     case ASM_OPERANDS:
2155       if (MEM_VOLATILE_P (x))
2156 	return 1;
2157 
2158     default:
2159       break;
2160     }
2161 
2162   /* Recursively scan the operands of this expression.  */
2163 
2164   {
2165     const char *fmt = GET_RTX_FORMAT (code);
2166     int i;
2167 
2168     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2169       {
2170 	if (fmt[i] == 'e')
2171 	  {
2172 	    if (volatile_insn_p (XEXP (x, i)))
2173 	      return 1;
2174 	  }
2175 	else if (fmt[i] == 'E')
2176 	  {
2177 	    int j;
2178 	    for (j = 0; j < XVECLEN (x, i); j++)
2179 	      if (volatile_insn_p (XVECEXP (x, i, j)))
2180 		return 1;
2181 	  }
2182       }
2183   }
2184   return 0;
2185 }
2186 
2187 /* Nonzero if X contains any volatile memory references
2188    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2189 
2190 int
volatile_refs_p(x)2191 volatile_refs_p (x)
2192      rtx x;
2193 {
2194   RTX_CODE code;
2195 
2196   code = GET_CODE (x);
2197   switch (code)
2198     {
2199     case LABEL_REF:
2200     case SYMBOL_REF:
2201     case CONST_INT:
2202     case CONST:
2203     case CONST_DOUBLE:
2204     case CONST_VECTOR:
2205     case CC0:
2206     case PC:
2207     case REG:
2208     case SCRATCH:
2209     case CLOBBER:
2210     case ADDR_VEC:
2211     case ADDR_DIFF_VEC:
2212       return 0;
2213 
2214     case UNSPEC_VOLATILE:
2215       return 1;
2216 
2217     case MEM:
2218     case ASM_INPUT:
2219     case ASM_OPERANDS:
2220       if (MEM_VOLATILE_P (x))
2221 	return 1;
2222 
2223     default:
2224       break;
2225     }
2226 
2227   /* Recursively scan the operands of this expression.  */
2228 
2229   {
2230     const char *fmt = GET_RTX_FORMAT (code);
2231     int i;
2232 
2233     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2234       {
2235 	if (fmt[i] == 'e')
2236 	  {
2237 	    if (volatile_refs_p (XEXP (x, i)))
2238 	      return 1;
2239 	  }
2240 	else if (fmt[i] == 'E')
2241 	  {
2242 	    int j;
2243 	    for (j = 0; j < XVECLEN (x, i); j++)
2244 	      if (volatile_refs_p (XVECEXP (x, i, j)))
2245 		return 1;
2246 	  }
2247       }
2248   }
2249   return 0;
2250 }
2251 
2252 /* Similar to above, except that it also rejects register pre- and post-
2253    incrementing.  */
2254 
2255 int
side_effects_p(x)2256 side_effects_p (x)
2257      rtx x;
2258 {
2259   RTX_CODE code;
2260 
2261   code = GET_CODE (x);
2262   switch (code)
2263     {
2264     case LABEL_REF:
2265     case SYMBOL_REF:
2266     case CONST_INT:
2267     case CONST:
2268     case CONST_DOUBLE:
2269     case CONST_VECTOR:
2270     case CC0:
2271     case PC:
2272     case REG:
2273     case SCRATCH:
2274     case ADDR_VEC:
2275     case ADDR_DIFF_VEC:
2276       return 0;
2277 
2278     case CLOBBER:
2279       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2280 	 when some combination can't be done.  If we see one, don't think
2281 	 that we can simplify the expression.  */
2282       return (GET_MODE (x) != VOIDmode);
2283 
2284     case PRE_INC:
2285     case PRE_DEC:
2286     case POST_INC:
2287     case POST_DEC:
2288     case PRE_MODIFY:
2289     case POST_MODIFY:
2290     case CALL:
2291     case UNSPEC_VOLATILE:
2292  /* case TRAP_IF: This isn't clear yet.  */
2293       return 1;
2294 
2295     case MEM:
2296     case ASM_INPUT:
2297     case ASM_OPERANDS:
2298       if (MEM_VOLATILE_P (x))
2299 	return 1;
2300 
2301     default:
2302       break;
2303     }
2304 
2305   /* Recursively scan the operands of this expression.  */
2306 
2307   {
2308     const char *fmt = GET_RTX_FORMAT (code);
2309     int i;
2310 
2311     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2312       {
2313 	if (fmt[i] == 'e')
2314 	  {
2315 	    if (side_effects_p (XEXP (x, i)))
2316 	      return 1;
2317 	  }
2318 	else if (fmt[i] == 'E')
2319 	  {
2320 	    int j;
2321 	    for (j = 0; j < XVECLEN (x, i); j++)
2322 	      if (side_effects_p (XVECEXP (x, i, j)))
2323 		return 1;
2324 	  }
2325       }
2326   }
2327   return 0;
2328 }
2329 
2330 /* Return nonzero if evaluating rtx X might cause a trap.  */
2331 
2332 int
may_trap_p(x)2333 may_trap_p (x)
2334      rtx x;
2335 {
2336   int i;
2337   enum rtx_code code;
2338   const char *fmt;
2339 
2340   if (x == 0)
2341     return 0;
2342   code = GET_CODE (x);
2343   switch (code)
2344     {
2345       /* Handle these cases quickly.  */
2346     case CONST_INT:
2347     case CONST_DOUBLE:
2348     case CONST_VECTOR:
2349     case SYMBOL_REF:
2350     case LABEL_REF:
2351     case CONST:
2352     case PC:
2353     case CC0:
2354     case REG:
2355     case SCRATCH:
2356       return 0;
2357 
2358     case ASM_INPUT:
2359     case UNSPEC_VOLATILE:
2360     case TRAP_IF:
2361       return 1;
2362 
2363     case ASM_OPERANDS:
2364       return MEM_VOLATILE_P (x);
2365 
2366       /* Memory ref can trap unless it's a static var or a stack slot.  */
2367     case MEM:
2368       if (MEM_NOTRAP_P (x))
2369 	return 0;
2370       return rtx_addr_can_trap_p (XEXP (x, 0));
2371 
2372       /* Division by a non-constant might trap.  */
2373     case DIV:
2374     case MOD:
2375     case UDIV:
2376     case UMOD:
2377       if (HONOR_SNANS (GET_MODE (x)))
2378 	return 1;
2379       if (! CONSTANT_P (XEXP (x, 1))
2380 	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2381 	      && flag_trapping_math))
2382 	return 1;
2383       /* This was const0_rtx, but by not using that,
2384 	 we can link this file into other programs.  */
2385       if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2386 	return 1;
2387       break;
2388 
2389     case EXPR_LIST:
2390       /* An EXPR_LIST is used to represent a function call.  This
2391 	 certainly may trap.  */
2392       return 1;
2393 
2394     case GE:
2395     case GT:
2396     case LE:
2397     case LT:
2398     case COMPARE:
2399       /* Some floating point comparisons may trap.  */
2400       if (!flag_trapping_math)
2401 	break;
2402       /* ??? There is no machine independent way to check for tests that trap
2403 	 when COMPARE is used, though many targets do make this distinction.
2404 	 For instance, sparc uses CCFPE for compares which generate exceptions
2405 	 and CCFP for compares which do not generate exceptions.  */
2406       if (HONOR_NANS (GET_MODE (x)))
2407 	return 1;
2408       /* But often the compare has some CC mode, so check operand
2409 	 modes as well.  */
2410       if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2411 	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2412 	return 1;
2413       break;
2414 
2415     case EQ:
2416     case NE:
2417       if (HONOR_SNANS (GET_MODE (x)))
2418 	return 1;
2419       /* Often comparison is CC mode, so check operand modes.  */
2420       if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2421 	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2422 	return 1;
2423       break;
2424 
2425     case FIX:
2426       /* Conversion of floating point might trap.  */
2427       if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2428 	return 1;
2429       break;
2430 
2431     case NEG:
2432     case ABS:
2433       /* These operations don't trap even with floating point.  */
2434       break;
2435 
2436     default:
2437       /* Any floating arithmetic may trap.  */
2438       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2439 	  && flag_trapping_math)
2440 	return 1;
2441     }
2442 
2443   fmt = GET_RTX_FORMAT (code);
2444   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2445     {
2446       if (fmt[i] == 'e')
2447 	{
2448 	  if (may_trap_p (XEXP (x, i)))
2449 	    return 1;
2450 	}
2451       else if (fmt[i] == 'E')
2452 	{
2453 	  int j;
2454 	  for (j = 0; j < XVECLEN (x, i); j++)
2455 	    if (may_trap_p (XVECEXP (x, i, j)))
2456 	      return 1;
2457 	}
2458     }
2459   return 0;
2460 }
2461 
2462 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2463    i.e., an inequality.  */
2464 
2465 int
inequality_comparisons_p(x)2466 inequality_comparisons_p (x)
2467      rtx x;
2468 {
2469   const char *fmt;
2470   int len, i;
2471   enum rtx_code code = GET_CODE (x);
2472 
2473   switch (code)
2474     {
2475     case REG:
2476     case SCRATCH:
2477     case PC:
2478     case CC0:
2479     case CONST_INT:
2480     case CONST_DOUBLE:
2481     case CONST_VECTOR:
2482     case CONST:
2483     case LABEL_REF:
2484     case SYMBOL_REF:
2485       return 0;
2486 
2487     case LT:
2488     case LTU:
2489     case GT:
2490     case GTU:
2491     case LE:
2492     case LEU:
2493     case GE:
2494     case GEU:
2495       return 1;
2496 
2497     default:
2498       break;
2499     }
2500 
2501   len = GET_RTX_LENGTH (code);
2502   fmt = GET_RTX_FORMAT (code);
2503 
2504   for (i = 0; i < len; i++)
2505     {
2506       if (fmt[i] == 'e')
2507 	{
2508 	  if (inequality_comparisons_p (XEXP (x, i)))
2509 	    return 1;
2510 	}
2511       else if (fmt[i] == 'E')
2512 	{
2513 	  int j;
2514 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2515 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2516 	      return 1;
2517 	}
2518     }
2519 
2520   return 0;
2521 }
2522 
2523 /* Replace any occurrence of FROM in X with TO.  The function does
2524    not enter into CONST_DOUBLE for the replace.
2525 
2526    Note that copying is not done so X must not be shared unless all copies
2527    are to be modified.  */
2528 
2529 rtx
replace_rtx(x,from,to)2530 replace_rtx (x, from, to)
2531      rtx x, from, to;
2532 {
2533   int i, j;
2534   const char *fmt;
2535 
2536   /* The following prevents loops occurrence when we change MEM in
2537      CONST_DOUBLE onto the same CONST_DOUBLE.  */
2538   if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2539     return x;
2540 
2541   if (x == from)
2542     return to;
2543 
2544   /* Allow this function to make replacements in EXPR_LISTs.  */
2545   if (x == 0)
2546     return 0;
2547 
2548   if (GET_CODE (x) == SUBREG)
2549     {
2550       rtx new = replace_rtx (SUBREG_REG (x), from, to);
2551 
2552       if (GET_CODE (new) == CONST_INT)
2553 	{
2554 	  x = simplify_subreg (GET_MODE (x), new,
2555 			       GET_MODE (SUBREG_REG (x)),
2556 			       SUBREG_BYTE (x));
2557 	  if (! x)
2558 	    abort ();
2559 	}
2560       else
2561 	SUBREG_REG (x) = new;
2562 
2563       return x;
2564     }
2565   else if (GET_CODE (x) == ZERO_EXTEND)
2566     {
2567       rtx new = replace_rtx (XEXP (x, 0), from, to);
2568 
2569       if (GET_CODE (new) == CONST_INT)
2570 	{
2571 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2572 					new, GET_MODE (XEXP (x, 0)));
2573 	  if (! x)
2574 	    abort ();
2575 	}
2576       else
2577 	XEXP (x, 0) = new;
2578 
2579       return x;
2580     }
2581 
2582   fmt = GET_RTX_FORMAT (GET_CODE (x));
2583   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2584     {
2585       if (fmt[i] == 'e')
2586 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2587       else if (fmt[i] == 'E')
2588 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2589 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2590     }
2591 
2592   return x;
2593 }
2594 
2595 /* Throughout the rtx X, replace many registers according to REG_MAP.
2596    Return the replacement for X (which may be X with altered contents).
2597    REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2598    NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2599 
2600    We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
2601    should not be mapped to pseudos or vice versa since validate_change
2602    is not called.
2603 
2604    If REPLACE_DEST is 1, replacements are also done in destinations;
2605    otherwise, only sources are replaced.  */
2606 
2607 rtx
replace_regs(x,reg_map,nregs,replace_dest)2608 replace_regs (x, reg_map, nregs, replace_dest)
2609      rtx x;
2610      rtx *reg_map;
2611      unsigned int nregs;
2612      int replace_dest;
2613 {
2614   enum rtx_code code;
2615   int i;
2616   const char *fmt;
2617 
2618   if (x == 0)
2619     return x;
2620 
2621   code = GET_CODE (x);
2622   switch (code)
2623     {
2624     case SCRATCH:
2625     case PC:
2626     case CC0:
2627     case CONST_INT:
2628     case CONST_DOUBLE:
2629     case CONST_VECTOR:
2630     case CONST:
2631     case SYMBOL_REF:
2632     case LABEL_REF:
2633       return x;
2634 
2635     case REG:
2636       /* Verify that the register has an entry before trying to access it.  */
2637       if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2638 	{
2639 	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
2640 	     this replacement occurs more than once then each instance will
2641 	     get distinct rtx.  */
2642 	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2643 	    return copy_rtx (reg_map[REGNO (x)]);
2644 	  return reg_map[REGNO (x)];
2645 	}
2646       return x;
2647 
2648     case SUBREG:
2649       /* Prevent making nested SUBREGs.  */
2650       if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2651 	  && reg_map[REGNO (SUBREG_REG (x))] != 0
2652 	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2653 	{
2654 	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2655 	  return simplify_gen_subreg (GET_MODE (x), map_val,
2656 				      GET_MODE (SUBREG_REG (x)),
2657 				      SUBREG_BYTE (x));
2658 	}
2659       break;
2660 
2661     case SET:
2662       if (replace_dest)
2663 	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2664 
2665       else if (GET_CODE (SET_DEST (x)) == MEM
2666 	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2667 	/* Even if we are not to replace destinations, replace register if it
2668 	   is CONTAINED in destination (destination is memory or
2669 	   STRICT_LOW_PART).  */
2670 	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2671 					       reg_map, nregs, 0);
2672       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2673 	/* Similarly, for ZERO_EXTRACT we replace all operands.  */
2674 	break;
2675 
2676       SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2677       return x;
2678 
2679     default:
2680       break;
2681     }
2682 
2683   fmt = GET_RTX_FORMAT (code);
2684   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2685     {
2686       if (fmt[i] == 'e')
2687 	XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2688       else if (fmt[i] == 'E')
2689 	{
2690 	  int j;
2691 	  for (j = 0; j < XVECLEN (x, i); j++)
2692 	    XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2693 					      nregs, replace_dest);
2694 	}
2695     }
2696   return x;
2697 }
2698 
2699 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2700    constant that is not in the constant pool and not in the condition
2701    of an IF_THEN_ELSE.  */
2702 
2703 static int
computed_jump_p_1(x)2704 computed_jump_p_1 (x)
2705      rtx x;
2706 {
2707   enum rtx_code code = GET_CODE (x);
2708   int i, j;
2709   const char *fmt;
2710 
2711   switch (code)
2712     {
2713     case LABEL_REF:
2714     case PC:
2715       return 0;
2716 
2717     case CONST:
2718     case CONST_INT:
2719     case CONST_DOUBLE:
2720     case CONST_VECTOR:
2721     case SYMBOL_REF:
2722     case REG:
2723       return 1;
2724 
2725     case MEM:
2726       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2727 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2728 
2729     case IF_THEN_ELSE:
2730       return (computed_jump_p_1 (XEXP (x, 1))
2731 	      || computed_jump_p_1 (XEXP (x, 2)));
2732 
2733     default:
2734       break;
2735     }
2736 
2737   fmt = GET_RTX_FORMAT (code);
2738   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2739     {
2740       if (fmt[i] == 'e'
2741 	  && computed_jump_p_1 (XEXP (x, i)))
2742 	return 1;
2743 
2744       else if (fmt[i] == 'E')
2745 	for (j = 0; j < XVECLEN (x, i); j++)
2746 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2747 	    return 1;
2748     }
2749 
2750   return 0;
2751 }
2752 
2753 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2754 
2755    Tablejumps and casesi insns are not considered indirect jumps;
2756    we can recognize them by a (use (label_ref)).  */
2757 
2758 int
computed_jump_p(insn)2759 computed_jump_p (insn)
2760      rtx insn;
2761 {
2762   int i;
2763   if (GET_CODE (insn) == JUMP_INSN)
2764     {
2765       rtx pat = PATTERN (insn);
2766 
2767       if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2768 	return 0;
2769       else if (GET_CODE (pat) == PARALLEL)
2770 	{
2771 	  int len = XVECLEN (pat, 0);
2772 	  int has_use_labelref = 0;
2773 
2774 	  for (i = len - 1; i >= 0; i--)
2775 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2776 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2777 		    == LABEL_REF))
2778 	      has_use_labelref = 1;
2779 
2780 	  if (! has_use_labelref)
2781 	    for (i = len - 1; i >= 0; i--)
2782 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2783 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2784 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2785 		return 1;
2786 	}
2787       else if (GET_CODE (pat) == SET
2788 	       && SET_DEST (pat) == pc_rtx
2789 	       && computed_jump_p_1 (SET_SRC (pat)))
2790 	return 1;
2791     }
2792   return 0;
2793 }
2794 
2795 /* Traverse X via depth-first search, calling F for each
2796    sub-expression (including X itself).  F is also passed the DATA.
2797    If F returns -1, do not traverse sub-expressions, but continue
2798    traversing the rest of the tree.  If F ever returns any other
2799    nonzero value, stop the traversal, and return the value returned
2800    by F.  Otherwise, return 0.  This function does not traverse inside
2801    tree structure that contains RTX_EXPRs, or into sub-expressions
2802    whose format code is `0' since it is not known whether or not those
2803    codes are actually RTL.
2804 
2805    This routine is very general, and could (should?) be used to
2806    implement many of the other routines in this file.  */
2807 
2808 int
for_each_rtx(x,f,data)2809 for_each_rtx (x, f, data)
2810      rtx *x;
2811      rtx_function f;
2812      void *data;
2813 {
2814   int result;
2815   int length;
2816   const char *format;
2817   int i;
2818 
2819   /* Call F on X.  */
2820   result = (*f) (x, data);
2821   if (result == -1)
2822     /* Do not traverse sub-expressions.  */
2823     return 0;
2824   else if (result != 0)
2825     /* Stop the traversal.  */
2826     return result;
2827 
2828   if (*x == NULL_RTX)
2829     /* There are no sub-expressions.  */
2830     return 0;
2831 
2832   length = GET_RTX_LENGTH (GET_CODE (*x));
2833   format = GET_RTX_FORMAT (GET_CODE (*x));
2834 
2835   for (i = 0; i < length; ++i)
2836     {
2837       switch (format[i])
2838 	{
2839 	case 'e':
2840 	  result = for_each_rtx (&XEXP (*x, i), f, data);
2841 	  if (result != 0)
2842 	    return result;
2843 	  break;
2844 
2845 	case 'V':
2846 	case 'E':
2847 	  if (XVEC (*x, i) != 0)
2848 	    {
2849 	      int j;
2850 	      for (j = 0; j < XVECLEN (*x, i); ++j)
2851 		{
2852 		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2853 		  if (result != 0)
2854 		    return result;
2855 		}
2856 	    }
2857 	  break;
2858 
2859 	default:
2860 	  /* Nothing to do.  */
2861 	  break;
2862 	}
2863 
2864     }
2865 
2866   return 0;
2867 }
2868 
2869 /* Searches X for any reference to REGNO, returning the rtx of the
2870    reference found if any.  Otherwise, returns NULL_RTX.  */
2871 
2872 rtx
regno_use_in(regno,x)2873 regno_use_in (regno, x)
2874      unsigned int regno;
2875      rtx x;
2876 {
2877   const char *fmt;
2878   int i, j;
2879   rtx tem;
2880 
2881   if (GET_CODE (x) == REG && REGNO (x) == regno)
2882     return x;
2883 
2884   fmt = GET_RTX_FORMAT (GET_CODE (x));
2885   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2886     {
2887       if (fmt[i] == 'e')
2888 	{
2889 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
2890 	    return tem;
2891 	}
2892       else if (fmt[i] == 'E')
2893 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2894 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2895 	    return tem;
2896     }
2897 
2898   return NULL_RTX;
2899 }
2900 
2901 /* Return a value indicating whether OP, an operand of a commutative
2902    operation, is preferred as the first or second operand.  The higher
2903    the value, the stronger the preference for being the first operand.
2904    We use negative values to indicate a preference for the first operand
2905    and positive values for the second operand.  */
2906 
2907 int
commutative_operand_precedence(op)2908 commutative_operand_precedence (op)
2909      rtx op;
2910 {
2911   /* Constants always come the second operand.  Prefer "nice" constants.  */
2912   if (GET_CODE (op) == CONST_INT)
2913     return -5;
2914   if (GET_CODE (op) == CONST_DOUBLE)
2915     return -4;
2916   if (CONSTANT_P (op))
2917     return -3;
2918 
2919   /* SUBREGs of objects should come second.  */
2920   if (GET_CODE (op) == SUBREG
2921       && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2922     return -2;
2923 
2924   /* If only one operand is a `neg', `not',
2925     `mult', `plus', or `minus' expression, it will be the first
2926     operand.  */
2927   if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2928       || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2929       || GET_CODE (op) == MINUS)
2930     return 2;
2931 
2932   /* Complex expressions should be the first, so decrease priority
2933      of objects.  */
2934   if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2935     return -1;
2936   return 0;
2937 }
2938 
2939 /* Return 1 iff it is necessary to swap operands of commutative operation
2940    in order to canonicalize expression.  */
2941 
2942 int
swap_commutative_operands_p(x,y)2943 swap_commutative_operands_p (x, y)
2944      rtx x, y;
2945 {
2946   return (commutative_operand_precedence (x)
2947 	  < commutative_operand_precedence (y));
2948 }
2949 
2950 /* Return 1 if X is an autoincrement side effect and the register is
2951    not the stack pointer.  */
2952 int
auto_inc_p(x)2953 auto_inc_p (x)
2954      rtx x;
2955 {
2956   switch (GET_CODE (x))
2957     {
2958     case PRE_INC:
2959     case POST_INC:
2960     case PRE_DEC:
2961     case POST_DEC:
2962     case PRE_MODIFY:
2963     case POST_MODIFY:
2964       /* There are no REG_INC notes for SP.  */
2965       if (XEXP (x, 0) != stack_pointer_rtx)
2966 	return 1;
2967     default:
2968       break;
2969     }
2970   return 0;
2971 }
2972 
2973 /* Return 1 if the sequence of instructions beginning with FROM and up
2974    to and including TO is safe to move.  If NEW_TO is non-NULL, and
2975    the sequence is not already safe to move, but can be easily
2976    extended to a sequence which is safe, then NEW_TO will point to the
2977    end of the extended sequence.
2978 
2979    For now, this function only checks that the region contains whole
2980    exception regions, but it could be extended to check additional
2981    conditions as well.  */
2982 
2983 int
insns_safe_to_move_p(from,to,new_to)2984 insns_safe_to_move_p (from, to, new_to)
2985      rtx from;
2986      rtx to;
2987      rtx *new_to;
2988 {
2989   int eh_region_count = 0;
2990   int past_to_p = 0;
2991   rtx r = from;
2992 
2993   /* By default, assume the end of the region will be what was
2994      suggested.  */
2995   if (new_to)
2996     *new_to = to;
2997 
2998   while (r)
2999     {
3000       if (GET_CODE (r) == NOTE)
3001 	{
3002 	  switch (NOTE_LINE_NUMBER (r))
3003 	    {
3004 	    case NOTE_INSN_EH_REGION_BEG:
3005 	      ++eh_region_count;
3006 	      break;
3007 
3008 	    case NOTE_INSN_EH_REGION_END:
3009 	      if (eh_region_count == 0)
3010 		/* This sequence of instructions contains the end of
3011 		   an exception region, but not he beginning.  Moving
3012 		   it will cause chaos.  */
3013 		return 0;
3014 
3015 	      --eh_region_count;
3016 	      break;
3017 
3018 	    default:
3019 	      break;
3020 	    }
3021 	}
3022       else if (past_to_p)
3023 	/* If we've passed TO, and we see a non-note instruction, we
3024 	   can't extend the sequence to a movable sequence.  */
3025 	return 0;
3026 
3027       if (r == to)
3028 	{
3029 	  if (!new_to)
3030 	    /* It's OK to move the sequence if there were matched sets of
3031 	       exception region notes.  */
3032 	    return eh_region_count == 0;
3033 
3034 	  past_to_p = 1;
3035 	}
3036 
3037       /* It's OK to move the sequence if there were matched sets of
3038 	 exception region notes.  */
3039       if (past_to_p && eh_region_count == 0)
3040 	{
3041 	  *new_to = r;
3042 	  return 1;
3043 	}
3044 
3045       /* Go to the next instruction.  */
3046       r = NEXT_INSN (r);
3047     }
3048 
3049   return 0;
3050 }
3051 
3052 /* Return nonzero if IN contains a piece of rtl that has the address LOC */
3053 int
loc_mentioned_in_p(loc,in)3054 loc_mentioned_in_p (loc, in)
3055      rtx *loc, in;
3056 {
3057   enum rtx_code code = GET_CODE (in);
3058   const char *fmt = GET_RTX_FORMAT (code);
3059   int i, j;
3060 
3061   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3062     {
3063       if (loc == &in->fld[i].rtx)
3064 	return 1;
3065       if (fmt[i] == 'e')
3066 	{
3067 	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
3068 	    return 1;
3069 	}
3070       else if (fmt[i] == 'E')
3071 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3072 	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3073 	    return 1;
3074     }
3075   return 0;
3076 }
3077 
3078 /* Given a subreg X, return the bit offset where the subreg begins
3079    (counting from the least significant bit of the reg).  */
3080 
3081 unsigned int
subreg_lsb(x)3082 subreg_lsb (x)
3083      rtx x;
3084 {
3085   enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3086   enum machine_mode mode = GET_MODE (x);
3087   unsigned int bitpos;
3088   unsigned int byte;
3089   unsigned int word;
3090 
3091   /* A paradoxical subreg begins at bit position 0.  */
3092   if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3093     return 0;
3094 
3095   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3096     /* If the subreg crosses a word boundary ensure that
3097        it also begins and ends on a word boundary.  */
3098     if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3099 	 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3100 	&& (SUBREG_BYTE (x) % UNITS_PER_WORD
3101 	    || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3102 	abort ();
3103 
3104   if (WORDS_BIG_ENDIAN)
3105     word = (GET_MODE_SIZE (inner_mode)
3106 	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3107   else
3108     word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3109   bitpos = word * BITS_PER_WORD;
3110 
3111   if (BYTES_BIG_ENDIAN)
3112     byte = (GET_MODE_SIZE (inner_mode)
3113 	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3114   else
3115     byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3116   bitpos += byte * BITS_PER_UNIT;
3117 
3118   return bitpos;
3119 }
3120 
3121 /* This function returns the regno offset of a subreg expression.
3122    xregno - A regno of an inner hard subreg_reg (or what will become one).
3123    xmode  - The mode of xregno.
3124    offset - The byte offset.
3125    ymode  - The mode of a top level SUBREG (or what may become one).
3126    RETURN - The regno offset which would be used.  */
3127 unsigned int
subreg_regno_offset(xregno,xmode,offset,ymode)3128 subreg_regno_offset (xregno, xmode, offset, ymode)
3129      unsigned int xregno;
3130      enum machine_mode xmode;
3131      unsigned int offset;
3132      enum machine_mode ymode;
3133 {
3134   int nregs_xmode, nregs_ymode;
3135   int mode_multiple, nregs_multiple;
3136   int y_offset;
3137 
3138   if (xregno >= FIRST_PSEUDO_REGISTER)
3139     abort ();
3140 
3141   nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3142   nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3143 
3144   /* If this is a big endian paradoxical subreg, which uses more actual
3145      hard registers than the original register, we must return a negative
3146      offset so that we find the proper highpart of the register.  */
3147   if (offset == 0
3148       && nregs_ymode > nregs_xmode
3149       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3150 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3151     return nregs_xmode - nregs_ymode;
3152 
3153   if (offset == 0 || nregs_xmode == nregs_ymode)
3154     return 0;
3155 
3156   /* size of ymode must not be greater than the size of xmode.  */
3157   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3158   if (mode_multiple == 0)
3159     abort ();
3160 
3161   y_offset = offset / GET_MODE_SIZE (ymode);
3162   nregs_multiple =  nregs_xmode / nregs_ymode;
3163   return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3164 }
3165 
3166 /* This function returns true when the offset is representable via
3167    subreg_offset in the given regno.
3168    xregno - A regno of an inner hard subreg_reg (or what will become one).
3169    xmode  - The mode of xregno.
3170    offset - The byte offset.
3171    ymode  - The mode of a top level SUBREG (or what may become one).
3172    RETURN - The regno offset which would be used.  */
3173 bool
subreg_offset_representable_p(xregno,xmode,offset,ymode)3174 subreg_offset_representable_p (xregno, xmode, offset, ymode)
3175      unsigned int xregno;
3176      enum machine_mode xmode;
3177      unsigned int offset;
3178      enum machine_mode ymode;
3179 {
3180   int nregs_xmode, nregs_ymode;
3181   int mode_multiple, nregs_multiple;
3182   int y_offset;
3183 
3184   if (xregno >= FIRST_PSEUDO_REGISTER)
3185     abort ();
3186 
3187   nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3188   nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3189 
3190   /* paradoxical subregs are always valid.  */
3191   if (offset == 0
3192       && nregs_ymode > nregs_xmode
3193       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3194 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3195     return true;
3196 
3197   /* Lowpart subregs are always valid.  */
3198   if (offset == subreg_lowpart_offset (ymode, xmode))
3199     return true;
3200 
3201 #ifdef ENABLE_CHECKING
3202   /* This should always pass, otherwise we don't know how to verify the
3203      constraint.  These conditions may be relaxed but subreg_offset would
3204      need to be redesigned.  */
3205   if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3206       || GET_MODE_SIZE (ymode) % nregs_ymode
3207       || nregs_xmode % nregs_ymode)
3208     abort ();
3209 #endif
3210 
3211   /* The XMODE value can be seen as an vector of NREGS_XMODE
3212      values.  The subreg must represent an lowpart of given field.
3213      Compute what field it is.  */
3214   offset -= subreg_lowpart_offset (ymode,
3215 		  		   mode_for_size (GET_MODE_BITSIZE (xmode)
3216 			  			  / nregs_xmode,
3217 						  MODE_INT, 0));
3218 
3219   /* size of ymode must not be greater than the size of xmode.  */
3220   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3221   if (mode_multiple == 0)
3222     abort ();
3223 
3224   y_offset = offset / GET_MODE_SIZE (ymode);
3225   nregs_multiple =  nregs_xmode / nregs_ymode;
3226 #ifdef ENABLE_CHECKING
3227   if (offset % GET_MODE_SIZE (ymode)
3228       || mode_multiple % nregs_multiple)
3229     abort ();
3230 #endif
3231   return (!(y_offset % (mode_multiple / nregs_multiple)));
3232 }
3233 
3234 /* Return the final regno that a subreg expression refers to.  */
3235 unsigned int
subreg_regno(x)3236 subreg_regno (x)
3237      rtx x;
3238 {
3239   unsigned int ret;
3240   rtx subreg = SUBREG_REG (x);
3241   int regno = REGNO (subreg);
3242 
3243   ret = regno + subreg_regno_offset (regno,
3244 				     GET_MODE (subreg),
3245 				     SUBREG_BYTE (x),
3246 				     GET_MODE (x));
3247   return ret;
3248 
3249 }
3250 struct parms_set_data
3251 {
3252   int nregs;
3253   HARD_REG_SET regs;
3254 };
3255 
3256 /* Helper function for noticing stores to parameter registers.  */
3257 static void
parms_set(x,pat,data)3258 parms_set (x, pat, data)
3259 	rtx x, pat ATTRIBUTE_UNUSED;
3260 	void *data;
3261 {
3262   struct parms_set_data *d = data;
3263   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3264       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3265     {
3266       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3267       d->nregs--;
3268     }
3269 }
3270 
3271 /* Look backward for first parameter to be loaded.
3272    Do not skip BOUNDARY.  */
3273 rtx
find_first_parameter_load(call_insn,boundary)3274 find_first_parameter_load (call_insn, boundary)
3275      rtx call_insn, boundary;
3276 {
3277   struct parms_set_data parm;
3278   rtx p, before;
3279 
3280   /* Since different machines initialize their parameter registers
3281      in different orders, assume nothing.  Collect the set of all
3282      parameter registers.  */
3283   CLEAR_HARD_REG_SET (parm.regs);
3284   parm.nregs = 0;
3285   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3286     if (GET_CODE (XEXP (p, 0)) == USE
3287 	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3288       {
3289 	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3290 	  abort ();
3291 
3292 	/* We only care about registers which can hold function
3293 	   arguments.  */
3294 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3295 	  continue;
3296 
3297 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3298 	parm.nregs++;
3299       }
3300   before = call_insn;
3301 
3302   /* Search backward for the first set of a register in this set.  */
3303   while (parm.nregs && before != boundary)
3304     {
3305       before = PREV_INSN (before);
3306 
3307       /* It is possible that some loads got CSEed from one call to
3308          another.  Stop in that case.  */
3309       if (GET_CODE (before) == CALL_INSN)
3310 	break;
3311 
3312       /* Our caller needs either ensure that we will find all sets
3313          (in case code has not been optimized yet), or take care
3314          for possible labels in a way by setting boundary to preceding
3315          CODE_LABEL.  */
3316       if (GET_CODE (before) == CODE_LABEL)
3317 	{
3318 	  if (before != boundary)
3319 	    abort ();
3320 	  break;
3321 	}
3322 
3323       if (INSN_P (before))
3324 	note_stores (PATTERN (before), parms_set, &parm);
3325     }
3326   return before;
3327 }
3328 
3329 /* Return true if we should avoid inserting code between INSN and preceeding
3330    call instruction.  */
3331 
3332 bool
keep_with_call_p(insn)3333 keep_with_call_p (insn)
3334      rtx insn;
3335 {
3336   rtx set;
3337 
3338   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3339     {
3340       if (GET_CODE (SET_DEST (set)) == REG
3341 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3342 	  && fixed_regs[REGNO (SET_DEST (set))]
3343 	  && general_operand (SET_SRC (set), VOIDmode))
3344 	return true;
3345       if (GET_CODE (SET_SRC (set)) == REG
3346 	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3347 	  && GET_CODE (SET_DEST (set)) == REG
3348 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3349 	return true;
3350       /* There may be a stack pop just after the call and before the store
3351 	 of the return register.  Search for the actual store when deciding
3352 	 if we can break or not.  */
3353       if (SET_DEST (set) == stack_pointer_rtx)
3354 	{
3355 	  rtx i2 = next_nonnote_insn (insn);
3356 	  if (i2 && keep_with_call_p (i2))
3357 	    return true;
3358 	}
3359     }
3360   return false;
3361 }
3362 
3363 /* Return true when store to register X can be hoisted to the place
3364    with LIVE registers (can be NULL).  Value VAL contains destination
3365    whose value will be used.  */
3366 
3367 static bool
hoist_test_store(x,val,live)3368 hoist_test_store (x, val, live)
3369      rtx x, val;
3370      regset live;
3371 {
3372   if (GET_CODE (x) == SCRATCH)
3373     return true;
3374 
3375   if (rtx_equal_p (x, val))
3376     return true;
3377 
3378   /* Allow subreg of X in case it is not writting just part of multireg pseudo.
3379      Then we would need to update all users to care hoisting the store too.
3380      Caller may represent that by specifying whole subreg as val.  */
3381 
3382   if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3383     {
3384       if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3385 	  && GET_MODE_BITSIZE (GET_MODE (x)) <
3386 	  GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3387 	return false;
3388       return true;
3389     }
3390   if (GET_CODE (x) == SUBREG)
3391     x = SUBREG_REG (x);
3392 
3393   /* Anything except register store is not hoistable.  This includes the
3394      partial stores to registers.  */
3395 
3396   if (!REG_P (x))
3397     return false;
3398 
3399   /* Pseudo registers can be allways replaced by another pseudo to avoid
3400      the side effect, for hard register we must ensure that they are dead.
3401      Eventually we may want to add code to try turn pseudos to hards, but it
3402      is unlikely usefull.  */
3403 
3404   if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3405     {
3406       int regno = REGNO (x);
3407       int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3408 
3409       if (!live)
3410 	return false;
3411       if (REGNO_REG_SET_P (live, regno))
3412 	return false;
3413       while (--n > 0)
3414 	if (REGNO_REG_SET_P (live, regno + n))
3415 	  return false;
3416     }
3417   return true;
3418 }
3419 
3420 
3421 /* Return true if INSN can be hoisted to place with LIVE hard registers
3422    (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
3423    and used by the hoisting pass.  */
3424 
3425 bool
can_hoist_insn_p(insn,val,live)3426 can_hoist_insn_p (insn, val, live)
3427      rtx insn, val;
3428      regset live;
3429 {
3430   rtx pat = PATTERN (insn);
3431   int i;
3432 
3433   /* It probably does not worth the complexity to handle multiple
3434      set stores.  */
3435   if (!single_set (insn))
3436     return false;
3437   /* We can move CALL_INSN, but we need to check that all caller clobbered
3438      regs are dead.  */
3439   if (GET_CODE (insn) == CALL_INSN)
3440     return false;
3441   /* In future we will handle hoisting of libcall sequences, but
3442      give up for now.  */
3443   if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3444     return false;
3445   switch (GET_CODE (pat))
3446     {
3447     case SET:
3448       if (!hoist_test_store (SET_DEST (pat), val, live))
3449 	return false;
3450       break;
3451     case USE:
3452       /* USES do have sick semantics, so do not move them.  */
3453       return false;
3454       break;
3455     case CLOBBER:
3456       if (!hoist_test_store (XEXP (pat, 0), val, live))
3457 	return false;
3458       break;
3459     case PARALLEL:
3460       for (i = 0; i < XVECLEN (pat, 0); i++)
3461 	{
3462 	  rtx x = XVECEXP (pat, 0, i);
3463 	  switch (GET_CODE (x))
3464 	    {
3465 	    case SET:
3466 	      if (!hoist_test_store (SET_DEST (x), val, live))
3467 		return false;
3468 	      break;
3469 	    case USE:
3470 	      /* We need to fix callers to really ensure availability
3471 	         of all values inisn uses, but for now it is safe to prohibit
3472 		 hoisting of any insn having such a hiden uses.  */
3473 	      return false;
3474 	      break;
3475 	    case CLOBBER:
3476 	      if (!hoist_test_store (SET_DEST (x), val, live))
3477 		return false;
3478 	      break;
3479 	    default:
3480 	      break;
3481 	    }
3482 	}
3483       break;
3484     default:
3485       abort ();
3486     }
3487   return true;
3488 }
3489 
3490 /* Update store after hoisting - replace all stores to pseudo registers
3491    by new ones to avoid clobbering of values except for store to VAL that will
3492    be updated to NEW.  */
3493 
3494 static void
hoist_update_store(insn,xp,val,new)3495 hoist_update_store (insn, xp, val, new)
3496      rtx insn, *xp, val, new;
3497 {
3498   rtx x = *xp;
3499 
3500   if (GET_CODE (x) == SCRATCH)
3501     return;
3502 
3503   if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3504     validate_change (insn, xp,
3505 		     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3506 					  SUBREG_BYTE (x)), 1);
3507   if (rtx_equal_p (x, val))
3508     {
3509       validate_change (insn, xp, new, 1);
3510       return;
3511     }
3512   if (GET_CODE (x) == SUBREG)
3513     {
3514       xp = &SUBREG_REG (x);
3515       x = *xp;
3516     }
3517 
3518   if (!REG_P (x))
3519     abort ();
3520 
3521   /* We've verified that hard registers are dead, so we may keep the side
3522      effect.  Otherwise replace it by new pseudo.  */
3523   if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3524     validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3525   REG_NOTES (insn)
3526     = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3527 }
3528 
3529 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3530    and each other side effect to pseudo register by new pseudo register.  */
3531 
3532 rtx
hoist_insn_after(insn,after,val,new)3533 hoist_insn_after (insn, after, val, new)
3534      rtx insn, after, val, new;
3535 {
3536   rtx pat;
3537   int i;
3538   rtx note;
3539 
3540   insn = emit_copy_of_insn_after (insn, after);
3541   pat = PATTERN (insn);
3542 
3543   /* Remove REG_UNUSED notes as we will re-emit them.  */
3544   while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3545     remove_note (insn, note);
3546 
3547   /* To get this working callers must ensure to move everything referenced
3548      by REG_EQUAL/REG_EQUIV notes too.  Lets remove them, it is probably
3549      easier.  */
3550   while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3551     remove_note (insn, note);
3552   while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3553     remove_note (insn, note);
3554 
3555   /* Remove REG_DEAD notes as they might not be valid anymore in case
3556      we create redundancy.  */
3557   while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3558     remove_note (insn, note);
3559   switch (GET_CODE (pat))
3560     {
3561     case SET:
3562       hoist_update_store (insn, &SET_DEST (pat), val, new);
3563       break;
3564     case USE:
3565       break;
3566     case CLOBBER:
3567       hoist_update_store (insn, &XEXP (pat, 0), val, new);
3568       break;
3569     case PARALLEL:
3570       for (i = 0; i < XVECLEN (pat, 0); i++)
3571 	{
3572 	  rtx x = XVECEXP (pat, 0, i);
3573 	  switch (GET_CODE (x))
3574 	    {
3575 	    case SET:
3576 	      hoist_update_store (insn, &SET_DEST (x), val, new);
3577 	      break;
3578 	    case USE:
3579 	      break;
3580 	    case CLOBBER:
3581 	      hoist_update_store (insn, &SET_DEST (x), val, new);
3582 	      break;
3583 	    default:
3584 	      break;
3585 	    }
3586 	}
3587       break;
3588     default:
3589       abort ();
3590     }
3591   if (!apply_change_group ())
3592     abort ();
3593 
3594   return insn;
3595 }
3596 
3597 rtx
hoist_insn_to_edge(insn,e,val,new)3598 hoist_insn_to_edge (insn, e, val, new)
3599      rtx insn, val, new;
3600      edge e;
3601 {
3602   rtx new_insn;
3603 
3604   /* We cannot insert instructions on an abnormal critical edge.
3605      It will be easier to find the culprit if we die now.  */
3606   if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3607     abort ();
3608 
3609   /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3610      stuff.  We also emit CALL_INSNS and firends.  */
3611   if (e->insns == NULL_RTX)
3612     {
3613       start_sequence ();
3614       emit_note (NULL, NOTE_INSN_DELETED);
3615     }
3616   else
3617     push_to_sequence (e->insns);
3618 
3619   new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3620 
3621   e->insns = get_insns ();
3622   end_sequence ();
3623   return new_insn;
3624 }
3625