/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
	 that must happen after a call.  This currently screws up local-alloc
	 into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
	return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_unstable_p (XEXP (x, i)))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_unstable_p (XVECEXP (x, i, j)))
	    return 1;
      }

  return 0;
}
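
/* Usage sketch for rtx_unstable_p (an illustrative example; the pseudo
   register number 100 is hypothetical):

     rtx pseudo = gen_rtx_REG (SImode, 100);
     rtx_unstable_p (pseudo);             // 1: a pseudo may change value
     rtx_unstable_p (GEN_INT (42));       // 0: constants are stable
     rtx_unstable_p (frame_pointer_rtx);  // 0: stable within one function
*/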

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
	  /* ??? When call-clobbered, the value is stable modulo the restore
	     that must happen after a call.  This currently screws up
	     local-alloc into believing that the restore is not needed, so we
	     must return 0 only if we are called from alias analysis.  */
	  && for_alias
#endif
	  )
	return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
	 (in fact it is related specifically to operand 1)
	 during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
	     || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_varies_p (XEXP (x, i), for_alias))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
	    return 1;
      }

  return 0;
}
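
/* Sketch contrasting with rtx_unstable_p (illustrative; the pseudo
   register number is hypothetical):

     rtx_varies_p (frame_pointer_rtx, 0);          // 0: treated as constant
     rtx_varies_p (gen_rtx_REG (SImode, 100), 0);  // 1: a pseudo varies
     // With FOR_ALIAS nonzero, operand 0 of a LO_SUM is additionally
     // treated as constant, so only operand 1 is examined.
*/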

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  || x == stack_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
	 - it is an address that can't trap plus a constant integer,
	   with the proper remainder modulo the mode size if we are
	   considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  HOST_WIDE_INT offset;

	  if (!STRICT_ALIGNMENT
	      || !unaligned_mems
	      || GET_MODE_SIZE (mode) == 0)
	    return 0;

	  offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
	  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
	     the real alignment of %sp.  However, when it does this, the
	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
	  if (SPARC_STACK_BOUNDARY_HACK
	      && (XEXP (x, 0) == stack_pointer_rtx
		  || XEXP (x, 0) == hard_frame_pointer_rtx))
	    offset -= STACK_POINTER_OFFSET;
#endif

	  return offset % GET_MODE_SIZE (mode) != 0;
	}

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
	return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
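
/* Sketch (illustrative; the offset and register number are hypothetical):

     rtx sp_off = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (8));
     rtx_addr_can_trap_p (sp_off);                    // 0: a stack slot
     rtx_addr_can_trap_p (gen_rtx_REG (Pmode, 100));  // 1: unknown pointer
*/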

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  || x == stack_pointer_rtx
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
	       && CONSTANT_P (XEXP (x, 1)))
	return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
	 auto-inc is only allowed in memories, the register must be a
	 pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
	    return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}
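
/* Sketch (illustrative; the symbol name is hypothetical):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "x");
     get_integer_term (gen_rtx_PLUS (Pmode, sym, GEN_INT (4)));   //  4
     get_integer_term (gen_rtx_MINUS (Pmode, sym, GEN_INT (4)));  // -4
     get_integer_term (sym);                                      //  0
*/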

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
	return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
	return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  count += count_occurrences (XEXP (x, i), find, count_dest);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
	  break;
	}
    }
  return count;
}
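
/* Sketch (illustrative; register numbers are hypothetical).  Occurrences
   are matched by pointer identity (x == find) except for MEMs, so the
   same rtx object must be reused:

     rtx r2 = gen_rtx_REG (SImode, 2);
     rtx pat = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, 1),
			    gen_rtx_PLUS (SImode, r2, r2));
     count_occurrences (pat, r2, 1);  // 2: both uses in the PLUS
*/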

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
	 and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && reg_mentioned_p (reg, XEXP (in, i)))
	return 1;
    }
  return 0;
}
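
/* Sketch (illustrative; register numbers are hypothetical).  Unlike
   count_occurrences, REGs are matched by register number, so distinct
   rtx objects for the same register still match:

     rtx use = gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
     reg_mentioned_p (gen_rtx_REG (SImode, 2), use);  // 1
     reg_mentioned_p (gen_rtx_REG (SImode, 3), use);  // 0
*/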

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
	   || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
	return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn references X if
	 it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
	  && GET_CODE (SET_DEST (body)) != PC
	  && !REG_P (SET_DEST (body))
	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (body)))
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
	return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
	  return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
	  return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
	return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
	  || (CALL_P (insn)
	      && ((REG_P (reg)
		   && REGNO (reg) < FIRST_PSEUDO_REGISTER
		   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
					 REGNO (reg)))
		  || MEM_P (reg)
		  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
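
/* Sketch (illustrative; register numbers are hypothetical).  reg_set_p
   accepts a whole insn or just a pattern:

     rtx r1 = gen_rtx_REG (SImode, 1);
     rtx pat = gen_rtx_SET (VOIDmode, r1, const0_rtx);
     reg_set_p (r1, pat);                       // 1: r1 is written
     reg_set_p (gen_rtx_REG (SImode, 2), pat);  // 0: r2 is untouched
*/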

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
	return 1;
      if (MEM_READONLY_P (x))
	return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
	if (memory_modified_in_insn_p (x, insn))
	  return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_between_p (XVECEXP (x, i, j), start, end))
	    return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
	return 1;
      if (MEM_READONLY_P (x))
	return 0;
      if (memory_modified_in_insn_p (x, insn))
	return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_in_p (XVECEXP (x, i, j), insn))
	    return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
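
/* Sketch (illustrative; register numbers are hypothetical): for a
   pattern like
     (parallel [(set (reg:SI 1) (reg:SI 2)) (clobber (reg:SI 3))])
   set_of on reg 3 returns the CLOBBER, set_of on reg 1 returns the SET,
   and set_of on reg 2 returns NULL_RTX, since reg 2 is only read.  */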

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))
	    {
	    case USE:
	    case CLOBBER:
	      break;

	    case SET:
	      /* We can consider insns having multiple sets, where all
		 but one are dead, as single set insns.  In the common
		 case only a single set is present in the pattern so we
		 want to avoid checking for REG_UNUSED notes unless
		 necessary.

		 When we reach a set the first time, we just expect it is
		 the single set we are looking for and only when more
		 sets are found in the insn, we check them.  */
	      if (!set_verified)
		{
		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		      && !side_effects_p (set))
		    set = NULL;
		  else
		    set_verified = 1;
		}
	      if (!set)
		set = sub, set_verified = 0;
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
		return NULL_RTX;
	      break;

	    default:
	      return NULL_RTX;
	    }
	}
    }
  return set;
}
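
/* Sketch (illustrative).  single_set_2 is normally reached through the
   single_set macro in rtl.h, which handles the non-PARALLEL case inline:

     rtx set = single_set (insn);
     if (set)
       {
	 rtx dest = SET_DEST (set);  // the one live destination
	 rtx src = SET_SRC (set);    // its source
       }
*/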

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
	  {
	    /* If we have already found a SET, then return now.  */
	    if (found)
	      return 1;
	    else
	      found = 1;
	  }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
	   && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
	return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
	  && REGNO (src) == REGNO (dst));
}
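
/* Sketch (illustrative; the register number is hypothetical):

     rtx r5 = gen_rtx_REG (SImode, 5);
     set_noop_p (gen_rtx_SET (VOIDmode, r5, r5));          // 1: no-op
     set_noop_p (gen_rtx_SET (VOIDmode, r5, const0_rtx));  // 0: real work
*/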

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx tem = XVECEXP (pat, 0, i);

	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)
	    continue;

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
	    return 0;
	}

      return 1;
    }
  return 0;
}

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
	rtx set = single_set (p);
	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

	if (set && rtx_equal_p (x, SET_DEST (set)))
	  {
	    rtx src = SET_SRC (set);

	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
	      src = XEXP (note, 0);

	    if ((valid_to == NULL_RTX
		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
		/* Reject hard registers because we don't usually want
		   to use them; we'd rather use a pseudo.  */
		&& (! (REG_P (src)
		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
	      {
		*pinsn = p;
		return src;
	      }
	  }

	/* If set in non-simple way, we don't have a value.  */
	if (reg_set_p (x, p))
	  break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
		   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
	 clobber a virtual register.  In fact, we could be more precise,
	 but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	   || x_regno == ARG_POINTER_REGNUM
#endif
	   || x_regno == FRAME_POINTER_REGNUM)
	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
	return 1;

      return (endregno > x_regno
	      && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
				    ? hard_regno_nregs[x_regno][GET_MODE (x)]
				    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

	  return endregno > inner_regno && regno < inner_endregno;
	}
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_p (regno, endregno,
				     SUBREG_REG (SET_DEST (x)), loc))
	      || (!REG_P (SET_DEST (x))
		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
	return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
	return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	{
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }
	  else
	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
	      return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
	      return 1;
	}
    }
  return 0;
}
1231*404b540aSrobert 
1232*404b540aSrobert /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1233*404b540aSrobert    we check if any register number in X conflicts with the relevant register
1234*404b540aSrobert    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1235*404b540aSrobert    contains a MEM (we don't bother checking for memory addresses that can't
1236*404b540aSrobert    conflict because we expect this to be a rare case).  */
1237*404b540aSrobert 
1238*404b540aSrobert int
1239*404b540aSrobert reg_overlap_mentioned_p (rtx x, rtx in)
1240*404b540aSrobert {
1241*404b540aSrobert   unsigned int regno, endregno;
1242*404b540aSrobert 
1243*404b540aSrobert   /* If either argument is a constant, then modifying X cannot
1244*404b540aSrobert      affect IN.  Here we check only IN; the corresponding test of X
1245*404b540aSrobert      is folded into the default case of the switch below.  */
1246*404b540aSrobert   if (CONSTANT_P (in))
1247*404b540aSrobert     return 0;
1248*404b540aSrobert 
1249*404b540aSrobert  recurse:
1250*404b540aSrobert   switch (GET_CODE (x))
1251*404b540aSrobert     {
1252*404b540aSrobert     case STRICT_LOW_PART:
1253*404b540aSrobert     case ZERO_EXTRACT:
1254*404b540aSrobert     case SIGN_EXTRACT:
1255*404b540aSrobert       /* Overly conservative.  */
1256*404b540aSrobert       x = XEXP (x, 0);
1257*404b540aSrobert       goto recurse;
1258*404b540aSrobert 
1259*404b540aSrobert     case SUBREG:
1260*404b540aSrobert       regno = REGNO (SUBREG_REG (x));
1261*404b540aSrobert       if (regno < FIRST_PSEUDO_REGISTER)
1262*404b540aSrobert 	regno = subreg_regno (x);
1263*404b540aSrobert       goto do_reg;
1264*404b540aSrobert 
1265*404b540aSrobert     case REG:
1266*404b540aSrobert       regno = REGNO (x);
1267*404b540aSrobert     do_reg:
1268*404b540aSrobert       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1269*404b540aSrobert 			  ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1270*404b540aSrobert       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1271*404b540aSrobert 
1272*404b540aSrobert     case MEM:
1273*404b540aSrobert       {
1274*404b540aSrobert 	const char *fmt;
1275*404b540aSrobert 	int i;
1276*404b540aSrobert 
1277*404b540aSrobert 	if (MEM_P (in))
1278*404b540aSrobert 	  return 1;
1279*404b540aSrobert 
1280*404b540aSrobert 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1281*404b540aSrobert 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1282*404b540aSrobert 	  if (fmt[i] == 'e')
1283*404b540aSrobert 	    {
1284*404b540aSrobert 	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1285*404b540aSrobert 		return 1;
1286*404b540aSrobert 	    }
1287*404b540aSrobert 	  else if (fmt[i] == 'E')
1288*404b540aSrobert 	    {
1289*404b540aSrobert 	      int j;
1290*404b540aSrobert 	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1291*404b540aSrobert 		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1292*404b540aSrobert 		  return 1;
1293*404b540aSrobert 	    }
1294*404b540aSrobert 
1295*404b540aSrobert 	return 0;
1296*404b540aSrobert       }
1297*404b540aSrobert 
1298*404b540aSrobert     case SCRATCH:
1299*404b540aSrobert     case PC:
1300*404b540aSrobert     case CC0:
1301*404b540aSrobert       return reg_mentioned_p (x, in);
1302*404b540aSrobert 
1303*404b540aSrobert     case PARALLEL:
1304*404b540aSrobert       {
1305*404b540aSrobert 	int i;
1306*404b540aSrobert 
1307*404b540aSrobert 	/* If any register in here overlaps IN, return true.  */
1308*404b540aSrobert 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1309*404b540aSrobert 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1310*404b540aSrobert 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1311*404b540aSrobert 	    return 1;
1312*404b540aSrobert 	return 0;
1313*404b540aSrobert       }
1314*404b540aSrobert 
1315*404b540aSrobert     default:
1316*404b540aSrobert       gcc_assert (CONSTANT_P (x));
1317*404b540aSrobert       return 0;
1318*404b540aSrobert     }
1319*404b540aSrobert }
1320*404b540aSrobert 
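/* Usage sketch (not part of the original file): a small helper built
   on reg_overlap_mentioned_p.  It asks whether OTHER's single-set
   source reads anything that INSN's single-set destination writes,
   which is the classic "can these two insns be reordered?" test.
   The helper name is hypothetical, and both insns are assumed to
   have simple SET patterns.  */

static int
write_read_conflict_p (rtx insn, rtx other)
{
  rtx set1 = single_set (insn);
  rtx set2 = single_set (other);

  return (set1 && set2
	  && reg_overlap_mentioned_p (SET_DEST (set1), SET_SRC (set2)));
}
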
1321*404b540aSrobert /* Call FUN on each register or MEM that is stored into or clobbered by X.
1322*404b540aSrobert    (X would be the pattern of an insn).
1323*404b540aSrobert    FUN receives two arguments:
1324*404b540aSrobert      the REG, MEM, CC0 or PC being stored in or clobbered,
1325*404b540aSrobert      the SET or CLOBBER rtx that does the store.
1326*404b540aSrobert 
1327*404b540aSrobert   If the item being stored in or clobbered is a SUBREG of a hard register,
1328*404b540aSrobert   the SUBREG will be passed.  */
1329*404b540aSrobert 
1330*404b540aSrobert void
1331*404b540aSrobert note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1332*404b540aSrobert {
1333*404b540aSrobert   int i;
1334*404b540aSrobert 
1335*404b540aSrobert   if (GET_CODE (x) == COND_EXEC)
1336*404b540aSrobert     x = COND_EXEC_CODE (x);
1337*404b540aSrobert 
1338*404b540aSrobert   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1339*404b540aSrobert     {
1340*404b540aSrobert       rtx dest = SET_DEST (x);
1341*404b540aSrobert 
1342*404b540aSrobert       while ((GET_CODE (dest) == SUBREG
1343*404b540aSrobert 	      && (!REG_P (SUBREG_REG (dest))
1344*404b540aSrobert 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1345*404b540aSrobert 	     || GET_CODE (dest) == ZERO_EXTRACT
1346*404b540aSrobert 	     || GET_CODE (dest) == STRICT_LOW_PART)
1347*404b540aSrobert 	dest = XEXP (dest, 0);
1348*404b540aSrobert 
1349*404b540aSrobert       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1350*404b540aSrobert 	 each of whose first operand is a register.  */
1351*404b540aSrobert       if (GET_CODE (dest) == PARALLEL)
1352*404b540aSrobert 	{
1353*404b540aSrobert 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1354*404b540aSrobert 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1355*404b540aSrobert 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1356*404b540aSrobert 	}
1357*404b540aSrobert       else
1358*404b540aSrobert 	(*fun) (dest, x, data);
1359*404b540aSrobert     }
1360*404b540aSrobert 
1361*404b540aSrobert   else if (GET_CODE (x) == PARALLEL)
1362*404b540aSrobert     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1363*404b540aSrobert       note_stores (XVECEXP (x, 0, i), fun, data);
1364*404b540aSrobert }
1365*404b540aSrobert 
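/* Illustrative note_stores callback (not part of the original file).
   DATA points to a regno_store_search, a hypothetical structure that
   pairs the register number of interest with a result flag.  Note
   that a SUBREG of a hard register may be passed here too, which a
   production version would also want to check.  */

struct regno_store_search
{
  unsigned int regno;
  int found;
};

static void
record_regno_store (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct regno_store_search *s = (struct regno_store_search *) data;

  if (REG_P (x) && REGNO (x) == s->regno)
    s->found = 1;
}

/* A caller scans a single insn like this:

     struct regno_store_search s = { regno, 0 };
     note_stores (PATTERN (insn), record_regno_store, &s);
     if (s.found)
       ...INSN stores to or clobbers the register...  */
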
1366*404b540aSrobert /* Like note_stores, but call FUN for each expression that is being
1367*404b540aSrobert    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1368*404b540aSrobert    FUN for each expression, not any interior subexpressions.  FUN receives a
1369*404b540aSrobert    pointer to the expression and the DATA passed to this function.
1370*404b540aSrobert 
1371*404b540aSrobert    Note that this is not quite the same test as that done in reg_referenced_p
1372*404b540aSrobert    since that considers something as being referenced if it is being
1373*404b540aSrobert    partially set, while we do not.  */
1374*404b540aSrobert 
1375*404b540aSrobert void
1376*404b540aSrobert note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1377*404b540aSrobert {
1378*404b540aSrobert   rtx body = *pbody;
1379*404b540aSrobert   int i;
1380*404b540aSrobert 
1381*404b540aSrobert   switch (GET_CODE (body))
1382*404b540aSrobert     {
1383*404b540aSrobert     case COND_EXEC:
1384*404b540aSrobert       (*fun) (&COND_EXEC_TEST (body), data);
1385*404b540aSrobert       note_uses (&COND_EXEC_CODE (body), fun, data);
1386*404b540aSrobert       return;
1387*404b540aSrobert 
1388*404b540aSrobert     case PARALLEL:
1389*404b540aSrobert       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1390*404b540aSrobert 	note_uses (&XVECEXP (body, 0, i), fun, data);
1391*404b540aSrobert       return;
1392*404b540aSrobert 
1393*404b540aSrobert     case USE:
1394*404b540aSrobert       (*fun) (&XEXP (body, 0), data);
1395*404b540aSrobert       return;
1396*404b540aSrobert 
1397*404b540aSrobert     case ASM_OPERANDS:
1398*404b540aSrobert       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1399*404b540aSrobert 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1400*404b540aSrobert       return;
1401*404b540aSrobert 
1402*404b540aSrobert     case TRAP_IF:
1403*404b540aSrobert       (*fun) (&TRAP_CONDITION (body), data);
1404*404b540aSrobert       return;
1405*404b540aSrobert 
1406*404b540aSrobert     case PREFETCH:
1407*404b540aSrobert       (*fun) (&XEXP (body, 0), data);
1408*404b540aSrobert       return;
1409*404b540aSrobert 
1410*404b540aSrobert     case UNSPEC:
1411*404b540aSrobert     case UNSPEC_VOLATILE:
1412*404b540aSrobert       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1413*404b540aSrobert 	(*fun) (&XVECEXP (body, 0, i), data);
1414*404b540aSrobert       return;
1415*404b540aSrobert 
1416*404b540aSrobert     case CLOBBER:
1417*404b540aSrobert       if (MEM_P (XEXP (body, 0)))
1418*404b540aSrobert 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1419*404b540aSrobert       return;
1420*404b540aSrobert 
1421*404b540aSrobert     case SET:
1422*404b540aSrobert       {
1423*404b540aSrobert 	rtx dest = SET_DEST (body);
1424*404b540aSrobert 
1425*404b540aSrobert 	/* For a SET we visit the whole source, the operands of a
1426*404b540aSrobert 	   ZERO_EXTRACT destination, and a memory destination's address.  */
1427*404b540aSrobert 	(*fun) (&SET_SRC (body), data);
1428*404b540aSrobert 
1429*404b540aSrobert 	if (GET_CODE (dest) == ZERO_EXTRACT)
1430*404b540aSrobert 	  {
1431*404b540aSrobert 	    (*fun) (&XEXP (dest, 1), data);
1432*404b540aSrobert 	    (*fun) (&XEXP (dest, 2), data);
1433*404b540aSrobert 	  }
1434*404b540aSrobert 
1435*404b540aSrobert 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1436*404b540aSrobert 	  dest = XEXP (dest, 0);
1437*404b540aSrobert 
1438*404b540aSrobert 	if (MEM_P (dest))
1439*404b540aSrobert 	  (*fun) (&XEXP (dest, 0), data);
1440*404b540aSrobert       }
1441*404b540aSrobert       return;
1442*404b540aSrobert 
1443*404b540aSrobert     default:
1444*404b540aSrobert       /* All the other possibilities never store.  */
1445*404b540aSrobert       (*fun) (pbody, data);
1446*404b540aSrobert       return;
1447*404b540aSrobert     }
1448*404b540aSrobert }
1449*404b540aSrobert 
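/* Illustrative note_uses callback (not part of the original file).
   Because FUN receives a pointer into the pattern, it may rewrite the
   use in place.  This one substitutes one rtx for another in every
   referenced expression; the two rtxen arrive through a hypothetical
   two-element array in DATA.  */

static void
substitute_in_use (rtx *px, void *data)
{
  rtx *pair = (rtx *) data;

  *px = replace_rtx (*px, pair[0], pair[1]);
}

/* A caller rewrites one insn like this:

     rtx pair[2] = { from, to };
     note_uses (&PATTERN (insn), substitute_in_use, pair);  */
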
1450*404b540aSrobert /* Return nonzero if X's old contents don't survive after INSN.
1451*404b540aSrobert    This will be true if X is (cc0) or if X is a register and
1452*404b540aSrobert    X dies in INSN or because INSN entirely sets X.
1453*404b540aSrobert 
1454*404b540aSrobert    "Entirely set" means set directly and not through a SUBREG or
1455*404b540aSrobert    ZERO_EXTRACT, so no trace of the old contents remains.
1456*404b540aSrobert    Likewise, REG_INC does not count.
1457*404b540aSrobert 
1458*404b540aSrobert    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1459*404b540aSrobert    but for this use that makes no difference, since regs don't overlap
1460*404b540aSrobert    during their lifetimes.  Therefore, this function may be used
1461*404b540aSrobert    at any time after deaths have been computed (in flow.c).
1462*404b540aSrobert 
1463*404b540aSrobert    If REG is a hard reg that occupies multiple machine registers, this
1464*404b540aSrobert    function will only return 1 if each of those registers will be replaced
1465*404b540aSrobert    by INSN.  */
1466*404b540aSrobert 
1467*404b540aSrobert int
1468*404b540aSrobert dead_or_set_p (rtx insn, rtx x)
1469*404b540aSrobert {
1470*404b540aSrobert   unsigned int regno, last_regno;
1471*404b540aSrobert   unsigned int i;
1472*404b540aSrobert 
1473*404b540aSrobert   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1474*404b540aSrobert   if (GET_CODE (x) == CC0)
1475*404b540aSrobert     return 1;
1476*404b540aSrobert 
1477*404b540aSrobert   gcc_assert (REG_P (x));
1478*404b540aSrobert 
1479*404b540aSrobert   regno = REGNO (x);
1480*404b540aSrobert   last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1481*404b540aSrobert 		: regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1482*404b540aSrobert 
1483*404b540aSrobert   for (i = regno; i <= last_regno; i++)
1484*404b540aSrobert     if (! dead_or_set_regno_p (insn, i))
1485*404b540aSrobert       return 0;
1486*404b540aSrobert 
1487*404b540aSrobert   return 1;
1488*404b540aSrobert }
1489*404b540aSrobert 
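/* Usage sketch (not part of the original file): a typical caller asks
   whether a register read by INSN carries a value that is no longer
   needed afterwards, e.g. so that the register can be reused as a
   temporary.  The helper name is hypothetical; INSN is assumed to be
   a single-set insn whose source is a plain REG.  */

static int
src_reg_reusable_after_p (rtx insn)
{
  rtx set = single_set (insn);

  return (set && REG_P (SET_SRC (set))
	  && dead_or_set_p (insn, SET_SRC (set)));
}
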
1490*404b540aSrobert /* Return TRUE iff DEST is a register or subreg of a register and
1491*404b540aSrobert    doesn't change the number of words of the inner register, and any
1492*404b540aSrobert    part of the register is TEST_REGNO.  */
1493*404b540aSrobert 
1494*404b540aSrobert static bool
1495*404b540aSrobert covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1496*404b540aSrobert {
1497*404b540aSrobert   unsigned int regno, endregno;
1498*404b540aSrobert 
1499*404b540aSrobert   if (GET_CODE (dest) == SUBREG
1500*404b540aSrobert       && (((GET_MODE_SIZE (GET_MODE (dest))
1501*404b540aSrobert 	    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1502*404b540aSrobert 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1503*404b540aSrobert 	       + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1504*404b540aSrobert     dest = SUBREG_REG (dest);
1505*404b540aSrobert 
1506*404b540aSrobert   if (!REG_P (dest))
1507*404b540aSrobert     return false;
1508*404b540aSrobert 
1509*404b540aSrobert   regno = REGNO (dest);
1510*404b540aSrobert   endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1511*404b540aSrobert 	      : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1512*404b540aSrobert   return (test_regno >= regno && test_regno < endregno);
1513*404b540aSrobert }
1514*404b540aSrobert 
1515*404b540aSrobert /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1516*404b540aSrobert    any member matches the covers_regno_no_parallel_p criteria.  */
1517*404b540aSrobert 
1518*404b540aSrobert static bool
1519*404b540aSrobert covers_regno_p (rtx dest, unsigned int test_regno)
1520*404b540aSrobert {
1521*404b540aSrobert   if (GET_CODE (dest) == PARALLEL)
1522*404b540aSrobert     {
1523*404b540aSrobert       /* Some targets place small structures in registers for return
1524*404b540aSrobert 	 values of functions, and those registers are wrapped in
1525*404b540aSrobert 	 PARALLELs that we may see as the destination of a SET.  */
1526*404b540aSrobert       int i;
1527*404b540aSrobert 
1528*404b540aSrobert       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1529*404b540aSrobert 	{
1530*404b540aSrobert 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1531*404b540aSrobert 	  if (inner != NULL_RTX
1532*404b540aSrobert 	      && covers_regno_no_parallel_p (inner, test_regno))
1533*404b540aSrobert 	    return true;
1534*404b540aSrobert 	}
1535*404b540aSrobert 
1536*404b540aSrobert       return false;
1537*404b540aSrobert     }
1538*404b540aSrobert   else
1539*404b540aSrobert     return covers_regno_no_parallel_p (dest, test_regno);
1540*404b540aSrobert }
1541*404b540aSrobert 
1542*404b540aSrobert /* Utility function for dead_or_set_p to check an individual register.  Also
1543*404b540aSrobert    called from flow.c.  */
1544*404b540aSrobert 
1545*404b540aSrobert int
1546*404b540aSrobert dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1547*404b540aSrobert {
1548*404b540aSrobert   rtx pattern;
1549*404b540aSrobert 
1550*404b540aSrobert   /* See if there is a death note for something that includes TEST_REGNO.  */
1551*404b540aSrobert   if (find_regno_note (insn, REG_DEAD, test_regno))
1552*404b540aSrobert     return 1;
1553*404b540aSrobert 
1554*404b540aSrobert   if (CALL_P (insn)
1555*404b540aSrobert       && find_regno_fusage (insn, CLOBBER, test_regno))
1556*404b540aSrobert     return 1;
1557*404b540aSrobert 
1558*404b540aSrobert   pattern = PATTERN (insn);
1559*404b540aSrobert 
1560*404b540aSrobert   if (GET_CODE (pattern) == COND_EXEC)
1561*404b540aSrobert     pattern = COND_EXEC_CODE (pattern);
1562*404b540aSrobert 
1563*404b540aSrobert   if (GET_CODE (pattern) == SET)
1564*404b540aSrobert     return covers_regno_p (SET_DEST (pattern), test_regno);
1565*404b540aSrobert   else if (GET_CODE (pattern) == PARALLEL)
1566*404b540aSrobert     {
1567*404b540aSrobert       int i;
1568*404b540aSrobert 
1569*404b540aSrobert       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1570*404b540aSrobert 	{
1571*404b540aSrobert 	  rtx body = XVECEXP (pattern, 0, i);
1572*404b540aSrobert 
1573*404b540aSrobert 	  if (GET_CODE (body) == COND_EXEC)
1574*404b540aSrobert 	    body = COND_EXEC_CODE (body);
1575*404b540aSrobert 
1576*404b540aSrobert 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1577*404b540aSrobert 	      && covers_regno_p (SET_DEST (body), test_regno))
1578*404b540aSrobert 	    return 1;
1579*404b540aSrobert 	}
1580*404b540aSrobert     }
1581*404b540aSrobert 
1582*404b540aSrobert   return 0;
1583*404b540aSrobert }
1584*404b540aSrobert 
1585*404b540aSrobert /* Return the reg-note of kind KIND in insn INSN, if there is one.
1586*404b540aSrobert    If DATUM is nonzero, look for one whose datum is DATUM.  */
1587*404b540aSrobert 
1588*404b540aSrobert rtx
1589*404b540aSrobert find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1590*404b540aSrobert {
1591*404b540aSrobert   rtx link;
1592*404b540aSrobert 
1593*404b540aSrobert   gcc_assert (insn);
1594*404b540aSrobert 
1595*404b540aSrobert   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1596*404b540aSrobert   if (! INSN_P (insn))
1597*404b540aSrobert     return 0;
1598*404b540aSrobert   if (datum == 0)
1599*404b540aSrobert     {
1600*404b540aSrobert       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1601*404b540aSrobert 	if (REG_NOTE_KIND (link) == kind)
1602*404b540aSrobert 	  return link;
1603*404b540aSrobert       return 0;
1604*404b540aSrobert     }
1605*404b540aSrobert 
1606*404b540aSrobert   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1607*404b540aSrobert     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1608*404b540aSrobert       return link;
1609*404b540aSrobert   return 0;
1610*404b540aSrobert }
1611*404b540aSrobert 
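/* Usage sketch (not part of the original file): retrieve the value
   recorded by a REG_EQUAL note, if INSN has one.  The datum of a
   reg-note is its operand 0.  The helper name is hypothetical.  */

static rtx
reg_equal_value (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}
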
1612*404b540aSrobert /* Return the reg-note of kind KIND in insn INSN which applies to register
1613*404b540aSrobert    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1614*404b540aSrobert    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1615*404b540aSrobert    it might be the case that the note overlaps REGNO.  */
1616*404b540aSrobert 
1617*404b540aSrobert rtx
1618*404b540aSrobert find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1619*404b540aSrobert {
1620*404b540aSrobert   rtx link;
1621*404b540aSrobert 
1622*404b540aSrobert   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1623*404b540aSrobert   if (! INSN_P (insn))
1624*404b540aSrobert     return 0;
1625*404b540aSrobert 
1626*404b540aSrobert   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1627*404b540aSrobert     if (REG_NOTE_KIND (link) == kind
1628*404b540aSrobert 	/* Verify that it is a register, so that scratch and MEM won't cause a
1629*404b540aSrobert 	   problem here.  */
1630*404b540aSrobert 	&& REG_P (XEXP (link, 0))
1631*404b540aSrobert 	&& REGNO (XEXP (link, 0)) <= regno
1632*404b540aSrobert 	&& ((REGNO (XEXP (link, 0))
1633*404b540aSrobert 	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1634*404b540aSrobert 		: hard_regno_nregs[REGNO (XEXP (link, 0))]
1635*404b540aSrobert 				  [GET_MODE (XEXP (link, 0))]))
1636*404b540aSrobert 	    > regno))
1637*404b540aSrobert       return link;
1638*404b540aSrobert   return 0;
1639*404b540aSrobert }
1640*404b540aSrobert 
1641*404b540aSrobert /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1642*404b540aSrobert    has such a note.  */
1643*404b540aSrobert 
1644*404b540aSrobert rtx
1645*404b540aSrobert find_reg_equal_equiv_note (rtx insn)
1646*404b540aSrobert {
1647*404b540aSrobert   rtx link;
1648*404b540aSrobert 
1649*404b540aSrobert   if (!INSN_P (insn))
1650*404b540aSrobert     return 0;
1651*404b540aSrobert   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1652*404b540aSrobert     if (REG_NOTE_KIND (link) == REG_EQUAL
1653*404b540aSrobert 	|| REG_NOTE_KIND (link) == REG_EQUIV)
1654*404b540aSrobert       {
1655*404b540aSrobert 	if (single_set (insn) == 0)
1656*404b540aSrobert 	  return 0;
1657*404b540aSrobert 	return link;
1658*404b540aSrobert       }
1659*404b540aSrobert   return NULL;
1660*404b540aSrobert }
1661*404b540aSrobert 
1662*404b540aSrobert /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1663*404b540aSrobert    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1664*404b540aSrobert 
1665*404b540aSrobert int
1666*404b540aSrobert find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1667*404b540aSrobert {
1668*404b540aSrobert   /* If it's not a CALL_INSN, it can't possibly have a
1669*404b540aSrobert      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1670*404b540aSrobert   if (!CALL_P (insn))
1671*404b540aSrobert     return 0;
1672*404b540aSrobert 
1673*404b540aSrobert   gcc_assert (datum);
1674*404b540aSrobert 
1675*404b540aSrobert   if (!REG_P (datum))
1676*404b540aSrobert     {
1677*404b540aSrobert       rtx link;
1678*404b540aSrobert 
1679*404b540aSrobert       for (link = CALL_INSN_FUNCTION_USAGE (insn);
1680*404b540aSrobert 	   link;
1681*404b540aSrobert 	   link = XEXP (link, 1))
1682*404b540aSrobert 	if (GET_CODE (XEXP (link, 0)) == code
1683*404b540aSrobert 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1684*404b540aSrobert 	  return 1;
1685*404b540aSrobert     }
1686*404b540aSrobert   else
1687*404b540aSrobert     {
1688*404b540aSrobert       unsigned int regno = REGNO (datum);
1689*404b540aSrobert 
1690*404b540aSrobert       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1691*404b540aSrobert 	 to pseudo registers, so don't bother checking.  */
1692*404b540aSrobert 
1693*404b540aSrobert       if (regno < FIRST_PSEUDO_REGISTER)
1694*404b540aSrobert 	{
1695*404b540aSrobert 	  unsigned int end_regno
1696*404b540aSrobert 	    = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1697*404b540aSrobert 	  unsigned int i;
1698*404b540aSrobert 
1699*404b540aSrobert 	  for (i = regno; i < end_regno; i++)
1700*404b540aSrobert 	    if (find_regno_fusage (insn, code, i))
1701*404b540aSrobert 	      return 1;
1702*404b540aSrobert 	}
1703*404b540aSrobert     }
1704*404b540aSrobert 
1705*404b540aSrobert   return 0;
1706*404b540aSrobert }
1707*404b540aSrobert 
1708*404b540aSrobert /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1709*404b540aSrobert    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1710*404b540aSrobert 
1711*404b540aSrobert int
1712*404b540aSrobert find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1713*404b540aSrobert {
1714*404b540aSrobert   rtx link;
1715*404b540aSrobert 
1716*404b540aSrobert   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1717*404b540aSrobert      to pseudo registers, so don't bother checking.  */
1718*404b540aSrobert 
1719*404b540aSrobert   if (regno >= FIRST_PSEUDO_REGISTER
1720*404b540aSrobert       || !CALL_P (insn) )
1721*404b540aSrobert     return 0;
1722*404b540aSrobert 
1723*404b540aSrobert   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1724*404b540aSrobert     {
1725*404b540aSrobert       unsigned int regnote;
1726*404b540aSrobert       rtx op, reg;
1727*404b540aSrobert 
1728*404b540aSrobert       if (GET_CODE (op = XEXP (link, 0)) == code
1729*404b540aSrobert 	  && REG_P (reg = XEXP (op, 0))
1730*404b540aSrobert 	  && (regnote = REGNO (reg)) <= regno
1731*404b540aSrobert 	  && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1732*404b540aSrobert 	return 1;
1733*404b540aSrobert     }
1734*404b540aSrobert 
1735*404b540aSrobert   return 0;
1736*404b540aSrobert }
1737*404b540aSrobert 
1738*404b540aSrobert /* Return true if INSN is a call to a pure function.  */
1739*404b540aSrobert 
1740*404b540aSrobert int
1741*404b540aSrobert pure_call_p (rtx insn)
1742*404b540aSrobert {
1743*404b540aSrobert   rtx link;
1744*404b540aSrobert 
1745*404b540aSrobert   if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1746*404b540aSrobert     return 0;
1747*404b540aSrobert 
1748*404b540aSrobert   /* Look for the note that differentiates const and pure functions.  */
1749*404b540aSrobert   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1750*404b540aSrobert     {
1751*404b540aSrobert       rtx u, m;
1752*404b540aSrobert 
1753*404b540aSrobert       if (GET_CODE (u = XEXP (link, 0)) == USE
1754*404b540aSrobert 	  && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1755*404b540aSrobert 	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
1756*404b540aSrobert 	return 1;
1757*404b540aSrobert     }
1758*404b540aSrobert 
1759*404b540aSrobert   return 0;
1760*404b540aSrobert }
1761*404b540aSrobert 
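/* For reference (not part of the original file): the note matched
   above is the (use (mem:BLK (scratch))) entry that the call expander
   adds to CALL_INSN_FUNCTION_USAGE for pure calls; a const call has
   CONST_OR_PURE_CALL_P set but carries no such use.  */
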
1762*404b540aSrobert /* Remove register note NOTE from the REG_NOTES of INSN.  */
1763*404b540aSrobert 
1764*404b540aSrobert void
1765*404b540aSrobert remove_note (rtx insn, rtx note)
1766*404b540aSrobert {
1767*404b540aSrobert   rtx link;
1768*404b540aSrobert 
1769*404b540aSrobert   if (note == NULL_RTX)
1770*404b540aSrobert     return;
1771*404b540aSrobert 
1772*404b540aSrobert   if (REG_NOTES (insn) == note)
1773*404b540aSrobert     {
1774*404b540aSrobert       REG_NOTES (insn) = XEXP (note, 1);
1775*404b540aSrobert       return;
1776*404b540aSrobert     }
1777*404b540aSrobert 
1778*404b540aSrobert   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1779*404b540aSrobert     if (XEXP (link, 1) == note)
1780*404b540aSrobert       {
1781*404b540aSrobert 	XEXP (link, 1) = XEXP (note, 1);
1782*404b540aSrobert 	return;
1783*404b540aSrobert       }
1784*404b540aSrobert 
1785*404b540aSrobert   gcc_unreachable ();
1786*404b540aSrobert }
1787*404b540aSrobert 
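/* Usage sketch (not part of the original file): the usual idiom pairs
   find_reg_note with remove_note, e.g. to drop a REG_EQUAL note that
   became stale after the insn's source was rewritten.  The helper
   name is hypothetical.  */

static void
discard_reg_equal_note (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  if (note)
    remove_note (insn, note);
}
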
1788*404b540aSrobert /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1789*404b540aSrobert    return 1 if it is found.  A simple equality test is used to determine if
1790*404b540aSrobert    NODE matches.  */
1791*404b540aSrobert 
1792*404b540aSrobert int
1793*404b540aSrobert in_expr_list_p (rtx listp, rtx node)
1794*404b540aSrobert {
1795*404b540aSrobert   rtx x;
1796*404b540aSrobert 
1797*404b540aSrobert   for (x = listp; x; x = XEXP (x, 1))
1798*404b540aSrobert     if (node == XEXP (x, 0))
1799*404b540aSrobert       return 1;
1800*404b540aSrobert 
1801*404b540aSrobert   return 0;
1802*404b540aSrobert }
1803*404b540aSrobert 
1804*404b540aSrobert /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1805*404b540aSrobert    remove that entry from the list if it is found.
1806*404b540aSrobert 
1807*404b540aSrobert    A simple equality test is used to determine if NODE matches.  */
1808*404b540aSrobert 
1809*404b540aSrobert void
1810*404b540aSrobert remove_node_from_expr_list (rtx node, rtx *listp)
1811*404b540aSrobert {
1812*404b540aSrobert   rtx temp = *listp;
1813*404b540aSrobert   rtx prev = NULL_RTX;
1814*404b540aSrobert 
1815*404b540aSrobert   while (temp)
1816*404b540aSrobert     {
1817*404b540aSrobert       if (node == XEXP (temp, 0))
1818*404b540aSrobert 	{
1819*404b540aSrobert 	  /* Splice the node out of the list.  */
1820*404b540aSrobert 	  if (prev)
1821*404b540aSrobert 	    XEXP (prev, 1) = XEXP (temp, 1);
1822*404b540aSrobert 	  else
1823*404b540aSrobert 	    *listp = XEXP (temp, 1);
1824*404b540aSrobert 
1825*404b540aSrobert 	  return;
1826*404b540aSrobert 	}
1827*404b540aSrobert 
1828*404b540aSrobert       prev = temp;
1829*404b540aSrobert       temp = XEXP (temp, 1);
1830*404b540aSrobert     }
1831*404b540aSrobert }
1832*404b540aSrobert 
1833*404b540aSrobert /* Nonzero if X contains any volatile instructions.  These are instructions
1834*404b540aSrobert    which may leave the machine state unpredictable, and thus no
1835*404b540aSrobert    instructions should be moved or combined across them.  This includes
1836*404b540aSrobert    only volatile asms and UNSPEC_VOLATILE instructions.  */
1837*404b540aSrobert 
1838*404b540aSrobert int
1839*404b540aSrobert volatile_insn_p (rtx x)
1840*404b540aSrobert {
1841*404b540aSrobert   RTX_CODE code;
1842*404b540aSrobert 
1843*404b540aSrobert   code = GET_CODE (x);
1844*404b540aSrobert   switch (code)
1845*404b540aSrobert     {
1846*404b540aSrobert     case LABEL_REF:
1847*404b540aSrobert     case SYMBOL_REF:
1848*404b540aSrobert     case CONST_INT:
1849*404b540aSrobert     case CONST:
1850*404b540aSrobert     case CONST_DOUBLE:
1851*404b540aSrobert     case CONST_VECTOR:
1852*404b540aSrobert     case CC0:
1853*404b540aSrobert     case PC:
1854*404b540aSrobert     case REG:
1855*404b540aSrobert     case SCRATCH:
1856*404b540aSrobert     case CLOBBER:
1857*404b540aSrobert     case ADDR_VEC:
1858*404b540aSrobert     case ADDR_DIFF_VEC:
1859*404b540aSrobert     case CALL:
1860*404b540aSrobert     case MEM:
1861*404b540aSrobert       return 0;
1862*404b540aSrobert 
1863*404b540aSrobert     case UNSPEC_VOLATILE:
1864*404b540aSrobert  /* case TRAP_IF: This isn't clear yet.  */
1865*404b540aSrobert       return 1;
1866*404b540aSrobert 
1867*404b540aSrobert     case ASM_INPUT:
1868*404b540aSrobert     case ASM_OPERANDS:
1869*404b540aSrobert       if (MEM_VOLATILE_P (x))
1870*404b540aSrobert 	return 1;
1871*404b540aSrobert 
1872*404b540aSrobert     default:
1873*404b540aSrobert       break;
1874*404b540aSrobert     }
1875*404b540aSrobert 
1876*404b540aSrobert   /* Recursively scan the operands of this expression.  */
1877*404b540aSrobert 
1878*404b540aSrobert   {
1879*404b540aSrobert     const char *fmt = GET_RTX_FORMAT (code);
1880*404b540aSrobert     int i;
1881*404b540aSrobert 
1882*404b540aSrobert     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1883*404b540aSrobert       {
1884*404b540aSrobert 	if (fmt[i] == 'e')
1885*404b540aSrobert 	  {
1886*404b540aSrobert 	    if (volatile_insn_p (XEXP (x, i)))
1887*404b540aSrobert 	      return 1;
1888*404b540aSrobert 	  }
1889*404b540aSrobert 	else if (fmt[i] == 'E')
1890*404b540aSrobert 	  {
1891*404b540aSrobert 	    int j;
1892*404b540aSrobert 	    for (j = 0; j < XVECLEN (x, i); j++)
1893*404b540aSrobert 	      if (volatile_insn_p (XVECEXP (x, i, j)))
1894*404b540aSrobert 		return 1;
1895*404b540aSrobert 	  }
1896*404b540aSrobert       }
1897*404b540aSrobert   }
1898*404b540aSrobert   return 0;
1899*404b540aSrobert }
1900*404b540aSrobert 
1901*404b540aSrobert /* Nonzero if X contains any volatile memory references,
1902*404b540aSrobert    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
1903*404b540aSrobert 
1904*404b540aSrobert int
1905*404b540aSrobert volatile_refs_p (rtx x)
1906*404b540aSrobert {
1907*404b540aSrobert   RTX_CODE code;
1908*404b540aSrobert 
1909*404b540aSrobert   code = GET_CODE (x);
1910*404b540aSrobert   switch (code)
1911*404b540aSrobert     {
1912*404b540aSrobert     case LABEL_REF:
1913*404b540aSrobert     case SYMBOL_REF:
1914*404b540aSrobert     case CONST_INT:
1915*404b540aSrobert     case CONST:
1916*404b540aSrobert     case CONST_DOUBLE:
1917*404b540aSrobert     case CONST_VECTOR:
1918*404b540aSrobert     case CC0:
1919*404b540aSrobert     case PC:
1920*404b540aSrobert     case REG:
1921*404b540aSrobert     case SCRATCH:
1922*404b540aSrobert     case CLOBBER:
1923*404b540aSrobert     case ADDR_VEC:
1924*404b540aSrobert     case ADDR_DIFF_VEC:
1925*404b540aSrobert       return 0;
1926*404b540aSrobert 
1927*404b540aSrobert     case UNSPEC_VOLATILE:
1928*404b540aSrobert       return 1;
1929*404b540aSrobert 
1930*404b540aSrobert     case MEM:
1931*404b540aSrobert     case ASM_INPUT:
1932*404b540aSrobert     case ASM_OPERANDS:
1933*404b540aSrobert       if (MEM_VOLATILE_P (x))
1934*404b540aSrobert 	return 1;
1935*404b540aSrobert 
1936*404b540aSrobert     default:
1937*404b540aSrobert       break;
1938*404b540aSrobert     }
1939*404b540aSrobert 
1940*404b540aSrobert   /* Recursively scan the operands of this expression.  */
1941*404b540aSrobert 
1942*404b540aSrobert   {
1943*404b540aSrobert     const char *fmt = GET_RTX_FORMAT (code);
1944*404b540aSrobert     int i;
1945*404b540aSrobert 
1946*404b540aSrobert     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1947*404b540aSrobert       {
1948*404b540aSrobert 	if (fmt[i] == 'e')
1949*404b540aSrobert 	  {
1950*404b540aSrobert 	    if (volatile_refs_p (XEXP (x, i)))
1951*404b540aSrobert 	      return 1;
1952*404b540aSrobert 	  }
1953*404b540aSrobert 	else if (fmt[i] == 'E')
1954*404b540aSrobert 	  {
1955*404b540aSrobert 	    int j;
1956*404b540aSrobert 	    for (j = 0; j < XVECLEN (x, i); j++)
1957*404b540aSrobert 	      if (volatile_refs_p (XVECEXP (x, i, j)))
1958*404b540aSrobert 		return 1;
1959*404b540aSrobert 	  }
1960*404b540aSrobert       }
1961*404b540aSrobert   }
1962*404b540aSrobert   return 0;
1963*404b540aSrobert }
1964*404b540aSrobert 
1965*404b540aSrobert /* Similar to above, except that it also rejects register pre- and post-
1966*404b540aSrobert    incrementing.  */
1967*404b540aSrobert 
1968*404b540aSrobert int
1969*404b540aSrobert side_effects_p (rtx x)
1970*404b540aSrobert {
1971*404b540aSrobert   RTX_CODE code;
1972*404b540aSrobert 
1973*404b540aSrobert   code = GET_CODE (x);
1974*404b540aSrobert   switch (code)
1975*404b540aSrobert     {
1976*404b540aSrobert     case LABEL_REF:
1977*404b540aSrobert     case SYMBOL_REF:
1978*404b540aSrobert     case CONST_INT:
1979*404b540aSrobert     case CONST:
1980*404b540aSrobert     case CONST_DOUBLE:
1981*404b540aSrobert     case CONST_VECTOR:
1982*404b540aSrobert     case CC0:
1983*404b540aSrobert     case PC:
1984*404b540aSrobert     case REG:
1985*404b540aSrobert     case SCRATCH:
1986*404b540aSrobert     case ADDR_VEC:
1987*404b540aSrobert     case ADDR_DIFF_VEC:
1988*404b540aSrobert       return 0;
1989*404b540aSrobert 
1990*404b540aSrobert     case CLOBBER:
1991*404b540aSrobert       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
1992*404b540aSrobert 	 when some combination can't be done.  If we see one, don't think
1993*404b540aSrobert 	 that we can simplify the expression.  */
1994*404b540aSrobert       return (GET_MODE (x) != VOIDmode);
1995*404b540aSrobert 
1996*404b540aSrobert     case PRE_INC:
1997*404b540aSrobert     case PRE_DEC:
1998*404b540aSrobert     case POST_INC:
1999*404b540aSrobert     case POST_DEC:
2000*404b540aSrobert     case PRE_MODIFY:
2001*404b540aSrobert     case POST_MODIFY:
2002*404b540aSrobert     case CALL:
2003*404b540aSrobert     case UNSPEC_VOLATILE:
2004*404b540aSrobert  /* case TRAP_IF: This isn't clear yet.  */
2005*404b540aSrobert       return 1;
2006*404b540aSrobert 
2007*404b540aSrobert     case MEM:
2008*404b540aSrobert     case ASM_INPUT:
2009*404b540aSrobert     case ASM_OPERANDS:
2010*404b540aSrobert       if (MEM_VOLATILE_P (x))
2011*404b540aSrobert 	return 1;
2012*404b540aSrobert 
2013*404b540aSrobert     default:
2014*404b540aSrobert       break;
2015*404b540aSrobert     }
2016*404b540aSrobert 
2017*404b540aSrobert   /* Recursively scan the operands of this expression.  */
2018*404b540aSrobert 
2019*404b540aSrobert   {
2020*404b540aSrobert     const char *fmt = GET_RTX_FORMAT (code);
2021*404b540aSrobert     int i;
2022*404b540aSrobert 
2023*404b540aSrobert     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2024*404b540aSrobert       {
2025*404b540aSrobert 	if (fmt[i] == 'e')
2026*404b540aSrobert 	  {
2027*404b540aSrobert 	    if (side_effects_p (XEXP (x, i)))
2028*404b540aSrobert 	      return 1;
2029*404b540aSrobert 	  }
2030*404b540aSrobert 	else if (fmt[i] == 'E')
2031*404b540aSrobert 	  {
2032*404b540aSrobert 	    int j;
2033*404b540aSrobert 	    for (j = 0; j < XVECLEN (x, i); j++)
2034*404b540aSrobert 	      if (side_effects_p (XVECEXP (x, i, j)))
2035*404b540aSrobert 		return 1;
2036*404b540aSrobert 	  }
2037*404b540aSrobert       }
2038*404b540aSrobert   }
2039*404b540aSrobert   return 0;
2040*404b540aSrobert }
2041*404b540aSrobert 
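/* Usage sketch (not part of the original file): dead-code elimination
   must consult side_effects_p before deleting a SET whose result is
   unused, since a volatile load or an auto-increment hiding in the
   source must survive even when the stored value is dead:

     rtx set = single_set (insn);
     if (set && dest_is_dead_p (insn)
	 && ! side_effects_p (SET_SRC (set)))
       delete_insn (insn);

   where dest_is_dead_p stands for whatever liveness query the caller
   has available and delete_insn is the real deletion entry point.  */
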
2042*404b540aSrobert enum may_trap_p_flags
2043*404b540aSrobert {
2044*404b540aSrobert   MTP_UNALIGNED_MEMS = 1,
2045*404b540aSrobert   MTP_AFTER_MOVE = 2
2046*404b540aSrobert };
2047*404b540aSrobert /* Return nonzero if evaluating rtx X might cause a trap.
2048*404b540aSrobert    (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2049*404b540aSrobert    unaligned memory accesses on strict alignment machines.  If
2050*404b540aSrobert    (FLAGS & AFTER_MOVE) is true, returns nonzero even in case the expression
2051*404b540aSrobert    (FLAGS & MTP_AFTER_MOVE) is set, return nonzero even if the expression
2052*404b540aSrobert    cannot trap at its current location but might become trapping if moved
2053*404b540aSrobert 
2054*404b540aSrobert static int
2055*404b540aSrobert may_trap_p_1 (rtx x, unsigned flags)
2056*404b540aSrobert {
2057*404b540aSrobert   int i;
2058*404b540aSrobert   enum rtx_code code;
2059*404b540aSrobert   const char *fmt;
2060*404b540aSrobert   bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2061*404b540aSrobert 
2062*404b540aSrobert   if (x == 0)
2063*404b540aSrobert     return 0;
2064*404b540aSrobert   code = GET_CODE (x);
2065*404b540aSrobert   switch (code)
2066*404b540aSrobert     {
2067*404b540aSrobert       /* Handle these cases quickly.  */
2068*404b540aSrobert     case CONST_INT:
2069*404b540aSrobert     case CONST_DOUBLE:
2070*404b540aSrobert     case CONST_VECTOR:
2071*404b540aSrobert     case SYMBOL_REF:
2072*404b540aSrobert     case LABEL_REF:
2073*404b540aSrobert     case CONST:
2074*404b540aSrobert     case PC:
2075*404b540aSrobert     case CC0:
2076*404b540aSrobert     case REG:
2077*404b540aSrobert     case SCRATCH:
2078*404b540aSrobert       return 0;
2079*404b540aSrobert 
2080*404b540aSrobert     case ASM_INPUT:
2081*404b540aSrobert     case UNSPEC_VOLATILE:
2082*404b540aSrobert     case TRAP_IF:
2083*404b540aSrobert       return 1;
2084*404b540aSrobert 
2085*404b540aSrobert     case ASM_OPERANDS:
2086*404b540aSrobert       return MEM_VOLATILE_P (x);
2087*404b540aSrobert 
2088*404b540aSrobert       /* Memory ref can trap unless it's a static var or a stack slot.  */
2089*404b540aSrobert     case MEM:
2090*404b540aSrobert       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2091*404b540aSrobert 	     reference; moving it out of its guarding condition might cause
2092*404b540aSrobert 	     its address to become invalid.  */
2093*404b540aSrobert 	  !(flags & MTP_AFTER_MOVE)
2094*404b540aSrobert 	  && MEM_NOTRAP_P (x)
2095*404b540aSrobert 	  && (!STRICT_ALIGNMENT || !unaligned_mems))
2096*404b540aSrobert 	return 0;
2097*404b540aSrobert       return
2098*404b540aSrobert 	rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2099*404b540aSrobert 
2100*404b540aSrobert       /* Division by a non-constant might trap.  */
2101*404b540aSrobert     case DIV:
2102*404b540aSrobert     case MOD:
2103*404b540aSrobert     case UDIV:
2104*404b540aSrobert     case UMOD:
2105*404b540aSrobert       if (HONOR_SNANS (GET_MODE (x)))
2106*404b540aSrobert 	return 1;
2107*404b540aSrobert       if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2108*404b540aSrobert 	return flag_trapping_math;
2109*404b540aSrobert       if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2110*404b540aSrobert 	return 1;
2111*404b540aSrobert       break;
2112*404b540aSrobert 
2113*404b540aSrobert     case EXPR_LIST:
2114*404b540aSrobert       /* An EXPR_LIST is used to represent a function call.  This
2115*404b540aSrobert 	 certainly may trap.  */
2116*404b540aSrobert       return 1;
2117*404b540aSrobert 
2118*404b540aSrobert     case GE:
2119*404b540aSrobert     case GT:
2120*404b540aSrobert     case LE:
2121*404b540aSrobert     case LT:
2122*404b540aSrobert     case LTGT:
2123*404b540aSrobert     case COMPARE:
2124*404b540aSrobert       /* Some floating point comparisons may trap.  */
2125*404b540aSrobert       if (!flag_trapping_math)
2126*404b540aSrobert 	break;
2127*404b540aSrobert       /* ??? There is no machine independent way to check for tests that trap
2128*404b540aSrobert 	 when COMPARE is used, though many targets do make this distinction.
2129*404b540aSrobert 	 For instance, sparc uses CCFPE for compares which generate exceptions
2130*404b540aSrobert 	 and CCFP for compares which do not generate exceptions.  */
2131*404b540aSrobert       if (HONOR_NANS (GET_MODE (x)))
2132*404b540aSrobert 	return 1;
2133*404b540aSrobert       /* But often the compare has some CC mode, so check operand
2134*404b540aSrobert 	 modes as well.  */
2135*404b540aSrobert       if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2136*404b540aSrobert 	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2137*404b540aSrobert 	return 1;
2138*404b540aSrobert       break;
2139*404b540aSrobert 
2140*404b540aSrobert     case EQ:
2141*404b540aSrobert     case NE:
2142*404b540aSrobert       if (HONOR_SNANS (GET_MODE (x)))
2143*404b540aSrobert 	return 1;
2144*404b540aSrobert       /* Often comparison is CC mode, so check operand modes.  */
2145*404b540aSrobert       if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2146*404b540aSrobert 	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2147*404b540aSrobert 	return 1;
2148*404b540aSrobert       break;
2149*404b540aSrobert 
2150*404b540aSrobert     case FIX:
2151*404b540aSrobert       /* Conversion of floating point might trap.  */
2152*404b540aSrobert       if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2153*404b540aSrobert 	return 1;
2154*404b540aSrobert       break;
2155*404b540aSrobert 
2156*404b540aSrobert     case NEG:
2157*404b540aSrobert     case ABS:
2158*404b540aSrobert     case SUBREG:
2159*404b540aSrobert       /* These operations don't trap even with floating point.  */
2160*404b540aSrobert       break;
2161*404b540aSrobert 
2162*404b540aSrobert     default:
2163*404b540aSrobert       /* Any floating arithmetic may trap.  */
2164*404b540aSrobert       if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2165*404b540aSrobert 	  && flag_trapping_math)
2166*404b540aSrobert 	return 1;
2167*404b540aSrobert     }
2168*404b540aSrobert 
2169*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
2170*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2171*404b540aSrobert     {
2172*404b540aSrobert       if (fmt[i] == 'e')
2173*404b540aSrobert 	{
2174*404b540aSrobert 	  if (may_trap_p_1 (XEXP (x, i), flags))
2175*404b540aSrobert 	    return 1;
2176*404b540aSrobert 	}
2177*404b540aSrobert       else if (fmt[i] == 'E')
2178*404b540aSrobert 	{
2179*404b540aSrobert 	  int j;
2180*404b540aSrobert 	  for (j = 0; j < XVECLEN (x, i); j++)
2181*404b540aSrobert 	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2182*404b540aSrobert 	      return 1;
2183*404b540aSrobert 	}
2184*404b540aSrobert     }
2185*404b540aSrobert   return 0;
2186*404b540aSrobert }
2187*404b540aSrobert 
2188*404b540aSrobert /* Return nonzero if evaluating rtx X might cause a trap.  */
2189*404b540aSrobert 
2190*404b540aSrobert int
2191*404b540aSrobert may_trap_p (rtx x)
2192*404b540aSrobert {
2193*404b540aSrobert   return may_trap_p_1 (x, 0);
2194*404b540aSrobert }
2195*404b540aSrobert 
2196*404b540aSrobert /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2197*404b540aSrobert    is moved from its current location by some optimization.  */
2198*404b540aSrobert 
2199*404b540aSrobert int
2200*404b540aSrobert may_trap_after_code_motion_p (rtx x)
2201*404b540aSrobert {
2202*404b540aSrobert   return may_trap_p_1 (x, MTP_AFTER_MOVE);
2203*404b540aSrobert }
2204*404b540aSrobert 
2205*404b540aSrobert /* Same as above, but additionally return nonzero if evaluating rtx X might
2206*404b540aSrobert    cause a fault.  We define a fault for the purpose of this function as a
2207*404b540aSrobert    cause a fault.  We define a fault, for the purpose of this function, as an
2208*404b540aSrobert    execution of a valid program; the typical example is an unaligned memory
2209*404b540aSrobert    access on a strict alignment machine.  The compiler guarantees that it
2210*404b540aSrobert    doesn't generate code that will fault from a valid program, but this
2211*404b540aSrobert    guarantee doesn't mean anything for individual instructions.  Consider
2212*404b540aSrobert    the following example:
2213*404b540aSrobert 
2214*404b540aSrobert       struct S { int d; union { char *cp; int *ip; }; };
2215*404b540aSrobert 
2216*404b540aSrobert       int foo(struct S *s)
2217*404b540aSrobert       {
2218*404b540aSrobert 	if (s->d == 1)
2219*404b540aSrobert 	  return *s->ip;
2220*404b540aSrobert 	else
2221*404b540aSrobert 	  return *s->cp;
2222*404b540aSrobert       }
2223*404b540aSrobert 
2224*404b540aSrobert    on a strict alignment machine.  In a valid program, foo will never be
2225*404b540aSrobert    invoked on a structure for which d is equal to 1 and the underlying
2226*404b540aSrobert    unique field of the union not aligned on a 4-byte boundary, but the
2227*404b540aSrobert    expression *s->ip might cause a fault if considered individually.
2228*404b540aSrobert 
2229*404b540aSrobert    At the RTL level, potentially problematic expressions will almost always
2230*404b540aSrobert    satisfy may_trap_p; for example, the above dereference can be emitted as
2231*404b540aSrobert    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2232*404b540aSrobert    However, suppose that foo is inlined in a caller that causes s->cp to
2233*404b540aSrobert    point to a local character variable and guarantees that s->d is not set
2234*404b540aSrobert    to 1; foo may have been effectively translated into pseudo-RTL as:
2235*404b540aSrobert 
2236*404b540aSrobert       if ((reg:SI) == 1)
2237*404b540aSrobert 	(set (reg:SI) (mem:SI (%fp - 7)))
2238*404b540aSrobert       else
2239*404b540aSrobert 	(set (reg:QI) (mem:QI (%fp - 7)))
2240*404b540aSrobert 
2241*404b540aSrobert    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2242*404b540aSrobert    memory reference to a stack slot, but it will certainly cause a fault
2243*404b540aSrobert    on a strict alignment machine.  */
2244*404b540aSrobert 
2245*404b540aSrobert int
2246*404b540aSrobert may_trap_or_fault_p (rtx x)
2247*404b540aSrobert {
2248*404b540aSrobert   return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2249*404b540aSrobert }
2250*404b540aSrobert 
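/* Usage sketch (not part of the original file): the three predicates
   above answer different questions.  A pass hoisting an expression
   past its guarding condition should use may_trap_after_code_motion_p,
   because a MEM marked MEM_NOTRAP_P is only safe at its original
   position; speculation on a strict-alignment target needs the
   stronger may_trap_or_fault_p:

     if (! may_trap_after_code_motion_p (PATTERN (insn)))
       ...INSN may be hoisted out of the guarded region...  */
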
2251*404b540aSrobert /* Return nonzero if X contains a comparison that is not either EQ or NE,
2252*404b540aSrobert    i.e., an inequality.  */
2253*404b540aSrobert 
2254*404b540aSrobert int
2255*404b540aSrobert inequality_comparisons_p (rtx x)
2256*404b540aSrobert {
2257*404b540aSrobert   const char *fmt;
2258*404b540aSrobert   int len, i;
2259*404b540aSrobert   enum rtx_code code = GET_CODE (x);
2260*404b540aSrobert 
2261*404b540aSrobert   switch (code)
2262*404b540aSrobert     {
2263*404b540aSrobert     case REG:
2264*404b540aSrobert     case SCRATCH:
2265*404b540aSrobert     case PC:
2266*404b540aSrobert     case CC0:
2267*404b540aSrobert     case CONST_INT:
2268*404b540aSrobert     case CONST_DOUBLE:
2269*404b540aSrobert     case CONST_VECTOR:
2270*404b540aSrobert     case CONST:
2271*404b540aSrobert     case LABEL_REF:
2272*404b540aSrobert     case SYMBOL_REF:
2273*404b540aSrobert       return 0;
2274*404b540aSrobert 
2275*404b540aSrobert     case LT:
2276*404b540aSrobert     case LTU:
2277*404b540aSrobert     case GT:
2278*404b540aSrobert     case GTU:
2279*404b540aSrobert     case LE:
2280*404b540aSrobert     case LEU:
2281*404b540aSrobert     case GE:
2282*404b540aSrobert     case GEU:
2283*404b540aSrobert       return 1;
2284*404b540aSrobert 
2285*404b540aSrobert     default:
2286*404b540aSrobert       break;
2287*404b540aSrobert     }
2288*404b540aSrobert 
2289*404b540aSrobert   len = GET_RTX_LENGTH (code);
2290*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
2291*404b540aSrobert 
2292*404b540aSrobert   for (i = 0; i < len; i++)
2293*404b540aSrobert     {
2294*404b540aSrobert       if (fmt[i] == 'e')
2295*404b540aSrobert 	{
2296*404b540aSrobert 	  if (inequality_comparisons_p (XEXP (x, i)))
2297*404b540aSrobert 	    return 1;
2298*404b540aSrobert 	}
2299*404b540aSrobert       else if (fmt[i] == 'E')
2300*404b540aSrobert 	{
2301*404b540aSrobert 	  int j;
2302*404b540aSrobert 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2303*404b540aSrobert 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2304*404b540aSrobert 	      return 1;
2305*404b540aSrobert 	}
2306*404b540aSrobert     }
2307*404b540aSrobert 
2308*404b540aSrobert   return 0;
2309*404b540aSrobert }
2310*404b540aSrobert 
2311*404b540aSrobert /* Replace any occurrence of FROM in X with TO.  The function does
2312*404b540aSrobert    not descend into CONST_DOUBLE expressions when replacing.
2313*404b540aSrobert 
2314*404b540aSrobert    Note that copying is not done so X must not be shared unless all copies
2315*404b540aSrobert    are to be modified.  */
2316*404b540aSrobert 
2317*404b540aSrobert rtx
2318*404b540aSrobert replace_rtx (rtx x, rtx from, rtx to)
2319*404b540aSrobert {
2320*404b540aSrobert   int i, j;
2321*404b540aSrobert   const char *fmt;
2322*404b540aSrobert 
2323*404b540aSrobert   /* The following prevents infinite recursion when we would replace
2324*404b540aSrobert      a MEM inside a CONST_DOUBLE with that same CONST_DOUBLE.  */
2325*404b540aSrobert   if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2326*404b540aSrobert     return x;
2327*404b540aSrobert 
2328*404b540aSrobert   if (x == from)
2329*404b540aSrobert     return to;
2330*404b540aSrobert 
2331*404b540aSrobert   /* Allow this function to make replacements in EXPR_LISTs.  */
2332*404b540aSrobert   if (x == 0)
2333*404b540aSrobert     return 0;
2334*404b540aSrobert 
2335*404b540aSrobert   if (GET_CODE (x) == SUBREG)
2336*404b540aSrobert     {
2337*404b540aSrobert       rtx new = replace_rtx (SUBREG_REG (x), from, to);
2338*404b540aSrobert 
2339*404b540aSrobert       if (GET_CODE (new) == CONST_INT)
2340*404b540aSrobert 	{
2341*404b540aSrobert 	  x = simplify_subreg (GET_MODE (x), new,
2342*404b540aSrobert 			       GET_MODE (SUBREG_REG (x)),
2343*404b540aSrobert 			       SUBREG_BYTE (x));
2344*404b540aSrobert 	  gcc_assert (x);
2345*404b540aSrobert 	}
2346*404b540aSrobert       else
2347*404b540aSrobert 	SUBREG_REG (x) = new;
2348*404b540aSrobert 
2349*404b540aSrobert       return x;
2350*404b540aSrobert     }
2351*404b540aSrobert   else if (GET_CODE (x) == ZERO_EXTEND)
2352*404b540aSrobert     {
2353*404b540aSrobert       rtx new = replace_rtx (XEXP (x, 0), from, to);
2354*404b540aSrobert 
2355*404b540aSrobert       if (GET_CODE (new) == CONST_INT)
2356*404b540aSrobert 	{
2357*404b540aSrobert 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2358*404b540aSrobert 					new, GET_MODE (XEXP (x, 0)));
2359*404b540aSrobert 	  gcc_assert (x);
2360*404b540aSrobert 	}
2361*404b540aSrobert       else
2362*404b540aSrobert 	XEXP (x, 0) = new;
2363*404b540aSrobert 
2364*404b540aSrobert       return x;
2365*404b540aSrobert     }
2366*404b540aSrobert 
2367*404b540aSrobert   fmt = GET_RTX_FORMAT (GET_CODE (x));
2368*404b540aSrobert   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2369*404b540aSrobert     {
2370*404b540aSrobert       if (fmt[i] == 'e')
2371*404b540aSrobert 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2372*404b540aSrobert       else if (fmt[i] == 'E')
2373*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2374*404b540aSrobert 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2375*404b540aSrobert     }
2376*404b540aSrobert 
2377*404b540aSrobert   return x;
2378*404b540aSrobert }
2379*404b540aSrobert 
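/* Usage sketch (not part of the original file): because replace_rtx
   rewrites X destructively, callers that must preserve the original
   pattern copy it first:

     rtx newpat = replace_rtx (copy_rtx (PATTERN (insn)), from, to);

   FROM and TO are hypothetical rtxen here, typically a pseudo
   register and the value being substituted for it.  */
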
2380*404b540aSrobert /* Replace occurrences of the old label in *X with the new one.
2381*404b540aSrobert    DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2382*404b540aSrobert 
2383*404b540aSrobert int
2384*404b540aSrobert replace_label (rtx *x, void *data)
2385*404b540aSrobert {
2386*404b540aSrobert   rtx l = *x;
2387*404b540aSrobert   rtx old_label = ((replace_label_data *) data)->r1;
2388*404b540aSrobert   rtx new_label = ((replace_label_data *) data)->r2;
2389*404b540aSrobert   bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2390*404b540aSrobert 
2391*404b540aSrobert   if (l == NULL_RTX)
2392*404b540aSrobert     return 0;
2393*404b540aSrobert 
2394*404b540aSrobert   if (GET_CODE (l) == SYMBOL_REF
2395*404b540aSrobert       && CONSTANT_POOL_ADDRESS_P (l))
2396*404b540aSrobert     {
2397*404b540aSrobert       rtx c = get_pool_constant (l);
2398*404b540aSrobert       if (rtx_referenced_p (old_label, c))
2399*404b540aSrobert 	{
2400*404b540aSrobert 	  rtx new_c, new_l;
2401*404b540aSrobert 	  replace_label_data *d = (replace_label_data *) data;
2402*404b540aSrobert 
2403*404b540aSrobert 	  /* Create a copy of constant C; replace the label inside
2404*404b540aSrobert 	     but do not update LABEL_NUSES because uses in the constant pool
2405*404b540aSrobert 	     are not counted.  */
2406*404b540aSrobert 	  new_c = copy_rtx (c);
2407*404b540aSrobert 	  d->update_label_nuses = false;
2408*404b540aSrobert 	  for_each_rtx (&new_c, replace_label, data);
2409*404b540aSrobert 	  d->update_label_nuses = update_label_nuses;
2410*404b540aSrobert 
2411*404b540aSrobert 	  /* Add the new constant NEW_C to the constant pool and replace
2412*404b540aSrobert 	     the old reference to the constant with the new one.  */
2413*404b540aSrobert 	  new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2414*404b540aSrobert 	  *x = replace_rtx (l, l, new_l);
2415*404b540aSrobert 	}
2416*404b540aSrobert       return 0;
2417*404b540aSrobert     }
2418*404b540aSrobert 
2419*404b540aSrobert   /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2420*404b540aSrobert      field.  This is not handled by for_each_rtx because it doesn't
2421*404b540aSrobert      handle unprinted ('0') fields.  */
2422*404b540aSrobert   if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2423*404b540aSrobert     JUMP_LABEL (l) = new_label;
2424*404b540aSrobert 
2425*404b540aSrobert   if ((GET_CODE (l) == LABEL_REF
2426*404b540aSrobert        || GET_CODE (l) == INSN_LIST)
2427*404b540aSrobert       && XEXP (l, 0) == old_label)
2428*404b540aSrobert     {
2429*404b540aSrobert       XEXP (l, 0) = new_label;
2430*404b540aSrobert       if (update_label_nuses)
2431*404b540aSrobert 	{
2432*404b540aSrobert 	  ++LABEL_NUSES (new_label);
2433*404b540aSrobert 	  --LABEL_NUSES (old_label);
2434*404b540aSrobert 	}
2435*404b540aSrobert       return 0;
2436*404b540aSrobert     }
2437*404b540aSrobert 
2438*404b540aSrobert   return 0;
2439*404b540aSrobert }
2440*404b540aSrobert 
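/* Usage sketch (not part of the original file): drive replace_label
   through for_each_rtx to redirect every reference to OLD_LABEL in
   INSN's pattern to NEW_LABEL, keeping LABEL_NUSES consistent.  The
   helper name is hypothetical.  */

static void
redirect_label_refs (rtx insn, rtx old_label, rtx new_label)
{
  replace_label_data d;

  d.r1 = old_label;
  d.r2 = new_label;
  d.update_label_nuses = true;

  for_each_rtx (&PATTERN (insn), replace_label, &d);
}
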
2441*404b540aSrobert /* Return nonzero when *BODY is equal to X or directly references X,
2442*404b540aSrobert    in which case FOR_EACH_RTX stops traversing and returns nonzero too;
2443*404b540aSrobert    otherwise FOR_EACH_RTX continues traversing *BODY.  */
2444*404b540aSrobert 
2445*404b540aSrobert static int
2446*404b540aSrobert rtx_referenced_p_1 (rtx *body, void *x)
2447*404b540aSrobert {
2448*404b540aSrobert   rtx y = (rtx) x;
2449*404b540aSrobert 
2450*404b540aSrobert   if (*body == NULL_RTX)
2451*404b540aSrobert     return y == NULL_RTX;
2452*404b540aSrobert 
2453*404b540aSrobert   /* Return true if a label_ref *BODY refers to label Y.  */
2454*404b540aSrobert   if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2455*404b540aSrobert     return XEXP (*body, 0) == y;
2456*404b540aSrobert 
2457*404b540aSrobert   /* If *BODY is a reference to a pool constant, traverse the constant.  */
2458*404b540aSrobert   if (GET_CODE (*body) == SYMBOL_REF
2459*404b540aSrobert       && CONSTANT_POOL_ADDRESS_P (*body))
2460*404b540aSrobert     return rtx_referenced_p (y, get_pool_constant (*body));
2461*404b540aSrobert 
2462*404b540aSrobert   /* By default, compare the RTL expressions.  */
2463*404b540aSrobert   return rtx_equal_p (*body, y);
2464*404b540aSrobert }
2465*404b540aSrobert 
2466*404b540aSrobert /* Return true if X is referenced in BODY.  */
2467*404b540aSrobert 
2468*404b540aSrobert int
2469*404b540aSrobert rtx_referenced_p (rtx x, rtx body)
2470*404b540aSrobert {
2471*404b540aSrobert   return for_each_rtx (&body, rtx_referenced_p_1, x);
2472*404b540aSrobert }
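
/* A minimal usage sketch (hypothetical caller, not part of this file):

     if (rtx_referenced_p (label, PATTERN (insn)))
       ...	the label is still mentioned by INSN's pattern

   The walk also follows references into constant-pool constants,
   courtesy of rtx_referenced_p_1 above.  */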
2473*404b540aSrobert 
2474*404b540aSrobert /* If INSN is a tablejump, return true and store the label preceding the jump
2475*404b540aSrobert    table in *LABELP and the jump table itself in *TABLEP.  LABELP and TABLEP may be NULL.  */
2476*404b540aSrobert 
2477*404b540aSrobert bool
2478*404b540aSrobert tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2479*404b540aSrobert {
2480*404b540aSrobert   rtx label, table;
2481*404b540aSrobert 
2482*404b540aSrobert   if (JUMP_P (insn)
2483*404b540aSrobert       && (label = JUMP_LABEL (insn)) != NULL_RTX
2484*404b540aSrobert       && (table = next_active_insn (label)) != NULL_RTX
2485*404b540aSrobert       && JUMP_P (table)
2486*404b540aSrobert       && (GET_CODE (PATTERN (table)) == ADDR_VEC
2487*404b540aSrobert 	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2488*404b540aSrobert     {
2489*404b540aSrobert       if (labelp)
2490*404b540aSrobert 	*labelp = label;
2491*404b540aSrobert       if (tablep)
2492*404b540aSrobert 	*tablep = table;
2493*404b540aSrobert       return true;
2494*404b540aSrobert     }
2495*404b540aSrobert   return false;
2496*404b540aSrobert }
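
/* A minimal usage sketch (hypothetical caller; visit_target stands in
   for whatever a pass does with each case label):

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       {
	 rtvec vec = XVEC (PATTERN (table),
			   GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
	 int i;
	 for (i = 0; i < GET_NUM_ELEM (vec); i++)
	   visit_target (XEXP (RTVEC_ELT (vec, i), 0));
       }

   An ADDR_DIFF_VEC keeps its label refs in operand 1, an ADDR_VEC in
   operand 0, hence the GET_CODE test; label_is_jump_target_p below
   uses the same idiom.  */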
2497*404b540aSrobert 
2498*404b540aSrobert /* A subroutine of computed_jump_p.  Return 1 if X contains a REG or MEM or
2499*404b540aSrobert    constant that is not in the constant pool and not in the condition
2500*404b540aSrobert    of an IF_THEN_ELSE.  */
2501*404b540aSrobert 
2502*404b540aSrobert static int
2503*404b540aSrobert computed_jump_p_1 (rtx x)
2504*404b540aSrobert {
2505*404b540aSrobert   enum rtx_code code = GET_CODE (x);
2506*404b540aSrobert   int i, j;
2507*404b540aSrobert   const char *fmt;
2508*404b540aSrobert 
2509*404b540aSrobert   switch (code)
2510*404b540aSrobert     {
2511*404b540aSrobert     case LABEL_REF:
2512*404b540aSrobert     case PC:
2513*404b540aSrobert       return 0;
2514*404b540aSrobert 
2515*404b540aSrobert     case CONST:
2516*404b540aSrobert     case CONST_INT:
2517*404b540aSrobert     case CONST_DOUBLE:
2518*404b540aSrobert     case CONST_VECTOR:
2519*404b540aSrobert     case SYMBOL_REF:
2520*404b540aSrobert     case REG:
2521*404b540aSrobert       return 1;
2522*404b540aSrobert 
2523*404b540aSrobert     case MEM:
2524*404b540aSrobert       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2525*404b540aSrobert 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2526*404b540aSrobert 
2527*404b540aSrobert     case IF_THEN_ELSE:
2528*404b540aSrobert       return (computed_jump_p_1 (XEXP (x, 1))
2529*404b540aSrobert 	      || computed_jump_p_1 (XEXP (x, 2)));
2530*404b540aSrobert 
2531*404b540aSrobert     default:
2532*404b540aSrobert       break;
2533*404b540aSrobert     }
2534*404b540aSrobert 
2535*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
2536*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2537*404b540aSrobert     {
2538*404b540aSrobert       if (fmt[i] == 'e'
2539*404b540aSrobert 	  && computed_jump_p_1 (XEXP (x, i)))
2540*404b540aSrobert 	return 1;
2541*404b540aSrobert 
2542*404b540aSrobert       else if (fmt[i] == 'E')
2543*404b540aSrobert 	for (j = 0; j < XVECLEN (x, i); j++)
2544*404b540aSrobert 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2545*404b540aSrobert 	    return 1;
2546*404b540aSrobert     }
2547*404b540aSrobert 
2548*404b540aSrobert   return 0;
2549*404b540aSrobert }
2550*404b540aSrobert 
2551*404b540aSrobert /* Return nonzero if INSN is an indirect jump (aka computed jump).
2552*404b540aSrobert 
2553*404b540aSrobert    Tablejumps and casesi insns are not considered indirect jumps;
2554*404b540aSrobert    we can recognize them by a (use (label_ref)).  */
2555*404b540aSrobert 
2556*404b540aSrobert int
2557*404b540aSrobert computed_jump_p (rtx insn)
2558*404b540aSrobert {
2559*404b540aSrobert   int i;
2560*404b540aSrobert   if (JUMP_P (insn))
2561*404b540aSrobert     {
2562*404b540aSrobert       rtx pat = PATTERN (insn);
2563*404b540aSrobert 
2564*404b540aSrobert       if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2565*404b540aSrobert 	return 0;
2566*404b540aSrobert       else if (GET_CODE (pat) == PARALLEL)
2567*404b540aSrobert 	{
2568*404b540aSrobert 	  int len = XVECLEN (pat, 0);
2569*404b540aSrobert 	  int has_use_labelref = 0;
2570*404b540aSrobert 
2571*404b540aSrobert 	  for (i = len - 1; i >= 0; i--)
2572*404b540aSrobert 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2573*404b540aSrobert 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2574*404b540aSrobert 		    == LABEL_REF))
2575*404b540aSrobert 	      has_use_labelref = 1;
2576*404b540aSrobert 
2577*404b540aSrobert 	  if (! has_use_labelref)
2578*404b540aSrobert 	    for (i = len - 1; i >= 0; i--)
2579*404b540aSrobert 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2580*404b540aSrobert 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2581*404b540aSrobert 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2582*404b540aSrobert 		return 1;
2583*404b540aSrobert 	}
2584*404b540aSrobert       else if (GET_CODE (pat) == SET
2585*404b540aSrobert 	       && SET_DEST (pat) == pc_rtx
2586*404b540aSrobert 	       && computed_jump_p_1 (SET_SRC (pat)))
2587*404b540aSrobert 	return 1;
2588*404b540aSrobert     }
2589*404b540aSrobert   return 0;
2590*404b540aSrobert }
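
/* A minimal classification sketch (hypothetical caller): together the
   recognizers above separate the jump kinds a pass usually cares about.

     if (tablejump_p (insn, NULL, NULL))
       handle_table_jump ();
     else if (computed_jump_p (insn))
       handle_computed_jump ();
     else if (JUMP_P (insn))
       handle_direct_jump ();

   The handle_* functions are hypothetical stand-ins.  */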
2591*404b540aSrobert 
2592*404b540aSrobert /* Optimized inner loop of for_each_rtx, trying to avoid useless recursive
2593*404b540aSrobert    calls.  Processes the subexpressions of EXP, starting at operand N, and passes them to F.  */
2594*404b540aSrobert static int
2595*404b540aSrobert for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2596*404b540aSrobert {
2597*404b540aSrobert   int result, i, j;
2598*404b540aSrobert   const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2599*404b540aSrobert   rtx *x;
2600*404b540aSrobert 
2601*404b540aSrobert   for (; format[n] != '\0'; n++)
2602*404b540aSrobert     {
2603*404b540aSrobert       switch (format[n])
2604*404b540aSrobert 	{
2605*404b540aSrobert 	case 'e':
2606*404b540aSrobert 	  /* Call F on X.  */
2607*404b540aSrobert 	  x = &XEXP (exp, n);
2608*404b540aSrobert 	  result = (*f) (x, data);
2609*404b540aSrobert 	  if (result == -1)
2610*404b540aSrobert 	    /* Do not traverse sub-expressions.  */
2611*404b540aSrobert 	    continue;
2612*404b540aSrobert 	  else if (result != 0)
2613*404b540aSrobert 	    /* Stop the traversal.  */
2614*404b540aSrobert 	    return result;
2615*404b540aSrobert 
2616*404b540aSrobert 	  if (*x == NULL_RTX)
2617*404b540aSrobert 	    /* There are no sub-expressions.  */
2618*404b540aSrobert 	    continue;
2619*404b540aSrobert 
2620*404b540aSrobert 	  i = non_rtx_starting_operands[GET_CODE (*x)];
2621*404b540aSrobert 	  if (i >= 0)
2622*404b540aSrobert 	    {
2623*404b540aSrobert 	      result = for_each_rtx_1 (*x, i, f, data);
2624*404b540aSrobert 	      if (result != 0)
2625*404b540aSrobert 		return result;
2626*404b540aSrobert 	    }
2627*404b540aSrobert 	  break;
2628*404b540aSrobert 
2629*404b540aSrobert 	case 'V':
2630*404b540aSrobert 	case 'E':
2631*404b540aSrobert 	  if (XVEC (exp, n) == 0)
2632*404b540aSrobert 	    continue;
2633*404b540aSrobert 	  for (j = 0; j < XVECLEN (exp, n); ++j)
2634*404b540aSrobert 	    {
2635*404b540aSrobert 	      /* Call F on X.  */
2636*404b540aSrobert 	      x = &XVECEXP (exp, n, j);
2637*404b540aSrobert 	      result = (*f) (x, data);
2638*404b540aSrobert 	      if (result == -1)
2639*404b540aSrobert 		/* Do not traverse sub-expressions.  */
2640*404b540aSrobert 		continue;
2641*404b540aSrobert 	      else if (result != 0)
2642*404b540aSrobert 		/* Stop the traversal.  */
2643*404b540aSrobert 		return result;
2644*404b540aSrobert 
2645*404b540aSrobert 	      if (*x == NULL_RTX)
2646*404b540aSrobert 		/* There are no sub-expressions.  */
2647*404b540aSrobert 		continue;
2648*404b540aSrobert 
2649*404b540aSrobert 	      i = non_rtx_starting_operands[GET_CODE (*x)];
2650*404b540aSrobert 	      if (i >= 0)
2651*404b540aSrobert 		{
2652*404b540aSrobert 		  result = for_each_rtx_1 (*x, i, f, data);
2653*404b540aSrobert 		  if (result != 0)
2654*404b540aSrobert 		    return result;
2655*404b540aSrobert 	        }
2656*404b540aSrobert 	    }
2657*404b540aSrobert 	  break;
2658*404b540aSrobert 
2659*404b540aSrobert 	default:
2660*404b540aSrobert 	  /* Nothing to do.  */
2661*404b540aSrobert 	  break;
2662*404b540aSrobert 	}
2663*404b540aSrobert     }
2664*404b540aSrobert 
2665*404b540aSrobert   return 0;
2666*404b540aSrobert }
2667*404b540aSrobert 
2668*404b540aSrobert /* Traverse X via depth-first search, calling F for each
2669*404b540aSrobert    sub-expression (including X itself).  F is also passed the DATA.
2670*404b540aSrobert    If F returns -1, do not traverse sub-expressions, but continue
2671*404b540aSrobert    traversing the rest of the tree.  If F ever returns any other
2672*404b540aSrobert    nonzero value, stop the traversal, and return the value returned
2673*404b540aSrobert    by F.  Otherwise, return 0.  This function does not traverse inside
2674*404b540aSrobert    tree structure that contains RTX_EXPRs, or into sub-expressions
2675*404b540aSrobert    whose format code is `0' since it is not known whether or not those
2676*404b540aSrobert    codes are actually RTL.
2677*404b540aSrobert 
2678*404b540aSrobert    This routine is very general, and could (should?) be used to
2679*404b540aSrobert    implement many of the other routines in this file.  */
2680*404b540aSrobert 
2681*404b540aSrobert int
2682*404b540aSrobert for_each_rtx (rtx *x, rtx_function f, void *data)
2683*404b540aSrobert {
2684*404b540aSrobert   int result;
2685*404b540aSrobert   int i;
2686*404b540aSrobert 
2687*404b540aSrobert   /* Call F on X.  */
2688*404b540aSrobert   result = (*f) (x, data);
2689*404b540aSrobert   if (result == -1)
2690*404b540aSrobert     /* Do not traverse sub-expressions.  */
2691*404b540aSrobert     return 0;
2692*404b540aSrobert   else if (result != 0)
2693*404b540aSrobert     /* Stop the traversal.  */
2694*404b540aSrobert     return result;
2695*404b540aSrobert 
2696*404b540aSrobert   if (*x == NULL_RTX)
2697*404b540aSrobert     /* There are no sub-expressions.  */
2698*404b540aSrobert     return 0;
2699*404b540aSrobert 
2700*404b540aSrobert   i = non_rtx_starting_operands[GET_CODE (*x)];
2701*404b540aSrobert   if (i < 0)
2702*404b540aSrobert     return 0;
2703*404b540aSrobert 
2704*404b540aSrobert   return for_each_rtx_1 (*x, i, f, data);
2705*404b540aSrobert }
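
/* A minimal callback sketch for for_each_rtx (hypothetical, not part
   of this file): count the MEMs in an expression.  Returning -1 from
   the callback would skip sub-expressions and any other nonzero value
   would abort the walk, so a plain counter always returns 0.

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && MEM_P (*x))
	 ++*(int *) data;
       return 0;
     }

     static int
     count_mems (rtx x)
     {
       int n = 0;
       for_each_rtx (&x, count_mems_1, &n);
       return n;
     }
*/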
2706*404b540aSrobert 
2707*404b540aSrobert 
2708*404b540aSrobert /* Searches X for any reference to REGNO, returning the rtx of the
2709*404b540aSrobert    reference found if any.  Otherwise, returns NULL_RTX.  */
2710*404b540aSrobert 
2711*404b540aSrobert rtx
2712*404b540aSrobert regno_use_in (unsigned int regno, rtx x)
2713*404b540aSrobert {
2714*404b540aSrobert   const char *fmt;
2715*404b540aSrobert   int i, j;
2716*404b540aSrobert   rtx tem;
2717*404b540aSrobert 
2718*404b540aSrobert   if (REG_P (x) && REGNO (x) == regno)
2719*404b540aSrobert     return x;
2720*404b540aSrobert 
2721*404b540aSrobert   fmt = GET_RTX_FORMAT (GET_CODE (x));
2722*404b540aSrobert   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2723*404b540aSrobert     {
2724*404b540aSrobert       if (fmt[i] == 'e')
2725*404b540aSrobert 	{
2726*404b540aSrobert 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
2727*404b540aSrobert 	    return tem;
2728*404b540aSrobert 	}
2729*404b540aSrobert       else if (fmt[i] == 'E')
2730*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2731*404b540aSrobert 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2732*404b540aSrobert 	    return tem;
2733*404b540aSrobert     }
2734*404b540aSrobert 
2735*404b540aSrobert   return NULL_RTX;
2736*404b540aSrobert }
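
/* A minimal usage sketch (hypothetical caller): test whether an insn
   still mentions hard register REGNO before reusing it.

     if (regno_use_in (regno, PATTERN (insn)) != NULL_RTX)
       ...	REGNO appears somewhere in the pattern

   Note the match is on REGNO (x) alone, so it does not account for a
   multi-register mode starting at a lower regno overlapping REGNO.  */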
2737*404b540aSrobert 
2738*404b540aSrobert /* Return a value indicating whether OP, an operand of a commutative
2739*404b540aSrobert    operation, is preferred as the first or second operand.  The higher
2740*404b540aSrobert    the value, the stronger the preference for being the first operand.
2741*404b540aSrobert    We use negative values to indicate a preference for the first operand
2742*404b540aSrobert    and positive values for the second operand.  */
2743*404b540aSrobert 
2744*404b540aSrobert int
2745*404b540aSrobert commutative_operand_precedence (rtx op)
2746*404b540aSrobert {
2747*404b540aSrobert   enum rtx_code code = GET_CODE (op);
2748*404b540aSrobert 
2749*404b540aSrobert   /* Constants always become the second operand.  Prefer "nice" constants.  */
2750*404b540aSrobert   if (code == CONST_INT)
2751*404b540aSrobert     return -7;
2752*404b540aSrobert   if (code == CONST_DOUBLE)
2753*404b540aSrobert     return -6;
2754*404b540aSrobert   op = avoid_constant_pool_reference (op);
2755*404b540aSrobert   code = GET_CODE (op);
2756*404b540aSrobert 
2757*404b540aSrobert   switch (GET_RTX_CLASS (code))
2758*404b540aSrobert     {
2759*404b540aSrobert     case RTX_CONST_OBJ:
2760*404b540aSrobert       if (code == CONST_INT)
2761*404b540aSrobert         return -5;
2762*404b540aSrobert       if (code == CONST_DOUBLE)
2763*404b540aSrobert         return -4;
2764*404b540aSrobert       return -3;
2765*404b540aSrobert 
2766*404b540aSrobert     case RTX_EXTRA:
2767*404b540aSrobert       /* SUBREGs of objects should come second.  */
2768*404b540aSrobert       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2769*404b540aSrobert         return -2;
2770*404b540aSrobert 
2771*404b540aSrobert       if (!CONSTANT_P (op))
2772*404b540aSrobert         return 0;
2773*404b540aSrobert       else
2774*404b540aSrobert 	/* As for RTX_CONST_OBJ.  */
2775*404b540aSrobert 	return -3;
2776*404b540aSrobert 
2777*404b540aSrobert     case RTX_OBJ:
2778*404b540aSrobert       /* Complex expressions should come first, so decrease the priority
2779*404b540aSrobert          of objects.  */
2780*404b540aSrobert       return -1;
2781*404b540aSrobert 
2782*404b540aSrobert     case RTX_COMM_ARITH:
2783*404b540aSrobert       /* Prefer operands that are themselves commutative to be first.
2784*404b540aSrobert          This helps to make things linear.  In particular,
2785*404b540aSrobert          (and (and (reg) (reg)) (not (reg))) is canonical.  */
2786*404b540aSrobert       return 4;
2787*404b540aSrobert 
2788*404b540aSrobert     case RTX_BIN_ARITH:
2789*404b540aSrobert       /* If only one operand is a binary expression, it will be the first
2790*404b540aSrobert          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
2791*404b540aSrobert          is canonical, although it will usually be further simplified.  */
2792*404b540aSrobert       return 2;
2793*404b540aSrobert 
2794*404b540aSrobert     case RTX_UNARY:
2795*404b540aSrobert       /* Then prefer NEG and NOT.  */
2796*404b540aSrobert       if (code == NEG || code == NOT)
2797*404b540aSrobert         return 1;
2798*404b540aSrobert 
2799*404b540aSrobert     default:
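      /* Any other unary operation falls through to the default case.  */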
2800*404b540aSrobert       return 0;
2801*404b540aSrobert     }
2802*404b540aSrobert }
2803*404b540aSrobert 
2804*404b540aSrobert /* Return 1 iff it is necessary to swap the operands of a commutative
2805*404b540aSrobert    operation in order to canonicalize the expression.  */
2806*404b540aSrobert 
2807*404b540aSrobert int
2808*404b540aSrobert swap_commutative_operands_p (rtx x, rtx y)
2809*404b540aSrobert {
2810*404b540aSrobert   return (commutative_operand_precedence (x)
2811*404b540aSrobert 	  < commutative_operand_precedence (y));
2812*404b540aSrobert }
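
/* A minimal usage sketch (hypothetical caller): the usual idiom, as in
   simplify-rtx.c, is to swap the operands in place when the predicate
   says so, so that the higher-precedence operand comes first.

     if (swap_commutative_operands_p (op0, op1))
       {
	 rtx tem = op0;
	 op0 = op1;
	 op1 = tem;
       }
*/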
2813*404b540aSrobert 
2814*404b540aSrobert /* Return 1 if X is an autoincrement side effect and the register is
2815*404b540aSrobert    not the stack pointer.  */
2816*404b540aSrobert int
2817*404b540aSrobert auto_inc_p (rtx x)
2818*404b540aSrobert {
2819*404b540aSrobert   switch (GET_CODE (x))
2820*404b540aSrobert     {
2821*404b540aSrobert     case PRE_INC:
2822*404b540aSrobert     case POST_INC:
2823*404b540aSrobert     case PRE_DEC:
2824*404b540aSrobert     case POST_DEC:
2825*404b540aSrobert     case PRE_MODIFY:
2826*404b540aSrobert     case POST_MODIFY:
2827*404b540aSrobert       /* There are no REG_INC notes for SP.  */
2828*404b540aSrobert       if (XEXP (x, 0) != stack_pointer_rtx)
2829*404b540aSrobert 	return 1;
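      /* An autoincrement of the stack pointer falls through and yields 0.  */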
2830*404b540aSrobert     default:
2831*404b540aSrobert       break;
2832*404b540aSrobert     }
2833*404b540aSrobert   return 0;
2834*404b540aSrobert }
2835*404b540aSrobert 
2836*404b540aSrobert /* Return nonzero if IN contains an rtl expression whose address is LOC.  */
2837*404b540aSrobert int
2838*404b540aSrobert loc_mentioned_in_p (rtx *loc, rtx in)
2839*404b540aSrobert {
2840*404b540aSrobert   enum rtx_code code = GET_CODE (in);
2841*404b540aSrobert   const char *fmt = GET_RTX_FORMAT (code);
2842*404b540aSrobert   int i, j;
2843*404b540aSrobert 
2844*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2845*404b540aSrobert     {
2846*404b540aSrobert       if (loc == &in->u.fld[i].rt_rtx)
2847*404b540aSrobert 	return 1;
2848*404b540aSrobert       if (fmt[i] == 'e')
2849*404b540aSrobert 	{
2850*404b540aSrobert 	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
2851*404b540aSrobert 	    return 1;
2852*404b540aSrobert 	}
2853*404b540aSrobert       else if (fmt[i] == 'E')
2854*404b540aSrobert 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2855*404b540aSrobert 	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2856*404b540aSrobert 	    return 1;
2857*404b540aSrobert     }
2858*404b540aSrobert   return 0;
2859*404b540aSrobert }
2860*404b540aSrobert 
2861*404b540aSrobert /* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
2862*404b540aSrobert    and SUBREG_BYTE, return the bit offset where the subreg begins
2863*404b540aSrobert    (counting from the least significant bit of the operand).  */
2864*404b540aSrobert 
2865*404b540aSrobert unsigned int
2866*404b540aSrobert subreg_lsb_1 (enum machine_mode outer_mode,
2867*404b540aSrobert 	      enum machine_mode inner_mode,
2868*404b540aSrobert 	      unsigned int subreg_byte)
2869*404b540aSrobert {
2870*404b540aSrobert   unsigned int bitpos;
2871*404b540aSrobert   unsigned int byte;
2872*404b540aSrobert   unsigned int word;
2873*404b540aSrobert 
2874*404b540aSrobert   /* A paradoxical subreg begins at bit position 0.  */
2875*404b540aSrobert   if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2876*404b540aSrobert     return 0;
2877*404b540aSrobert 
2878*404b540aSrobert   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2879*404b540aSrobert     /* If the subreg crosses a word boundary ensure that
2880*404b540aSrobert        it also begins and ends on a word boundary.  */
2881*404b540aSrobert     gcc_assert (!((subreg_byte % UNITS_PER_WORD
2882*404b540aSrobert 		  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2883*404b540aSrobert 		  && (subreg_byte % UNITS_PER_WORD
2884*404b540aSrobert 		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2885*404b540aSrobert 
2886*404b540aSrobert   if (WORDS_BIG_ENDIAN)
2887*404b540aSrobert     word = (GET_MODE_SIZE (inner_mode)
2888*404b540aSrobert 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2889*404b540aSrobert   else
2890*404b540aSrobert     word = subreg_byte / UNITS_PER_WORD;
2891*404b540aSrobert   bitpos = word * BITS_PER_WORD;
2892*404b540aSrobert 
2893*404b540aSrobert   if (BYTES_BIG_ENDIAN)
2894*404b540aSrobert     byte = (GET_MODE_SIZE (inner_mode)
2895*404b540aSrobert 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2896*404b540aSrobert   else
2897*404b540aSrobert     byte = subreg_byte % UNITS_PER_WORD;
2898*404b540aSrobert   bitpos += byte * BITS_PER_UNIT;
2899*404b540aSrobert 
2900*404b540aSrobert   return bitpos;
2901*404b540aSrobert }
2902*404b540aSrobert 
2903*404b540aSrobert /* Given a subreg X, return the bit offset where the subreg begins
2904*404b540aSrobert    (counting from the least significant bit of the reg).  */
2905*404b540aSrobert 
2906*404b540aSrobert unsigned int
2907*404b540aSrobert subreg_lsb (rtx x)
2908*404b540aSrobert {
2909*404b540aSrobert   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2910*404b540aSrobert 		       SUBREG_BYTE (x));
2911*404b540aSrobert }
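
/* A worked example, assuming a little-endian target with 32-bit words
   (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN == 0, UNITS_PER_WORD == 4):
   for (subreg:SI (reg:DI r) 4), subreg_lsb_1 (SImode, DImode, 4)
   computes word == 4 / 4 == 1 and byte == 4 % 4 == 0, giving bit
   offset 1 * BITS_PER_WORD == 32; the subreg names the high half of
   the DImode value.  */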
2912*404b540aSrobert 
2913*404b540aSrobert /* This function returns the regno offset of a subreg expression.
2914*404b540aSrobert    xregno - A regno of an inner hard subreg_reg (or what will become one).
2915*404b540aSrobert    xmode  - The mode of xregno.
2916*404b540aSrobert    offset - The byte offset.
2917*404b540aSrobert    ymode  - The mode of a top level SUBREG (or what may become one).
2918*404b540aSrobert    RETURN - The regno offset which would be used.  */
2919*404b540aSrobert unsigned int
2920*404b540aSrobert subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2921*404b540aSrobert 		     unsigned int offset, enum machine_mode ymode)
2922*404b540aSrobert {
2923*404b540aSrobert   int nregs_xmode, nregs_ymode;
2924*404b540aSrobert   int mode_multiple, nregs_multiple;
2925*404b540aSrobert   int y_offset;
2926*404b540aSrobert 
2927*404b540aSrobert   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2928*404b540aSrobert 
2929*404b540aSrobert   /* Adjust nregs_xmode to allow for 'holes'.  */
2930*404b540aSrobert   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2931*404b540aSrobert     nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2932*404b540aSrobert   else
2933*404b540aSrobert     nregs_xmode = hard_regno_nregs[xregno][xmode];
2934*404b540aSrobert 
2935*404b540aSrobert   nregs_ymode = hard_regno_nregs[xregno][ymode];
2936*404b540aSrobert 
2937*404b540aSrobert   /* If this is a big endian paradoxical subreg, which uses more actual
2938*404b540aSrobert      hard registers than the original register, we must return a negative
2939*404b540aSrobert      offset so that we find the proper highpart of the register.  */
2940*404b540aSrobert   if (offset == 0
2941*404b540aSrobert       && nregs_ymode > nregs_xmode
2942*404b540aSrobert       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2943*404b540aSrobert 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2944*404b540aSrobert     return nregs_xmode - nregs_ymode;
2945*404b540aSrobert 
2946*404b540aSrobert   if (offset == 0 || nregs_xmode == nregs_ymode)
2947*404b540aSrobert     return 0;
2948*404b540aSrobert 
2949*404b540aSrobert   /* Size of ymode must not be greater than the size of xmode.  */
2950*404b540aSrobert   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2951*404b540aSrobert   gcc_assert (mode_multiple != 0);
2952*404b540aSrobert 
2953*404b540aSrobert   y_offset = offset / GET_MODE_SIZE (ymode);
2954*404b540aSrobert   nregs_multiple =  nregs_xmode / nregs_ymode;
2955*404b540aSrobert   return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2956*404b540aSrobert }
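
/* A worked example, assuming a little-endian target where DImode
   occupies two SImode hard registers: for subreg_regno_offset (R,
   DImode, 4, SImode) we get nregs_xmode == 2, nregs_ymode == 1,
   mode_multiple == 8 / 4 == 2, nregs_multiple == 2 and y_offset == 1,
   so the result is (1 / (2 / 2)) * 1 == 1; the subreg lives in hard
   register R + 1.  */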
2957*404b540aSrobert 
2958*404b540aSrobert /* This function returns true when the offset is representable via
2959*404b540aSrobert    subreg_offset in the given regno.
2960*404b540aSrobert    xregno - A regno of an inner hard subreg_reg (or what will become one).
2961*404b540aSrobert    xmode  - The mode of xregno.
2962*404b540aSrobert    offset - The byte offset.
2963*404b540aSrobert    ymode  - The mode of a top level SUBREG (or what may become one).
2964*404b540aSrobert    RETURN - Whether the offset is representable.  */
2965*404b540aSrobert bool
2966*404b540aSrobert subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2967*404b540aSrobert 			       unsigned int offset, enum machine_mode ymode)
2968*404b540aSrobert {
2969*404b540aSrobert   int nregs_xmode, nregs_ymode;
2970*404b540aSrobert   int mode_multiple, nregs_multiple;
2971*404b540aSrobert   int y_offset;
2972*404b540aSrobert   int regsize_xmode, regsize_ymode;
2973*404b540aSrobert 
2974*404b540aSrobert   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2975*404b540aSrobert 
2976*404b540aSrobert   /* If there are holes in a non-scalar mode in registers, we expect
2977*404b540aSrobert      that it is made up of its units concatenated together.  */
2978*404b540aSrobert   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2979*404b540aSrobert     {
2980*404b540aSrobert       enum machine_mode xmode_unit;
2981*404b540aSrobert 
2982*404b540aSrobert       nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2983*404b540aSrobert       if (GET_MODE_INNER (xmode) == VOIDmode)
2984*404b540aSrobert 	xmode_unit = xmode;
2985*404b540aSrobert       else
2986*404b540aSrobert 	xmode_unit = GET_MODE_INNER (xmode);
2987*404b540aSrobert       gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
2988*404b540aSrobert       gcc_assert (nregs_xmode
2989*404b540aSrobert 		  == (GET_MODE_NUNITS (xmode)
2990*404b540aSrobert 		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
2991*404b540aSrobert       gcc_assert (hard_regno_nregs[xregno][xmode]
2992*404b540aSrobert 		  == (hard_regno_nregs[xregno][xmode_unit]
2993*404b540aSrobert 		      * GET_MODE_NUNITS (xmode)));
2994*404b540aSrobert 
2995*404b540aSrobert       /* You can only ask for a SUBREG of a value with holes in the middle
2996*404b540aSrobert 	 if you don't cross the holes.  (Such a SUBREG should be done by
2997*404b540aSrobert 	 picking a different register class, or doing it in memory if
2998*404b540aSrobert 	 necessary.)  An example of a value with holes is XCmode on 32-bit
2999*404b540aSrobert 	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3000*404b540aSrobert 	 3 for each part, but in memory it's two 128-bit parts.
3001*404b540aSrobert 	 Padding is assumed to be at the end (not necessarily the 'high part')
3002*404b540aSrobert 	 of each unit.  */
3003*404b540aSrobert       if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3004*404b540aSrobert 	   < GET_MODE_NUNITS (xmode))
3005*404b540aSrobert 	  && (offset / GET_MODE_SIZE (xmode_unit)
3006*404b540aSrobert 	      != ((offset + GET_MODE_SIZE (ymode) - 1)
3007*404b540aSrobert 		  / GET_MODE_SIZE (xmode_unit))))
3008*404b540aSrobert 	return false;
3009*404b540aSrobert     }
3010*404b540aSrobert   else
3011*404b540aSrobert     nregs_xmode = hard_regno_nregs[xregno][xmode];
3012*404b540aSrobert 
3013*404b540aSrobert   nregs_ymode = hard_regno_nregs[xregno][ymode];
3014*404b540aSrobert 
3015*404b540aSrobert   /* Paradoxical subregs are otherwise valid.  */
3016*404b540aSrobert   if (offset == 0
3017*404b540aSrobert       && nregs_ymode > nregs_xmode
3018*404b540aSrobert       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3019*404b540aSrobert 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3020*404b540aSrobert     return true;
3021*404b540aSrobert 
3022*404b540aSrobert   /* If registers store different numbers of bits in the different
3023*404b540aSrobert      modes, we cannot generally form this subreg.  */
3024*404b540aSrobert   regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3025*404b540aSrobert   regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3026*404b540aSrobert   if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
3027*404b540aSrobert     return false;
3028*404b540aSrobert   if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
3029*404b540aSrobert     return false;
3030*404b540aSrobert 
3031*404b540aSrobert   /* Lowpart subregs are otherwise valid.  */
3032*404b540aSrobert   if (offset == subreg_lowpart_offset (ymode, xmode))
3033*404b540aSrobert     return true;
3034*404b540aSrobert 
3035*404b540aSrobert   /* This should always pass, otherwise we don't know how to verify
3036*404b540aSrobert      the constraint.  These conditions may be relaxed but
3037*404b540aSrobert      subreg_regno_offset would need to be redesigned.  */
3038*404b540aSrobert   gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3039*404b540aSrobert   gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3040*404b540aSrobert 
3041*404b540aSrobert   /* The XMODE value can be seen as a vector of NREGS_XMODE
3042*404b540aSrobert      values.  The subreg must represent the lowpart of one such field.
3043*404b540aSrobert      Compute which field it is.  */
3044*404b540aSrobert   offset -= subreg_lowpart_offset (ymode,
3045*404b540aSrobert 				   mode_for_size (GET_MODE_BITSIZE (xmode)
3046*404b540aSrobert 						  / nregs_xmode,
3047*404b540aSrobert 						  MODE_INT, 0));
3048*404b540aSrobert 
3049*404b540aSrobert   /* Size of ymode must not be greater than the size of xmode.  */
3050*404b540aSrobert   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3051*404b540aSrobert   gcc_assert (mode_multiple != 0);
3052*404b540aSrobert 
3053*404b540aSrobert   y_offset = offset / GET_MODE_SIZE (ymode);
3054*404b540aSrobert   nregs_multiple =  nregs_xmode / nregs_ymode;
3055*404b540aSrobert 
3056*404b540aSrobert   gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3057*404b540aSrobert   gcc_assert ((mode_multiple % nregs_multiple) == 0);
3058*404b540aSrobert 
3059*404b540aSrobert   return (!(y_offset % (mode_multiple / nregs_multiple)));
3060*404b540aSrobert }
3061*404b540aSrobert 
3062*404b540aSrobert /* Return the final regno that a subreg expression refers to.  */
3063*404b540aSrobert unsigned int
3064*404b540aSrobert subreg_regno (rtx x)
3065*404b540aSrobert {
3066*404b540aSrobert   unsigned int ret;
3067*404b540aSrobert   rtx subreg = SUBREG_REG (x);
3068*404b540aSrobert   int regno = REGNO (subreg);
3069*404b540aSrobert 
3070*404b540aSrobert   ret = regno + subreg_regno_offset (regno,
3071*404b540aSrobert 				     GET_MODE (subreg),
3072*404b540aSrobert 				     SUBREG_BYTE (x),
3073*404b540aSrobert 				     GET_MODE (x));
3074*404b540aSrobert   return ret;
3075*404b540aSrobert }
3076*404b540aSrobert 
3077*404b540aSrobert struct parms_set_data
3078*404b540aSrobert {
3079*404b540aSrobert   int nregs;
3080*404b540aSrobert   HARD_REG_SET regs;
3081*404b540aSrobert };
3082*404b540aSrobert 
3083*404b540aSrobert /* Helper function for noticing stores to parameter registers.  */
3084*404b540aSrobert static void
3085*404b540aSrobert parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3086*404b540aSrobert {
3087*404b540aSrobert   struct parms_set_data *d = data;
3088*404b540aSrobert   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3089*404b540aSrobert       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3090*404b540aSrobert     {
3091*404b540aSrobert       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3092*404b540aSrobert       d->nregs--;
3093*404b540aSrobert     }
3094*404b540aSrobert }
3095*404b540aSrobert 
3096*404b540aSrobert /* Look backward for first parameter to be loaded.
3097*404b540aSrobert    Note that loads of all parameters will not necessarily be
3098*404b540aSrobert    found if CSE has eliminated some of them (e.g., an argument
3099*404b540aSrobert    to the outer function is passed down as a parameter).
3100*404b540aSrobert    Do not skip BOUNDARY.  */
3101*404b540aSrobert rtx
3102*404b540aSrobert find_first_parameter_load (rtx call_insn, rtx boundary)
3103*404b540aSrobert {
3104*404b540aSrobert   struct parms_set_data parm;
3105*404b540aSrobert   rtx p, before, first_set;
3106*404b540aSrobert 
3107*404b540aSrobert   /* Since different machines initialize their parameter registers
3108*404b540aSrobert      in different orders, assume nothing.  Collect the set of all
3109*404b540aSrobert      parameter registers.  */
3110*404b540aSrobert   CLEAR_HARD_REG_SET (parm.regs);
3111*404b540aSrobert   parm.nregs = 0;
3112*404b540aSrobert   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3113*404b540aSrobert     if (GET_CODE (XEXP (p, 0)) == USE
3114*404b540aSrobert 	&& REG_P (XEXP (XEXP (p, 0), 0)))
3115*404b540aSrobert       {
3116*404b540aSrobert 	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3117*404b540aSrobert 
3118*404b540aSrobert 	/* We only care about registers which can hold function
3119*404b540aSrobert 	   arguments.  */
3120*404b540aSrobert 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3121*404b540aSrobert 	  continue;
3122*404b540aSrobert 
3123*404b540aSrobert 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3124*404b540aSrobert 	parm.nregs++;
3125*404b540aSrobert       }
3126*404b540aSrobert   before = call_insn;
3127*404b540aSrobert   first_set = call_insn;
3128*404b540aSrobert 
3129*404b540aSrobert   /* Search backward for the first set of a register in this set.  */
3130*404b540aSrobert   while (parm.nregs && before != boundary)
3131*404b540aSrobert     {
3132*404b540aSrobert       before = PREV_INSN (before);
3133*404b540aSrobert 
3134*404b540aSrobert       /* It is possible that some loads got CSEed from one call to
3135*404b540aSrobert          another.  Stop in that case.  */
3136*404b540aSrobert       if (CALL_P (before))
3137*404b540aSrobert 	break;
3138*404b540aSrobert 
3139*404b540aSrobert       /* Our caller must either ensure that we will find all sets
3140*404b540aSrobert          (in case the code has not been optimized yet), or guard
3141*404b540aSrobert          against intervening labels by setting BOUNDARY to the
3142*404b540aSrobert          preceding CODE_LABEL.  */
3143*404b540aSrobert       if (LABEL_P (before))
3144*404b540aSrobert 	{
3145*404b540aSrobert 	  gcc_assert (before == boundary);
3146*404b540aSrobert 	  break;
3147*404b540aSrobert 	}
3148*404b540aSrobert 
3149*404b540aSrobert       if (INSN_P (before))
3150*404b540aSrobert 	{
3151*404b540aSrobert 	  int nregs_old = parm.nregs;
3152*404b540aSrobert 	  note_stores (PATTERN (before), parms_set, &parm);
3153*404b540aSrobert 	  /* If we found something that did not set a parameter reg,
3154*404b540aSrobert 	     we're done.  Do not keep going, as that might result
3155*404b540aSrobert 	     in hoisting an insn before the setting of a pseudo
3156*404b540aSrobert 	     that is used by the hoisted insn. */
3157*404b540aSrobert 	  if (nregs_old != parm.nregs)
3158*404b540aSrobert 	    first_set = before;
3159*404b540aSrobert 	  else
3160*404b540aSrobert 	    break;
3161*404b540aSrobert 	}
3162*404b540aSrobert     }
3163*404b540aSrobert   return first_set;
3164*404b540aSrobert }
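
/* A minimal usage sketch (hypothetical caller): a pass that wants to
   insert code "before the call" must also precede any parameter setup,
   so it starts from the insn this function returns.

     rtx before = find_first_parameter_load (call_insn, boundary);
     emit_insn_before (gen_my_insn (), before);

   Here BOUNDARY is typically the preceding CODE_LABEL, and gen_my_insn
   is a hypothetical generator.  */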
3165*404b540aSrobert 
3166*404b540aSrobert /* Return true if we should avoid inserting code between INSN and preceding
3167*404b540aSrobert    call instruction.  */
3168*404b540aSrobert 
3169*404b540aSrobert bool
3170*404b540aSrobert keep_with_call_p (rtx insn)
3171*404b540aSrobert {
3172*404b540aSrobert   rtx set;
3173*404b540aSrobert 
3174*404b540aSrobert   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3175*404b540aSrobert     {
3176*404b540aSrobert       if (REG_P (SET_DEST (set))
3177*404b540aSrobert 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3178*404b540aSrobert 	  && fixed_regs[REGNO (SET_DEST (set))]
3179*404b540aSrobert 	  && general_operand (SET_SRC (set), VOIDmode))
3180*404b540aSrobert 	return true;
3181*404b540aSrobert       if (REG_P (SET_SRC (set))
3182*404b540aSrobert 	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3183*404b540aSrobert 	  && REG_P (SET_DEST (set))
3184*404b540aSrobert 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3185*404b540aSrobert 	return true;
3186*404b540aSrobert       /* There may be a stack pop just after the call and before the store
3187*404b540aSrobert 	 of the return register.  Search for the actual store when deciding
3188*404b540aSrobert 	 if we can break or not.  */
3189*404b540aSrobert       if (SET_DEST (set) == stack_pointer_rtx)
3190*404b540aSrobert 	{
3191*404b540aSrobert 	  rtx i2 = next_nonnote_insn (insn);
3192*404b540aSrobert 	  if (i2 && keep_with_call_p (i2))
3193*404b540aSrobert 	    return true;
3194*404b540aSrobert 	}
3195*404b540aSrobert     }
3196*404b540aSrobert   return false;
3197*404b540aSrobert }
3198*404b540aSrobert 
3199*404b540aSrobert /* Return true if LABEL is a target of JUMP_INSN.  This applies only
3200*404b540aSrobert    to non-complex jumps.  That is, direct unconditional, conditional,
3201*404b540aSrobert    and tablejumps, but not computed jumps or returns.  It also does
3202*404b540aSrobert    not apply to the fallthru case of a conditional jump.  */
3203*404b540aSrobert 
3204*404b540aSrobert bool
3205*404b540aSrobert label_is_jump_target_p (rtx label, rtx jump_insn)
3206*404b540aSrobert {
3207*404b540aSrobert   rtx tmp = JUMP_LABEL (jump_insn);
3208*404b540aSrobert 
3209*404b540aSrobert   if (label == tmp)
3210*404b540aSrobert     return true;
3211*404b540aSrobert 
3212*404b540aSrobert   if (tablejump_p (jump_insn, NULL, &tmp))
3213*404b540aSrobert     {
3214*404b540aSrobert       rtvec vec = XVEC (PATTERN (tmp),
3215*404b540aSrobert 			GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3216*404b540aSrobert       int i, veclen = GET_NUM_ELEM (vec);
3217*404b540aSrobert 
3218*404b540aSrobert       for (i = 0; i < veclen; ++i)
3219*404b540aSrobert 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3220*404b540aSrobert 	  return true;
3221*404b540aSrobert     }
3222*404b540aSrobert 
3223*404b540aSrobert   return false;
3224*404b540aSrobert }
3225*404b540aSrobert 
3226*404b540aSrobert 
3227*404b540aSrobert /* Return an estimate of the cost of computing rtx X.
3228*404b540aSrobert    One use is in cse, to decide which expression to keep in the hash table.
3229*404b540aSrobert    Another is in rtl generation, to pick the cheapest way to multiply.
3230*404b540aSrobert    Other uses like the latter are expected in the future.  */
3231*404b540aSrobert 
3232*404b540aSrobert int
3233*404b540aSrobert rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3234*404b540aSrobert {
3235*404b540aSrobert   int i, j;
3236*404b540aSrobert   enum rtx_code code;
3237*404b540aSrobert   const char *fmt;
3238*404b540aSrobert   int total;
3239*404b540aSrobert 
3240*404b540aSrobert   if (x == 0)
3241*404b540aSrobert     return 0;
3242*404b540aSrobert 
3243*404b540aSrobert   /* Compute the default costs of certain things.
3244*404b540aSrobert      Note that targetm.rtx_costs can override the defaults.  */
3245*404b540aSrobert 
3246*404b540aSrobert   code = GET_CODE (x);
3247*404b540aSrobert   switch (code)
3248*404b540aSrobert     {
3249*404b540aSrobert     case MULT:
3250*404b540aSrobert       total = COSTS_N_INSNS (5);
3251*404b540aSrobert       break;
3252*404b540aSrobert     case DIV:
3253*404b540aSrobert     case UDIV:
3254*404b540aSrobert     case MOD:
3255*404b540aSrobert     case UMOD:
3256*404b540aSrobert       total = COSTS_N_INSNS (7);
3257*404b540aSrobert       break;
3258*404b540aSrobert     case USE:
3259*404b540aSrobert       /* Used in combine.c as a marker.  */
3260*404b540aSrobert       total = 0;
3261*404b540aSrobert       break;
3262*404b540aSrobert     default:
3263*404b540aSrobert       total = COSTS_N_INSNS (1);
3264*404b540aSrobert     }
3265*404b540aSrobert 
3266*404b540aSrobert   switch (code)
3267*404b540aSrobert     {
3268*404b540aSrobert     case REG:
3269*404b540aSrobert       return 0;
3270*404b540aSrobert 
3271*404b540aSrobert     case SUBREG:
3272*404b540aSrobert       total = 0;
3273*404b540aSrobert       /* If we can't tie these modes, make this expensive.  The larger
3274*404b540aSrobert 	 the mode, the more expensive it is.  */
3275*404b540aSrobert       if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3276*404b540aSrobert 	return COSTS_N_INSNS (2
3277*404b540aSrobert 			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3278*404b540aSrobert       break;
3279*404b540aSrobert 
3280*404b540aSrobert     default:
3281*404b540aSrobert       if (targetm.rtx_costs (x, code, outer_code, &total))
3282*404b540aSrobert 	return total;
3283*404b540aSrobert       break;
3284*404b540aSrobert     }
3285*404b540aSrobert 
3286*404b540aSrobert   /* Sum the costs of the sub-rtx's, plus cost of this operation,
3287*404b540aSrobert      which is already in total.  */
3288*404b540aSrobert 
3289*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
3290*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3291*404b540aSrobert     if (fmt[i] == 'e')
3292*404b540aSrobert       total += rtx_cost (XEXP (x, i), code);
3293*404b540aSrobert     else if (fmt[i] == 'E')
3294*404b540aSrobert       for (j = 0; j < XVECLEN (x, i); j++)
3295*404b540aSrobert 	total += rtx_cost (XVECEXP (x, i, j), code);
3296*404b540aSrobert 
3297*404b540aSrobert   return total;
3298*404b540aSrobert }
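
/* A minimal usage sketch (hypothetical caller): keep the cheaper of
   two equivalent expressions, much as cse does.  SET is the usual
   outer code when costing the source of an assignment, PAT being the
   insn's SET pattern here.

     if (rtx_cost (new_src, SET) < rtx_cost (old_src, SET))
       validate_change (insn, &SET_SRC (pat), new_src, 1);
*/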
3299*404b540aSrobert 
3300*404b540aSrobert /* Return the cost of address expression X.
3301*404b540aSrobert    X is expected to be a properly formed address reference.  */
3302*404b540aSrobert 
3303*404b540aSrobert int
3304*404b540aSrobert address_cost (rtx x, enum machine_mode mode)
3305*404b540aSrobert {
3306*404b540aSrobert   /* We may be asked for the cost of various unusual addresses, such as the
3307*404b540aSrobert      operands of a push instruction.  It is not worthwhile to complicate
3308*404b540aSrobert      the target hook with such cases.  */
3309*404b540aSrobert 
3310*404b540aSrobert   if (!memory_address_p (mode, x))
3311*404b540aSrobert     return 1000;
3312*404b540aSrobert 
3313*404b540aSrobert   return targetm.address_cost (x);
3314*404b540aSrobert }
3315*404b540aSrobert 
3316*404b540aSrobert /* If the target doesn't override, compute the cost as with arithmetic.  */
3317*404b540aSrobert 
3318*404b540aSrobert int
3319*404b540aSrobert default_address_cost (rtx x)
3320*404b540aSrobert {
3321*404b540aSrobert   return rtx_cost (x, MEM);
3322*404b540aSrobert }
3323*404b540aSrobert 
3324*404b540aSrobert 
3325*404b540aSrobert unsigned HOST_WIDE_INT
3326*404b540aSrobert nonzero_bits (rtx x, enum machine_mode mode)
3327*404b540aSrobert {
3328*404b540aSrobert   return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3329*404b540aSrobert }
3330*404b540aSrobert 
3331*404b540aSrobert unsigned int
3332*404b540aSrobert num_sign_bit_copies (rtx x, enum machine_mode mode)
3333*404b540aSrobert {
3334*404b540aSrobert   return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3335*404b540aSrobert }
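
/* Two minimal usage sketches (hypothetical callers).  nonzero_bits can
   prove a masking AND redundant, and num_sign_bit_copies can prove a
   sign extension redundant:

     if ((nonzero_bits (x, mode) & ~(unsigned HOST_WIDE_INT) 0xff) == 0)
       ...	(and:M x (const_int 255)) would not change X

     if (num_sign_bit_copies (x, SImode)
	 > GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (QImode))
       ...	X already looks sign-extended from QImode

   combine.c applies both kinds of reasoning when simplifying.  */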
3336*404b540aSrobert 
3337*404b540aSrobert /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3338*404b540aSrobert    It avoids exponential behavior in nonzero_bits1 when X has
3339*404b540aSrobert    identical subexpressions on the first or the second level.  */
3340*404b540aSrobert 
3341*404b540aSrobert static unsigned HOST_WIDE_INT
3342*404b540aSrobert cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3343*404b540aSrobert 		     enum machine_mode known_mode,
3344*404b540aSrobert 		     unsigned HOST_WIDE_INT known_ret)
3345*404b540aSrobert {
3346*404b540aSrobert   if (x == known_x && mode == known_mode)
3347*404b540aSrobert     return known_ret;
3348*404b540aSrobert 
3349*404b540aSrobert   /* Try to find identical subexpressions.  If found call
3350*404b540aSrobert      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3351*404b540aSrobert      precomputed value for the subexpression as KNOWN_RET.  */
3352*404b540aSrobert 
3353*404b540aSrobert   if (ARITHMETIC_P (x))
3354*404b540aSrobert     {
3355*404b540aSrobert       rtx x0 = XEXP (x, 0);
3356*404b540aSrobert       rtx x1 = XEXP (x, 1);
3357*404b540aSrobert 
3358*404b540aSrobert       /* Check the first level.  */
3359*404b540aSrobert       if (x0 == x1)
3360*404b540aSrobert 	return nonzero_bits1 (x, mode, x0, mode,
3361*404b540aSrobert 			      cached_nonzero_bits (x0, mode, known_x,
3362*404b540aSrobert 						   known_mode, known_ret));
3363*404b540aSrobert 
3364*404b540aSrobert       /* Check the second level.  */
3365*404b540aSrobert       if (ARITHMETIC_P (x0)
3366*404b540aSrobert 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3367*404b540aSrobert 	return nonzero_bits1 (x, mode, x1, mode,
3368*404b540aSrobert 			      cached_nonzero_bits (x1, mode, known_x,
3369*404b540aSrobert 						   known_mode, known_ret));
3370*404b540aSrobert 
3371*404b540aSrobert       if (ARITHMETIC_P (x1)
3372*404b540aSrobert 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3373*404b540aSrobert 	return nonzero_bits1 (x, mode, x0, mode,
3374*404b540aSrobert 			      cached_nonzero_bits (x0, mode, known_x,
3375*404b540aSrobert 						   known_mode, known_ret));
3376*404b540aSrobert     }
3377*404b540aSrobert 
3378*404b540aSrobert   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3379*404b540aSrobert }
3380*404b540aSrobert 
3381*404b540aSrobert /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3382*404b540aSrobert    We don't let nonzero_bits recur into num_sign_bit_copies, because that
3383*404b540aSrobert    is less useful.  We can't allow both, because that results in exponential
3384*404b540aSrobert    run time recursion.  There is a nullstone testcase that triggered
3385*404b540aSrobert    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
3386*404b540aSrobert #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3387*404b540aSrobert 
3388*404b540aSrobert /* Given an expression, X, compute which bits in X can be nonzero.
3389*404b540aSrobert    We don't care about bits outside of those defined in MODE.
3390*404b540aSrobert 
3391*404b540aSrobert    For most X this is simply GET_MODE_MASK (MODE), but if X is
3392*404b540aSrobert    an arithmetic operation, we can do better.  */
3393*404b540aSrobert 
3394*404b540aSrobert static unsigned HOST_WIDE_INT
3395*404b540aSrobert nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3396*404b540aSrobert 	       enum machine_mode known_mode,
3397*404b540aSrobert 	       unsigned HOST_WIDE_INT known_ret)
3398*404b540aSrobert {
3399*404b540aSrobert   unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3400*404b540aSrobert   unsigned HOST_WIDE_INT inner_nz;
3401*404b540aSrobert   enum rtx_code code;
3402*404b540aSrobert   unsigned int mode_width = GET_MODE_BITSIZE (mode);
3403*404b540aSrobert 
3404*404b540aSrobert   /* For floating-point values, assume all bits are needed.  */
3405*404b540aSrobert   if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3406*404b540aSrobert     return nonzero;
3407*404b540aSrobert 
3408*404b540aSrobert   /* If X is wider than MODE, use its mode instead.  */
3409*404b540aSrobert   if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3410*404b540aSrobert     {
3411*404b540aSrobert       mode = GET_MODE (x);
3412*404b540aSrobert       nonzero = GET_MODE_MASK (mode);
3413*404b540aSrobert       mode_width = GET_MODE_BITSIZE (mode);
3414*404b540aSrobert     }
3415*404b540aSrobert 
3416*404b540aSrobert   if (mode_width > HOST_BITS_PER_WIDE_INT)
3417*404b540aSrobert     /* Our only callers in this case look for single bit values.  So
3418*404b540aSrobert        just return the mode mask.  Those tests will then be false.  */
3419*404b540aSrobert     return nonzero;
3420*404b540aSrobert 
3421*404b540aSrobert #ifndef WORD_REGISTER_OPERATIONS
3422*404b540aSrobert   /* If MODE is wider than X, but both are a single word for both the host
3423*404b540aSrobert      and target machines, we can compute this from which bits of the
3424*404b540aSrobert      object might be nonzero in its own mode, taking into account the fact
3425*404b540aSrobert      that on many CISC machines, accessing an object in a wider mode
3426*404b540aSrobert      causes the high-order bits to become undefined.  So they are
3427*404b540aSrobert      not known to be zero.  */
3428*404b540aSrobert 
3429*404b540aSrobert   if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3430*404b540aSrobert       && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3431*404b540aSrobert       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3432*404b540aSrobert       && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3433*404b540aSrobert     {
3434*404b540aSrobert       nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3435*404b540aSrobert 				      known_x, known_mode, known_ret);
3436*404b540aSrobert       nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3437*404b540aSrobert       return nonzero;
3438*404b540aSrobert     }
3439*404b540aSrobert #endif
3440*404b540aSrobert 
3441*404b540aSrobert   code = GET_CODE (x);
3442*404b540aSrobert   switch (code)
3443*404b540aSrobert     {
3444*404b540aSrobert     case REG:
3445*404b540aSrobert #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3446*404b540aSrobert       /* If pointers extend unsigned and this is a pointer in Pmode, say that
3447*404b540aSrobert 	 all the bits above ptr_mode are known to be zero.  */
3448*404b540aSrobert       if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3449*404b540aSrobert 	  && REG_POINTER (x))
3450*404b540aSrobert 	nonzero &= GET_MODE_MASK (ptr_mode);
3451*404b540aSrobert #endif
3452*404b540aSrobert 
3453*404b540aSrobert       /* Include declared information about alignment of pointers.  */
3454*404b540aSrobert       /* ??? We don't properly preserve REG_POINTER changes across
3455*404b540aSrobert 	 pointer-to-integer casts, so we can't trust it except for
3456*404b540aSrobert 	 things that we know must be pointers.  See execute/960116-1.c.  */
3457*404b540aSrobert       if ((x == stack_pointer_rtx
3458*404b540aSrobert 	   || x == frame_pointer_rtx
3459*404b540aSrobert 	   || x == arg_pointer_rtx)
3460*404b540aSrobert 	  && REGNO_POINTER_ALIGN (REGNO (x)))
3461*404b540aSrobert 	{
3462*404b540aSrobert 	  unsigned HOST_WIDE_INT alignment
3463*404b540aSrobert 	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3464*404b540aSrobert 
3465*404b540aSrobert #ifdef PUSH_ROUNDING
3466*404b540aSrobert 	  /* If PUSH_ROUNDING is defined, it is possible for the
3467*404b540aSrobert 	     stack to be momentarily aligned only to that amount,
3468*404b540aSrobert 	     so we pick the least alignment.  */
3469*404b540aSrobert 	  if (x == stack_pointer_rtx && PUSH_ARGS)
3470*404b540aSrobert 	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3471*404b540aSrobert 			     alignment);
3472*404b540aSrobert #endif
3473*404b540aSrobert 
3474*404b540aSrobert 	  nonzero &= ~(alignment - 1);
3475*404b540aSrobert 	}
3476*404b540aSrobert 
3477*404b540aSrobert       {
3478*404b540aSrobert 	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3479*404b540aSrobert 	rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3480*404b540aSrobert 					      known_mode, known_ret,
3481*404b540aSrobert 					      &nonzero_for_hook);
3482*404b540aSrobert 
3483*404b540aSrobert 	if (new)
3484*404b540aSrobert 	  nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3485*404b540aSrobert 						   known_mode, known_ret);
3486*404b540aSrobert 
3487*404b540aSrobert 	return nonzero_for_hook;
3488*404b540aSrobert       }
3489*404b540aSrobert 
3490*404b540aSrobert     case CONST_INT:
3491*404b540aSrobert #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3492*404b540aSrobert       /* If X is negative in MODE, sign-extend the value.  */
3493*404b540aSrobert       if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3494*404b540aSrobert 	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3495*404b540aSrobert 	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3496*404b540aSrobert #endif
3497*404b540aSrobert 
3498*404b540aSrobert       return INTVAL (x);
3499*404b540aSrobert 
3500*404b540aSrobert     case MEM:
3501*404b540aSrobert #ifdef LOAD_EXTEND_OP
3502*404b540aSrobert       /* In many, if not most, RISC machines, reading a byte from memory
3503*404b540aSrobert 	 zeros the rest of the register.  Noticing that fact saves a lot
3504*404b540aSrobert 	 of extra zero-extends.  */
3505*404b540aSrobert       if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3506*404b540aSrobert 	nonzero &= GET_MODE_MASK (GET_MODE (x));
3507*404b540aSrobert #endif
3508*404b540aSrobert       break;
3509*404b540aSrobert 
3510*404b540aSrobert     case EQ:  case NE:
3511*404b540aSrobert     case UNEQ:  case LTGT:
3512*404b540aSrobert     case GT:  case GTU:  case UNGT:
3513*404b540aSrobert     case LT:  case LTU:  case UNLT:
3514*404b540aSrobert     case GE:  case GEU:  case UNGE:
3515*404b540aSrobert     case LE:  case LEU:  case UNLE:
3516*404b540aSrobert     case UNORDERED: case ORDERED:
3517*404b540aSrobert       /* If this produces an integer result, we know which bits are set.
3518*404b540aSrobert 	 Code here used to clear bits outside the mode of X, but that is
3519*404b540aSrobert 	 now done above.  */
3520*404b540aSrobert       /* Mind that MODE is the mode the caller wants to look at this
3521*404b540aSrobert 	 operation in, and not the actual operation mode.  We can wind
3522*404b540aSrobert 	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3523*404b540aSrobert 	 that describes the results of a vector compare.  */
3524*404b540aSrobert       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3525*404b540aSrobert 	  && mode_width <= HOST_BITS_PER_WIDE_INT)
3526*404b540aSrobert 	nonzero = STORE_FLAG_VALUE;
3527*404b540aSrobert       break;
3528*404b540aSrobert 
3529*404b540aSrobert     case NEG:
3530*404b540aSrobert #if 0
3531*404b540aSrobert       /* Disabled to avoid exponential mutual recursion between nonzero_bits
3532*404b540aSrobert 	 and num_sign_bit_copies.  */
3533*404b540aSrobert       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3534*404b540aSrobert 	  == GET_MODE_BITSIZE (GET_MODE (x)))
3535*404b540aSrobert 	nonzero = 1;
3536*404b540aSrobert #endif
3537*404b540aSrobert 
3538*404b540aSrobert       if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
3539*404b540aSrobert 	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3540*404b540aSrobert       break;
3541*404b540aSrobert 
3542*404b540aSrobert     case ABS:
3543*404b540aSrobert #if 0
3544*404b540aSrobert       /* Disabled to avoid exponential mutual recursion between nonzero_bits
3545*404b540aSrobert 	 and num_sign_bit_copies.  */
3546*404b540aSrobert       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3547*404b540aSrobert 	  == GET_MODE_BITSIZE (GET_MODE (x)))
3548*404b540aSrobert 	nonzero = 1;
3549*404b540aSrobert #endif
3550*404b540aSrobert       break;
3551*404b540aSrobert 
3552*404b540aSrobert     case TRUNCATE:
3553*404b540aSrobert       nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3554*404b540aSrobert 				       known_x, known_mode, known_ret)
3555*404b540aSrobert 		  & GET_MODE_MASK (mode));
3556*404b540aSrobert       break;
3557*404b540aSrobert 
3558*404b540aSrobert     case ZERO_EXTEND:
3559*404b540aSrobert       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3560*404b540aSrobert 				      known_x, known_mode, known_ret);
3561*404b540aSrobert       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3562*404b540aSrobert 	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3563*404b540aSrobert       break;
3564*404b540aSrobert 
3565*404b540aSrobert     case SIGN_EXTEND:
3566*404b540aSrobert       /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3567*404b540aSrobert 	 Otherwise, show that all the bits in the outer mode but not in
3568*404b540aSrobert 	 the inner mode may be nonzero.  */
3569*404b540aSrobert       inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3570*404b540aSrobert 				      known_x, known_mode, known_ret);
3571*404b540aSrobert       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3572*404b540aSrobert 	{
3573*404b540aSrobert 	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3574*404b540aSrobert 	  if (inner_nz
3575*404b540aSrobert 	      & (((HOST_WIDE_INT) 1
3576*404b540aSrobert 		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3577*404b540aSrobert 	    inner_nz |= (GET_MODE_MASK (mode)
3578*404b540aSrobert 			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3579*404b540aSrobert 	}
3580*404b540aSrobert 
3581*404b540aSrobert       nonzero &= inner_nz;
3582*404b540aSrobert       break;
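      /* Worked example (editorial; the register number is hypothetical):
	 for (sign_extend:SI (reg:QI 0)) where the QImode value has nonzero
	 bits 0x90, the sign bit 0x80 may be set, so every SImode bit
	 outside QImode may be a copy of it and INNER_NZ becomes 0xffffff90.
	 Had the nonzero bits been 0x70, the sign bit would be known clear
	 and the result would stay 0x70, exactly as for ZERO_EXTEND.  */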
3583*404b540aSrobert 
3584*404b540aSrobert     case AND:
3585*404b540aSrobert       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3586*404b540aSrobert 				       known_x, known_mode, known_ret)
3587*404b540aSrobert       		 & cached_nonzero_bits (XEXP (x, 1), mode,
3588*404b540aSrobert 					known_x, known_mode, known_ret);
3589*404b540aSrobert       break;
3590*404b540aSrobert 
3591*404b540aSrobert     case XOR:   case IOR:
3592*404b540aSrobert     case UMIN:  case UMAX:  case SMIN:  case SMAX:
3593*404b540aSrobert       {
3594*404b540aSrobert 	unsigned HOST_WIDE_INT nonzero0 =
3595*404b540aSrobert 	  cached_nonzero_bits (XEXP (x, 0), mode,
3596*404b540aSrobert 			       known_x, known_mode, known_ret);
3597*404b540aSrobert 
3598*404b540aSrobert 	/* Don't call nonzero_bits a second time if it cannot change
3599*404b540aSrobert 	   anything.  */
3600*404b540aSrobert 	if ((nonzero & nonzero0) != nonzero)
3601*404b540aSrobert 	  nonzero &= nonzero0
3602*404b540aSrobert       		     | cached_nonzero_bits (XEXP (x, 1), mode,
3603*404b540aSrobert 					    known_x, known_mode, known_ret);
3604*404b540aSrobert       }
3605*404b540aSrobert       break;
3606*404b540aSrobert 
3607*404b540aSrobert     case PLUS:  case MINUS:
3608*404b540aSrobert     case MULT:
3609*404b540aSrobert     case DIV:   case UDIV:
3610*404b540aSrobert     case MOD:   case UMOD:
3611*404b540aSrobert       /* We can apply the rules of arithmetic to compute the number of
3612*404b540aSrobert 	 high- and low-order zero bits of these operations.  We start by
3613*404b540aSrobert 	 computing the width (position of the highest-order nonzero bit)
3614*404b540aSrobert 	 and the number of low-order zero bits for each value.  */
3615*404b540aSrobert       {
3616*404b540aSrobert 	unsigned HOST_WIDE_INT nz0 =
3617*404b540aSrobert 	  cached_nonzero_bits (XEXP (x, 0), mode,
3618*404b540aSrobert 			       known_x, known_mode, known_ret);
3619*404b540aSrobert 	unsigned HOST_WIDE_INT nz1 =
3620*404b540aSrobert 	  cached_nonzero_bits (XEXP (x, 1), mode,
3621*404b540aSrobert 			       known_x, known_mode, known_ret);
3622*404b540aSrobert 	int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3623*404b540aSrobert 	int width0 = floor_log2 (nz0) + 1;
3624*404b540aSrobert 	int width1 = floor_log2 (nz1) + 1;
3625*404b540aSrobert 	int low0 = floor_log2 (nz0 & -nz0);
3626*404b540aSrobert 	int low1 = floor_log2 (nz1 & -nz1);
3627*404b540aSrobert 	HOST_WIDE_INT op0_maybe_minusp
3628*404b540aSrobert 	  = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3629*404b540aSrobert 	HOST_WIDE_INT op1_maybe_minusp
3630*404b540aSrobert 	  = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3631*404b540aSrobert 	unsigned int result_width = mode_width;
3632*404b540aSrobert 	int result_low = 0;
3633*404b540aSrobert 
3634*404b540aSrobert 	switch (code)
3635*404b540aSrobert 	  {
3636*404b540aSrobert 	  case PLUS:
3637*404b540aSrobert 	    result_width = MAX (width0, width1) + 1;
3638*404b540aSrobert 	    result_low = MIN (low0, low1);
3639*404b540aSrobert 	    break;
3640*404b540aSrobert 	  case MINUS:
3641*404b540aSrobert 	    result_low = MIN (low0, low1);
3642*404b540aSrobert 	    break;
3643*404b540aSrobert 	  case MULT:
3644*404b540aSrobert 	    result_width = width0 + width1;
3645*404b540aSrobert 	    result_low = low0 + low1;
3646*404b540aSrobert 	    break;
3647*404b540aSrobert 	  case DIV:
3648*404b540aSrobert 	    if (width1 == 0)
3649*404b540aSrobert 	      break;
3650*404b540aSrobert 	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
3651*404b540aSrobert 	      result_width = width0;
3652*404b540aSrobert 	    break;
3653*404b540aSrobert 	  case UDIV:
3654*404b540aSrobert 	    if (width1 == 0)
3655*404b540aSrobert 	      break;
3656*404b540aSrobert 	    result_width = width0;
3657*404b540aSrobert 	    break;
3658*404b540aSrobert 	  case MOD:
3659*404b540aSrobert 	    if (width1 == 0)
3660*404b540aSrobert 	      break;
3661*404b540aSrobert 	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
3662*404b540aSrobert 	      result_width = MIN (width0, width1);
3663*404b540aSrobert 	    result_low = MIN (low0, low1);
3664*404b540aSrobert 	    break;
3665*404b540aSrobert 	  case UMOD:
3666*404b540aSrobert 	    if (width1 == 0)
3667*404b540aSrobert 	      break;
3668*404b540aSrobert 	    result_width = MIN (width0, width1);
3669*404b540aSrobert 	    result_low = MIN (low0, low1);
3670*404b540aSrobert 	    break;
3671*404b540aSrobert 	  default:
3672*404b540aSrobert 	    gcc_unreachable ();
3673*404b540aSrobert 	  }
3674*404b540aSrobert 
3675*404b540aSrobert 	if (result_width < mode_width)
3676*404b540aSrobert 	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3677*404b540aSrobert 
3678*404b540aSrobert 	if (result_low > 0)
3679*404b540aSrobert 	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3680*404b540aSrobert 
3681*404b540aSrobert #ifdef POINTERS_EXTEND_UNSIGNED
3682*404b540aSrobert 	/* If pointers extend unsigned and this is an addition or subtraction
3683*404b540aSrobert 	   to a pointer in Pmode, all the bits above ptr_mode are known to be
3684*404b540aSrobert 	   zero.  */
3685*404b540aSrobert 	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3686*404b540aSrobert 	    && (code == PLUS || code == MINUS)
3687*404b540aSrobert 	    && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3688*404b540aSrobert 	  nonzero &= GET_MODE_MASK (ptr_mode);
3689*404b540aSrobert #endif
3690*404b540aSrobert       }
3691*404b540aSrobert       break;
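      /* Worked example (editorial, with made-up operand values): for a MULT
	 whose operands have nonzero bits 0x06 and 0x0c, WIDTH0 = 3,
	 WIDTH1 = 4, LOW0 = 1 and LOW1 = 2, so the product has
	 RESULT_WIDTH = 7 and RESULT_LOW = 3; NONZERO is masked down to
	 0x78.  E.g. 6 * 12 = 72 (0x48), which indeed has no bits
	 outside 0x78.  */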
3692*404b540aSrobert 
3693*404b540aSrobert     case ZERO_EXTRACT:
3694*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) == CONST_INT
3695*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3696*404b540aSrobert 	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3697*404b540aSrobert       break;
3698*404b540aSrobert 
3699*404b540aSrobert     case SUBREG:
3700*404b540aSrobert       /* If this is a SUBREG formed for a promoted variable that has
3701*404b540aSrobert 	 been zero-extended, we know that at least the high-order bits
3702*404b540aSrobert 	 are zero, though others might be too.  */
3703*404b540aSrobert 
3704*404b540aSrobert       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3705*404b540aSrobert 	nonzero = GET_MODE_MASK (GET_MODE (x))
3706*404b540aSrobert 		  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3707*404b540aSrobert 					 known_x, known_mode, known_ret);
3708*404b540aSrobert 
3709*404b540aSrobert       /* If the inner mode is a single word for both the host and target
3710*404b540aSrobert 	 machines, we can compute this from the bits of the inner
3711*404b540aSrobert 	 object that might be nonzero.  */
3712*404b540aSrobert       if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3713*404b540aSrobert 	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3714*404b540aSrobert 	      <= HOST_BITS_PER_WIDE_INT))
3715*404b540aSrobert 	{
3716*404b540aSrobert 	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3717*404b540aSrobert 					  known_x, known_mode, known_ret);
3718*404b540aSrobert 
3719*404b540aSrobert #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3720*404b540aSrobert 	  /* If this is a typical RISC machine, we only have to worry
3721*404b540aSrobert 	     about the way loads are extended.  */
3722*404b540aSrobert 	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3723*404b540aSrobert 	       ? (((nonzero
3724*404b540aSrobert 		    & (((unsigned HOST_WIDE_INT) 1
3725*404b540aSrobert 			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3726*404b540aSrobert 		   != 0))
3727*404b540aSrobert 	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3728*404b540aSrobert 	      || !MEM_P (SUBREG_REG (x)))
3729*404b540aSrobert #endif
3730*404b540aSrobert 	    {
3731*404b540aSrobert 	      /* On many CISC machines, accessing an object in a wider mode
3732*404b540aSrobert 		 causes the high-order bits to become undefined.  So they are
3733*404b540aSrobert 		 not known to be zero.  */
3734*404b540aSrobert 	      if (GET_MODE_SIZE (GET_MODE (x))
3735*404b540aSrobert 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3736*404b540aSrobert 		nonzero |= (GET_MODE_MASK (GET_MODE (x))
3737*404b540aSrobert 			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3738*404b540aSrobert 	    }
3739*404b540aSrobert 	}
3740*404b540aSrobert       break;
3741*404b540aSrobert 
3742*404b540aSrobert     case ASHIFTRT:
3743*404b540aSrobert     case LSHIFTRT:
3744*404b540aSrobert     case ASHIFT:
3745*404b540aSrobert     case ROTATE:
3746*404b540aSrobert       /* The nonzero bits are in two classes: any bits within MODE
3747*404b540aSrobert 	 that aren't in GET_MODE (x) are always significant.  The rest of the
3748*404b540aSrobert 	 nonzero bits are those that are significant in the operand of
3749*404b540aSrobert 	 the shift when shifted the appropriate number of bits.  This
3750*404b540aSrobert 	 shows that high-order bits are cleared by the right shift and
3751*404b540aSrobert 	 low-order bits by left shifts.  */
3752*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) == CONST_INT
3753*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) >= 0
3754*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3755*404b540aSrobert 	{
3756*404b540aSrobert 	  enum machine_mode inner_mode = GET_MODE (x);
3757*404b540aSrobert 	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
3758*404b540aSrobert 	  int count = INTVAL (XEXP (x, 1));
3759*404b540aSrobert 	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3760*404b540aSrobert 	  unsigned HOST_WIDE_INT op_nonzero =
3761*404b540aSrobert 	    cached_nonzero_bits (XEXP (x, 0), mode,
3762*404b540aSrobert 				 known_x, known_mode, known_ret);
3763*404b540aSrobert 	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3764*404b540aSrobert 	  unsigned HOST_WIDE_INT outer = 0;
3765*404b540aSrobert 
3766*404b540aSrobert 	  if (mode_width > width)
3767*404b540aSrobert 	    outer = (op_nonzero & nonzero & ~mode_mask);
3768*404b540aSrobert 
3769*404b540aSrobert 	  if (code == LSHIFTRT)
3770*404b540aSrobert 	    inner >>= count;
3771*404b540aSrobert 	  else if (code == ASHIFTRT)
3772*404b540aSrobert 	    {
3773*404b540aSrobert 	      inner >>= count;
3774*404b540aSrobert 
3775*404b540aSrobert 	      /* If the sign bit may have been nonzero before the shift, we
3776*404b540aSrobert 		 need to mark all the places it could have been copied to
3777*404b540aSrobert 		 by the shift as possibly nonzero.  */
3778*404b540aSrobert 	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3779*404b540aSrobert 		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3780*404b540aSrobert 	    }
3781*404b540aSrobert 	  else if (code == ASHIFT)
3782*404b540aSrobert 	    inner <<= count;
3783*404b540aSrobert 	  else
3784*404b540aSrobert 	    inner = ((inner << (count % width)
3785*404b540aSrobert 		      | (inner >> (width - (count % width)))) & mode_mask);
3786*404b540aSrobert 
3787*404b540aSrobert 	  nonzero &= (outer | inner);
3788*404b540aSrobert 	}
3789*404b540aSrobert       break;
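      /* Worked example (editorial): for (lshiftrt:SI x (const_int 4)) with
	 operand nonzero bits 0xff00, INNER becomes 0xff00 >> 4 = 0x0ff0.
	 For ASHIFTRT with the same operand, the bit at position
	 32 - 1 - 4 = 27 is clear in the shifted value, so no sign copies
	 need to be added back into the top four bit positions.  */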
3790*404b540aSrobert 
3791*404b540aSrobert     case FFS:
3792*404b540aSrobert     case POPCOUNT:
3793*404b540aSrobert       /* This is at most the number of bits in the mode.  */
3794*404b540aSrobert       nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3795*404b540aSrobert       break;
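      /* Editorial note: with MODE_WIDTH = 32, floor_log2 (32) = 5 and
	 ((HOST_WIDE_INT) 2 << 5) - 1 = 63, a mask wide enough for every
	 possible count 0..32.  */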
3796*404b540aSrobert 
3797*404b540aSrobert     case CLZ:
3798*404b540aSrobert       /* If CLZ has a known value at zero, then the nonzero bits are
3799*404b540aSrobert 	 that value, plus the number of bits in the mode minus one.  */
3800*404b540aSrobert       if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3801*404b540aSrobert 	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3802*404b540aSrobert       else
3803*404b540aSrobert 	nonzero = -1;
3804*404b540aSrobert       break;
3805*404b540aSrobert 
3806*404b540aSrobert     case CTZ:
3807*404b540aSrobert       /* If CTZ has a known value at zero, then the nonzero bits are
3808*404b540aSrobert 	 that value, plus the number of bits in the mode minus one.  */
3809*404b540aSrobert       if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3810*404b540aSrobert 	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3811*404b540aSrobert       else
3812*404b540aSrobert 	nonzero = -1;
3813*404b540aSrobert       break;
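      /* Editorial note: on a hypothetical 32-bit target whose
	 CLZ_DEFINED_VALUE_AT_ZERO stores 32 (0x20) into NONZERO, the OR
	 with (1 << 5) - 1 = 0x1f yields 0x3f, covering every possible
	 result 0..32; the same arithmetic applies to CTZ.  */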
3814*404b540aSrobert 
3815*404b540aSrobert     case PARITY:
3816*404b540aSrobert       nonzero = 1;
3817*404b540aSrobert       break;
3818*404b540aSrobert 
3819*404b540aSrobert     case IF_THEN_ELSE:
3820*404b540aSrobert       {
3821*404b540aSrobert 	unsigned HOST_WIDE_INT nonzero_true =
3822*404b540aSrobert 	  cached_nonzero_bits (XEXP (x, 1), mode,
3823*404b540aSrobert 			       known_x, known_mode, known_ret);
3824*404b540aSrobert 
3825*404b540aSrobert 	/* Don't call nonzero_bits a second time if it cannot change
3826*404b540aSrobert 	   anything.  */
3827*404b540aSrobert 	if ((nonzero & nonzero_true) != nonzero)
3828*404b540aSrobert 	  nonzero &= nonzero_true
3829*404b540aSrobert       		     | cached_nonzero_bits (XEXP (x, 2), mode,
3830*404b540aSrobert 					    known_x, known_mode, known_ret);
3831*404b540aSrobert       }
3832*404b540aSrobert       break;
3833*404b540aSrobert 
3834*404b540aSrobert     default:
3835*404b540aSrobert       break;
3836*404b540aSrobert     }
3837*404b540aSrobert 
3838*404b540aSrobert   return nonzero;
3839*404b540aSrobert }
3840*404b540aSrobert 
3841*404b540aSrobert /* See the macro definition above.  */
3842*404b540aSrobert #undef cached_num_sign_bit_copies
3843*404b540aSrobert 
3844*404b540aSrobert 
3845*404b540aSrobert /* The function cached_num_sign_bit_copies is a wrapper around
3846*404b540aSrobert    num_sign_bit_copies1.  It avoids exponential behavior in
3847*404b540aSrobert    num_sign_bit_copies1 when X has identical subexpressions on the
3848*404b540aSrobert    first or the second level.  */
3849*404b540aSrobert 
3850*404b540aSrobert static unsigned int
3851*404b540aSrobert cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3852*404b540aSrobert 			    enum machine_mode known_mode,
3853*404b540aSrobert 			    unsigned int known_ret)
3854*404b540aSrobert {
3855*404b540aSrobert   if (x == known_x && mode == known_mode)
3856*404b540aSrobert     return known_ret;
3857*404b540aSrobert 
3858*404b540aSrobert   /* Try to find identical subexpressions.  If found call
3859*404b540aSrobert      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3860*404b540aSrobert      the precomputed value for the subexpression as KNOWN_RET.  */
3861*404b540aSrobert 
3862*404b540aSrobert   if (ARITHMETIC_P (x))
3863*404b540aSrobert     {
3864*404b540aSrobert       rtx x0 = XEXP (x, 0);
3865*404b540aSrobert       rtx x1 = XEXP (x, 1);
3866*404b540aSrobert 
3867*404b540aSrobert       /* Check the first level.  */
3868*404b540aSrobert       if (x0 == x1)
3869*404b540aSrobert 	return
3870*404b540aSrobert 	  num_sign_bit_copies1 (x, mode, x0, mode,
3871*404b540aSrobert 				cached_num_sign_bit_copies (x0, mode, known_x,
3872*404b540aSrobert 							    known_mode,
3873*404b540aSrobert 							    known_ret));
3874*404b540aSrobert 
3875*404b540aSrobert       /* Check the second level.  */
3876*404b540aSrobert       if (ARITHMETIC_P (x0)
3877*404b540aSrobert 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3878*404b540aSrobert 	return
3879*404b540aSrobert 	  num_sign_bit_copies1 (x, mode, x1, mode,
3880*404b540aSrobert 				cached_num_sign_bit_copies (x1, mode, known_x,
3881*404b540aSrobert 							    known_mode,
3882*404b540aSrobert 							    known_ret));
3883*404b540aSrobert 
3884*404b540aSrobert       if (ARITHMETIC_P (x1)
3885*404b540aSrobert 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3886*404b540aSrobert 	return
3887*404b540aSrobert 	  num_sign_bit_copies1 (x, mode, x0, mode,
3888*404b540aSrobert 				cached_num_sign_bit_copies (x0, mode, known_x,
3889*404b540aSrobert 							    known_mode,
3890*404b540aSrobert 							    known_ret));
3891*404b540aSrobert     }
3892*404b540aSrobert 
3893*404b540aSrobert   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3894*404b540aSrobert }
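/* Editorial example of the caching above (the register is hypothetical):
   for x = (plus:SI (reg:SI 1) (reg:SI 1)) the two operands are the same
   rtx, so the sign-bit copies of the register are computed once and
   handed to num_sign_bit_copies1 as KNOWN_X/KNOWN_RET instead of being
   recomputed for each operand.  */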
3895*404b540aSrobert 
3896*404b540aSrobert /* Return the number of bits at the high-order end of X that are known to
3897*404b540aSrobert    be equal to the sign bit.  X will be used in mode MODE; if MODE is
3898*404b540aSrobert    VOIDmode, X will be used in its own mode.  The returned value will always
3899*404b540aSrobert    be between 1 and the number of bits in MODE.  */
3900*404b540aSrobert 
3901*404b540aSrobert static unsigned int
3902*404b540aSrobert num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3903*404b540aSrobert 		      enum machine_mode known_mode,
3904*404b540aSrobert 		      unsigned int known_ret)
3905*404b540aSrobert {
3906*404b540aSrobert   enum rtx_code code = GET_CODE (x);
3907*404b540aSrobert   unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3908*404b540aSrobert   int num0, num1, result;
3909*404b540aSrobert   unsigned HOST_WIDE_INT nonzero;
3910*404b540aSrobert 
3911*404b540aSrobert   /* If we weren't given a mode, use the mode of X.  If the mode is still
3912*404b540aSrobert      VOIDmode, we don't know anything.  Likewise if one of the modes is
3913*404b540aSrobert      floating-point.  */
3914*404b540aSrobert 
3915*404b540aSrobert   if (mode == VOIDmode)
3916*404b540aSrobert     mode = GET_MODE (x);
3917*404b540aSrobert 
3918*404b540aSrobert   if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3919*404b540aSrobert     return 1;
3920*404b540aSrobert 
3921*404b540aSrobert   /* For a smaller object, just ignore the high bits.  */
3922*404b540aSrobert   if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3923*404b540aSrobert     {
3924*404b540aSrobert       num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3925*404b540aSrobert 					 known_x, known_mode, known_ret);
3926*404b540aSrobert       return MAX (1,
3927*404b540aSrobert 		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3928*404b540aSrobert     }
3929*404b540aSrobert 
3930*404b540aSrobert   if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3931*404b540aSrobert     {
3932*404b540aSrobert #ifndef WORD_REGISTER_OPERATIONS
3933*404b540aSrobert   /* If this machine does not do all register operations on the entire
3934*404b540aSrobert      register and MODE is wider than the mode of X, we can say nothing
3935*404b540aSrobert      at all about the high-order bits.  */
3936*404b540aSrobert       return 1;
3937*404b540aSrobert #else
3938*404b540aSrobert       /* Likewise on machines that do, if the mode of the object is smaller
3939*404b540aSrobert 	 than a word and loads of that size don't sign extend, we can say
3940*404b540aSrobert 	 nothing about the high order bits.  */
3941*404b540aSrobert       if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3942*404b540aSrobert #ifdef LOAD_EXTEND_OP
3943*404b540aSrobert 	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3944*404b540aSrobert #endif
3945*404b540aSrobert 	  )
3946*404b540aSrobert 	return 1;
3947*404b540aSrobert #endif
3948*404b540aSrobert     }
3949*404b540aSrobert 
3950*404b540aSrobert   switch (code)
3951*404b540aSrobert     {
3952*404b540aSrobert     case REG:
3953*404b540aSrobert 
3954*404b540aSrobert #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3955*404b540aSrobert       /* If pointers extend signed and this is a pointer in Pmode, say that
3956*404b540aSrobert 	 all the bits above ptr_mode are known to be sign bit copies.  */
3957*404b540aSrobert       if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3958*404b540aSrobert 	  && REG_POINTER (x))
3959*404b540aSrobert 	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3960*404b540aSrobert #endif
3961*404b540aSrobert 
3962*404b540aSrobert       {
3963*404b540aSrobert 	unsigned int copies_for_hook = 1, copies = 1;
3964*404b540aSrobert 	rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3965*404b540aSrobert 						     known_mode, known_ret,
3966*404b540aSrobert 						     &copies_for_hook);
3967*404b540aSrobert 
3968*404b540aSrobert 	if (new)
3969*404b540aSrobert 	  copies = cached_num_sign_bit_copies (new, mode, known_x,
3970*404b540aSrobert 					       known_mode, known_ret);
3971*404b540aSrobert 
3972*404b540aSrobert 	if (copies > 1 || copies_for_hook > 1)
3973*404b540aSrobert 	  return MAX (copies, copies_for_hook);
3974*404b540aSrobert 
3975*404b540aSrobert 	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
3976*404b540aSrobert       }
3977*404b540aSrobert       break;
3978*404b540aSrobert 
3979*404b540aSrobert     case MEM:
3980*404b540aSrobert #ifdef LOAD_EXTEND_OP
3981*404b540aSrobert       /* Some RISC machines sign-extend all loads of smaller than a word.  */
3982*404b540aSrobert       if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
3983*404b540aSrobert 	return MAX (1, ((int) bitwidth
3984*404b540aSrobert 			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
3985*404b540aSrobert #endif
3986*404b540aSrobert       break;
3987*404b540aSrobert 
3988*404b540aSrobert     case CONST_INT:
3989*404b540aSrobert       /* If the constant is negative, take its 1's complement and remask.
3990*404b540aSrobert 	 Then see how many zero bits we have.  */
3991*404b540aSrobert       nonzero = INTVAL (x) & GET_MODE_MASK (mode);
3992*404b540aSrobert       if (bitwidth <= HOST_BITS_PER_WIDE_INT
3993*404b540aSrobert 	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
3994*404b540aSrobert 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
3995*404b540aSrobert 
3996*404b540aSrobert       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
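      /* Worked example (editorial): for (const_int -4) in SImode, NONZERO
	 is 0xfffffffc; the sign bit is set, so it is complemented and
	 remasked to 0x3, and the result is 32 - floor_log2 (3) - 1 = 30
	 sign-bit copies (bits 31 down to 2 of -4 are all ones).  */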
3997*404b540aSrobert 
3998*404b540aSrobert     case SUBREG:
3999*404b540aSrobert       /* If this is a SUBREG for a promoted object that is sign-extended
4000*404b540aSrobert 	 and we are looking at it in a wider mode, we know that at least the
4001*404b540aSrobert 	 high-order bits are known to be sign bit copies.  */
4002*404b540aSrobert 
4003*404b540aSrobert       if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4004*404b540aSrobert 	{
4005*404b540aSrobert 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4006*404b540aSrobert 					     known_x, known_mode, known_ret);
4007*404b540aSrobert 	  return MAX ((int) bitwidth
4008*404b540aSrobert 		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4009*404b540aSrobert 		      num0);
4010*404b540aSrobert 	}
4011*404b540aSrobert 
4012*404b540aSrobert       /* For a smaller object, just ignore the high bits.  */
4013*404b540aSrobert       if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4014*404b540aSrobert 	{
4015*404b540aSrobert 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4016*404b540aSrobert 					     known_x, known_mode, known_ret);
4017*404b540aSrobert 	  return MAX (1, (num0
4018*404b540aSrobert 			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4019*404b540aSrobert 				   - bitwidth)));
4020*404b540aSrobert 	}
4021*404b540aSrobert 
4022*404b540aSrobert #ifdef WORD_REGISTER_OPERATIONS
4023*404b540aSrobert #ifdef LOAD_EXTEND_OP
4024*404b540aSrobert       /* For paradoxical SUBREGs on machines where all register operations
4025*404b540aSrobert 	 affect the entire register, just look inside.  Note that we are
4026*404b540aSrobert 	 passing MODE to the recursive call, so the number of sign bit copies
4027*404b540aSrobert 	 will remain relative to that mode, not the inner mode.  */
4028*404b540aSrobert 
4029*404b540aSrobert       /* This works only if loads sign extend.  Otherwise, if we get a
4030*404b540aSrobert 	 reload for the inner part, it may be loaded from the stack, and
4031*404b540aSrobert 	 then we lose all sign bit copies that existed before the store
4032*404b540aSrobert 	 to the stack.  */
4033*404b540aSrobert 
4034*404b540aSrobert       if ((GET_MODE_SIZE (GET_MODE (x))
4035*404b540aSrobert 	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4036*404b540aSrobert 	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4037*404b540aSrobert 	  && MEM_P (SUBREG_REG (x)))
4038*404b540aSrobert 	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4039*404b540aSrobert 					   known_x, known_mode, known_ret);
4040*404b540aSrobert #endif
4041*404b540aSrobert #endif
4042*404b540aSrobert       break;
4043*404b540aSrobert 
4044*404b540aSrobert     case SIGN_EXTRACT:
4045*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4046*404b540aSrobert 	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4047*404b540aSrobert       break;
4048*404b540aSrobert 
4049*404b540aSrobert     case SIGN_EXTEND:
4050*404b540aSrobert       return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4051*404b540aSrobert 	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4052*404b540aSrobert 					    known_x, known_mode, known_ret));
4053*404b540aSrobert 
4054*404b540aSrobert     case TRUNCATE:
4055*404b540aSrobert       /* For a smaller object, just ignore the high bits.  */
4056*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4057*404b540aSrobert 					 known_x, known_mode, known_ret);
4058*404b540aSrobert       return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4059*404b540aSrobert 				    - bitwidth)));
4060*404b540aSrobert 
4061*404b540aSrobert     case NOT:
4062*404b540aSrobert       return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4063*404b540aSrobert 					 known_x, known_mode, known_ret);
4064*404b540aSrobert 
4065*404b540aSrobert     case ROTATE:       case ROTATERT:
4066*404b540aSrobert       /* If we are rotating left by a number of bits less than the number
4067*404b540aSrobert 	 of sign bit copies, we can just subtract that amount from the
4068*404b540aSrobert 	 number.  */
4069*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) == CONST_INT
4070*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) >= 0
4071*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4072*404b540aSrobert 	{
4073*404b540aSrobert 	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4074*404b540aSrobert 					     known_x, known_mode, known_ret);
4075*404b540aSrobert 	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4076*404b540aSrobert 				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4077*404b540aSrobert 	}
4078*404b540aSrobert       break;
4079*404b540aSrobert 
4080*404b540aSrobert     case NEG:
4081*404b540aSrobert       /* In general, this subtracts one sign bit copy.  But if the value
4082*404b540aSrobert 	 is known to be positive, the number of sign bit copies is the
4083*404b540aSrobert 	 same as that of the input.  Finally, if the input has just one bit
4084*404b540aSrobert 	 that might be nonzero, all the bits are copies of the sign bit.  */
4085*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4086*404b540aSrobert 					 known_x, known_mode, known_ret);
4087*404b540aSrobert       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4088*404b540aSrobert 	return num0 > 1 ? num0 - 1 : 1;
4089*404b540aSrobert 
4090*404b540aSrobert       nonzero = nonzero_bits (XEXP (x, 0), mode);
4091*404b540aSrobert       if (nonzero == 1)
4092*404b540aSrobert 	return bitwidth;
4093*404b540aSrobert 
4094*404b540aSrobert       if (num0 > 1
4095*404b540aSrobert 	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4096*404b540aSrobert 	num0--;
4097*404b540aSrobert 
4098*404b540aSrobert       return num0;
4099*404b540aSrobert 
4100*404b540aSrobert     case IOR:   case AND:   case XOR:
4101*404b540aSrobert     case SMIN:  case SMAX:  case UMIN:  case UMAX:
4102*404b540aSrobert       /* Logical operations will preserve the number of sign-bit copies.
4103*404b540aSrobert 	 MIN and MAX operations always return one of the operands.  */
4104*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4105*404b540aSrobert 					 known_x, known_mode, known_ret);
4106*404b540aSrobert       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4107*404b540aSrobert 					 known_x, known_mode, known_ret);
4108*404b540aSrobert       return MIN (num0, num1);
4109*404b540aSrobert 
4110*404b540aSrobert     case PLUS:  case MINUS:
4111*404b540aSrobert       /* For addition and subtraction, we can have a 1-bit carry.  However,
4112*404b540aSrobert 	 if we are subtracting 1 from a positive number, there will not
4113*404b540aSrobert 	 be such a carry.  Furthermore, if the positive number is known to
4114*404b540aSrobert 	 be 0 or 1, we know the result is either -1 or 0.  */
4115*404b540aSrobert 
4116*404b540aSrobert       if (code == PLUS && XEXP (x, 1) == constm1_rtx
4117*404b540aSrobert 	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
4118*404b540aSrobert 	{
4119*404b540aSrobert 	  nonzero = nonzero_bits (XEXP (x, 0), mode);
4120*404b540aSrobert 	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4121*404b540aSrobert 	    return (nonzero == 1 || nonzero == 0 ? bitwidth
4122*404b540aSrobert 		    : bitwidth - floor_log2 (nonzero) - 1);
4123*404b540aSrobert 	}
4124*404b540aSrobert 
4125*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4126*404b540aSrobert 					 known_x, known_mode, known_ret);
4127*404b540aSrobert       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4128*404b540aSrobert 					 known_x, known_mode, known_ret);
4129*404b540aSrobert       result = MAX (1, MIN (num0, num1) - 1);
4130*404b540aSrobert 
4131*404b540aSrobert #ifdef POINTERS_EXTEND_UNSIGNED
4132*404b540aSrobert       /* If pointers extend signed and this is an addition or subtraction
4133*404b540aSrobert 	 to a pointer in Pmode, all the bits above ptr_mode are known to be
4134*404b540aSrobert 	 sign bit copies.  */
4135*404b540aSrobert       if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4136*404b540aSrobert 	  && (code == PLUS || code == MINUS)
4137*404b540aSrobert 	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4138*404b540aSrobert 	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4139*404b540aSrobert 			     - GET_MODE_BITSIZE (ptr_mode) + 1),
4140*404b540aSrobert 		      result);
4141*404b540aSrobert #endif
4142*404b540aSrobert       return result;
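      /* Worked example (editorial): if XEXP (x, 0) is known to be 0 or 1
	 (nonzero bits == 1) and we add constm1_rtx, the sum is either -1
	 or 0, so the early exit above returns BITWIDTH: every bit is a
	 copy of the sign bit.  */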
4143*404b540aSrobert 
4144*404b540aSrobert     case MULT:
4145*404b540aSrobert       /* The number of bits of the product is the sum of the number of
4146*404b540aSrobert 	 bits of both terms.  However, unless one of the terms is known
4147*404b540aSrobert 	 to be positive, we must allow for an additional bit since negating
4148*404b540aSrobert 	 a negative number can remove one sign bit copy.  */
4149*404b540aSrobert 
4150*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4151*404b540aSrobert 					 known_x, known_mode, known_ret);
4152*404b540aSrobert       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4153*404b540aSrobert 					 known_x, known_mode, known_ret);
4154*404b540aSrobert 
4155*404b540aSrobert       result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4156*404b540aSrobert       if (result > 0
4157*404b540aSrobert 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4158*404b540aSrobert 	      || (((nonzero_bits (XEXP (x, 0), mode)
4159*404b540aSrobert 		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4160*404b540aSrobert 		  && ((nonzero_bits (XEXP (x, 1), mode)
4161*404b540aSrobert 		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4162*404b540aSrobert 	result--;
4163*404b540aSrobert 
4164*404b540aSrobert       return MAX (1, result);
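      /* Worked example (editorial): with BITWIDTH = 32 and 20 sign-bit
	 copies in each operand, each value fits in 13 signed bits, so the
	 product fits in 26 and RESULT starts at 20 + 20 - 32 = 8; if both
	 operands might be negative, one more bit is allowed for and 7 is
	 returned.  */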
4165*404b540aSrobert 
4166*404b540aSrobert     case UDIV:
4167*404b540aSrobert       /* The result must be <= the first operand.  If the first operand
4168*404b540aSrobert 	 has the high bit set, we know nothing about the number of sign
4169*404b540aSrobert 	 bit copies.  */
4170*404b540aSrobert       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4171*404b540aSrobert 	return 1;
4172*404b540aSrobert       else if ((nonzero_bits (XEXP (x, 0), mode)
4173*404b540aSrobert 		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4174*404b540aSrobert 	return 1;
4175*404b540aSrobert       else
4176*404b540aSrobert 	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4177*404b540aSrobert 					   known_x, known_mode, known_ret);
4178*404b540aSrobert 
4179*404b540aSrobert     case UMOD:
4180*404b540aSrobert       /* The result must be <= the second operand.  */
4181*404b540aSrobert       return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4182*404b540aSrobert 					   known_x, known_mode, known_ret);
4183*404b540aSrobert 
4184*404b540aSrobert     case DIV:
4185*404b540aSrobert       /* Similar to unsigned division, except that we have to worry about
4186*404b540aSrobert 	 the case where the divisor might be negative, in which case the
4187*404b540aSrobert 	 quotient can lose one sign-bit copy.  */
4188*404b540aSrobert       result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4189*404b540aSrobert 					   known_x, known_mode, known_ret);
4190*404b540aSrobert       if (result > 1
4191*404b540aSrobert 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4192*404b540aSrobert 	      || (nonzero_bits (XEXP (x, 1), mode)
4193*404b540aSrobert 		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4194*404b540aSrobert 	result--;
4195*404b540aSrobert 
4196*404b540aSrobert       return result;
4197*404b540aSrobert 
4198*404b540aSrobert     case MOD:
4199*404b540aSrobert       result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4200*404b540aSrobert 					   known_x, known_mode, known_ret);
4201*404b540aSrobert       if (result > 1
4202*404b540aSrobert 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4203*404b540aSrobert 	      || (nonzero_bits (XEXP (x, 1), mode)
4204*404b540aSrobert 		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4205*404b540aSrobert 	result--;
4206*404b540aSrobert 
4207*404b540aSrobert       return result;
4208*404b540aSrobert 
4209*404b540aSrobert     case ASHIFTRT:
4210*404b540aSrobert       /* A right shift by a constant increases the number of bits that
4211*404b540aSrobert 	 are copies of the sign bit by the shift count.  */
4212*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4213*404b540aSrobert 					 known_x, known_mode, known_ret);
4214*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) == CONST_INT
4215*404b540aSrobert 	  && INTVAL (XEXP (x, 1)) > 0)
4216*404b540aSrobert 	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4217*404b540aSrobert 
4218*404b540aSrobert       return num0;
4219*404b540aSrobert 
4220*404b540aSrobert     case ASHIFT:
4221*404b540aSrobert       /* Left shifts destroy copies.  */
4222*404b540aSrobert       if (GET_CODE (XEXP (x, 1)) != CONST_INT
4223*404b540aSrobert 	  || INTVAL (XEXP (x, 1)) < 0
4224*404b540aSrobert 	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4225*404b540aSrobert 	return 1;
4226*404b540aSrobert 
4227*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4228*404b540aSrobert 					 known_x, known_mode, known_ret);
4229*404b540aSrobert       return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4230*404b540aSrobert 
4231*404b540aSrobert     case IF_THEN_ELSE:
4232*404b540aSrobert       num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4233*404b540aSrobert 					 known_x, known_mode, known_ret);
4234*404b540aSrobert       num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4235*404b540aSrobert 					 known_x, known_mode, known_ret);
4236*404b540aSrobert       return MIN (num0, num1);
4237*404b540aSrobert 
4238*404b540aSrobert     case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4239*404b540aSrobert     case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4240*404b540aSrobert     case GEU: case GTU: case LEU: case LTU:
4241*404b540aSrobert     case UNORDERED: case ORDERED:
4242*404b540aSrobert       /* The result here is STORE_FLAG_VALUE; if that constant is negative,
4243*404b540aSrobert 	 take its 1's complement and remask, then count the zero bits.  */
4244*404b540aSrobert       nonzero = STORE_FLAG_VALUE;
4245*404b540aSrobert       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4246*404b540aSrobert 	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4247*404b540aSrobert 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4248*404b540aSrobert 
4249*404b540aSrobert       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4250*404b540aSrobert 
4251*404b540aSrobert     default:
4252*404b540aSrobert       break;
4253*404b540aSrobert     }
4254*404b540aSrobert 
4255*404b540aSrobert   /* If we haven't been able to figure it out by one of the above rules,
4256*404b540aSrobert      see if some of the high-order bits are known to be zero.  If so,
4257*404b540aSrobert      count those bits and return one less than that amount.  If we can't
4258*404b540aSrobert      safely compute the mask for this mode, always return BITWIDTH.  */
4259*404b540aSrobert 
4260*404b540aSrobert   bitwidth = GET_MODE_BITSIZE (mode);
4261*404b540aSrobert   if (bitwidth > HOST_BITS_PER_WIDE_INT)
4262*404b540aSrobert     return 1;
4263*404b540aSrobert 
4264*404b540aSrobert   nonzero = nonzero_bits (x, mode);
4265*404b540aSrobert   return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4266*404b540aSrobert 	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4267*404b540aSrobert }
4268*404b540aSrobert 
4269*404b540aSrobert /* Calculate the rtx_cost of a single instruction.  A return value of
4270*404b540aSrobert    zero indicates an instruction pattern without a known cost.  */
4271*404b540aSrobert 
4272*404b540aSrobert int
4273*404b540aSrobert insn_rtx_cost (rtx pat)
4274*404b540aSrobert {
4275*404b540aSrobert   int i, cost;
4276*404b540aSrobert   rtx set;
4277*404b540aSrobert 
4278*404b540aSrobert   /* Extract the single set rtx from the instruction pattern.
4279*404b540aSrobert      We can't use single_set since we only have the pattern.  */
4280*404b540aSrobert   if (GET_CODE (pat) == SET)
4281*404b540aSrobert     set = pat;
4282*404b540aSrobert   else if (GET_CODE (pat) == PARALLEL)
4283*404b540aSrobert     {
4284*404b540aSrobert       set = NULL_RTX;
4285*404b540aSrobert       for (i = 0; i < XVECLEN (pat, 0); i++)
4286*404b540aSrobert 	{
4287*404b540aSrobert 	  rtx x = XVECEXP (pat, 0, i);
4288*404b540aSrobert 	  if (GET_CODE (x) == SET)
4289*404b540aSrobert 	    {
4290*404b540aSrobert 	      if (set)
4291*404b540aSrobert 		return 0;
4292*404b540aSrobert 	      set = x;
4293*404b540aSrobert 	    }
4294*404b540aSrobert 	}
4295*404b540aSrobert       if (!set)
4296*404b540aSrobert 	return 0;
4297*404b540aSrobert     }
4298*404b540aSrobert   else
4299*404b540aSrobert     return 0;
4300*404b540aSrobert 
4301*404b540aSrobert   cost = rtx_cost (SET_SRC (set), SET);
4302*404b540aSrobert   return cost > 0 ? cost : COSTS_N_INSNS (1);
4303*404b540aSrobert }
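/* Editorial usage sketch (the pattern below is hypothetical): for
   (parallel [(set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
	      (clobber (reg:CC 17))])
   the loop above finds the single SET, and the returned cost is
   rtx_cost of the PLUS, or COSTS_N_INSNS (1) if that is zero.  */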
4304*404b540aSrobert 
4305*404b540aSrobert /* Given an insn INSN and condition COND, return the condition in a
4306*404b540aSrobert    canonical form to simplify testing by callers.  Specifically:
4307*404b540aSrobert 
4308*404b540aSrobert    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4309*404b540aSrobert    (2) Both operands will be machine operands; (cc0) will have been replaced.
4310*404b540aSrobert    (3) If an operand is a constant, it will be the second operand.
4311*404b540aSrobert    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4312*404b540aSrobert        for GE, GEU, and LEU.
4313*404b540aSrobert 
4314*404b540aSrobert    If the condition cannot be understood, or is an inequality floating-point
4315*404b540aSrobert    comparison which needs to be reversed, 0 will be returned.
4316*404b540aSrobert 
4317*404b540aSrobert    If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4318*404b540aSrobert 
4319*404b540aSrobert    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4320*404b540aSrobert    insn used in locating the condition was found.  If a replacement test
4321*404b540aSrobert    of the condition is desired, it should be placed in front of that
4322*404b540aSrobert    insn and we will be sure that the inputs are still valid.
4323*404b540aSrobert 
4324*404b540aSrobert    If WANT_REG is nonzero, we wish the condition to be relative to that
4325*404b540aSrobert    register, if possible.  Therefore, do not canonicalize the condition
4326*404b540aSrobert    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4327*404b540aSrobert    to be a compare to a CC mode register.
4328*404b540aSrobert 
4329*404b540aSrobert    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4330*404b540aSrobert    and at INSN.  */
4331*404b540aSrobert 
4332*404b540aSrobert rtx
4333*404b540aSrobert canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4334*404b540aSrobert 			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4335*404b540aSrobert {
4336*404b540aSrobert   enum rtx_code code;
4337*404b540aSrobert   rtx prev = insn;
4338*404b540aSrobert   rtx set;
4339*404b540aSrobert   rtx tem;
4340*404b540aSrobert   rtx op0, op1;
4341*404b540aSrobert   int reverse_code = 0;
4342*404b540aSrobert   enum machine_mode mode;
4343*404b540aSrobert   basic_block bb = BLOCK_FOR_INSN (insn);
4344*404b540aSrobert 
4345*404b540aSrobert   code = GET_CODE (cond);
4346*404b540aSrobert   mode = GET_MODE (cond);
4347*404b540aSrobert   op0 = XEXP (cond, 0);
4348*404b540aSrobert   op1 = XEXP (cond, 1);
4349*404b540aSrobert 
4350*404b540aSrobert   if (reverse)
4351*404b540aSrobert     code = reversed_comparison_code (cond, insn);
4352*404b540aSrobert   if (code == UNKNOWN)
4353*404b540aSrobert     return 0;
4354*404b540aSrobert 
4355*404b540aSrobert   if (earliest)
4356*404b540aSrobert     *earliest = insn;
4357*404b540aSrobert 
4358*404b540aSrobert   /* If we are comparing a register with zero, see if the register is set
4359*404b540aSrobert      in the previous insn to a COMPARE or a comparison operation.  Perform
4360*404b540aSrobert      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4361*404b540aSrobert      in cse.c  */
4362*404b540aSrobert 
4363*404b540aSrobert   while ((GET_RTX_CLASS (code) == RTX_COMPARE
4364*404b540aSrobert 	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4365*404b540aSrobert 	 && op1 == CONST0_RTX (GET_MODE (op0))
4366*404b540aSrobert 	 && op0 != want_reg)
4367*404b540aSrobert     {
4368*404b540aSrobert       /* Set nonzero when we find something of interest.  */
4369*404b540aSrobert       rtx x = 0;
4370*404b540aSrobert 
4371*404b540aSrobert #ifdef HAVE_cc0
4372*404b540aSrobert       /* If comparison with cc0, import actual comparison from compare
4373*404b540aSrobert 	 insn.  */
4374*404b540aSrobert       if (op0 == cc0_rtx)
4375*404b540aSrobert 	{
4376*404b540aSrobert 	  if ((prev = prev_nonnote_insn (prev)) == 0
4377*404b540aSrobert 	      || !NONJUMP_INSN_P (prev)
4378*404b540aSrobert 	      || (set = single_set (prev)) == 0
4379*404b540aSrobert 	      || SET_DEST (set) != cc0_rtx)
4380*404b540aSrobert 	    return 0;
4381*404b540aSrobert 
4382*404b540aSrobert 	  op0 = SET_SRC (set);
4383*404b540aSrobert 	  op1 = CONST0_RTX (GET_MODE (op0));
4384*404b540aSrobert 	  if (earliest)
4385*404b540aSrobert 	    *earliest = prev;
4386*404b540aSrobert 	}
4387*404b540aSrobert #endif
4388*404b540aSrobert 
4389*404b540aSrobert       /* If this is a COMPARE, pick up the two things being compared.  */
4390*404b540aSrobert       if (GET_CODE (op0) == COMPARE)
4391*404b540aSrobert 	{
4392*404b540aSrobert 	  op1 = XEXP (op0, 1);
4393*404b540aSrobert 	  op0 = XEXP (op0, 0);
4394*404b540aSrobert 	  continue;
4395*404b540aSrobert 	}
4396*404b540aSrobert       else if (!REG_P (op0))
4397*404b540aSrobert 	break;
4398*404b540aSrobert 
4399*404b540aSrobert       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
4400*404b540aSrobert 	 stop if it isn't a single set or if it has a REG_INC note because
4401*404b540aSrobert 	 we don't want to bother dealing with it.  */
4402*404b540aSrobert 
4403*404b540aSrobert       if ((prev = prev_nonnote_insn (prev)) == 0
4404*404b540aSrobert 	  || !NONJUMP_INSN_P (prev)
4405*404b540aSrobert 	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
4406*404b540aSrobert 	  /* In cfglayout mode, there do not have to be labels at the
4407*404b540aSrobert 	     beginning of a block, or jumps at the end, so the previous
4408*404b540aSrobert 	     conditions would not stop us when we reach bb boundary.  */
4409*404b540aSrobert 	  || BLOCK_FOR_INSN (prev) != bb)
4410*404b540aSrobert 	break;
4411*404b540aSrobert 
4412*404b540aSrobert       set = set_of (op0, prev);
4413*404b540aSrobert 
4414*404b540aSrobert       if (set
4415*404b540aSrobert 	  && (GET_CODE (set) != SET
4416*404b540aSrobert 	      || !rtx_equal_p (SET_DEST (set), op0)))
4417*404b540aSrobert 	break;
4418*404b540aSrobert 
4419*404b540aSrobert       /* If this is setting OP0, get what it sets it to if it looks
4420*404b540aSrobert 	 relevant.  */
4421*404b540aSrobert       if (set)
4422*404b540aSrobert 	{
4423*404b540aSrobert 	  enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4424*404b540aSrobert #ifdef FLOAT_STORE_FLAG_VALUE
4425*404b540aSrobert 	  REAL_VALUE_TYPE fsfv;
4426*404b540aSrobert #endif
4427*404b540aSrobert 
4428*404b540aSrobert 	  /* ??? We may not combine comparisons done in a CCmode with
4429*404b540aSrobert 	     comparisons not done in a CCmode.  This is to aid targets
4430*404b540aSrobert 	     like Alpha that have an IEEE compliant EQ instruction, and
4431*404b540aSrobert 	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
4432*404b540aSrobert 	     actually artificial, simply to prevent the combination, but
4433*404b540aSrobert 	     should not affect other platforms.
4434*404b540aSrobert 
4435*404b540aSrobert 	     However, we must allow VOIDmode comparisons to match either
4436*404b540aSrobert 	     CCmode or non-CCmode comparison, because some ports have
4437*404b540aSrobert 	     modeless comparisons inside branch patterns.
4438*404b540aSrobert 
4439*404b540aSrobert 	     ??? This mode check should perhaps look more like the mode check
4440*404b540aSrobert 	     in simplify_comparison in combine.  */
4441*404b540aSrobert 
4442*404b540aSrobert 	  if ((GET_CODE (SET_SRC (set)) == COMPARE
4443*404b540aSrobert 	       || (((code == NE
4444*404b540aSrobert 		     || (code == LT
4445*404b540aSrobert 			 && GET_MODE_CLASS (inner_mode) == MODE_INT
4446*404b540aSrobert 			 && (GET_MODE_BITSIZE (inner_mode)
4447*404b540aSrobert 			     <= HOST_BITS_PER_WIDE_INT)
4448*404b540aSrobert 			 && (STORE_FLAG_VALUE
4449*404b540aSrobert 			     & ((HOST_WIDE_INT) 1
4450*404b540aSrobert 				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
4451*404b540aSrobert #ifdef FLOAT_STORE_FLAG_VALUE
4452*404b540aSrobert 		     || (code == LT
4453*404b540aSrobert 			 && SCALAR_FLOAT_MODE_P (inner_mode)
4454*404b540aSrobert 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4455*404b540aSrobert 			     REAL_VALUE_NEGATIVE (fsfv)))
4456*404b540aSrobert #endif
4457*404b540aSrobert 		     ))
4458*404b540aSrobert 		   && COMPARISON_P (SET_SRC (set))))
4459*404b540aSrobert 	      && (((GET_MODE_CLASS (mode) == MODE_CC)
4460*404b540aSrobert 		   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4461*404b540aSrobert 		  || mode == VOIDmode || inner_mode == VOIDmode))
4462*404b540aSrobert 	    x = SET_SRC (set);
4463*404b540aSrobert 	  else if (((code == EQ
4464*404b540aSrobert 		     || (code == GE
4465*404b540aSrobert 			 && (GET_MODE_BITSIZE (inner_mode)
4466*404b540aSrobert 			     <= HOST_BITS_PER_WIDE_INT)
4467*404b540aSrobert 			 && GET_MODE_CLASS (inner_mode) == MODE_INT
4468*404b540aSrobert 			 && (STORE_FLAG_VALUE
4469*404b540aSrobert 			     & ((HOST_WIDE_INT) 1
4470*404b540aSrobert 				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
4471*404b540aSrobert #ifdef FLOAT_STORE_FLAG_VALUE
4472*404b540aSrobert 		     || (code == GE
4473*404b540aSrobert 			 && SCALAR_FLOAT_MODE_P (inner_mode)
4474*404b540aSrobert 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4475*404b540aSrobert 			     REAL_VALUE_NEGATIVE (fsfv)))
4476*404b540aSrobert #endif
4477*404b540aSrobert 		     ))
4478*404b540aSrobert 		   && COMPARISON_P (SET_SRC (set))
4479*404b540aSrobert 		   && (((GET_MODE_CLASS (mode) == MODE_CC)
4480*404b540aSrobert 			== (GET_MODE_CLASS (inner_mode) == MODE_CC))
4481*404b540aSrobert 		       || mode == VOIDmode || inner_mode == VOIDmode))
4482*404b540aSrobert 
4483*404b540aSrobert 	    {
4484*404b540aSrobert 	      reverse_code = 1;
4485*404b540aSrobert 	      x = SET_SRC (set);
4486*404b540aSrobert 	    }
4487*404b540aSrobert 	  else
4488*404b540aSrobert 	    break;
4489*404b540aSrobert 	}
4490*404b540aSrobert 
4491*404b540aSrobert       else if (reg_set_p (op0, prev))
4492*404b540aSrobert 	/* If this sets OP0, but not directly, we have to give up.  */
4493*404b540aSrobert 	break;
4494*404b540aSrobert 
4495*404b540aSrobert       if (x)
4496*404b540aSrobert 	{
4497*404b540aSrobert 	  /* If the caller is expecting the condition to be valid at INSN,
4498*404b540aSrobert 	     make sure X doesn't change before INSN.  */
4499*404b540aSrobert 	  if (valid_at_insn_p)
4500*404b540aSrobert 	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4501*404b540aSrobert 	      break;
4502*404b540aSrobert 	  if (COMPARISON_P (x))
4503*404b540aSrobert 	    code = GET_CODE (x);
4504*404b540aSrobert 	  if (reverse_code)
4505*404b540aSrobert 	    {
4506*404b540aSrobert 	      code = reversed_comparison_code (x, prev);
4507*404b540aSrobert 	      if (code == UNKNOWN)
4508*404b540aSrobert 		return 0;
4509*404b540aSrobert 	      reverse_code = 0;
4510*404b540aSrobert 	    }
4511*404b540aSrobert 
4512*404b540aSrobert 	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4513*404b540aSrobert 	  if (earliest)
4514*404b540aSrobert 	    *earliest = prev;
4515*404b540aSrobert 	}
4516*404b540aSrobert     }
4517*404b540aSrobert 
4518*404b540aSrobert   /* If constant is first, put it last.  */
4519*404b540aSrobert   if (CONSTANT_P (op0))
4520*404b540aSrobert     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4521*404b540aSrobert 
4522*404b540aSrobert   /* If OP0 is the result of a comparison, we weren't able to find what
4523*404b540aSrobert      was really being compared, so fail.  */
4524*404b540aSrobert   if (!allow_cc_mode
4525*404b540aSrobert       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4526*404b540aSrobert     return 0;
4527*404b540aSrobert 
4528*404b540aSrobert   /* Canonicalize any ordered comparison with integers involving equality
4529*404b540aSrobert      if we can do computations in the relevant mode and we do not
4530*404b540aSrobert      overflow.  */
4531*404b540aSrobert 
4532*404b540aSrobert   if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4533*404b540aSrobert       && GET_CODE (op1) == CONST_INT
4534*404b540aSrobert       && GET_MODE (op0) != VOIDmode
4535*404b540aSrobert       && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4536*404b540aSrobert     {
4537*404b540aSrobert       HOST_WIDE_INT const_val = INTVAL (op1);
4538*404b540aSrobert       unsigned HOST_WIDE_INT uconst_val = const_val;
4539*404b540aSrobert       unsigned HOST_WIDE_INT max_val
4540*404b540aSrobert 	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4541*404b540aSrobert 
4542*404b540aSrobert       switch (code)
4543*404b540aSrobert 	{
4544*404b540aSrobert 	case LE:
4545*404b540aSrobert 	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4546*404b540aSrobert 	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4547*404b540aSrobert 	  break;
4548*404b540aSrobert 
4549*404b540aSrobert 	/* When cross-compiling, const_val might be sign-extended from
4550*404b540aSrobert 	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4551*404b540aSrobert 	case GE:
4552*404b540aSrobert 	  if ((HOST_WIDE_INT) (const_val & max_val)
4553*404b540aSrobert 	      != (((HOST_WIDE_INT) 1
4554*404b540aSrobert 		   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4555*404b540aSrobert 	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4556*404b540aSrobert 	  break;
4557*404b540aSrobert 
4558*404b540aSrobert 	case LEU:
4559*404b540aSrobert 	  if (uconst_val < max_val)
4560*404b540aSrobert 	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4561*404b540aSrobert 	  break;
4562*404b540aSrobert 
4563*404b540aSrobert 	case GEU:
4564*404b540aSrobert 	  if (uconst_val != 0)
4565*404b540aSrobert 	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4566*404b540aSrobert 	  break;
4567*404b540aSrobert 
4568*404b540aSrobert 	default:
4569*404b540aSrobert 	  break;
4570*404b540aSrobert 	}
4571*404b540aSrobert     }
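  /* Worked examples (editorial): (le:SI x (const_int 4)) becomes
     (lt:SI x (const_int 5)), and (geu:SI x (const_int 1)) becomes
     (gtu:SI x (const_int 0)).  The guards above skip the rewrite when
     adding or subtracting 1 would overflow, e.g. LE against
     0x7fffffff in SImode.  */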
4572*404b540aSrobert 
4573*404b540aSrobert   /* Never return CC0; return zero instead.  */
4574*404b540aSrobert   if (CC0_P (op0))
4575*404b540aSrobert     return 0;
4576*404b540aSrobert 
4577*404b540aSrobert   return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4578*404b540aSrobert }
4579*404b540aSrobert 
4580*404b540aSrobert /* Given a jump insn JUMP, return the condition that will cause it to branch
4581*404b540aSrobert    to its JUMP_LABEL.  If the condition cannot be understood, or is an
4582*404b540aSrobert    inequality floating-point comparison which needs to be reversed, 0 will
4583*404b540aSrobert    be returned.
4584*404b540aSrobert 
4585*404b540aSrobert    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4586*404b540aSrobert    insn used in locating the condition was found.  If a replacement test
4587*404b540aSrobert    of the condition is desired, it should be placed in front of that
4588*404b540aSrobert    insn and we will be sure that the inputs are still valid.  If EARLIEST
4589*404b540aSrobert    is null, the returned condition will be valid at INSN.
4590*404b540aSrobert 
4591*404b540aSrobert    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4592*404b540aSrobert    compare CC mode register.
4593*404b540aSrobert 
4594*404b540aSrobert    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
4595*404b540aSrobert 
4596*404b540aSrobert rtx
4597*404b540aSrobert get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4598*404b540aSrobert {
4599*404b540aSrobert   rtx cond;
4600*404b540aSrobert   int reverse;
4601*404b540aSrobert   rtx set;
4602*404b540aSrobert 
4603*404b540aSrobert   /* If this is not a standard conditional jump, we can't parse it.  */
4604*404b540aSrobert   if (!JUMP_P (jump)
4605*404b540aSrobert       || ! any_condjump_p (jump))
4606*404b540aSrobert     return 0;
4607*404b540aSrobert   set = pc_set (jump);
4608*404b540aSrobert 
4609*404b540aSrobert   cond = XEXP (SET_SRC (set), 0);
4610*404b540aSrobert 
4611*404b540aSrobert   /* If this branches to JUMP_LABEL when the condition is false, reverse
4612*404b540aSrobert      the condition.  */
4613*404b540aSrobert   reverse
4614*404b540aSrobert     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4615*404b540aSrobert       && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4616*404b540aSrobert 
4617*404b540aSrobert   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4618*404b540aSrobert 				 allow_cc_mode, valid_at_insn_p);
4619*404b540aSrobert }
4620*404b540aSrobert 
4621*404b540aSrobert /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4622*404b540aSrobert    TARGET_MODE_REP_EXTENDED.
4623*404b540aSrobert 
4624*404b540aSrobert    Note that we assume that the property of
4625*404b540aSrobert    TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4626*404b540aSrobert    narrower than mode B.  I.e., if A is a mode narrower than B then in
4627*404b540aSrobert    order to be able to operate on it in mode B, mode A needs to
4628*404b540aSrobert    satisfy the requirements set by the representation of mode B.  */
4629*404b540aSrobert 
4630*404b540aSrobert static void
4631*404b540aSrobert init_num_sign_bit_copies_in_rep (void)
4632*404b540aSrobert {
4633*404b540aSrobert   enum machine_mode mode, in_mode;
4634*404b540aSrobert 
4635*404b540aSrobert   for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4636*404b540aSrobert        in_mode = GET_MODE_WIDER_MODE (mode))
4637*404b540aSrobert     for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4638*404b540aSrobert 	 mode = GET_MODE_WIDER_MODE (mode))
4639*404b540aSrobert       {
4640*404b540aSrobert 	enum machine_mode i;
4641*404b540aSrobert 
4642*404b540aSrobert 	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4643*404b540aSrobert 	   only claims an extension to the next wider mode.  */
4644*404b540aSrobert 	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4645*404b540aSrobert 		    || GET_MODE_WIDER_MODE (mode) == in_mode);
4646*404b540aSrobert 
4647*404b540aSrobert 	/* We are operating in IN_MODE.  Count how many bits outside
4648*404b540aSrobert 	   of MODE have to be copies of the sign bit.  */
4649*404b540aSrobert 	for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4650*404b540aSrobert 	  {
4651*404b540aSrobert 	    enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4652*404b540aSrobert 
4653*404b540aSrobert 	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4654*404b540aSrobert 		/* We can only check sign-bit copies starting from the
4655*404b540aSrobert 		   top-bit.  In order to be able to check the bits we
4656*404b540aSrobert 		   have already seen we pretend that subsequent bits
4657*404b540aSrobert 		   have to be sign-bit copies too.  */
4658*404b540aSrobert 		|| num_sign_bit_copies_in_rep [in_mode][mode])
4659*404b540aSrobert 	      num_sign_bit_copies_in_rep [in_mode][mode]
4660*404b540aSrobert 		+= GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4661*404b540aSrobert 	  }
4662*404b540aSrobert       }
4663*404b540aSrobert }
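/* Worked illustration (not from the original source; assumes a
   hypothetical target and the usual 8/16/32-bit QI/HI/SImode sizes):
   if TARGET_MODE_REP_EXTENDED reports SIGN_EXTEND for (QImode, HImode)
   and for (HImode, SImode) -- extension claimed only to the next wider
   mode, as the assertion above requires -- then the inner loop yields

       num_sign_bit_copies_in_rep[SImode][QImode]
	 = (16 - 8) + (32 - 16) = 24

   i.e. the top 24 bits of the SImode representation of a QImode value
   must be copies of its sign bit.  Note that the outer loop may step
   with GET_MODE_WIDER_MODE (mode) because the inner loop always
   terminates with mode == in_mode.  */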
4664*404b540aSrobert 
4665*404b540aSrobert /* Suppose that truncation from the machine mode of X to MODE is not a
4666*404b540aSrobert    no-op.  See if there is anything special about X so that we can
4667*404b540aSrobert    assume it already contains a truncated value of MODE.  */
4668*404b540aSrobert 
4669*404b540aSrobert bool
4670*404b540aSrobert truncated_to_mode (enum machine_mode mode, rtx x)
4671*404b540aSrobert {
4672*404b540aSrobert   /* This register has already been used in MODE without explicit
4673*404b540aSrobert      truncation.  */
4674*404b540aSrobert   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4675*404b540aSrobert     return true;
4676*404b540aSrobert 
4677*404b540aSrobert   /* See if we already satisfy the requirements of MODE.  If yes we
4678*404b540aSrobert      can just switch to MODE.  */
4679*404b540aSrobert   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4680*404b540aSrobert       && (num_sign_bit_copies (x, GET_MODE (x))
4681*404b540aSrobert 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4682*404b540aSrobert     return true;
4683*404b540aSrobert 
4684*404b540aSrobert   return false;
4685*404b540aSrobert }
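/* Illustration (not from the original source): continuing the example
   above, with num_sign_bit_copies_in_rep[SImode][QImode] == 24, an
   SImode value X is considered already truncated to QImode when
   num_sign_bit_copies (X, SImode) >= 25, i.e. bits 31..7 all agree:
   the 24 extension bits plus the QImode sign bit are in place, so the
   truncation discards no information.  */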
4686*404b540aSrobert 
4687*404b540aSrobert /* Initialize non_rtx_starting_operands, which is used to speed up
4688*404b540aSrobert    for_each_rtx.  */
4689*404b540aSrobert void
4690*404b540aSrobert init_rtlanal (void)
4691*404b540aSrobert {
4692*404b540aSrobert   int i;
4693*404b540aSrobert   for (i = 0; i < NUM_RTX_CODE; i++)
4694*404b540aSrobert     {
4695*404b540aSrobert       const char *format = GET_RTX_FORMAT (i);
4696*404b540aSrobert       const char *first = strpbrk (format, "eEV");
4697*404b540aSrobert       non_rtx_starting_operands[i] = first ? first - format : -1;
4698*404b540aSrobert     }
4699*404b540aSrobert 
4700*404b540aSrobert   init_num_sign_bit_copies_in_rep ();
4701*404b540aSrobert }
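/* Illustration (not from the original source): the table maps each rtx
   code to the index of its first operand that can contain rtx objects
   ('e', 'E' or 'V' in the format string), or -1 if there is none.  For
   example, PLUS has format "ee", giving entry 0, while CONST_INT has
   format "w" (a bare integer), giving entry -1; for_each_rtx uses the
   entries to skip leading non-rtx operands.  */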
4702*404b540aSrobert 
4703*404b540aSrobert /* Check whether X is a constant pool constant.  */
4704*404b540aSrobert bool
4705*404b540aSrobert constant_pool_constant_p (rtx x)
4706*404b540aSrobert {
4707*404b540aSrobert   x = avoid_constant_pool_reference (x);
4708*404b540aSrobert   return GET_CODE (x) == CONST_DOUBLE;
4709*404b540aSrobert }
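/* Note (not from the original source): because
   avoid_constant_pool_reference looks through a MEM that addresses the
   constant pool, this returns true both for a bare CONST_DOUBLE and
   for a reference that resolves to one, e.g. a floating-point literal
   placed in the pool.  */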
4710*404b540aSrobert 
4711