/* xref: /openbsd/gnu/gcc/gcc/simplify-rtx.c (revision 404b540a) */
/* RTL simplification functions for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "target.h"

/* Simplification and canonicalization of RTL.  */

/* Much code operates on (low, high) pairs; the low value is an
   unsigned wide int, the high value a signed wide int.  We
   occasionally need to sign extend from low to high as if low were a
   signed wide int.  */
#define HWI_SIGN_EXTEND(low) \
 ((((HOST_WIDE_INT) low) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
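
/* For example (assuming a 64-bit HOST_WIDE_INT), the (low, high) pair
   describing the value -1 is (0xffffffffffffffff, -1); since the low
   word has its top bit set, HWI_SIGN_EXTEND recovers the high word
   from it:

       HWI_SIGN_EXTEND (0xffffffffffffffffULL) == (HOST_WIDE_INT) -1
       HWI_SIGN_EXTEND (42)                    == (HOST_WIDE_INT) 0   */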

static rtx neg_const_int (enum machine_mode, rtx);
static bool plus_minus_operand_p (rtx);
static int simplify_plus_minus_op_data_cmp (const void *, const void *);
static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx, rtx);
static rtx simplify_immed_subreg (enum machine_mode, rtx, enum machine_mode,
				  unsigned int);
static rtx simplify_associative_operation (enum rtx_code, enum machine_mode,
					   rtx, rtx);
static rtx simplify_relational_operation_1 (enum rtx_code, enum machine_mode,
					    enum machine_mode, rtx, rtx);
static rtx simplify_unary_operation_1 (enum rtx_code, enum machine_mode, rtx);
static rtx simplify_binary_operation_1 (enum rtx_code, enum machine_mode,
					rtx, rtx, rtx, rtx);

/* Negate a CONST_INT rtx, truncating (because a conversion from a
   maximally negative number can overflow).  */
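/* For instance, the most negative SImode constant has no SImode
   negation: gen_int_mode truncates the overflowed result back into
   the mode, so negating (const_int -2147483648) wraps around and
   yields the same constant.  */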
static rtx
neg_const_int (enum machine_mode mode, rtx i)
{
  return gen_int_mode (- INTVAL (i), mode);
}

/* Test whether expression, X, is an immediate constant that represents
   the most significant bit of machine mode MODE.  */
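/* For example, in QImode the only matching value is 0x80, i.e. the
   CONST_INT produced by gen_int_mode (0x80, QImode), which is
   (const_int -128); the masking below makes the test independent of
   how the host sign-extends the stored value.  */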

bool
mode_signbit_p (enum machine_mode mode, rtx x)
{
  unsigned HOST_WIDE_INT val;
  unsigned int width;

  if (GET_MODE_CLASS (mode) != MODE_INT)
    return false;

  width = GET_MODE_BITSIZE (mode);
  if (width == 0)
    return false;

  if (width <= HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT)
    val = INTVAL (x);
  else if (width <= 2 * HOST_BITS_PER_WIDE_INT
	   && GET_CODE (x) == CONST_DOUBLE
	   && CONST_DOUBLE_LOW (x) == 0)
    {
      val = CONST_DOUBLE_HIGH (x);
      width -= HOST_BITS_PER_WIDE_INT;
    }
  else
    return false;

  if (width < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
  return val == ((unsigned HOST_WIDE_INT) 1 << (width - 1));
}

/* Make a binary operation by properly ordering the operands and
   seeing if the expression folds.  */
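/* For example, simplify_gen_binary (PLUS, SImode, (const_int 1),
   (const_int 2)) folds to (const_int 3), while with a non-constant
   operand the constant is merely canonicalized into second position:
   (plus (const_int 1) (reg)) becomes (plus (reg) (const_int 1)).  */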

rtx
simplify_gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0,
		     rtx op1)
{
  rtx tem;

  /* If this simplifies, do it.  */
  tem = simplify_binary_operation (code, mode, op0, op1);
  if (tem)
    return tem;

  /* Put complex operands first and constants second if commutative.  */
  if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
      && swap_commutative_operands_p (op0, op1))
    tem = op0, op0 = op1, op1 = tem;

  return gen_rtx_fmt_ee (code, mode, op0, op1);
}

/* If X is a MEM referencing the constant pool, return the real value.
   Otherwise return X.  */
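/* E.g. a (mem (symbol_ref)) whose address points at a constant pool
   entry holding a CONST_DOUBLE is replaced by that CONST_DOUBLE, so
   later folding can treat it as an ordinary constant.  */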
rtx
avoid_constant_pool_reference (rtx x)
{
  rtx c, tmp, addr;
  enum machine_mode cmode;
  HOST_WIDE_INT offset = 0;

  switch (GET_CODE (x))
    {
    case MEM:
      break;

    case FLOAT_EXTEND:
      /* Handle float extensions of constant pool references.  */
      tmp = XEXP (x, 0);
      c = avoid_constant_pool_reference (tmp);
      if (c != tmp && GET_CODE (c) == CONST_DOUBLE)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, c);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      return x;

    default:
      return x;
    }

  addr = XEXP (x, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  /* Split the address into a base and integer offset.  */
  if (GET_CODE (addr) == CONST
      && GET_CODE (XEXP (addr, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (XEXP (addr, 0), 1));
      addr = XEXP (XEXP (addr, 0), 0);
    }

  if (GET_CODE (addr) == LO_SUM)
    addr = XEXP (addr, 1);

  /* If this is a constant pool reference, we can turn it into its
     constant and hope that simplifications happen.  */
  if (GET_CODE (addr) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (addr))
    {
      c = get_pool_constant (addr);
      cmode = get_pool_mode (addr);

      /* If we're accessing the constant in a different mode than it was
         originally stored, attempt to fix that up via subreg simplifications.
         If that fails we have no choice but to return the original memory.  */
      if (offset != 0 || cmode != GET_MODE (x))
	{
	  rtx tem = simplify_subreg (GET_MODE (x), c, cmode, offset);
	  if (tem && CONSTANT_P (tem))
	    return tem;
	}
      else
	return c;
    }

  return x;
}

/* Return true if X is a MEM referencing the constant pool.  */

bool
constant_pool_reference_p (rtx x)
{
  return avoid_constant_pool_reference (x) != x;
}

/* Make a unary operation by first seeing if it folds and otherwise making
   the specified operation.  */

rtx
simplify_gen_unary (enum rtx_code code, enum machine_mode mode, rtx op,
		    enum machine_mode op_mode)
{
  rtx tem;

  /* If this simplifies, use it.  */
  if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0)
    return tem;

  return gen_rtx_fmt_e (code, mode, op);
}

/* Likewise for ternary operations.  */

rtx
simplify_gen_ternary (enum rtx_code code, enum machine_mode mode,
		      enum machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
{
  rtx tem;

  /* If this simplifies, use it.  */
  if (0 != (tem = simplify_ternary_operation (code, mode, op0_mode,
					      op0, op1, op2)))
    return tem;

  return gen_rtx_fmt_eee (code, mode, op0, op1, op2);
}

/* Likewise, for relational operations.
   CMP_MODE specifies the mode in which the comparison is done.  */

rtx
simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
			 enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  rtx tem;

  if (0 != (tem = simplify_relational_operation (code, mode, cmp_mode,
						 op0, op1)))
    return tem;

  return gen_rtx_fmt_ee (code, mode, op0, op1);
}

/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
   resulting RTX.  Return a new RTX which is as simplified as possible.  */
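/* For instance, substituting (const_int 0) for (reg A) in
   (plus:SI (reg A) (const_int 4)) rebuilds the PLUS through
   simplify_gen_binary, which folds it to (const_int 4).  */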

rtx
simplify_replace_rtx (rtx x, rtx old_rtx, rtx new_rtx)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode op_mode;
  rtx op0, op1, op2;

  /* If X is OLD_RTX, return NEW_RTX.  Otherwise, if this is an expression, try
     to build a new expression substituting recursively.  If we can't do
     anything, return our input.  */

  if (x == old_rtx)
    return new_rtx;

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      if (op0 == XEXP (x, 0))
	return x;
      return simplify_gen_unary (code, mode, op0, op_mode);

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_binary (code, mode, op0, op1);

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      op1 = simplify_replace_rtx (op1, old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_relational (code, mode, op_mode, op0, op1);

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
      op2 = simplify_replace_rtx (XEXP (x, 2), old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return x;
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);

    case RTX_EXTRA:
      /* The only case we try to handle is a SUBREG.  */
      if (code == SUBREG)
	{
	  op0 = simplify_replace_rtx (SUBREG_REG (x), old_rtx, new_rtx);
	  if (op0 == SUBREG_REG (x))
	    return x;
	  op0 = simplify_gen_subreg (GET_MODE (x), op0,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	  return op0 ? op0 : x;
	}
      break;

    case RTX_OBJ:
      if (code == MEM)
	{
	  op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
	  if (op0 == XEXP (x, 0))
	    return x;
	  return replace_equiv_address_nv (x, op0);
	}
      else if (code == LO_SUM)
	{
	  op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
	  op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);

	  /* (lo_sum (high x) x) -> x  */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    return op1;

	  if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return x;
	  return gen_rtx_LO_SUM (mode, op0, op1);
	}
      else if (code == REG)
	{
	  if (rtx_equal_p (x, old_rtx))
	    return new_rtx;
	}
      break;

    default:
      break;
    }
  return x;
}

/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.  */
rtx
simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
			  rtx op, enum machine_mode op_mode)
{
  rtx trueop, tem;

  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);

  trueop = avoid_constant_pool_reference (op);

  tem = simplify_const_unary_operation (code, mode, trueop, op_mode);
  if (tem)
    return tem;

  return simplify_unary_operation_1 (code, mode, op);
}

/* Perform some simplifications we can do even if the operands
   aren't constant.  */
static rtx
simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
{
  enum rtx_code reversed;
  rtx temp;

  switch (code)
    {
    case NOT:
      /* (not (not X)) == X.  */
      if (GET_CODE (op) == NOT)
	return XEXP (op, 0);

      /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the
	 comparison is all ones.  */
      if (COMPARISON_P (op)
	  && (mode == BImode || STORE_FLAG_VALUE == -1)
	  && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN))
	return simplify_gen_relational (reversed, mode, VOIDmode,
					XEXP (op, 0), XEXP (op, 1));

      /* (not (plus X -1)) can become (neg X).  */
      if (GET_CODE (op) == PLUS
	  && XEXP (op, 1) == constm1_rtx)
	return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);

      /* Similarly, (not (neg X)) is (plus X -1).  */
      if (GET_CODE (op) == NEG)
	return plus_constant (XEXP (op, 0), -1);

      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
      if (GET_CODE (op) == XOR
	  && GET_CODE (XEXP (op, 1)) == CONST_INT
	  && (temp = simplify_unary_operation (NOT, mode,
					       XEXP (op, 1), mode)) != 0)
	return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);

      /* (not (plus X C)) for signbit C is (xor X D) with D = ~C.  */
      if (GET_CODE (op) == PLUS
	  && GET_CODE (XEXP (op, 1)) == CONST_INT
	  && mode_signbit_p (mode, XEXP (op, 1))
	  && (temp = simplify_unary_operation (NOT, mode,
					       XEXP (op, 1), mode)) != 0)
	return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);


      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for
	 operands other than 1, but that is not valid.  We could do a
	 similar simplification for (not (lshiftrt C X)) where C is
	 just the sign bit, but this doesn't seem common enough to
	 bother with.  */
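      /* E.g. in QImode, (not (ashift (const_int 1) X)) becomes
	 (rotate (const_int -2) X): the NOT of the constant folds to
	 ~1 = -2, and the zeros the shift would bring in become the
	 ones a rotate carries around.  */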
      if (GET_CODE (op) == ASHIFT
	  && XEXP (op, 0) == const1_rtx)
	{
	  temp = simplify_gen_unary (NOT, mode, const1_rtx, mode);
	  return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1));
	}

      /* (not (ashiftrt foo C)) where C is the number of bits in FOO
	 minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
	 so we can perform the above simplification.  */

      if (STORE_FLAG_VALUE == -1
	  && GET_CODE (op) == ASHIFTRT
	  && GET_CODE (XEXP (op, 1)) == CONST_INT
	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return simplify_gen_relational (GE, mode, VOIDmode,
					XEXP (op, 0), const0_rtx);


      if (GET_CODE (op) == SUBREG
	  && subreg_lowpart_p (op)
	  && (GET_MODE_SIZE (GET_MODE (op))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	  && GET_CODE (SUBREG_REG (op)) == ASHIFT
	  && XEXP (SUBREG_REG (op), 0) == const1_rtx)
	{
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
	  rtx x;

	  x = gen_rtx_ROTATE (inner_mode,
			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
						  inner_mode),
			      XEXP (SUBREG_REG (op), 1));
	  return rtl_hooks.gen_lowpart_no_emit (mode, x);
	}

      /* Apply De Morgan's laws to reduce number of patterns for machines
	 with negating logical insns (and-not, nand, etc.).  If result has
	 only one NOT, put it first, since that is how the patterns are
	 coded.  */
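      /* E.g. (not (and X Y)) becomes (ior (not X) (not Y)); if one
	 inner NOT then cancels, as in (not (and (not X) Y)), the
	 surviving NOT is placed first: (ior (not Y) X).  */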

      if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
	{
	  rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
	  enum machine_mode op_mode;

	  op_mode = GET_MODE (in1);
	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);

	  op_mode = GET_MODE (in2);
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);

	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
	    {
	      rtx tem = in2;
	      in2 = in1; in1 = tem;
	    }

	  return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR,
				 mode, in1, in2);
	}
      break;

    case NEG:
      /* (neg (neg X)) == X.  */
      if (GET_CODE (op) == NEG)
	return XEXP (op, 0);

      /* (neg (plus X 1)) can become (not X).  */
      if (GET_CODE (op) == PLUS
	  && XEXP (op, 1) == const1_rtx)
	return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);

      /* Similarly, (neg (not X)) is (plus X 1).  */
      if (GET_CODE (op) == NOT)
	return plus_constant (XEXP (op, 0), 1);

      /* (neg (minus X Y)) can become (minus Y X).  This transformation
	 isn't safe for modes with signed zeros, since if X and Y are
	 both +0, (minus Y X) is the same as (minus X Y).  If the
	 rounding mode is towards +infinity (or -infinity) then the two
	 expressions will be rounded differently.  */
      if (GET_CODE (op) == MINUS
	  && !HONOR_SIGNED_ZEROS (mode)
	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
	return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));

      if (GET_CODE (op) == PLUS
	  && !HONOR_SIGNED_ZEROS (mode)
	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
	{
	  /* (neg (plus A C)) is simplified to (minus -C A).  */
	  if (GET_CODE (XEXP (op, 1)) == CONST_INT
	      || GET_CODE (XEXP (op, 1)) == CONST_DOUBLE)
	    {
	      temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
	      if (temp)
		return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 0));
	    }

	  /* (neg (plus A B)) is canonicalized to (minus (neg A) B).  */
	  temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
	  return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 1));
	}

      /* (neg (mult A B)) becomes (mult (neg A) B).
	 This works even for floating-point values.  */
      if (GET_CODE (op) == MULT
	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
	{
	  temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
	  return simplify_gen_binary (MULT, mode, temp, XEXP (op, 1));
	}

      /* NEG commutes with ASHIFT since it is multiplication.  Only do
	 this if we can then eliminate the NEG (e.g., if the operand
	 is a constant).  */
      if (GET_CODE (op) == ASHIFT)
	{
	  temp = simplify_unary_operation (NEG, mode, XEXP (op, 0), mode);
	  if (temp)
	    return simplify_gen_binary (ASHIFT, mode, temp, XEXP (op, 1));
	}

      /* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
	 C is equal to the width of MODE minus 1.  */
      if (GET_CODE (op) == ASHIFTRT
	  && GET_CODE (XEXP (op, 1)) == CONST_INT
	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return simplify_gen_binary (LSHIFTRT, mode,
				    XEXP (op, 0), XEXP (op, 1));

      /* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
	 C is equal to the width of MODE minus 1.  */
      if (GET_CODE (op) == LSHIFTRT
	  && GET_CODE (XEXP (op, 1)) == CONST_INT
	  && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return simplify_gen_binary (ASHIFTRT, mode,
				    XEXP (op, 0), XEXP (op, 1));

      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (op) == XOR
	  && XEXP (op, 1) == const1_rtx
	  && nonzero_bits (XEXP (op, 0), mode) == 1)
	return plus_constant (XEXP (op, 0), -1);

      /* (neg (lt x 0)) is (ashiftrt X C) if STORE_FLAG_VALUE is 1.  */
      /* (neg (lt x 0)) is (lshiftrt X C) if STORE_FLAG_VALUE is -1.  */
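      /* E.g. with STORE_FLAG_VALUE == 1, (neg:SI (lt:SI X (const_int 0)))
	 becomes (ashiftrt:SI X (const_int 31)), which replicates the
	 sign bit of X across the word: -1 when X < 0, else 0.  */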
      if (GET_CODE (op) == LT
	  && XEXP (op, 1) == const0_rtx)
	{
	  enum machine_mode inner = GET_MODE (XEXP (op, 0));
	  int isize = GET_MODE_BITSIZE (inner);
	  if (STORE_FLAG_VALUE == 1)
	    {
	      temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
					  GEN_INT (isize - 1));
	      if (mode == inner)
		return temp;
	      if (GET_MODE_BITSIZE (mode) > isize)
		return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner);
	      return simplify_gen_unary (TRUNCATE, mode, temp, inner);
	    }
	  else if (STORE_FLAG_VALUE == -1)
	    {
	      temp = simplify_gen_binary (LSHIFTRT, inner, XEXP (op, 0),
					  GEN_INT (isize - 1));
	      if (mode == inner)
		return temp;
	      if (GET_MODE_BITSIZE (mode) > isize)
		return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner);
	      return simplify_gen_unary (TRUNCATE, mode, temp, inner);
	    }
	}
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	break;

      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
      if ((GET_CODE (op) == SIGN_EXTEND
	   || GET_CODE (op) == ZERO_EXTEND)
	  && GET_MODE (XEXP (op, 0)) == mode)
	return XEXP (op, 0);

      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
	 (OP:SI foo:SI) if OP is NEG or ABS.  */
      if ((GET_CODE (op) == ABS
	   || GET_CODE (op) == NEG)
	  && (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
	      || GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
	  && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (op), mode,
				   XEXP (XEXP (op, 0), 0), mode);

      /* (truncate:A (subreg:B (truncate:C X) 0)) is
	 (truncate:A X).  */
      if (GET_CODE (op) == SUBREG
	  && GET_CODE (SUBREG_REG (op)) == TRUNCATE
	  && subreg_lowpart_p (op))
	return simplify_gen_unary (TRUNCATE, mode, XEXP (SUBREG_REG (op), 0),
				   GET_MODE (XEXP (SUBREG_REG (op), 0)));

      /* If we know that the value is already truncated, we can
         replace the TRUNCATE with a SUBREG.  Note that this is also
         valid if TRULY_NOOP_TRUNCATION is false for the corresponding
         modes; we just have to apply a different definition for
         truncation.  But don't do this for an (LSHIFTRT (MULT ...))
         since this will cause problems with the umulXi3_highpart
         patterns.  */
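      /* E.g. a (sign_extend:DI foo:HI) already has at least 49 sign-bit
	 copies in DImode (assuming a 64-bit DImode and 16-bit HImode),
	 so truncating it to SImode only drops bits that repeat the
	 sign and can be done with a lowpart SUBREG.  */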
      if ((TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				  GET_MODE_BITSIZE (GET_MODE (op)))
	   ? (num_sign_bit_copies (op, GET_MODE (op))
	      > (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op))
				- GET_MODE_BITSIZE (mode)))
	   : truncated_to_mode (mode, op))
	  && ! (GET_CODE (op) == LSHIFTRT
		&& GET_CODE (XEXP (op, 0)) == MULT))
	return rtl_hooks.gen_lowpart_no_emit (mode, op);

      /* A truncate of a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  This is like the previous test,
         but it works even if the comparison is done in a mode larger
         than HOST_BITS_PER_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && COMPARISON_P (op)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
	return rtl_hooks.gen_lowpart_no_emit (mode, op);
      break;

    case FLOAT_TRUNCATE:
      if (DECIMAL_FLOAT_MODE_P (mode))
	break;

      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
      if (GET_CODE (op) == FLOAT_EXTEND
	  && GET_MODE (XEXP (op, 0)) == mode)
	return XEXP (op, 0);

      /* (float_truncate:SF (float_truncate:DF foo:XF))
         = (float_truncate:SF foo:XF).
	 This may eliminate double rounding, so it is unsafe.

         (float_truncate:SF (float_extend:XF foo:DF))
         = (float_truncate:SF foo:DF).

         (float_truncate:DF (float_extend:XF foo:SF))
         = (float_extend:DF foo:SF).  */
      if ((GET_CODE (op) == FLOAT_TRUNCATE
	   && flag_unsafe_math_optimizations)
	  || GET_CODE (op) == FLOAT_EXTEND)
	return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (op, 0)))
				   > GET_MODE_SIZE (mode)
				   ? FLOAT_TRUNCATE : FLOAT_EXTEND,
				   mode,
				   XEXP (op, 0), mode);

      /* (float_truncate (float x)) is (float x)  */
      if (GET_CODE (op) == FLOAT
	  && (flag_unsafe_math_optimizations
	      || ((unsigned) significand_size (GET_MODE (op))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
		      - num_sign_bit_copies (XEXP (op, 0),
					     GET_MODE (XEXP (op, 0)))))))
	return simplify_gen_unary (FLOAT, mode,
				   XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));

      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
	 (OP:SF foo:SF) if OP is NEG or ABS.  */
      if ((GET_CODE (op) == ABS
	   || GET_CODE (op) == NEG)
	  && GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND
	  && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (op), mode,
				   XEXP (XEXP (op, 0), 0), mode);

      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
	 is (float_truncate:SF x).  */
      if (GET_CODE (op) == SUBREG
	  && subreg_lowpart_p (op)
	  && GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE)
	return SUBREG_REG (op);
      break;

    case FLOAT_EXTEND:
      if (DECIMAL_FLOAT_MODE_P (mode))
	break;

      /* (float_extend (float_extend x)) is (float_extend x)

	 (float_extend (float x)) is (float x) assuming that double
	 rounding can't happen.  */
      if (GET_CODE (op) == FLOAT_EXTEND
	  || (GET_CODE (op) == FLOAT
	      && ((unsigned) significand_size (GET_MODE (op))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
		      - num_sign_bit_copies (XEXP (op, 0),
					     GET_MODE (XEXP (op, 0)))))))
	return simplify_gen_unary (GET_CODE (op), mode,
				   XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));

      break;

    case ABS:
      /* (abs (neg <foo>)) -> (abs <foo>) */
      if (GET_CODE (op) == NEG)
	return simplify_gen_unary (ABS, mode, XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));

      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
         do nothing.  */
      if (GET_MODE (op) == VOIDmode)
	break;

      /* If operand is something known to be positive, ignore the ABS.  */
      if (GET_CODE (op) == FFS || GET_CODE (op) == ABS
	  || ((GET_MODE_BITSIZE (GET_MODE (op))
	       <= HOST_BITS_PER_WIDE_INT)
	      && ((nonzero_bits (op, GET_MODE (op))
		   & ((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
		  == 0)))
	return op;

      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
      if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode))
	return gen_rtx_NEG (mode, op);

      break;

    case FFS:
      /* (ffs (*_extend <X>)) = (ffs <X>) */
      if (GET_CODE (op) == SIGN_EXTEND
	  || GET_CODE (op) == ZERO_EXTEND)
	return simplify_gen_unary (FFS, mode, XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));
      break;

    case POPCOUNT:
    case PARITY:
      /* (pop* (zero_extend <X>)) = (pop* <X>) */
      if (GET_CODE (op) == ZERO_EXTEND)
	return simplify_gen_unary (code, mode, XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));
      break;

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (GET_CODE (op) == SIGN_EXTEND)
	return simplify_gen_unary (FLOAT, mode, XEXP (op, 0),
				   GET_MODE (XEXP (op, 0)));
      break;

    case SIGN_EXTEND:
      /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
	 becomes just the MINUS if its mode is MODE.  This allows
	 folding switch statements on machines using casesi (such as
	 the VAX).  */
      if (GET_CODE (op) == TRUNCATE
	  && GET_MODE (XEXP (op, 0)) == mode
	  && GET_CODE (XEXP (op, 0)) == MINUS
	  && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
	  && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
	return XEXP (op, 0);

      /* Check for a sign extension of a subreg of a promoted
	 variable, where the promotion is sign-extended, and the
	 target mode is the same as the variable's promotion.  */
      if (GET_CODE (op) == SUBREG
	  && SUBREG_PROMOTED_VAR_P (op)
	  && ! SUBREG_PROMOTED_UNSIGNED_P (op)
	  && GET_MODE (XEXP (op, 0)) == mode)
	return XEXP (op, 0);

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if (! POINTERS_EXTEND_UNSIGNED
	  && mode == Pmode && GET_MODE (op) == ptr_mode
	  && (CONSTANT_P (op)
	      || (GET_CODE (op) == SUBREG
		  && REG_P (SUBREG_REG (op))
		  && REG_POINTER (SUBREG_REG (op))
		  && GET_MODE (SUBREG_REG (op)) == Pmode)))
	return convert_memory_address (Pmode, op);
#endif
      break;

    case ZERO_EXTEND:
      /* Check for a zero extension of a subreg of a promoted
	 variable, where the promotion is zero-extended, and the
	 target mode is the same as the variable's promotion.  */
      if (GET_CODE (op) == SUBREG
	  && SUBREG_PROMOTED_VAR_P (op)
	  && SUBREG_PROMOTED_UNSIGNED_P (op) > 0
	  && GET_MODE (XEXP (op, 0)) == mode)
	return XEXP (op, 0);

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if (POINTERS_EXTEND_UNSIGNED > 0
	  && mode == Pmode && GET_MODE (op) == ptr_mode
	  && (CONSTANT_P (op)
	      || (GET_CODE (op) == SUBREG
		  && REG_P (SUBREG_REG (op))
		  && REG_POINTER (SUBREG_REG (op))
		  && GET_MODE (SUBREG_REG (op)) == Pmode)))
	return convert_memory_address (Pmode, op);
#endif
      break;

    default:
      break;
    }

  return 0;
}

/* Try to compute the value of a unary operation CODE whose output mode is to
   be MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if the value cannot be computed.  */
rtx
simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
				rtx op, enum machine_mode op_mode)
{
  unsigned int width = GET_MODE_BITSIZE (mode);

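  /* A VEC_DUPLICATE of a constant folds to a CONST_VECTOR that repeats
     it, e.g. (vec_duplicate:V4SI (const_int 5)) becomes the vector
     [5, 5, 5, 5].  */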
  if (code == VEC_DUPLICATE)
    {
      gcc_assert (VECTOR_MODE_P (mode));
      if (GET_MODE (op) != VOIDmode)
	{
	  if (!VECTOR_MODE_P (GET_MODE (op)))
	    gcc_assert (GET_MODE_INNER (mode) == GET_MODE (op));
	  else
	    gcc_assert (GET_MODE_INNER (mode)
			== GET_MODE_INNER (GET_MODE (op)));
	}
      if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE
	  || GET_CODE (op) == CONST_VECTOR)
	{
	  int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
	  unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
	  rtvec v = rtvec_alloc (n_elts);
	  unsigned int i;

	  if (GET_CODE (op) != CONST_VECTOR)
	    for (i = 0; i < n_elts; i++)
	      RTVEC_ELT (v, i) = op;
	  else
	    {
	      enum machine_mode inmode = GET_MODE (op);
	      int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
	      unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);

	      gcc_assert (in_n_elts < n_elts);
	      gcc_assert ((n_elts % in_n_elts) == 0);
	      for (i = 0; i < n_elts; i++)
		RTVEC_ELT (v, i) = CONST_VECTOR_ELT (op, i % in_n_elts);
	    }
	  return gen_rtx_CONST_VECTOR (mode, v);
	}
    }

  if (VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
    {
      int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
      unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
      enum machine_mode opmode = GET_MODE (op);
      int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
      unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
      rtvec v = rtvec_alloc (n_elts);
      unsigned int i;

      gcc_assert (op_n_elts == n_elts);
      for (i = 0; i < n_elts; i++)
	{
	  rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
					    CONST_VECTOR_ELT (op, i),
					    GET_MODE_INNER (opmode));
	  if (!x)
	    return 0;
	  RTVEC_ELT (v, i) = x;
	}
      return gen_rtx_CONST_VECTOR (mode, v);
    }

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      REAL_VALUE_FROM_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE
	       || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
	{
	  /* We don't know how to interpret negative-looking numbers in
	     this case, so don't try to fold those.  */
	  if (hv < 0)
	    return 0;
	}
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
	;
      else
	hv = 0, lv &= GET_MODE_MASK (op_mode);

      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

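  /* The remainder folds fully constant operands.  For example,
     (not:QI (const_int 0)) computes val = ~0 and returns
     gen_int_mode (val, QImode), i.e. (const_int -1).  */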
  if (GET_CODE (op) == CONST_INT
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      HOST_WIDE_INT arg0 = INTVAL (op);
      HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case CLZ:
	  arg0 &= GET_MODE_MASK (mode);
	  if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
	    ;
	  else
	    val = GET_MODE_BITSIZE (mode) - floor_log2 (arg0) - 1;
	  break;

	case CTZ:
	  arg0 &= GET_MODE_MASK (mode);
	  if (arg0 == 0)
	    {
	      /* Even if the value at zero is undefined, we have to come
		 up with some replacement.  Seems good enough.  */
	      if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
		val = GET_MODE_BITSIZE (mode);
	    }
	  else
	    val = exact_log2 (arg0 & -arg0);
	  break;

	case POPCOUNT:
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
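	  /* Each iteration clears the lowest set bit of arg0
	     (Kernighan's trick), so the loop body runs once per
	     1 bit in the value.  */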
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  break;

	case PARITY:
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  val &= 1;
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  /* When zero-extending a CONST_INT, we need to know its
	     original mode.  */
	  gcc_assert (op_mode != VOIDmode);
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      val
		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
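	      /* If the top bit of the masked value is set, subtract
		 2**bits to sign-extend it; e.g. for QImode, 0x80
		 becomes 0x80 - 0x100 = -128.  */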
1083*404b540aSrobert 	      if (val
1084*404b540aSrobert 		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
1085*404b540aSrobert 		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
1086*404b540aSrobert 	    }
1087*404b540aSrobert 	  else
1088*404b540aSrobert 	    return 0;
1089*404b540aSrobert 	  break;
1090*404b540aSrobert 
1091*404b540aSrobert 	case SQRT:
1092*404b540aSrobert 	case FLOAT_EXTEND:
1093*404b540aSrobert 	case FLOAT_TRUNCATE:
1094*404b540aSrobert 	case SS_TRUNCATE:
1095*404b540aSrobert 	case US_TRUNCATE:
1096*404b540aSrobert 	case SS_NEG:
1097*404b540aSrobert 	  return 0;
1098*404b540aSrobert 
1099*404b540aSrobert 	default:
1100*404b540aSrobert 	  gcc_unreachable ();
1101*404b540aSrobert 	}
1102*404b540aSrobert 
1103*404b540aSrobert       return gen_int_mode (val, mode);
1104*404b540aSrobert     }
1105*404b540aSrobert 
1106*404b540aSrobert   /* We can do some operations on integer CONST_DOUBLEs.  Also allow
1107*404b540aSrobert      for a DImode operation on a CONST_INT.  */
1108*404b540aSrobert   else if (GET_MODE (op) == VOIDmode
1109*404b540aSrobert 	   && width <= HOST_BITS_PER_WIDE_INT * 2
1110*404b540aSrobert 	   && (GET_CODE (op) == CONST_DOUBLE
1111*404b540aSrobert 	       || GET_CODE (op) == CONST_INT))
1112*404b540aSrobert     {
1113*404b540aSrobert       unsigned HOST_WIDE_INT l1, lv;
1114*404b540aSrobert       HOST_WIDE_INT h1, hv;
1115*404b540aSrobert 
1116*404b540aSrobert       if (GET_CODE (op) == CONST_DOUBLE)
1117*404b540aSrobert 	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
1118*404b540aSrobert       else
1119*404b540aSrobert 	l1 = INTVAL (op), h1 = HWI_SIGN_EXTEND (l1);
1120*404b540aSrobert 
1121*404b540aSrobert       switch (code)
1122*404b540aSrobert 	{
1123*404b540aSrobert 	case NOT:
1124*404b540aSrobert 	  lv = ~ l1;
1125*404b540aSrobert 	  hv = ~ h1;
1126*404b540aSrobert 	  break;
1127*404b540aSrobert 
1128*404b540aSrobert 	case NEG:
1129*404b540aSrobert 	  neg_double (l1, h1, &lv, &hv);
1130*404b540aSrobert 	  break;
1131*404b540aSrobert 
1132*404b540aSrobert 	case ABS:
1133*404b540aSrobert 	  if (h1 < 0)
1134*404b540aSrobert 	    neg_double (l1, h1, &lv, &hv);
1135*404b540aSrobert 	  else
1136*404b540aSrobert 	    lv = l1, hv = h1;
1137*404b540aSrobert 	  break;
1138*404b540aSrobert 
1139*404b540aSrobert 	case FFS:
1140*404b540aSrobert 	  hv = 0;
1141*404b540aSrobert 	  if (l1 == 0)
1142*404b540aSrobert 	    {
1143*404b540aSrobert 	      if (h1 == 0)
1144*404b540aSrobert 		lv = 0;
1145*404b540aSrobert 	      else
1146*404b540aSrobert 		lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
1147*404b540aSrobert 	    }
1148*404b540aSrobert 	  else
1149*404b540aSrobert 	    lv = exact_log2 (l1 & -l1) + 1;
1150*404b540aSrobert 	  break;
1151*404b540aSrobert 
1152*404b540aSrobert 	case CLZ:
1153*404b540aSrobert 	  hv = 0;
1154*404b540aSrobert 	  if (h1 != 0)
1155*404b540aSrobert 	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
1156*404b540aSrobert 	      - HOST_BITS_PER_WIDE_INT;
1157*404b540aSrobert 	  else if (l1 != 0)
1158*404b540aSrobert 	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
1159*404b540aSrobert 	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
1160*404b540aSrobert 	    lv = GET_MODE_BITSIZE (mode);
1161*404b540aSrobert 	  break;
1162*404b540aSrobert 
1163*404b540aSrobert 	case CTZ:
1164*404b540aSrobert 	  hv = 0;
1165*404b540aSrobert 	  if (l1 != 0)
1166*404b540aSrobert 	    lv = exact_log2 (l1 & -l1);
1167*404b540aSrobert 	  else if (h1 != 0)
1168*404b540aSrobert 	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1);
1169*404b540aSrobert 	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
1170*404b540aSrobert 	    lv = GET_MODE_BITSIZE (mode);
1171*404b540aSrobert 	  break;
1172*404b540aSrobert 
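	  /* Worked example for the two-word CTZ above, assuming a
	     64-bit HOST_WIDE_INT: with l1 = 0 and h1 = 4 the low word
	     contributes no set bit, so the count comes from the high
	     word offset by the word size:
	     lv = 64 + exact_log2 (4 & -4) = 64 + 2 = 66.  */
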
1173*404b540aSrobert 	case POPCOUNT:
1174*404b540aSrobert 	  hv = 0;
1175*404b540aSrobert 	  lv = 0;
1176*404b540aSrobert 	  while (l1)
1177*404b540aSrobert 	    lv++, l1 &= l1 - 1;
1178*404b540aSrobert 	  while (h1)
1179*404b540aSrobert 	    lv++, h1 &= h1 - 1;
1180*404b540aSrobert 	  break;
1181*404b540aSrobert 
1182*404b540aSrobert 	case PARITY:
1183*404b540aSrobert 	  hv = 0;
1184*404b540aSrobert 	  lv = 0;
1185*404b540aSrobert 	  while (l1)
1186*404b540aSrobert 	    lv++, l1 &= l1 - 1;
1187*404b540aSrobert 	  while (h1)
1188*404b540aSrobert 	    lv++, h1 &= h1 - 1;
1189*404b540aSrobert 	  lv &= 1;
1190*404b540aSrobert 	  break;
1191*404b540aSrobert 
1192*404b540aSrobert 	case TRUNCATE:
1193*404b540aSrobert 	  /* This is just a change-of-mode, so do nothing.  */
1194*404b540aSrobert 	  lv = l1, hv = h1;
1195*404b540aSrobert 	  break;
1196*404b540aSrobert 
1197*404b540aSrobert 	case ZERO_EXTEND:
1198*404b540aSrobert 	  gcc_assert (op_mode != VOIDmode);
1199*404b540aSrobert 
1200*404b540aSrobert 	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
1201*404b540aSrobert 	    return 0;
1202*404b540aSrobert 
1203*404b540aSrobert 	  hv = 0;
1204*404b540aSrobert 	  lv = l1 & GET_MODE_MASK (op_mode);
1205*404b540aSrobert 	  break;
1206*404b540aSrobert 
1207*404b540aSrobert 	case SIGN_EXTEND:
1208*404b540aSrobert 	  if (op_mode == VOIDmode
1209*404b540aSrobert 	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
1210*404b540aSrobert 	    return 0;
1211*404b540aSrobert 	  else
1212*404b540aSrobert 	    {
1213*404b540aSrobert 	      lv = l1 & GET_MODE_MASK (op_mode);
1214*404b540aSrobert 	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
1215*404b540aSrobert 		  && (lv & ((HOST_WIDE_INT) 1
1216*404b540aSrobert 			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
1217*404b540aSrobert 		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
1218*404b540aSrobert 
1219*404b540aSrobert 	      hv = HWI_SIGN_EXTEND (lv);
1220*404b540aSrobert 	    }
1221*404b540aSrobert 	  break;
1222*404b540aSrobert 
1223*404b540aSrobert 	case SQRT:
1224*404b540aSrobert 	  return 0;
1225*404b540aSrobert 
1226*404b540aSrobert 	default:
1227*404b540aSrobert 	  return 0;
1228*404b540aSrobert 	}
1229*404b540aSrobert 
1230*404b540aSrobert       return immed_double_const (lv, hv, mode);
1231*404b540aSrobert     }
1232*404b540aSrobert 
1233*404b540aSrobert   else if (GET_CODE (op) == CONST_DOUBLE
1234*404b540aSrobert 	   && SCALAR_FLOAT_MODE_P (mode))
1235*404b540aSrobert     {
1236*404b540aSrobert       REAL_VALUE_TYPE d, t;
1237*404b540aSrobert       REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1238*404b540aSrobert 
1239*404b540aSrobert       switch (code)
1240*404b540aSrobert 	{
1241*404b540aSrobert 	case SQRT:
1242*404b540aSrobert 	  if (HONOR_SNANS (mode) && real_isnan (&d))
1243*404b540aSrobert 	    return 0;
1244*404b540aSrobert 	  real_sqrt (&t, mode, &d);
1245*404b540aSrobert 	  d = t;
1246*404b540aSrobert 	  break;
1247*404b540aSrobert 	case ABS:
1248*404b540aSrobert 	  d = REAL_VALUE_ABS (d);
1249*404b540aSrobert 	  break;
1250*404b540aSrobert 	case NEG:
1251*404b540aSrobert 	  d = REAL_VALUE_NEGATE (d);
1252*404b540aSrobert 	  break;
1253*404b540aSrobert 	case FLOAT_TRUNCATE:
1254*404b540aSrobert 	  d = real_value_truncate (mode, d);
1255*404b540aSrobert 	  break;
1256*404b540aSrobert 	case FLOAT_EXTEND:
1257*404b540aSrobert 	  /* All this does is change the mode.  */
1258*404b540aSrobert 	  break;
1259*404b540aSrobert 	case FIX:
1260*404b540aSrobert 	  real_arithmetic (&d, FIX_TRUNC_EXPR, &d, NULL);
1261*404b540aSrobert 	  break;
1262*404b540aSrobert 	case NOT:
1263*404b540aSrobert 	  {
1264*404b540aSrobert 	    long tmp[4];
1265*404b540aSrobert 	    int i;
1266*404b540aSrobert 
1267*404b540aSrobert 	    real_to_target (tmp, &d, GET_MODE (op));
1268*404b540aSrobert 	    for (i = 0; i < 4; i++)
1269*404b540aSrobert 	      tmp[i] = ~tmp[i];
1270*404b540aSrobert 	    real_from_target (&d, tmp, mode);
1271*404b540aSrobert 	    break;
1272*404b540aSrobert 	  }
1273*404b540aSrobert 	default:
1274*404b540aSrobert 	  gcc_unreachable ();
1275*404b540aSrobert 	}
1276*404b540aSrobert       return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
1277*404b540aSrobert     }
1278*404b540aSrobert 
1279*404b540aSrobert   else if (GET_CODE (op) == CONST_DOUBLE
1280*404b540aSrobert 	   && SCALAR_FLOAT_MODE_P (GET_MODE (op))
1281*404b540aSrobert 	   && GET_MODE_CLASS (mode) == MODE_INT
1282*404b540aSrobert 	   && width <= 2*HOST_BITS_PER_WIDE_INT && width > 0)
1283*404b540aSrobert     {
1284*404b540aSrobert       /* Although the overflow semantics of RTL's FIX and UNSIGNED_FIX
1285*404b540aSrobert 	 operators are intentionally left unspecified (to ease implementation
1286*404b540aSrobert 	 by target backends), for consistency, this routine implements the
1287*404b540aSrobert 	 same semantics for constant folding as used by the middle-end.  */
1288*404b540aSrobert 
1289*404b540aSrobert       /* This was formerly used only for non-IEEE float.
1290*404b540aSrobert 	 eggert@twinsun.com says it is safe for IEEE also.  */
1291*404b540aSrobert       HOST_WIDE_INT xh, xl, th, tl;
1292*404b540aSrobert       REAL_VALUE_TYPE x, t;
1293*404b540aSrobert       REAL_VALUE_FROM_CONST_DOUBLE (x, op);
1294*404b540aSrobert       switch (code)
1295*404b540aSrobert 	{
1296*404b540aSrobert 	case FIX:
1297*404b540aSrobert 	  if (REAL_VALUE_ISNAN (x))
1298*404b540aSrobert 	    return const0_rtx;
1299*404b540aSrobert 
1300*404b540aSrobert 	  /* Test against the signed upper bound.  */
1301*404b540aSrobert 	  if (width > HOST_BITS_PER_WIDE_INT)
1302*404b540aSrobert 	    {
1303*404b540aSrobert 	      th = ((unsigned HOST_WIDE_INT) 1
1304*404b540aSrobert 		    << (width - HOST_BITS_PER_WIDE_INT - 1)) - 1;
1305*404b540aSrobert 	      tl = -1;
1306*404b540aSrobert 	    }
1307*404b540aSrobert 	  else
1308*404b540aSrobert 	    {
1309*404b540aSrobert 	      th = 0;
1310*404b540aSrobert 	      tl = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
1311*404b540aSrobert 	    }
1312*404b540aSrobert 	  real_from_integer (&t, VOIDmode, tl, th, 0);
1313*404b540aSrobert 	  if (REAL_VALUES_LESS (t, x))
1314*404b540aSrobert 	    {
1315*404b540aSrobert 	      xh = th;
1316*404b540aSrobert 	      xl = tl;
1317*404b540aSrobert 	      break;
1318*404b540aSrobert 	    }
1319*404b540aSrobert 
1320*404b540aSrobert 	  /* Test against the signed lower bound.  */
1321*404b540aSrobert 	  if (width > HOST_BITS_PER_WIDE_INT)
1322*404b540aSrobert 	    {
1323*404b540aSrobert 	      th = (HOST_WIDE_INT) -1 << (width - HOST_BITS_PER_WIDE_INT - 1);
1324*404b540aSrobert 	      tl = 0;
1325*404b540aSrobert 	    }
1326*404b540aSrobert 	  else
1327*404b540aSrobert 	    {
1328*404b540aSrobert 	      th = -1;
1329*404b540aSrobert 	      tl = (HOST_WIDE_INT) -1 << (width - 1);
1330*404b540aSrobert 	    }
1331*404b540aSrobert 	  real_from_integer (&t, VOIDmode, tl, th, 0);
1332*404b540aSrobert 	  if (REAL_VALUES_LESS (x, t))
1333*404b540aSrobert 	    {
1334*404b540aSrobert 	      xh = th;
1335*404b540aSrobert 	      xl = tl;
1336*404b540aSrobert 	      break;
1337*404b540aSrobert 	    }
1338*404b540aSrobert 	  REAL_VALUE_TO_INT (&xl, &xh, x);
1339*404b540aSrobert 	  break;
1340*404b540aSrobert 
1341*404b540aSrobert 	case UNSIGNED_FIX:
1342*404b540aSrobert 	  if (REAL_VALUE_ISNAN (x) || REAL_VALUE_NEGATIVE (x))
1343*404b540aSrobert 	    return const0_rtx;
1344*404b540aSrobert 
1345*404b540aSrobert 	  /* Test against the unsigned upper bound.  */
1346*404b540aSrobert 	  if (width == 2*HOST_BITS_PER_WIDE_INT)
1347*404b540aSrobert 	    {
1348*404b540aSrobert 	      th = -1;
1349*404b540aSrobert 	      tl = -1;
1350*404b540aSrobert 	    }
1351*404b540aSrobert 	  else if (width >= HOST_BITS_PER_WIDE_INT)
1352*404b540aSrobert 	    {
1353*404b540aSrobert 	      th = ((unsigned HOST_WIDE_INT) 1
1354*404b540aSrobert 		    << (width - HOST_BITS_PER_WIDE_INT)) - 1;
1355*404b540aSrobert 	      tl = -1;
1356*404b540aSrobert 	    }
1357*404b540aSrobert 	  else
1358*404b540aSrobert 	    {
1359*404b540aSrobert 	      th = 0;
1360*404b540aSrobert 	      tl = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
1361*404b540aSrobert 	    }
1362*404b540aSrobert 	  real_from_integer (&t, VOIDmode, tl, th, 1);
1363*404b540aSrobert 	  if (REAL_VALUES_LESS (t, x))
1364*404b540aSrobert 	    {
1365*404b540aSrobert 	      xh = th;
1366*404b540aSrobert 	      xl = tl;
1367*404b540aSrobert 	      break;
1368*404b540aSrobert 	    }
1369*404b540aSrobert 
1370*404b540aSrobert 	  REAL_VALUE_TO_INT (&xl, &xh, x);
1371*404b540aSrobert 	  break;
1372*404b540aSrobert 
1373*404b540aSrobert 	default:
1374*404b540aSrobert 	  gcc_unreachable ();
1375*404b540aSrobert 	}
1376*404b540aSrobert       return immed_double_const (xl, xh, mode);
1377*404b540aSrobert     }
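
/* Illustrative note on the FIX/UNSIGNED_FIX folding above: NaNs fold
   to 0 and out-of-range values saturate to the extreme representable
   integer.  A hedged plain-C sketch of the same policy for a signed
   32-bit target (fold_fix32 is a hypothetical name, not a GCC
   function; "x != x" is the usual NaN test):

     long long
     fold_fix32 (double x)
     {
       if (x != x)
	 return 0;
       if (x > 2147483647.0)
	 return 2147483647LL;
       if (x < -2147483648.0)
	 return -2147483647LL - 1;
       return (long long) x;
     }
*/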
1378*404b540aSrobert 
1379*404b540aSrobert   return NULL_RTX;
1380*404b540aSrobert }
1381*404b540aSrobert 
1382*404b540aSrobert /* Subroutine of simplify_binary_operation to simplify a commutative,
1383*404b540aSrobert    associative binary operation CODE with result mode MODE, operating
1384*404b540aSrobert    on OP0 and OP1.  CODE is currently one of PLUS, MULT, AND, IOR, XOR,
1385*404b540aSrobert    SMIN, SMAX, UMIN or UMAX.  Return zero if no simplification or
1386*404b540aSrobert    canonicalization is possible.  */
1387*404b540aSrobert 
1388*404b540aSrobert static rtx
1389*404b540aSrobert simplify_associative_operation (enum rtx_code code, enum machine_mode mode,
1390*404b540aSrobert 				rtx op0, rtx op1)
1391*404b540aSrobert {
1392*404b540aSrobert   rtx tem;
1393*404b540aSrobert 
1394*404b540aSrobert   /* Linearize the operator to the left.  */
1395*404b540aSrobert   if (GET_CODE (op1) == code)
1396*404b540aSrobert     {
1397*404b540aSrobert       /* "(a op b) op (c op d)" becomes "((a op b) op c) op d".  */
1398*404b540aSrobert       if (GET_CODE (op0) == code)
1399*404b540aSrobert 	{
1400*404b540aSrobert 	  tem = simplify_gen_binary (code, mode, op0, XEXP (op1, 0));
1401*404b540aSrobert 	  return simplify_gen_binary (code, mode, tem, XEXP (op1, 1));
1402*404b540aSrobert 	}
1403*404b540aSrobert 
1404*404b540aSrobert       /* "a op (b op c)" becomes "(b op c) op a".  */
1405*404b540aSrobert       if (! swap_commutative_operands_p (op1, op0))
1406*404b540aSrobert 	return simplify_gen_binary (code, mode, op1, op0);
1407*404b540aSrobert 
1408*404b540aSrobert       tem = op0;
1409*404b540aSrobert       op0 = op1;
1410*404b540aSrobert       op1 = tem;
1411*404b540aSrobert     }
1412*404b540aSrobert 
1413*404b540aSrobert   if (GET_CODE (op0) == code)
1414*404b540aSrobert     {
1415*404b540aSrobert       /* Canonicalize "(x op c) op y" as "(x op y) op c".  */
1416*404b540aSrobert       if (swap_commutative_operands_p (XEXP (op0, 1), op1))
1417*404b540aSrobert 	{
1418*404b540aSrobert 	  tem = simplify_gen_binary (code, mode, XEXP (op0, 0), op1);
1419*404b540aSrobert 	  return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
1420*404b540aSrobert 	}
1421*404b540aSrobert 
1422*404b540aSrobert       /* Attempt to simplify "(a op b) op c" as "a op (b op c)".  */
1423*404b540aSrobert       tem = swap_commutative_operands_p (XEXP (op0, 1), op1)
1424*404b540aSrobert 	    ? simplify_binary_operation (code, mode, op1, XEXP (op0, 1))
1425*404b540aSrobert 	    : simplify_binary_operation (code, mode, XEXP (op0, 1), op1);
1426*404b540aSrobert       if (tem != 0)
1427*404b540aSrobert         return simplify_gen_binary (code, mode, XEXP (op0, 0), tem);
1428*404b540aSrobert 
1429*404b540aSrobert       /* Attempt to simplify "(a op b) op c" as "(a op c) op b".  */
1430*404b540aSrobert       tem = swap_commutative_operands_p (XEXP (op0, 0), op1)
1431*404b540aSrobert 	    ? simplify_binary_operation (code, mode, op1, XEXP (op0, 0))
1432*404b540aSrobert 	    : simplify_binary_operation (code, mode, XEXP (op0, 0), op1);
1433*404b540aSrobert       if (tem != 0)
1434*404b540aSrobert         return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
1435*404b540aSrobert     }
1436*404b540aSrobert 
1437*404b540aSrobert   return 0;
1438*404b540aSrobert }
1439*404b540aSrobert 
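/* A hedged illustration of the linearization above, with hypothetical
   operands a, b, c, d:

     (plus (plus a b) (plus c d))
       --> (plus (plus (plus a b) c) d)

   Nested uses of the operator are rotated until they associate to the
   left; the swap_commutative_operands_p checks then keep constants and
   "heavier" subexpressions in a canonical position.  */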
1440*404b540aSrobert 
1441*404b540aSrobert /* Simplify a binary operation CODE with result mode MODE, operating on OP0
1442*404b540aSrobert    and OP1.  Return 0 if no simplification is possible.
1443*404b540aSrobert 
1444*404b540aSrobert    Don't use this for relational operations such as EQ or LT.
1445*404b540aSrobert    Use simplify_relational_operation instead.  */
1446*404b540aSrobert rtx
1447*404b540aSrobert simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
1448*404b540aSrobert 			   rtx op0, rtx op1)
1449*404b540aSrobert {
1450*404b540aSrobert   rtx trueop0, trueop1;
1451*404b540aSrobert   rtx tem;
1452*404b540aSrobert 
1453*404b540aSrobert   /* Relational operations don't work here.  We must know the mode
1454*404b540aSrobert      of the operands in order to do the comparison correctly.
1455*404b540aSrobert      Assuming a full word can give incorrect results.
1456*404b540aSrobert      Consider comparing 128 with -128 in QImode.  */
1457*404b540aSrobert   gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE);
1458*404b540aSrobert   gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE);
1459*404b540aSrobert 
1460*404b540aSrobert   /* Make sure the constant is second.  */
1461*404b540aSrobert   if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
1462*404b540aSrobert       && swap_commutative_operands_p (op0, op1))
1463*404b540aSrobert     {
1464*404b540aSrobert       tem = op0, op0 = op1, op1 = tem;
1465*404b540aSrobert     }
1466*404b540aSrobert 
1467*404b540aSrobert   trueop0 = avoid_constant_pool_reference (op0);
1468*404b540aSrobert   trueop1 = avoid_constant_pool_reference (op1);
1469*404b540aSrobert 
1470*404b540aSrobert   tem = simplify_const_binary_operation (code, mode, trueop0, trueop1);
1471*404b540aSrobert   if (tem)
1472*404b540aSrobert     return tem;
1473*404b540aSrobert   return simplify_binary_operation_1 (code, mode, op0, op1, trueop0, trueop1);
1474*404b540aSrobert }
1475*404b540aSrobert 
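/* Hypothetical usage sketch (x assumed to be an SImode REG rtx):

     rtx tem = simplify_binary_operation (PLUS, SImode, x, const0_rtx);

   yields x here, while a NULL_RTX result means no simplification was
   found and the caller must build the expression itself, e.g. via
   simplify_gen_binary.  */
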
1476*404b540aSrobert /* Subroutine of simplify_binary_operation.  Simplify a binary operation
1477*404b540aSrobert    CODE with result mode MODE, operating on OP0 and OP1.  If OP0 and/or
1478*404b540aSrobert    OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the
1479*404b540aSrobert    actual constants.  */
1480*404b540aSrobert 
1481*404b540aSrobert static rtx
1482*404b540aSrobert simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
1483*404b540aSrobert 			     rtx op0, rtx op1, rtx trueop0, rtx trueop1)
1484*404b540aSrobert {
1485*404b540aSrobert   rtx tem, reversed, opleft, opright;
1486*404b540aSrobert   HOST_WIDE_INT val;
1487*404b540aSrobert   unsigned int width = GET_MODE_BITSIZE (mode);
1488*404b540aSrobert 
1489*404b540aSrobert   /* Even if we can't compute a constant result,
1490*404b540aSrobert      there are some cases worth simplifying.  */
1491*404b540aSrobert 
1492*404b540aSrobert   switch (code)
1493*404b540aSrobert     {
1494*404b540aSrobert     case PLUS:
1495*404b540aSrobert       /* Maybe simplify x + 0 to x.  The two expressions are equivalent
1496*404b540aSrobert 	 when x is NaN, infinite, or finite and nonzero.  They aren't
1497*404b540aSrobert 	 when x is -0 and the rounding mode is not towards -infinity,
1498*404b540aSrobert 	 since (-0) + 0 is then 0.  */
1499*404b540aSrobert       if (!HONOR_SIGNED_ZEROS (mode) && trueop1 == CONST0_RTX (mode))
1500*404b540aSrobert 	return op0;
1501*404b540aSrobert 
1502*404b540aSrobert       /* ((-a) + b) -> (b - a) and similarly for (a + (-b)).  These
1503*404b540aSrobert 	 transformations are safe even for IEEE.  */
1504*404b540aSrobert       if (GET_CODE (op0) == NEG)
1505*404b540aSrobert 	return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
1506*404b540aSrobert       else if (GET_CODE (op1) == NEG)
1507*404b540aSrobert 	return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
1508*404b540aSrobert 
1509*404b540aSrobert       /* (~a) + 1 -> -a */
1510*404b540aSrobert       if (INTEGRAL_MODE_P (mode)
1511*404b540aSrobert 	  && GET_CODE (op0) == NOT
1512*404b540aSrobert 	  && trueop1 == const1_rtx)
1513*404b540aSrobert 	return simplify_gen_unary (NEG, mode, XEXP (op0, 0), mode);
1514*404b540aSrobert 
1515*404b540aSrobert       /* Handle both-operands-constant cases.  We can only add
1516*404b540aSrobert 	 CONST_INTs to constants since the sum of relocatable symbols
1517*404b540aSrobert 	 can't be handled by most assemblers.  Don't add CONST_INT
1518*404b540aSrobert 	 to CONST_INT since overflow won't be computed properly if wider
1519*404b540aSrobert 	 than HOST_BITS_PER_WIDE_INT.  */
1520*404b540aSrobert 
1521*404b540aSrobert       if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
1522*404b540aSrobert 	  && GET_CODE (op1) == CONST_INT)
1523*404b540aSrobert 	return plus_constant (op0, INTVAL (op1));
1524*404b540aSrobert       else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
1525*404b540aSrobert 	       && GET_CODE (op0) == CONST_INT)
1526*404b540aSrobert 	return plus_constant (op1, INTVAL (op0));
1527*404b540aSrobert 
1528*404b540aSrobert       /* See if this is something like X * C - X or vice versa or
1529*404b540aSrobert 	 if the multiplication is written as a shift.  If so, we can
1530*404b540aSrobert 	 distribute and make a new multiply, shift, or maybe just
1531*404b540aSrobert 	 have X (if C is 2 in the example above).  But don't make
1532*404b540aSrobert 	 something more expensive than we had before.  */
1533*404b540aSrobert 
1534*404b540aSrobert       if (SCALAR_INT_MODE_P (mode))
1535*404b540aSrobert 	{
1536*404b540aSrobert 	  HOST_WIDE_INT coeff0h = 0, coeff1h = 0;
1537*404b540aSrobert 	  unsigned HOST_WIDE_INT coeff0l = 1, coeff1l = 1;
1538*404b540aSrobert 	  rtx lhs = op0, rhs = op1;
1539*404b540aSrobert 
1540*404b540aSrobert 	  if (GET_CODE (lhs) == NEG)
1541*404b540aSrobert 	    {
1542*404b540aSrobert 	      coeff0l = -1;
1543*404b540aSrobert 	      coeff0h = -1;
1544*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1545*404b540aSrobert 	    }
1546*404b540aSrobert 	  else if (GET_CODE (lhs) == MULT
1547*404b540aSrobert 		   && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
1548*404b540aSrobert 	    {
1549*404b540aSrobert 	      coeff0l = INTVAL (XEXP (lhs, 1));
1550*404b540aSrobert 	      coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
1551*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1552*404b540aSrobert 	    }
1553*404b540aSrobert 	  else if (GET_CODE (lhs) == ASHIFT
1554*404b540aSrobert 		   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
1555*404b540aSrobert 		   && INTVAL (XEXP (lhs, 1)) >= 0
1556*404b540aSrobert 		   && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
1557*404b540aSrobert 	    {
1558*404b540aSrobert 	      coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
1559*404b540aSrobert 	      coeff0h = 0;
1560*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1561*404b540aSrobert 	    }
1562*404b540aSrobert 
1563*404b540aSrobert 	  if (GET_CODE (rhs) == NEG)
1564*404b540aSrobert 	    {
1565*404b540aSrobert 	      coeff1l = -1;
1566*404b540aSrobert 	      coeff1h = -1;
1567*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1568*404b540aSrobert 	    }
1569*404b540aSrobert 	  else if (GET_CODE (rhs) == MULT
1570*404b540aSrobert 		   && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
1571*404b540aSrobert 	    {
1572*404b540aSrobert 	      coeff1l = INTVAL (XEXP (rhs, 1));
1573*404b540aSrobert 	      coeff1h = INTVAL (XEXP (rhs, 1)) < 0 ? -1 : 0;
1574*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1575*404b540aSrobert 	    }
1576*404b540aSrobert 	  else if (GET_CODE (rhs) == ASHIFT
1577*404b540aSrobert 		   && GET_CODE (XEXP (rhs, 1)) == CONST_INT
1578*404b540aSrobert 		   && INTVAL (XEXP (rhs, 1)) >= 0
1579*404b540aSrobert 		   && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
1580*404b540aSrobert 	    {
1581*404b540aSrobert 	      coeff1l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
1582*404b540aSrobert 	      coeff1h = 0;
1583*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1584*404b540aSrobert 	    }
1585*404b540aSrobert 
1586*404b540aSrobert 	  if (rtx_equal_p (lhs, rhs))
1587*404b540aSrobert 	    {
1588*404b540aSrobert 	      rtx orig = gen_rtx_PLUS (mode, op0, op1);
1589*404b540aSrobert 	      rtx coeff;
1590*404b540aSrobert 	      unsigned HOST_WIDE_INT l;
1591*404b540aSrobert 	      HOST_WIDE_INT h;
1592*404b540aSrobert 
1593*404b540aSrobert 	      add_double (coeff0l, coeff0h, coeff1l, coeff1h, &l, &h);
1594*404b540aSrobert 	      coeff = immed_double_const (l, h, mode);
1595*404b540aSrobert 
1596*404b540aSrobert 	      tem = simplify_gen_binary (MULT, mode, lhs, coeff);
1597*404b540aSrobert 	      return rtx_cost (tem, SET) <= rtx_cost (orig, SET)
1598*404b540aSrobert 		? tem : 0;
1599*404b540aSrobert 	    }
1600*404b540aSrobert 	}
1601*404b540aSrobert 
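      /* A worked example of the distribution above, in pseudo-RTL:
	 (plus (mult x (const_int 3)) x) has coefficients 3 and 1, which
	 add_double combines so the sum folds to (mult x (const_int 4));
	 likewise (plus (ashift x (const_int 2)) x) contributes the
	 coefficient 1<<2 = 4 and folds to (mult x (const_int 5)).
	 Either result is kept only if rtx_cost says it is no more
	 expensive than the original sum.  */
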
1602*404b540aSrobert       /* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit.  */
1603*404b540aSrobert       if ((GET_CODE (op1) == CONST_INT
1604*404b540aSrobert 	   || GET_CODE (op1) == CONST_DOUBLE)
1605*404b540aSrobert 	  && GET_CODE (op0) == XOR
1606*404b540aSrobert 	  && (GET_CODE (XEXP (op0, 1)) == CONST_INT
1607*404b540aSrobert 	      || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
1608*404b540aSrobert 	  && mode_signbit_p (mode, op1))
1609*404b540aSrobert 	return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
1610*404b540aSrobert 				    simplify_gen_binary (XOR, mode, op1,
1611*404b540aSrobert 							 XEXP (op0, 1)));
1612*404b540aSrobert 
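      /* Worked note on the fold above: adding the sign bit is the same
	 as flipping it, because the carry out of the top bit is
	 discarded; e.g. in QImode, x + 0x80 and x ^ 0x80 agree for all
	 x mod 256.  Hence (plus (xor x C1) signbit) becomes
	 (xor x (C1 ^ signbit)).  */
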
1613*404b540aSrobert       /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
1614*404b540aSrobert       if (GET_CODE (op0) == MULT
1615*404b540aSrobert 	  && GET_CODE (XEXP (op0, 0)) == NEG)
1616*404b540aSrobert 	{
1617*404b540aSrobert 	  rtx in1, in2;
1618*404b540aSrobert 
1619*404b540aSrobert 	  in1 = XEXP (XEXP (op0, 0), 0);
1620*404b540aSrobert 	  in2 = XEXP (op0, 1);
1621*404b540aSrobert 	  return simplify_gen_binary (MINUS, mode, op1,
1622*404b540aSrobert 				      simplify_gen_binary (MULT, mode,
1623*404b540aSrobert 							   in1, in2));
1624*404b540aSrobert 	}
1625*404b540aSrobert 
1626*404b540aSrobert       /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
1627*404b540aSrobert 	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
1628*404b540aSrobert 	 is 1.  */
1629*404b540aSrobert       if (COMPARISON_P (op0)
1630*404b540aSrobert 	  && ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx)
1631*404b540aSrobert 	      || (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx))
1632*404b540aSrobert 	  && (reversed = reversed_comparison (op0, mode)))
1633*404b540aSrobert 	return
1634*404b540aSrobert 	  simplify_gen_unary (NEG, mode, reversed, mode);
1635*404b540aSrobert 
1636*404b540aSrobert       /* If one of the operands is a PLUS or a MINUS, see if we can
1637*404b540aSrobert 	 simplify this by the associative law.
1638*404b540aSrobert 	 Don't use the associative law for floating point.
1639*404b540aSrobert 	 The inaccuracy makes it nonassociative,
1640*404b540aSrobert 	 and subtle programs can break if operations are associated.  */
1641*404b540aSrobert 
1642*404b540aSrobert       if (INTEGRAL_MODE_P (mode)
1643*404b540aSrobert 	  && (plus_minus_operand_p (op0)
1644*404b540aSrobert 	      || plus_minus_operand_p (op1))
1645*404b540aSrobert 	  && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
1646*404b540aSrobert 	return tem;
1647*404b540aSrobert 
1648*404b540aSrobert       /* Reassociate floating point addition only when the user
1649*404b540aSrobert 	 specifies unsafe math optimizations.  */
1650*404b540aSrobert       if (FLOAT_MODE_P (mode)
1651*404b540aSrobert 	  && flag_unsafe_math_optimizations)
1652*404b540aSrobert 	{
1653*404b540aSrobert 	  tem = simplify_associative_operation (code, mode, op0, op1);
1654*404b540aSrobert 	  if (tem)
1655*404b540aSrobert 	    return tem;
1656*404b540aSrobert 	}
1657*404b540aSrobert       break;
1658*404b540aSrobert 
1659*404b540aSrobert     case COMPARE:
1660*404b540aSrobert #ifdef HAVE_cc0
1661*404b540aSrobert       /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
1662*404b540aSrobert 	 using cc0, in which case we want to leave it as a COMPARE
1663*404b540aSrobert 	 so we can distinguish it from a register-register-copy.
1664*404b540aSrobert 
1665*404b540aSrobert 	 In IEEE floating point, x-0 is not the same as x.  */
1666*404b540aSrobert 
1667*404b540aSrobert       if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1668*404b540aSrobert 	   || ! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
1669*404b540aSrobert 	  && trueop1 == CONST0_RTX (mode))
1670*404b540aSrobert 	return op0;
1671*404b540aSrobert #endif
1672*404b540aSrobert 
1673*404b540aSrobert       /* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags).  */
1674*404b540aSrobert       if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT)
1675*404b540aSrobert 	   || (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU))
1676*404b540aSrobert 	  && XEXP (op0, 1) == const0_rtx && XEXP (op1, 1) == const0_rtx)
1677*404b540aSrobert 	{
1678*404b540aSrobert 	  rtx xop00 = XEXP (op0, 0);
1679*404b540aSrobert 	  rtx xop10 = XEXP (op1, 0);
1680*404b540aSrobert 
1681*404b540aSrobert #ifdef HAVE_cc0
1682*404b540aSrobert 	  if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0)
1683*404b540aSrobert #else
1684*404b540aSrobert 	    if (REG_P (xop00) && REG_P (xop10)
1685*404b540aSrobert 		&& GET_MODE (xop00) == GET_MODE (xop10)
1686*404b540aSrobert 		&& REGNO (xop00) == REGNO (xop10)
1687*404b540aSrobert 		&& GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC
1688*404b540aSrobert 		&& GET_MODE_CLASS (GET_MODE (xop10)) == MODE_CC)
1689*404b540aSrobert #endif
1690*404b540aSrobert 	      return xop00;
1691*404b540aSrobert 	}
1692*404b540aSrobert       break;
1693*404b540aSrobert 
1694*404b540aSrobert     case MINUS:
1695*404b540aSrobert       /* We can't assume x-x is 0 even with non-IEEE floating point,
1696*404b540aSrobert 	 but since it is zero except in very strange circumstances, we
1697*404b540aSrobert 	 will treat it as zero with -funsafe-math-optimizations.  */
1698*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1)
1699*404b540aSrobert 	  && ! side_effects_p (op0)
1700*404b540aSrobert 	  && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations))
1701*404b540aSrobert 	return CONST0_RTX (mode);
1702*404b540aSrobert 
1703*404b540aSrobert       /* Change subtraction from zero into negation.  (0 - x) is the
1704*404b540aSrobert 	 same as -x when x is NaN, infinite, or finite and nonzero.
1705*404b540aSrobert 	 But if the mode has signed zeros, and does not round towards
1706*404b540aSrobert 	 -infinity, then 0 - 0 is 0, not -0.  */
1707*404b540aSrobert       if (!HONOR_SIGNED_ZEROS (mode) && trueop0 == CONST0_RTX (mode))
1708*404b540aSrobert 	return simplify_gen_unary (NEG, mode, op1, mode);
1709*404b540aSrobert 
1710*404b540aSrobert       /* (-1 - a) is ~a.  */
1711*404b540aSrobert       if (trueop0 == constm1_rtx)
1712*404b540aSrobert 	return simplify_gen_unary (NOT, mode, op1, mode);
1713*404b540aSrobert 
1714*404b540aSrobert       /* Subtracting 0 has no effect unless the mode has signed zeros
1715*404b540aSrobert 	 and supports rounding towards -infinity.  In such a case,
1716*404b540aSrobert 	 0 - 0 is -0.  */
1717*404b540aSrobert       if (!(HONOR_SIGNED_ZEROS (mode)
1718*404b540aSrobert 	    && HONOR_SIGN_DEPENDENT_ROUNDING (mode))
1719*404b540aSrobert 	  && trueop1 == CONST0_RTX (mode))
1720*404b540aSrobert 	return op0;
1721*404b540aSrobert 
1722*404b540aSrobert       /* See if this is something like X * C - X or vice versa or
1723*404b540aSrobert 	 if the multiplication is written as a shift.  If so, we can
1724*404b540aSrobert 	 distribute and make a new multiply, shift, or maybe just
1725*404b540aSrobert 	 have X (if C is 2 in the example above).  But don't make
1726*404b540aSrobert 	 something more expensive than we had before.  */
1727*404b540aSrobert 
1728*404b540aSrobert       if (SCALAR_INT_MODE_P (mode))
1729*404b540aSrobert 	{
1730*404b540aSrobert 	  HOST_WIDE_INT coeff0h = 0, negcoeff1h = -1;
1731*404b540aSrobert 	  unsigned HOST_WIDE_INT coeff0l = 1, negcoeff1l = -1;
1732*404b540aSrobert 	  rtx lhs = op0, rhs = op1;
1733*404b540aSrobert 
1734*404b540aSrobert 	  if (GET_CODE (lhs) == NEG)
1735*404b540aSrobert 	    {
1736*404b540aSrobert 	      coeff0l = -1;
1737*404b540aSrobert 	      coeff0h = -1;
1738*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1739*404b540aSrobert 	    }
1740*404b540aSrobert 	  else if (GET_CODE (lhs) == MULT
1741*404b540aSrobert 		   && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
1742*404b540aSrobert 	    {
1743*404b540aSrobert 	      coeff0l = INTVAL (XEXP (lhs, 1));
1744*404b540aSrobert 	      coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
1745*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1746*404b540aSrobert 	    }
1747*404b540aSrobert 	  else if (GET_CODE (lhs) == ASHIFT
1748*404b540aSrobert 		   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
1749*404b540aSrobert 		   && INTVAL (XEXP (lhs, 1)) >= 0
1750*404b540aSrobert 		   && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
1751*404b540aSrobert 	    {
1752*404b540aSrobert 	      coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
1753*404b540aSrobert 	      coeff0h = 0;
1754*404b540aSrobert 	      lhs = XEXP (lhs, 0);
1755*404b540aSrobert 	    }
1756*404b540aSrobert 
1757*404b540aSrobert 	  if (GET_CODE (rhs) == NEG)
1758*404b540aSrobert 	    {
1759*404b540aSrobert 	      negcoeff1l = 1;
1760*404b540aSrobert 	      negcoeff1h = 0;
1761*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1762*404b540aSrobert 	    }
1763*404b540aSrobert 	  else if (GET_CODE (rhs) == MULT
1764*404b540aSrobert 		   && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
1765*404b540aSrobert 	    {
1766*404b540aSrobert 	      negcoeff1l = -INTVAL (XEXP (rhs, 1));
1767*404b540aSrobert 	      negcoeff1h = INTVAL (XEXP (rhs, 1)) <= 0 ? 0 : -1;
1768*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1769*404b540aSrobert 	    }
1770*404b540aSrobert 	  else if (GET_CODE (rhs) == ASHIFT
1771*404b540aSrobert 		   && GET_CODE (XEXP (rhs, 1)) == CONST_INT
1772*404b540aSrobert 		   && INTVAL (XEXP (rhs, 1)) >= 0
1773*404b540aSrobert 		   && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
1774*404b540aSrobert 	    {
1775*404b540aSrobert 	      negcoeff1l = -(((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1)));
1776*404b540aSrobert 	      negcoeff1h = -1;
1777*404b540aSrobert 	      rhs = XEXP (rhs, 0);
1778*404b540aSrobert 	    }
1779*404b540aSrobert 
1780*404b540aSrobert 	  if (rtx_equal_p (lhs, rhs))
1781*404b540aSrobert 	    {
1782*404b540aSrobert 	      rtx orig = gen_rtx_MINUS (mode, op0, op1);
1783*404b540aSrobert 	      rtx coeff;
1784*404b540aSrobert 	      unsigned HOST_WIDE_INT l;
1785*404b540aSrobert 	      HOST_WIDE_INT h;
1786*404b540aSrobert 
1787*404b540aSrobert 	      add_double (coeff0l, coeff0h, negcoeff1l, negcoeff1h, &l, &h);
1788*404b540aSrobert 	      coeff = immed_double_const (l, h, mode);
1789*404b540aSrobert 
1790*404b540aSrobert 	      tem = simplify_gen_binary (MULT, mode, lhs, coeff);
1791*404b540aSrobert 	      return rtx_cost (tem, SET) <= rtx_cost (orig, SET)
1792*404b540aSrobert 		? tem : 0;
1793*404b540aSrobert 	    }
1794*404b540aSrobert 	}
1795*404b540aSrobert 
1796*404b540aSrobert       /* (a - (-b)) -> (a + b).  True even for IEEE.  */
1797*404b540aSrobert       if (GET_CODE (op1) == NEG)
1798*404b540aSrobert 	return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
1799*404b540aSrobert 
1800*404b540aSrobert       /* (-x - c) may be simplified as (-c - x).  */
1801*404b540aSrobert       if (GET_CODE (op0) == NEG
1802*404b540aSrobert 	  && (GET_CODE (op1) == CONST_INT
1803*404b540aSrobert 	      || GET_CODE (op1) == CONST_DOUBLE))
1804*404b540aSrobert 	{
1805*404b540aSrobert 	  tem = simplify_unary_operation (NEG, mode, op1, mode);
1806*404b540aSrobert 	  if (tem)
1807*404b540aSrobert 	    return simplify_gen_binary (MINUS, mode, tem, XEXP (op0, 0));
1808*404b540aSrobert 	}
1809*404b540aSrobert 
1810*404b540aSrobert       /* Don't let a relocatable value get a negative coeff.  */
1811*404b540aSrobert       if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
1812*404b540aSrobert 	return simplify_gen_binary (PLUS, mode,
1813*404b540aSrobert 				    op0,
1814*404b540aSrobert 				    neg_const_int (mode, op1));
1815*404b540aSrobert 
1816*404b540aSrobert       /* (x - (x & y)) -> (x & ~y) */
1817*404b540aSrobert       if (GET_CODE (op1) == AND)
1818*404b540aSrobert 	{
1819*404b540aSrobert 	  if (rtx_equal_p (op0, XEXP (op1, 0)))
1820*404b540aSrobert 	    {
1821*404b540aSrobert 	      tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1),
1822*404b540aSrobert 					GET_MODE (XEXP (op1, 1)));
1823*404b540aSrobert 	      return simplify_gen_binary (AND, mode, op0, tem);
1824*404b540aSrobert 	    }
1825*404b540aSrobert 	  if (rtx_equal_p (op0, XEXP (op1, 1)))
1826*404b540aSrobert 	    {
1827*404b540aSrobert 	      tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0),
1828*404b540aSrobert 					GET_MODE (XEXP (op1, 0)));
1829*404b540aSrobert 	      return simplify_gen_binary (AND, mode, op0, tem);
1830*404b540aSrobert 	    }
1831*404b540aSrobert 	}
1832*404b540aSrobert 
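      /* Hedged worked example for the fold above: the set bits of
	 (x & y) are a subset of those of x, so the subtraction never
	 borrows and simply clears them, which is (and x (not y)).
	 E.g. x = 14 (binary 1110), y = 6 (0110): 14 - 6 = 8 = x & ~y.  */
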
1833*404b540aSrobert       /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
1834*404b540aSrobert 	 by reversing the comparison code if valid.  */
1835*404b540aSrobert       if (STORE_FLAG_VALUE == 1
1836*404b540aSrobert 	  && trueop0 == const1_rtx
1837*404b540aSrobert 	  && COMPARISON_P (op1)
1838*404b540aSrobert 	  && (reversed = reversed_comparison (op1, mode)))
1839*404b540aSrobert 	return reversed;
1840*404b540aSrobert 
1841*404b540aSrobert       /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
1842*404b540aSrobert       if (GET_CODE (op1) == MULT
1843*404b540aSrobert 	  && GET_CODE (XEXP (op1, 0)) == NEG)
1844*404b540aSrobert 	{
1845*404b540aSrobert 	  rtx in1, in2;
1846*404b540aSrobert 
1847*404b540aSrobert 	  in1 = XEXP (XEXP (op1, 0), 0);
1848*404b540aSrobert 	  in2 = XEXP (op1, 1);
1849*404b540aSrobert 	  return simplify_gen_binary (PLUS, mode,
1850*404b540aSrobert 				      simplify_gen_binary (MULT, mode,
1851*404b540aSrobert 							   in1, in2),
1852*404b540aSrobert 				      op0);
1853*404b540aSrobert 	}
1854*404b540aSrobert 
1855*404b540aSrobert       /* Canonicalize (minus (neg A) (mult B C)) to
1856*404b540aSrobert 	 (minus (mult (neg B) C) A).  */
1857*404b540aSrobert       if (GET_CODE (op1) == MULT
1858*404b540aSrobert 	  && GET_CODE (op0) == NEG)
1859*404b540aSrobert 	{
1860*404b540aSrobert 	  rtx in1, in2;
1861*404b540aSrobert 
1862*404b540aSrobert 	  in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode);
1863*404b540aSrobert 	  in2 = XEXP (op1, 1);
1864*404b540aSrobert 	  return simplify_gen_binary (MINUS, mode,
1865*404b540aSrobert 				      simplify_gen_binary (MULT, mode,
1866*404b540aSrobert 							   in1, in2),
1867*404b540aSrobert 				      XEXP (op0, 0));
1868*404b540aSrobert 	}
1869*404b540aSrobert 
1870*404b540aSrobert       /* If one of the operands is a PLUS or a MINUS, see if we can
1871*404b540aSrobert 	 simplify this by the associative law.  This will, for example,
1872*404b540aSrobert          canonicalize (minus A (plus B C)) to (minus (minus A B) C).
1873*404b540aSrobert 	 Don't use the associative law for floating point.
1874*404b540aSrobert 	 The inaccuracy makes it nonassociative,
1875*404b540aSrobert 	 and subtle programs can break if operations are associated.  */
1876*404b540aSrobert 
1877*404b540aSrobert       if (INTEGRAL_MODE_P (mode)
1878*404b540aSrobert 	  && (plus_minus_operand_p (op0)
1879*404b540aSrobert 	      || plus_minus_operand_p (op1))
1880*404b540aSrobert 	  && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
1881*404b540aSrobert 	return tem;
1882*404b540aSrobert       break;
1883*404b540aSrobert 
1884*404b540aSrobert     case MULT:
1885*404b540aSrobert       if (trueop1 == constm1_rtx)
1886*404b540aSrobert 	return simplify_gen_unary (NEG, mode, op0, mode);
1887*404b540aSrobert 
1888*404b540aSrobert       /* Maybe simplify x * 0 to 0.  The reduction is not valid if
1889*404b540aSrobert 	 x is NaN, since x * 0 is then also NaN.  Nor is it valid
1890*404b540aSrobert 	 when the mode has signed zeros, since multiplying a negative
1891*404b540aSrobert 	 number by 0 will give -0, not 0.  */
1892*404b540aSrobert       if (!HONOR_NANS (mode)
1893*404b540aSrobert 	  && !HONOR_SIGNED_ZEROS (mode)
1894*404b540aSrobert 	  && trueop1 == CONST0_RTX (mode)
1895*404b540aSrobert 	  && ! side_effects_p (op0))
1896*404b540aSrobert 	return op1;
1897*404b540aSrobert 
1898*404b540aSrobert       /* In IEEE floating point, x*1 is not equivalent to x for
1899*404b540aSrobert 	 signalling NaNs.  */
1900*404b540aSrobert       if (!HONOR_SNANS (mode)
1901*404b540aSrobert 	  && trueop1 == CONST1_RTX (mode))
1902*404b540aSrobert 	return op0;
1903*404b540aSrobert 
1904*404b540aSrobert       /* Convert multiply by a constant power of two into a shift.  */
1906*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
1907*404b540aSrobert 	  && (val = exact_log2 (INTVAL (trueop1))) >= 0
1908*404b540aSrobert 	  /* If the mode is larger than the host word size, and the
1909*404b540aSrobert 	     uppermost bit is set, then this isn't a power of two due
1910*404b540aSrobert 	     to implicit sign extension.  */
1911*404b540aSrobert 	  && (width <= HOST_BITS_PER_WIDE_INT
1912*404b540aSrobert 	      || val != HOST_BITS_PER_WIDE_INT - 1))
1913*404b540aSrobert 	return simplify_gen_binary (ASHIFT, mode, op0, GEN_INT (val));
1914*404b540aSrobert 
1915*404b540aSrobert       /* Likewise for multipliers wider than a word.  */
1916*404b540aSrobert       if (GET_CODE (trueop1) == CONST_DOUBLE
1917*404b540aSrobert 	  && (GET_MODE (trueop1) == VOIDmode
1918*404b540aSrobert 	      || GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_INT)
1919*404b540aSrobert 	  && GET_MODE (op0) == mode
1920*404b540aSrobert 	  && CONST_DOUBLE_LOW (trueop1) == 0
1921*404b540aSrobert 	  && (val = exact_log2 (CONST_DOUBLE_HIGH (trueop1))) >= 0)
1922*404b540aSrobert 	return simplify_gen_binary (ASHIFT, mode, op0,
1923*404b540aSrobert 				    GEN_INT (val + HOST_BITS_PER_WIDE_INT));
1924*404b540aSrobert 
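      /* Illustrative example: exact_log2 returns the exponent of an
	 exact power of two and -1 otherwise, so (mult x (const_int 8))
	 becomes (ashift x (const_int 3)) while a multiplier of 6 is
	 left alone.  The width check rejects e.g. a CONST_INT of
	 0x80000000 used in DImode on a host with 32-bit HOST_WIDE_INT,
	 where implicit sign extension makes the value negative rather
	 than a power of two.  */
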
1925*404b540aSrobert       /* x*2 is x+x and x*(-1) is -x */
1926*404b540aSrobert       if (GET_CODE (trueop1) == CONST_DOUBLE
1927*404b540aSrobert 	  && SCALAR_FLOAT_MODE_P (GET_MODE (trueop1))
1928*404b540aSrobert 	  && GET_MODE (op0) == mode)
1929*404b540aSrobert 	{
1930*404b540aSrobert 	  REAL_VALUE_TYPE d;
1931*404b540aSrobert 	  REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);
1932*404b540aSrobert 
1933*404b540aSrobert 	  if (REAL_VALUES_EQUAL (d, dconst2))
1934*404b540aSrobert 	    return simplify_gen_binary (PLUS, mode, op0, copy_rtx (op0));
1935*404b540aSrobert 
1936*404b540aSrobert 	  if (!HONOR_SNANS (mode)
1937*404b540aSrobert 	      && REAL_VALUES_EQUAL (d, dconstm1))
1938*404b540aSrobert 	    return simplify_gen_unary (NEG, mode, op0, mode);
1939*404b540aSrobert 	}
1940*404b540aSrobert 
1941*404b540aSrobert       /* Optimize -x * -x as x * x.  */
1942*404b540aSrobert       if (FLOAT_MODE_P (mode)
1943*404b540aSrobert 	  && GET_CODE (op0) == NEG
1944*404b540aSrobert 	  && GET_CODE (op1) == NEG
1945*404b540aSrobert 	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
1946*404b540aSrobert 	  && !side_effects_p (XEXP (op0, 0)))
1947*404b540aSrobert 	return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
1948*404b540aSrobert 
1949*404b540aSrobert       /* Likewise, optimize abs(x) * abs(x) as x * x.  */
1950*404b540aSrobert       if (SCALAR_FLOAT_MODE_P (mode)
1951*404b540aSrobert 	  && GET_CODE (op0) == ABS
1952*404b540aSrobert 	  && GET_CODE (op1) == ABS
1953*404b540aSrobert 	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
1954*404b540aSrobert 	  && !side_effects_p (XEXP (op0, 0)))
1955*404b540aSrobert 	return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
1956*404b540aSrobert 
1957*404b540aSrobert       /* Reassociate multiplication, but for floating point MULTs
1958*404b540aSrobert 	 only when the user specifies unsafe math optimizations.  */
1959*404b540aSrobert       if (! FLOAT_MODE_P (mode)
1960*404b540aSrobert 	  || flag_unsafe_math_optimizations)
1961*404b540aSrobert 	{
1962*404b540aSrobert 	  tem = simplify_associative_operation (code, mode, op0, op1);
1963*404b540aSrobert 	  if (tem)
1964*404b540aSrobert 	    return tem;
1965*404b540aSrobert 	}
1966*404b540aSrobert       break;
1967*404b540aSrobert 
1968*404b540aSrobert     case IOR:
1969*404b540aSrobert       if (trueop1 == const0_rtx)
1970*404b540aSrobert 	return op0;
1971*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
1972*404b540aSrobert 	  && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
1973*404b540aSrobert 	      == GET_MODE_MASK (mode)))
1974*404b540aSrobert 	return op1;
1975*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
1976*404b540aSrobert 	return op0;
1977*404b540aSrobert       /* A | (~A) -> -1 */
1978*404b540aSrobert       if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
1979*404b540aSrobert 	   || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
1980*404b540aSrobert 	  && ! side_effects_p (op0)
1981*404b540aSrobert 	  && SCALAR_INT_MODE_P (mode))
1982*404b540aSrobert 	return constm1_rtx;
1983*404b540aSrobert 
1984*404b540aSrobert       /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
1985*404b540aSrobert       if (GET_CODE (op1) == CONST_INT
1986*404b540aSrobert 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1987*404b540aSrobert 	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
1988*404b540aSrobert 	return op1;
1989*404b540aSrobert 
1990*404b540aSrobert       /* Convert (A & B) | A to A.  */
1991*404b540aSrobert       if (GET_CODE (op0) == AND
1992*404b540aSrobert 	  && (rtx_equal_p (XEXP (op0, 0), op1)
1993*404b540aSrobert 	      || rtx_equal_p (XEXP (op0, 1), op1))
1994*404b540aSrobert 	  && ! side_effects_p (XEXP (op0, 0))
1995*404b540aSrobert 	  && ! side_effects_p (XEXP (op0, 1)))
1996*404b540aSrobert 	return op1;
1997*404b540aSrobert 
1998*404b540aSrobert       /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
1999*404b540aSrobert          mode size to (rotate A CX).  */
2000*404b540aSrobert 
2001*404b540aSrobert       if (GET_CODE (op1) == ASHIFT
2002*404b540aSrobert           || GET_CODE (op1) == SUBREG)
2003*404b540aSrobert         {
2004*404b540aSrobert 	  opleft = op1;
2005*404b540aSrobert 	  opright = op0;
2006*404b540aSrobert 	}
2007*404b540aSrobert       else
2008*404b540aSrobert         {
2009*404b540aSrobert 	  opright = op1;
2010*404b540aSrobert 	  opleft = op0;
2011*404b540aSrobert 	}
2012*404b540aSrobert 
2013*404b540aSrobert       if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
2014*404b540aSrobert           && rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
2015*404b540aSrobert           && GET_CODE (XEXP (opleft, 1)) == CONST_INT
2016*404b540aSrobert           && GET_CODE (XEXP (opright, 1)) == CONST_INT
2017*404b540aSrobert           && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
2018*404b540aSrobert               == GET_MODE_BITSIZE (mode)))
2019*404b540aSrobert         return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
2020*404b540aSrobert 
2021*404b540aSrobert       /* Same, but for ashift that has been "simplified" to a wider mode
2022*404b540aSrobert         by simplify_shift_const.  */
2023*404b540aSrobert 
2024*404b540aSrobert       if (GET_CODE (opleft) == SUBREG
2025*404b540aSrobert           && GET_CODE (SUBREG_REG (opleft)) == ASHIFT
2026*404b540aSrobert           && GET_CODE (opright) == LSHIFTRT
2027*404b540aSrobert           && GET_CODE (XEXP (opright, 0)) == SUBREG
2028*404b540aSrobert           && GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))
2029*404b540aSrobert           && SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0))
2030*404b540aSrobert           && (GET_MODE_SIZE (GET_MODE (opleft))
2031*404b540aSrobert               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
2032*404b540aSrobert           && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
2033*404b540aSrobert                           SUBREG_REG (XEXP (opright, 0)))
2034*404b540aSrobert           && GET_CODE (XEXP (SUBREG_REG (opleft), 1)) == CONST_INT
2035*404b540aSrobert           && GET_CODE (XEXP (opright, 1)) == CONST_INT
2036*404b540aSrobert           && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
2037*404b540aSrobert               == GET_MODE_BITSIZE (mode)))
2038*404b540aSrobert         return gen_rtx_ROTATE (mode, XEXP (opright, 0),
2039*404b540aSrobert                                XEXP (SUBREG_REG (opleft), 1));
2040*404b540aSrobert 
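      /* Sketch of the rotate recognition above on a hypothetical
	 SImode value:

	   (ior (ashift x (const_int 24)) (lshiftrt x (const_int 8)))

	 has 24 + 8 == GET_MODE_BITSIZE (SImode), so it becomes
	 (rotate x (const_int 24)), the C idiom (x << 24) | (x >> 8)
	 for unsigned x.  */
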
2041*404b540aSrobert       /* If we have (ior (and X C1) C2), simplify this by making
2042*404b540aSrobert 	 C1 as small as possible if C1 actually changes.  */
2043*404b540aSrobert       if (GET_CODE (op1) == CONST_INT
2044*404b540aSrobert 	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2045*404b540aSrobert 	      || INTVAL (op1) > 0)
2046*404b540aSrobert 	  && GET_CODE (op0) == AND
2047*404b540aSrobert 	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
2049*404b540aSrobert 	  && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
2050*404b540aSrobert 	return simplify_gen_binary (IOR, mode,
2051*404b540aSrobert 				    simplify_gen_binary
2052*404b540aSrobert 					  (AND, mode, XEXP (op0, 0),
2053*404b540aSrobert 					   GEN_INT (INTVAL (XEXP (op0, 1))
2054*404b540aSrobert 						    & ~INTVAL (op1))),
2055*404b540aSrobert 				    op1);
2056*404b540aSrobert 
2057*404b540aSrobert       /* If OP0 is (ashiftrt (plus ...) C), it might actually be
2058*404b540aSrobert          a (sign_extend (plus ...)).  Then check if OP1 is a CONST_INT and
2059*404b540aSrobert 	 the PLUS does not affect any of the bits in OP1: then we can do
2060*404b540aSrobert 	 the IOR as a PLUS and we can associate.  This is valid if OP1
2061*404b540aSrobert          can be safely shifted left C bits.  */
2062*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
2063*404b540aSrobert           && GET_CODE (XEXP (op0, 0)) == PLUS
2064*404b540aSrobert           && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
2065*404b540aSrobert           && GET_CODE (XEXP (op0, 1)) == CONST_INT
2066*404b540aSrobert           && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
2067*404b540aSrobert         {
2068*404b540aSrobert           int count = INTVAL (XEXP (op0, 1));
2069*404b540aSrobert           HOST_WIDE_INT mask = INTVAL (trueop1) << count;
2070*404b540aSrobert 
2071*404b540aSrobert           if (mask >> count == INTVAL (trueop1)
2072*404b540aSrobert               && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
2073*404b540aSrobert 	    return simplify_gen_binary (ASHIFTRT, mode,
2074*404b540aSrobert 					plus_constant (XEXP (op0, 0), mask),
2075*404b540aSrobert 					XEXP (op0, 1));
2076*404b540aSrobert         }
2077*404b540aSrobert 
2078*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2079*404b540aSrobert       if (tem)
2080*404b540aSrobert 	return tem;
2081*404b540aSrobert       break;
2082*404b540aSrobert 
2083*404b540aSrobert     case XOR:
2084*404b540aSrobert       if (trueop1 == const0_rtx)
2085*404b540aSrobert 	return op0;
2086*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
2087*404b540aSrobert 	  && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
2088*404b540aSrobert 	      == GET_MODE_MASK (mode)))
2089*404b540aSrobert 	return simplify_gen_unary (NOT, mode, op0, mode);
2090*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1)
2091*404b540aSrobert 	  && ! side_effects_p (op0)
2092*404b540aSrobert 	  && GET_MODE_CLASS (mode) != MODE_CC)
2093*404b540aSrobert 	 return CONST0_RTX (mode);
2094*404b540aSrobert 
2095*404b540aSrobert       /* Canonicalize XOR of the most significant bit to PLUS.  */
2096*404b540aSrobert       if ((GET_CODE (op1) == CONST_INT
2097*404b540aSrobert 	   || GET_CODE (op1) == CONST_DOUBLE)
2098*404b540aSrobert 	  && mode_signbit_p (mode, op1))
2099*404b540aSrobert 	return simplify_gen_binary (PLUS, mode, op0, op1);
2100*404b540aSrobert       /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit.  */
2101*404b540aSrobert       if ((GET_CODE (op1) == CONST_INT
2102*404b540aSrobert 	   || GET_CODE (op1) == CONST_DOUBLE)
2103*404b540aSrobert 	  && GET_CODE (op0) == PLUS
2104*404b540aSrobert 	  && (GET_CODE (XEXP (op0, 1)) == CONST_INT
2105*404b540aSrobert 	      || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
2106*404b540aSrobert 	  && mode_signbit_p (mode, XEXP (op0, 1)))
2107*404b540aSrobert 	return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
2108*404b540aSrobert 				    simplify_gen_binary (XOR, mode, op1,
2109*404b540aSrobert 							 XEXP (op0, 1)));
2110*404b540aSrobert 
2111*404b540aSrobert       /* If we are XORing two things that have no bits in common,
2112*404b540aSrobert 	 convert them into an IOR.  This helps to detect rotation encoded
2113*404b540aSrobert 	 using those methods and possibly other simplifications.  */
2114*404b540aSrobert 
2115*404b540aSrobert       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2116*404b540aSrobert 	  && (nonzero_bits (op0, mode)
2117*404b540aSrobert 	      & nonzero_bits (op1, mode)) == 0)
2118*404b540aSrobert 	return (simplify_gen_binary (IOR, mode, op0, op1));
2119*404b540aSrobert 
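      /* Worked note: when nonzero_bits proves the operands share no
	 possibly-set bit, at most one of them contributes to each
	 result bit, so a ^ b == a | b; e.g. 0x30 ^ 0x05 == 0x35
	 == 0x30 | 0x05.  */
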
2120*404b540aSrobert       /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
2121*404b540aSrobert 	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
2122*404b540aSrobert 	 (NOT y).  */
2123*404b540aSrobert       {
2124*404b540aSrobert 	int num_negated = 0;
2125*404b540aSrobert 
2126*404b540aSrobert 	if (GET_CODE (op0) == NOT)
2127*404b540aSrobert 	  num_negated++, op0 = XEXP (op0, 0);
2128*404b540aSrobert 	if (GET_CODE (op1) == NOT)
2129*404b540aSrobert 	  num_negated++, op1 = XEXP (op1, 0);
2130*404b540aSrobert 
2131*404b540aSrobert 	if (num_negated == 2)
2132*404b540aSrobert 	  return simplify_gen_binary (XOR, mode, op0, op1);
2133*404b540aSrobert 	else if (num_negated == 1)
2134*404b540aSrobert 	  return simplify_gen_unary (NOT, mode,
2135*404b540aSrobert 				     simplify_gen_binary (XOR, mode, op0, op1),
2136*404b540aSrobert 				     mode);
2137*404b540aSrobert       }
2138*404b540aSrobert 
2139*404b540aSrobert       /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
2140*404b540aSrobert 	 correspond to a machine insn or result in further simplifications
2141*404b540aSrobert 	 if B is a constant.  */
2142*404b540aSrobert 
2143*404b540aSrobert       if (GET_CODE (op0) == AND
2144*404b540aSrobert 	  && rtx_equal_p (XEXP (op0, 1), op1)
2145*404b540aSrobert 	  && ! side_effects_p (op1))
2146*404b540aSrobert 	return simplify_gen_binary (AND, mode,
2147*404b540aSrobert 				    simplify_gen_unary (NOT, mode,
2148*404b540aSrobert 							XEXP (op0, 0), mode),
2149*404b540aSrobert 				    op1);
2150*404b540aSrobert 
2151*404b540aSrobert       else if (GET_CODE (op0) == AND
2152*404b540aSrobert 	       && rtx_equal_p (XEXP (op0, 0), op1)
2153*404b540aSrobert 	       && ! side_effects_p (op1))
2154*404b540aSrobert 	return simplify_gen_binary (AND, mode,
2155*404b540aSrobert 				    simplify_gen_unary (NOT, mode,
2156*404b540aSrobert 							XEXP (op0, 1), mode),
2157*404b540aSrobert 				    op1);
2158*404b540aSrobert 
2159*404b540aSrobert       /* (xor (comparison foo bar) (const_int 1)) can become the reversed
2160*404b540aSrobert 	 comparison if STORE_FLAG_VALUE is 1.  */
2161*404b540aSrobert       if (STORE_FLAG_VALUE == 1
2162*404b540aSrobert 	  && trueop1 == const1_rtx
2163*404b540aSrobert 	  && COMPARISON_P (op0)
2164*404b540aSrobert 	  && (reversed = reversed_comparison (op0, mode)))
2165*404b540aSrobert 	return reversed;
2166*404b540aSrobert 
2167*404b540aSrobert       /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
2168*404b540aSrobert 	 is (lt foo (const_int 0)), so we can perform the above
2169*404b540aSrobert 	 simplification if STORE_FLAG_VALUE is 1.  */
2170*404b540aSrobert 
2171*404b540aSrobert       if (STORE_FLAG_VALUE == 1
2172*404b540aSrobert 	  && trueop1 == const1_rtx
2173*404b540aSrobert 	  && GET_CODE (op0) == LSHIFTRT
2174*404b540aSrobert 	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
2175*404b540aSrobert 	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
2176*404b540aSrobert 	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
2177*404b540aSrobert 
2178*404b540aSrobert       /* (xor (comparison foo bar) (const_int sign-bit))
2179*404b540aSrobert 	 when STORE_FLAG_VALUE is the sign bit.  */
2180*404b540aSrobert       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2181*404b540aSrobert 	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
2182*404b540aSrobert 	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
2183*404b540aSrobert 	  && trueop1 == const_true_rtx
2184*404b540aSrobert 	  && COMPARISON_P (op0)
2185*404b540aSrobert 	  && (reversed = reversed_comparison (op0, mode)))
2186*404b540aSrobert 	return reversed;
2187*404b540aSrobert 
2190*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2191*404b540aSrobert       if (tem)
2192*404b540aSrobert 	return tem;
2193*404b540aSrobert       break;
2194*404b540aSrobert 
2195*404b540aSrobert     case AND:
2196*404b540aSrobert       if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
2197*404b540aSrobert 	return trueop1;
2198*404b540aSrobert       /* If we are turning off bits already known off in OP0, we need
2199*404b540aSrobert 	 not do an AND.  */
2200*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
2201*404b540aSrobert 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2202*404b540aSrobert 	  && (nonzero_bits (trueop0, mode) & ~INTVAL (trueop1)) == 0)
2203*404b540aSrobert 	return op0;
2204*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)
2205*404b540aSrobert 	  && GET_MODE_CLASS (mode) != MODE_CC)
2206*404b540aSrobert 	return op0;
2207*404b540aSrobert       /* A & (~A) -> 0 */
2208*404b540aSrobert       if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
2209*404b540aSrobert 	   || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
2210*404b540aSrobert 	  && ! side_effects_p (op0)
2211*404b540aSrobert 	  && GET_MODE_CLASS (mode) != MODE_CC)
2212*404b540aSrobert 	return CONST0_RTX (mode);
2213*404b540aSrobert 
2214*404b540aSrobert       /* Transform (and (extend X) C) into (zero_extend (and X C)) if
2215*404b540aSrobert 	 there are no nonzero bits of C outside of X's mode.  */
2216*404b540aSrobert       if ((GET_CODE (op0) == SIGN_EXTEND
2217*404b540aSrobert 	   || GET_CODE (op0) == ZERO_EXTEND)
2218*404b540aSrobert 	  && GET_CODE (trueop1) == CONST_INT
2219*404b540aSrobert 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2220*404b540aSrobert 	  && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
2221*404b540aSrobert 	      & INTVAL (trueop1)) == 0)
2222*404b540aSrobert 	{
2223*404b540aSrobert 	  enum machine_mode imode = GET_MODE (XEXP (op0, 0));
2224*404b540aSrobert 	  tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
2225*404b540aSrobert 				     gen_int_mode (INTVAL (trueop1),
2226*404b540aSrobert 						   imode));
2227*404b540aSrobert 	  return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
2228*404b540aSrobert 	}
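      /* E.g. (and:SI (sign_extend:SI X:QI) (const_int 0x70)) becomes
         (zero_extend:SI (and:QI X (const_int 0x70))): the mask already
         clears every bit the extension could set above QImode.  */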
2229*404b540aSrobert 
2230*404b540aSrobert       /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
2231*404b540aSrobert 	 insn (and may simplify more).  */
2232*404b540aSrobert       if (GET_CODE (op0) == XOR
2233*404b540aSrobert 	  && rtx_equal_p (XEXP (op0, 0), op1)
2234*404b540aSrobert 	  && ! side_effects_p (op1))
2235*404b540aSrobert 	return simplify_gen_binary (AND, mode,
2236*404b540aSrobert 				    simplify_gen_unary (NOT, mode,
2237*404b540aSrobert 							XEXP (op0, 1), mode),
2238*404b540aSrobert 				    op1);
2239*404b540aSrobert 
2240*404b540aSrobert       if (GET_CODE (op0) == XOR
2241*404b540aSrobert 	  && rtx_equal_p (XEXP (op0, 1), op1)
2242*404b540aSrobert 	  && ! side_effects_p (op1))
2243*404b540aSrobert 	return simplify_gen_binary (AND, mode,
2244*404b540aSrobert 				    simplify_gen_unary (NOT, mode,
2245*404b540aSrobert 							XEXP (op0, 0), mode),
2246*404b540aSrobert 				    op1);
2247*404b540aSrobert 
2248*404b540aSrobert       /* Similarly for (~(A ^ B)) & A.  */
2249*404b540aSrobert       if (GET_CODE (op0) == NOT
2250*404b540aSrobert 	  && GET_CODE (XEXP (op0, 0)) == XOR
2251*404b540aSrobert 	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
2252*404b540aSrobert 	  && ! side_effects_p (op1))
2253*404b540aSrobert 	return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
2254*404b540aSrobert 
2255*404b540aSrobert       if (GET_CODE (op0) == NOT
2256*404b540aSrobert 	  && GET_CODE (XEXP (op0, 0)) == XOR
2257*404b540aSrobert 	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
2258*404b540aSrobert 	  && ! side_effects_p (op1))
2259*404b540aSrobert 	return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
2260*404b540aSrobert 
2261*404b540aSrobert       /* Convert (A | B) & A to A.  */
2262*404b540aSrobert       if (GET_CODE (op0) == IOR
2263*404b540aSrobert 	  && (rtx_equal_p (XEXP (op0, 0), op1)
2264*404b540aSrobert 	      || rtx_equal_p (XEXP (op0, 1), op1))
2265*404b540aSrobert 	  && ! side_effects_p (XEXP (op0, 0))
2266*404b540aSrobert 	  && ! side_effects_p (XEXP (op0, 1)))
2267*404b540aSrobert 	return op1;
2268*404b540aSrobert 
2269*404b540aSrobert       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
2270*404b540aSrobert 	 ((A & N) + B) & M -> (A + B) & M
2271*404b540aSrobert 	 Similarly if (N & M) == 0,
2272*404b540aSrobert 	 ((A | N) + B) & M -> (A + B) & M
2273*404b540aSrobert 	 and for - instead of + and/or ^ instead of |.  */
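      /* For instance, with M = 0xff:
           ((A & 0x1ff) + B) & 0xff  equals  (A + B) & 0xff, and
           ((A | 0x100) + B) & 0xff  equals  (A + B) & 0xff,
         since carries only propagate upward out of the bits M keeps.  */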
2274*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
2275*404b540aSrobert 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2276*404b540aSrobert 	  && ~INTVAL (trueop1)
2277*404b540aSrobert 	  && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0
2278*404b540aSrobert 	  && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
2279*404b540aSrobert 	{
2280*404b540aSrobert 	  rtx pmop[2];
2281*404b540aSrobert 	  int which;
2282*404b540aSrobert 
2283*404b540aSrobert 	  pmop[0] = XEXP (op0, 0);
2284*404b540aSrobert 	  pmop[1] = XEXP (op0, 1);
2285*404b540aSrobert 
2286*404b540aSrobert 	  for (which = 0; which < 2; which++)
2287*404b540aSrobert 	    {
2288*404b540aSrobert 	      tem = pmop[which];
2289*404b540aSrobert 	      switch (GET_CODE (tem))
2290*404b540aSrobert 		{
2291*404b540aSrobert 		case AND:
2292*404b540aSrobert 		  if (GET_CODE (XEXP (tem, 1)) == CONST_INT
2293*404b540aSrobert 		      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1))
2294*404b540aSrobert 		      == INTVAL (trueop1))
2295*404b540aSrobert 		    pmop[which] = XEXP (tem, 0);
2296*404b540aSrobert 		  break;
2297*404b540aSrobert 		case IOR:
2298*404b540aSrobert 		case XOR:
2299*404b540aSrobert 		  if (GET_CODE (XEXP (tem, 1)) == CONST_INT
2300*404b540aSrobert 		      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0)
2301*404b540aSrobert 		    pmop[which] = XEXP (tem, 0);
2302*404b540aSrobert 		  break;
2303*404b540aSrobert 		default:
2304*404b540aSrobert 		  break;
2305*404b540aSrobert 		}
2306*404b540aSrobert 	    }
2307*404b540aSrobert 
2308*404b540aSrobert 	  if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1))
2309*404b540aSrobert 	    {
2310*404b540aSrobert 	      tem = simplify_gen_binary (GET_CODE (op0), mode,
2311*404b540aSrobert 					 pmop[0], pmop[1]);
2312*404b540aSrobert 	      return simplify_gen_binary (code, mode, tem, op1);
2313*404b540aSrobert 	    }
2314*404b540aSrobert 	}
2315*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2316*404b540aSrobert       if (tem)
2317*404b540aSrobert 	return tem;
2318*404b540aSrobert       break;
2319*404b540aSrobert 
2320*404b540aSrobert     case UDIV:
2321*404b540aSrobert       /* 0/x is 0 (or x&0 if x has side-effects).  */
2322*404b540aSrobert       if (trueop0 == CONST0_RTX (mode))
2323*404b540aSrobert 	{
2324*404b540aSrobert 	  if (side_effects_p (op1))
2325*404b540aSrobert 	    return simplify_gen_binary (AND, mode, op1, trueop0);
2326*404b540aSrobert 	  return trueop0;
2327*404b540aSrobert 	}
2328*404b540aSrobert       /* x/1 is x.  */
2329*404b540aSrobert       if (trueop1 == CONST1_RTX (mode))
2330*404b540aSrobert 	return rtl_hooks.gen_lowpart_no_emit (mode, op0);
2331*404b540aSrobert       /* Convert divide by power of two into shift.  */
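      /* E.g. (udiv X 8) becomes (lshiftrt X 3); a logical right shift
         matches unsigned division exactly for any power of two.  */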
2332*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
2333*404b540aSrobert 	  && (val = exact_log2 (INTVAL (trueop1))) > 0)
2334*404b540aSrobert 	return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
2335*404b540aSrobert       break;
2336*404b540aSrobert 
2337*404b540aSrobert     case DIV:
2338*404b540aSrobert       /* Handle floating point and integers separately.  */
2339*404b540aSrobert       if (SCALAR_FLOAT_MODE_P (mode))
2340*404b540aSrobert 	{
2341*404b540aSrobert 	  /* Maybe change 0.0 / x to 0.0.  This transformation isn't
2342*404b540aSrobert 	     safe for modes with NaNs, since 0.0 / 0.0 will then be
2343*404b540aSrobert 	     NaN rather than 0.0.  Nor is it safe for modes with signed
2344*404b540aSrobert 	     zeros, since dividing 0 by a negative number gives -0.0.  */
2345*404b540aSrobert 	  if (trueop0 == CONST0_RTX (mode)
2346*404b540aSrobert 	      && !HONOR_NANS (mode)
2347*404b540aSrobert 	      && !HONOR_SIGNED_ZEROS (mode)
2348*404b540aSrobert 	      && ! side_effects_p (op1))
2349*404b540aSrobert 	    return op0;
2350*404b540aSrobert 	  /* x/1.0 is x.  */
2351*404b540aSrobert 	  if (trueop1 == CONST1_RTX (mode)
2352*404b540aSrobert 	      && !HONOR_SNANS (mode))
2353*404b540aSrobert 	    return op0;
2354*404b540aSrobert 
2355*404b540aSrobert 	  if (GET_CODE (trueop1) == CONST_DOUBLE
2356*404b540aSrobert 	      && trueop1 != CONST0_RTX (mode))
2357*404b540aSrobert 	    {
2358*404b540aSrobert 	      REAL_VALUE_TYPE d;
2359*404b540aSrobert 	      REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);
2360*404b540aSrobert 
2361*404b540aSrobert 	      /* x/-1.0 is -x.  */
2362*404b540aSrobert 	      if (REAL_VALUES_EQUAL (d, dconstm1)
2363*404b540aSrobert 		  && !HONOR_SNANS (mode))
2364*404b540aSrobert 		return simplify_gen_unary (NEG, mode, op0, mode);
2365*404b540aSrobert 
2366*404b540aSrobert 	      /* Change FP division by a constant into multiplication.
2367*404b540aSrobert 		 Only do this with -funsafe-math-optimizations.  */
2368*404b540aSrobert 	      if (flag_unsafe_math_optimizations
2369*404b540aSrobert 		  && !REAL_VALUES_EQUAL (d, dconst0))
2370*404b540aSrobert 		{
2371*404b540aSrobert 		  REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
2372*404b540aSrobert 		  tem = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2373*404b540aSrobert 		  return simplify_gen_binary (MULT, mode, op0, tem);
2374*404b540aSrobert 		}
2375*404b540aSrobert 	    }
2376*404b540aSrobert 	}
2377*404b540aSrobert       else
2378*404b540aSrobert 	{
2379*404b540aSrobert 	  /* 0/x is 0 (or x&0 if x has side-effects).  */
2380*404b540aSrobert 	  if (trueop0 == CONST0_RTX (mode))
2381*404b540aSrobert 	    {
2382*404b540aSrobert 	      if (side_effects_p (op1))
2383*404b540aSrobert 		return simplify_gen_binary (AND, mode, op1, trueop0);
2384*404b540aSrobert 	      return trueop0;
2385*404b540aSrobert 	    }
2386*404b540aSrobert 	  /* x/1 is x.  */
2387*404b540aSrobert 	  if (trueop1 == CONST1_RTX (mode))
2388*404b540aSrobert 	    return rtl_hooks.gen_lowpart_no_emit (mode, op0);
2389*404b540aSrobert 	  /* x/-1 is -x.  */
2390*404b540aSrobert 	  if (trueop1 == constm1_rtx)
2391*404b540aSrobert 	    {
2392*404b540aSrobert 	      rtx x = rtl_hooks.gen_lowpart_no_emit (mode, op0);
2393*404b540aSrobert 	      return simplify_gen_unary (NEG, mode, x, mode);
2394*404b540aSrobert 	    }
2395*404b540aSrobert 	}
2396*404b540aSrobert       break;
2397*404b540aSrobert 
2398*404b540aSrobert     case UMOD:
2399*404b540aSrobert       /* 0%x is 0 (or x&0 if x has side-effects).  */
2400*404b540aSrobert       if (trueop0 == CONST0_RTX (mode))
2401*404b540aSrobert 	{
2402*404b540aSrobert 	  if (side_effects_p (op1))
2403*404b540aSrobert 	    return simplify_gen_binary (AND, mode, op1, trueop0);
2404*404b540aSrobert 	  return trueop0;
2405*404b540aSrobert 	}
2406*404b540aSrobert       /* x%1 is 0 (of x&0 if x has side-effects).  */
2407*404b540aSrobert       /* x%1 is 0 (or x&0 if x has side-effects).  */
2408*404b540aSrobert 	{
2409*404b540aSrobert 	  if (side_effects_p (op0))
2410*404b540aSrobert 	    return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
2411*404b540aSrobert 	  return CONST0_RTX (mode);
2412*404b540aSrobert 	}
2413*404b540aSrobert       /* Implement modulus by power of two as AND.  */
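      /* E.g. (umod X 8) becomes (and X 7), keeping only the low bits.  */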
2414*404b540aSrobert       if (GET_CODE (trueop1) == CONST_INT
2415*404b540aSrobert 	  && exact_log2 (INTVAL (trueop1)) > 0)
2416*404b540aSrobert 	return simplify_gen_binary (AND, mode, op0,
2417*404b540aSrobert 				    GEN_INT (INTVAL (op1) - 1));
2418*404b540aSrobert       break;
2419*404b540aSrobert 
2420*404b540aSrobert     case MOD:
2421*404b540aSrobert       /* 0%x is 0 (or x&0 if x has side-effects).  */
2422*404b540aSrobert       if (trueop0 == CONST0_RTX (mode))
2423*404b540aSrobert 	{
2424*404b540aSrobert 	  if (side_effects_p (op1))
2425*404b540aSrobert 	    return simplify_gen_binary (AND, mode, op1, trueop0);
2426*404b540aSrobert 	  return trueop0;
2427*404b540aSrobert 	}
2428*404b540aSrobert       /* x%1 and x%-1 are 0 (or x&0 if x has side-effects).  */
2429*404b540aSrobert       if (trueop1 == CONST1_RTX (mode) || trueop1 == constm1_rtx)
2430*404b540aSrobert 	{
2431*404b540aSrobert 	  if (side_effects_p (op0))
2432*404b540aSrobert 	    return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
2433*404b540aSrobert 	  return CONST0_RTX (mode);
2434*404b540aSrobert 	}
2435*404b540aSrobert       break;
2436*404b540aSrobert 
2437*404b540aSrobert     case ROTATERT:
2438*404b540aSrobert     case ROTATE:
2439*404b540aSrobert     case ASHIFTRT:
2440*404b540aSrobert       if (trueop1 == CONST0_RTX (mode))
2441*404b540aSrobert 	return op0;
2442*404b540aSrobert       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2443*404b540aSrobert 	return op0;
2444*404b540aSrobert       /* Rotating ~0 always results in ~0.  */
2445*404b540aSrobert       if (GET_CODE (trueop0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
2446*404b540aSrobert 	  && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode)
2447*404b540aSrobert 	  && ! side_effects_p (op1))
2448*404b540aSrobert 	return op0;
2449*404b540aSrobert       break;
2450*404b540aSrobert 
2451*404b540aSrobert     case ASHIFT:
2452*404b540aSrobert     case SS_ASHIFT:
2453*404b540aSrobert       if (trueop1 == CONST0_RTX (mode))
2454*404b540aSrobert 	return op0;
2455*404b540aSrobert       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2456*404b540aSrobert 	return op0;
2457*404b540aSrobert       break;
2458*404b540aSrobert 
2459*404b540aSrobert     case LSHIFTRT:
2460*404b540aSrobert       if (trueop1 == CONST0_RTX (mode))
2461*404b540aSrobert 	return op0;
2462*404b540aSrobert       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2463*404b540aSrobert 	return op0;
2464*404b540aSrobert       /* Optimize (lshiftrt (clz X) C) as (eq X 0).  */
2465*404b540aSrobert       if (GET_CODE (op0) == CLZ
2466*404b540aSrobert 	  && GET_CODE (trueop1) == CONST_INT
2467*404b540aSrobert 	  && STORE_FLAG_VALUE == 1
2468*404b540aSrobert 	  && INTVAL (trueop1) < (HOST_WIDE_INT)width)
2469*404b540aSrobert 	{
2470*404b540aSrobert 	  enum machine_mode imode = GET_MODE (XEXP (op0, 0));
2471*404b540aSrobert 	  unsigned HOST_WIDE_INT zero_val = 0;
2472*404b540aSrobert 
2473*404b540aSrobert 	  if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
2474*404b540aSrobert 	      && zero_val == GET_MODE_BITSIZE (imode)
2475*404b540aSrobert 	      && INTVAL (trueop1) == exact_log2 (zero_val))
2476*404b540aSrobert 	    return simplify_gen_relational (EQ, mode, imode,
2477*404b540aSrobert 					    XEXP (op0, 0), const0_rtx);
2478*404b540aSrobert 	}
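      /* E.g. for 32-bit X with CLZ_DEFINED_VALUE_AT_ZERO giving 32:
         (clz X) is at most 31 when X != 0, so (lshiftrt (clz X) 5)
         is 0, while X == 0 gives 32 >> 5 == 1 -- exactly (eq X 0).  */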
2479*404b540aSrobert       break;
2480*404b540aSrobert 
2481*404b540aSrobert     case SMIN:
2482*404b540aSrobert       if (width <= HOST_BITS_PER_WIDE_INT
2483*404b540aSrobert 	  && GET_CODE (trueop1) == CONST_INT
2484*404b540aSrobert 	  && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1)
2484*404b540aSrobert 	  && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width - 1)
2486*404b540aSrobert 	return op1;
2487*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2488*404b540aSrobert 	return op0;
2489*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2490*404b540aSrobert       if (tem)
2491*404b540aSrobert 	return tem;
2492*404b540aSrobert       break;
2493*404b540aSrobert 
2494*404b540aSrobert     case SMAX:
2495*404b540aSrobert       if (width <= HOST_BITS_PER_WIDE_INT
2496*404b540aSrobert 	  && GET_CODE (trueop1) == CONST_INT
2497*404b540aSrobert 	  && ((unsigned HOST_WIDE_INT) INTVAL (trueop1)
2498*404b540aSrobert 	      == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
2499*404b540aSrobert 	  && ! side_effects_p (op0))
2500*404b540aSrobert 	return op1;
2501*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2502*404b540aSrobert 	return op0;
2503*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2504*404b540aSrobert       if (tem)
2505*404b540aSrobert 	return tem;
2506*404b540aSrobert       break;
2507*404b540aSrobert 
2508*404b540aSrobert     case UMIN:
2509*404b540aSrobert       if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
2510*404b540aSrobert 	return op1;
2511*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2512*404b540aSrobert 	return op0;
2513*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2514*404b540aSrobert       if (tem)
2515*404b540aSrobert 	return tem;
2516*404b540aSrobert       break;
2517*404b540aSrobert 
2518*404b540aSrobert     case UMAX:
2519*404b540aSrobert       if (trueop1 == constm1_rtx && ! side_effects_p (op0))
2520*404b540aSrobert 	return op1;
2521*404b540aSrobert       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2522*404b540aSrobert 	return op0;
2523*404b540aSrobert       tem = simplify_associative_operation (code, mode, op0, op1);
2524*404b540aSrobert       if (tem)
2525*404b540aSrobert 	return tem;
2526*404b540aSrobert       break;
2527*404b540aSrobert 
2528*404b540aSrobert     case SS_PLUS:
2529*404b540aSrobert     case US_PLUS:
2530*404b540aSrobert     case SS_MINUS:
2531*404b540aSrobert     case US_MINUS:
2532*404b540aSrobert       /* ??? There are simplifications that can be done.  */
2533*404b540aSrobert       return 0;
2534*404b540aSrobert 
2535*404b540aSrobert     case VEC_SELECT:
2536*404b540aSrobert       if (!VECTOR_MODE_P (mode))
2537*404b540aSrobert 	{
2538*404b540aSrobert 	  gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
2539*404b540aSrobert 	  gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
2540*404b540aSrobert 	  gcc_assert (GET_CODE (trueop1) == PARALLEL);
2541*404b540aSrobert 	  gcc_assert (XVECLEN (trueop1, 0) == 1);
2542*404b540aSrobert 	  gcc_assert (GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT);
2543*404b540aSrobert 
2544*404b540aSrobert 	  if (GET_CODE (trueop0) == CONST_VECTOR)
2545*404b540aSrobert 	    return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
2546*404b540aSrobert 						      (trueop1, 0, 0)));
2547*404b540aSrobert 	}
2548*404b540aSrobert       else
2549*404b540aSrobert 	{
2550*404b540aSrobert 	  gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
2551*404b540aSrobert 	  gcc_assert (GET_MODE_INNER (mode)
2552*404b540aSrobert 		      == GET_MODE_INNER (GET_MODE (trueop0)));
2553*404b540aSrobert 	  gcc_assert (GET_CODE (trueop1) == PARALLEL);
2554*404b540aSrobert 
2555*404b540aSrobert 	  if (GET_CODE (trueop0) == CONST_VECTOR)
2556*404b540aSrobert 	    {
2557*404b540aSrobert 	      int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
2558*404b540aSrobert 	      unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
2559*404b540aSrobert 	      rtvec v = rtvec_alloc (n_elts);
2560*404b540aSrobert 	      unsigned int i;
2561*404b540aSrobert 
2562*404b540aSrobert 	      gcc_assert (XVECLEN (trueop1, 0) == (int) n_elts);
2563*404b540aSrobert 	      for (i = 0; i < n_elts; i++)
2564*404b540aSrobert 		{
2565*404b540aSrobert 		  rtx x = XVECEXP (trueop1, 0, i);
2566*404b540aSrobert 
2567*404b540aSrobert 		  gcc_assert (GET_CODE (x) == CONST_INT);
2568*404b540aSrobert 		  RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
2569*404b540aSrobert 						       INTVAL (x));
2570*404b540aSrobert 		}
2571*404b540aSrobert 
2572*404b540aSrobert 	      return gen_rtx_CONST_VECTOR (mode, v);
2573*404b540aSrobert 	    }
2574*404b540aSrobert 	}
2575*404b540aSrobert 
2576*404b540aSrobert       if (XVECLEN (trueop1, 0) == 1
2577*404b540aSrobert 	  && GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT
2578*404b540aSrobert 	  && GET_CODE (trueop0) == VEC_CONCAT)
2579*404b540aSrobert 	{
2580*404b540aSrobert 	  rtx vec = trueop0;
2581*404b540aSrobert 	  int offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode);
2582*404b540aSrobert 
2583*404b540aSrobert 	  /* Try to find the element in the VEC_CONCAT.  */
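	  /* E.g. selecting element 1 of (vec_concat:V2SI A:SI B:SI):
	     offset starts at 4 bytes, the first arm is 4 bytes wide,
	     so we descend into B, whose mode is the requested SImode.  */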
2584*404b540aSrobert 	  while (GET_MODE (vec) != mode
2585*404b540aSrobert 		 && GET_CODE (vec) == VEC_CONCAT)
2586*404b540aSrobert 	    {
2587*404b540aSrobert 	      HOST_WIDE_INT vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0)));
2588*404b540aSrobert 	      if (offset < vec_size)
2589*404b540aSrobert 		vec = XEXP (vec, 0);
2590*404b540aSrobert 	      else
2591*404b540aSrobert 		{
2592*404b540aSrobert 		  offset -= vec_size;
2593*404b540aSrobert 		  vec = XEXP (vec, 1);
2594*404b540aSrobert 		}
2595*404b540aSrobert 	      vec = avoid_constant_pool_reference (vec);
2596*404b540aSrobert 	    }
2597*404b540aSrobert 
2598*404b540aSrobert 	  if (GET_MODE (vec) == mode)
2599*404b540aSrobert 	    return vec;
2600*404b540aSrobert 	}
2601*404b540aSrobert 
2602*404b540aSrobert       return 0;
2603*404b540aSrobert     case VEC_CONCAT:
2604*404b540aSrobert       {
2605*404b540aSrobert 	enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
2606*404b540aSrobert 				      ? GET_MODE (trueop0)
2607*404b540aSrobert 				      : GET_MODE_INNER (mode));
2608*404b540aSrobert 	enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
2609*404b540aSrobert 				      ? GET_MODE (trueop1)
2610*404b540aSrobert 				      : GET_MODE_INNER (mode));
2611*404b540aSrobert 
2612*404b540aSrobert 	gcc_assert (VECTOR_MODE_P (mode));
2613*404b540aSrobert 	gcc_assert (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode)
2614*404b540aSrobert 		    == GET_MODE_SIZE (mode));
2615*404b540aSrobert 
2616*404b540aSrobert 	if (VECTOR_MODE_P (op0_mode))
2617*404b540aSrobert 	  gcc_assert (GET_MODE_INNER (mode)
2618*404b540aSrobert 		      == GET_MODE_INNER (op0_mode));
2619*404b540aSrobert 	else
2620*404b540aSrobert 	  gcc_assert (GET_MODE_INNER (mode) == op0_mode);
2621*404b540aSrobert 
2622*404b540aSrobert 	if (VECTOR_MODE_P (op1_mode))
2623*404b540aSrobert 	  gcc_assert (GET_MODE_INNER (mode)
2624*404b540aSrobert 		      == GET_MODE_INNER (op1_mode));
2625*404b540aSrobert 	else
2626*404b540aSrobert 	  gcc_assert (GET_MODE_INNER (mode) == op1_mode);
2627*404b540aSrobert 
2628*404b540aSrobert 	if ((GET_CODE (trueop0) == CONST_VECTOR
2629*404b540aSrobert 	     || GET_CODE (trueop0) == CONST_INT
2630*404b540aSrobert 	     || GET_CODE (trueop0) == CONST_DOUBLE)
2631*404b540aSrobert 	    && (GET_CODE (trueop1) == CONST_VECTOR
2632*404b540aSrobert 		|| GET_CODE (trueop1) == CONST_INT
2633*404b540aSrobert 		|| GET_CODE (trueop1) == CONST_DOUBLE))
2634*404b540aSrobert 	  {
2635*404b540aSrobert 	    int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
2636*404b540aSrobert 	    unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
2637*404b540aSrobert 	    rtvec v = rtvec_alloc (n_elts);
2638*404b540aSrobert 	    unsigned int i;
2639*404b540aSrobert 	    unsigned in_n_elts = 1;
2640*404b540aSrobert 
2641*404b540aSrobert 	    if (VECTOR_MODE_P (op0_mode))
2642*404b540aSrobert 	      in_n_elts = (GET_MODE_SIZE (op0_mode) / elt_size);
2643*404b540aSrobert 	    for (i = 0; i < n_elts; i++)
2644*404b540aSrobert 	      {
2645*404b540aSrobert 		if (i < in_n_elts)
2646*404b540aSrobert 		  {
2647*404b540aSrobert 		    if (!VECTOR_MODE_P (op0_mode))
2648*404b540aSrobert 		      RTVEC_ELT (v, i) = trueop0;
2649*404b540aSrobert 		    else
2650*404b540aSrobert 		      RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i);
2651*404b540aSrobert 		  }
2652*404b540aSrobert 		else
2653*404b540aSrobert 		  {
2654*404b540aSrobert 		    if (!VECTOR_MODE_P (op1_mode))
2655*404b540aSrobert 		      RTVEC_ELT (v, i) = trueop1;
2656*404b540aSrobert 		    else
2657*404b540aSrobert 		      RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1,
2658*404b540aSrobert 							   i - in_n_elts);
2659*404b540aSrobert 		  }
2660*404b540aSrobert 	      }
2661*404b540aSrobert 
2662*404b540aSrobert 	    return gen_rtx_CONST_VECTOR (mode, v);
2663*404b540aSrobert 	  }
2664*404b540aSrobert       }
2665*404b540aSrobert       return 0;
2666*404b540aSrobert 
2667*404b540aSrobert     default:
2668*404b540aSrobert       gcc_unreachable ();
2669*404b540aSrobert     }
2670*404b540aSrobert 
2671*404b540aSrobert   return 0;
2672*404b540aSrobert }
2673*404b540aSrobert 
2674*404b540aSrobert rtx
2675*404b540aSrobert simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
2676*404b540aSrobert 				 rtx op0, rtx op1)
2677*404b540aSrobert {
2678*404b540aSrobert   HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
2679*404b540aSrobert   HOST_WIDE_INT val;
2680*404b540aSrobert   unsigned int width = GET_MODE_BITSIZE (mode);
2681*404b540aSrobert 
2682*404b540aSrobert   if (VECTOR_MODE_P (mode)
2683*404b540aSrobert       && code != VEC_CONCAT
2684*404b540aSrobert       && GET_CODE (op0) == CONST_VECTOR
2685*404b540aSrobert       && GET_CODE (op1) == CONST_VECTOR)
2686*404b540aSrobert     {
2687*404b540aSrobert       unsigned n_elts = GET_MODE_NUNITS (mode);
2688*404b540aSrobert       enum machine_mode op0mode = GET_MODE (op0);
2689*404b540aSrobert       unsigned op0_n_elts = GET_MODE_NUNITS (op0mode);
2690*404b540aSrobert       enum machine_mode op1mode = GET_MODE (op1);
2691*404b540aSrobert       unsigned op1_n_elts = GET_MODE_NUNITS (op1mode);
2692*404b540aSrobert       rtvec v = rtvec_alloc (n_elts);
2693*404b540aSrobert       unsigned int i;
2694*404b540aSrobert 
2695*404b540aSrobert       gcc_assert (op0_n_elts == n_elts);
2696*404b540aSrobert       gcc_assert (op1_n_elts == n_elts);
2697*404b540aSrobert       for (i = 0; i < n_elts; i++)
2698*404b540aSrobert 	{
2699*404b540aSrobert 	  rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
2700*404b540aSrobert 					     CONST_VECTOR_ELT (op0, i),
2701*404b540aSrobert 					     CONST_VECTOR_ELT (op1, i));
2702*404b540aSrobert 	  if (!x)
2703*404b540aSrobert 	    return 0;
2704*404b540aSrobert 	  RTVEC_ELT (v, i) = x;
2705*404b540aSrobert 	}
2706*404b540aSrobert 
2707*404b540aSrobert       return gen_rtx_CONST_VECTOR (mode, v);
2708*404b540aSrobert     }
2709*404b540aSrobert 
2710*404b540aSrobert   if (VECTOR_MODE_P (mode)
2711*404b540aSrobert       && code == VEC_CONCAT
2712*404b540aSrobert       && CONSTANT_P (op0) && CONSTANT_P (op1))
2713*404b540aSrobert     {
2714*404b540aSrobert       unsigned n_elts = GET_MODE_NUNITS (mode);
2715*404b540aSrobert       rtvec v = rtvec_alloc (n_elts);
2716*404b540aSrobert 
2717*404b540aSrobert       gcc_assert (n_elts >= 2);
2718*404b540aSrobert       if (n_elts == 2)
2719*404b540aSrobert 	{
2720*404b540aSrobert 	  gcc_assert (GET_CODE (op0) != CONST_VECTOR);
2721*404b540aSrobert 	  gcc_assert (GET_CODE (op1) != CONST_VECTOR);
2722*404b540aSrobert 
2723*404b540aSrobert 	  RTVEC_ELT (v, 0) = op0;
2724*404b540aSrobert 	  RTVEC_ELT (v, 1) = op1;
2725*404b540aSrobert 	}
2726*404b540aSrobert       else
2727*404b540aSrobert 	{
2728*404b540aSrobert 	  unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (op0));
2729*404b540aSrobert 	  unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (op1));
2730*404b540aSrobert 	  unsigned i;
2731*404b540aSrobert 
2732*404b540aSrobert 	  gcc_assert (GET_CODE (op0) == CONST_VECTOR);
2733*404b540aSrobert 	  gcc_assert (GET_CODE (op1) == CONST_VECTOR);
2734*404b540aSrobert 	  gcc_assert (op0_n_elts + op1_n_elts == n_elts);
2735*404b540aSrobert 
2736*404b540aSrobert 	  for (i = 0; i < op0_n_elts; ++i)
2737*404b540aSrobert 	    RTVEC_ELT (v, i) = XVECEXP (op0, 0, i);
2738*404b540aSrobert 	  for (i = 0; i < op1_n_elts; ++i)
2739*404b540aSrobert 	    RTVEC_ELT (v, op0_n_elts+i) = XVECEXP (op1, 0, i);
2740*404b540aSrobert 	}
2741*404b540aSrobert 
2742*404b540aSrobert       return gen_rtx_CONST_VECTOR (mode, v);
2743*404b540aSrobert     }
2744*404b540aSrobert 
2745*404b540aSrobert   if (SCALAR_FLOAT_MODE_P (mode)
2746*404b540aSrobert       && GET_CODE (op0) == CONST_DOUBLE
2747*404b540aSrobert       && GET_CODE (op1) == CONST_DOUBLE
2748*404b540aSrobert       && mode == GET_MODE (op0) && mode == GET_MODE (op1))
2749*404b540aSrobert     {
2750*404b540aSrobert       if (code == AND
2751*404b540aSrobert 	  || code == IOR
2752*404b540aSrobert 	  || code == XOR)
2753*404b540aSrobert 	{
2754*404b540aSrobert 	  long tmp0[4];
2755*404b540aSrobert 	  long tmp1[4];
2756*404b540aSrobert 	  REAL_VALUE_TYPE r;
2757*404b540aSrobert 	  int i;
2758*404b540aSrobert 
2759*404b540aSrobert 	  real_to_target (tmp0, CONST_DOUBLE_REAL_VALUE (op0),
2760*404b540aSrobert 			  GET_MODE (op0));
2761*404b540aSrobert 	  real_to_target (tmp1, CONST_DOUBLE_REAL_VALUE (op1),
2762*404b540aSrobert 			  GET_MODE (op1));
2763*404b540aSrobert 	  for (i = 0; i < 4; i++)
2764*404b540aSrobert 	    {
2765*404b540aSrobert 	      switch (code)
2766*404b540aSrobert 	      {
2767*404b540aSrobert 	      case AND:
2768*404b540aSrobert 		tmp0[i] &= tmp1[i];
2769*404b540aSrobert 		break;
2770*404b540aSrobert 	      case IOR:
2771*404b540aSrobert 		tmp0[i] |= tmp1[i];
2772*404b540aSrobert 		break;
2773*404b540aSrobert 	      case XOR:
2774*404b540aSrobert 		tmp0[i] ^= tmp1[i];
2775*404b540aSrobert 		break;
2776*404b540aSrobert 	      default:
2777*404b540aSrobert 		gcc_unreachable ();
2778*404b540aSrobert 	      }
2779*404b540aSrobert 	    }
2780*404b540aSrobert 	   real_from_target (&r, tmp0, mode);
2781*404b540aSrobert 	   return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
2782*404b540aSrobert 	}
2783*404b540aSrobert       else
2784*404b540aSrobert 	{
2785*404b540aSrobert 	  REAL_VALUE_TYPE f0, f1, value, result;
2786*404b540aSrobert 	  bool inexact;
2787*404b540aSrobert 
2788*404b540aSrobert 	  REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
2789*404b540aSrobert 	  REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
2790*404b540aSrobert 	  real_convert (&f0, mode, &f0);
2791*404b540aSrobert 	  real_convert (&f1, mode, &f1);
2792*404b540aSrobert 
2793*404b540aSrobert 	  if (HONOR_SNANS (mode)
2794*404b540aSrobert 	      && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1)))
2795*404b540aSrobert 	    return 0;
2796*404b540aSrobert 
2797*404b540aSrobert 	  if (code == DIV
2798*404b540aSrobert 	      && REAL_VALUES_EQUAL (f1, dconst0)
2799*404b540aSrobert 	      && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
2800*404b540aSrobert 	    return 0;
2801*404b540aSrobert 
2802*404b540aSrobert 	  if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
2803*404b540aSrobert 	      && flag_trapping_math
2804*404b540aSrobert 	      && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1))
2805*404b540aSrobert 	    {
2806*404b540aSrobert 	      int s0 = REAL_VALUE_NEGATIVE (f0);
2807*404b540aSrobert 	      int s1 = REAL_VALUE_NEGATIVE (f1);
2808*404b540aSrobert 
2809*404b540aSrobert 	      switch (code)
2810*404b540aSrobert 		{
2811*404b540aSrobert 		case PLUS:
2812*404b540aSrobert 		  /* Inf + -Inf = NaN plus exception.  */
2813*404b540aSrobert 		  if (s0 != s1)
2814*404b540aSrobert 		    return 0;
2815*404b540aSrobert 		  break;
2816*404b540aSrobert 		case MINUS:
2817*404b540aSrobert 		  /* Inf - Inf = NaN plus exception.  */
2818*404b540aSrobert 		  if (s0 == s1)
2819*404b540aSrobert 		    return 0;
2820*404b540aSrobert 		  break;
2821*404b540aSrobert 		case DIV:
2822*404b540aSrobert 		  /* Inf / Inf = NaN plus exception.  */
2823*404b540aSrobert 		  return 0;
2824*404b540aSrobert 		default:
2825*404b540aSrobert 		  break;
2826*404b540aSrobert 		}
2827*404b540aSrobert 	    }
2828*404b540aSrobert 
2829*404b540aSrobert 	  if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
2830*404b540aSrobert 	      && flag_trapping_math
2831*404b540aSrobert 	      && ((REAL_VALUE_ISINF (f0) && REAL_VALUES_EQUAL (f1, dconst0))
2832*404b540aSrobert 		  || (REAL_VALUE_ISINF (f1)
2833*404b540aSrobert 		      && REAL_VALUES_EQUAL (f0, dconst0))))
2834*404b540aSrobert 	    /* Inf * 0 = NaN plus exception.  */
2835*404b540aSrobert 	    return 0;
2836*404b540aSrobert 
2837*404b540aSrobert 	  inexact = real_arithmetic (&value, rtx_to_tree_code (code),
2838*404b540aSrobert 				     &f0, &f1);
2839*404b540aSrobert 	  real_convert (&result, mode, &value);
2840*404b540aSrobert 
2841*404b540aSrobert 	  /* Don't constant fold this floating point operation if
2842*404b540aSrobert 	     the result has overflowed and flag_trapping_math is set.  */
2843*404b540aSrobert 
2844*404b540aSrobert 	  if (flag_trapping_math
2845*404b540aSrobert 	      && MODE_HAS_INFINITIES (mode)
2846*404b540aSrobert 	      && REAL_VALUE_ISINF (result)
2847*404b540aSrobert 	      && !REAL_VALUE_ISINF (f0)
2848*404b540aSrobert 	      && !REAL_VALUE_ISINF (f1))
2849*404b540aSrobert 	    /* Overflow plus exception.  */
2850*404b540aSrobert 	    return 0;
2851*404b540aSrobert 
2852*404b540aSrobert 	  /* Don't constant fold this floating point operation if the
2853*404b540aSrobert 	     result may depend upon the run-time rounding mode and
2854*404b540aSrobert 	     flag_rounding_math is set, or if GCC's software emulation
2855*404b540aSrobert 	     is unable to accurately represent the result.  */
2856*404b540aSrobert 
2857*404b540aSrobert 	  if ((flag_rounding_math
2858*404b540aSrobert 	       || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
2859*404b540aSrobert 		   && !flag_unsafe_math_optimizations))
2860*404b540aSrobert 	      && (inexact || !real_identical (&result, &value)))
2861*404b540aSrobert 	    return NULL_RTX;
2862*404b540aSrobert 
2863*404b540aSrobert 	  return CONST_DOUBLE_FROM_REAL_VALUE (result, mode);
2864*404b540aSrobert 	}
2865*404b540aSrobert     }
2866*404b540aSrobert 
2867*404b540aSrobert   /* We can fold some multi-word operations.  */
2868*404b540aSrobert   if (GET_MODE_CLASS (mode) == MODE_INT
2869*404b540aSrobert       && width == HOST_BITS_PER_WIDE_INT * 2
2870*404b540aSrobert       && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
2871*404b540aSrobert       && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
2872*404b540aSrobert     {
2873*404b540aSrobert       unsigned HOST_WIDE_INT l1, l2, lv, lt;
2874*404b540aSrobert       HOST_WIDE_INT h1, h2, hv, ht;
2875*404b540aSrobert 
2876*404b540aSrobert       if (GET_CODE (op0) == CONST_DOUBLE)
2877*404b540aSrobert 	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
2878*404b540aSrobert       else
2879*404b540aSrobert 	l1 = INTVAL (op0), h1 = HWI_SIGN_EXTEND (l1);
2880*404b540aSrobert 
2881*404b540aSrobert       if (GET_CODE (op1) == CONST_DOUBLE)
2882*404b540aSrobert 	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
2883*404b540aSrobert       else
2884*404b540aSrobert 	l2 = INTVAL (op1), h2 = HWI_SIGN_EXTEND (l2);
2885*404b540aSrobert 
2886*404b540aSrobert       switch (code)
2887*404b540aSrobert 	{
2888*404b540aSrobert 	case MINUS:
2889*404b540aSrobert 	  /* A - B == A + (-B).  */
2890*404b540aSrobert 	  neg_double (l2, h2, &lv, &hv);
2891*404b540aSrobert 	  l2 = lv, h2 = hv;
2892*404b540aSrobert 
2893*404b540aSrobert 	  /* Fall through....  */
2894*404b540aSrobert 
2895*404b540aSrobert 	case PLUS:
2896*404b540aSrobert 	  add_double (l1, h1, l2, h2, &lv, &hv);
2897*404b540aSrobert 	  break;
2898*404b540aSrobert 
2899*404b540aSrobert 	case MULT:
2900*404b540aSrobert 	  mul_double (l1, h1, l2, h2, &lv, &hv);
2901*404b540aSrobert 	  break;
2902*404b540aSrobert 
2903*404b540aSrobert 	case DIV:
2904*404b540aSrobert 	  if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
2905*404b540aSrobert 				    &lv, &hv, &lt, &ht))
2906*404b540aSrobert 	    return 0;
2907*404b540aSrobert 	  break;
2908*404b540aSrobert 
2909*404b540aSrobert 	case MOD:
2910*404b540aSrobert 	  if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
2911*404b540aSrobert 				    &lt, &ht, &lv, &hv))
2912*404b540aSrobert 	    return 0;
2913*404b540aSrobert 	  break;
2914*404b540aSrobert 
2915*404b540aSrobert 	case UDIV:
2916*404b540aSrobert 	  if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
2917*404b540aSrobert 				    &lv, &hv, &lt, &ht))
2918*404b540aSrobert 	    return 0;
2919*404b540aSrobert 	  break;
2920*404b540aSrobert 
2921*404b540aSrobert 	case UMOD:
2922*404b540aSrobert 	  if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
2923*404b540aSrobert 				    &lt, &ht, &lv, &hv))
2924*404b540aSrobert 	    return 0;
2925*404b540aSrobert 	  break;
2926*404b540aSrobert 
2927*404b540aSrobert 	case AND:
2928*404b540aSrobert 	  lv = l1 & l2, hv = h1 & h2;
2929*404b540aSrobert 	  break;
2930*404b540aSrobert 
2931*404b540aSrobert 	case IOR:
2932*404b540aSrobert 	  lv = l1 | l2, hv = h1 | h2;
2933*404b540aSrobert 	  break;
2934*404b540aSrobert 
2935*404b540aSrobert 	case XOR:
2936*404b540aSrobert 	  lv = l1 ^ l2, hv = h1 ^ h2;
2937*404b540aSrobert 	  break;
2938*404b540aSrobert 
2939*404b540aSrobert 	case SMIN:
2940*404b540aSrobert 	  if (h1 < h2
2941*404b540aSrobert 	      || (h1 == h2
2942*404b540aSrobert 		  && ((unsigned HOST_WIDE_INT) l1
2943*404b540aSrobert 		      < (unsigned HOST_WIDE_INT) l2)))
2944*404b540aSrobert 	    lv = l1, hv = h1;
2945*404b540aSrobert 	  else
2946*404b540aSrobert 	    lv = l2, hv = h2;
2947*404b540aSrobert 	  break;
2948*404b540aSrobert 
2949*404b540aSrobert 	case SMAX:
2950*404b540aSrobert 	  if (h1 > h2
2951*404b540aSrobert 	      || (h1 == h2
2952*404b540aSrobert 		  && ((unsigned HOST_WIDE_INT) l1
2953*404b540aSrobert 		      > (unsigned HOST_WIDE_INT) l2)))
2954*404b540aSrobert 	    lv = l1, hv = h1;
2955*404b540aSrobert 	  else
2956*404b540aSrobert 	    lv = l2, hv = h2;
2957*404b540aSrobert 	  break;
2958*404b540aSrobert 
2959*404b540aSrobert 	case UMIN:
2960*404b540aSrobert 	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
2961*404b540aSrobert 	      || (h1 == h2
2962*404b540aSrobert 		  && ((unsigned HOST_WIDE_INT) l1
2963*404b540aSrobert 		      < (unsigned HOST_WIDE_INT) l2)))
2964*404b540aSrobert 	    lv = l1, hv = h1;
2965*404b540aSrobert 	  else
2966*404b540aSrobert 	    lv = l2, hv = h2;
2967*404b540aSrobert 	  break;
2968*404b540aSrobert 
2969*404b540aSrobert 	case UMAX:
2970*404b540aSrobert 	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
2971*404b540aSrobert 	      || (h1 == h2
2972*404b540aSrobert 		  && ((unsigned HOST_WIDE_INT) l1
2973*404b540aSrobert 		      > (unsigned HOST_WIDE_INT) l2)))
2974*404b540aSrobert 	    lv = l1, hv = h1;
2975*404b540aSrobert 	  else
2976*404b540aSrobert 	    lv = l2, hv = h2;
2977*404b540aSrobert 	  break;
2978*404b540aSrobert 
2979*404b540aSrobert 	case LSHIFTRT:   case ASHIFTRT:
2980*404b540aSrobert 	case ASHIFT:
2981*404b540aSrobert 	case ROTATE:     case ROTATERT:
2982*404b540aSrobert 	  if (SHIFT_COUNT_TRUNCATED)
2983*404b540aSrobert 	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
2984*404b540aSrobert 
2985*404b540aSrobert 	  if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode))
2986*404b540aSrobert 	    return 0;
2987*404b540aSrobert 
2988*404b540aSrobert 	  if (code == LSHIFTRT || code == ASHIFTRT)
2989*404b540aSrobert 	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
2990*404b540aSrobert 			   code == ASHIFTRT);
2991*404b540aSrobert 	  else if (code == ASHIFT)
2992*404b540aSrobert 	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
2993*404b540aSrobert 	  else if (code == ROTATE)
2994*404b540aSrobert 	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
2995*404b540aSrobert 	  else /* code == ROTATERT */
2996*404b540aSrobert 	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
2997*404b540aSrobert 	  break;
2998*404b540aSrobert 
2999*404b540aSrobert 	default:
3000*404b540aSrobert 	  return 0;
3001*404b540aSrobert 	}
3002*404b540aSrobert 
3003*404b540aSrobert       return immed_double_const (lv, hv, mode);
3004*404b540aSrobert     }
3005*404b540aSrobert 
3006*404b540aSrobert   if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
3007*404b540aSrobert       && width <= HOST_BITS_PER_WIDE_INT && width != 0)
3008*404b540aSrobert     {
3009*404b540aSrobert       /* Get the integer argument values in two forms:
3010*404b540aSrobert          zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */
3011*404b540aSrobert 
3012*404b540aSrobert       arg0 = INTVAL (op0);
3013*404b540aSrobert       arg1 = INTVAL (op1);
3014*404b540aSrobert 
3015*404b540aSrobert       if (width < HOST_BITS_PER_WIDE_INT)
3016*404b540aSrobert         {
3017*404b540aSrobert           arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3018*404b540aSrobert           arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3019*404b540aSrobert 
3020*404b540aSrobert           arg0s = arg0;
3021*404b540aSrobert           if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3022*404b540aSrobert 	    arg0s |= ((HOST_WIDE_INT) (-1) << width);
3023*404b540aSrobert 
3024*404b540aSrobert 	  arg1s = arg1;
3025*404b540aSrobert 	  if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3026*404b540aSrobert 	    arg1s |= ((HOST_WIDE_INT) (-1) << width);
3027*404b540aSrobert 	}
3028*404b540aSrobert       else
3029*404b540aSrobert 	{
3030*404b540aSrobert 	  arg0s = arg0;
3031*404b540aSrobert 	  arg1s = arg1;
3032*404b540aSrobert 	}
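      /* E.g. with width == 8 and a CONST_INT of 0xff, arg0 is 0xff
         (zero-extended) while arg0s is -1 (sign-extended).  */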
3033*404b540aSrobert 
3034*404b540aSrobert       /* Compute the value of the arithmetic.  */
3035*404b540aSrobert 
3036*404b540aSrobert       switch (code)
3037*404b540aSrobert 	{
3038*404b540aSrobert 	case PLUS:
3039*404b540aSrobert 	  val = arg0s + arg1s;
3040*404b540aSrobert 	  break;
3041*404b540aSrobert 
3042*404b540aSrobert 	case MINUS:
3043*404b540aSrobert 	  val = arg0s - arg1s;
3044*404b540aSrobert 	  break;
3045*404b540aSrobert 
3046*404b540aSrobert 	case MULT:
3047*404b540aSrobert 	  val = arg0s * arg1s;
3048*404b540aSrobert 	  break;
3049*404b540aSrobert 
3050*404b540aSrobert 	case DIV:
3051*404b540aSrobert 	  if (arg1s == 0
3052*404b540aSrobert 	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3053*404b540aSrobert 		  && arg1s == -1))
3054*404b540aSrobert 	    return 0;
3055*404b540aSrobert 	  val = arg0s / arg1s;
3056*404b540aSrobert 	  break;
3057*404b540aSrobert 
3058*404b540aSrobert 	case MOD:
3059*404b540aSrobert 	  if (arg1s == 0
3060*404b540aSrobert 	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3061*404b540aSrobert 		  && arg1s == -1))
3062*404b540aSrobert 	    return 0;
3063*404b540aSrobert 	  val = arg0s % arg1s;
3064*404b540aSrobert 	  break;
3065*404b540aSrobert 
3066*404b540aSrobert 	case UDIV:
3067*404b540aSrobert 	  if (arg1 == 0
3068*404b540aSrobert 	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3069*404b540aSrobert 		  && arg1s == -1))
3070*404b540aSrobert 	    return 0;
3071*404b540aSrobert 	  val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3072*404b540aSrobert 	  break;
3073*404b540aSrobert 
3074*404b540aSrobert 	case UMOD:
3075*404b540aSrobert 	  if (arg1 == 0
3076*404b540aSrobert 	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3077*404b540aSrobert 		  && arg1s == -1))
3078*404b540aSrobert 	    return 0;
3079*404b540aSrobert 	  val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3080*404b540aSrobert 	  break;
3081*404b540aSrobert 
3082*404b540aSrobert 	case AND:
3083*404b540aSrobert 	  val = arg0 & arg1;
3084*404b540aSrobert 	  break;
3085*404b540aSrobert 
3086*404b540aSrobert 	case IOR:
3087*404b540aSrobert 	  val = arg0 | arg1;
3088*404b540aSrobert 	  break;
3089*404b540aSrobert 
3090*404b540aSrobert 	case XOR:
3091*404b540aSrobert 	  val = arg0 ^ arg1;
3092*404b540aSrobert 	  break;
3093*404b540aSrobert 
3094*404b540aSrobert 	case LSHIFTRT:
3095*404b540aSrobert 	case ASHIFT:
3096*404b540aSrobert 	case ASHIFTRT:
3097*404b540aSrobert 	  /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure
3098*404b540aSrobert 	     the value is in range.  We can't return any old value for
3099*404b540aSrobert 	     out-of-range arguments because either the middle-end (via
3100*404b540aSrobert 	     shift_truncation_mask) or the back-end might be relying on
3101*404b540aSrobert 	     target-specific knowledge.  Nor can we rely on
3102*404b540aSrobert 	     shift_truncation_mask, since the shift might not be part of an
3103*404b540aSrobert 	     ashlM3, lshrM3 or ashrM3 instruction.  */
3104*404b540aSrobert 	  if (SHIFT_COUNT_TRUNCATED)
3105*404b540aSrobert 	    arg1 = (unsigned HOST_WIDE_INT) arg1 % width;
3106*404b540aSrobert 	  else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode))
3107*404b540aSrobert 	    return 0;
3108*404b540aSrobert 
3109*404b540aSrobert 	  val = (code == ASHIFT
3110*404b540aSrobert 		 ? ((unsigned HOST_WIDE_INT) arg0) << arg1
3111*404b540aSrobert 		 : ((unsigned HOST_WIDE_INT) arg0) >> arg1);
3112*404b540aSrobert 
3113*404b540aSrobert 	  /* Sign-extend the result for arithmetic right shifts.  */
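	  /* E.g. width == 8, arg0 == 0xfc (-4), arg1 == 1: the logical
	     shift gives 0x7e; OR-ing in -1 << 7 restores the high bits,
	     yielding -2, the arithmetic shift of -4 by 1.  */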
3114*404b540aSrobert 	  if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
3115*404b540aSrobert 	    val |= ((HOST_WIDE_INT) -1) << (width - arg1);
3116*404b540aSrobert 	  break;
3117*404b540aSrobert 
3118*404b540aSrobert 	case ROTATERT:
3119*404b540aSrobert 	  if (arg1 < 0)
3120*404b540aSrobert 	    return 0;
3121*404b540aSrobert 
3122*404b540aSrobert 	  arg1 %= width;
3123*404b540aSrobert 	  val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
3124*404b540aSrobert 		 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
3125*404b540aSrobert 	  break;
3126*404b540aSrobert 
3127*404b540aSrobert 	case ROTATE:
3128*404b540aSrobert 	  if (arg1 < 0)
3129*404b540aSrobert 	    return 0;
3130*404b540aSrobert 
3131*404b540aSrobert 	  arg1 %= width;
3132*404b540aSrobert 	  val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
3133*404b540aSrobert 		 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
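	  /* E.g. width == 8, arg0 == 0x81, arg1 == 1:
	     (0x81 << 1) | (0x81 >> 7) == 0x103, which gen_int_mode
	     truncates to 0x03.  */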
3134*404b540aSrobert 	  break;
3135*404b540aSrobert 
3136*404b540aSrobert 	case COMPARE:
3137*404b540aSrobert 	  /* Do nothing here.  */
3138*404b540aSrobert 	  return 0;
3139*404b540aSrobert 
3140*404b540aSrobert 	case SMIN:
3141*404b540aSrobert 	  val = arg0s <= arg1s ? arg0s : arg1s;
3142*404b540aSrobert 	  break;
3143*404b540aSrobert 
3144*404b540aSrobert 	case UMIN:
3145*404b540aSrobert 	  val = ((unsigned HOST_WIDE_INT) arg0
3146*404b540aSrobert 		 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3147*404b540aSrobert 	  break;
3148*404b540aSrobert 
3149*404b540aSrobert 	case SMAX:
3150*404b540aSrobert 	  val = arg0s > arg1s ? arg0s : arg1s;
3151*404b540aSrobert 	  break;
3152*404b540aSrobert 
3153*404b540aSrobert 	case UMAX:
3154*404b540aSrobert 	  val = ((unsigned HOST_WIDE_INT) arg0
3155*404b540aSrobert 		 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3156*404b540aSrobert 	  break;
3157*404b540aSrobert 
3158*404b540aSrobert 	case SS_PLUS:
3159*404b540aSrobert 	case US_PLUS:
3160*404b540aSrobert 	case SS_MINUS:
3161*404b540aSrobert 	case US_MINUS:
3162*404b540aSrobert 	case SS_ASHIFT:
3163*404b540aSrobert 	  /* ??? There are simplifications that can be done.  */
3164*404b540aSrobert 	  return 0;
3165*404b540aSrobert 
3166*404b540aSrobert 	default:
3167*404b540aSrobert 	  gcc_unreachable ();
3168*404b540aSrobert 	}
3169*404b540aSrobert 
3170*404b540aSrobert       return gen_int_mode (val, mode);
3171*404b540aSrobert     }
3172*404b540aSrobert 
3173*404b540aSrobert   return NULL_RTX;
3174*404b540aSrobert }
3175*404b540aSrobert 
3176*404b540aSrobert 
3177*404b540aSrobert 
3178*404b540aSrobert /* Simplify a PLUS or MINUS, at least one of whose operands may be another
3179*404b540aSrobert    PLUS or MINUS.
3180*404b540aSrobert 
3181*404b540aSrobert    Rather than test for specific cases, we do this by a brute-force method
3182*404b540aSrobert    and do all possible simplifications until no more changes occur.  Then
3183*404b540aSrobert    we rebuild the operation.  */
3184*404b540aSrobert 
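/* One term of a flattened PLUS/MINUS expression: OP is the term and
   NEG is nonzero when the term enters the sum with a minus sign.  */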
3185*404b540aSrobert struct simplify_plus_minus_op_data
3186*404b540aSrobert {
3187*404b540aSrobert   rtx op;
3188*404b540aSrobert   short neg;
3189*404b540aSrobert };
3190*404b540aSrobert 
3191*404b540aSrobert static int
3192*404b540aSrobert simplify_plus_minus_op_data_cmp (const void *p1, const void *p2)
3193*404b540aSrobert {
3194*404b540aSrobert   const struct simplify_plus_minus_op_data *d1 = p1;
3195*404b540aSrobert   const struct simplify_plus_minus_op_data *d2 = p2;
3196*404b540aSrobert   int result;
3197*404b540aSrobert 
3198*404b540aSrobert   result = (commutative_operand_precedence (d2->op)
3199*404b540aSrobert 	    - commutative_operand_precedence (d1->op));
3200*404b540aSrobert   if (result)
3201*404b540aSrobert     return result;
3202*404b540aSrobert 
3203*404b540aSrobert   /* Group together equal REGs to do more simplification.  */
3204*404b540aSrobert   if (REG_P (d1->op) && REG_P (d2->op))
3205*404b540aSrobert     return REGNO (d1->op) - REGNO (d2->op);
3206*404b540aSrobert   else
3207*404b540aSrobert     return 0;
3208*404b540aSrobert }
3209*404b540aSrobert 
3210*404b540aSrobert static rtx
3211*404b540aSrobert simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
3212*404b540aSrobert 		     rtx op1)
3213*404b540aSrobert {
3214*404b540aSrobert   struct simplify_plus_minus_op_data ops[8];
3215*404b540aSrobert   rtx result, tem;
3216*404b540aSrobert   int n_ops = 2, input_ops = 2;
3217*404b540aSrobert   int changed, n_constants = 0, canonicalized = 0;
3218*404b540aSrobert   int i, j;
3219*404b540aSrobert 
3220*404b540aSrobert   memset (ops, 0, sizeof ops);
3221*404b540aSrobert 
3222*404b540aSrobert   /* Set up the two operands and then expand them until nothing has been
3223*404b540aSrobert      changed.  If we run out of room in our array, give up; this should
3224*404b540aSrobert      almost never happen.  */
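  /* E.g. for (minus (plus A B) (minus A C)) the loop below flattens
     the expression to the terms {+A, +B, -A, +C}; A and -A then cancel
     in the pairwise pass, leaving (plus B C).  */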
3225*404b540aSrobert 
3226*404b540aSrobert   ops[0].op = op0;
3227*404b540aSrobert   ops[0].neg = 0;
3228*404b540aSrobert   ops[1].op = op1;
3229*404b540aSrobert   ops[1].neg = (code == MINUS);
3230*404b540aSrobert 
3231*404b540aSrobert   do
3232*404b540aSrobert     {
3233*404b540aSrobert       changed = 0;
3234*404b540aSrobert 
3235*404b540aSrobert       for (i = 0; i < n_ops; i++)
3236*404b540aSrobert 	{
3237*404b540aSrobert 	  rtx this_op = ops[i].op;
3238*404b540aSrobert 	  int this_neg = ops[i].neg;
3239*404b540aSrobert 	  enum rtx_code this_code = GET_CODE (this_op);
3240*404b540aSrobert 
3241*404b540aSrobert 	  switch (this_code)
3242*404b540aSrobert 	    {
3243*404b540aSrobert 	    case PLUS:
3244*404b540aSrobert 	    case MINUS:
3245*404b540aSrobert 	      if (n_ops == 7)
3246*404b540aSrobert 		return NULL_RTX;
3247*404b540aSrobert 
3248*404b540aSrobert 	      ops[n_ops].op = XEXP (this_op, 1);
3249*404b540aSrobert 	      ops[n_ops].neg = (this_code == MINUS) ^ this_neg;
3250*404b540aSrobert 	      n_ops++;
3251*404b540aSrobert 
3252*404b540aSrobert 	      ops[i].op = XEXP (this_op, 0);
3253*404b540aSrobert 	      input_ops++;
3254*404b540aSrobert 	      changed = 1;
3255*404b540aSrobert 	      canonicalized |= this_neg;
3256*404b540aSrobert 	      break;
3257*404b540aSrobert 
3258*404b540aSrobert 	    case NEG:
3259*404b540aSrobert 	      ops[i].op = XEXP (this_op, 0);
3260*404b540aSrobert 	      ops[i].neg = ! this_neg;
3261*404b540aSrobert 	      changed = 1;
3262*404b540aSrobert 	      canonicalized = 1;
3263*404b540aSrobert 	      break;
3264*404b540aSrobert 
3265*404b540aSrobert 	    case CONST:
3266*404b540aSrobert 	      if (n_ops < 7
3267*404b540aSrobert 		  && GET_CODE (XEXP (this_op, 0)) == PLUS
3268*404b540aSrobert 		  && CONSTANT_P (XEXP (XEXP (this_op, 0), 0))
3269*404b540aSrobert 		  && CONSTANT_P (XEXP (XEXP (this_op, 0), 1)))
3270*404b540aSrobert 		{
3271*404b540aSrobert 		  ops[i].op = XEXP (XEXP (this_op, 0), 0);
3272*404b540aSrobert 		  ops[n_ops].op = XEXP (XEXP (this_op, 0), 1);
3273*404b540aSrobert 		  ops[n_ops].neg = this_neg;
3274*404b540aSrobert 		  n_ops++;
3275*404b540aSrobert 		  changed = 1;
3276*404b540aSrobert 	          canonicalized = 1;
3277*404b540aSrobert 		}
3278*404b540aSrobert 	      break;
3279*404b540aSrobert 
3280*404b540aSrobert 	    case NOT:
3281*404b540aSrobert 	      /* ~a -> (-a - 1) */
3282*404b540aSrobert 	      if (n_ops != 7)
3283*404b540aSrobert 		{
3284*404b540aSrobert 		  ops[n_ops].op = constm1_rtx;
3285*404b540aSrobert 		  ops[n_ops++].neg = this_neg;
3286*404b540aSrobert 		  ops[i].op = XEXP (this_op, 0);
3287*404b540aSrobert 		  ops[i].neg = !this_neg;
3288*404b540aSrobert 		  changed = 1;
3289*404b540aSrobert 	          canonicalized = 1;
3290*404b540aSrobert 		}
3291*404b540aSrobert 	      break;
3292*404b540aSrobert 
3293*404b540aSrobert 	    case CONST_INT:
3294*404b540aSrobert 	      n_constants++;
3295*404b540aSrobert 	      if (this_neg)
3296*404b540aSrobert 		{
3297*404b540aSrobert 		  ops[i].op = neg_const_int (mode, this_op);
3298*404b540aSrobert 		  ops[i].neg = 0;
3299*404b540aSrobert 		  changed = 1;
3300*404b540aSrobert 	          canonicalized = 1;
3301*404b540aSrobert 		}
3302*404b540aSrobert 	      break;
3303*404b540aSrobert 
3304*404b540aSrobert 	    default:
3305*404b540aSrobert 	      break;
3306*404b540aSrobert 	    }
3307*404b540aSrobert 	}
3308*404b540aSrobert     }
3309*404b540aSrobert   while (changed);
3310*404b540aSrobert 
3311*404b540aSrobert   if (n_constants > 1)
3312*404b540aSrobert     canonicalized = 1;
3313*404b540aSrobert 
3314*404b540aSrobert   gcc_assert (n_ops >= 2);
3315*404b540aSrobert 
3316*404b540aSrobert   /* If we only have two operands, we can avoid the loops.  */
3317*404b540aSrobert   if (n_ops == 2)
3318*404b540aSrobert     {
3319*404b540aSrobert       enum rtx_code code = ops[0].neg || ops[1].neg ? MINUS : PLUS;
3320*404b540aSrobert       rtx lhs, rhs;
3321*404b540aSrobert 
3322*404b540aSrobert       /* Get the two operands.  Be careful with the order, especially for
3323*404b540aSrobert 	 the cases where code == MINUS.  */
3324*404b540aSrobert       if (ops[0].neg && ops[1].neg)
3325*404b540aSrobert 	{
3326*404b540aSrobert 	  lhs = gen_rtx_NEG (mode, ops[0].op);
3327*404b540aSrobert 	  rhs = ops[1].op;
3328*404b540aSrobert 	}
3329*404b540aSrobert       else if (ops[0].neg)
3330*404b540aSrobert 	{
3331*404b540aSrobert 	  lhs = ops[1].op;
3332*404b540aSrobert 	  rhs = ops[0].op;
3333*404b540aSrobert 	}
3334*404b540aSrobert       else
3335*404b540aSrobert 	{
3336*404b540aSrobert 	  lhs = ops[0].op;
3337*404b540aSrobert 	  rhs = ops[1].op;
3338*404b540aSrobert 	}
3339*404b540aSrobert 
3340*404b540aSrobert       return simplify_const_binary_operation (code, mode, lhs, rhs);
3341*404b540aSrobert     }
3342*404b540aSrobert 
3343*404b540aSrobert   /* Now simplify each pair of operands until nothing changes.  */
3344*404b540aSrobert   do
3345*404b540aSrobert     {
3346*404b540aSrobert       /* Insertion sort is good enough for an eight-element array.  */
3347*404b540aSrobert       for (i = 1; i < n_ops; i++)
3348*404b540aSrobert         {
3349*404b540aSrobert           struct simplify_plus_minus_op_data save;
3350*404b540aSrobert           j = i - 1;
3351*404b540aSrobert           if (simplify_plus_minus_op_data_cmp (&ops[j], &ops[i]) < 0)
3352*404b540aSrobert 	    continue;
3353*404b540aSrobert 
3354*404b540aSrobert           canonicalized = 1;
3355*404b540aSrobert           save = ops[i];
3356*404b540aSrobert           do
3357*404b540aSrobert 	    ops[j + 1] = ops[j];
3358*404b540aSrobert           while (j-- && simplify_plus_minus_op_data_cmp (&ops[j], &save) > 0);
3359*404b540aSrobert           ops[j + 1] = save;
3360*404b540aSrobert         }
3361*404b540aSrobert 
3362*404b540aSrobert       /* This is only useful the first time through.  */
3363*404b540aSrobert       if (!canonicalized)
3364*404b540aSrobert         return NULL_RTX;
3365*404b540aSrobert 
3366*404b540aSrobert       changed = 0;
3367*404b540aSrobert       for (i = n_ops - 1; i > 0; i--)
3368*404b540aSrobert 	for (j = i - 1; j >= 0; j--)
3369*404b540aSrobert 	  {
3370*404b540aSrobert 	    rtx lhs = ops[j].op, rhs = ops[i].op;
3371*404b540aSrobert 	    int lneg = ops[j].neg, rneg = ops[i].neg;
3372*404b540aSrobert 
3373*404b540aSrobert 	    if (lhs != 0 && rhs != 0)
3374*404b540aSrobert 	      {
3375*404b540aSrobert 		enum rtx_code ncode = PLUS;
3376*404b540aSrobert 
3377*404b540aSrobert 		if (lneg != rneg)
3378*404b540aSrobert 		  {
3379*404b540aSrobert 		    ncode = MINUS;
3380*404b540aSrobert 		    if (lneg)
3381*404b540aSrobert 		      tem = lhs, lhs = rhs, rhs = tem;
3382*404b540aSrobert 		  }
3383*404b540aSrobert 		else if (swap_commutative_operands_p (lhs, rhs))
3384*404b540aSrobert 		  tem = lhs, lhs = rhs, rhs = tem;
3385*404b540aSrobert 
3386*404b540aSrobert 		if ((GET_CODE (lhs) == CONST || GET_CODE (lhs) == CONST_INT)
3387*404b540aSrobert 		    && (GET_CODE (rhs) == CONST || GET_CODE (rhs) == CONST_INT))
3388*404b540aSrobert 		  {
3389*404b540aSrobert 		    rtx tem_lhs, tem_rhs;
3390*404b540aSrobert 
3391*404b540aSrobert 		    tem_lhs = GET_CODE (lhs) == CONST ? XEXP (lhs, 0) : lhs;
3392*404b540aSrobert 		    tem_rhs = GET_CODE (rhs) == CONST ? XEXP (rhs, 0) : rhs;
3393*404b540aSrobert 		    tem = simplify_binary_operation (ncode, mode, tem_lhs, tem_rhs);
3394*404b540aSrobert 
3395*404b540aSrobert 		    if (tem && !CONSTANT_P (tem))
3396*404b540aSrobert 		      tem = gen_rtx_CONST (GET_MODE (tem), tem);
3397*404b540aSrobert 		  }
3398*404b540aSrobert 		else
3399*404b540aSrobert 		  tem = simplify_binary_operation (ncode, mode, lhs, rhs);
3400*404b540aSrobert 
3401*404b540aSrobert 		/* Reject "simplifications" that just wrap the two
3402*404b540aSrobert 		   arguments in a CONST.  Failure to do so can result
3403*404b540aSrobert 		   in infinite recursion with simplify_binary_operation
3404*404b540aSrobert 		   when it calls us to simplify CONST operations.  */
3405*404b540aSrobert 		if (tem
3406*404b540aSrobert 		    && ! (GET_CODE (tem) == CONST
3407*404b540aSrobert 			  && GET_CODE (XEXP (tem, 0)) == ncode
3408*404b540aSrobert 			  && XEXP (XEXP (tem, 0), 0) == lhs
3409*404b540aSrobert 			  && XEXP (XEXP (tem, 0), 1) == rhs))
3410*404b540aSrobert 		  {
3411*404b540aSrobert 		    lneg &= rneg;
3412*404b540aSrobert 		    if (GET_CODE (tem) == NEG)
3413*404b540aSrobert 		      tem = XEXP (tem, 0), lneg = !lneg;
3414*404b540aSrobert 		    if (GET_CODE (tem) == CONST_INT && lneg)
3415*404b540aSrobert 		      tem = neg_const_int (mode, tem), lneg = 0;
3416*404b540aSrobert 
3417*404b540aSrobert 		    ops[i].op = tem;
3418*404b540aSrobert 		    ops[i].neg = lneg;
3419*404b540aSrobert 		    ops[j].op = NULL_RTX;
3420*404b540aSrobert 		    changed = 1;
3421*404b540aSrobert 		  }
3422*404b540aSrobert 	      }
3423*404b540aSrobert 	  }
3424*404b540aSrobert 
3425*404b540aSrobert       /* Pack all the operands to the lower-numbered entries.  */
3426*404b540aSrobert       for (i = 0, j = 0; j < n_ops; j++)
3427*404b540aSrobert         if (ops[j].op)
3428*404b540aSrobert           {
3429*404b540aSrobert 	    ops[i] = ops[j];
3430*404b540aSrobert 	    i++;
3431*404b540aSrobert           }
3432*404b540aSrobert       n_ops = i;
3433*404b540aSrobert     }
3434*404b540aSrobert   while (changed);
3435*404b540aSrobert 
3436*404b540aSrobert   /* Create (minus -C X) instead of (neg (const (plus X C))).  */
3437*404b540aSrobert   if (n_ops == 2
3438*404b540aSrobert       && GET_CODE (ops[1].op) == CONST_INT
3439*404b540aSrobert       && CONSTANT_P (ops[0].op)
3440*404b540aSrobert       && ops[0].neg)
3441*404b540aSrobert     return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);
3442*404b540aSrobert 
3443*404b540aSrobert   /* We suppressed creation of trivial CONST expressions in the
3444*404b540aSrobert      combination loop to avoid recursion.  Create one manually now.
3445*404b540aSrobert      The combination loop should have ensured that there is exactly
3446*404b540aSrobert      one CONST_INT, and the sort will have ensured that it is last
3447*404b540aSrobert      in the array and that any other constant will be next-to-last.  */
3448*404b540aSrobert 
3449*404b540aSrobert   if (n_ops > 1
3450*404b540aSrobert       && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3451*404b540aSrobert       && CONSTANT_P (ops[n_ops - 2].op))
3452*404b540aSrobert     {
3453*404b540aSrobert       rtx value = ops[n_ops - 1].op;
3454*404b540aSrobert       if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3455*404b540aSrobert 	value = neg_const_int (mode, value);
3456*404b540aSrobert       ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3457*404b540aSrobert       n_ops--;
3458*404b540aSrobert     }
3459*404b540aSrobert 
3460*404b540aSrobert   /* Put a non-negated operand first, if possible.  */
3461*404b540aSrobert 
3462*404b540aSrobert   for (i = 0; i < n_ops && ops[i].neg; i++)
3463*404b540aSrobert     continue;
3464*404b540aSrobert   if (i == n_ops)
3465*404b540aSrobert     ops[0].op = gen_rtx_NEG (mode, ops[0].op);
3466*404b540aSrobert   else if (i != 0)
3467*404b540aSrobert     {
3468*404b540aSrobert       tem = ops[0].op;
3469*404b540aSrobert       ops[0] = ops[i];
3470*404b540aSrobert       ops[i].op = tem;
3471*404b540aSrobert       ops[i].neg = 1;
3472*404b540aSrobert     }
3473*404b540aSrobert 
3474*404b540aSrobert   /* Now make the result by performing the requested operations.  */
3475*404b540aSrobert   result = ops[0].op;
3476*404b540aSrobert   for (i = 1; i < n_ops; i++)
3477*404b540aSrobert     result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS,
3478*404b540aSrobert 			     mode, result, ops[i].op);
3479*404b540aSrobert 
3480*404b540aSrobert   return result;
3481*404b540aSrobert }
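
/* As an example of the net effect of simplify_plus_minus: for an
   integral mode, an input such as (plus (minus (reg A) (reg B)) (reg B))
   is flattened into the operand array as { A, -B, B }; B and -B cancel
   to zero in the combination loop above, and the result simplifies to
   just (reg A).  */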
3482*404b540aSrobert 
3483*404b540aSrobert /* Check whether an operand is suitable for calling simplify_plus_minus.  */
3484*404b540aSrobert static bool
3485*404b540aSrobert plus_minus_operand_p (rtx x)
3486*404b540aSrobert {
3487*404b540aSrobert   return GET_CODE (x) == PLUS
3488*404b540aSrobert          || GET_CODE (x) == MINUS
3489*404b540aSrobert 	 || (GET_CODE (x) == CONST
3490*404b540aSrobert 	     && GET_CODE (XEXP (x, 0)) == PLUS
3491*404b540aSrobert 	     && CONSTANT_P (XEXP (XEXP (x, 0), 0))
3492*404b540aSrobert 	     && CONSTANT_P (XEXP (XEXP (x, 0), 1)));
3493*404b540aSrobert }
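
/* For example, (plus (reg) (reg)), (minus (reg) (const_int 4)) and
   (const (plus (symbol_ref "x") (const_int 8))) all satisfy this
   predicate, while a bare (reg) does not, so callers avoid handing
   simplify_plus_minus an expression it cannot usefully reassociate.  */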
3494*404b540aSrobert 
3495*404b540aSrobert /* Like simplify_binary_operation except used for relational operators.
3496*404b540aSrobert    MODE is the mode of the result.  If MODE is VOIDmode, the operands
3497*404b540aSrobert    must not both be VOIDmode as well.
3498*404b540aSrobert 
3499*404b540aSrobert    CMP_MODE specifies the mode in which the comparison is done, so it is
3500*404b540aSrobert    the mode of the operands.  If CMP_MODE is VOIDmode, it is taken from
3501*404b540aSrobert    the operands or, if both are VOIDmode, the operands are compared in
3502*404b540aSrobert    "infinite precision".  */
3503*404b540aSrobert rtx
3504*404b540aSrobert simplify_relational_operation (enum rtx_code code, enum machine_mode mode,
3505*404b540aSrobert 			       enum machine_mode cmp_mode, rtx op0, rtx op1)
3506*404b540aSrobert {
3507*404b540aSrobert   rtx tem, trueop0, trueop1;
3508*404b540aSrobert 
3509*404b540aSrobert   if (cmp_mode == VOIDmode)
3510*404b540aSrobert     cmp_mode = GET_MODE (op0);
3511*404b540aSrobert   if (cmp_mode == VOIDmode)
3512*404b540aSrobert     cmp_mode = GET_MODE (op1);
3513*404b540aSrobert 
3514*404b540aSrobert   tem = simplify_const_relational_operation (code, cmp_mode, op0, op1);
3515*404b540aSrobert   if (tem)
3516*404b540aSrobert     {
3517*404b540aSrobert       if (SCALAR_FLOAT_MODE_P (mode))
3518*404b540aSrobert 	{
3519*404b540aSrobert           if (tem == const0_rtx)
3520*404b540aSrobert             return CONST0_RTX (mode);
3521*404b540aSrobert #ifdef FLOAT_STORE_FLAG_VALUE
3522*404b540aSrobert 	  {
3523*404b540aSrobert 	    REAL_VALUE_TYPE val;
3524*404b540aSrobert 	    val = FLOAT_STORE_FLAG_VALUE (mode);
3525*404b540aSrobert 	    return CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
3526*404b540aSrobert 	  }
3527*404b540aSrobert #else
3528*404b540aSrobert 	  return NULL_RTX;
3529*404b540aSrobert #endif
3530*404b540aSrobert 	}
3531*404b540aSrobert       if (VECTOR_MODE_P (mode))
3532*404b540aSrobert 	{
3533*404b540aSrobert 	  if (tem == const0_rtx)
3534*404b540aSrobert 	    return CONST0_RTX (mode);
3535*404b540aSrobert #ifdef VECTOR_STORE_FLAG_VALUE
3536*404b540aSrobert 	  {
3537*404b540aSrobert 	    int i, units;
3538*404b540aSrobert 	    rtvec v;
3539*404b540aSrobert 
3540*404b540aSrobert 	    rtx val = VECTOR_STORE_FLAG_VALUE (mode);
3541*404b540aSrobert 	    if (val == NULL_RTX)
3542*404b540aSrobert 	      return NULL_RTX;
3543*404b540aSrobert 	    if (val == const1_rtx)
3544*404b540aSrobert 	      return CONST1_RTX (mode);
3545*404b540aSrobert 
3546*404b540aSrobert 	    units = GET_MODE_NUNITS (mode);
3547*404b540aSrobert 	    v = rtvec_alloc (units);
3548*404b540aSrobert 	    for (i = 0; i < units; i++)
3549*404b540aSrobert 	      RTVEC_ELT (v, i) = val;
3550*404b540aSrobert 	    return gen_rtx_raw_CONST_VECTOR (mode, v);
3551*404b540aSrobert 	  }
3552*404b540aSrobert #else
3553*404b540aSrobert 	  return NULL_RTX;
3554*404b540aSrobert #endif
3555*404b540aSrobert 	}
3556*404b540aSrobert 
3557*404b540aSrobert       return tem;
3558*404b540aSrobert     }
3559*404b540aSrobert 
3560*404b540aSrobert   /* For the following tests, ensure const0_rtx is op1.  */
3561*404b540aSrobert   if (swap_commutative_operands_p (op0, op1)
3562*404b540aSrobert       || (op0 == const0_rtx && op1 != const0_rtx))
3563*404b540aSrobert     tem = op0, op0 = op1, op1 = tem, code = swap_condition (code);
3564*404b540aSrobert 
3565*404b540aSrobert   /* If op0 is a compare, extract the comparison arguments from it.  */
3566*404b540aSrobert   if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
3567*404b540aSrobert     return simplify_relational_operation (code, mode, VOIDmode,
3568*404b540aSrobert 				          XEXP (op0, 0), XEXP (op0, 1));
3569*404b540aSrobert 
3570*404b540aSrobert   if (GET_MODE_CLASS (cmp_mode) == MODE_CC
3571*404b540aSrobert       || CC0_P (op0))
3572*404b540aSrobert     return NULL_RTX;
3573*404b540aSrobert 
3574*404b540aSrobert   trueop0 = avoid_constant_pool_reference (op0);
3575*404b540aSrobert   trueop1 = avoid_constant_pool_reference (op1);
3576*404b540aSrobert   return simplify_relational_operation_1 (code, mode, cmp_mode,
3577*404b540aSrobert 		  			  trueop0, trueop1);
3578*404b540aSrobert }
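
#if 0
/* Illustrative only: a minimal usage sketch, not part of the original
   sources.  The result mode and the comparison mode are independent,
   and two VOIDmode CONST_INTs are compared in "infinite precision".  */
static void
relational_example (void)
{
  rtx t = simplify_relational_operation (GT, SImode, VOIDmode,
					 GEN_INT (2), GEN_INT (1));
  /* t == const_true_rtx, since 2 > 1.  */
  (void) t;
}
#endif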
3579*404b540aSrobert 
3580*404b540aSrobert /* This part of simplify_relational_operation is only used when CMP_MODE
3581*404b540aSrobert    is not in class MODE_CC (i.e. it is a real comparison).
3582*404b540aSrobert 
3583*404b540aSrobert    MODE is the mode of the result, while CMP_MODE specifies the mode in
3584*404b540aSrobert    which the comparison is done, so it is the mode of the operands.  */
3585*404b540aSrobert 
3586*404b540aSrobert static rtx
3587*404b540aSrobert simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
3588*404b540aSrobert 				 enum machine_mode cmp_mode, rtx op0, rtx op1)
3589*404b540aSrobert {
3590*404b540aSrobert   enum rtx_code op0code = GET_CODE (op0);
3591*404b540aSrobert 
3592*404b540aSrobert   if (GET_CODE (op1) == CONST_INT)
3593*404b540aSrobert     {
3594*404b540aSrobert       if (INTVAL (op1) == 0 && COMPARISON_P (op0))
3595*404b540aSrobert 	{
3596*404b540aSrobert 	  /* If op0 is a comparison, extract the comparison arguments
3597*404b540aSrobert 	     from it.  */
3598*404b540aSrobert 	  if (code == NE)
3599*404b540aSrobert 	    {
3600*404b540aSrobert 	      if (GET_MODE (op0) == mode)
3601*404b540aSrobert 		return simplify_rtx (op0);
3602*404b540aSrobert 	      else
3603*404b540aSrobert 		return simplify_gen_relational (GET_CODE (op0), mode, VOIDmode,
3604*404b540aSrobert 					        XEXP (op0, 0), XEXP (op0, 1));
3605*404b540aSrobert 	    }
3606*404b540aSrobert 	  else if (code == EQ)
3607*404b540aSrobert 	    {
3608*404b540aSrobert 	      enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX);
3609*404b540aSrobert 	      if (new_code != UNKNOWN)
3610*404b540aSrobert 	        return simplify_gen_relational (new_code, mode, VOIDmode,
3611*404b540aSrobert 					        XEXP (op0, 0), XEXP (op0, 1));
3612*404b540aSrobert 	    }
3613*404b540aSrobert 	}
3614*404b540aSrobert     }
3615*404b540aSrobert 
3616*404b540aSrobert   /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1))  */
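  /* For instance, (eq (plus x (const_int 3)) (const_int 7)) becomes
     (eq x (const_int 4)).  */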
3617*404b540aSrobert   if ((code == EQ || code == NE)
3618*404b540aSrobert       && (op0code == PLUS || op0code == MINUS)
3619*404b540aSrobert       && CONSTANT_P (op1)
3620*404b540aSrobert       && CONSTANT_P (XEXP (op0, 1))
3621*404b540aSrobert       && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations))
3622*404b540aSrobert     {
3623*404b540aSrobert       rtx x = XEXP (op0, 0);
3624*404b540aSrobert       rtx c = XEXP (op0, 1);
3625*404b540aSrobert 
3626*404b540aSrobert       c = simplify_gen_binary (op0code == PLUS ? MINUS : PLUS,
3627*404b540aSrobert 			       cmp_mode, op1, c);
3628*404b540aSrobert       return simplify_gen_relational (code, mode, cmp_mode, x, c);
3629*404b540aSrobert     }
3630*404b540aSrobert 
3631*404b540aSrobert   /* (ne:SI (zero_extract:SI FOO (const_int 1) BAR) (const_int 0)) is
3632*404b540aSrobert      the same as (zero_extract:SI FOO (const_int 1) BAR).  */
3633*404b540aSrobert   if (code == NE
3634*404b540aSrobert       && op1 == const0_rtx
3635*404b540aSrobert       && GET_MODE_CLASS (mode) == MODE_INT
3636*404b540aSrobert       && cmp_mode != VOIDmode
3637*404b540aSrobert       /* ??? Work-around BImode bugs in the ia64 backend.  */
3638*404b540aSrobert       && mode != BImode
3639*404b540aSrobert       && cmp_mode != BImode
3640*404b540aSrobert       && nonzero_bits (op0, cmp_mode) == 1
3641*404b540aSrobert       && STORE_FLAG_VALUE == 1)
3642*404b540aSrobert     return GET_MODE_SIZE (mode) > GET_MODE_SIZE (cmp_mode)
3643*404b540aSrobert 	   ? simplify_gen_unary (ZERO_EXTEND, mode, op0, cmp_mode)
3644*404b540aSrobert 	   : lowpart_subreg (mode, op0, cmp_mode);
3645*404b540aSrobert 
3646*404b540aSrobert   /* (eq/ne (xor x y) 0) simplifies to (eq/ne x y).  */
3647*404b540aSrobert   if ((code == EQ || code == NE)
3648*404b540aSrobert       && op1 == const0_rtx
3649*404b540aSrobert       && op0code == XOR)
3650*404b540aSrobert     return simplify_gen_relational (code, mode, cmp_mode,
3651*404b540aSrobert 				    XEXP (op0, 0), XEXP (op0, 1));
3652*404b540aSrobert 
3653*404b540aSrobert   /* (eq/ne (xor x y) x) simplifies to (eq/ne y 0).  */
3654*404b540aSrobert   if ((code == EQ || code == NE)
3655*404b540aSrobert       && op0code == XOR
3656*404b540aSrobert       && rtx_equal_p (XEXP (op0, 0), op1)
3657*404b540aSrobert       && !side_effects_p (XEXP (op0, 0)))
3658*404b540aSrobert     return simplify_gen_relational (code, mode, cmp_mode,
3659*404b540aSrobert 				    XEXP (op0, 1), const0_rtx);
3660*404b540aSrobert 
3661*404b540aSrobert   /* Likewise (eq/ne (xor x y) y) simplifies to (eq/ne x 0).  */
3662*404b540aSrobert   if ((code == EQ || code == NE)
3663*404b540aSrobert       && op0code == XOR
3664*404b540aSrobert       && rtx_equal_p (XEXP (op0, 1), op1)
3665*404b540aSrobert       && !side_effects_p (XEXP (op0, 1)))
3666*404b540aSrobert     return simplify_gen_relational (code, mode, cmp_mode,
3667*404b540aSrobert 				    XEXP (op0, 0), const0_rtx);
3668*404b540aSrobert 
3669*404b540aSrobert   /* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)).  */
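  /* For instance, (eq (xor x (const_int 5)) (const_int 3)) becomes
     (eq x (const_int 6)), since 5 ^ 3 == 6.  */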
3670*404b540aSrobert   if ((code == EQ || code == NE)
3671*404b540aSrobert       && op0code == XOR
3672*404b540aSrobert       && (GET_CODE (op1) == CONST_INT
3673*404b540aSrobert 	  || GET_CODE (op1) == CONST_DOUBLE)
3674*404b540aSrobert       && (GET_CODE (XEXP (op0, 1)) == CONST_INT
3675*404b540aSrobert 	  || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE))
3676*404b540aSrobert     return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
3677*404b540aSrobert 				    simplify_gen_binary (XOR, cmp_mode,
3678*404b540aSrobert 							 XEXP (op0, 1), op1));
3679*404b540aSrobert 
3680*404b540aSrobert   return NULL_RTX;
3681*404b540aSrobert }
3682*404b540aSrobert 
3683*404b540aSrobert /* Check if the given comparison (done in the given MODE) is actually a
3684*404b540aSrobert    tautology or a contradiction.
3685*404b540aSrobert    If no simplification is possible, this function returns zero.
3686*404b540aSrobert    Otherwise, it returns either const_true_rtx or const0_rtx.  */
3687*404b540aSrobert 
3688*404b540aSrobert rtx
3689*404b540aSrobert simplify_const_relational_operation (enum rtx_code code,
3690*404b540aSrobert 				     enum machine_mode mode,
3691*404b540aSrobert 				     rtx op0, rtx op1)
3692*404b540aSrobert {
3693*404b540aSrobert   int equal, op0lt, op0ltu, op1lt, op1ltu;
3694*404b540aSrobert   rtx tem;
3695*404b540aSrobert   rtx trueop0;
3696*404b540aSrobert   rtx trueop1;
3697*404b540aSrobert 
3698*404b540aSrobert   gcc_assert (mode != VOIDmode
3699*404b540aSrobert 	      || (GET_MODE (op0) == VOIDmode
3700*404b540aSrobert 		  && GET_MODE (op1) == VOIDmode));
3701*404b540aSrobert 
3702*404b540aSrobert   /* If op0 is a compare, extract the comparison arguments from it.  */
3703*404b540aSrobert   if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
3704*404b540aSrobert     {
3705*404b540aSrobert       op1 = XEXP (op0, 1);
3706*404b540aSrobert       op0 = XEXP (op0, 0);
3707*404b540aSrobert 
3708*404b540aSrobert       if (GET_MODE (op0) != VOIDmode)
3709*404b540aSrobert 	mode = GET_MODE (op0);
3710*404b540aSrobert       else if (GET_MODE (op1) != VOIDmode)
3711*404b540aSrobert 	mode = GET_MODE (op1);
3712*404b540aSrobert       else
3713*404b540aSrobert 	return 0;
3714*404b540aSrobert     }
3715*404b540aSrobert 
3716*404b540aSrobert   /* We can't simplify MODE_CC values since we don't know what the
3717*404b540aSrobert      actual comparison is.  */
3718*404b540aSrobert   if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC || CC0_P (op0))
3719*404b540aSrobert     return 0;
3720*404b540aSrobert 
3721*404b540aSrobert   /* Make sure the constant is second.  */
3722*404b540aSrobert   if (swap_commutative_operands_p (op0, op1))
3723*404b540aSrobert     {
3724*404b540aSrobert       tem = op0, op0 = op1, op1 = tem;
3725*404b540aSrobert       code = swap_condition (code);
3726*404b540aSrobert     }
3727*404b540aSrobert 
3728*404b540aSrobert   trueop0 = avoid_constant_pool_reference (op0);
3729*404b540aSrobert   trueop1 = avoid_constant_pool_reference (op1);
3730*404b540aSrobert 
3731*404b540aSrobert   /* For integer comparisons of A and B maybe we can simplify A - B and can
3732*404b540aSrobert      then simplify a comparison of that with zero.  If A and B are both either
3733*404b540aSrobert      a register or a CONST_INT, this can't help; testing for these cases will
3734*404b540aSrobert      prevent infinite recursion here and speed things up.
3735*404b540aSrobert 
3736*404b540aSrobert      We can only do this for EQ and NE comparisons as otherwise we may
3737*404b540aSrobert      lose or introduce overflow which we cannot disregard as undefined as
3738*404b540aSrobert      we do not know the signedness of the operation on either the left or
3739*404b540aSrobert      the right hand side of the comparison.  */
3740*404b540aSrobert 
3741*404b540aSrobert   if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
3742*404b540aSrobert       && (code == EQ || code == NE)
3743*404b540aSrobert       && ! ((REG_P (op0) || GET_CODE (trueop0) == CONST_INT)
3744*404b540aSrobert 	    && (REG_P (op1) || GET_CODE (trueop1) == CONST_INT))
3745*404b540aSrobert       && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
3746*404b540aSrobert       /* We cannot do this if tem is a nonzero address.  */
3747*404b540aSrobert       && ! nonzero_address_p (tem))
3748*404b540aSrobert     return simplify_const_relational_operation (signed_condition (code),
3749*404b540aSrobert 						mode, tem, const0_rtx);
3750*404b540aSrobert 
3751*404b540aSrobert   if (! HONOR_NANS (mode) && code == ORDERED)
3752*404b540aSrobert     return const_true_rtx;
3753*404b540aSrobert 
3754*404b540aSrobert   if (! HONOR_NANS (mode) && code == UNORDERED)
3755*404b540aSrobert     return const0_rtx;
3756*404b540aSrobert 
3757*404b540aSrobert   /* For modes without NaNs, if the two operands are equal, we know the
3758*404b540aSrobert      result except if they have side-effects.  */
3759*404b540aSrobert   if (! HONOR_NANS (GET_MODE (trueop0))
3760*404b540aSrobert       && rtx_equal_p (trueop0, trueop1)
3761*404b540aSrobert       && ! side_effects_p (trueop0))
3762*404b540aSrobert     equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
3763*404b540aSrobert 
3764*404b540aSrobert   /* If the operands are floating-point constants, see if we can fold
3765*404b540aSrobert      the result.  */
3766*404b540aSrobert   else if (GET_CODE (trueop0) == CONST_DOUBLE
3767*404b540aSrobert 	   && GET_CODE (trueop1) == CONST_DOUBLE
3768*404b540aSrobert 	   && SCALAR_FLOAT_MODE_P (GET_MODE (trueop0)))
3769*404b540aSrobert     {
3770*404b540aSrobert       REAL_VALUE_TYPE d0, d1;
3771*404b540aSrobert 
3772*404b540aSrobert       REAL_VALUE_FROM_CONST_DOUBLE (d0, trueop0);
3773*404b540aSrobert       REAL_VALUE_FROM_CONST_DOUBLE (d1, trueop1);
3774*404b540aSrobert 
3775*404b540aSrobert       /* Comparisons are unordered iff at least one of the values is NaN.  */
3776*404b540aSrobert       if (REAL_VALUE_ISNAN (d0) || REAL_VALUE_ISNAN (d1))
3777*404b540aSrobert 	switch (code)
3778*404b540aSrobert 	  {
3779*404b540aSrobert 	  case UNEQ:
3780*404b540aSrobert 	  case UNLT:
3781*404b540aSrobert 	  case UNGT:
3782*404b540aSrobert 	  case UNLE:
3783*404b540aSrobert 	  case UNGE:
3784*404b540aSrobert 	  case NE:
3785*404b540aSrobert 	  case UNORDERED:
3786*404b540aSrobert 	    return const_true_rtx;
3787*404b540aSrobert 	  case EQ:
3788*404b540aSrobert 	  case LT:
3789*404b540aSrobert 	  case GT:
3790*404b540aSrobert 	  case LE:
3791*404b540aSrobert 	  case GE:
3792*404b540aSrobert 	  case LTGT:
3793*404b540aSrobert 	  case ORDERED:
3794*404b540aSrobert 	    return const0_rtx;
3795*404b540aSrobert 	  default:
3796*404b540aSrobert 	    return 0;
3797*404b540aSrobert 	  }
3798*404b540aSrobert 
3799*404b540aSrobert       equal = REAL_VALUES_EQUAL (d0, d1);
3800*404b540aSrobert       op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
3801*404b540aSrobert       op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
3802*404b540aSrobert     }
3803*404b540aSrobert 
3804*404b540aSrobert   /* Otherwise, see if the operands are both integers.  */
3805*404b540aSrobert   else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
3806*404b540aSrobert 	   && (GET_CODE (trueop0) == CONST_DOUBLE
3807*404b540aSrobert 	       || GET_CODE (trueop0) == CONST_INT)
3808*404b540aSrobert 	   && (GET_CODE (trueop1) == CONST_DOUBLE
3809*404b540aSrobert 	       || GET_CODE (trueop1) == CONST_INT))
3810*404b540aSrobert     {
3811*404b540aSrobert       int width = GET_MODE_BITSIZE (mode);
3812*404b540aSrobert       HOST_WIDE_INT l0s, h0s, l1s, h1s;
3813*404b540aSrobert       unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
3814*404b540aSrobert 
3815*404b540aSrobert       /* Get the two words comprising each integer constant.  */
3816*404b540aSrobert       if (GET_CODE (trueop0) == CONST_DOUBLE)
3817*404b540aSrobert 	{
3818*404b540aSrobert 	  l0u = l0s = CONST_DOUBLE_LOW (trueop0);
3819*404b540aSrobert 	  h0u = h0s = CONST_DOUBLE_HIGH (trueop0);
3820*404b540aSrobert 	}
3821*404b540aSrobert       else
3822*404b540aSrobert 	{
3823*404b540aSrobert 	  l0u = l0s = INTVAL (trueop0);
3824*404b540aSrobert 	  h0u = h0s = HWI_SIGN_EXTEND (l0s);
3825*404b540aSrobert 	}
3826*404b540aSrobert 
3827*404b540aSrobert       if (GET_CODE (trueop1) == CONST_DOUBLE)
3828*404b540aSrobert 	{
3829*404b540aSrobert 	  l1u = l1s = CONST_DOUBLE_LOW (trueop1);
3830*404b540aSrobert 	  h1u = h1s = CONST_DOUBLE_HIGH (trueop1);
3831*404b540aSrobert 	}
3832*404b540aSrobert       else
3833*404b540aSrobert 	{
3834*404b540aSrobert 	  l1u = l1s = INTVAL (trueop1);
3835*404b540aSrobert 	  h1u = h1s = HWI_SIGN_EXTEND (l1s);
3836*404b540aSrobert 	}
3837*404b540aSrobert 
3838*404b540aSrobert       /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
3839*404b540aSrobert 	 we have to sign or zero-extend the values.  */
3840*404b540aSrobert       if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
3841*404b540aSrobert 	{
3842*404b540aSrobert 	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
3843*404b540aSrobert 	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
3844*404b540aSrobert 
3845*404b540aSrobert 	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3846*404b540aSrobert 	    l0s |= ((HOST_WIDE_INT) (-1) << width);
3847*404b540aSrobert 
3848*404b540aSrobert 	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3849*404b540aSrobert 	    l1s |= ((HOST_WIDE_INT) (-1) << width);
3850*404b540aSrobert 	}
3851*404b540aSrobert       if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
3852*404b540aSrobert 	h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);
3853*404b540aSrobert 
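      /* Compare the constants as (high, low) word pairs: the high words
	 decide unless they are equal, in which case the low words (always
	 unsigned) decide; the signed orderings compare the high words as
	 signed, the unsigned orderings as unsigned.  */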
3854*404b540aSrobert       equal = (h0u == h1u && l0u == l1u);
3855*404b540aSrobert       op0lt = (h0s < h1s || (h0s == h1s && l0u < l1u));
3856*404b540aSrobert       op1lt = (h1s < h0s || (h1s == h0s && l1u < l0u));
3857*404b540aSrobert       op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
3858*404b540aSrobert       op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
3859*404b540aSrobert     }
3860*404b540aSrobert 
3861*404b540aSrobert   /* Otherwise, there are some code-specific tests we can make.  */
3862*404b540aSrobert   else
3863*404b540aSrobert     {
3864*404b540aSrobert       /* Optimize comparisons with upper and lower bounds.  */
3865*404b540aSrobert       if (SCALAR_INT_MODE_P (mode)
3866*404b540aSrobert 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3867*404b540aSrobert 	{
3868*404b540aSrobert 	  rtx mmin, mmax;
3869*404b540aSrobert 	  int sign;
3870*404b540aSrobert 
3871*404b540aSrobert 	  if (code == GEU
3872*404b540aSrobert 	      || code == LEU
3873*404b540aSrobert 	      || code == GTU
3874*404b540aSrobert 	      || code == LTU)
3875*404b540aSrobert 	    sign = 0;
3876*404b540aSrobert 	  else
3877*404b540aSrobert 	    sign = 1;
3878*404b540aSrobert 
3879*404b540aSrobert 	  get_mode_bounds (mode, sign, mode, &mmin, &mmax);
3880*404b540aSrobert 
3881*404b540aSrobert 	  tem = NULL_RTX;
3882*404b540aSrobert 	  switch (code)
3883*404b540aSrobert 	    {
3884*404b540aSrobert 	    case GEU:
3885*404b540aSrobert 	    case GE:
3886*404b540aSrobert 	      /* x >= min is always true.  */
3887*404b540aSrobert 	      if (rtx_equal_p (trueop1, mmin))
3888*404b540aSrobert 		tem = const_true_rtx;
3890*404b540aSrobert 	      break;
3891*404b540aSrobert 
3892*404b540aSrobert 	    case LEU:
3893*404b540aSrobert 	    case LE:
3894*404b540aSrobert 	      /* x <= max is always true.  */
3895*404b540aSrobert 	      if (rtx_equal_p (trueop1, mmax))
3896*404b540aSrobert 		tem = const_true_rtx;
3897*404b540aSrobert 	      break;
3898*404b540aSrobert 
3899*404b540aSrobert 	    case GTU:
3900*404b540aSrobert 	    case GT:
3901*404b540aSrobert 	      /* x > max is always false.  */
3902*404b540aSrobert 	      if (rtx_equal_p (trueop1, mmax))
3903*404b540aSrobert 		tem = const0_rtx;
3904*404b540aSrobert 	      break;
3905*404b540aSrobert 
3906*404b540aSrobert 	    case LTU:
3907*404b540aSrobert 	    case LT:
3908*404b540aSrobert 	      /* x < min is always false.  */
3909*404b540aSrobert 	      if (rtx_equal_p (trueop1, mmin))
3910*404b540aSrobert 		tem = const0_rtx;
3911*404b540aSrobert 	      break;
3912*404b540aSrobert 
3913*404b540aSrobert 	    default:
3914*404b540aSrobert 	      break;
3915*404b540aSrobert 	    }
3916*404b540aSrobert 	  if (tem == const0_rtx
3917*404b540aSrobert 	      || tem == const_true_rtx)
3918*404b540aSrobert 	    return tem;
3919*404b540aSrobert 	}
3920*404b540aSrobert 
3921*404b540aSrobert       switch (code)
3922*404b540aSrobert 	{
3923*404b540aSrobert 	case EQ:
3924*404b540aSrobert 	  if (trueop1 == const0_rtx && nonzero_address_p (op0))
3925*404b540aSrobert 	    return const0_rtx;
3926*404b540aSrobert 	  break;
3927*404b540aSrobert 
3928*404b540aSrobert 	case NE:
3929*404b540aSrobert 	  if (trueop1 == const0_rtx && nonzero_address_p (op0))
3930*404b540aSrobert 	    return const_true_rtx;
3931*404b540aSrobert 	  break;
3932*404b540aSrobert 
3933*404b540aSrobert 	case LT:
3934*404b540aSrobert 	  /* Optimize abs(x) < 0.0.  */
3935*404b540aSrobert 	  if (trueop1 == CONST0_RTX (mode)
3936*404b540aSrobert 	      && !HONOR_SNANS (mode)
3937*404b540aSrobert 	      && (!INTEGRAL_MODE_P (mode)
3938*404b540aSrobert 		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
3939*404b540aSrobert 	    {
3940*404b540aSrobert 	      tem = GET_CODE (trueop0) == FLOAT_EXTEND ? XEXP (trueop0, 0)
3941*404b540aSrobert 						       : trueop0;
3942*404b540aSrobert 	      if (GET_CODE (tem) == ABS)
3943*404b540aSrobert 		{
3944*404b540aSrobert 		  if (INTEGRAL_MODE_P (mode)
3945*404b540aSrobert 		      && (issue_strict_overflow_warning
3946*404b540aSrobert 			  (WARN_STRICT_OVERFLOW_CONDITIONAL)))
3947*404b540aSrobert 		    warning (OPT_Wstrict_overflow,
3948*404b540aSrobert 			     ("assuming signed overflow does not occur when "
3949*404b540aSrobert 			      "assuming abs (x) < 0 is false"));
3950*404b540aSrobert 		  return const0_rtx;
3951*404b540aSrobert 		}
3952*404b540aSrobert 	    }
3953*404b540aSrobert 	  break;
3954*404b540aSrobert 
3955*404b540aSrobert 	case GE:
3956*404b540aSrobert 	  /* Optimize abs(x) >= 0.0.  */
3957*404b540aSrobert 	  if (trueop1 == CONST0_RTX (mode)
3958*404b540aSrobert 	      && !HONOR_NANS (mode)
3959*404b540aSrobert 	      && (!INTEGRAL_MODE_P (mode)
3960*404b540aSrobert 		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
3961*404b540aSrobert 	    {
3962*404b540aSrobert 	      tem = GET_CODE (trueop0) == FLOAT_EXTEND ? XEXP (trueop0, 0)
3963*404b540aSrobert 						       : trueop0;
3964*404b540aSrobert 	      if (GET_CODE (tem) == ABS)
3965*404b540aSrobert 		{
3966*404b540aSrobert 		  if (INTEGRAL_MODE_P (mode)
3967*404b540aSrobert 		      && (issue_strict_overflow_warning
3968*404b540aSrobert 			  (WARN_STRICT_OVERFLOW_CONDITIONAL)))
3969*404b540aSrobert 		    warning (OPT_Wstrict_overflow,
3970*404b540aSrobert 			     ("assuming signed overflow does not occur when "
3971*404b540aSrobert 			      "assuming abs (x) >= 0 is true"));
3972*404b540aSrobert 		  return const_true_rtx;
3973*404b540aSrobert 		}
3974*404b540aSrobert 	    }
3975*404b540aSrobert 	  break;
3976*404b540aSrobert 
3977*404b540aSrobert 	case UNGE:
3978*404b540aSrobert 	  /* Optimize ! (abs(x) < 0.0).  */
3979*404b540aSrobert 	  if (trueop1 == CONST0_RTX (mode))
3980*404b540aSrobert 	    {
3981*404b540aSrobert 	      tem = GET_CODE (trueop0) == FLOAT_EXTEND ? XEXP (trueop0, 0)
3982*404b540aSrobert 						       : trueop0;
3983*404b540aSrobert 	      if (GET_CODE (tem) == ABS)
3984*404b540aSrobert 		return const_true_rtx;
3985*404b540aSrobert 	    }
3986*404b540aSrobert 	  break;
3987*404b540aSrobert 
3988*404b540aSrobert 	default:
3989*404b540aSrobert 	  break;
3990*404b540aSrobert 	}
3991*404b540aSrobert 
3992*404b540aSrobert       return 0;
3993*404b540aSrobert     }
3994*404b540aSrobert 
3995*404b540aSrobert   /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
3996*404b540aSrobert      as appropriate.  */
3997*404b540aSrobert   switch (code)
3998*404b540aSrobert     {
3999*404b540aSrobert     case EQ:
4000*404b540aSrobert     case UNEQ:
4001*404b540aSrobert       return equal ? const_true_rtx : const0_rtx;
4002*404b540aSrobert     case NE:
4003*404b540aSrobert     case LTGT:
4004*404b540aSrobert       return ! equal ? const_true_rtx : const0_rtx;
4005*404b540aSrobert     case LT:
4006*404b540aSrobert     case UNLT:
4007*404b540aSrobert       return op0lt ? const_true_rtx : const0_rtx;
4008*404b540aSrobert     case GT:
4009*404b540aSrobert     case UNGT:
4010*404b540aSrobert       return op1lt ? const_true_rtx : const0_rtx;
4011*404b540aSrobert     case LTU:
4012*404b540aSrobert       return op0ltu ? const_true_rtx : const0_rtx;
4013*404b540aSrobert     case GTU:
4014*404b540aSrobert       return op1ltu ? const_true_rtx : const0_rtx;
4015*404b540aSrobert     case LE:
4016*404b540aSrobert     case UNLE:
4017*404b540aSrobert       return equal || op0lt ? const_true_rtx : const0_rtx;
4018*404b540aSrobert     case GE:
4019*404b540aSrobert     case UNGE:
4020*404b540aSrobert       return equal || op1lt ? const_true_rtx : const0_rtx;
4021*404b540aSrobert     case LEU:
4022*404b540aSrobert       return equal || op0ltu ? const_true_rtx : const0_rtx;
4023*404b540aSrobert     case GEU:
4024*404b540aSrobert       return equal || op1ltu ? const_true_rtx : const0_rtx;
4025*404b540aSrobert     case ORDERED:
4026*404b540aSrobert       return const_true_rtx;
4027*404b540aSrobert     case UNORDERED:
4028*404b540aSrobert       return const0_rtx;
4029*404b540aSrobert     default:
4030*404b540aSrobert       gcc_unreachable ();
4031*404b540aSrobert     }
4032*404b540aSrobert }
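
#if 0
/* Illustrative only: a minimal usage sketch, not part of the original
   sources.  A comparison of two integer constants folds directly to
   const_true_rtx or const0_rtx.  */
static void
const_relational_example (void)
{
  rtx t = simplify_const_relational_operation (LTU, SImode,
					       GEN_INT (1), GEN_INT (2));
  /* t == const_true_rtx: 1 < 2 in the unsigned ordering as well.  */
  (void) t;
}
#endif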
4033*404b540aSrobert 
4034*404b540aSrobert /* Simplify CODE, an operation with result mode MODE and three operands,
4035*404b540aSrobert    OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
4036*404b540aSrobert    a constant.  Return 0 if no simplification is possible.  */
4037*404b540aSrobert 
4038*404b540aSrobert rtx
4039*404b540aSrobert simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
4040*404b540aSrobert 			    enum machine_mode op0_mode, rtx op0, rtx op1,
4041*404b540aSrobert 			    rtx op2)
4042*404b540aSrobert {
4043*404b540aSrobert   unsigned int width = GET_MODE_BITSIZE (mode);
4044*404b540aSrobert 
4045*404b540aSrobert   /* VOIDmode means "infinite" precision.  */
4046*404b540aSrobert   if (width == 0)
4047*404b540aSrobert     width = HOST_BITS_PER_WIDE_INT;
4048*404b540aSrobert 
4049*404b540aSrobert   switch (code)
4050*404b540aSrobert     {
4051*404b540aSrobert     case SIGN_EXTRACT:
4052*404b540aSrobert     case ZERO_EXTRACT:
4053*404b540aSrobert       if (GET_CODE (op0) == CONST_INT
4054*404b540aSrobert 	  && GET_CODE (op1) == CONST_INT
4055*404b540aSrobert 	  && GET_CODE (op2) == CONST_INT
4056*404b540aSrobert 	  && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
4057*404b540aSrobert 	  && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
4058*404b540aSrobert 	{
4059*404b540aSrobert 	  /* Extracting a bit-field from a constant.  */
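	  /* E.g. with BITS_BIG_ENDIAN == 0, (zero_extract:SI (const_int 0x34)
	     (const_int 4) (const_int 2)) extracts bits 2..5 of 0b110100,
	     giving (const_int 13).  */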
4060*404b540aSrobert 	  HOST_WIDE_INT val = INTVAL (op0);
4061*404b540aSrobert 
4062*404b540aSrobert 	  if (BITS_BIG_ENDIAN)
4063*404b540aSrobert 	    val >>= (GET_MODE_BITSIZE (op0_mode)
4064*404b540aSrobert 		     - INTVAL (op2) - INTVAL (op1));
4065*404b540aSrobert 	  else
4066*404b540aSrobert 	    val >>= INTVAL (op2);
4067*404b540aSrobert 
4068*404b540aSrobert 	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4069*404b540aSrobert 	    {
4070*404b540aSrobert 	      /* First zero-extend.  */
4071*404b540aSrobert 	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4072*404b540aSrobert 	      /* If desired, propagate sign bit.  */
4073*404b540aSrobert 	      if (code == SIGN_EXTRACT
4074*404b540aSrobert 		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4075*404b540aSrobert 		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4076*404b540aSrobert 	    }
4077*404b540aSrobert 
4078*404b540aSrobert 	  /* Clear the bits that don't belong in our mode,
4079*404b540aSrobert 	     unless they and our sign bit are all one.
4080*404b540aSrobert 	     So we get either a reasonable negative value or a reasonable
4081*404b540aSrobert 	     unsigned value for this mode.  */
4082*404b540aSrobert 	  if (width < HOST_BITS_PER_WIDE_INT
4083*404b540aSrobert 	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4084*404b540aSrobert 		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4085*404b540aSrobert 	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4086*404b540aSrobert 
4087*404b540aSrobert 	  return gen_int_mode (val, mode);
4088*404b540aSrobert 	}
4089*404b540aSrobert       break;
4090*404b540aSrobert 
4091*404b540aSrobert     case IF_THEN_ELSE:
4092*404b540aSrobert       if (GET_CODE (op0) == CONST_INT)
4093*404b540aSrobert 	return op0 != const0_rtx ? op1 : op2;
4094*404b540aSrobert 
4095*404b540aSrobert       /* Convert c ? a : a into "a".  */
4096*404b540aSrobert       if (rtx_equal_p (op1, op2) && ! side_effects_p (op0))
4097*404b540aSrobert 	return op1;
4098*404b540aSrobert 
4099*404b540aSrobert       /* Convert a != b ? a : b into "a".  */
4100*404b540aSrobert       if (GET_CODE (op0) == NE
4101*404b540aSrobert 	  && ! side_effects_p (op0)
4102*404b540aSrobert 	  && ! HONOR_NANS (mode)
4103*404b540aSrobert 	  && ! HONOR_SIGNED_ZEROS (mode)
4104*404b540aSrobert 	  && ((rtx_equal_p (XEXP (op0, 0), op1)
4105*404b540aSrobert 	       && rtx_equal_p (XEXP (op0, 1), op2))
4106*404b540aSrobert 	      || (rtx_equal_p (XEXP (op0, 0), op2)
4107*404b540aSrobert 		  && rtx_equal_p (XEXP (op0, 1), op1))))
4108*404b540aSrobert 	return op1;
4109*404b540aSrobert 
4110*404b540aSrobert       /* Convert a == b ? a : b into "b".  */
4111*404b540aSrobert       if (GET_CODE (op0) == EQ
4112*404b540aSrobert 	  && ! side_effects_p (op0)
4113*404b540aSrobert 	  && ! HONOR_NANS (mode)
4114*404b540aSrobert 	  && ! HONOR_SIGNED_ZEROS (mode)
4115*404b540aSrobert 	  && ((rtx_equal_p (XEXP (op0, 0), op1)
4116*404b540aSrobert 	       && rtx_equal_p (XEXP (op0, 1), op2))
4117*404b540aSrobert 	      || (rtx_equal_p (XEXP (op0, 0), op2)
4118*404b540aSrobert 		  && rtx_equal_p (XEXP (op0, 1), op1))))
4119*404b540aSrobert 	return op2;
4120*404b540aSrobert 
4121*404b540aSrobert       if (COMPARISON_P (op0) && ! side_effects_p (op0))
4122*404b540aSrobert 	{
4123*404b540aSrobert 	  enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
4124*404b540aSrobert 					? GET_MODE (XEXP (op0, 1))
4125*404b540aSrobert 					: GET_MODE (XEXP (op0, 0)));
4126*404b540aSrobert 	  rtx temp;
4127*404b540aSrobert 
4128*404b540aSrobert 	  /* Look for happy constants in op1 and op2.  */
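	  /* E.g. with STORE_FLAG_VALUE == 1, (if_then_else (lt a b)
	     (const_int 1) (const_int 0)) reduces to (lt a b), while
	     (if_then_else (lt a b) (const_int 0) (const_int 1)) becomes
	     the reversed comparison (ge a b).  */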
4129*404b540aSrobert 	  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4130*404b540aSrobert 	    {
4131*404b540aSrobert 	      HOST_WIDE_INT t = INTVAL (op1);
4132*404b540aSrobert 	      HOST_WIDE_INT f = INTVAL (op2);
4133*404b540aSrobert 
4134*404b540aSrobert 	      if (t == STORE_FLAG_VALUE && f == 0)
4135*404b540aSrobert 	        code = GET_CODE (op0);
4136*404b540aSrobert 	      else if (t == 0 && f == STORE_FLAG_VALUE)
4137*404b540aSrobert 		{
4138*404b540aSrobert 		  enum rtx_code tmp;
4139*404b540aSrobert 		  tmp = reversed_comparison_code (op0, NULL_RTX);
4140*404b540aSrobert 		  if (tmp == UNKNOWN)
4141*404b540aSrobert 		    break;
4142*404b540aSrobert 		  code = tmp;
4143*404b540aSrobert 		}
4144*404b540aSrobert 	      else
4145*404b540aSrobert 		break;
4146*404b540aSrobert 
4147*404b540aSrobert 	      return simplify_gen_relational (code, mode, cmp_mode,
4148*404b540aSrobert 					      XEXP (op0, 0), XEXP (op0, 1));
4149*404b540aSrobert 	    }
4150*404b540aSrobert 
4151*404b540aSrobert 	  if (cmp_mode == VOIDmode)
4152*404b540aSrobert 	    cmp_mode = op0_mode;
4153*404b540aSrobert 	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4154*404b540aSrobert 			  			cmp_mode, XEXP (op0, 0),
4155*404b540aSrobert 						XEXP (op0, 1));
4156*404b540aSrobert 
4157*404b540aSrobert 	  /* See if any simplifications were possible.  */
4158*404b540aSrobert 	  if (temp)
4159*404b540aSrobert 	    {
4160*404b540aSrobert 	      if (GET_CODE (temp) == CONST_INT)
4161*404b540aSrobert 		return temp == const0_rtx ? op2 : op1;
4162*404b540aSrobert 	      else if (temp)
4163*404b540aSrobert 	        return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
4164*404b540aSrobert 	    }
4165*404b540aSrobert 	}
4166*404b540aSrobert       break;
4167*404b540aSrobert 
4168*404b540aSrobert     case VEC_MERGE:
4169*404b540aSrobert       gcc_assert (GET_MODE (op0) == mode);
4170*404b540aSrobert       gcc_assert (GET_MODE (op1) == mode);
4171*404b540aSrobert       gcc_assert (VECTOR_MODE_P (mode));
4172*404b540aSrobert       op2 = avoid_constant_pool_reference (op2);
4173*404b540aSrobert       if (GET_CODE (op2) == CONST_INT)
4174*404b540aSrobert 	{
4175*404b540aSrobert           int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4176*404b540aSrobert 	  unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
4177*404b540aSrobert 	  int mask = (1 << n_elts) - 1;
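	  /* Bit I of op2 selects element I: a set bit takes it from op0,
	     a clear bit from op1.  E.g. for V4SI, op2 == 5 (binary 0101)
	     takes elements 0 and 2 from op0 and elements 1 and 3 from
	     op1.  */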
4178*404b540aSrobert 
4179*404b540aSrobert 	  if (!(INTVAL (op2) & mask))
4180*404b540aSrobert 	    return op1;
4181*404b540aSrobert 	  if ((INTVAL (op2) & mask) == mask)
4182*404b540aSrobert 	    return op0;
4183*404b540aSrobert 
4184*404b540aSrobert 	  op0 = avoid_constant_pool_reference (op0);
4185*404b540aSrobert 	  op1 = avoid_constant_pool_reference (op1);
4186*404b540aSrobert 	  if (GET_CODE (op0) == CONST_VECTOR
4187*404b540aSrobert 	      && GET_CODE (op1) == CONST_VECTOR)
4188*404b540aSrobert 	    {
4189*404b540aSrobert 	      rtvec v = rtvec_alloc (n_elts);
4190*404b540aSrobert 	      unsigned int i;
4191*404b540aSrobert 
4192*404b540aSrobert 	      for (i = 0; i < n_elts; i++)
4193*404b540aSrobert 		RTVEC_ELT (v, i) = (INTVAL (op2) & (1 << i)
4194*404b540aSrobert 				    ? CONST_VECTOR_ELT (op0, i)
4195*404b540aSrobert 				    : CONST_VECTOR_ELT (op1, i));
4196*404b540aSrobert 	      return gen_rtx_CONST_VECTOR (mode, v);
4197*404b540aSrobert 	    }
4198*404b540aSrobert 	}
4199*404b540aSrobert       break;
4200*404b540aSrobert 
4201*404b540aSrobert     default:
4202*404b540aSrobert       gcc_unreachable ();
4203*404b540aSrobert     }
4204*404b540aSrobert 
4205*404b540aSrobert   return 0;
4206*404b540aSrobert }
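
#if 0
/* Illustrative only: a minimal usage sketch, not part of the original
   sources.  An IF_THEN_ELSE whose condition is a CONST_INT selects one
   arm outright.  */
static rtx
ternary_example (rtx a, rtx b)
{
  /* (if_then_else (const_int 1) a b) simplifies to a.  */
  return simplify_ternary_operation (IF_THEN_ELSE, SImode, SImode,
				     const1_rtx, a, b);
}
#endif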
4207*404b540aSrobert 
4208*404b540aSrobert /* Evaluate a SUBREG of a CONST_INT or CONST_DOUBLE or CONST_VECTOR,
4209*404b540aSrobert    returning another CONST_INT or CONST_DOUBLE or CONST_VECTOR.
4210*404b540aSrobert 
4211*404b540aSrobert    Works by unpacking OP into a collection of 8-bit values
4212*404b540aSrobert    represented as a little-endian array of 'unsigned char', selecting by BYTE,
4213*404b540aSrobert    and then repacking them again for OUTERMODE.  */
4214*404b540aSrobert 
4215*404b540aSrobert static rtx
4216*404b540aSrobert simplify_immed_subreg (enum machine_mode outermode, rtx op,
4217*404b540aSrobert 		       enum machine_mode innermode, unsigned int byte)
4218*404b540aSrobert {
4219*404b540aSrobert   /* We support up to 512-bit values (for V8DFmode).  */
4220*404b540aSrobert   enum {
4221*404b540aSrobert     max_bitsize = 512,
4222*404b540aSrobert     value_bit = 8,
4223*404b540aSrobert     value_mask = (1 << value_bit) - 1
4224*404b540aSrobert   };
4225*404b540aSrobert   unsigned char value[max_bitsize / value_bit];
4226*404b540aSrobert   int value_start;
4227*404b540aSrobert   int i;
4228*404b540aSrobert   int elem;
4229*404b540aSrobert 
4230*404b540aSrobert   int num_elem;
4231*404b540aSrobert   rtx * elems;
4232*404b540aSrobert   int elem_bitsize;
4233*404b540aSrobert   rtx result_s;
4234*404b540aSrobert   rtvec result_v = NULL;
4235*404b540aSrobert   enum mode_class outer_class;
4236*404b540aSrobert   enum machine_mode outer_submode;
4237*404b540aSrobert 
4238*404b540aSrobert   /* Some ports misuse CCmode.  */
4239*404b540aSrobert   if (GET_MODE_CLASS (outermode) == MODE_CC && GET_CODE (op) == CONST_INT)
4240*404b540aSrobert     return op;
4241*404b540aSrobert 
4242*404b540aSrobert   /* We have no way to represent a complex constant at the rtl level.  */
4243*404b540aSrobert   if (COMPLEX_MODE_P (outermode))
4244*404b540aSrobert     return NULL_RTX;
4245*404b540aSrobert 
4246*404b540aSrobert   /* Unpack the value.  */
4247*404b540aSrobert 
4248*404b540aSrobert   if (GET_CODE (op) == CONST_VECTOR)
4249*404b540aSrobert     {
4250*404b540aSrobert       num_elem = CONST_VECTOR_NUNITS (op);
4251*404b540aSrobert       elems = &CONST_VECTOR_ELT (op, 0);
4252*404b540aSrobert       elem_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (innermode));
4253*404b540aSrobert     }
4254*404b540aSrobert   else
4255*404b540aSrobert     {
4256*404b540aSrobert       num_elem = 1;
4257*404b540aSrobert       elems = &op;
4258*404b540aSrobert       elem_bitsize = max_bitsize;
4259*404b540aSrobert     }
4260*404b540aSrobert   /* If this asserts, it is too complicated; reducing value_bit may help.  */
4261*404b540aSrobert   gcc_assert (BITS_PER_UNIT % value_bit == 0);
4262*404b540aSrobert   /* I don't know how to handle endianness of sub-units.  */
4263*404b540aSrobert   gcc_assert (elem_bitsize % BITS_PER_UNIT == 0);
4264*404b540aSrobert 
4265*404b540aSrobert   for (elem = 0; elem < num_elem; elem++)
4266*404b540aSrobert     {
4267*404b540aSrobert       unsigned char * vp;
4268*404b540aSrobert       rtx el = elems[elem];
4269*404b540aSrobert 
4270*404b540aSrobert       /* Vectors are kept in target memory order.  (This is probably
4271*404b540aSrobert 	 a mistake.)  */
4272*404b540aSrobert       {
4273*404b540aSrobert 	unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
4274*404b540aSrobert 	unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
4275*404b540aSrobert 			  / BITS_PER_UNIT);
4276*404b540aSrobert 	unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
4277*404b540aSrobert 	unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
4278*404b540aSrobert 	unsigned bytele = (subword_byte % UNITS_PER_WORD
4279*404b540aSrobert 			 + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
4280*404b540aSrobert 	vp = value + (bytele * BITS_PER_UNIT) / value_bit;
4281*404b540aSrobert       }
4282*404b540aSrobert 
4283*404b540aSrobert       switch (GET_CODE (el))
4284*404b540aSrobert 	{
4285*404b540aSrobert 	case CONST_INT:
4286*404b540aSrobert 	  for (i = 0;
4287*404b540aSrobert 	       i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
4288*404b540aSrobert 	       i += value_bit)
4289*404b540aSrobert 	    *vp++ = INTVAL (el) >> i;
4290*404b540aSrobert 	  /* CONST_INTs are always logically sign-extended.  */
4291*404b540aSrobert 	  for (; i < elem_bitsize; i += value_bit)
4292*404b540aSrobert 	    *vp++ = INTVAL (el) < 0 ? -1 : 0;
4293*404b540aSrobert 	  break;
4294*404b540aSrobert 
4295*404b540aSrobert 	case CONST_DOUBLE:
4296*404b540aSrobert 	  if (GET_MODE (el) == VOIDmode)
4297*404b540aSrobert 	    {
4298*404b540aSrobert 	      /* If this triggers, someone should have generated a
4299*404b540aSrobert 		 CONST_INT instead.  */
4300*404b540aSrobert 	      gcc_assert (elem_bitsize > HOST_BITS_PER_WIDE_INT);
4301*404b540aSrobert 
4302*404b540aSrobert 	      for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit)
4303*404b540aSrobert 		*vp++ = CONST_DOUBLE_LOW (el) >> i;
4304*404b540aSrobert 	      while (i < HOST_BITS_PER_WIDE_INT * 2 && i < elem_bitsize)
4305*404b540aSrobert 		{
4306*404b540aSrobert 		  *vp++
4307*404b540aSrobert 		    = CONST_DOUBLE_HIGH (el) >> (i - HOST_BITS_PER_WIDE_INT);
4308*404b540aSrobert 		  i += value_bit;
4309*404b540aSrobert 		}
4310*404b540aSrobert 	      /* It shouldn't matter what's done here, so fill it with
4311*404b540aSrobert 		 zero.  */
4312*404b540aSrobert 	      for (; i < elem_bitsize; i += value_bit)
4313*404b540aSrobert 		*vp++ = 0;
4314*404b540aSrobert 	    }
4315*404b540aSrobert 	  else
4316*404b540aSrobert 	    {
4317*404b540aSrobert 	      long tmp[max_bitsize / 32];
4318*404b540aSrobert 	      int bitsize = GET_MODE_BITSIZE (GET_MODE (el));
4319*404b540aSrobert 
4320*404b540aSrobert 	      gcc_assert (SCALAR_FLOAT_MODE_P (GET_MODE (el)));
4321*404b540aSrobert 	      gcc_assert (bitsize <= elem_bitsize);
4322*404b540aSrobert 	      gcc_assert (bitsize % value_bit == 0);
4323*404b540aSrobert 
4324*404b540aSrobert 	      real_to_target (tmp, CONST_DOUBLE_REAL_VALUE (el),
4325*404b540aSrobert 			      GET_MODE (el));
4326*404b540aSrobert 
4327*404b540aSrobert 	      /* real_to_target produces its result in words affected by
4328*404b540aSrobert 		 FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
4329*404b540aSrobert 		 and use WORDS_BIG_ENDIAN instead; see the documentation
4330*404b540aSrobert 	         of SUBREG in rtl.texi.  */
4331*404b540aSrobert 	      for (i = 0; i < bitsize; i += value_bit)
4332*404b540aSrobert 		{
4333*404b540aSrobert 		  int ibase;
4334*404b540aSrobert 		  if (WORDS_BIG_ENDIAN)
4335*404b540aSrobert 		    ibase = bitsize - 1 - i;
4336*404b540aSrobert 		  else
4337*404b540aSrobert 		    ibase = i;
4338*404b540aSrobert 		  *vp++ = tmp[ibase / 32] >> i % 32;
4339*404b540aSrobert 		}
4340*404b540aSrobert 
4341*404b540aSrobert 	      /* It shouldn't matter what's done here, so fill it with
4342*404b540aSrobert 		 zero.  */
4343*404b540aSrobert 	      for (; i < elem_bitsize; i += value_bit)
4344*404b540aSrobert 		*vp++ = 0;
4345*404b540aSrobert 	    }
4346*404b540aSrobert 	  break;
4347*404b540aSrobert 
4348*404b540aSrobert 	default:
4349*404b540aSrobert 	  gcc_unreachable ();
4350*404b540aSrobert 	}
4351*404b540aSrobert     }
4352*404b540aSrobert 
4353*404b540aSrobert   /* Now, pick the right byte to start with.  */
4354*404b540aSrobert   /* Renumber BYTE so that the least-significant byte is byte 0.  A special
4355*404b540aSrobert      case is paradoxical SUBREGs, which shouldn't be adjusted since they
4356*404b540aSrobert      will already have offset 0.  */
4357*404b540aSrobert   if (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode))
4358*404b540aSrobert     {
4359*404b540aSrobert       unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)
4360*404b540aSrobert 			- byte);
4361*404b540aSrobert       unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
4362*404b540aSrobert       unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
4363*404b540aSrobert       byte = (subword_byte % UNITS_PER_WORD
4364*404b540aSrobert 	      + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
4365*404b540aSrobert     }
4366*404b540aSrobert 
4367*404b540aSrobert   /* BYTE should still be inside OP.  (Note that BYTE is unsigned,
4368*404b540aSrobert      so if it's become negative it will instead be very large.)  */
4369*404b540aSrobert   gcc_assert (byte < GET_MODE_SIZE (innermode));
4370*404b540aSrobert 
4371*404b540aSrobert   /* Convert from bytes to chunks of size value_bit.  */
4372*404b540aSrobert   value_start = byte * (BITS_PER_UNIT / value_bit);
4373*404b540aSrobert 
4374*404b540aSrobert   /* Re-pack the value.  */
4375*404b540aSrobert 
4376*404b540aSrobert   if (VECTOR_MODE_P (outermode))
4377*404b540aSrobert     {
4378*404b540aSrobert       num_elem = GET_MODE_NUNITS (outermode);
4379*404b540aSrobert       result_v = rtvec_alloc (num_elem);
4380*404b540aSrobert       elems = &RTVEC_ELT (result_v, 0);
4381*404b540aSrobert       outer_submode = GET_MODE_INNER (outermode);
4382*404b540aSrobert     }
4383*404b540aSrobert   else
4384*404b540aSrobert     {
4385*404b540aSrobert       num_elem = 1;
4386*404b540aSrobert       elems = &result_s;
4387*404b540aSrobert       outer_submode = outermode;
4388*404b540aSrobert     }
4389*404b540aSrobert 
4390*404b540aSrobert   outer_class = GET_MODE_CLASS (outer_submode);
4391*404b540aSrobert   elem_bitsize = GET_MODE_BITSIZE (outer_submode);
4392*404b540aSrobert 
4393*404b540aSrobert   gcc_assert (elem_bitsize % value_bit == 0);
4394*404b540aSrobert   gcc_assert (elem_bitsize + value_start * value_bit <= max_bitsize);
4395*404b540aSrobert 
4396*404b540aSrobert   for (elem = 0; elem < num_elem; elem++)
4397*404b540aSrobert     {
4398*404b540aSrobert       unsigned char *vp;
4399*404b540aSrobert 
4400*404b540aSrobert       /* Vectors are stored in target memory order.  (This is probably
4401*404b540aSrobert 	 a mistake.)  */
4402*404b540aSrobert       {
4403*404b540aSrobert 	unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
4404*404b540aSrobert 	unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
4405*404b540aSrobert 			  / BITS_PER_UNIT);
4406*404b540aSrobert 	unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
4407*404b540aSrobert 	unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
4408*404b540aSrobert 	unsigned bytele = (subword_byte % UNITS_PER_WORD
4409*404b540aSrobert 			 + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
4410*404b540aSrobert 	vp = value + value_start + (bytele * BITS_PER_UNIT) / value_bit;
4411*404b540aSrobert       }
4412*404b540aSrobert 
4413*404b540aSrobert       switch (outer_class)
4414*404b540aSrobert 	{
4415*404b540aSrobert 	case MODE_INT:
4416*404b540aSrobert 	case MODE_PARTIAL_INT:
4417*404b540aSrobert 	  {
4418*404b540aSrobert 	    unsigned HOST_WIDE_INT hi = 0, lo = 0;
4419*404b540aSrobert 
4420*404b540aSrobert 	    for (i = 0;
4421*404b540aSrobert 		 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
4422*404b540aSrobert 		 i += value_bit)
4423*404b540aSrobert 	      lo |= (HOST_WIDE_INT)(*vp++ & value_mask) << i;
4424*404b540aSrobert 	    for (; i < elem_bitsize; i += value_bit)
4425*404b540aSrobert 	      hi |= ((HOST_WIDE_INT)(*vp++ & value_mask)
4426*404b540aSrobert 		     << (i - HOST_BITS_PER_WIDE_INT));
4427*404b540aSrobert 
4428*404b540aSrobert 	    /* immed_double_const doesn't call trunc_int_for_mode.  I don't
4429*404b540aSrobert 	       know why.  */
4430*404b540aSrobert 	    if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
4431*404b540aSrobert 	      elems[elem] = gen_int_mode (lo, outer_submode);
4432*404b540aSrobert 	    else if (elem_bitsize <= 2 * HOST_BITS_PER_WIDE_INT)
4433*404b540aSrobert 	      elems[elem] = immed_double_const (lo, hi, outer_submode);
4434*404b540aSrobert 	    else
4435*404b540aSrobert 	      return NULL_RTX;
4436*404b540aSrobert 	  }
4437*404b540aSrobert 	  break;
4438*404b540aSrobert 
4439*404b540aSrobert 	case MODE_FLOAT:
4440*404b540aSrobert 	case MODE_DECIMAL_FLOAT:
4441*404b540aSrobert 	  {
4442*404b540aSrobert 	    REAL_VALUE_TYPE r;
4443*404b540aSrobert 	    long tmp[max_bitsize / 32];
4444*404b540aSrobert 
4445*404b540aSrobert 	    /* real_from_target wants its input in words affected by
4446*404b540aSrobert 	       FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
4447*404b540aSrobert 	       and use WORDS_BIG_ENDIAN instead; see the documentation
4448*404b540aSrobert 	       of SUBREG in rtl.texi.  */
4449*404b540aSrobert 	    for (i = 0; i < max_bitsize / 32; i++)
4450*404b540aSrobert 	      tmp[i] = 0;
4451*404b540aSrobert 	    for (i = 0; i < elem_bitsize; i += value_bit)
4452*404b540aSrobert 	      {
4453*404b540aSrobert 		int ibase;
4454*404b540aSrobert 		if (WORDS_BIG_ENDIAN)
4455*404b540aSrobert 		  ibase = elem_bitsize - 1 - i;
4456*404b540aSrobert 		else
4457*404b540aSrobert 		  ibase = i;
4458*404b540aSrobert 		tmp[ibase / 32] |= (*vp++ & value_mask) << i % 32;
4459*404b540aSrobert 	      }
4460*404b540aSrobert 
4461*404b540aSrobert 	    real_from_target (&r, tmp, outer_submode);
4462*404b540aSrobert 	    elems[elem] = CONST_DOUBLE_FROM_REAL_VALUE (r, outer_submode);
4463*404b540aSrobert 	  }
4464*404b540aSrobert 	  break;
4465*404b540aSrobert 
4466*404b540aSrobert 	default:
4467*404b540aSrobert 	  gcc_unreachable ();
4468*404b540aSrobert 	}
4469*404b540aSrobert     }
4470*404b540aSrobert   if (VECTOR_MODE_P (outermode))
4471*404b540aSrobert     return gen_rtx_CONST_VECTOR (outermode, result_v);
4472*404b540aSrobert   else
4473*404b540aSrobert     return result_s;
4474*404b540aSrobert }
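
#if 0
/* Illustrative only: a minimal usage sketch, not part of the original
   sources.  Extracting the low QImode piece of a HImode constant goes
   through simplify_immed_subreg via the public entry point below.  */
static void
immed_subreg_example (void)
{
  rtx lowbyte = simplify_subreg (QImode, GEN_INT (0x1234), HImode, 0);
  /* On a little-endian target, byte 0 is the least significant byte,
     so lowbyte == (const_int 0x34).  */
  (void) lowbyte;
}
#endif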
4475*404b540aSrobert 
4476*404b540aSrobert /* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE)
4477*404b540aSrobert    Return 0 if no simplifications are possible.  */
4478*404b540aSrobert rtx
4479*404b540aSrobert simplify_subreg (enum machine_mode outermode, rtx op,
4480*404b540aSrobert 		 enum machine_mode innermode, unsigned int byte)
4481*404b540aSrobert {
4482*404b540aSrobert   /* Little bit of sanity checking.  */
4483*404b540aSrobert   gcc_assert (innermode != VOIDmode);
4484*404b540aSrobert   gcc_assert (outermode != VOIDmode);
4485*404b540aSrobert   gcc_assert (innermode != BLKmode);
4486*404b540aSrobert   gcc_assert (outermode != BLKmode);
4487*404b540aSrobert 
4488*404b540aSrobert   gcc_assert (GET_MODE (op) == innermode
4489*404b540aSrobert 	      || GET_MODE (op) == VOIDmode);
4490*404b540aSrobert 
4491*404b540aSrobert   gcc_assert ((byte % GET_MODE_SIZE (outermode)) == 0);
4492*404b540aSrobert   gcc_assert (byte < GET_MODE_SIZE (innermode));
4493*404b540aSrobert 
4494*404b540aSrobert   if (outermode == innermode && !byte)
4495*404b540aSrobert     return op;
4496*404b540aSrobert 
4497*404b540aSrobert   if (GET_CODE (op) == CONST_INT
4498*404b540aSrobert       || GET_CODE (op) == CONST_DOUBLE
4499*404b540aSrobert       || GET_CODE (op) == CONST_VECTOR)
4500*404b540aSrobert     return simplify_immed_subreg (outermode, op, innermode, byte);
4501*404b540aSrobert 
4502*404b540aSrobert   /* Changing mode twice with SUBREG => just change it once,
4503*404b540aSrobert      or not at all if changing back to op's starting mode.  */
4504*404b540aSrobert   if (GET_CODE (op) == SUBREG)
4505*404b540aSrobert     {
4506*404b540aSrobert       enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
4507*404b540aSrobert       int final_offset = byte + SUBREG_BYTE (op);
4508*404b540aSrobert       rtx newx;
4509*404b540aSrobert 
4510*404b540aSrobert       if (outermode == innermostmode
4511*404b540aSrobert 	  && byte == 0 && SUBREG_BYTE (op) == 0)
4512*404b540aSrobert 	return SUBREG_REG (op);
4513*404b540aSrobert 
4514*404b540aSrobert       /* The SUBREG_BYTE represents an offset, as if the value were stored
4515*404b540aSrobert 	 in memory, except for a paradoxical subreg, where we define
4516*404b540aSrobert 	 SUBREG_BYTE to be 0.  On big endian machines, that value should
4517*404b540aSrobert 	 really be negative.  For the moment, undo this exception.  */
4518*404b540aSrobert       if (byte == 0 && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
4519*404b540aSrobert 	{
4520*404b540aSrobert 	  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
4521*404b540aSrobert 	  if (WORDS_BIG_ENDIAN)
4522*404b540aSrobert 	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
4523*404b540aSrobert 	  if (BYTES_BIG_ENDIAN)
4524*404b540aSrobert 	    final_offset += difference % UNITS_PER_WORD;
4525*404b540aSrobert 	}
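      /* Worked example (illustrative, not from the original source):
	 with innermode HImode (2 bytes), outermode SImode (4 bytes) and
	 UNITS_PER_WORD == 4, difference is -2; on a fully big-endian
	 target final_offset is therefore adjusted by -2, restoring the
	 negative offset described above.  */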
4526*404b540aSrobert       if (SUBREG_BYTE (op) == 0
4527*404b540aSrobert 	  && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
4528*404b540aSrobert 	{
4529*404b540aSrobert 	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
4530*404b540aSrobert 	  if (WORDS_BIG_ENDIAN)
4531*404b540aSrobert 	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
4532*404b540aSrobert 	  if (BYTES_BIG_ENDIAN)
4533*404b540aSrobert 	    final_offset += difference % UNITS_PER_WORD;
4534*404b540aSrobert 	}
4535*404b540aSrobert 
4536*404b540aSrobert       /* See whether resulting subreg will be paradoxical.  */
4537*404b540aSrobert       if (GET_MODE_SIZE (innermostmode) > GET_MODE_SIZE (outermode))
4538*404b540aSrobert 	{
4539*404b540aSrobert 	  /* In nonparadoxical subregs we can't handle negative offsets.  */
4540*404b540aSrobert 	  if (final_offset < 0)
4541*404b540aSrobert 	    return NULL_RTX;
4542*404b540aSrobert 	  /* Bail out in case resulting subreg would be incorrect.  */
4543*404b540aSrobert 	  if (final_offset % GET_MODE_SIZE (outermode)
4544*404b540aSrobert 	      || (unsigned) final_offset >= GET_MODE_SIZE (innermostmode))
4545*404b540aSrobert 	    return NULL_RTX;
4546*404b540aSrobert 	}
4547*404b540aSrobert       else
4548*404b540aSrobert 	{
4549*404b540aSrobert 	  int offset = 0;
4550*404b540aSrobert 	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (outermode));
4551*404b540aSrobert 
4552*404b540aSrobert 	  /* For a paradoxical subreg, see if we are still looking at the
4553*404b540aSrobert 	     lower part.  If so, our SUBREG_BYTE will be 0.  */
4554*404b540aSrobert 	  if (WORDS_BIG_ENDIAN)
4555*404b540aSrobert 	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
4556*404b540aSrobert 	  if (BYTES_BIG_ENDIAN)
4557*404b540aSrobert 	    offset += difference % UNITS_PER_WORD;
4558*404b540aSrobert 	  if (offset == final_offset)
4559*404b540aSrobert 	    final_offset = 0;
4560*404b540aSrobert 	  else
4561*404b540aSrobert 	    return NULL_RTX;
4562*404b540aSrobert 	}
4563*404b540aSrobert 
4564*404b540aSrobert       /* Recurse for further possible simplifications.  */
4565*404b540aSrobert       newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode,
4566*404b540aSrobert 			      final_offset);
4567*404b540aSrobert       if (newx)
4568*404b540aSrobert 	return newx;
4569*404b540aSrobert       if (validate_subreg (outermode, innermostmode,
4570*404b540aSrobert 			   SUBREG_REG (op), final_offset))
4571*404b540aSrobert         return gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset);
4572*404b540aSrobert       return NULL_RTX;
4573*404b540aSrobert     }
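
  /* For instance (illustrative), on a little-endian target
     (subreg:QI (subreg:HI (reg:SI x) 0) 0) folds here to
     (subreg:QI (reg:SI x) 0), changing the mode once instead of twice.  */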
4574*404b540aSrobert 
4575*404b540aSrobert   /* Merge implicit and explicit truncations.  */
4576*404b540aSrobert 
4577*404b540aSrobert   if (GET_CODE (op) == TRUNCATE
4578*404b540aSrobert       && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (innermode)
4579*404b540aSrobert       && subreg_lowpart_offset (outermode, innermode) == byte)
4580*404b540aSrobert     return simplify_gen_unary (TRUNCATE, outermode, XEXP (op, 0),
4581*404b540aSrobert 			       GET_MODE (XEXP (op, 0)));
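
  /* E.g. (illustrative), on a little-endian target the lowpart
     (subreg:QI (truncate:HI (reg:SI x)) 0) becomes the single
     truncation (truncate:QI (reg:SI x)).  */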
4582*404b540aSrobert 
4583*404b540aSrobert   /* SUBREG of a hard register => just change the register number
4584*404b540aSrobert      and/or mode.  If the hard register is not valid in that mode,
4585*404b540aSrobert      suppress this simplification.  If the hard register is the stack,
4586*404b540aSrobert      frame, or argument pointer, leave this as a SUBREG.  */
4587*404b540aSrobert 
4588*404b540aSrobert   if (REG_P (op)
4589*404b540aSrobert       && REGNO (op) < FIRST_PSEUDO_REGISTER
4590*404b540aSrobert #ifdef CANNOT_CHANGE_MODE_CLASS
4591*404b540aSrobert       && ! (REG_CANNOT_CHANGE_MODE_P (REGNO (op), innermode, outermode)
4592*404b540aSrobert 	    && GET_MODE_CLASS (innermode) != MODE_COMPLEX_INT
4593*404b540aSrobert 	    && GET_MODE_CLASS (innermode) != MODE_COMPLEX_FLOAT)
4594*404b540aSrobert #endif
4595*404b540aSrobert       && ((reload_completed && !frame_pointer_needed)
4596*404b540aSrobert 	  || (REGNO (op) != FRAME_POINTER_REGNUM
4597*404b540aSrobert #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4598*404b540aSrobert 	      && REGNO (op) != HARD_FRAME_POINTER_REGNUM
4599*404b540aSrobert #endif
4600*404b540aSrobert 	     ))
4601*404b540aSrobert #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4602*404b540aSrobert       && REGNO (op) != ARG_POINTER_REGNUM
4603*404b540aSrobert #endif
4604*404b540aSrobert       && REGNO (op) != STACK_POINTER_REGNUM
4605*404b540aSrobert       && subreg_offset_representable_p (REGNO (op), innermode,
4606*404b540aSrobert 					byte, outermode))
4607*404b540aSrobert     {
4608*404b540aSrobert       unsigned int regno = REGNO (op);
4609*404b540aSrobert       unsigned int final_regno
4610*404b540aSrobert 	= regno + subreg_regno_offset (regno, innermode, byte, outermode);
4611*404b540aSrobert 
4612*404b540aSrobert       /* ??? We do allow it if the current REG is not valid for
4613*404b540aSrobert 	 its mode.  This is a kludge to work around how float/complex
4614*404b540aSrobert 	 arguments are passed on 32-bit SPARC and should be fixed.  */
4615*404b540aSrobert       if (HARD_REGNO_MODE_OK (final_regno, outermode)
4616*404b540aSrobert 	  || ! HARD_REGNO_MODE_OK (regno, innermode))
4617*404b540aSrobert 	{
4618*404b540aSrobert 	  rtx x;
4619*404b540aSrobert 	  int final_offset = byte;
4620*404b540aSrobert 
4621*404b540aSrobert 	  /* Adjust offset for paradoxical subregs.  */
4622*404b540aSrobert 	  if (byte == 0
4623*404b540aSrobert 	      && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
4624*404b540aSrobert 	    {
4625*404b540aSrobert 	      int difference = (GET_MODE_SIZE (innermode)
4626*404b540aSrobert 				- GET_MODE_SIZE (outermode));
4627*404b540aSrobert 	      if (WORDS_BIG_ENDIAN)
4628*404b540aSrobert 		final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
4629*404b540aSrobert 	      if (BYTES_BIG_ENDIAN)
4630*404b540aSrobert 		final_offset += difference % UNITS_PER_WORD;
4631*404b540aSrobert 	    }
4632*404b540aSrobert 
4633*404b540aSrobert 	  x = gen_rtx_REG_offset (op, outermode, final_regno, final_offset);
4634*404b540aSrobert 
4635*404b540aSrobert 	  /* Propagate the original regno.  We don't have any way to specify
4636*404b540aSrobert 	     an offset inside the original regno, so do so only for the
4637*404b540aSrobert 	     lowpart.  The information is used only by alias analysis,
4638*404b540aSrobert 	     which cannot grok partial registers anyway.  */
4639*404b540aSrobert 
4640*404b540aSrobert 	  if (subreg_lowpart_offset (outermode, innermode) == byte)
4641*404b540aSrobert 	    ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op);
4642*404b540aSrobert 	  return x;
4643*404b540aSrobert 	}
4644*404b540aSrobert     }
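
  /* As an illustration (target-dependent, assuming register 0 is an
     ordinary hard register): on a 32-bit little-endian target where a
     DImode value occupies two consecutive hard registers,
     (subreg:SI (reg:DI 0) 4) simplifies here to (reg:SI 1).  */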
4645*404b540aSrobert 
4646*404b540aSrobert   /* If we have a SUBREG of a register that we are replacing and we are
4647*404b540aSrobert      replacing it with a MEM, make a new MEM and try replacing the
4648*404b540aSrobert      SUBREG with it.  Don't do this if the MEM has a mode-dependent address
4649*404b540aSrobert      or if we would be widening it.  */
4650*404b540aSrobert 
4651*404b540aSrobert   if (MEM_P (op)
4652*404b540aSrobert       && ! mode_dependent_address_p (XEXP (op, 0))
4653*404b540aSrobert       /* Allow splitting of volatile memory references in case we don't
4654*404b540aSrobert          have an instruction to move the whole thing.  */
4655*404b540aSrobert       && (! MEM_VOLATILE_P (op)
4656*404b540aSrobert 	  || ! have_insn_for (SET, innermode))
4657*404b540aSrobert       && GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (GET_MODE (op)))
4658*404b540aSrobert     return adjust_address_nv (op, outermode, byte);
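
  /* For example (illustrative), (subreg:QI (mem:SI ADDR) 3) can be
     rewritten as a QImode memory reference at ADDR plus 3, provided
     ADDR is not mode-dependent; adjust_address_nv does the address
     arithmetic.  */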
4659*404b540aSrobert 
4660*404b540aSrobert   /* Handle complex values represented as CONCAT
4661*404b540aSrobert      of real and imaginary part.  */
4662*404b540aSrobert   if (GET_CODE (op) == CONCAT)
4663*404b540aSrobert     {
4664*404b540aSrobert       unsigned int inner_size, final_offset;
4665*404b540aSrobert       rtx part, res;
4666*404b540aSrobert 
4667*404b540aSrobert       inner_size = GET_MODE_UNIT_SIZE (innermode);
4668*404b540aSrobert       part = byte < inner_size ? XEXP (op, 0) : XEXP (op, 1);
4669*404b540aSrobert       final_offset = byte % inner_size;
4670*404b540aSrobert       if (final_offset + GET_MODE_SIZE (outermode) > inner_size)
4671*404b540aSrobert 	return NULL_RTX;
4672*404b540aSrobert 
4673*404b540aSrobert       res = simplify_subreg (outermode, part, GET_MODE (part), final_offset);
4674*404b540aSrobert       if (res)
4675*404b540aSrobert 	return res;
4676*404b540aSrobert       if (validate_subreg (outermode, GET_MODE (part), part, final_offset))
4677*404b540aSrobert 	return gen_rtx_SUBREG (outermode, part, final_offset);
4678*404b540aSrobert       return NULL_RTX;
4679*404b540aSrobert     }
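
  /* E.g. (illustrative, assuming 4-byte SFmode): given the complex
     value (concat:SC (reg:SF re) (reg:SF im)), byte 0 selects
     (reg:SF re) and byte 4 selects (reg:SF im).  */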
4680*404b540aSrobert 
4681*404b540aSrobert   /* Optimize SUBREG truncations of zero and sign extended values.  */
4682*404b540aSrobert   if ((GET_CODE (op) == ZERO_EXTEND
4683*404b540aSrobert        || GET_CODE (op) == SIGN_EXTEND)
4684*404b540aSrobert       && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode))
4685*404b540aSrobert     {
4686*404b540aSrobert       unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);
4687*404b540aSrobert 
4688*404b540aSrobert       /* If we're requesting the lowpart of a zero or sign extension,
4689*404b540aSrobert 	 there are three possibilities.  If the outermode is the same
4690*404b540aSrobert 	 as the origmode, we can omit both the extension and the subreg.
4691*404b540aSrobert 	 If the outermode is not larger than the origmode, we can apply
4692*404b540aSrobert 	 the truncation without the extension.  Finally, if the outermode
4693*404b540aSrobert 	 is larger than the origmode, but both are integer modes, we
4694*404b540aSrobert 	 can just extend to the appropriate mode.  */
4695*404b540aSrobert       if (bitpos == 0)
4696*404b540aSrobert 	{
4697*404b540aSrobert 	  enum machine_mode origmode = GET_MODE (XEXP (op, 0));
4698*404b540aSrobert 	  if (outermode == origmode)
4699*404b540aSrobert 	    return XEXP (op, 0);
4700*404b540aSrobert 	  if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode))
4701*404b540aSrobert 	    return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
4702*404b540aSrobert 					subreg_lowpart_offset (outermode,
4703*404b540aSrobert 							       origmode));
4704*404b540aSrobert 	  if (SCALAR_INT_MODE_P (outermode))
4705*404b540aSrobert 	    return simplify_gen_unary (GET_CODE (op), outermode,
4706*404b540aSrobert 				       XEXP (op, 0), origmode);
4707*404b540aSrobert 	}
4708*404b540aSrobert 
4709*404b540aSrobert       /* A SUBREG resulting from a zero extension may fold to zero if
4710*404b540aSrobert 	 it extracts higher bits than the ZERO_EXTEND's source provides.  */
4711*404b540aSrobert       if (GET_CODE (op) == ZERO_EXTEND
4712*404b540aSrobert 	  && bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))))
4713*404b540aSrobert 	return CONST0_RTX (outermode);
4714*404b540aSrobert     }
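
  /* Illustrations of the above (little-endian, byte 0 = lowpart):
     (subreg:QI (zero_extend:SI (reg:QI x)) 0) -> (reg:QI x);
     (subreg:HI (zero_extend:SI (reg:QI x)) 0)
       -> (zero_extend:HI (reg:QI x));
     (subreg:SI (zero_extend:DI (reg:SI x)) 4) -> (const_int 0),
     since the high word of the zero extension is known to be zero.  */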
4715*404b540aSrobert 
4716*404b540aSrobert   /* Simplify (subreg:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C), 0)
4717*404b540aSrobert      into (ashiftrt:QI (x:QI) C), where C is a suitable small constant and
4718*404b540aSrobert      the outer subreg is effectively a truncation to the original mode.  */
4719*404b540aSrobert   if ((GET_CODE (op) == LSHIFTRT
4720*404b540aSrobert        || GET_CODE (op) == ASHIFTRT)
4721*404b540aSrobert       && SCALAR_INT_MODE_P (outermode)
4722*404b540aSrobert       /* Ensure that INNERMODE is at least twice as wide as OUTERMODE
4723*404b540aSrobert 	 to avoid the possibility that an outer LSHIFTRT shifts by more
4724*404b540aSrobert 	 than the sign extension's sign_bit_copies and introduces zeros
4725*404b540aSrobert 	 into the high bits of the result.  */
4726*404b540aSrobert       && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
4727*404b540aSrobert       && GET_CODE (XEXP (op, 1)) == CONST_INT
4728*404b540aSrobert       && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
4729*404b540aSrobert       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
4730*404b540aSrobert       && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
4731*404b540aSrobert       && subreg_lsb_1 (outermode, innermode, byte) == 0)
4732*404b540aSrobert     return simplify_gen_binary (ASHIFTRT, outermode,
4733*404b540aSrobert 				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
4734*404b540aSrobert 
4735*404b540aSrobert   /* Likewise, simplify (subreg:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C), 0)
4736*404b540aSrobert      into (lshiftrt:QI (x:QI) C), where C is a suitable small constant and
4737*404b540aSrobert      the outer subreg is effectively a truncation to the original mode.  */
4738*404b540aSrobert   if ((GET_CODE (op) == LSHIFTRT
4739*404b540aSrobert        || GET_CODE (op) == ASHIFTRT)
4740*404b540aSrobert       && SCALAR_INT_MODE_P (outermode)
4741*404b540aSrobert       && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
4742*404b540aSrobert       && GET_CODE (XEXP (op, 1)) == CONST_INT
4743*404b540aSrobert       && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
4744*404b540aSrobert       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
4745*404b540aSrobert       && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
4746*404b540aSrobert       && subreg_lsb_1 (outermode, innermode, byte) == 0)
4747*404b540aSrobert     return simplify_gen_binary (LSHIFTRT, outermode,
4748*404b540aSrobert 				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
4749*404b540aSrobert 
4750*404b540aSrobert   /* Likewise, simplify (subreg:QI (ashift:SI (zero_extend:SI (x:QI)) C), 0)
4751*404b540aSrobert      into (ashift:QI (x:QI) C), where C is a suitable small constant and
4752*404b540aSrobert      the outer subreg is effectively a truncation to the original mode.  */
4753*404b540aSrobert   if (GET_CODE (op) == ASHIFT
4754*404b540aSrobert       && SCALAR_INT_MODE_P (outermode)
4755*404b540aSrobert       && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
4756*404b540aSrobert       && GET_CODE (XEXP (op, 1)) == CONST_INT
4757*404b540aSrobert       && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
4758*404b540aSrobert 	  || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
4759*404b540aSrobert       && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
4760*404b540aSrobert       && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
4761*404b540aSrobert       && subreg_lsb_1 (outermode, innermode, byte) == 0)
4762*404b540aSrobert     return simplify_gen_binary (ASHIFT, outermode,
4763*404b540aSrobert 				XEXP (XEXP (op, 0), 0), XEXP (op, 1));
4764*404b540aSrobert 
4765*404b540aSrobert   return NULL_RTX;
4766*404b540aSrobert }
4767*404b540aSrobert 
4768*404b540aSrobert /* Make a SUBREG operation or equivalent if it folds.  */
4769*404b540aSrobert 
4770*404b540aSrobert rtx
4771*404b540aSrobert simplify_gen_subreg (enum machine_mode outermode, rtx op,
4772*404b540aSrobert 		     enum machine_mode innermode, unsigned int byte)
4773*404b540aSrobert {
4774*404b540aSrobert   rtx newx;
4775*404b540aSrobert 
4776*404b540aSrobert   newx = simplify_subreg (outermode, op, innermode, byte);
4777*404b540aSrobert   if (newx)
4778*404b540aSrobert     return newx;
4779*404b540aSrobert 
4780*404b540aSrobert   if (GET_CODE (op) == SUBREG
4781*404b540aSrobert       || GET_CODE (op) == CONCAT
4782*404b540aSrobert       || GET_MODE (op) == VOIDmode)
4783*404b540aSrobert     return NULL_RTX;
4784*404b540aSrobert 
4785*404b540aSrobert   if (validate_subreg (outermode, innermode, op, byte))
4786*404b540aSrobert     return gen_rtx_SUBREG (outermode, op, byte);
4787*404b540aSrobert 
4788*404b540aSrobert   return NULL_RTX;
4789*404b540aSrobert }
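
/* Usage sketch (illustrative, not part of the original source): a
   caller wanting the low SImode part of a DImode value X might write

     simplify_gen_subreg (SImode, x, DImode,
			  subreg_lowpart_offset (SImode, DImode));

   which returns a folded rtx when possible, a fresh SUBREG otherwise,
   or NULL_RTX if no valid subreg can be formed.  */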
4790*404b540aSrobert 
4791*404b540aSrobert /* Simplify X, an rtx expression.
4792*404b540aSrobert 
4793*404b540aSrobert    Return the simplified expression or NULL if no simplifications
4794*404b540aSrobert    were possible.
4795*404b540aSrobert 
4796*404b540aSrobert    This is the preferred entry point into the simplification routines;
4797*404b540aSrobert    however, we still allow passes to call the more specific routines.
4798*404b540aSrobert 
4799*404b540aSrobert    Right now GCC has three (yes, three) major bodies of RTL simplification
4800*404b540aSrobert    code that need to be unified.
4801*404b540aSrobert 
4802*404b540aSrobert 	1. fold_rtx in cse.c.  This code uses various CSE specific
4803*404b540aSrobert 	   information to aid in RTL simplification.
4804*404b540aSrobert 
4805*404b540aSrobert 	2. simplify_rtx in combine.c.  Similar to fold_rtx, except that
4806*404b540aSrobert 	   it uses combine specific information to aid in RTL
4807*404b540aSrobert 	   simplification.
4808*404b540aSrobert 
4809*404b540aSrobert 	3. The routines in this file.
4810*404b540aSrobert 
4811*404b540aSrobert 
4812*404b540aSrobert    Long term we want to have only one body of simplification code; to
4813*404b540aSrobert    get to that state I recommend the following steps:
4814*404b540aSrobert 
4815*404b540aSrobert 	1. Pore over fold_rtx & simplify_rtx and move any simplifications
4816*404b540aSrobert 	   which do not depend on pass-specific state into these routines.
4817*404b540aSrobert 
4818*404b540aSrobert 	2. As code is moved by #1, change fold_rtx & simplify_rtx to
4819*404b540aSrobert 	   use this routine whenever possible.
4820*404b540aSrobert 
4821*404b540aSrobert 	3. Allow for pass dependent state to be provided to these
4822*404b540aSrobert 	   routines and add simplifications based on the pass dependent
4823*404b540aSrobert 	   state.  Remove code from cse.c & combine.c that becomes
4824*404b540aSrobert 	   redundant/dead.
4825*404b540aSrobert 
4826*404b540aSrobert     It will take time, but ultimately the compiler will be easier to
4827*404b540aSrobert     maintain and improve.  It's totally silly that when we add a
4828*404b540aSrobert     simplification it needs to be added to 4 places (3 for RTL
4829*404b540aSrobert     simplification and 1 for tree simplification).  */
4830*404b540aSrobert 
4831*404b540aSrobert rtx
4832*404b540aSrobert simplify_rtx (rtx x)
4833*404b540aSrobert {
4834*404b540aSrobert   enum rtx_code code = GET_CODE (x);
4835*404b540aSrobert   enum machine_mode mode = GET_MODE (x);
4836*404b540aSrobert 
4837*404b540aSrobert   switch (GET_RTX_CLASS (code))
4838*404b540aSrobert     {
4839*404b540aSrobert     case RTX_UNARY:
4840*404b540aSrobert       return simplify_unary_operation (code, mode,
4841*404b540aSrobert 				       XEXP (x, 0), GET_MODE (XEXP (x, 0)));
4842*404b540aSrobert     case RTX_COMM_ARITH:
4843*404b540aSrobert       if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
4844*404b540aSrobert 	return simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
4845*404b540aSrobert 
4846*404b540aSrobert       /* Fall through....  */
4847*404b540aSrobert 
4848*404b540aSrobert     case RTX_BIN_ARITH:
4849*404b540aSrobert       return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
4850*404b540aSrobert 
4851*404b540aSrobert     case RTX_TERNARY:
4852*404b540aSrobert     case RTX_BITFIELD_OPS:
4853*404b540aSrobert       return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)),
4854*404b540aSrobert 					 XEXP (x, 0), XEXP (x, 1),
4855*404b540aSrobert 					 XEXP (x, 2));
4856*404b540aSrobert 
4857*404b540aSrobert     case RTX_COMPARE:
4858*404b540aSrobert     case RTX_COMM_COMPARE:
4859*404b540aSrobert       return simplify_relational_operation (code, mode,
4860*404b540aSrobert                                             ((GET_MODE (XEXP (x, 0))
4861*404b540aSrobert                                              != VOIDmode)
4862*404b540aSrobert                                             ? GET_MODE (XEXP (x, 0))
4863*404b540aSrobert                                             : GET_MODE (XEXP (x, 1))),
4864*404b540aSrobert                                             XEXP (x, 0),
4865*404b540aSrobert                                             XEXP (x, 1));
4866*404b540aSrobert 
4867*404b540aSrobert     case RTX_EXTRA:
4868*404b540aSrobert       if (code == SUBREG)
4869*404b540aSrobert 	return simplify_gen_subreg (mode, SUBREG_REG (x),
4870*404b540aSrobert 				    GET_MODE (SUBREG_REG (x)),
4871*404b540aSrobert 				    SUBREG_BYTE (x));
4872*404b540aSrobert       break;
4873*404b540aSrobert 
4874*404b540aSrobert     case RTX_OBJ:
4875*404b540aSrobert       if (code == LO_SUM)
4876*404b540aSrobert 	{
4877*404b540aSrobert 	  /* Convert (lo_sum (high FOO) FOO) to FOO.  */
4878*404b540aSrobert 	  if (GET_CODE (XEXP (x, 0)) == HIGH
4879*404b540aSrobert 	      && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4880*404b540aSrobert 	    return XEXP (x, 1);
4881*404b540aSrobert 	}
4882*404b540aSrobert       break;
4883*404b540aSrobert 
4884*404b540aSrobert     default:
4885*404b540aSrobert       break;
4886*404b540aSrobert     }
4887*404b540aSrobert   return NULL;
4888*404b540aSrobert }
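
/* Usage sketch (illustrative, not part of the original source):

     rtx sum = gen_rtx_PLUS (SImode, const1_rtx, const1_rtx);
     rtx folded = simplify_rtx (sum);

   Here FOLDED is (const_int 2); a NULL result would mean that no
   simplification applied.  */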
4889*404b540aSrobert 