xref: /dragonfly/contrib/gcc-8.0/gcc/recog.c (revision 38fd1498)
1*38fd1498Szrj /* Subroutines used by or related to instruction recognition.
2*38fd1498Szrj    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3*38fd1498Szrj 
4*38fd1498Szrj This file is part of GCC.
5*38fd1498Szrj 
6*38fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
7*38fd1498Szrj the terms of the GNU General Public License as published by the Free
8*38fd1498Szrj Software Foundation; either version 3, or (at your option) any later
9*38fd1498Szrj version.
10*38fd1498Szrj 
11*38fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12*38fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
13*38fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14*38fd1498Szrj for more details.
15*38fd1498Szrj 
16*38fd1498Szrj You should have received a copy of the GNU General Public License
17*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
18*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
19*38fd1498Szrj 
20*38fd1498Szrj 
21*38fd1498Szrj #include "config.h"
22*38fd1498Szrj #include "system.h"
23*38fd1498Szrj #include "coretypes.h"
24*38fd1498Szrj #include "backend.h"
25*38fd1498Szrj #include "target.h"
26*38fd1498Szrj #include "rtl.h"
27*38fd1498Szrj #include "tree.h"
28*38fd1498Szrj #include "cfghooks.h"
29*38fd1498Szrj #include "df.h"
30*38fd1498Szrj #include "memmodel.h"
31*38fd1498Szrj #include "tm_p.h"
32*38fd1498Szrj #include "insn-config.h"
33*38fd1498Szrj #include "regs.h"
34*38fd1498Szrj #include "emit-rtl.h"
35*38fd1498Szrj #include "recog.h"
36*38fd1498Szrj #include "insn-attr.h"
37*38fd1498Szrj #include "addresses.h"
38*38fd1498Szrj #include "cfgrtl.h"
39*38fd1498Szrj #include "cfgbuild.h"
40*38fd1498Szrj #include "cfgcleanup.h"
41*38fd1498Szrj #include "reload.h"
42*38fd1498Szrj #include "tree-pass.h"
43*38fd1498Szrj 
/* Provide a default for STACK_POP_CODE when the target does not define
   one: popping moves the stack pointer opposite to the growth
   direction, so a downward-growing stack pops with POST_INC.  */
#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
51*38fd1498Szrj 
/* Forward declarations for static helpers defined later in this file.  */
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

/* Per-target recognition state; when SWITCHABLE_TARGET is enabled the
   active pointer can be redirected as the current target changes.  */
struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif
60*38fd1498Szrj 
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

/* Data about the insn most recently extracted; see recog.h for the
   layout of struct recog_data_d.  */
struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
95*38fd1498Szrj 
96*38fd1498Szrj /* Initialize data used by the function `recog'.
97*38fd1498Szrj    This must be called once in the compilation of a function
98*38fd1498Szrj    before any insn recognition may be done in the function.  */
99*38fd1498Szrj 
100*38fd1498Szrj void
init_recog_no_volatile(void)101*38fd1498Szrj init_recog_no_volatile (void)
102*38fd1498Szrj {
103*38fd1498Szrj   volatile_ok = 0;
104*38fd1498Szrj }
105*38fd1498Szrj 
106*38fd1498Szrj void
init_recog(void)107*38fd1498Szrj init_recog (void)
108*38fd1498Szrj {
109*38fd1498Szrj   volatile_ok = 1;
110*38fd1498Szrj }
111*38fd1498Szrj 
112*38fd1498Szrj 
113*38fd1498Szrj /* Return true if labels in asm operands BODY are LABEL_REFs.  */
114*38fd1498Szrj 
115*38fd1498Szrj static bool
asm_labels_ok(rtx body)116*38fd1498Szrj asm_labels_ok (rtx body)
117*38fd1498Szrj {
118*38fd1498Szrj   rtx asmop;
119*38fd1498Szrj   int i;
120*38fd1498Szrj 
121*38fd1498Szrj   asmop = extract_asm_operands (body);
122*38fd1498Szrj   if (asmop == NULL_RTX)
123*38fd1498Szrj     return true;
124*38fd1498Szrj 
125*38fd1498Szrj   for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
126*38fd1498Szrj     if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
127*38fd1498Szrj       return false;
128*38fd1498Szrj 
129*38fd1498Szrj   return true;
130*38fd1498Szrj }
131*38fd1498Szrj 
132*38fd1498Szrj /* Check that X is an insn-body for an `asm' with operands
133*38fd1498Szrj    and that the operands mentioned in it are legitimate.  */
134*38fd1498Szrj 
135*38fd1498Szrj int
check_asm_operands(rtx x)136*38fd1498Szrj check_asm_operands (rtx x)
137*38fd1498Szrj {
138*38fd1498Szrj   int noperands;
139*38fd1498Szrj   rtx *operands;
140*38fd1498Szrj   const char **constraints;
141*38fd1498Szrj   int i;
142*38fd1498Szrj 
143*38fd1498Szrj   if (!asm_labels_ok (x))
144*38fd1498Szrj     return 0;
145*38fd1498Szrj 
146*38fd1498Szrj   /* Post-reload, be more strict with things.  */
147*38fd1498Szrj   if (reload_completed)
148*38fd1498Szrj     {
149*38fd1498Szrj       /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
150*38fd1498Szrj       rtx_insn *insn = make_insn_raw (x);
151*38fd1498Szrj       extract_insn (insn);
152*38fd1498Szrj       constrain_operands (1, get_enabled_alternatives (insn));
153*38fd1498Szrj       return which_alternative >= 0;
154*38fd1498Szrj     }
155*38fd1498Szrj 
156*38fd1498Szrj   noperands = asm_noperands (x);
157*38fd1498Szrj   if (noperands < 0)
158*38fd1498Szrj     return 0;
159*38fd1498Szrj   if (noperands == 0)
160*38fd1498Szrj     return 1;
161*38fd1498Szrj 
162*38fd1498Szrj   operands = XALLOCAVEC (rtx, noperands);
163*38fd1498Szrj   constraints = XALLOCAVEC (const char *, noperands);
164*38fd1498Szrj 
165*38fd1498Szrj   decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
166*38fd1498Szrj 
167*38fd1498Szrj   for (i = 0; i < noperands; i++)
168*38fd1498Szrj     {
169*38fd1498Szrj       const char *c = constraints[i];
170*38fd1498Szrj       if (c[0] == '%')
171*38fd1498Szrj 	c++;
172*38fd1498Szrj       if (! asm_operand_ok (operands[i], c, constraints))
173*38fd1498Szrj 	return 0;
174*38fd1498Szrj     }
175*38fd1498Szrj 
176*38fd1498Szrj   return 1;
177*38fd1498Szrj }
178*38fd1498Szrj 
/* Static data for the next two routines.  */

/* One pending replacement recorded by validate_change_1: where the
   change was made, what was there before, and the state needed either
   to undo it (cancel_changes) or commit it (confirm_change_group).  */
struct change_t
{
  rtx object;		/* Insn or MEM being changed, or zero.  */
  int old_code;		/* Saved INSN_CODE of OBJECT, restored on cancel.  */
  bool unshare;		/* Copy *LOC with copy_rtx when confirming.  */
  rtx *loc;		/* Location inside OBJECT that was overwritten.  */
  rtx old;		/* Previous contents of *LOC.  */
};

/* Growable array of pending changes and its allocated capacity.  */
static change_t *changes;
static int changes_allocated;

/* Number of entries currently live in CHANGES.  */
static int num_changes = 0;
194*38fd1498Szrj 
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.

   UNSHARE is true if the replacement should be copied with copy_rtx
   when the change group is confirmed (see confirm_change_group).  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  /* Replacing something with an identical rtx is a no-op; report
     success without recording anything.  */
  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  /* A stand-alone change must not be interleaved with a pending
     group of changes.  */
  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
261*38fd1498Szrj 
262*38fd1498Szrj /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
263*38fd1498Szrj    UNSHARE to false.  */
264*38fd1498Szrj 
265*38fd1498Szrj bool
validate_change(rtx object,rtx * loc,rtx new_rtx,bool in_group)266*38fd1498Szrj validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
267*38fd1498Szrj {
268*38fd1498Szrj   return validate_change_1 (object, loc, new_rtx, in_group, false);
269*38fd1498Szrj }
270*38fd1498Szrj 
271*38fd1498Szrj /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
272*38fd1498Szrj    UNSHARE to true.  */
273*38fd1498Szrj 
274*38fd1498Szrj bool
validate_unshare_change(rtx object,rtx * loc,rtx new_rtx,bool in_group)275*38fd1498Szrj validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
276*38fd1498Szrj {
277*38fd1498Szrj   return validate_change_1 (object, loc, new_rtx, in_group, true);
278*38fd1498Szrj }
279*38fd1498Szrj 
280*38fd1498Szrj 
281*38fd1498Szrj /* Keep X canonicalized if some changes have made it non-canonical; only
282*38fd1498Szrj    modifies the operands of X, not (for example) its code.  Simplifications
283*38fd1498Szrj    are not the job of this routine.
284*38fd1498Szrj 
285*38fd1498Szrj    Return true if anything was changed.  */
286*38fd1498Szrj bool
canonicalize_change_group(rtx_insn * insn,rtx x)287*38fd1498Szrj canonicalize_change_group (rtx_insn *insn, rtx x)
288*38fd1498Szrj {
289*38fd1498Szrj   if (COMMUTATIVE_P (x)
290*38fd1498Szrj       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
291*38fd1498Szrj     {
292*38fd1498Szrj       /* Oops, the caller has made X no longer canonical.
293*38fd1498Szrj 	 Let's redo the changes in the correct order.  */
294*38fd1498Szrj       rtx tem = XEXP (x, 0);
295*38fd1498Szrj       validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
296*38fd1498Szrj       validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
297*38fd1498Szrj       return true;
298*38fd1498Szrj     }
299*38fd1498Szrj   else
300*38fd1498Szrj     return false;
301*38fd1498Szrj }
302*38fd1498Szrj 
303*38fd1498Szrj 
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   Returns nonzero when INSN is invalid, zero when valid (in which case
   INSN_CODE is set to the recognized code).

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operand aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      /* Wrap the original pattern in a PARALLEL together with the
	 clobbers the matched insn pattern requires.  */
      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
363*38fd1498Szrj 
364*38fd1498Szrj /* Return number of changes made and not validated yet.  */
365*38fd1498Szrj int
num_changes_pending(void)366*38fd1498Szrj num_changes_pending (void)
367*38fd1498Szrj {
368*38fd1498Szrj   return num_changes;
369*38fd1498Szrj }
370*38fd1498Szrj 
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  /* A changed MEM is valid as long as its address still is.  */
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  /* Rebuild the PARALLEL with all but the trailing
		     CLOBBER.  */
		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  /* Valid iff the loop ran to completion without an early break.  */
  return (i == num_changes);
}
474*38fd1498Szrj 
475*38fd1498Szrj /* A group of changes has previously been issued with validate_change
476*38fd1498Szrj    and verified with verify_changes.  Call df_insn_rescan for each of
477*38fd1498Szrj    the insn changed and clear num_changes.  */
478*38fd1498Szrj 
479*38fd1498Szrj void
confirm_change_group(void)480*38fd1498Szrj confirm_change_group (void)
481*38fd1498Szrj {
482*38fd1498Szrj   int i;
483*38fd1498Szrj   rtx last_object = NULL;
484*38fd1498Szrj 
485*38fd1498Szrj   for (i = 0; i < num_changes; i++)
486*38fd1498Szrj     {
487*38fd1498Szrj       rtx object = changes[i].object;
488*38fd1498Szrj 
489*38fd1498Szrj       if (changes[i].unshare)
490*38fd1498Szrj 	*changes[i].loc = copy_rtx (*changes[i].loc);
491*38fd1498Szrj 
492*38fd1498Szrj       /* Avoid unnecessary rescanning when multiple changes to same instruction
493*38fd1498Szrj          are made.  */
494*38fd1498Szrj       if (object)
495*38fd1498Szrj 	{
496*38fd1498Szrj 	  if (object != last_object && last_object && INSN_P (last_object))
497*38fd1498Szrj 	    df_insn_rescan (as_a <rtx_insn *> (last_object));
498*38fd1498Szrj 	  last_object = object;
499*38fd1498Szrj 	}
500*38fd1498Szrj     }
501*38fd1498Szrj 
502*38fd1498Szrj   if (last_object && INSN_P (last_object))
503*38fd1498Szrj     df_insn_rescan (as_a <rtx_insn *> (last_object));
504*38fd1498Szrj   num_changes = 0;
505*38fd1498Szrj }
506*38fd1498Szrj 
507*38fd1498Szrj /* Apply a group of changes previously issued with `validate_change'.
508*38fd1498Szrj    If all changes are valid, call confirm_change_group and return 1,
509*38fd1498Szrj    otherwise, call cancel_changes and return 0.  */
510*38fd1498Szrj 
511*38fd1498Szrj int
apply_change_group(void)512*38fd1498Szrj apply_change_group (void)
513*38fd1498Szrj {
514*38fd1498Szrj   if (verify_changes (0))
515*38fd1498Szrj     {
516*38fd1498Szrj       confirm_change_group ();
517*38fd1498Szrj       return 1;
518*38fd1498Szrj     }
519*38fd1498Szrj   else
520*38fd1498Szrj     {
521*38fd1498Szrj       cancel_changes (0);
522*38fd1498Szrj       return 0;
523*38fd1498Szrj     }
524*38fd1498Szrj }
525*38fd1498Szrj 
526*38fd1498Szrj 
527*38fd1498Szrj /* Return the number of changes so far in the current group.  */
528*38fd1498Szrj 
529*38fd1498Szrj int
num_validated_changes(void)530*38fd1498Szrj num_validated_changes (void)
531*38fd1498Szrj {
532*38fd1498Szrj   return num_changes;
533*38fd1498Szrj }
534*38fd1498Szrj 
535*38fd1498Szrj /* Retract the changes numbered NUM and up.  */
536*38fd1498Szrj 
537*38fd1498Szrj void
cancel_changes(int num)538*38fd1498Szrj cancel_changes (int num)
539*38fd1498Szrj {
540*38fd1498Szrj   int i;
541*38fd1498Szrj 
542*38fd1498Szrj   /* Back out all the changes.  Do this in the opposite order in which
543*38fd1498Szrj      they were made.  */
544*38fd1498Szrj   for (i = num_changes - 1; i >= num; i--)
545*38fd1498Szrj     {
546*38fd1498Szrj       *changes[i].loc = changes[i].old;
547*38fd1498Szrj       if (changes[i].object && !MEM_P (changes[i].object))
548*38fd1498Szrj 	INSN_CODE (changes[i].object) = changes[i].old_code;
549*38fd1498Szrj     }
550*38fd1498Szrj   num_changes = num;
551*38fd1498Szrj }
552*38fd1498Szrj 
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx at *LOC after TO has been substituted in, queuing any further
   changes on OBJECT's change group.  OP0_MODE is the mode the first
   operand had before substitution (needed when the new operand is a
   VOIDmode constant).  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  /* Re-canonicalize commutative/swappable operations whose operand
     order the substitution may have made non-canonical.  */
  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      /* Rewrite (minus a const) as (plus a (neg const)), the canonical
	 form for subtraction of a constant.  */
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  /* Prefer the operand mode of the target's extv/extzv pattern,
	     when one exists.  */
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
707*38fd1498Szrj 
/* Replace every occurrence of FROM in X (the expression at *LOC) with TO.
   Mark each change with validate_change passing OBJECT; changes are only
   queued, the caller is expected to run apply_change_group.  If SIMPLIFY
   is true, after a substitution has happened underneath this node, call
   simplify_while_replacing to keep the surrounding rtl well-formed.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  /* Remember the change-queue depth so we can tell later whether any
     substitution actually happened beneath this node.  */
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      /* Descend only into the destination here; the shared
		 ASM_OPERANDS source is processed once, via element 0.  */
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  Re-read operand 0's mode
     since the substitution above may have changed it.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
795*38fd1498Szrj 
796*38fd1498Szrj /* Try replacing every occurrence of FROM in subexpression LOC of INSN
797*38fd1498Szrj    with TO.  After all changes have been made, validate by seeing
798*38fd1498Szrj    if INSN is still valid.  */
799*38fd1498Szrj 
800*38fd1498Szrj int
validate_replace_rtx_subexp(rtx from,rtx to,rtx_insn * insn,rtx * loc)801*38fd1498Szrj validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
802*38fd1498Szrj {
803*38fd1498Szrj   validate_replace_rtx_1 (loc, from, to, insn, true);
804*38fd1498Szrj   return apply_change_group ();
805*38fd1498Szrj }
806*38fd1498Szrj 
807*38fd1498Szrj /* Try replacing every occurrence of FROM in INSN with TO.  After all
808*38fd1498Szrj    changes have been made, validate by seeing if INSN is still valid.  */
809*38fd1498Szrj 
810*38fd1498Szrj int
validate_replace_rtx(rtx from,rtx to,rtx_insn * insn)811*38fd1498Szrj validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
812*38fd1498Szrj {
813*38fd1498Szrj   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
814*38fd1498Szrj   return apply_change_group ();
815*38fd1498Szrj }
816*38fd1498Szrj 
817*38fd1498Szrj /* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
818*38fd1498Szrj    is a part of INSN.  After all changes have been made, validate by seeing if
819*38fd1498Szrj    INSN is still valid.
820*38fd1498Szrj    validate_replace_rtx (from, to, insn) is equivalent to
821*38fd1498Szrj    validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
822*38fd1498Szrj 
823*38fd1498Szrj int
validate_replace_rtx_part(rtx from,rtx to,rtx * where,rtx_insn * insn)824*38fd1498Szrj validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
825*38fd1498Szrj {
826*38fd1498Szrj   validate_replace_rtx_1 (where, from, to, insn, true);
827*38fd1498Szrj   return apply_change_group ();
828*38fd1498Szrj }
829*38fd1498Szrj 
830*38fd1498Szrj /* Same as above, but do not simplify rtx afterwards.  */
831*38fd1498Szrj int
validate_replace_rtx_part_nosimplify(rtx from,rtx to,rtx * where,rtx_insn * insn)832*38fd1498Szrj validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
833*38fd1498Szrj 				      rtx_insn *insn)
834*38fd1498Szrj {
835*38fd1498Szrj   validate_replace_rtx_1 (where, from, to, insn, false);
836*38fd1498Szrj   return apply_change_group ();
837*38fd1498Szrj 
838*38fd1498Szrj }
839*38fd1498Szrj 
840*38fd1498Szrj /* Try replacing every occurrence of FROM in INSN with TO.  This also
841*38fd1498Szrj    will replace in REG_EQUAL and REG_EQUIV notes.  */
842*38fd1498Szrj 
843*38fd1498Szrj void
validate_replace_rtx_group(rtx from,rtx to,rtx_insn * insn)844*38fd1498Szrj validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
845*38fd1498Szrj {
846*38fd1498Szrj   rtx note;
847*38fd1498Szrj   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
848*38fd1498Szrj   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
849*38fd1498Szrj     if (REG_NOTE_KIND (note) == REG_EQUAL
850*38fd1498Szrj 	|| REG_NOTE_KIND (note) == REG_EQUIV)
851*38fd1498Szrj       validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
852*38fd1498Szrj }
853*38fd1498Szrj 
/* Closure passed through note_uses to validate_replace_src_1 so it can
   perform the FROM -> TO substitution on each used subexpression.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;			/* Insn in which substitution is occurring.  */
};
861*38fd1498Szrj 
862*38fd1498Szrj static void
validate_replace_src_1(rtx * x,void * data)863*38fd1498Szrj validate_replace_src_1 (rtx *x, void *data)
864*38fd1498Szrj {
865*38fd1498Szrj   struct validate_replace_src_data *d
866*38fd1498Szrj     = (struct validate_replace_src_data *) data;
867*38fd1498Szrj 
868*38fd1498Szrj   validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
869*38fd1498Szrj }
870*38fd1498Szrj 
871*38fd1498Szrj /* Try replacing every occurrence of FROM in INSN with TO, avoiding
872*38fd1498Szrj    SET_DESTs.  */
873*38fd1498Szrj 
874*38fd1498Szrj void
validate_replace_src_group(rtx from,rtx to,rtx_insn * insn)875*38fd1498Szrj validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
876*38fd1498Szrj {
877*38fd1498Szrj   struct validate_replace_src_data d;
878*38fd1498Szrj 
879*38fd1498Szrj   d.from = from;
880*38fd1498Szrj   d.to = to;
881*38fd1498Szrj   d.insn = insn;
882*38fd1498Szrj   note_uses (&PATTERN (insn), validate_replace_src_1, &d);
883*38fd1498Szrj }
884*38fd1498Szrj 
885*38fd1498Szrj /* Try simplify INSN.
886*38fd1498Szrj    Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
887*38fd1498Szrj    pattern and return true if something was simplified.  */
888*38fd1498Szrj 
889*38fd1498Szrj bool
validate_simplify_insn(rtx_insn * insn)890*38fd1498Szrj validate_simplify_insn (rtx_insn *insn)
891*38fd1498Szrj {
892*38fd1498Szrj   int i;
893*38fd1498Szrj   rtx pat = NULL;
894*38fd1498Szrj   rtx newpat = NULL;
895*38fd1498Szrj 
896*38fd1498Szrj   pat = PATTERN (insn);
897*38fd1498Szrj 
898*38fd1498Szrj   if (GET_CODE (pat) == SET)
899*38fd1498Szrj     {
900*38fd1498Szrj       newpat = simplify_rtx (SET_SRC (pat));
901*38fd1498Szrj       if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
902*38fd1498Szrj 	validate_change (insn, &SET_SRC (pat), newpat, 1);
903*38fd1498Szrj       newpat = simplify_rtx (SET_DEST (pat));
904*38fd1498Szrj       if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
905*38fd1498Szrj 	validate_change (insn, &SET_DEST (pat), newpat, 1);
906*38fd1498Szrj     }
907*38fd1498Szrj   else if (GET_CODE (pat) == PARALLEL)
908*38fd1498Szrj     for (i = 0; i < XVECLEN (pat, 0); i++)
909*38fd1498Szrj       {
910*38fd1498Szrj 	rtx s = XVECEXP (pat, 0, i);
911*38fd1498Szrj 
912*38fd1498Szrj 	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
913*38fd1498Szrj 	  {
914*38fd1498Szrj 	    newpat = simplify_rtx (SET_SRC (s));
915*38fd1498Szrj 	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
916*38fd1498Szrj 	      validate_change (insn, &SET_SRC (s), newpat, 1);
917*38fd1498Szrj 	    newpat = simplify_rtx (SET_DEST (s));
918*38fd1498Szrj 	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
919*38fd1498Szrj 	      validate_change (insn, &SET_DEST (s), newpat, 1);
920*38fd1498Szrj 	  }
921*38fd1498Szrj       }
922*38fd1498Szrj   return ((num_changes_pending () > 0) && (apply_change_group () > 0));
923*38fd1498Szrj }
924*38fd1498Szrj 
925*38fd1498Szrj /* Return 1 if the insn using CC0 set by INSN does not contain
926*38fd1498Szrj    any ordered tests applied to the condition codes.
927*38fd1498Szrj    EQ and NE tests do not count.  */
928*38fd1498Szrj 
929*38fd1498Szrj int
next_insn_tests_no_inequality(rtx_insn * insn)930*38fd1498Szrj next_insn_tests_no_inequality (rtx_insn *insn)
931*38fd1498Szrj {
932*38fd1498Szrj   rtx_insn *next = next_cc0_user (insn);
933*38fd1498Szrj 
934*38fd1498Szrj   /* If there is no next insn, we have to take the conservative choice.  */
935*38fd1498Szrj   if (next == 0)
936*38fd1498Szrj     return 0;
937*38fd1498Szrj 
938*38fd1498Szrj   return (INSN_P (next)
939*38fd1498Szrj 	  && ! inequality_comparisons_p (PATTERN (next)));
940*38fd1498Szrj }
941*38fd1498Szrj 
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return 0;

      /* A SUBREG of a hard register is only valid if the register can
	 actually change to the outer mode.  */
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return 0;

      /* Strip the SUBREG and validate what is inside it.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
1063*38fd1498Szrj 
1064*38fd1498Szrj /* Return 1 if OP is a valid memory address for a memory reference
1065*38fd1498Szrj    of mode MODE.
1066*38fd1498Szrj 
1067*38fd1498Szrj    The main use of this function is as a predicate in match_operand
1068*38fd1498Szrj    expressions in the machine description.  */
1069*38fd1498Szrj 
1070*38fd1498Szrj int
address_operand(rtx op,machine_mode mode)1071*38fd1498Szrj address_operand (rtx op, machine_mode mode)
1072*38fd1498Szrj {
1073*38fd1498Szrj   return memory_address_p (mode, op);
1074*38fd1498Szrj }
1075*38fd1498Szrj 
1076*38fd1498Szrj /* Return 1 if OP is a register reference of mode MODE.
1077*38fd1498Szrj    If MODE is VOIDmode, accept a register in any mode.
1078*38fd1498Szrj 
1079*38fd1498Szrj    The main use of this function is as a predicate in match_operand
1080*38fd1498Szrj    expressions in the machine description.  */
1081*38fd1498Szrj 
1082*38fd1498Szrj int
register_operand(rtx op,machine_mode mode)1083*38fd1498Szrj register_operand (rtx op, machine_mode mode)
1084*38fd1498Szrj {
1085*38fd1498Szrj   if (GET_CODE (op) == SUBREG)
1086*38fd1498Szrj     {
1087*38fd1498Szrj       rtx sub = SUBREG_REG (op);
1088*38fd1498Szrj 
1089*38fd1498Szrj       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1090*38fd1498Szrj 	 because it is guaranteed to be reloaded into one.
1091*38fd1498Szrj 	 Just make sure the MEM is valid in itself.
1092*38fd1498Szrj 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1093*38fd1498Szrj 	 but currently it does result from (SUBREG (REG)...) where the
1094*38fd1498Szrj 	 reg went on the stack.)  */
1095*38fd1498Szrj       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1096*38fd1498Szrj 	return 0;
1097*38fd1498Szrj     }
1098*38fd1498Szrj   else if (!REG_P (op))
1099*38fd1498Szrj     return 0;
1100*38fd1498Szrj   return general_operand (op, mode);
1101*38fd1498Szrj }
1102*38fd1498Szrj 
1103*38fd1498Szrj /* Return 1 for a register in Pmode; ignore the tested mode.  */
1104*38fd1498Szrj 
1105*38fd1498Szrj int
pmode_register_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)1106*38fd1498Szrj pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1107*38fd1498Szrj {
1108*38fd1498Szrj   return register_operand (op, Pmode);
1109*38fd1498Szrj }
1110*38fd1498Szrj 
1111*38fd1498Szrj /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1112*38fd1498Szrj    or a hard register.  */
1113*38fd1498Szrj 
1114*38fd1498Szrj int
scratch_operand(rtx op,machine_mode mode)1115*38fd1498Szrj scratch_operand (rtx op, machine_mode mode)
1116*38fd1498Szrj {
1117*38fd1498Szrj   if (GET_MODE (op) != mode && mode != VOIDmode)
1118*38fd1498Szrj     return 0;
1119*38fd1498Szrj 
1120*38fd1498Szrj   return (GET_CODE (op) == SCRATCH
1121*38fd1498Szrj 	  || (REG_P (op)
1122*38fd1498Szrj 	      && (lra_in_progress
1123*38fd1498Szrj 		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1124*38fd1498Szrj 		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1125*38fd1498Szrj }
1126*38fd1498Szrj 
1127*38fd1498Szrj /* Return 1 if OP is a valid immediate operand for mode MODE.
1128*38fd1498Szrj 
1129*38fd1498Szrj    The main use of this function is as a predicate in match_operand
1130*38fd1498Szrj    expressions in the machine description.  */
1131*38fd1498Szrj 
1132*38fd1498Szrj int
immediate_operand(rtx op,machine_mode mode)1133*38fd1498Szrj immediate_operand (rtx op, machine_mode mode)
1134*38fd1498Szrj {
1135*38fd1498Szrj   /* Don't accept CONST_INT or anything similar
1136*38fd1498Szrj      if the caller wants something floating.  */
1137*38fd1498Szrj   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1138*38fd1498Szrj       && GET_MODE_CLASS (mode) != MODE_INT
1139*38fd1498Szrj       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1140*38fd1498Szrj     return 0;
1141*38fd1498Szrj 
1142*38fd1498Szrj   if (CONST_INT_P (op)
1143*38fd1498Szrj       && mode != VOIDmode
1144*38fd1498Szrj       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1145*38fd1498Szrj     return 0;
1146*38fd1498Szrj 
1147*38fd1498Szrj   return (CONSTANT_P (op)
1148*38fd1498Szrj 	  && (GET_MODE (op) == mode || mode == VOIDmode
1149*38fd1498Szrj 	      || GET_MODE (op) == VOIDmode)
1150*38fd1498Szrj 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1151*38fd1498Szrj 	  && targetm.legitimate_constant_p (mode == VOIDmode
1152*38fd1498Szrj 					    ? GET_MODE (op)
1153*38fd1498Szrj 					    : mode, op));
1154*38fd1498Szrj }
1155*38fd1498Szrj 
1156*38fd1498Szrj /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */
1157*38fd1498Szrj 
1158*38fd1498Szrj int
const_int_operand(rtx op,machine_mode mode)1159*38fd1498Szrj const_int_operand (rtx op, machine_mode mode)
1160*38fd1498Szrj {
1161*38fd1498Szrj   if (!CONST_INT_P (op))
1162*38fd1498Szrj     return 0;
1163*38fd1498Szrj 
1164*38fd1498Szrj   if (mode != VOIDmode
1165*38fd1498Szrj       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1166*38fd1498Szrj     return 0;
1167*38fd1498Szrj 
1168*38fd1498Szrj   return 1;
1169*38fd1498Szrj }
1170*38fd1498Szrj 
1171*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  MODE, if not VOIDmode, must be a scalar integer mode
   (enforced by the as_a cast below).  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  /* Single-HWI constants are handled by the CONST_INT predicate.  */
  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      /* A CONST_WIDE_INT wider than the mode can never be valid.  */
      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  The top element must be properly
	     sign-extended from the mode's precision.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}
1204*38fd1498Szrj 
1205*38fd1498Szrj /* Returns 1 if OP is an operand that is a constant integer or constant
1206*38fd1498Szrj    floating-point number of MODE.  */
1207*38fd1498Szrj 
1208*38fd1498Szrj int
const_double_operand(rtx op,machine_mode mode)1209*38fd1498Szrj const_double_operand (rtx op, machine_mode mode)
1210*38fd1498Szrj {
1211*38fd1498Szrj   return (GET_CODE (op) == CONST_DOUBLE)
1212*38fd1498Szrj 	  && (GET_MODE (op) == mode || mode == VOIDmode);
1213*38fd1498Szrj }
1214*38fd1498Szrj #else
1215*38fd1498Szrj /* Returns 1 if OP is an operand that is a constant integer or constant
1216*38fd1498Szrj    floating-point number of MODE.  */
1217*38fd1498Szrj 
1218*38fd1498Szrj int
const_double_operand(rtx op,machine_mode mode)1219*38fd1498Szrj const_double_operand (rtx op, machine_mode mode)
1220*38fd1498Szrj {
1221*38fd1498Szrj   /* Don't accept CONST_INT or anything similar
1222*38fd1498Szrj      if the caller wants something floating.  */
1223*38fd1498Szrj   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1224*38fd1498Szrj       && GET_MODE_CLASS (mode) != MODE_INT
1225*38fd1498Szrj       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1226*38fd1498Szrj     return 0;
1227*38fd1498Szrj 
1228*38fd1498Szrj   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1229*38fd1498Szrj 	  && (mode == VOIDmode || GET_MODE (op) == mode
1230*38fd1498Szrj 	      || GET_MODE (op) == VOIDmode));
1231*38fd1498Szrj }
1232*38fd1498Szrj #endif
1233*38fd1498Szrj /* Return 1 if OP is a general operand that is not an immediate
1234*38fd1498Szrj    operand of mode MODE.  */
1235*38fd1498Szrj 
1236*38fd1498Szrj int
nonimmediate_operand(rtx op,machine_mode mode)1237*38fd1498Szrj nonimmediate_operand (rtx op, machine_mode mode)
1238*38fd1498Szrj {
1239*38fd1498Szrj   return (general_operand (op, mode) && ! CONSTANT_P (op));
1240*38fd1498Szrj }
1241*38fd1498Szrj 
1242*38fd1498Szrj /* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1243*38fd1498Szrj 
1244*38fd1498Szrj int
nonmemory_operand(rtx op,machine_mode mode)1245*38fd1498Szrj nonmemory_operand (rtx op, machine_mode mode)
1246*38fd1498Szrj {
1247*38fd1498Szrj   if (CONSTANT_P (op))
1248*38fd1498Szrj     return immediate_operand (op, mode);
1249*38fd1498Szrj   return register_operand (op, mode);
1250*38fd1498Szrj }
1251*38fd1498Szrj 
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  /* From here on OP is the address inside the MEM.  */
  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      /* No rounding: a plain pre-dec/pre-inc (STACK_PUSH_CODE) push.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* The push is rounded up, so it must be expressed as a PRE_MODIFY
	 of the stack pointer by exactly the rounded size (negated when
	 the stack grows downward).  */
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return 0;
    }

  /* Finally, the auto-modified register must be the stack pointer.  */
  return XEXP (op, 0) == stack_pointer_rtx;
}
1295*38fd1498Szrj 
1296*38fd1498Szrj /* Return 1 if OP is a valid operand that stands for popping a
1297*38fd1498Szrj    value of mode MODE off the stack.
1298*38fd1498Szrj 
1299*38fd1498Szrj    The main use of this function is as a predicate in match_operand
1300*38fd1498Szrj    expressions in the machine description.  */
1301*38fd1498Szrj 
1302*38fd1498Szrj int
pop_operand(rtx op,machine_mode mode)1303*38fd1498Szrj pop_operand (rtx op, machine_mode mode)
1304*38fd1498Szrj {
1305*38fd1498Szrj   if (!MEM_P (op))
1306*38fd1498Szrj     return 0;
1307*38fd1498Szrj 
1308*38fd1498Szrj   if (mode != VOIDmode && GET_MODE (op) != mode)
1309*38fd1498Szrj     return 0;
1310*38fd1498Szrj 
1311*38fd1498Szrj   op = XEXP (op, 0);
1312*38fd1498Szrj 
1313*38fd1498Szrj   if (GET_CODE (op) != STACK_POP_CODE)
1314*38fd1498Szrj     return 0;
1315*38fd1498Szrj 
1316*38fd1498Szrj   return XEXP (op, 0) == stack_pointer_rtx;
1317*38fd1498Szrj }
1318*38fd1498Szrj 
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Legacy targets define a macro that jumps to a label on success;
     such targets only support the generic address space.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Modern targets supply a hook instead.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
1337*38fd1498Szrj 
1338*38fd1498Szrj /* Return 1 if OP is a valid memory reference with mode MODE,
1339*38fd1498Szrj    including a valid address.
1340*38fd1498Szrj 
1341*38fd1498Szrj    The main use of this function is as a predicate in match_operand
1342*38fd1498Szrj    expressions in the machine description.  */
1343*38fd1498Szrj 
1344*38fd1498Szrj int
memory_operand(rtx op,machine_mode mode)1345*38fd1498Szrj memory_operand (rtx op, machine_mode mode)
1346*38fd1498Szrj {
1347*38fd1498Szrj   rtx inner;
1348*38fd1498Szrj 
1349*38fd1498Szrj   if (! reload_completed)
1350*38fd1498Szrj     /* Note that no SUBREG is a memory operand before end of reload pass,
1351*38fd1498Szrj        because (SUBREG (MEM...)) forces reloading into a register.  */
1352*38fd1498Szrj     return MEM_P (op) && general_operand (op, mode);
1353*38fd1498Szrj 
1354*38fd1498Szrj   if (mode != VOIDmode && GET_MODE (op) != mode)
1355*38fd1498Szrj     return 0;
1356*38fd1498Szrj 
1357*38fd1498Szrj   inner = op;
1358*38fd1498Szrj   if (GET_CODE (inner) == SUBREG)
1359*38fd1498Szrj     inner = SUBREG_REG (inner);
1360*38fd1498Szrj 
1361*38fd1498Szrj   return (MEM_P (inner) && general_operand (op, mode));
1362*38fd1498Szrj }
1363*38fd1498Szrj 
1364*38fd1498Szrj /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1365*38fd1498Szrj    that is, a memory reference whose address is a general_operand.  */
1366*38fd1498Szrj 
int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      /* The constant offset in the address must exactly cancel the
	 SUBREG_BYTE, so that the stripped address ADDR is the address
	 actually accessed through the SUBREG.  */
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  /* General case: a MEM whose address is itself a general_operand
     (i.e. a register or other operand, not an addressing expression).  */
  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1391*38fd1498Szrj 
1392*38fd1498Szrj /* Return 1 if this is an ordered comparison operator (not including
1393*38fd1498Szrj    ORDERED and UNORDERED).  */
1394*38fd1498Szrj 
1395*38fd1498Szrj int
ordered_comparison_operator(rtx op,machine_mode mode)1396*38fd1498Szrj ordered_comparison_operator (rtx op, machine_mode mode)
1397*38fd1498Szrj {
1398*38fd1498Szrj   if (mode != VOIDmode && GET_MODE (op) != mode)
1399*38fd1498Szrj     return false;
1400*38fd1498Szrj   switch (GET_CODE (op))
1401*38fd1498Szrj     {
1402*38fd1498Szrj     case EQ:
1403*38fd1498Szrj     case NE:
1404*38fd1498Szrj     case LT:
1405*38fd1498Szrj     case LTU:
1406*38fd1498Szrj     case LE:
1407*38fd1498Szrj     case LEU:
1408*38fd1498Szrj     case GT:
1409*38fd1498Szrj     case GTU:
1410*38fd1498Szrj     case GE:
1411*38fd1498Szrj     case GEU:
1412*38fd1498Szrj       return true;
1413*38fd1498Szrj     default:
1414*38fd1498Szrj       return false;
1415*38fd1498Szrj     }
1416*38fd1498Szrj }
1417*38fd1498Szrj 
1418*38fd1498Szrj /* Return 1 if this is a comparison operator.  This allows the use of
1419*38fd1498Szrj    MATCH_OPERATOR to recognize all the branch insns.  */
1420*38fd1498Szrj 
1421*38fd1498Szrj int
comparison_operator(rtx op,machine_mode mode)1422*38fd1498Szrj comparison_operator (rtx op, machine_mode mode)
1423*38fd1498Szrj {
1424*38fd1498Szrj   return ((mode == VOIDmode || GET_MODE (op) == mode)
1425*38fd1498Szrj 	  && COMPARISON_P (op));
1426*38fd1498Szrj }
1427*38fd1498Szrj 
1428*38fd1498Szrj /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1429*38fd1498Szrj 
1430*38fd1498Szrj rtx
extract_asm_operands(rtx body)1431*38fd1498Szrj extract_asm_operands (rtx body)
1432*38fd1498Szrj {
1433*38fd1498Szrj   rtx tmp;
1434*38fd1498Szrj   switch (GET_CODE (body))
1435*38fd1498Szrj     {
1436*38fd1498Szrj     case ASM_OPERANDS:
1437*38fd1498Szrj       return body;
1438*38fd1498Szrj 
1439*38fd1498Szrj     case SET:
1440*38fd1498Szrj       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1441*38fd1498Szrj       tmp = SET_SRC (body);
1442*38fd1498Szrj       if (GET_CODE (tmp) == ASM_OPERANDS)
1443*38fd1498Szrj 	return tmp;
1444*38fd1498Szrj       break;
1445*38fd1498Szrj 
1446*38fd1498Szrj     case PARALLEL:
1447*38fd1498Szrj       tmp = XVECEXP (body, 0, 0);
1448*38fd1498Szrj       if (GET_CODE (tmp) == ASM_OPERANDS)
1449*38fd1498Szrj 	return tmp;
1450*38fd1498Szrj       if (GET_CODE (tmp) == SET)
1451*38fd1498Szrj 	{
1452*38fd1498Szrj 	  tmp = SET_SRC (tmp);
1453*38fd1498Szrj 	  if (GET_CODE (tmp) == ASM_OPERANDS)
1454*38fd1498Szrj 	    return tmp;
1455*38fd1498Szrj 	}
1456*38fd1498Szrj       break;
1457*38fd1498Szrj 
1458*38fd1498Szrj     default:
1459*38fd1498Szrj       break;
1460*38fd1498Szrj     }
1461*38fd1498Szrj   return NULL;
1462*38fd1498Szrj }
1463*38fd1498Szrj 
1464*38fd1498Szrj /* If BODY is an insn body that uses ASM_OPERANDS,
1465*38fd1498Szrj    return the number of operands (both input and output) in the insn.
1466*38fd1498Szrj    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1467*38fd1498Szrj    return 0.
1468*38fd1498Szrj    Otherwise return -1.  */
1469*38fd1498Szrj 
int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      /* No ASM_OPERANDS found; BODY may still be a basic asm
	 (ASM_INPUT) wrapped in a PARALLEL with clobbers.  */
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  /* Total operand count: inputs + labels from the ASM_OPERANDS itself,
     plus one output per SET counted above.  */
  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
1542*38fd1498Szrj 
1543*38fd1498Szrj /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1544*38fd1498Szrj    copy its operands (both input and output) into the vector OPERANDS,
1545*38fd1498Szrj    the locations of the operands within the insn into the vector OPERAND_LOCS,
1546*38fd1498Szrj    and the constraints for the operands into CONSTRAINTS.
1547*38fd1498Szrj    Write the modes of the operands into MODES.
1548*38fd1498Szrj    Write the location info into LOC.
1549*38fd1498Szrj    Return the assembler-template.
1550*38fd1498Szrj    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1551*38fd1498Szrj    return the basic assembly string.
1552*38fd1498Szrj 
1553*38fd1498Szrj    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1554*38fd1498Szrj    we don't store that info.  */
1555*38fd1498Szrj 
const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  /* NBASE counts the output operands copied so far; inputs and labels
     are appended after them.  */
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    /* Basic asm with clobbers: there are no operands to copy,
	       just return the raw assembly string.  */
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Copy the input operands, which follow the outputs in the
     numbering.  */
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  /* Label operands come last; they carry an empty constraint and are
     always treated as Pmode.  */
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
1659*38fd1498Szrj 
1660*38fd1498Szrj /* Parse inline assembly string STRING and determine which operands are
1661*38fd1498Szrj    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1662*38fd1498Szrj    to true if operand I is referenced.
1663*38fd1498Szrj 
1664*38fd1498Szrj    This is intended to distinguish barrier-like asms such as:
1665*38fd1498Szrj 
1666*38fd1498Szrj       asm ("" : "=m" (...));
1667*38fd1498Szrj 
1668*38fd1498Szrj    from real references such as:
1669*38fd1498Szrj 
1670*38fd1498Szrj       asm ("sw\t$0, %0" : "=m" (...));  */
1671*38fd1498Szrj 
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A '%' at the very end of the template references nothing;
	   stop here rather than stepping past the terminating NUL and
	   reading out of bounds in the loop condition.  */
	if (*p == '\0')
	  return;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  /* Skip the escaped character (e.g. the second '%' of "%%").  */
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}
1703*38fd1498Szrj 
1704*38fd1498Szrj /* Check if an asm_operand matches its constraints.
1705*38fd1498Szrj    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1706*38fd1498Szrj 
int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  /* RESULT: > 0 ok, 0 bad, < 0 inconclusive.  */
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  /* Alternative separator; keep scanning — any alternative
	     that matches makes the operand acceptable.  */
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  /* Machine-description-defined constraint: dispatch on its
	     general category.  */
	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      /* Skip the rest of this (possibly multi-character) constraint
	 name.  If the string or alternative ends before the name does,
	 the constraint is malformed: reject the operand.  */
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}
1850*38fd1498Szrj 
1851*38fd1498Szrj /* Given an rtx *P, if it is a sum containing an integer constant term,
1852*38fd1498Szrj    return the location (type rtx *) of the pointer to that constant term.
1853*38fd1498Szrj    Otherwise, return a null pointer.  */
1854*38fd1498Szrj 
1855*38fd1498Szrj rtx *
find_constant_term_loc(rtx * p)1856*38fd1498Szrj find_constant_term_loc (rtx *p)
1857*38fd1498Szrj {
1858*38fd1498Szrj   rtx *tem;
1859*38fd1498Szrj   enum rtx_code code = GET_CODE (*p);
1860*38fd1498Szrj 
1861*38fd1498Szrj   /* If *P IS such a constant term, P is its location.  */
1862*38fd1498Szrj 
1863*38fd1498Szrj   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1864*38fd1498Szrj       || code == CONST)
1865*38fd1498Szrj     return p;
1866*38fd1498Szrj 
1867*38fd1498Szrj   /* Otherwise, if not a sum, it has no constant term.  */
1868*38fd1498Szrj 
1869*38fd1498Szrj   if (GET_CODE (*p) != PLUS)
1870*38fd1498Szrj     return 0;
1871*38fd1498Szrj 
1872*38fd1498Szrj   /* If one of the summands is constant, return its location.  */
1873*38fd1498Szrj 
1874*38fd1498Szrj   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1875*38fd1498Szrj       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1876*38fd1498Szrj     return p;
1877*38fd1498Szrj 
1878*38fd1498Szrj   /* Otherwise, check each summand for containing a constant term.  */
1879*38fd1498Szrj 
1880*38fd1498Szrj   if (XEXP (*p, 0) != 0)
1881*38fd1498Szrj     {
1882*38fd1498Szrj       tem = find_constant_term_loc (&XEXP (*p, 0));
1883*38fd1498Szrj       if (tem != 0)
1884*38fd1498Szrj 	return tem;
1885*38fd1498Szrj     }
1886*38fd1498Szrj 
1887*38fd1498Szrj   if (XEXP (*p, 1) != 0)
1888*38fd1498Szrj     {
1889*38fd1498Szrj       tem = find_constant_term_loc (&XEXP (*p, 1));
1890*38fd1498Szrj       if (tem != 0)
1891*38fd1498Szrj 	return tem;
1892*38fd1498Szrj     }
1893*38fd1498Szrj 
1894*38fd1498Szrj   return 0;
1895*38fd1498Szrj }
1896*38fd1498Szrj 
1897*38fd1498Szrj /* Return 1 if OP is a memory reference
1898*38fd1498Szrj    whose address contains no side effects
1899*38fd1498Szrj    and remains valid after the addition
1900*38fd1498Szrj    of a positive integer less than the
1901*38fd1498Szrj    size of the object being referenced.
1902*38fd1498Szrj 
1903*38fd1498Szrj    We assume that the original address is valid and do not check it.
1904*38fd1498Szrj 
1905*38fd1498Szrj    This uses strict_memory_address_p as a subroutine, so
1906*38fd1498Szrj    don't use it before reload.  */
1907*38fd1498Szrj 
1908*38fd1498Szrj int
offsettable_memref_p(rtx op)1909*38fd1498Szrj offsettable_memref_p (rtx op)
1910*38fd1498Szrj {
1911*38fd1498Szrj   return ((MEM_P (op))
1912*38fd1498Szrj 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1913*38fd1498Szrj 					       MEM_ADDR_SPACE (op)));
1914*38fd1498Szrj }
1915*38fd1498Szrj 
1916*38fd1498Szrj /* Similar, but don't require a strictly valid mem ref:
1917*38fd1498Szrj    consider pseudo-regs valid as index or base regs.  */
1918*38fd1498Szrj 
1919*38fd1498Szrj int
offsettable_nonstrict_memref_p(rtx op)1920*38fd1498Szrj offsettable_nonstrict_memref_p (rtx op)
1921*38fd1498Szrj {
1922*38fd1498Szrj   return ((MEM_P (op))
1923*38fd1498Szrj 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1924*38fd1498Szrj 					       MEM_ADDR_SPACE (op)));
1925*38fd1498Szrj }
1926*38fd1498Szrj 
1927*38fd1498Szrj /* Return 1 if Y is a memory address which contains no side effects
1928*38fd1498Szrj    and would remain valid for address space AS after the addition of
1929*38fd1498Szrj    a positive integer less than the size of that mode.
1930*38fd1498Szrj 
1931*38fd1498Szrj    We assume that the original address is valid and do not check it.
1932*38fd1498Szrj    We do check that it is valid for narrower modes.
1933*38fd1498Szrj 
1934*38fd1498Szrj    If STRICTP is nonzero, we require a strictly valid address,
1935*38fd1498Szrj    for the sake of use in reload.c.  */
1936*38fd1498Szrj 
int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Select the strict or non-strict validity check once up front.  */
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  /* A constant address stays valid under any small positive offset.  */
  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.
     Note this temporarily rewrites the constant term of Y in place
     and restores it before returning.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* An auto-increment address cannot take an extra offset.  */
  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
2021*38fd1498Szrj 
2022*38fd1498Szrj /* Return 1 if ADDR is an address-expression whose effect depends
2023*38fd1498Szrj    on the mode of the memory reference it is used in.
2024*38fd1498Szrj 
2025*38fd1498Szrj    ADDRSPACE is the address space associated with the address.
2026*38fd1498Szrj 
2027*38fd1498Szrj    Autoincrement addressing is a typical example of mode-dependence
2028*38fd1498Szrj    because the amount of the increment depends on the mode.  */
2029*38fd1498Szrj 
2030*38fd1498Szrj bool
mode_dependent_address_p(rtx addr,addr_space_t addrspace)2031*38fd1498Szrj mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2032*38fd1498Szrj {
2033*38fd1498Szrj   /* Auto-increment addressing with anything other than post_modify
2034*38fd1498Szrj      or pre_modify always introduces a mode dependency.  Catch such
2035*38fd1498Szrj      cases now instead of deferring to the target.  */
2036*38fd1498Szrj   if (GET_CODE (addr) == PRE_INC
2037*38fd1498Szrj       || GET_CODE (addr) == POST_INC
2038*38fd1498Szrj       || GET_CODE (addr) == PRE_DEC
2039*38fd1498Szrj       || GET_CODE (addr) == POST_DEC)
2040*38fd1498Szrj     return true;
2041*38fd1498Szrj 
2042*38fd1498Szrj   return targetm.mode_dependent_address_p (addr, addrspace);
2043*38fd1498Szrj }
2044*38fd1498Szrj 
2045*38fd1498Szrj /* Return true if boolean attribute ATTR is supported.  */
2046*38fd1498Szrj 
2047*38fd1498Szrj static bool
have_bool_attr(bool_attr attr)2048*38fd1498Szrj have_bool_attr (bool_attr attr)
2049*38fd1498Szrj {
2050*38fd1498Szrj   switch (attr)
2051*38fd1498Szrj     {
2052*38fd1498Szrj     case BA_ENABLED:
2053*38fd1498Szrj       return HAVE_ATTR_enabled;
2054*38fd1498Szrj     case BA_PREFERRED_FOR_SIZE:
2055*38fd1498Szrj       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2056*38fd1498Szrj     case BA_PREFERRED_FOR_SPEED:
2057*38fd1498Szrj       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2058*38fd1498Szrj     }
2059*38fd1498Szrj   gcc_unreachable ();
2060*38fd1498Szrj }
2061*38fd1498Szrj 
2062*38fd1498Szrj /* Return the value of ATTR for instruction INSN.  */
2063*38fd1498Szrj 
2064*38fd1498Szrj static bool
get_bool_attr(rtx_insn * insn,bool_attr attr)2065*38fd1498Szrj get_bool_attr (rtx_insn *insn, bool_attr attr)
2066*38fd1498Szrj {
2067*38fd1498Szrj   switch (attr)
2068*38fd1498Szrj     {
2069*38fd1498Szrj     case BA_ENABLED:
2070*38fd1498Szrj       return get_attr_enabled (insn);
2071*38fd1498Szrj     case BA_PREFERRED_FOR_SIZE:
2072*38fd1498Szrj       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2073*38fd1498Szrj     case BA_PREFERRED_FOR_SPEED:
2074*38fd1498Szrj       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2075*38fd1498Szrj     }
2076*38fd1498Szrj   gcc_unreachable ();
2077*38fd1498Szrj }
2078*38fd1498Szrj 
2079*38fd1498Szrj /* Like get_bool_attr_mask, but don't use the cache.  */
2080*38fd1498Szrj 
2081*38fd1498Szrj static alternative_mask
get_bool_attr_mask_uncached(rtx_insn * insn,bool_attr attr)2082*38fd1498Szrj get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2083*38fd1498Szrj {
2084*38fd1498Szrj   /* Temporarily install enough information for get_attr_<foo> to assume
2085*38fd1498Szrj      that the insn operands are already cached.  As above, the attribute
2086*38fd1498Szrj      mustn't depend on the values of operands, so we don't provide their
2087*38fd1498Szrj      real values here.  */
2088*38fd1498Szrj   rtx_insn *old_insn = recog_data.insn;
2089*38fd1498Szrj   int old_alternative = which_alternative;
2090*38fd1498Szrj 
2091*38fd1498Szrj   recog_data.insn = insn;
2092*38fd1498Szrj   alternative_mask mask = ALL_ALTERNATIVES;
2093*38fd1498Szrj   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2094*38fd1498Szrj   for (int i = 0; i < n_alternatives; i++)
2095*38fd1498Szrj     {
2096*38fd1498Szrj       which_alternative = i;
2097*38fd1498Szrj       if (!get_bool_attr (insn, attr))
2098*38fd1498Szrj 	mask &= ~ALTERNATIVE_BIT (i);
2099*38fd1498Szrj     }
2100*38fd1498Szrj 
2101*38fd1498Szrj   recog_data.insn = old_insn;
2102*38fd1498Szrj   which_alternative = old_alternative;
2103*38fd1498Szrj   return mask;
2104*38fd1498Szrj }
2105*38fd1498Szrj 
2106*38fd1498Szrj /* Return the mask of operand alternatives that are allowed for INSN
2107*38fd1498Szrj    by boolean attribute ATTR.  This mask depends only on INSN and on
2108*38fd1498Szrj    the current target; it does not depend on things like the values of
2109*38fd1498Szrj    operands.  */
2110*38fd1498Szrj 
2111*38fd1498Szrj static alternative_mask
get_bool_attr_mask(rtx_insn * insn,bool_attr attr)2112*38fd1498Szrj get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2113*38fd1498Szrj {
2114*38fd1498Szrj   /* Quick exit for asms and for targets that don't use these attributes.  */
2115*38fd1498Szrj   int code = INSN_CODE (insn);
2116*38fd1498Szrj   if (code < 0 || !have_bool_attr (attr))
2117*38fd1498Szrj     return ALL_ALTERNATIVES;
2118*38fd1498Szrj 
2119*38fd1498Szrj   /* Calling get_attr_<foo> can be expensive, so cache the mask
2120*38fd1498Szrj      for speed.  */
2121*38fd1498Szrj   if (!this_target_recog->x_bool_attr_masks[code][attr])
2122*38fd1498Szrj     this_target_recog->x_bool_attr_masks[code][attr]
2123*38fd1498Szrj       = get_bool_attr_mask_uncached (insn, attr);
2124*38fd1498Szrj   return this_target_recog->x_bool_attr_masks[code][attr];
2125*38fd1498Szrj }
2126*38fd1498Szrj 
2127*38fd1498Szrj /* Return the set of alternatives of INSN that are allowed by the current
2128*38fd1498Szrj    target.  */
2129*38fd1498Szrj 
2130*38fd1498Szrj alternative_mask
get_enabled_alternatives(rtx_insn * insn)2131*38fd1498Szrj get_enabled_alternatives (rtx_insn *insn)
2132*38fd1498Szrj {
2133*38fd1498Szrj   return get_bool_attr_mask (insn, BA_ENABLED);
2134*38fd1498Szrj }
2135*38fd1498Szrj 
2136*38fd1498Szrj /* Return the set of alternatives of INSN that are allowed by the current
2137*38fd1498Szrj    target and are preferred for the current size/speed optimization
2138*38fd1498Szrj    choice.  */
2139*38fd1498Szrj 
2140*38fd1498Szrj alternative_mask
get_preferred_alternatives(rtx_insn * insn)2141*38fd1498Szrj get_preferred_alternatives (rtx_insn *insn)
2142*38fd1498Szrj {
2143*38fd1498Szrj   if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2144*38fd1498Szrj     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2145*38fd1498Szrj   else
2146*38fd1498Szrj     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2147*38fd1498Szrj }
2148*38fd1498Szrj 
2149*38fd1498Szrj /* Return the set of alternatives of INSN that are allowed by the current
2150*38fd1498Szrj    target and are preferred for the size/speed optimization choice
2151*38fd1498Szrj    associated with BB.  Passing a separate BB is useful if INSN has not
2152*38fd1498Szrj    been emitted yet or if we are considering moving it to a different
2153*38fd1498Szrj    block.  */
2154*38fd1498Szrj 
2155*38fd1498Szrj alternative_mask
get_preferred_alternatives(rtx_insn * insn,basic_block bb)2156*38fd1498Szrj get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2157*38fd1498Szrj {
2158*38fd1498Szrj   if (optimize_bb_for_speed_p (bb))
2159*38fd1498Szrj     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2160*38fd1498Szrj   else
2161*38fd1498Szrj     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2162*38fd1498Szrj }
2163*38fd1498Szrj 
2164*38fd1498Szrj /* Assert that the cached boolean attributes for INSN are still accurate.
2165*38fd1498Szrj    The backend is required to define these attributes in a way that only
2166*38fd1498Szrj    depends on the current target (rather than operands, compiler phase,
2167*38fd1498Szrj    etc.).  */
2168*38fd1498Szrj 
2169*38fd1498Szrj bool
check_bool_attrs(rtx_insn * insn)2170*38fd1498Szrj check_bool_attrs (rtx_insn *insn)
2171*38fd1498Szrj {
2172*38fd1498Szrj   int code = INSN_CODE (insn);
2173*38fd1498Szrj   if (code >= 0)
2174*38fd1498Szrj     for (int i = 0; i <= BA_LAST; ++i)
2175*38fd1498Szrj       {
2176*38fd1498Szrj 	enum bool_attr attr = (enum bool_attr) i;
2177*38fd1498Szrj 	if (this_target_recog->x_bool_attr_masks[code][attr])
2178*38fd1498Szrj 	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2179*38fd1498Szrj 		      == get_bool_attr_mask_uncached (insn, attr));
2180*38fd1498Szrj       }
2181*38fd1498Szrj   return true;
2182*38fd1498Szrj }
2183*38fd1498Szrj 
2184*38fd1498Szrj /* Like extract_insn, but save insn extracted and don't extract again, when
2185*38fd1498Szrj    called again for the same insn expecting that recog_data still contain the
2186*38fd1498Szrj    valid information.  This is used primary by gen_attr infrastructure that
2187*38fd1498Szrj    often does extract insn again and again.  */
2188*38fd1498Szrj void
extract_insn_cached(rtx_insn * insn)2189*38fd1498Szrj extract_insn_cached (rtx_insn *insn)
2190*38fd1498Szrj {
2191*38fd1498Szrj   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2192*38fd1498Szrj     return;
2193*38fd1498Szrj   extract_insn (insn);
2194*38fd1498Szrj   recog_data.insn = insn;
2195*38fd1498Szrj }
2196*38fd1498Szrj 
2197*38fd1498Szrj /* Do uncached extract_insn, constrain_operands and complain about failures.
2198*38fd1498Szrj    This should be used when extracting a pre-existing constrained instruction
2199*38fd1498Szrj    if the caller wants to know which alternative was chosen.  */
2200*38fd1498Szrj void
extract_constrain_insn(rtx_insn * insn)2201*38fd1498Szrj extract_constrain_insn (rtx_insn *insn)
2202*38fd1498Szrj {
2203*38fd1498Szrj   extract_insn (insn);
2204*38fd1498Szrj   if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2205*38fd1498Szrj     fatal_insn_not_found (insn);
2206*38fd1498Szrj }
2207*38fd1498Szrj 
2208*38fd1498Szrj /* Do cached extract_insn, constrain_operands and complain about failures.
2209*38fd1498Szrj    Used by insn_attrtab.  */
2210*38fd1498Szrj void
extract_constrain_insn_cached(rtx_insn * insn)2211*38fd1498Szrj extract_constrain_insn_cached (rtx_insn *insn)
2212*38fd1498Szrj {
2213*38fd1498Szrj   extract_insn_cached (insn);
2214*38fd1498Szrj   if (which_alternative == -1
2215*38fd1498Szrj       && !constrain_operands (reload_completed,
2216*38fd1498Szrj 			      get_enabled_alternatives (insn)))
2217*38fd1498Szrj     fatal_insn_not_found (insn);
2218*38fd1498Szrj }
2219*38fd1498Szrj 
2220*38fd1498Szrj /* Do cached constrain_operands on INSN and complain about failures.  */
2221*38fd1498Szrj int
constrain_operands_cached(rtx_insn * insn,int strict)2222*38fd1498Szrj constrain_operands_cached (rtx_insn *insn, int strict)
2223*38fd1498Szrj {
2224*38fd1498Szrj   if (which_alternative == -1)
2225*38fd1498Szrj     return constrain_operands (strict, get_enabled_alternatives (insn));
2226*38fd1498Szrj   else
2227*38fd1498Szrj     return 1;
2228*38fd1498Szrj }
2229*38fd1498Szrj 
/* Analyze INSN and fill in recog_data.

   On return, recog_data describes INSN's operands (values, locations,
   modes, constraint strings, in/out direction) and its number of
   alternatives.  Aborts via fatal_insn_not_found if INSN is an
   unrecognizable non-asm, or an asm whose operands cannot be decoded.
   recog_data.insn is cleared and which_alternative set to -1 so a later
   constrain_operands call knows no alternative has been chosen yet.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      /* A PARALLEL is an asm if its first element is an asm form;
	 decode_asm_operands handles the rest of the vector.  */
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      /* asm_noperands returns a negative value if BODY is not a
	 well-formed asm with operands.  */
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      /* Alternatives are comma-separated within a constraint
		 string; count them from the first operand's string.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand by the leading modifier of its constraint:
     '=' is write-only, '+' is read-write, anything else read-only.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  /* Invalidate the cached-insn marker and force constrain_operands to
     pick an alternative afresh.  */
  recog_data.insn = NULL;
  which_alternative = -1;
}
2333*38fd1498Szrj 
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.

   The layout of OP_ALT_BASE is alternative-major: entry
   OP_ALT_BASE[alt * N_OPERANDS + op] describes operand OP in
   alternative ALT.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base,
			rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      /* P walks operand I's constraint string; each ',' starts the
	 next alternative's section of the same string.  */
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  /* An empty constraint (or an exhausted string, for insns with
	     fewer sections than alternatives) accepts anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      /* '#' comments out the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  /* Mildly disparaged alternative.  */
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  /* Strongly disparaged alternative.  */
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* Matching constraint: link this operand with the
		       (lower-numbered) operand it must equal.  */
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  /* Everything else is a machine-independent or target
		     constraint; classify it by its constraint type.  */
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
		    {
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
		      break;

		    case CT_CONST_INT:
		      break;

		    case CT_MEMORY:
		    case CT_SPECIAL_MEMORY:
		      op_alt[i].memory_ok = 1;
		      break;

		    case CT_ADDRESS:
		      /* For asms, only treat the operand as an address
			 if it actually looks like one.  */
		      if (oploc && !address_operand (*oploc[i], VOIDmode))
			break;

		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;

		    case CT_FIXED_FORM:
		      break;
		    }
		  break;
		}
	      /* Constraint letters can be multi-character.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
2455*38fd1498Szrj 
2456*38fd1498Szrj /* Return an array of operand_alternative instructions for
2457*38fd1498Szrj    instruction ICODE.  */
2458*38fd1498Szrj 
2459*38fd1498Szrj const operand_alternative *
preprocess_insn_constraints(unsigned int icode)2460*38fd1498Szrj preprocess_insn_constraints (unsigned int icode)
2461*38fd1498Szrj {
2462*38fd1498Szrj   gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2463*38fd1498Szrj   if (this_target_recog->x_op_alt[icode])
2464*38fd1498Szrj     return this_target_recog->x_op_alt[icode];
2465*38fd1498Szrj 
2466*38fd1498Szrj   int n_operands = insn_data[icode].n_operands;
2467*38fd1498Szrj   if (n_operands == 0)
2468*38fd1498Szrj     return 0;
2469*38fd1498Szrj   /* Always provide at least one alternative so that which_op_alt ()
2470*38fd1498Szrj      works correctly.  If the instruction has 0 alternatives (i.e. all
2471*38fd1498Szrj      constraint strings are empty) then each operand in this alternative
2472*38fd1498Szrj      will have anything_ok set.  */
2473*38fd1498Szrj   int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2474*38fd1498Szrj   int n_entries = n_operands * n_alternatives;
2475*38fd1498Szrj 
2476*38fd1498Szrj   operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2477*38fd1498Szrj   const char **constraints = XALLOCAVEC (const char *, n_operands);
2478*38fd1498Szrj 
2479*38fd1498Szrj   for (int i = 0; i < n_operands; ++i)
2480*38fd1498Szrj     constraints[i] = insn_data[icode].operand[i].constraint;
2481*38fd1498Szrj   preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2482*38fd1498Szrj 			  NULL);
2483*38fd1498Szrj 
2484*38fd1498Szrj   this_target_recog->x_op_alt[icode] = op_alt;
2485*38fd1498Szrj   return op_alt;
2486*38fd1498Szrj }
2487*38fd1498Szrj 
2488*38fd1498Szrj /* After calling extract_insn, you can use this function to extract some
2489*38fd1498Szrj    information from the constraint strings into a more usable form.
2490*38fd1498Szrj    The collected data is stored in recog_op_alt.  */
2491*38fd1498Szrj 
2492*38fd1498Szrj void
preprocess_constraints(rtx_insn * insn)2493*38fd1498Szrj preprocess_constraints (rtx_insn *insn)
2494*38fd1498Szrj {
2495*38fd1498Szrj   int icode = INSN_CODE (insn);
2496*38fd1498Szrj   if (icode >= 0)
2497*38fd1498Szrj     recog_op_alt = preprocess_insn_constraints (icode);
2498*38fd1498Szrj   else
2499*38fd1498Szrj     {
2500*38fd1498Szrj       int n_operands = recog_data.n_operands;
2501*38fd1498Szrj       int n_alternatives = recog_data.n_alternatives;
2502*38fd1498Szrj       int n_entries = n_operands * n_alternatives;
2503*38fd1498Szrj       memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2504*38fd1498Szrj       preprocess_constraints (n_operands, n_alternatives,
2505*38fd1498Szrj 			      recog_data.constraints, asm_op_alt,
2506*38fd1498Szrj 			      NULL);
2507*38fd1498Szrj       recog_op_alt = asm_op_alt;
2508*38fd1498Szrj     }
2509*38fd1498Szrj }
2510*38fd1498Szrj 
2511*38fd1498Szrj /* Check the operands of an insn against the insn's operand constraints
2512*38fd1498Szrj    and return 1 if they match any of the alternatives in ALTERNATIVES.
2513*38fd1498Szrj 
2514*38fd1498Szrj    The information about the insn's operands, constraints, operand modes
2515*38fd1498Szrj    etc. is obtained from the global variables set up by extract_insn.
2516*38fd1498Szrj 
2517*38fd1498Szrj    WHICH_ALTERNATIVE is set to a number which indicates which
2518*38fd1498Szrj    alternative of constraints was matched: 0 for the first alternative,
2519*38fd1498Szrj    1 for the next, etc.
2520*38fd1498Szrj 
2521*38fd1498Szrj    In addition, when two operands are required to match
2522*38fd1498Szrj    and it happens that the output operand is (reg) while the
2523*38fd1498Szrj    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2524*38fd1498Szrj    make the output operand look like the input.
2525*38fd1498Szrj    This is because the output operand is the one the template will print.
2526*38fd1498Szrj 
2527*38fd1498Szrj    This is used in final, just before printing the assembler code and by
2528*38fd1498Szrj    the routines that determine an insn's attribute.
2529*38fd1498Szrj 
2530*38fd1498Szrj    If STRICT is a positive nonzero value, it means that we have been
2531*38fd1498Szrj    called after reload has been completed.  In that case, we must
2532*38fd1498Szrj    do all checks strictly.  If it is zero, it means that we have been called
2533*38fd1498Szrj    before reload has completed.  In that case, we first try to see if we can
2534*38fd1498Szrj    find an alternative that matches strictly.  If not, we try again, this
2535*38fd1498Szrj    time assuming that reload will fix up the insn.  This provides a "best
2536*38fd1498Szrj    guess" for the alternative and is used to compute attributes of insns prior
2537*38fd1498Szrj    to reload.  A negative value of STRICT is used for this internal call.  */
2538*38fd1498Szrj 
/* Pair of operand numbers recorded by constrain_operands when an
   output operand (THIS_OP) must later be rewritten to look like the
   auto-inc/dec input operand (OTHER) it matches.  */
struct funny_match
{
  int this_op, other;
};
2543*38fd1498Szrj 
2544*38fd1498Szrj int
constrain_operands(int strict,alternative_mask alternatives)2545*38fd1498Szrj constrain_operands (int strict, alternative_mask alternatives)
2546*38fd1498Szrj {
2547*38fd1498Szrj   const char *constraints[MAX_RECOG_OPERANDS];
2548*38fd1498Szrj   int matching_operands[MAX_RECOG_OPERANDS];
2549*38fd1498Szrj   int earlyclobber[MAX_RECOG_OPERANDS];
2550*38fd1498Szrj   int c;
2551*38fd1498Szrj 
2552*38fd1498Szrj   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2553*38fd1498Szrj   int funny_match_index;
2554*38fd1498Szrj 
2555*38fd1498Szrj   which_alternative = 0;
2556*38fd1498Szrj   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2557*38fd1498Szrj     return 1;
2558*38fd1498Szrj 
2559*38fd1498Szrj   for (c = 0; c < recog_data.n_operands; c++)
2560*38fd1498Szrj     {
2561*38fd1498Szrj       constraints[c] = recog_data.constraints[c];
2562*38fd1498Szrj       matching_operands[c] = -1;
2563*38fd1498Szrj     }
2564*38fd1498Szrj 
2565*38fd1498Szrj   do
2566*38fd1498Szrj     {
2567*38fd1498Szrj       int seen_earlyclobber_at = -1;
2568*38fd1498Szrj       int opno;
2569*38fd1498Szrj       int lose = 0;
2570*38fd1498Szrj       funny_match_index = 0;
2571*38fd1498Szrj 
2572*38fd1498Szrj       if (!TEST_BIT (alternatives, which_alternative))
2573*38fd1498Szrj 	{
2574*38fd1498Szrj 	  int i;
2575*38fd1498Szrj 
2576*38fd1498Szrj 	  for (i = 0; i < recog_data.n_operands; i++)
2577*38fd1498Szrj 	    constraints[i] = skip_alternative (constraints[i]);
2578*38fd1498Szrj 
2579*38fd1498Szrj 	  which_alternative++;
2580*38fd1498Szrj 	  continue;
2581*38fd1498Szrj 	}
2582*38fd1498Szrj 
2583*38fd1498Szrj       for (opno = 0; opno < recog_data.n_operands; opno++)
2584*38fd1498Szrj 	{
2585*38fd1498Szrj 	  rtx op = recog_data.operand[opno];
2586*38fd1498Szrj 	  machine_mode mode = GET_MODE (op);
2587*38fd1498Szrj 	  const char *p = constraints[opno];
2588*38fd1498Szrj 	  int offset = 0;
2589*38fd1498Szrj 	  int win = 0;
2590*38fd1498Szrj 	  int val;
2591*38fd1498Szrj 	  int len;
2592*38fd1498Szrj 
2593*38fd1498Szrj 	  earlyclobber[opno] = 0;
2594*38fd1498Szrj 
2595*38fd1498Szrj 	  /* A unary operator may be accepted by the predicate, but it
2596*38fd1498Szrj 	     is irrelevant for matching constraints.  */
2597*38fd1498Szrj 	  if (UNARY_P (op))
2598*38fd1498Szrj 	    op = XEXP (op, 0);
2599*38fd1498Szrj 
2600*38fd1498Szrj 	  if (GET_CODE (op) == SUBREG)
2601*38fd1498Szrj 	    {
2602*38fd1498Szrj 	      if (REG_P (SUBREG_REG (op))
2603*38fd1498Szrj 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2604*38fd1498Szrj 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2605*38fd1498Szrj 					      GET_MODE (SUBREG_REG (op)),
2606*38fd1498Szrj 					      SUBREG_BYTE (op),
2607*38fd1498Szrj 					      GET_MODE (op));
2608*38fd1498Szrj 	      op = SUBREG_REG (op);
2609*38fd1498Szrj 	    }
2610*38fd1498Szrj 
2611*38fd1498Szrj 	  /* An empty constraint or empty alternative
2612*38fd1498Szrj 	     allows anything which matched the pattern.  */
2613*38fd1498Szrj 	  if (*p == 0 || *p == ',')
2614*38fd1498Szrj 	    win = 1;
2615*38fd1498Szrj 
2616*38fd1498Szrj 	  do
2617*38fd1498Szrj 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2618*38fd1498Szrj 	      {
2619*38fd1498Szrj 	      case '\0':
2620*38fd1498Szrj 		len = 0;
2621*38fd1498Szrj 		break;
2622*38fd1498Szrj 	      case ',':
2623*38fd1498Szrj 		c = '\0';
2624*38fd1498Szrj 		break;
2625*38fd1498Szrj 
2626*38fd1498Szrj 	      case '#':
2627*38fd1498Szrj 		/* Ignore rest of this alternative as far as
2628*38fd1498Szrj 		   constraint checking is concerned.  */
2629*38fd1498Szrj 		do
2630*38fd1498Szrj 		  p++;
2631*38fd1498Szrj 		while (*p && *p != ',');
2632*38fd1498Szrj 		len = 0;
2633*38fd1498Szrj 		break;
2634*38fd1498Szrj 
2635*38fd1498Szrj 	      case '&':
2636*38fd1498Szrj 		earlyclobber[opno] = 1;
2637*38fd1498Szrj 		if (seen_earlyclobber_at < 0)
2638*38fd1498Szrj 		  seen_earlyclobber_at = opno;
2639*38fd1498Szrj 		break;
2640*38fd1498Szrj 
2641*38fd1498Szrj 	      case '0':  case '1':  case '2':  case '3':  case '4':
2642*38fd1498Szrj 	      case '5':  case '6':  case '7':  case '8':  case '9':
2643*38fd1498Szrj 		{
2644*38fd1498Szrj 		  /* This operand must be the same as a previous one.
2645*38fd1498Szrj 		     This kind of constraint is used for instructions such
2646*38fd1498Szrj 		     as add when they take only two operands.
2647*38fd1498Szrj 
2648*38fd1498Szrj 		     Note that the lower-numbered operand is passed first.
2649*38fd1498Szrj 
2650*38fd1498Szrj 		     If we are not testing strictly, assume that this
2651*38fd1498Szrj 		     constraint will be satisfied.  */
2652*38fd1498Szrj 
2653*38fd1498Szrj 		  char *end;
2654*38fd1498Szrj 		  int match;
2655*38fd1498Szrj 
2656*38fd1498Szrj 		  match = strtoul (p, &end, 10);
2657*38fd1498Szrj 		  p = end;
2658*38fd1498Szrj 
2659*38fd1498Szrj 		  if (strict < 0)
2660*38fd1498Szrj 		    val = 1;
2661*38fd1498Szrj 		  else
2662*38fd1498Szrj 		    {
2663*38fd1498Szrj 		      rtx op1 = recog_data.operand[match];
2664*38fd1498Szrj 		      rtx op2 = recog_data.operand[opno];
2665*38fd1498Szrj 
2666*38fd1498Szrj 		      /* A unary operator may be accepted by the predicate,
2667*38fd1498Szrj 			 but it is irrelevant for matching constraints.  */
2668*38fd1498Szrj 		      if (UNARY_P (op1))
2669*38fd1498Szrj 			op1 = XEXP (op1, 0);
2670*38fd1498Szrj 		      if (UNARY_P (op2))
2671*38fd1498Szrj 			op2 = XEXP (op2, 0);
2672*38fd1498Szrj 
2673*38fd1498Szrj 		      val = operands_match_p (op1, op2);
2674*38fd1498Szrj 		    }
2675*38fd1498Szrj 
2676*38fd1498Szrj 		  matching_operands[opno] = match;
2677*38fd1498Szrj 		  matching_operands[match] = opno;
2678*38fd1498Szrj 
2679*38fd1498Szrj 		  if (val != 0)
2680*38fd1498Szrj 		    win = 1;
2681*38fd1498Szrj 
2682*38fd1498Szrj 		  /* If output is *x and input is *--x, arrange later
2683*38fd1498Szrj 		     to change the output to *--x as well, since the
2684*38fd1498Szrj 		     output op is the one that will be printed.  */
2685*38fd1498Szrj 		  if (val == 2 && strict > 0)
2686*38fd1498Szrj 		    {
2687*38fd1498Szrj 		      funny_match[funny_match_index].this_op = opno;
2688*38fd1498Szrj 		      funny_match[funny_match_index++].other = match;
2689*38fd1498Szrj 		    }
2690*38fd1498Szrj 		}
2691*38fd1498Szrj 		len = 0;
2692*38fd1498Szrj 		break;
2693*38fd1498Szrj 
2694*38fd1498Szrj 	      case 'p':
2695*38fd1498Szrj 		/* p is used for address_operands.  When we are called by
2696*38fd1498Szrj 		   gen_reload, no one will have checked that the address is
2697*38fd1498Szrj 		   strictly valid, i.e., that all pseudos requiring hard regs
2698*38fd1498Szrj 		   have gotten them.  */
2699*38fd1498Szrj 		if (strict <= 0
2700*38fd1498Szrj 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2701*38fd1498Szrj 						 op)))
2702*38fd1498Szrj 		  win = 1;
2703*38fd1498Szrj 		break;
2704*38fd1498Szrj 
2705*38fd1498Szrj 		/* No need to check general_operand again;
2706*38fd1498Szrj 		   it was done in insn-recog.c.  Well, except that reload
2707*38fd1498Szrj 		   doesn't check the validity of its replacements, but
2708*38fd1498Szrj 		   that should only matter when there's a bug.  */
2709*38fd1498Szrj 	      case 'g':
2710*38fd1498Szrj 		/* Anything goes unless it is a REG and really has a hard reg
2711*38fd1498Szrj 		   but the hard reg is not in the class GENERAL_REGS.  */
2712*38fd1498Szrj 		if (REG_P (op))
2713*38fd1498Szrj 		  {
2714*38fd1498Szrj 		    if (strict < 0
2715*38fd1498Szrj 			|| GENERAL_REGS == ALL_REGS
2716*38fd1498Szrj 			|| (reload_in_progress
2717*38fd1498Szrj 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2718*38fd1498Szrj 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2719*38fd1498Szrj 		      win = 1;
2720*38fd1498Szrj 		  }
2721*38fd1498Szrj 		else if (strict < 0 || general_operand (op, mode))
2722*38fd1498Szrj 		  win = 1;
2723*38fd1498Szrj 		break;
2724*38fd1498Szrj 
2725*38fd1498Szrj 	      default:
2726*38fd1498Szrj 		{
2727*38fd1498Szrj 		  enum constraint_num cn = lookup_constraint (p);
2728*38fd1498Szrj 		  enum reg_class cl = reg_class_for_constraint (cn);
2729*38fd1498Szrj 		  if (cl != NO_REGS)
2730*38fd1498Szrj 		    {
2731*38fd1498Szrj 		      if (strict < 0
2732*38fd1498Szrj 			  || (strict == 0
2733*38fd1498Szrj 			      && REG_P (op)
2734*38fd1498Szrj 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2735*38fd1498Szrj 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2736*38fd1498Szrj 			  || (REG_P (op)
2737*38fd1498Szrj 			      && reg_fits_class_p (op, cl, offset, mode)))
2738*38fd1498Szrj 		        win = 1;
2739*38fd1498Szrj 		    }
2740*38fd1498Szrj 
2741*38fd1498Szrj 		  else if (constraint_satisfied_p (op, cn))
2742*38fd1498Szrj 		    win = 1;
2743*38fd1498Szrj 
2744*38fd1498Szrj 		  else if (insn_extra_memory_constraint (cn)
2745*38fd1498Szrj 			   /* Every memory operand can be reloaded to fit.  */
2746*38fd1498Szrj 			   && ((strict < 0 && MEM_P (op))
2747*38fd1498Szrj 			       /* Before reload, accept what reload can turn
2748*38fd1498Szrj 				  into a mem.  */
2749*38fd1498Szrj 			       || (strict < 0 && CONSTANT_P (op))
2750*38fd1498Szrj 			       /* Before reload, accept a pseudo,
2751*38fd1498Szrj 				  since LRA can turn it into a mem.  */
2752*38fd1498Szrj 			       || (strict < 0 && targetm.lra_p () && REG_P (op)
2753*38fd1498Szrj 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2754*38fd1498Szrj 			       /* During reload, accept a pseudo  */
2755*38fd1498Szrj 			       || (reload_in_progress && REG_P (op)
2756*38fd1498Szrj 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2757*38fd1498Szrj 		    win = 1;
2758*38fd1498Szrj 		  else if (insn_extra_address_constraint (cn)
2759*38fd1498Szrj 			   /* Every address operand can be reloaded to fit.  */
2760*38fd1498Szrj 			   && strict < 0)
2761*38fd1498Szrj 		    win = 1;
2762*38fd1498Szrj 		  /* Cater to architectures like IA-64 that define extra memory
2763*38fd1498Szrj 		     constraints without using define_memory_constraint.  */
2764*38fd1498Szrj 		  else if (reload_in_progress
2765*38fd1498Szrj 			   && REG_P (op)
2766*38fd1498Szrj 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2767*38fd1498Szrj 			   && reg_renumber[REGNO (op)] < 0
2768*38fd1498Szrj 			   && reg_equiv_mem (REGNO (op)) != 0
2769*38fd1498Szrj 			   && constraint_satisfied_p
2770*38fd1498Szrj 			      (reg_equiv_mem (REGNO (op)), cn))
2771*38fd1498Szrj 		    win = 1;
2772*38fd1498Szrj 		  break;
2773*38fd1498Szrj 		}
2774*38fd1498Szrj 	      }
2775*38fd1498Szrj 	  while (p += len, c);
2776*38fd1498Szrj 
2777*38fd1498Szrj 	  constraints[opno] = p;
2778*38fd1498Szrj 	  /* If this operand did not win somehow,
2779*38fd1498Szrj 	     this alternative loses.  */
2780*38fd1498Szrj 	  if (! win)
2781*38fd1498Szrj 	    lose = 1;
2782*38fd1498Szrj 	}
2783*38fd1498Szrj       /* This alternative won; the operands are ok.
2784*38fd1498Szrj 	 Change whichever operands this alternative says to change.  */
2785*38fd1498Szrj       if (! lose)
2786*38fd1498Szrj 	{
2787*38fd1498Szrj 	  int opno, eopno;
2788*38fd1498Szrj 
2789*38fd1498Szrj 	  /* See if any earlyclobber operand conflicts with some other
2790*38fd1498Szrj 	     operand.  */
2791*38fd1498Szrj 
2792*38fd1498Szrj 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2793*38fd1498Szrj 	    for (eopno = seen_earlyclobber_at;
2794*38fd1498Szrj 		 eopno < recog_data.n_operands;
2795*38fd1498Szrj 		 eopno++)
2796*38fd1498Szrj 	      /* Ignore earlyclobber operands now in memory,
2797*38fd1498Szrj 		 because we would often report failure when we have
2798*38fd1498Szrj 		 two memory operands, one of which was formerly a REG.  */
2799*38fd1498Szrj 	      if (earlyclobber[eopno]
2800*38fd1498Szrj 		  && REG_P (recog_data.operand[eopno]))
2801*38fd1498Szrj 		for (opno = 0; opno < recog_data.n_operands; opno++)
2802*38fd1498Szrj 		  if ((MEM_P (recog_data.operand[opno])
2803*38fd1498Szrj 		       || recog_data.operand_type[opno] != OP_OUT)
2804*38fd1498Szrj 		      && opno != eopno
2805*38fd1498Szrj 		      /* Ignore things like match_operator operands.  */
2806*38fd1498Szrj 		      && *recog_data.constraints[opno] != 0
2807*38fd1498Szrj 		      && ! (matching_operands[opno] == eopno
2808*38fd1498Szrj 			    && operands_match_p (recog_data.operand[opno],
2809*38fd1498Szrj 						 recog_data.operand[eopno]))
2810*38fd1498Szrj 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2811*38fd1498Szrj 						   recog_data.operand[eopno]))
2812*38fd1498Szrj 		    lose = 1;
2813*38fd1498Szrj 
2814*38fd1498Szrj 	  if (! lose)
2815*38fd1498Szrj 	    {
2816*38fd1498Szrj 	      while (--funny_match_index >= 0)
2817*38fd1498Szrj 		{
2818*38fd1498Szrj 		  recog_data.operand[funny_match[funny_match_index].other]
2819*38fd1498Szrj 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2820*38fd1498Szrj 		}
2821*38fd1498Szrj 
2822*38fd1498Szrj 	      /* For operands without < or > constraints reject side-effects.  */
2823*38fd1498Szrj 	      if (AUTO_INC_DEC && recog_data.is_asm)
2824*38fd1498Szrj 		{
2825*38fd1498Szrj 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2826*38fd1498Szrj 		    if (MEM_P (recog_data.operand[opno]))
2827*38fd1498Szrj 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2828*38fd1498Szrj 			{
2829*38fd1498Szrj 			case PRE_INC:
2830*38fd1498Szrj 			case POST_INC:
2831*38fd1498Szrj 			case PRE_DEC:
2832*38fd1498Szrj 			case POST_DEC:
2833*38fd1498Szrj 			case PRE_MODIFY:
2834*38fd1498Szrj 			case POST_MODIFY:
2835*38fd1498Szrj 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2836*38fd1498Szrj 			      && strchr (recog_data.constraints[opno], '>')
2837*38fd1498Szrj 				 == NULL)
2838*38fd1498Szrj 			    return 0;
2839*38fd1498Szrj 			  break;
2840*38fd1498Szrj 			default:
2841*38fd1498Szrj 			  break;
2842*38fd1498Szrj 			}
2843*38fd1498Szrj 		}
2844*38fd1498Szrj 
2845*38fd1498Szrj 	      return 1;
2846*38fd1498Szrj 	    }
2847*38fd1498Szrj 	}
2848*38fd1498Szrj 
2849*38fd1498Szrj       which_alternative++;
2850*38fd1498Szrj     }
2851*38fd1498Szrj   while (which_alternative < recog_data.n_alternatives);
2852*38fd1498Szrj 
2853*38fd1498Szrj   which_alternative = -1;
2854*38fd1498Szrj   /* If we are about to reject this, but we are not to test strictly,
2855*38fd1498Szrj      try a very loose test.  Only return failure if it fails also.  */
2856*38fd1498Szrj   if (strict == 0)
2857*38fd1498Szrj     return constrain_operands (-1, alternatives);
2858*38fd1498Szrj   else
2859*38fd1498Szrj     return 0;
2860*38fd1498Szrj }
2861*38fd1498Szrj 
2862*38fd1498Szrj /* Return true iff OPERAND (assumed to be a REG rtx)
2863*38fd1498Szrj    is a hard reg in class CLASS when its regno is offset by OFFSET
2864*38fd1498Szrj    and changed to mode MODE.
2865*38fd1498Szrj    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2866*38fd1498Szrj 
2867*38fd1498Szrj bool
reg_fits_class_p(const_rtx operand,reg_class_t cl,int offset,machine_mode mode)2868*38fd1498Szrj reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2869*38fd1498Szrj 		  machine_mode mode)
2870*38fd1498Szrj {
2871*38fd1498Szrj   unsigned int regno = REGNO (operand);
2872*38fd1498Szrj 
2873*38fd1498Szrj   if (cl == NO_REGS)
2874*38fd1498Szrj     return false;
2875*38fd1498Szrj 
2876*38fd1498Szrj   /* Regno must not be a pseudo register.  Offset may be negative.  */
2877*38fd1498Szrj   return (HARD_REGISTER_NUM_P (regno)
2878*38fd1498Szrj 	  && HARD_REGISTER_NUM_P (regno + offset)
2879*38fd1498Szrj 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2880*38fd1498Szrj 				regno + offset));
2881*38fd1498Szrj }
2882*38fd1498Szrj 
2883*38fd1498Szrj /* Split single instruction.  Helper function for split_all_insns and
2884*38fd1498Szrj    split_all_insns_noflow.  Return last insn in the sequence if successful,
2885*38fd1498Szrj    or NULL if unsuccessful.  */
2886*38fd1498Szrj 
2887*38fd1498Szrj static rtx_insn *
split_insn(rtx_insn * insn)2888*38fd1498Szrj split_insn (rtx_insn *insn)
2889*38fd1498Szrj {
2890*38fd1498Szrj   /* Split insns here to get max fine-grain parallelism.  */
2891*38fd1498Szrj   rtx_insn *first = PREV_INSN (insn);
2892*38fd1498Szrj   rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2893*38fd1498Szrj   rtx insn_set, last_set, note;
2894*38fd1498Szrj 
2895*38fd1498Szrj   if (last == insn)
2896*38fd1498Szrj     return NULL;
2897*38fd1498Szrj 
2898*38fd1498Szrj   /* If the original instruction was a single set that was known to be
2899*38fd1498Szrj      equivalent to a constant, see if we can say the same about the last
2900*38fd1498Szrj      instruction in the split sequence.  The two instructions must set
2901*38fd1498Szrj      the same destination.  */
2902*38fd1498Szrj   insn_set = single_set (insn);
2903*38fd1498Szrj   if (insn_set)
2904*38fd1498Szrj     {
2905*38fd1498Szrj       last_set = single_set (last);
2906*38fd1498Szrj       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2907*38fd1498Szrj 	{
2908*38fd1498Szrj 	  note = find_reg_equal_equiv_note (insn);
2909*38fd1498Szrj 	  if (note && CONSTANT_P (XEXP (note, 0)))
2910*38fd1498Szrj 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2911*38fd1498Szrj 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2912*38fd1498Szrj 	    set_unique_reg_note (last, REG_EQUAL,
2913*38fd1498Szrj 				 copy_rtx (SET_SRC (insn_set)));
2914*38fd1498Szrj 	}
2915*38fd1498Szrj     }
2916*38fd1498Szrj 
2917*38fd1498Szrj   /* try_split returns the NOTE that INSN became.  */
2918*38fd1498Szrj   SET_INSN_DELETED (insn);
2919*38fd1498Szrj 
2920*38fd1498Szrj   /* ??? Coddle to md files that generate subregs in post-reload
2921*38fd1498Szrj      splitters instead of computing the proper hard register.  */
2922*38fd1498Szrj   if (reload_completed && first != last)
2923*38fd1498Szrj     {
2924*38fd1498Szrj       first = NEXT_INSN (first);
2925*38fd1498Szrj       for (;;)
2926*38fd1498Szrj 	{
2927*38fd1498Szrj 	  if (INSN_P (first))
2928*38fd1498Szrj 	    cleanup_subreg_operands (first);
2929*38fd1498Szrj 	  if (first == last)
2930*38fd1498Szrj 	    break;
2931*38fd1498Szrj 	  first = NEXT_INSN (first);
2932*38fd1498Szrj 	}
2933*38fd1498Szrj     }
2934*38fd1498Szrj 
2935*38fd1498Szrj   return last;
2936*38fd1498Szrj }
2937*38fd1498Szrj 
/* Split all insns in the function.  Each basic block in which some insn
   was split is recorded in a bitmap so its sub-basic-blocks can be
   rebuilt afterwards; if any split insn carried a REG_EH_REGION note, a
   full CFG cleanup is also run to remove newly unreachable EH blocks.  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  /* Bitmap of blocks that had at least one insn split.  */
  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));

	  /* If INSN has a REG_EH_REGION note and we split INSN, the
	     resulting split may not have/need REG_EH_REGION notes.

	     If that happens and INSN was the last reference to the
	     given EH region, then the EH region will become unreachable.
	     We can not leave the unreachable blocks in the CFG as that
	     will trigger a checking failure.

	     So track if INSN has a REG_EH_REGION note.  If so and we
	     split INSN, then trigger a CFG cleanup.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		      delete_insn_and_edges (insn);
		  if (note)
		    need_cfg_cleanup = true;
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		      if (note)
			need_cfg_cleanup = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop an REG_EH_REGION if it potentially
	 trapped in its original form, but does not in its split
	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
	 store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
	cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}
3023*38fd1498Szrj 
3024*38fd1498Szrj /* Same as split_all_insns, but do not expect CFG to be available.
3025*38fd1498Szrj    Used by machine dependent reorg passes.  */
3026*38fd1498Szrj 
3027*38fd1498Szrj unsigned int
split_all_insns_noflow(void)3028*38fd1498Szrj split_all_insns_noflow (void)
3029*38fd1498Szrj {
3030*38fd1498Szrj   rtx_insn *next, *insn;
3031*38fd1498Szrj 
3032*38fd1498Szrj   for (insn = get_insns (); insn; insn = next)
3033*38fd1498Szrj     {
3034*38fd1498Szrj       next = NEXT_INSN (insn);
3035*38fd1498Szrj       if (INSN_P (insn))
3036*38fd1498Szrj 	{
3037*38fd1498Szrj 	  /* Don't split no-op move insns.  These should silently
3038*38fd1498Szrj 	     disappear later in final.  Splitting such insns would
3039*38fd1498Szrj 	     break the code that handles LIBCALL blocks.  */
3040*38fd1498Szrj 	  rtx set = single_set (insn);
3041*38fd1498Szrj 	  if (set && set_noop_p (set))
3042*38fd1498Szrj 	    {
3043*38fd1498Szrj 	      /* Nops get in the way while scheduling, so delete them
3044*38fd1498Szrj 		 now if register allocation has already been done.  It
3045*38fd1498Szrj 		 is too risky to try to do this before register
3046*38fd1498Szrj 		 allocation, and there are unlikely to be very many
3047*38fd1498Szrj 		 nops then anyways.
3048*38fd1498Szrj 
3049*38fd1498Szrj 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3050*38fd1498Szrj 	      if (reload_completed)
3051*38fd1498Szrj 		delete_insn_and_edges (insn);
3052*38fd1498Szrj 	    }
3053*38fd1498Szrj 	  else
3054*38fd1498Szrj 	    split_insn (insn);
3055*38fd1498Szrj 	}
3056*38fd1498Szrj     }
3057*38fd1498Szrj   return 0;
3058*38fd1498Szrj }
3059*38fd1498Szrj 
/* One entry of the peephole2 scan window: a candidate insn together with
   the set of registers live immediately before it.  */
struct peep2_insn_data
{
  rtx_insn *insn;		/* The insn itself.  */
  regset live_before;		/* Registers live just before INSN.  */
};
3065*38fd1498Szrj 
/* Ring buffer of scanned insns plus their liveness data (wrapped by
   peep2_buf_position); the one extra slot holds the end-of-block
   marker PEEP2_EOB defined below.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index into peep2_insn_data of the start of the current match window.  */
static int peep2_current;

/* NOTE(review): these flags appear to request post-pass fixups (jump
   label rebuild / CFG cleanup); their setters are elsewhere in this
   file — confirm against the peephole2 pass body.  */
static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx
3078*38fd1498Szrj 
3079*38fd1498Szrj /* Wrap N to fit into the peep2_insn_data buffer.  */
3080*38fd1498Szrj 
3081*38fd1498Szrj static int
peep2_buf_position(int n)3082*38fd1498Szrj peep2_buf_position (int n)
3083*38fd1498Szrj {
3084*38fd1498Szrj   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3085*38fd1498Szrj     n -= MAX_INSNS_PER_PEEP2 + 1;
3086*38fd1498Szrj   return n;
3087*38fd1498Szrj }
3088*38fd1498Szrj 
3089*38fd1498Szrj /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3090*38fd1498Szrj    does not exist.  Used by the recognizer to find the next insn to match
3091*38fd1498Szrj    in a multi-insn pattern.  */
3092*38fd1498Szrj 
3093*38fd1498Szrj rtx_insn *
peep2_next_insn(int n)3094*38fd1498Szrj peep2_next_insn (int n)
3095*38fd1498Szrj {
3096*38fd1498Szrj   gcc_assert (n <= peep2_current_count);
3097*38fd1498Szrj 
3098*38fd1498Szrj   n = peep2_buf_position (peep2_current + n);
3099*38fd1498Szrj 
3100*38fd1498Szrj   return peep2_insn_data[n].insn;
3101*38fd1498Szrj }
3102*38fd1498Szrj 
3103*38fd1498Szrj /* Return true if REGNO is dead before the Nth non-note insn
3104*38fd1498Szrj    after `current'.  */
3105*38fd1498Szrj 
3106*38fd1498Szrj int
peep2_regno_dead_p(int ofs,int regno)3107*38fd1498Szrj peep2_regno_dead_p (int ofs, int regno)
3108*38fd1498Szrj {
3109*38fd1498Szrj   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3110*38fd1498Szrj 
3111*38fd1498Szrj   ofs = peep2_buf_position (peep2_current + ofs);
3112*38fd1498Szrj 
3113*38fd1498Szrj   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3114*38fd1498Szrj 
3115*38fd1498Szrj   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3116*38fd1498Szrj }
3117*38fd1498Szrj 
3118*38fd1498Szrj /* Similarly for a REG.  */
3119*38fd1498Szrj 
3120*38fd1498Szrj int
peep2_reg_dead_p(int ofs,rtx reg)3121*38fd1498Szrj peep2_reg_dead_p (int ofs, rtx reg)
3122*38fd1498Szrj {
3123*38fd1498Szrj   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3124*38fd1498Szrj 
3125*38fd1498Szrj   ofs = peep2_buf_position (peep2_current + ofs);
3126*38fd1498Szrj 
3127*38fd1498Szrj   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3128*38fd1498Szrj 
3129*38fd1498Szrj   unsigned int end_regno = END_REGNO (reg);
3130*38fd1498Szrj   for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3131*38fd1498Szrj     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3132*38fd1498Szrj       return 0;
3133*38fd1498Szrj   return 1;
3134*38fd1498Szrj }
3135*38fd1498Szrj 
/* Regno offset to be used in the register search.  Advanced by
   peep2_find_free_register after each successful allocation so that free
   registers are handed out round-robin rather than always starting from
   register 0.  */
static int search_ofs;
3138*38fd1498Szrj 
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available from the beginning of the insn at buffer
   offset FROM and remains available until the end of the insn at buffer
   offset TO.  FROM and TO are offsets relative to `peep2_current', as in
   peep2_next_insn; when FROM == TO the only condition is that the register
   must be available before that one insn.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  /* Accumulate into LIVE everything defined by the insns in [FROM, TO);
     those registers are unavailable for the whole span.  */
  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));

      from = peep2_buf_position (from + 1);
    }

  /* NO_REGS if CLASS_STR is not a register-class constraint; no register
     then passes the class membership test below.  */
  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (!targetm.hard_regno_mode_ok (regno, mode))
	continue;

      /* Every hard register the MODE-sized value would occupy must pass
	 all of the checks below.  */
      success = 1;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
	{
	  /* Don't allocate fixed registers.  */
	  if (fixed_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Don't allocate global registers.  */
	  if (global_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Make sure the register is of the right class.  */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = 0;
	      break;
	    }
	  /* And that we don't create an extra save/restore.  */
	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* Let the target veto its use as a scratch register.  */
	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions.  */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = 0;
	      break;
	    }

	  /* Reject registers already handed out (REG_SET) or live in
	     the span we computed above.  */
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}

      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
3266*38fd1498Szrj 
3267*38fd1498Szrj /* Forget all currently tracked instructions, only remember current
3268*38fd1498Szrj    LIVE regset.  */
3269*38fd1498Szrj 
3270*38fd1498Szrj static void
peep2_reinit_state(regset live)3271*38fd1498Szrj peep2_reinit_state (regset live)
3272*38fd1498Szrj {
3273*38fd1498Szrj   int i;
3274*38fd1498Szrj 
3275*38fd1498Szrj   /* Indicate that all slots except the last holds invalid data.  */
3276*38fd1498Szrj   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3277*38fd1498Szrj     peep2_insn_data[i].insn = NULL;
3278*38fd1498Szrj   peep2_current_count = 0;
3279*38fd1498Szrj 
3280*38fd1498Szrj   /* Indicate that the last slot contains live_after data.  */
3281*38fd1498Szrj   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3282*38fd1498Szrj   peep2_current = MAX_INSNS_PER_PEEP2;
3283*38fd1498Szrj 
3284*38fd1498Szrj   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3285*38fd1498Szrj }
3286*38fd1498Szrj 
3287*38fd1498Szrj /* While scanning basic block BB, we found a match of length MATCH_LEN,
3288*38fd1498Szrj    starting at INSN.  Perform the replacement, removing the old insns and
3289*38fd1498Szrj    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3290*38fd1498Szrj    if the replacement is rejected.  */
3291*38fd1498Szrj 
3292*38fd1498Szrj static rtx_insn *
peep2_attempt(basic_block bb,rtx_insn * insn,int match_len,rtx_insn * attempt)3293*38fd1498Szrj peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3294*38fd1498Szrj {
3295*38fd1498Szrj   int i;
3296*38fd1498Szrj   rtx_insn *last, *before_try, *x;
3297*38fd1498Szrj   rtx eh_note, as_note;
3298*38fd1498Szrj   rtx_insn *old_insn;
3299*38fd1498Szrj   rtx_insn *new_insn;
3300*38fd1498Szrj   bool was_call = false;
3301*38fd1498Szrj 
3302*38fd1498Szrj   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3303*38fd1498Szrj      match more than one insn, or to be split into more than one insn.  */
3304*38fd1498Szrj   old_insn = peep2_insn_data[peep2_current].insn;
3305*38fd1498Szrj   if (RTX_FRAME_RELATED_P (old_insn))
3306*38fd1498Szrj     {
3307*38fd1498Szrj       bool any_note = false;
3308*38fd1498Szrj       rtx note;
3309*38fd1498Szrj 
3310*38fd1498Szrj       if (match_len != 0)
3311*38fd1498Szrj 	return NULL;
3312*38fd1498Szrj 
3313*38fd1498Szrj       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3314*38fd1498Szrj 	 may be in the stream for the purpose of register allocation.  */
3315*38fd1498Szrj       if (active_insn_p (attempt))
3316*38fd1498Szrj 	new_insn = attempt;
3317*38fd1498Szrj       else
3318*38fd1498Szrj 	new_insn = next_active_insn (attempt);
3319*38fd1498Szrj       if (next_active_insn (new_insn))
3320*38fd1498Szrj 	return NULL;
3321*38fd1498Szrj 
3322*38fd1498Szrj       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3323*38fd1498Szrj       RTX_FRAME_RELATED_P (new_insn) = 1;
3324*38fd1498Szrj 
3325*38fd1498Szrj       /* Allow the backend to fill in a note during the split.  */
3326*38fd1498Szrj       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3327*38fd1498Szrj 	switch (REG_NOTE_KIND (note))
3328*38fd1498Szrj 	  {
3329*38fd1498Szrj 	  case REG_FRAME_RELATED_EXPR:
3330*38fd1498Szrj 	  case REG_CFA_DEF_CFA:
3331*38fd1498Szrj 	  case REG_CFA_ADJUST_CFA:
3332*38fd1498Szrj 	  case REG_CFA_OFFSET:
3333*38fd1498Szrj 	  case REG_CFA_REGISTER:
3334*38fd1498Szrj 	  case REG_CFA_EXPRESSION:
3335*38fd1498Szrj 	  case REG_CFA_RESTORE:
3336*38fd1498Szrj 	  case REG_CFA_SET_VDRAP:
3337*38fd1498Szrj 	    any_note = true;
3338*38fd1498Szrj 	    break;
3339*38fd1498Szrj 	  default:
3340*38fd1498Szrj 	    break;
3341*38fd1498Szrj 	  }
3342*38fd1498Szrj 
3343*38fd1498Szrj       /* If the backend didn't supply a note, copy one over.  */
3344*38fd1498Szrj       if (!any_note)
3345*38fd1498Szrj         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3346*38fd1498Szrj 	  switch (REG_NOTE_KIND (note))
3347*38fd1498Szrj 	    {
3348*38fd1498Szrj 	    case REG_FRAME_RELATED_EXPR:
3349*38fd1498Szrj 	    case REG_CFA_DEF_CFA:
3350*38fd1498Szrj 	    case REG_CFA_ADJUST_CFA:
3351*38fd1498Szrj 	    case REG_CFA_OFFSET:
3352*38fd1498Szrj 	    case REG_CFA_REGISTER:
3353*38fd1498Szrj 	    case REG_CFA_EXPRESSION:
3354*38fd1498Szrj 	    case REG_CFA_RESTORE:
3355*38fd1498Szrj 	    case REG_CFA_SET_VDRAP:
3356*38fd1498Szrj 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3357*38fd1498Szrj 	      any_note = true;
3358*38fd1498Szrj 	      break;
3359*38fd1498Szrj 	    default:
3360*38fd1498Szrj 	      break;
3361*38fd1498Szrj 	    }
3362*38fd1498Szrj 
3363*38fd1498Szrj       /* If there still isn't a note, make sure the unwind info sees the
3364*38fd1498Szrj 	 same expression as before the split.  */
3365*38fd1498Szrj       if (!any_note)
3366*38fd1498Szrj 	{
3367*38fd1498Szrj 	  rtx old_set, new_set;
3368*38fd1498Szrj 
3369*38fd1498Szrj 	  /* The old insn had better have been simple, or annotated.  */
3370*38fd1498Szrj 	  old_set = single_set (old_insn);
3371*38fd1498Szrj 	  gcc_assert (old_set != NULL);
3372*38fd1498Szrj 
3373*38fd1498Szrj 	  new_set = single_set (new_insn);
3374*38fd1498Szrj 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3375*38fd1498Szrj 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3376*38fd1498Szrj 	}
3377*38fd1498Szrj 
3378*38fd1498Szrj       /* Copy prologue/epilogue status.  This is required in order to keep
3379*38fd1498Szrj 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3380*38fd1498Szrj       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3381*38fd1498Szrj     }
3382*38fd1498Szrj 
3383*38fd1498Szrj   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3384*38fd1498Szrj      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3385*38fd1498Szrj      cfg-related call notes.  */
3386*38fd1498Szrj   for (i = 0; i <= match_len; ++i)
3387*38fd1498Szrj     {
3388*38fd1498Szrj       int j;
3389*38fd1498Szrj       rtx note;
3390*38fd1498Szrj 
3391*38fd1498Szrj       j = peep2_buf_position (peep2_current + i);
3392*38fd1498Szrj       old_insn = peep2_insn_data[j].insn;
3393*38fd1498Szrj       if (!CALL_P (old_insn))
3394*38fd1498Szrj 	continue;
3395*38fd1498Szrj       was_call = true;
3396*38fd1498Szrj 
3397*38fd1498Szrj       new_insn = attempt;
3398*38fd1498Szrj       while (new_insn != NULL_RTX)
3399*38fd1498Szrj 	{
3400*38fd1498Szrj 	  if (CALL_P (new_insn))
3401*38fd1498Szrj 	    break;
3402*38fd1498Szrj 	  new_insn = NEXT_INSN (new_insn);
3403*38fd1498Szrj 	}
3404*38fd1498Szrj 
3405*38fd1498Szrj       gcc_assert (new_insn != NULL_RTX);
3406*38fd1498Szrj 
3407*38fd1498Szrj       CALL_INSN_FUNCTION_USAGE (new_insn)
3408*38fd1498Szrj 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3409*38fd1498Szrj       SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3410*38fd1498Szrj 
3411*38fd1498Szrj       for (note = REG_NOTES (old_insn);
3412*38fd1498Szrj 	   note;
3413*38fd1498Szrj 	   note = XEXP (note, 1))
3414*38fd1498Szrj 	switch (REG_NOTE_KIND (note))
3415*38fd1498Szrj 	  {
3416*38fd1498Szrj 	  case REG_NORETURN:
3417*38fd1498Szrj 	  case REG_SETJMP:
3418*38fd1498Szrj 	  case REG_TM:
3419*38fd1498Szrj 	  case REG_CALL_NOCF_CHECK:
3420*38fd1498Szrj 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3421*38fd1498Szrj 			  XEXP (note, 0));
3422*38fd1498Szrj 	    break;
3423*38fd1498Szrj 	  default:
3424*38fd1498Szrj 	    /* Discard all other reg notes.  */
3425*38fd1498Szrj 	    break;
3426*38fd1498Szrj 	  }
3427*38fd1498Szrj 
3428*38fd1498Szrj       /* Croak if there is another call in the sequence.  */
3429*38fd1498Szrj       while (++i <= match_len)
3430*38fd1498Szrj 	{
3431*38fd1498Szrj 	  j = peep2_buf_position (peep2_current + i);
3432*38fd1498Szrj 	  old_insn = peep2_insn_data[j].insn;
3433*38fd1498Szrj 	  gcc_assert (!CALL_P (old_insn));
3434*38fd1498Szrj 	}
3435*38fd1498Szrj       break;
3436*38fd1498Szrj     }
3437*38fd1498Szrj 
3438*38fd1498Szrj   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3439*38fd1498Szrj      move those notes over to the new sequence.  */
3440*38fd1498Szrj   as_note = NULL;
3441*38fd1498Szrj   for (i = match_len; i >= 0; --i)
3442*38fd1498Szrj     {
3443*38fd1498Szrj       int j = peep2_buf_position (peep2_current + i);
3444*38fd1498Szrj       old_insn = peep2_insn_data[j].insn;
3445*38fd1498Szrj 
3446*38fd1498Szrj       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3447*38fd1498Szrj       if (as_note)
3448*38fd1498Szrj 	break;
3449*38fd1498Szrj     }
3450*38fd1498Szrj 
3451*38fd1498Szrj   i = peep2_buf_position (peep2_current + match_len);
3452*38fd1498Szrj   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3453*38fd1498Szrj 
3454*38fd1498Szrj   /* Replace the old sequence with the new.  */
3455*38fd1498Szrj   rtx_insn *peepinsn = peep2_insn_data[i].insn;
3456*38fd1498Szrj   last = emit_insn_after_setloc (attempt,
3457*38fd1498Szrj 				 peep2_insn_data[i].insn,
3458*38fd1498Szrj 				 INSN_LOCATION (peepinsn));
3459*38fd1498Szrj   if (JUMP_P (peepinsn) && JUMP_P (last))
3460*38fd1498Szrj     CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3461*38fd1498Szrj   before_try = PREV_INSN (insn);
3462*38fd1498Szrj   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3463*38fd1498Szrj 
3464*38fd1498Szrj   /* Re-insert the EH_REGION notes.  */
3465*38fd1498Szrj   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3466*38fd1498Szrj     {
3467*38fd1498Szrj       edge eh_edge;
3468*38fd1498Szrj       edge_iterator ei;
3469*38fd1498Szrj 
3470*38fd1498Szrj       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3471*38fd1498Szrj 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3472*38fd1498Szrj 	  break;
3473*38fd1498Szrj 
3474*38fd1498Szrj       if (eh_note)
3475*38fd1498Szrj 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3476*38fd1498Szrj 
3477*38fd1498Szrj       if (eh_edge)
3478*38fd1498Szrj 	for (x = last; x != before_try; x = PREV_INSN (x))
3479*38fd1498Szrj 	  if (x != BB_END (bb)
3480*38fd1498Szrj 	      && (can_throw_internal (x)
3481*38fd1498Szrj 		  || can_nonlocal_goto (x)))
3482*38fd1498Szrj 	    {
3483*38fd1498Szrj 	      edge nfte, nehe;
3484*38fd1498Szrj 	      int flags;
3485*38fd1498Szrj 
3486*38fd1498Szrj 	      nfte = split_block (bb, x);
3487*38fd1498Szrj 	      flags = (eh_edge->flags
3488*38fd1498Szrj 		       & (EDGE_EH | EDGE_ABNORMAL));
3489*38fd1498Szrj 	      if (CALL_P (x))
3490*38fd1498Szrj 		flags |= EDGE_ABNORMAL_CALL;
3491*38fd1498Szrj 	      nehe = make_edge (nfte->src, eh_edge->dest,
3492*38fd1498Szrj 				flags);
3493*38fd1498Szrj 
3494*38fd1498Szrj 	      nehe->probability = eh_edge->probability;
3495*38fd1498Szrj 	      nfte->probability = nehe->probability.invert ();
3496*38fd1498Szrj 
3497*38fd1498Szrj 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3498*38fd1498Szrj 	      bb = nfte->src;
3499*38fd1498Szrj 	      eh_edge = nehe;
3500*38fd1498Szrj 	    }
3501*38fd1498Szrj 
3502*38fd1498Szrj       /* Converting possibly trapping insn to non-trapping is
3503*38fd1498Szrj 	 possible.  Zap dummy outgoing edges.  */
3504*38fd1498Szrj       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3505*38fd1498Szrj     }
3506*38fd1498Szrj 
3507*38fd1498Szrj   /* Re-insert the ARGS_SIZE notes.  */
3508*38fd1498Szrj   if (as_note)
3509*38fd1498Szrj     fixup_args_size_notes (before_try, last, get_args_size (as_note));
3510*38fd1498Szrj 
3511*38fd1498Szrj   /* If we generated a jump instruction, it won't have
3512*38fd1498Szrj      JUMP_LABEL set.  Recompute after we're done.  */
3513*38fd1498Szrj   for (x = last; x != before_try; x = PREV_INSN (x))
3514*38fd1498Szrj     if (JUMP_P (x))
3515*38fd1498Szrj       {
3516*38fd1498Szrj 	peep2_do_rebuild_jump_labels = true;
3517*38fd1498Szrj 	break;
3518*38fd1498Szrj       }
3519*38fd1498Szrj 
3520*38fd1498Szrj   return last;
3521*38fd1498Szrj }
3522*38fd1498Szrj 
/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
		   rtx_insn *prev)
{
  /* Start from the buffer slot just past the replaced insns; its
     live_before set describes liveness immediately after the
     replacement sequence.  */
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  /* The matched insns have been consumed; drop them from the count.  */
  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  /* Walk the replacement insns backward from LAST to just after PREV,
     simulating liveness and refilling the circular buffer so the new
     sequence can itself be matched by further peepholes.  */
  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      /* Step backward through the circular buffer, wrapping
		 around at the front.  */
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  /* The earliest refilled slot becomes the new buffer head.  */
  peep2_current = i;
}
3566*38fd1498Szrj 
3567*38fd1498Szrj /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3568*38fd1498Szrj    Return true if we added it, false otherwise.  The caller will try to match
3569*38fd1498Szrj    peepholes against the buffer if we return false; otherwise it will try to
3570*38fd1498Szrj    add more instructions to the buffer.  */
3571*38fd1498Szrj 
3572*38fd1498Szrj static bool
peep2_fill_buffer(basic_block bb,rtx_insn * insn,regset live)3573*38fd1498Szrj peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3574*38fd1498Szrj {
3575*38fd1498Szrj   int pos;
3576*38fd1498Szrj 
3577*38fd1498Szrj   /* Once we have filled the maximum number of insns the buffer can hold,
3578*38fd1498Szrj      allow the caller to match the insns against peepholes.  We wait until
3579*38fd1498Szrj      the buffer is full in case the target has similar peepholes of different
3580*38fd1498Szrj      length; we always want to match the longest if possible.  */
3581*38fd1498Szrj   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3582*38fd1498Szrj     return false;
3583*38fd1498Szrj 
3584*38fd1498Szrj   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3585*38fd1498Szrj      any other pattern, lest it change the semantics of the frame info.  */
3586*38fd1498Szrj   if (RTX_FRAME_RELATED_P (insn))
3587*38fd1498Szrj     {
3588*38fd1498Szrj       /* Let the buffer drain first.  */
3589*38fd1498Szrj       if (peep2_current_count > 0)
3590*38fd1498Szrj 	return false;
3591*38fd1498Szrj       /* Now the insn will be the only thing in the buffer.  */
3592*38fd1498Szrj     }
3593*38fd1498Szrj 
3594*38fd1498Szrj   pos = peep2_buf_position (peep2_current + peep2_current_count);
3595*38fd1498Szrj   peep2_insn_data[pos].insn = insn;
3596*38fd1498Szrj   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3597*38fd1498Szrj   peep2_current_count++;
3598*38fd1498Szrj 
3599*38fd1498Szrj   df_simulate_one_insn_forwards (bb, insn, live);
3600*38fd1498Szrj   return true;
3601*38fd1498Szrj }
3602*38fd1498Szrj 
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  /* Liveness information drives buffer filling; run DCE and collect
     REG_DEAD/REG_UNUSED notes as part of the dataflow analysis.  */
  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* PAST_END is set once INSN has run off the end of BB; after that
	 we only drain the remaining buffer contents.  */
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx_insn *attempt, *head;
	  int match_len;

	  /* Skip notes/debug insns while filling; the next_insn label is
	     also the continuation point after a successful fill below.  */
	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  /* Place the end-of-buffer sentinel just past the last insn.  */
	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  /* Substitution succeeded; rebuild the buffer from the
		     new insns and retry matching without advancing.  */
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  /* Perform any deferred fixups flagged during substitution.  */
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}
3694*38fd1498Szrj 
3695*38fd1498Szrj /* Common predicates for use with define_bypass.  */
3696*38fd1498Szrj 
3697*38fd1498Szrj /* Helper function for store_data_bypass_p, handle just a single SET
3698*38fd1498Szrj    IN_SET.  */
3699*38fd1498Szrj 
3700*38fd1498Szrj static bool
store_data_bypass_p_1(rtx_insn * out_insn,rtx in_set)3701*38fd1498Szrj store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
3702*38fd1498Szrj {
3703*38fd1498Szrj   if (!MEM_P (SET_DEST (in_set)))
3704*38fd1498Szrj     return false;
3705*38fd1498Szrj 
3706*38fd1498Szrj   rtx out_set = single_set (out_insn);
3707*38fd1498Szrj   if (out_set)
3708*38fd1498Szrj     return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
3709*38fd1498Szrj 
3710*38fd1498Szrj   rtx out_pat = PATTERN (out_insn);
3711*38fd1498Szrj   if (GET_CODE (out_pat) != PARALLEL)
3712*38fd1498Szrj     return false;
3713*38fd1498Szrj 
3714*38fd1498Szrj   for (int i = 0; i < XVECLEN (out_pat, 0); i++)
3715*38fd1498Szrj     {
3716*38fd1498Szrj       rtx out_exp = XVECEXP (out_pat, 0, i);
3717*38fd1498Szrj 
3718*38fd1498Szrj       if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
3719*38fd1498Szrj 	continue;
3720*38fd1498Szrj 
3721*38fd1498Szrj       gcc_assert (GET_CODE (out_exp) == SET);
3722*38fd1498Szrj 
3723*38fd1498Szrj       if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3724*38fd1498Szrj 	return false;
3725*38fd1498Szrj     }
3726*38fd1498Szrj 
3727*38fd1498Szrj   return true;
3728*38fd1498Szrj }
3729*38fd1498Szrj 
3730*38fd1498Szrj /* True if the dependency between OUT_INSN and IN_INSN is on the store
3731*38fd1498Szrj    data not the address operand(s) of the store.  IN_INSN and OUT_INSN
3732*38fd1498Szrj    must be either a single_set or a PARALLEL with SETs inside.  */
3733*38fd1498Szrj 
3734*38fd1498Szrj int
store_data_bypass_p(rtx_insn * out_insn,rtx_insn * in_insn)3735*38fd1498Szrj store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3736*38fd1498Szrj {
3737*38fd1498Szrj   rtx in_set = single_set (in_insn);
3738*38fd1498Szrj   if (in_set)
3739*38fd1498Szrj     return store_data_bypass_p_1 (out_insn, in_set);
3740*38fd1498Szrj 
3741*38fd1498Szrj   rtx in_pat = PATTERN (in_insn);
3742*38fd1498Szrj   if (GET_CODE (in_pat) != PARALLEL)
3743*38fd1498Szrj     return false;
3744*38fd1498Szrj 
3745*38fd1498Szrj   for (int i = 0; i < XVECLEN (in_pat, 0); i++)
3746*38fd1498Szrj     {
3747*38fd1498Szrj       rtx in_exp = XVECEXP (in_pat, 0, i);
3748*38fd1498Szrj 
3749*38fd1498Szrj       if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
3750*38fd1498Szrj 	continue;
3751*38fd1498Szrj 
3752*38fd1498Szrj       gcc_assert (GET_CODE (in_exp) == SET);
3753*38fd1498Szrj 
3754*38fd1498Szrj       if (!store_data_bypass_p_1 (out_insn, in_exp))
3755*38fd1498Szrj 	return false;
3756*38fd1498Szrj     }
3757*38fd1498Szrj 
3758*38fd1498Szrj   return true;
3759*38fd1498Szrj }
3760*38fd1498Szrj 
3761*38fd1498Szrj /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3762*38fd1498Szrj    condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3763*38fd1498Szrj    or multiple set; IN_INSN should be single_set for truth, but for convenience
3764*38fd1498Szrj    of insn categorization may be any JUMP or CALL insn.  */
3765*38fd1498Szrj 
3766*38fd1498Szrj int
if_test_bypass_p(rtx_insn * out_insn,rtx_insn * in_insn)3767*38fd1498Szrj if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3768*38fd1498Szrj {
3769*38fd1498Szrj   rtx out_set, in_set;
3770*38fd1498Szrj 
3771*38fd1498Szrj   in_set = single_set (in_insn);
3772*38fd1498Szrj   if (! in_set)
3773*38fd1498Szrj     {
3774*38fd1498Szrj       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3775*38fd1498Szrj       return false;
3776*38fd1498Szrj     }
3777*38fd1498Szrj 
3778*38fd1498Szrj   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3779*38fd1498Szrj     return false;
3780*38fd1498Szrj   in_set = SET_SRC (in_set);
3781*38fd1498Szrj 
3782*38fd1498Szrj   out_set = single_set (out_insn);
3783*38fd1498Szrj   if (out_set)
3784*38fd1498Szrj     {
3785*38fd1498Szrj       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3786*38fd1498Szrj 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3787*38fd1498Szrj 	return false;
3788*38fd1498Szrj     }
3789*38fd1498Szrj   else
3790*38fd1498Szrj     {
3791*38fd1498Szrj       rtx out_pat;
3792*38fd1498Szrj       int i;
3793*38fd1498Szrj 
3794*38fd1498Szrj       out_pat = PATTERN (out_insn);
3795*38fd1498Szrj       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3796*38fd1498Szrj 
3797*38fd1498Szrj       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3798*38fd1498Szrj 	{
3799*38fd1498Szrj 	  rtx exp = XVECEXP (out_pat, 0, i);
3800*38fd1498Szrj 
3801*38fd1498Szrj 	  if (GET_CODE (exp) == CLOBBER)
3802*38fd1498Szrj 	    continue;
3803*38fd1498Szrj 
3804*38fd1498Szrj 	  gcc_assert (GET_CODE (exp) == SET);
3805*38fd1498Szrj 
3806*38fd1498Szrj 	  if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3807*38fd1498Szrj 	      || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3808*38fd1498Szrj 	    return false;
3809*38fd1498Szrj 	}
3810*38fd1498Szrj     }
3811*38fd1498Szrj 
3812*38fd1498Szrj   return true;
3813*38fd1498Szrj }
3814*38fd1498Szrj 
/* Run the peephole2 optimizer if the target provides any peephole2
   patterns; otherwise do nothing.  Always returns 0 (no extra TODO
   flags for the pass manager).  */

static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}
3823*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the peephole2 pass.  */
const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* RTL pass wrapper running the peephole2 optimizer.  */
class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  /* Run only when optimizing and -fpeephole2 is enabled.  */
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace
3859*38fd1498Szrj 
/* Factory for the peephole2 pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
3865*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the first instruction-splitting pass
   ("split1"); provides PROP_rtl_split_insns.  */
const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass splitting all insns; runs unconditionally (no gate).  */
class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace
3901*38fd1498Szrj 
/* Factory for the "split1" pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
3907*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the post-reload splitting pass ("split2").  */
const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass splitting all insns after register allocation.  */
class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      if (optimize > 0)
	return true;

      /* Targets with a register stack (STACK_REGS) split here even
	 when not optimizing.  */
#ifdef STACK_REGS
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace
3953*38fd1498Szrj 
/* Factory for the "split2" pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
3959*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the splitting pass that runs before
   reg-stack conversion ("split3").  */
const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass splitting all insns before reg-stack conversion; the gate
   is defined out-of-line below because of its preprocessor logic.  */
class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

} // anon namespace
4011*38fd1498Szrj 
/* Factory for the "split3" pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}
4017*38fd1498Szrj 
/* Split all insns before the second scheduling pass; a no-op when the
   target has no instruction scheduling.  Always returns 0.  */

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
4026*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the splitting pass that runs before the
   second scheduling pass ("split4").  */
const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass splitting insns just before sched2.  */
class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Only relevant when post-reload scheduling will actually run.  */
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace
4067*38fd1498Szrj 
/* Factory for the "split4" pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}
4073*38fd1498Szrj 
namespace {

/* Pass-manager metadata for the late splitting pass run for
   shorten_branches ("split5").  */
const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass splitting insns without updating flow information, for
   the benefit of branch shortening.  */
class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace
4116*38fd1498Szrj 
/* Factory for the "split5" pass, called by the pass manager.  */

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}
4122*38fd1498Szrj 
4123*38fd1498Szrj /* (Re)initialize the target information after a change in target.  */
4124*38fd1498Szrj 
4125*38fd1498Szrj void
recog_init()4126*38fd1498Szrj recog_init ()
4127*38fd1498Szrj {
4128*38fd1498Szrj   /* The information is zero-initialized, so we don't need to do anything
4129*38fd1498Szrj      first time round.  */
4130*38fd1498Szrj   if (!this_target_recog->x_initialized)
4131*38fd1498Szrj     {
4132*38fd1498Szrj       this_target_recog->x_initialized = true;
4133*38fd1498Szrj       return;
4134*38fd1498Szrj     }
4135*38fd1498Szrj   memset (this_target_recog->x_bool_attr_masks, 0,
4136*38fd1498Szrj 	  sizeof (this_target_recog->x_bool_attr_masks));
4137*38fd1498Szrj   for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4138*38fd1498Szrj     if (this_target_recog->x_op_alt[i])
4139*38fd1498Szrj       {
4140*38fd1498Szrj 	free (this_target_recog->x_op_alt[i]);
4141*38fd1498Szrj 	this_target_recog->x_op_alt[i] = 0;
4142*38fd1498Szrj       }
4143*38fd1498Szrj }
4144