xref: /dragonfly/contrib/gcc-8.0/gcc/recog.c (revision 6e5c5008)
1 /* Subroutines used by or related to instruction recognition.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 
44 #ifndef STACK_POP_CODE
45 #if STACK_GROWS_DOWNWARD
46 #define STACK_POP_CODE POST_INC
47 #else
48 #define STACK_POP_CODE POST_DEC
49 #endif
50 #endif
51 
52 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
53 static void validate_replace_src_1 (rtx *, void *);
54 static rtx_insn *split_insn (rtx_insn *);
55 
56 struct target_recog default_target_recog;
57 #if SWITCHABLE_TARGET
58 struct target_recog *this_target_recog = &default_target_recog;
59 #endif
60 
61 /* Nonzero means allow operands to be volatile.
62    This should be 0 if you are generating rtl, such as if you are calling
63    the functions in optabs.c and expmed.c (most of the time).
64    This should be 1 if all valid insns need to be recognized,
65    such as in reginfo.c and final.c and reload.c.
66 
67    init_recog and init_recog_no_volatile are responsible for setting this.  */
68 
69 int volatile_ok;
70 
71 struct recog_data_d recog_data;
72 
73 /* Contains a vector of operand_alternative structures, such that
74    operand OP of alternative A is at index A * n_operands + OP.
75    Set up by preprocess_constraints.  */
76 const operand_alternative *recog_op_alt;
77 
78 /* Used to provide recog_op_alt for asms.  */
79 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
80 				      * MAX_RECOG_ALTERNATIVES];
81 
82 /* On return from `constrain_operands', indicate which alternative
83    was satisfied.  */
84 
85 int which_alternative;
86 
87 /* Nonzero after end of reload pass.
88    Set to 1 or 0 by toplev.c.
89    Controls the significance of (SUBREG (MEM)).  */
90 
91 int reload_completed;
92 
93 /* Nonzero after thread_prologue_and_epilogue_insns has run.  */
94 int epilogue_completed;
95 
96 /* Initialize data used by the function `recog'.
97    This must be called once in the compilation of a function
98    before any insn recognition may be done in the function.  */
99 
100 void
101 init_recog_no_volatile (void)
102 {
103   volatile_ok = 0;
104 }
105 
106 void
107 init_recog (void)
108 {
109   volatile_ok = 1;
110 }
111 
112 
113 /* Return true if labels in asm operands BODY are LABEL_REFs.  */
114 
115 static bool
116 asm_labels_ok (rtx body)
117 {
118   rtx asmop;
119   int i;
120 
121   asmop = extract_asm_operands (body);
122   if (asmop == NULL_RTX)
123     return true;
124 
125   for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
126     if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
127       return false;
128 
129   return true;
130 }
131 
132 /* Check that X is an insn-body for an `asm' with operands
133    and that the operands mentioned in it are legitimate.  */
134 
135 int
136 check_asm_operands (rtx x)
137 {
138   int noperands;
139   rtx *operands;
140   const char **constraints;
141   int i;
142 
143   if (!asm_labels_ok (x))
144     return 0;
145 
146   /* Post-reload, be more strict with things.  */
147   if (reload_completed)
148     {
149       /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
150       rtx_insn *insn = make_insn_raw (x);
151       extract_insn (insn);
152       constrain_operands (1, get_enabled_alternatives (insn));
153       return which_alternative >= 0;
154     }
155 
156   noperands = asm_noperands (x);
157   if (noperands < 0)
158     return 0;
159   if (noperands == 0)
160     return 1;
161 
162   operands = XALLOCAVEC (rtx, noperands);
163   constraints = XALLOCAVEC (const char *, noperands);
164 
165   decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
166 
167   for (i = 0; i < noperands; i++)
168     {
169       const char *c = constraints[i];
170       if (c[0] == '%')
171 	c++;
172       if (! asm_operand_ok (operands[i], c, constraints))
173 	return 0;
174     }
175 
176   return 1;
177 }
178 
179 /* Static data for the next two routines.  */
180 
181 struct change_t
182 {
183   rtx object;
184   int old_code;
185   bool unshare;
186   rtx *loc;
187   rtx old;
188 };
189 
190 static change_t *changes;
191 static int changes_allocated;
192 
193 static int num_changes = 0;
194 
195 /* Validate a proposed change to OBJECT.  LOC is the location in the rtl
196    at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
197    the change is simply made.
198 
199    Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
200    will be called with the address and mode as parameters.  If OBJECT is
201    an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
202    the change in place.
203 
204    IN_GROUP is nonzero if this is part of a group of changes that must be
205    performed as a group.  In that case, the changes will be stored.  The
206    function `apply_change_group' will validate and apply the changes.
207 
208    If IN_GROUP is zero, this is a single change.  Try to recognize the insn
209    or validate the memory reference with the change applied.  If the result
210    is not valid for the machine, suppress the change and return zero.
211    Otherwise, perform the change and return 1.  */
212 
213 static bool
214 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
215 {
216   rtx old = *loc;
217 
218   if (old == new_rtx || rtx_equal_p (old, new_rtx))
219     return 1;
220 
221   gcc_assert (in_group != 0 || num_changes == 0);
222 
223   *loc = new_rtx;
224 
225   /* Save the information describing this change.  */
226   if (num_changes >= changes_allocated)
227     {
228       if (changes_allocated == 0)
229 	/* This value allows for repeated substitutions inside complex
230 	   indexed addresses, or changes in up to 5 insns.  */
231 	changes_allocated = MAX_RECOG_OPERANDS * 5;
232       else
233 	changes_allocated *= 2;
234 
235       changes = XRESIZEVEC (change_t, changes, changes_allocated);
236     }
237 
238   changes[num_changes].object = object;
239   changes[num_changes].loc = loc;
240   changes[num_changes].old = old;
241   changes[num_changes].unshare = unshare;
242 
243   if (object && !MEM_P (object))
244     {
245       /* Set INSN_CODE to force rerecognition of insn.  Save old code in
246 	 case invalid.  */
247       changes[num_changes].old_code = INSN_CODE (object);
248       INSN_CODE (object) = -1;
249     }
250 
251   num_changes++;
252 
253   /* If we are making a group of changes, return 1.  Otherwise, validate the
254      change group we made.  */
255 
256   if (in_group)
257     return 1;
258   else
259     return apply_change_group ();
260 }
261 
262 /* Wrapper for validate_change_1 without the UNSHARE argument; UNSHARE
263    defaults to false.  */
264 
265 bool
266 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
267 {
268   return validate_change_1 (object, loc, new_rtx, in_group, false);
269 }
270 
271 /* Wrapper for validate_change_1 without the UNSHARE argument; UNSHARE
272    defaults to true.  */
273 
274 bool
275 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
276 {
277   return validate_change_1 (object, loc, new_rtx, in_group, true);
278 }
279 
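/* A minimal usage sketch of the change-group protocol (illustrative only;
   INSN is assumed to be a caller-supplied insn whose pattern is a single
   SET, and NEW_SRC/NEW_DEST are hypothetical replacement rtxes):

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (! apply_change_group ())
       ... both changes have been backed out and INSN is unchanged ...

   With IN_GROUP == 0 each call validates and applies (or rejects) its
   single change on the spot.  */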
280 
281 /* Keep X canonicalized if some changes have made it non-canonical; only
282    modifies the operands of X, not (for example) its code.  Simplifications
283    are not the job of this routine.
284 
285    Return true if anything was changed.  */
286 bool
287 canonicalize_change_group (rtx_insn *insn, rtx x)
288 {
289   if (COMMUTATIVE_P (x)
290       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
291     {
292       /* Oops, the caller has made X no longer canonical.
293 	 Let's redo the changes in the correct order.  */
294       rtx tem = XEXP (x, 0);
295       validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
296       validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
297       return true;
298     }
299   else
300     return false;
301 }
302 
303 
304 /* This subroutine of apply_change_group verifies whether the changes to INSN
305    were valid; i.e. whether INSN can still be recognized.
306 
307    If IN_GROUP is true, clobbers which have to be added in order to
308    match the instructions will be added to the current change group.
309    Otherwise the changes will take effect immediately.  */
310 
311 int
312 insn_invalid_p (rtx_insn *insn, bool in_group)
313 {
314   rtx pat = PATTERN (insn);
315   int num_clobbers = 0;
316   /* If we are before reload and the pattern is a SET, see if we can add
317      clobbers.  */
318   int icode = recog (pat, insn,
319 		     (GET_CODE (pat) == SET
320 		      && ! reload_completed
321                       && ! reload_in_progress)
322 		     ? &num_clobbers : 0);
323   int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
324 
325 
326   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
327      this is not an asm and the insn wasn't recognized.  */
328   if ((is_asm && ! check_asm_operands (PATTERN (insn)))
329       || (!is_asm && icode < 0))
330     return 1;
331 
332   /* If we have to add CLOBBERs, fail if we have to add ones that reference
333      hard registers since our callers can't know if they are live or not.
334      Otherwise, add them.  */
335   if (num_clobbers > 0)
336     {
337       rtx newpat;
338 
339       if (added_clobbers_hard_reg_p (icode))
340 	return 1;
341 
342       newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
343       XVECEXP (newpat, 0, 0) = pat;
344       add_clobbers (newpat, icode);
345       if (in_group)
346 	validate_change (insn, &PATTERN (insn), newpat, 1);
347       else
348 	PATTERN (insn) = pat = newpat;
349     }
350 
351   /* After reload, verify that all constraints are satisfied.  */
352   if (reload_completed)
353     {
354       extract_insn (insn);
355 
356       if (! constrain_operands (1, get_preferred_alternatives (insn)))
357 	return 1;
358     }
359 
360   INSN_CODE (insn) = icode;
361   return 0;
362 }
363 
364 /* Return number of changes made and not validated yet.  */
365 int
366 num_changes_pending (void)
367 {
368   return num_changes;
369 }
370 
371 /* Tentatively apply the changes numbered NUM and up.
372    Return 1 if all changes are valid, zero otherwise.  */
373 
374 int
375 verify_changes (int num)
376 {
377   int i;
378   rtx last_validated = NULL_RTX;
379 
380   /* The changes have been applied and all INSN_CODEs have been reset to force
381      rerecognition.
382 
383      The changes are valid if we aren't given an object, or if we are
384      given a MEM and it still is a valid address, or if this is an insn
385      and it is recognized.  In the latter case, if reload has completed,
386      we also require that the operands meet the constraints for
387      the insn.  */
388 
389   for (i = num; i < num_changes; i++)
390     {
391       rtx object = changes[i].object;
392 
393       /* If there is no object to test or if it is the same as the one we
394          already tested, ignore it.  */
395       if (object == 0 || object == last_validated)
396 	continue;
397 
398       if (MEM_P (object))
399 	{
400 	  if (! memory_address_addr_space_p (GET_MODE (object),
401 					     XEXP (object, 0),
402 					     MEM_ADDR_SPACE (object)))
403 	    break;
404 	}
405       else if (/* changes[i].old might be zero, e.g. when putting a
406 	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
407 	       changes[i].old
408 	       && REG_P (changes[i].old)
409 	       && asm_noperands (PATTERN (object)) > 0
410 	       && REG_EXPR (changes[i].old) != NULL_TREE
411 	       && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
412 	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
413 	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
414 	{
415 	  /* Don't allow changes of hard register operands to inline
416 	     assemblies if they have been defined as register asm ("x").  */
417 	  break;
418 	}
419       else if (DEBUG_INSN_P (object))
420 	continue;
421       else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
422 	{
423 	  rtx pat = PATTERN (object);
424 
425 	  /* Perhaps we couldn't recognize the insn because there were
426 	     extra CLOBBERs at the end.  If so, try to re-recognize
427 	     without the last CLOBBER (later iterations will cause each of
428 	     them to be eliminated, in turn).  But don't do this if we
429 	     have an ASM_OPERAND.  */
430 	  if (GET_CODE (pat) == PARALLEL
431 	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
432 	      && asm_noperands (PATTERN (object)) < 0)
433 	    {
434 	      rtx newpat;
435 
436 	      if (XVECLEN (pat, 0) == 2)
437 		newpat = XVECEXP (pat, 0, 0);
438 	      else
439 		{
440 		  int j;
441 
442 		  newpat
443 		    = gen_rtx_PARALLEL (VOIDmode,
444 					rtvec_alloc (XVECLEN (pat, 0) - 1));
445 		  for (j = 0; j < XVECLEN (newpat, 0); j++)
446 		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
447 		}
448 
449 	      /* Add a new change to this group to replace the pattern
450 		 with this new pattern.  Then consider this change
451 		 as having succeeded.  The change we added will
452 		 cause the entire call to fail if things remain invalid.
453 
454 		 Note that this can lose if a later change than the one
455 		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
456 		 but this shouldn't occur.  */
457 
458 	      validate_change (object, &PATTERN (object), newpat, 1);
459 	      continue;
460 	    }
461 	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
462 		   || GET_CODE (pat) == VAR_LOCATION)
463 	    /* If this insn is a CLOBBER or USE, it is always valid, but is
464 	       never recognized.  */
465 	    continue;
466 	  else
467 	    break;
468 	}
469       last_validated = object;
470     }
471 
472   return (i == num_changes);
473 }
474 
475 /* A group of changes has previously been issued with validate_change
476    and verified with verify_changes.  Call df_insn_rescan for each of
477    the insns changed and clear num_changes.  */
478 
479 void
480 confirm_change_group (void)
481 {
482   int i;
483   rtx last_object = NULL;
484 
485   for (i = 0; i < num_changes; i++)
486     {
487       rtx object = changes[i].object;
488 
489       if (changes[i].unshare)
490 	*changes[i].loc = copy_rtx (*changes[i].loc);
491 
492       /* Avoid unnecessary rescanning when multiple changes to the same
493          instruction are made.  */
494       if (object)
495 	{
496 	  if (object != last_object && last_object && INSN_P (last_object))
497 	    df_insn_rescan (as_a <rtx_insn *> (last_object));
498 	  last_object = object;
499 	}
500     }
501 
502   if (last_object && INSN_P (last_object))
503     df_insn_rescan (as_a <rtx_insn *> (last_object));
504   num_changes = 0;
505 }
506 
507 /* Apply a group of changes previously issued with `validate_change'.
508    If all changes are valid, call confirm_change_group and return 1,
509    otherwise, call cancel_changes and return 0.  */
510 
511 int
512 apply_change_group (void)
513 {
514   if (verify_changes (0))
515     {
516       confirm_change_group ();
517       return 1;
518     }
519   else
520     {
521       cancel_changes (0);
522       return 0;
523     }
524 }
525 
526 
527 /* Return the number of changes so far in the current group.  */
528 
529 int
530 num_validated_changes (void)
531 {
532   return num_changes;
533 }
534 
535 /* Retract the changes numbered NUM and up.  */
536 
537 void
538 cancel_changes (int num)
539 {
540   int i;
541 
542   /* Back out all the changes.  Do this in the opposite order in which
543      they were made.  */
544   for (i = num_changes - 1; i >= num; i--)
545     {
546       *changes[i].loc = changes[i].old;
547       if (changes[i].object && !MEM_P (changes[i].object))
548 	INSN_CODE (changes[i].object) = changes[i].old_code;
549     }
550   num_changes = num;
551 }
552 
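/* A hedged sketch of keeping only an already-verified prefix of a change
   group (a pattern used by several RTL passes; NUM_OLD is a hypothetical
   local):

     int num_old = num_validated_changes ();
     ... queue further changes with validate_change (..., 1) ...
     if (! verify_changes (num_old))
       cancel_changes (num_old);

   confirm_change_group would then commit whatever remains queued.  */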
553 /* Reduce conditional compilation elsewhere.  */
554 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
555    rtx.  */
556 
557 static void
558 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
559                           machine_mode op0_mode)
560 {
561   rtx x = *loc;
562   enum rtx_code code = GET_CODE (x);
563   rtx new_rtx = NULL_RTX;
564   scalar_int_mode is_mode;
565 
566   if (SWAPPABLE_OPERANDS_P (x)
567       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
568     {
569       validate_unshare_change (object, loc,
570 			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
571 					       : swap_condition (code),
572 					       GET_MODE (x), XEXP (x, 1),
573 					       XEXP (x, 0)), 1);
574       x = *loc;
575       code = GET_CODE (x);
576     }
577 
578   /* Canonicalize arithmetics with all constant operands.  */
579   switch (GET_RTX_CLASS (code))
580     {
581     case RTX_UNARY:
582       if (CONSTANT_P (XEXP (x, 0)))
583 	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
584 					    op0_mode);
585       break;
586     case RTX_COMM_ARITH:
587     case RTX_BIN_ARITH:
588       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
589 	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
590 					     XEXP (x, 1));
591       break;
592     case RTX_COMPARE:
593     case RTX_COMM_COMPARE:
594       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
595 	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
596 						 XEXP (x, 0), XEXP (x, 1));
597       break;
598     default:
599       break;
600     }
601   if (new_rtx)
602     {
603       validate_change (object, loc, new_rtx, 1);
604       return;
605     }
606 
607   switch (code)
608     {
609     case PLUS:
610       /* If we have a PLUS whose second operand is now a CONST_INT, use
611          simplify_gen_binary to try to simplify it.
612          ??? We may want later to remove this, once simplification is
613          separated from this function.  */
614       if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
615 	validate_change (object, loc,
616 			 simplify_gen_binary
617 			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
618       break;
619     case MINUS:
620       if (CONST_SCALAR_INT_P (XEXP (x, 1)))
621 	validate_change (object, loc,
622 			 simplify_gen_binary
623 			 (PLUS, GET_MODE (x), XEXP (x, 0),
624 			  simplify_gen_unary (NEG,
625 					      GET_MODE (x), XEXP (x, 1),
626 					      GET_MODE (x))), 1);
627       break;
628     case ZERO_EXTEND:
629     case SIGN_EXTEND:
630       if (GET_MODE (XEXP (x, 0)) == VOIDmode)
631 	{
632 	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
633 				    op0_mode);
634 	  /* If any of the above failed, substitute in something that
635 	     we know won't be recognized.  */
636 	  if (!new_rtx)
637 	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
638 	  validate_change (object, loc, new_rtx, 1);
639 	}
640       break;
641     case SUBREG:
642       /* All subregs possible to simplify should be simplified.  */
643       new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
644 			     SUBREG_BYTE (x));
645 
646       /* Subregs of VOIDmode operands are incorrect.  */
647       if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
648 	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
649       if (new_rtx)
650 	validate_change (object, loc, new_rtx, 1);
651       break;
652     case ZERO_EXTRACT:
653     case SIGN_EXTRACT:
654       /* If we are replacing a register with memory, try to change the memory
655          to be the mode required for memory in extract operations (this isn't
656          likely to be an insertion operation; if it was, nothing bad will
657          happen, we might just fail in some cases).  */
658 
659       if (MEM_P (XEXP (x, 0))
660 	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
661 	  && CONST_INT_P (XEXP (x, 1))
662 	  && CONST_INT_P (XEXP (x, 2))
663 	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
664 					MEM_ADDR_SPACE (XEXP (x, 0)))
665 	  && !MEM_VOLATILE_P (XEXP (x, 0)))
666 	{
667 	  int pos = INTVAL (XEXP (x, 2));
668 	  machine_mode new_mode = is_mode;
669 	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
670 	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
671 	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
672 	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
673 	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
674 					 ? word_mode
675 					 : as_a <scalar_int_mode> (new_mode));
676 
677 	  /* If we have a narrower mode, we can do something.  */
678 	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
679 	    {
680 	      int offset = pos / BITS_PER_UNIT;
681 	      rtx newmem;
682 
683 	      /* If the bytes and bits are counted differently, we
684 	         must adjust the offset.  */
685 	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
686 		offset =
687 		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
688 		   offset);
689 
690 	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
691 			  == GET_MODE_BITSIZE (wanted_mode));
692 	      pos %= GET_MODE_BITSIZE (wanted_mode);
693 
694 	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
695 
696 	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
697 	      validate_change (object, &XEXP (x, 0), newmem, 1);
698 	    }
699 	}
700 
701       break;
702 
703     default:
704       break;
705     }
706 }
707 
708 /* Replace every occurrence of FROM in X with TO.  Mark each change with
709    validate_change passing OBJECT.  */
710 
711 static void
712 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
713                         bool simplify)
714 {
715   int i, j;
716   const char *fmt;
717   rtx x = *loc;
718   enum rtx_code code;
719   machine_mode op0_mode = VOIDmode;
720   int prev_changes = num_changes;
721 
722   if (!x)
723     return;
724 
725   code = GET_CODE (x);
726   fmt = GET_RTX_FORMAT (code);
727   if (fmt[0] == 'e')
728     op0_mode = GET_MODE (XEXP (x, 0));
729 
730   /* X matches FROM if it is the same rtx or they are both referring to the
731      same register in the same mode.  Avoid calling rtx_equal_p unless the
732      operands look similar.  */
733 
734   if (x == from
735       || (REG_P (x) && REG_P (from)
736 	  && GET_MODE (x) == GET_MODE (from)
737 	  && REGNO (x) == REGNO (from))
738       || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
739 	  && rtx_equal_p (x, from)))
740     {
741       validate_unshare_change (object, loc, to, 1);
742       return;
743     }
744 
745   /* Call ourselves recursively to perform the replacements.
746      We must not replace inside an already-replaced expression, otherwise we
747      get infinite recursion for replacements like (reg X)->(subreg (reg X)),
748      so we must special-case shared ASM_OPERANDS.  */
749 
750   if (GET_CODE (x) == PARALLEL)
751     {
752       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
753 	{
754 	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
755 	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
756 	    {
757 	      /* Verify that operands are really shared.  */
758 	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
759 			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
760 							      (x, 0, j))));
761 	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
762 				      from, to, object, simplify);
763 	    }
764 	  else
765 	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
766                                     simplify);
767 	}
768     }
769   else
770     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
771       {
772 	if (fmt[i] == 'e')
773 	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
774 	else if (fmt[i] == 'E')
775 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
776 	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
777                                     simplify);
778       }
779 
780   /* If we didn't substitute, there is nothing more to do.  */
781   if (num_changes == prev_changes)
782     return;
783 
784   /* ??? The regmove is no more, so is this aberration still necessary?  */
785   /* Allow the substituted expression to have a different mode.  This is
786      used by regmove to change the mode of a pseudo register.  */
787   if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
788     op0_mode = GET_MODE (XEXP (x, 0));
789 
790   /* Do changes needed to keep rtx consistent.  Don't do any other
791      simplifications, as it is not our job.  */
792   if (simplify)
793     simplify_while_replacing (loc, to, object, op0_mode);
794 }
795 
796 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
797    with TO.  After all changes have been made, validate by seeing
798    if INSN is still valid.  */
799 
800 int
801 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
802 {
803   validate_replace_rtx_1 (loc, from, to, insn, true);
804   return apply_change_group ();
805 }
806 
807 /* Try replacing every occurrence of FROM in INSN with TO.  After all
808    changes have been made, validate by seeing if INSN is still valid.  */
809 
810 int
811 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
812 {
813   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
814   return apply_change_group ();
815 }
816 
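/* For example (an illustrative sketch; FROM_REG and INSN come from the
   caller), replacing every use of a pseudo by a constant and keeping the
   result only if INSN is still recognizable:

     if (validate_replace_rtx (from_reg, GEN_INT (42), insn))
       ... INSN has been re-recognized with the constant in place ...
     else
       ... the replacement was backed out and INSN is unchanged ...  */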
817 /* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
818    is a part of INSN.  After all changes have been made, validate by seeing if
819    INSN is still valid.
820    validate_replace_rtx (from, to, insn) is equivalent to
821    validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
822 
823 int
824 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
825 {
826   validate_replace_rtx_1 (where, from, to, insn, true);
827   return apply_change_group ();
828 }
829 
830 /* Same as above, but do not simplify rtx afterwards.  */
831 int
832 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
833 				      rtx_insn *insn)
834 {
835   validate_replace_rtx_1 (where, from, to, insn, false);
836   return apply_change_group ();
837 
838 }
839 
840 /* Try replacing every occurrence of FROM in INSN with TO.  This also
841    will replace in REG_EQUAL and REG_EQUIV notes.  */
842 
843 void
844 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
845 {
846   rtx note;
847   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
848   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
849     if (REG_NOTE_KIND (note) == REG_EQUAL
850 	|| REG_NOTE_KIND (note) == REG_EQUIV)
851       validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
852 }
853 
854 /* Function called by note_uses to replace used subexpressions.  */
855 struct validate_replace_src_data
856 {
857   rtx from;			/* Old RTX */
858   rtx to;			/* New RTX */
859   rtx_insn *insn;			/* Insn in which substitution is occurring.  */
860 };
861 
862 static void
863 validate_replace_src_1 (rtx *x, void *data)
864 {
865   struct validate_replace_src_data *d
866     = (struct validate_replace_src_data *) data;
867 
868   validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
869 }
870 
871 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
872    SET_DESTs.  */
873 
874 void
875 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
876 {
877   struct validate_replace_src_data d;
878 
879   d.from = from;
880   d.to = to;
881   d.insn = insn;
882   note_uses (&PATTERN (insn), validate_replace_src_1, &d);
883 }
884 
885 /* Try to simplify INSN.
886    Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
887    pattern and return true if something was simplified.  */
888 
889 bool
890 validate_simplify_insn (rtx_insn *insn)
891 {
892   int i;
893   rtx pat = NULL;
894   rtx newpat = NULL;
895 
896   pat = PATTERN (insn);
897 
898   if (GET_CODE (pat) == SET)
899     {
900       newpat = simplify_rtx (SET_SRC (pat));
901       if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
902 	validate_change (insn, &SET_SRC (pat), newpat, 1);
903       newpat = simplify_rtx (SET_DEST (pat));
904       if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
905 	validate_change (insn, &SET_DEST (pat), newpat, 1);
906     }
907   else if (GET_CODE (pat) == PARALLEL)
908     for (i = 0; i < XVECLEN (pat, 0); i++)
909       {
910 	rtx s = XVECEXP (pat, 0, i);
911 
912 	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
913 	  {
914 	    newpat = simplify_rtx (SET_SRC (s));
915 	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
916 	      validate_change (insn, &SET_SRC (s), newpat, 1);
917 	    newpat = simplify_rtx (SET_DEST (s));
918 	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
919 	      validate_change (insn, &SET_DEST (s), newpat, 1);
920 	  }
921       }
922   return ((num_changes_pending () > 0) && (apply_change_group () > 0));
923 }
924 
925 /* Return 1 if the insn using CC0 set by INSN does not contain
926    any ordered tests applied to the condition codes.
927    EQ and NE tests do not count.  */
928 
929 int
930 next_insn_tests_no_inequality (rtx_insn *insn)
931 {
932   rtx_insn *next = next_cc0_user (insn);
933 
934   /* If there is no next insn, we have to take the conservative choice.  */
935   if (next == 0)
936     return 0;
937 
938   return (INSN_P (next)
939 	  && ! inequality_comparisons_p (PATTERN (next)));
940 }
941 
942 /* Return 1 if OP is a valid general operand for machine mode MODE.
943    This is either a register reference, a memory reference,
944    or a constant.  In the case of a memory reference, the address
945    is checked for general validity for the target machine.
946 
947    Register and memory references must have mode MODE in order to be valid,
948    but some constants have no machine mode and are valid for any mode.
949 
950    If MODE is VOIDmode, OP is checked for validity for whatever mode
951    it has.
952 
953    The main use of this function is as a predicate in match_operand
954    expressions in the machine description.  */
955 
956 int
957 general_operand (rtx op, machine_mode mode)
958 {
959   enum rtx_code code = GET_CODE (op);
960 
961   if (mode == VOIDmode)
962     mode = GET_MODE (op);
963 
964   /* Don't accept CONST_INT or anything similar
965      if the caller wants something floating.  */
966   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
967       && GET_MODE_CLASS (mode) != MODE_INT
968       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
969     return 0;
970 
971   if (CONST_INT_P (op)
972       && mode != VOIDmode
973       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
974     return 0;
975 
976   if (CONSTANT_P (op))
977     return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
978 	     || mode == VOIDmode)
979 	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
980 	    && targetm.legitimate_constant_p (mode == VOIDmode
981 					      ? GET_MODE (op)
982 					      : mode, op));
983 
984   /* Except for certain constants with VOIDmode, already checked for,
985      OP's mode must match MODE if MODE specifies a mode.  */
986 
987   if (GET_MODE (op) != mode)
988     return 0;
989 
990   if (code == SUBREG)
991     {
992       rtx sub = SUBREG_REG (op);
993 
994 #ifdef INSN_SCHEDULING
995       /* On machines that have insn scheduling, we want all memory
996 	 references to be explicit, so outlaw paradoxical SUBREGs.
997 	 However, we must allow them after reload so that they can
998 	 get cleaned up by cleanup_subreg_operands.  */
999       if (!reload_completed && MEM_P (sub)
1000 	  && paradoxical_subreg_p (op))
1001 	return 0;
1002 #endif
1003       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1004          may result in an incorrect reference.  We should simplify all valid
1005          subregs of MEM anyway.  But allow this after reload because we
1006 	 might be called from cleanup_subreg_operands.
1007 
1008 	 ??? This is a kludge.  */
1009       if (!reload_completed
1010 	  && maybe_ne (SUBREG_BYTE (op), 0)
1011 	  && MEM_P (sub))
1012 	return 0;
1013 
1014       if (REG_P (sub)
1015 	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
1016 	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1017 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1018 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1019 	  /* LRA can generate some invalid SUBREGS just for matched
1020 	     operand reload presentation.  LRA needs to treat them as
1021 	     valid.  */
1022 	  && ! LRA_SUBREG_P (op))
1023 	return 0;
1024 
1025       /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1026 	 create such rtl, and we must reject it.  */
1027       if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1028 	  /* LRA can use subreg to store a floating point value in an
1029 	     integer mode.  Although the floating point and the
1030 	     integer modes need the same number of hard registers, the
1031 	     size of the floating point mode can be less than that of the
1032 	     integer mode.  */
1033 	  && ! lra_in_progress
1034 	  && paradoxical_subreg_p (op))
1035 	return 0;
1036 
1037       op = sub;
1038       code = GET_CODE (op);
1039     }
1040 
1041   if (code == REG)
1042     return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1043 	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1044 
1045   if (code == MEM)
1046     {
1047       rtx y = XEXP (op, 0);
1048 
1049       if (! volatile_ok && MEM_VOLATILE_P (op))
1050 	return 0;
1051 
1052       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1053 	 generate move insns with invalid addresses, which are made valid
1054 	 and calculated efficiently by LRA through further numerous
1055 	 transformations.  */
1056       if (lra_in_progress
1057 	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1058 	return 1;
1059     }
1060 
1061   return 0;
1062 }
1063 
1064 /* Return 1 if OP is a valid memory address for a memory reference
1065    of mode MODE.
1066 
1067    The main use of this function is as a predicate in match_operand
1068    expressions in the machine description.  */
1069 
1070 int
1071 address_operand (rtx op, machine_mode mode)
1072 {
1073   return memory_address_p (mode, op);
1074 }
1075 
1076 /* Return 1 if OP is a register reference of mode MODE.
1077    If MODE is VOIDmode, accept a register in any mode.
1078 
1079    The main use of this function is as a predicate in match_operand
1080    expressions in the machine description.  */
1081 
1082 int
1083 register_operand (rtx op, machine_mode mode)
1084 {
1085   if (GET_CODE (op) == SUBREG)
1086     {
1087       rtx sub = SUBREG_REG (op);
1088 
1089       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1090 	 because it is guaranteed to be reloaded into one.
1091 	 Just make sure the MEM is valid in itself.
1092 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1093 	 but currently it does result from (SUBREG (REG)...) where the
1094 	 reg went on the stack.)  */
1095       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1096 	return 0;
1097     }
1098   else if (!REG_P (op))
1099     return 0;
1100   return general_operand (op, mode);
1101 }
1102 
1103 /* Return 1 for a register in Pmode; ignore the tested mode.  */
1104 
1105 int
1106 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1107 {
1108   return register_operand (op, Pmode);
1109 }
1110 
1111 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1112    or a hard register.  */
1113 
1114 int
1115 scratch_operand (rtx op, machine_mode mode)
1116 {
1117   if (GET_MODE (op) != mode && mode != VOIDmode)
1118     return 0;
1119 
1120   return (GET_CODE (op) == SCRATCH
1121 	  || (REG_P (op)
1122 	      && (lra_in_progress
1123 		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1124 		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1125 }
1126 
1127 /* Return 1 if OP is a valid immediate operand for mode MODE.
1128 
1129    The main use of this function is as a predicate in match_operand
1130    expressions in the machine description.  */
1131 
1132 int
1133 immediate_operand (rtx op, machine_mode mode)
1134 {
1135   /* Don't accept CONST_INT or anything similar
1136      if the caller wants something floating.  */
1137   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1138       && GET_MODE_CLASS (mode) != MODE_INT
1139       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1140     return 0;
1141 
1142   if (CONST_INT_P (op)
1143       && mode != VOIDmode
1144       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1145     return 0;
1146 
1147   return (CONSTANT_P (op)
1148 	  && (GET_MODE (op) == mode || mode == VOIDmode
1149 	      || GET_MODE (op) == VOIDmode)
1150 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1151 	  && targetm.legitimate_constant_p (mode == VOIDmode
1152 					    ? GET_MODE (op)
1153 					    : mode, op));
1154 }
1155 
1156 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */
1157 
1158 int
1159 const_int_operand (rtx op, machine_mode mode)
1160 {
1161   if (!CONST_INT_P (op))
1162     return 0;
1163 
1164   if (mode != VOIDmode
1165       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1166     return 0;
1167 
1168   return 1;
1169 }
1170 
1171 #if TARGET_SUPPORTS_WIDE_INT
1172 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1173    of mode MODE.  */
1174 int
1175 const_scalar_int_operand (rtx op, machine_mode mode)
1176 {
1177   if (!CONST_SCALAR_INT_P (op))
1178     return 0;
1179 
1180   if (CONST_INT_P (op))
1181     return const_int_operand (op, mode);
1182 
1183   if (mode != VOIDmode)
1184     {
1185       scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1186       int prec = GET_MODE_PRECISION (int_mode);
1187       int bitsize = GET_MODE_BITSIZE (int_mode);
1188 
1189       if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1190 	return 0;
1191 
1192       if (prec == bitsize)
1193 	return 1;
1194       else
1195 	{
1196 	  /* Multiword partial int.  */
1197 	  HOST_WIDE_INT x
1198 	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1199 	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1200 	}
1201     }
1202   return 1;
1203 }
1204 
1205 /* Returns 1 if OP is an operand that is a constant integer or constant
1206    floating-point number of MODE.  */
1207 
1208 int
1209 const_double_operand (rtx op, machine_mode mode)
1210 {
1211   return (GET_CODE (op) == CONST_DOUBLE)
1212 	  && (GET_MODE (op) == mode || mode == VOIDmode);
1213 }
1214 #else
1215 /* Returns 1 if OP is an operand that is a constant integer or constant
1216    floating-point number of MODE.  */
1217 
1218 int
1219 const_double_operand (rtx op, machine_mode mode)
1220 {
1221   /* Don't accept CONST_INT or anything similar
1222      if the caller wants something floating.  */
1223   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1224       && GET_MODE_CLASS (mode) != MODE_INT
1225       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1226     return 0;
1227 
1228   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1229 	  && (mode == VOIDmode || GET_MODE (op) == mode
1230 	      || GET_MODE (op) == VOIDmode));
1231 }
1232 #endif
1233 /* Return 1 if OP is a general operand that is not an immediate
1234    operand of mode MODE.  */
1235 
1236 int
1237 nonimmediate_operand (rtx op, machine_mode mode)
1238 {
1239   return (general_operand (op, mode) && ! CONSTANT_P (op));
1240 }
1241 
1242 /* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1243 
1244 int
1245 nonmemory_operand (rtx op, machine_mode mode)
1246 {
1247   if (CONSTANT_P (op))
1248     return immediate_operand (op, mode);
1249   return register_operand (op, mode);
1250 }
1251 
1252 /* Return 1 if OP is a valid operand that stands for pushing a
1253    value of mode MODE onto the stack.
1254 
1255    The main use of this function is as a predicate in match_operand
1256    expressions in the machine description.  */
1257 
1258 int
1259 push_operand (rtx op, machine_mode mode)
1260 {
1261   if (!MEM_P (op))
1262     return 0;
1263 
1264   if (mode != VOIDmode && GET_MODE (op) != mode)
1265     return 0;
1266 
1267   poly_int64 rounded_size = GET_MODE_SIZE (mode);
1268 
1269 #ifdef PUSH_ROUNDING
1270   rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1271 #endif
1272 
1273   op = XEXP (op, 0);
1274 
1275   if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1276     {
1277       if (GET_CODE (op) != STACK_PUSH_CODE)
1278 	return 0;
1279     }
1280   else
1281     {
1282       poly_int64 offset;
1283       if (GET_CODE (op) != PRE_MODIFY
1284 	  || GET_CODE (XEXP (op, 1)) != PLUS
1285 	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1286 	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1287 	  || (STACK_GROWS_DOWNWARD
1288 	      ? maybe_ne (offset, -rounded_size)
1289 	      : maybe_ne (offset, rounded_size)))
1290 	return 0;
1291     }
1292 
1293   return XEXP (op, 0) == stack_pointer_rtx;
1294 }
1295 
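/* For instance, on a hypothetical 32-bit target where the stack grows
   downward and STACK_PUSH_CODE is PRE_DEC, a simple push operand has the
   form

     (mem:SI (pre_dec:SI (reg:SI sp)))

   whereas a push whose size must be rounded up matches only through the
   PRE_MODIFY form checked above.  */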
1296 /* Return 1 if OP is a valid operand that stands for popping a
1297    value of mode MODE off the stack.
1298 
1299    The main use of this function is as a predicate in match_operand
1300    expressions in the machine description.  */
1301 
1302 int
1303 pop_operand (rtx op, machine_mode mode)
1304 {
1305   if (!MEM_P (op))
1306     return 0;
1307 
1308   if (mode != VOIDmode && GET_MODE (op) != mode)
1309     return 0;
1310 
1311   op = XEXP (op, 0);
1312 
1313   if (GET_CODE (op) != STACK_POP_CODE)
1314     return 0;
1315 
1316   return XEXP (op, 0) == stack_pointer_rtx;
1317 }
1318 
1319 /* Return 1 if ADDR is a valid memory address
1320    for mode MODE in address space AS.  */
1321 
1322 int
1323 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1324 			     rtx addr, addr_space_t as)
1325 {
1326 #ifdef GO_IF_LEGITIMATE_ADDRESS
1327   gcc_assert (ADDR_SPACE_GENERIC_P (as));
1328   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1329   return 0;
1330 
1331  win:
1332   return 1;
1333 #else
1334   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1335 #endif
1336 }
1337 
1338 /* Return 1 if OP is a valid memory reference with mode MODE,
1339    including a valid address.
1340 
1341    The main use of this function is as a predicate in match_operand
1342    expressions in the machine description.  */
1343 
1344 int
1345 memory_operand (rtx op, machine_mode mode)
1346 {
1347   rtx inner;
1348 
1349   if (! reload_completed)
1350     /* Note that no SUBREG is a memory operand before end of reload pass,
1351        because (SUBREG (MEM...)) forces reloading into a register.  */
1352     return MEM_P (op) && general_operand (op, mode);
1353 
1354   if (mode != VOIDmode && GET_MODE (op) != mode)
1355     return 0;
1356 
1357   inner = op;
1358   if (GET_CODE (inner) == SUBREG)
1359     inner = SUBREG_REG (inner);
1360 
1361   return (MEM_P (inner) && general_operand (op, mode));
1362 }
1363 
1364 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1365    that is, a memory reference whose address is a general_operand.  */
1366 
1367 int
1368 indirect_operand (rtx op, machine_mode mode)
1369 {
1370   /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1371   if (! reload_completed
1372       && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1373     {
1374       if (mode != VOIDmode && GET_MODE (op) != mode)
1375 	return 0;
1376 
1377       /* The only way that we can have a general_operand as the resulting
1378 	 address is if OFFSET is zero and the address already is an operand
1379 	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1380 	 operand.  */
1381       poly_int64 offset;
1382       rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1383       return (known_eq (offset + SUBREG_BYTE (op), 0)
1384 	      && general_operand (addr, Pmode));
1385     }
1386 
1387   return (MEM_P (op)
1388 	  && memory_operand (op, mode)
1389 	  && general_operand (XEXP (op, 0), Pmode));
1390 }
1391 
1392 /* Return 1 if this is an ordered comparison operator (not including
1393    ORDERED and UNORDERED).  */
1394 
1395 int
1396 ordered_comparison_operator (rtx op, machine_mode mode)
1397 {
1398   if (mode != VOIDmode && GET_MODE (op) != mode)
1399     return false;
1400   switch (GET_CODE (op))
1401     {
1402     case EQ:
1403     case NE:
1404     case LT:
1405     case LTU:
1406     case LE:
1407     case LEU:
1408     case GT:
1409     case GTU:
1410     case GE:
1411     case GEU:
1412       return true;
1413     default:
1414       return false;
1415     }
1416 }
1417 
1418 /* Return 1 if this is a comparison operator.  This allows the use of
1419    MATCH_OPERATOR to recognize all the branch insns.  */
1420 
1421 int
1422 comparison_operator (rtx op, machine_mode mode)
1423 {
1424   return ((mode == VOIDmode || GET_MODE (op) == mode)
1425 	  && COMPARISON_P (op));
1426 }
1427 
1428 /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1429 
1430 rtx
1431 extract_asm_operands (rtx body)
1432 {
1433   rtx tmp;
1434   switch (GET_CODE (body))
1435     {
1436     case ASM_OPERANDS:
1437       return body;
1438 
1439     case SET:
1440       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1441       tmp = SET_SRC (body);
1442       if (GET_CODE (tmp) == ASM_OPERANDS)
1443 	return tmp;
1444       break;
1445 
1446     case PARALLEL:
1447       tmp = XVECEXP (body, 0, 0);
1448       if (GET_CODE (tmp) == ASM_OPERANDS)
1449 	return tmp;
1450       if (GET_CODE (tmp) == SET)
1451 	{
1452 	  tmp = SET_SRC (tmp);
1453 	  if (GET_CODE (tmp) == ASM_OPERANDS)
1454 	    return tmp;
1455 	}
1456       break;
1457 
1458     default:
1459       break;
1460     }
1461   return NULL;
1462 }
1463 
1464 /* If BODY is an insn body that uses ASM_OPERANDS,
1465    return the number of operands (both input and output) in the insn.
1466    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1467    return 0.
1468    Otherwise return -1.  */
1469 
1470 int
1471 asm_noperands (const_rtx body)
1472 {
1473   rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1474   int i, n_sets = 0;
1475 
1476   if (asm_op == NULL)
1477     {
1478       if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1479 	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1480 	{
1481 	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
1482 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1483 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1484 	      return -1;
1485 	  return 0;
1486 	}
1487       return -1;
1488     }
1489 
1490   if (GET_CODE (body) == SET)
1491     n_sets = 1;
1492   else if (GET_CODE (body) == PARALLEL)
1493     {
1494       if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1495 	{
1496 	  /* Multiple output operands, or 1 output plus some clobbers:
1497 	     body is
1498 	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1499 	  /* Count backwards through CLOBBERs to determine number of SETs.  */
1500 	  for (i = XVECLEN (body, 0); i > 0; i--)
1501 	    {
1502 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1503 		break;
1504 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1505 		return -1;
1506 	    }
1507 
1508 	  /* N_SETS is now number of output operands.  */
1509 	  n_sets = i;
1510 
1511 	  /* Verify that all the SETs we have
1512 	     came from a single original asm_operands insn
1513 	     (so that invalid combinations are blocked).  */
1514 	  for (i = 0; i < n_sets; i++)
1515 	    {
1516 	      rtx elt = XVECEXP (body, 0, i);
1517 	      if (GET_CODE (elt) != SET)
1518 		return -1;
1519 	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1520 		return -1;
1521 	      /* If these ASM_OPERANDS rtx's came from different original insns
1522 	         then they aren't allowed together.  */
1523 	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1524 		  != ASM_OPERANDS_INPUT_VEC (asm_op))
1525 		return -1;
1526 	    }
1527 	}
1528       else
1529 	{
1530 	  /* 0 outputs, but some clobbers:
1531 	     body is [(asm_operands ...) (clobber (reg ...))...].  */
1532 	  /* Make sure all the other parallel things really are clobbers.  */
1533 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1534 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1535 	      return -1;
1536 	}
1537     }
1538 
1539   return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1540 	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1541 }
1542 
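/* As a concrete (illustrative) example, for

     asm ("foo %0,%1,%2" : "=r" (a), "=r" (b) : "r" (c) : "cc");

   BODY is a PARALLEL of two SETs followed by a CLOBBER, and asm_noperands
   returns 3: two outputs plus one input; the clobber is not an operand.  */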
1543 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1544    copy its operands (both input and output) into the vector OPERANDS,
1545    the locations of the operands within the insn into the vector OPERAND_LOCS,
1546    and the constraints for the operands into CONSTRAINTS.
1547    Write the modes of the operands into MODES.
1548    Write the location info into LOC.
1549    Return the assembler-template.
1550    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1551    return the basic assembly string.
1552 
1553    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1554    we don't store that info.  */
1555 
1556 const char *
1557 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1558 		     const char **constraints, machine_mode *modes,
1559 		     location_t *loc)
1560 {
1561   int nbase = 0, n, i;
1562   rtx asmop;
1563 
1564   switch (GET_CODE (body))
1565     {
1566     case ASM_OPERANDS:
1567       /* Zero output asm: BODY is (asm_operands ...).  */
1568       asmop = body;
1569       break;
1570 
1571     case SET:
1572       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
1573       asmop = SET_SRC (body);
1574 
1575       /* The output is in the SET.
1576 	 Its constraint is in the ASM_OPERANDS itself.  */
1577       if (operands)
1578 	operands[0] = SET_DEST (body);
1579       if (operand_locs)
1580 	operand_locs[0] = &SET_DEST (body);
1581       if (constraints)
1582 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1583       if (modes)
1584 	modes[0] = GET_MODE (SET_DEST (body));
1585       nbase = 1;
1586       break;
1587 
1588     case PARALLEL:
1589       {
1590 	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
1591 
1592 	asmop = XVECEXP (body, 0, 0);
1593 	if (GET_CODE (asmop) == SET)
1594 	  {
1595 	    asmop = SET_SRC (asmop);
1596 
1597 	    /* At least one output, plus some CLOBBERs.  The outputs are in
1598 	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
1599 	    for (i = 0; i < nparallel; i++)
1600 	      {
1601 		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1602 		  break;		/* Past last SET */
1603 		if (operands)
1604 		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
1605 		if (operand_locs)
1606 		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1607 		if (constraints)
1608 		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1609 		if (modes)
1610 		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1611 	      }
1612 	    nbase = i;
1613 	  }
1614 	else if (GET_CODE (asmop) == ASM_INPUT)
1615 	  {
1616 	    if (loc)
1617 	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1618 	    return XSTR (asmop, 0);
1619 	  }
1620 	break;
1621       }
1622 
1623     default:
1624       gcc_unreachable ();
1625     }
1626 
1627   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1628   for (i = 0; i < n; i++)
1629     {
1630       if (operand_locs)
1631 	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1632       if (operands)
1633 	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1634       if (constraints)
1635 	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1636       if (modes)
1637 	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1638     }
1639   nbase += n;
1640 
1641   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1642   for (i = 0; i < n; i++)
1643     {
1644       if (operand_locs)
1645 	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1646       if (operands)
1647 	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1648       if (constraints)
1649 	constraints[nbase + i] = "";
1650       if (modes)
1651 	modes[nbase + i] = Pmode;
1652     }
1653 
1654   if (loc)
1655     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1656 
1657   return ASM_OPERANDS_TEMPLATE (asmop);
1658 }
1659 
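/* A typical calling sequence (a sketch mirroring check_asm_operands above,
   assuming asm_noperands has returned a positive count):

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     decode_asm_operands (body, ops, NULL, cons, NULL, NULL);

   after which ops[0..n-1] hold the outputs, then the inputs, then the
   labels, and cons[i] holds the constraint string for operand I.  */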
1660 /* Parse inline assembly string STRING and determine which operands are
1661    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1662    to true if operand I is referenced.
1663 
1664    This is intended to distinguish barrier-like asms such as:
1665 
1666       asm ("" : "=m" (...));
1667 
1668    from real references such as:
1669 
1670       asm ("sw\t$0, %0" : "=m" (...));  */
1671 
1672 void
1673 get_referenced_operands (const char *string, bool *used,
1674 			 unsigned int noperands)
1675 {
1676   memset (used, 0, sizeof (bool) * noperands);
1677   const char *p = string;
1678   while (*p)
1679     switch (*p)
1680       {
1681       case '%':
1682 	p += 1;
1683 	/* A letter followed by a digit indicates an operand number.  */
1684 	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1685 	  p += 1;
1686 	if (ISDIGIT (*p))
1687 	  {
1688 	    char *endptr;
1689 	    unsigned long opnum = strtoul (p, &endptr, 10);
1690 	    if (endptr != p && opnum < noperands)
1691 	      used[opnum] = true;
1692 	    p = endptr;
1693 	  }
1694 	else
1695 	  p += 1;
1696 	break;
1697 
1698       default:
1699 	p++;
1700 	break;
1701       }
1702 }
1703 
1704 /* Check if an asm_operand matches its constraints.
1705    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1706 
1707 int
1708 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1709 {
1710   int result = 0;
1711   bool incdec_ok = false;
1712 
1713   /* Use constrain_operands after reload.  */
1714   gcc_assert (!reload_completed);
1715 
1716   /* Empty constraint string is the same as "X,...,X", i.e. X for as
1717      many alternatives as required to match the other operands.  */
1718   if (*constraint == '\0')
1719     result = 1;
1720 
1721   while (*constraint)
1722     {
1723       enum constraint_num cn;
1724       char c = *constraint;
1725       int len;
1726       switch (c)
1727 	{
1728 	case ',':
1729 	  constraint++;
1730 	  continue;
1731 
1732 	case '0': case '1': case '2': case '3': case '4':
1733 	case '5': case '6': case '7': case '8': case '9':
1734 	  /* If caller provided constraints pointer, look up
1735 	     the matching constraint.  Otherwise, our caller should have
1736 	     given us the proper matching constraint, but we can't
1737 	     actually fail the check if they didn't.  Indicate that
1738 	     results are inconclusive.  */
1739 	  if (constraints)
1740 	    {
1741 	      char *end;
1742 	      unsigned long match;
1743 
1744 	      match = strtoul (constraint, &end, 10);
1745 	      if (!result)
1746 		result = asm_operand_ok (op, constraints[match], NULL);
1747 	      constraint = (const char *) end;
1748 	    }
1749 	  else
1750 	    {
1751 	      do
1752 		constraint++;
1753 	      while (ISDIGIT (*constraint));
1754 	      if (! result)
1755 		result = -1;
1756 	    }
1757 	  continue;
1758 
1759 	  /* The rest of the compiler assumes that reloading the address
1760 	     of a MEM into a register will make it fit an 'o' constraint.
1761 	     That is, if it sees a MEM operand for an 'o' constraint,
1762 	     it assumes that (mem (base-reg)) will fit.
1763 
1764 	     That assumption fails on targets that don't have offsettable
1765 	     addresses at all.  We therefore need to treat 'o' asm
1766 	     constraints as a special case and only accept operands that
1767 	     are already offsettable, thus proving that at least one
1768 	     offsettable address exists.  */
1769 	case 'o': /* offsettable */
1770 	  if (offsettable_nonstrict_memref_p (op))
1771 	    result = 1;
1772 	  break;
1773 
1774 	case 'g':
1775 	  if (general_operand (op, VOIDmode))
1776 	    result = 1;
1777 	  break;
1778 
1779 	case '<':
1780 	case '>':
1781 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1782 	     to exist, excepting those that expand_call created.  Further,
1783 	     on some machines which do not have generalized auto inc/dec,
1784 	     an inc/dec is not a memory_operand.
1785 
1786 	     Match any memory and hope things are resolved after reload.  */
1787 	  incdec_ok = true;
1788 	  /* FALLTHRU */
1789 	default:
1790 	  cn = lookup_constraint (constraint);
1791 	  switch (get_constraint_type (cn))
1792 	    {
1793 	    case CT_REGISTER:
1794 	      if (!result
1795 		  && reg_class_for_constraint (cn) != NO_REGS
1796 		  && GET_MODE (op) != BLKmode
1797 		  && register_operand (op, VOIDmode))
1798 		result = 1;
1799 	      break;
1800 
1801 	    case CT_CONST_INT:
1802 	      if (!result
1803 		  && CONST_INT_P (op)
1804 		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1805 		result = 1;
1806 	      break;
1807 
1808 	    case CT_MEMORY:
1809 	    case CT_SPECIAL_MEMORY:
1810 	      /* Every memory operand can be reloaded to fit.  */
1811 	      result = result || memory_operand (op, VOIDmode);
1812 	      break;
1813 
1814 	    case CT_ADDRESS:
1815 	      /* Every address operand can be reloaded to fit.  */
1816 	      result = result || address_operand (op, VOIDmode);
1817 	      break;
1818 
1819 	    case CT_FIXED_FORM:
1820 	      result = result || constraint_satisfied_p (op, cn);
1821 	      break;
1822 	    }
1823 	  break;
1824 	}
1825       len = CONSTRAINT_LEN (c, constraint);
1826       do
1827 	constraint++;
1828       while (--len && *constraint && *constraint != ',');
1829       if (len)
1830 	return 0;
1831     }
1832 
1833   /* For operands without < or > constraints reject side-effects.  */
1834   if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1835     switch (GET_CODE (XEXP (op, 0)))
1836       {
1837       case PRE_INC:
1838       case POST_INC:
1839       case PRE_DEC:
1840       case POST_DEC:
1841       case PRE_MODIFY:
1842       case POST_MODIFY:
1843 	return 0;
1844       default:
1845 	break;
1846       }
1847 
1848   return result;
1849 }
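
/* Illustrative sketch (guarded out; the helper is hypothetical): before
   reload a hard or pseudo register trivially satisfies an "r" constraint,
   so asm_operand_ok returns a positive value for it.  */
#if 0
static int
example_reg_satisfies_r (void)
{
  rtx reg = gen_rtx_REG (SImode, 1);
  return asm_operand_ok (reg, "r", NULL);	/* > 0 */
}
#endif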
1850 
1851 /* Given an rtx *P, if it is a sum containing an integer constant term,
1852    return the location (type rtx *) of the pointer to that constant term.
1853    Otherwise, return a null pointer.  */
1854 
1855 rtx *
1856 find_constant_term_loc (rtx *p)
1857 {
1858   rtx *tem;
1859   enum rtx_code code = GET_CODE (*p);
1860 
1861   /* If *P IS such a constant term, P is its location.  */
1862 
1863   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1864       || code == CONST)
1865     return p;
1866 
1867   /* Otherwise, if not a sum, it has no constant term.  */
1868 
1869   if (GET_CODE (*p) != PLUS)
1870     return 0;
1871 
1872   /* If one of the summands is constant, return its location.  */
1873 
1874   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1875       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1876     return p;
1877 
1878   /* Otherwise, check each summand for containing a constant term.  */
1879 
1880   if (XEXP (*p, 0) != 0)
1881     {
1882       tem = find_constant_term_loc (&XEXP (*p, 0));
1883       if (tem != 0)
1884 	return tem;
1885     }
1886 
1887   if (XEXP (*p, 1) != 0)
1888     {
1889       tem = find_constant_term_loc (&XEXP (*p, 1));
1890       if (tem != 0)
1891 	return tem;
1892     }
1893 
1894   return 0;
1895 }
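
/* Worked example (guarded out; the rtx built here is purely for
   illustration): for (plus (reg) (const_int 4)) the recursion returns
   the location of the CONST_INT inside the PLUS, which callers such as
   offsettable_address_addr_space_p temporarily overwrite in place.  */
#if 0
static rtx *
example_locate_constant_term (void)
{
  rtx addr = gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 1), GEN_INT (4));
  return find_constant_term_loc (&addr);	/* == &XEXP (addr, 1) */
}
#endif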
1896 
1897 /* Return 1 if OP is a memory reference
1898    whose address contains no side effects
1899    and remains valid after the addition
1900    of a positive integer less than the
1901    size of the object being referenced.
1902 
1903    We assume that the original address is valid and do not check it.
1904 
1905    This uses strict_memory_address_p as a subroutine, so
1906    don't use it before reload.  */
1907 
1908 int
1909 offsettable_memref_p (rtx op)
1910 {
1911   return ((MEM_P (op))
1912 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1913 					       MEM_ADDR_SPACE (op)));
1914 }
1915 
1916 /* Similar, but don't require a strictly valid mem ref:
1917    consider pseudo-regs valid as index or base regs.  */
1918 
1919 int
1920 offsettable_nonstrict_memref_p (rtx op)
1921 {
1922   return ((MEM_P (op))
1923 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1924 					       MEM_ADDR_SPACE (op)));
1925 }
1926 
1927 /* Return 1 if Y is a memory address which contains no side effects
1928    and would remain valid for address space AS after the addition of
1929    a positive integer less than the size of MODE.
1930 
1931    We assume that the original address is valid and do not check it.
1932    We do check that it is valid for narrower modes.
1933 
1934    If STRICTP is nonzero, we require a strictly valid address,
1935    for the sake of use in reload.c.  */
1936 
1937 int
1938 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1939 				  addr_space_t as)
1940 {
1941   enum rtx_code ycode = GET_CODE (y);
1942   rtx z;
1943   rtx y1 = y;
1944   rtx *y2;
1945   int (*addressp) (machine_mode, rtx, addr_space_t) =
1946     (strictp ? strict_memory_address_addr_space_p
1947 	     : memory_address_addr_space_p);
1948   poly_int64 mode_sz = GET_MODE_SIZE (mode);
1949 
1950   if (CONSTANT_ADDRESS_P (y))
1951     return 1;
1952 
1953   /* Adjusting an offsettable address involves changing to a narrower mode.
1954      Make sure that's OK.  */
1955 
1956   if (mode_dependent_address_p (y, as))
1957     return 0;
1958 
1959   machine_mode address_mode = GET_MODE (y);
1960   if (address_mode == VOIDmode)
1961     address_mode = targetm.addr_space.address_mode (as);
1962 #ifdef POINTERS_EXTEND_UNSIGNED
1963   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1964 #endif
1965 
1966   /* ??? How much offset does an offsettable BLKmode reference need?
1967      Clearly that depends on the situation in which it's being used.
1968      However, the current situation in which we test 0xffffffff is
1969      less than ideal.  Caveat user.  */
1970   if (known_eq (mode_sz, 0))
1971     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1972 
1973   /* If the expression contains a constant term,
1974      see if it remains valid when max possible offset is added.  */
1975 
1976   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1977     {
1978       int good;
1979 
1980       y1 = *y2;
1981       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1982       /* Use QImode because an odd displacement may be automatically invalid
1983 	 for any wider mode.  But it should be valid for a single byte.  */
1984       good = (*addressp) (QImode, y, as);
1985 
1986       /* In any case, restore old contents of memory.  */
1987       *y2 = y1;
1988       return good;
1989     }
1990 
1991   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1992     return 0;
1993 
1994   /* The offset added here is chosen as the maximum offset that
1995      any instruction could need to add when operating on something
1996      of the specified mode.  We assume that if Y and Y+c are
1997      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
1998      go inside a LO_SUM here, so we do so as well.  */
1999   if (GET_CODE (y) == LO_SUM
2000       && mode != BLKmode
2001       && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2002     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2003 			plus_constant (address_mode, XEXP (y, 1),
2004 				       mode_sz - 1));
2005 #ifdef POINTERS_EXTEND_UNSIGNED
2006   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2007   else if (POINTERS_EXTEND_UNSIGNED > 0
2008 	   && GET_CODE (y) == ZERO_EXTEND
2009 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2010     z = gen_rtx_ZERO_EXTEND (address_mode,
2011 			     plus_constant (pointer_mode, XEXP (y, 0),
2012 					    mode_sz - 1));
2013 #endif
2014   else
2015     z = plus_constant (address_mode, y, mode_sz - 1);
2016 
2017   /* Use QImode because an odd displacement may be automatically invalid
2018      for any wider mode.  But it should be valid for a single byte.  */
2019   return (*addressp) (QImode, z, as);
2020 }
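
/* Probe sketch (guarded out; the address is made up): for an SImode
   reference the routine above effectively checks the last byte the access
   could touch, here (plus (reg) (const_int 7 + 3)), in QImode.  */
#if 0
static int
example_si_ref_is_offsettable (void)
{
  rtx addr = gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 1), GEN_INT (7));
  return offsettable_address_addr_space_p (0, SImode, addr,
					   ADDR_SPACE_GENERIC);
}
#endif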
2021 
2022 /* Return 1 if ADDR is an address-expression whose effect depends
2023    on the mode of the memory reference it is used in.
2024 
2025    ADDRSPACE is the address space associated with the address.
2026 
2027    Autoincrement addressing is a typical example of mode-dependence
2028    because the amount of the increment depends on the mode.  */
2029 
2030 bool
2031 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2032 {
2033   /* Auto-increment addressing with anything other than post_modify
2034      or pre_modify always introduces a mode dependency.  Catch such
2035      cases now instead of deferring to the target.  */
2036   if (GET_CODE (addr) == PRE_INC
2037       || GET_CODE (addr) == POST_INC
2038       || GET_CODE (addr) == PRE_DEC
2039       || GET_CODE (addr) == POST_DEC)
2040     return true;
2041 
2042   return targetm.mode_dependent_address_p (addr, addrspace);
2043 }
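
/* Quick sketch (guarded out; the helper is hypothetical): any bare
   auto-increment address is caught here before the target hook is even
   consulted.  */
#if 0
static bool
example_post_inc_is_mode_dependent (void)
{
  rtx addr = gen_rtx_POST_INC (Pmode, gen_rtx_REG (Pmode, 1));
  return mode_dependent_address_p (addr, ADDR_SPACE_GENERIC);	/* true */
}
#endif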
2044 
2045 /* Return true if boolean attribute ATTR is supported.  */
2046 
2047 static bool
2048 have_bool_attr (bool_attr attr)
2049 {
2050   switch (attr)
2051     {
2052     case BA_ENABLED:
2053       return HAVE_ATTR_enabled;
2054     case BA_PREFERRED_FOR_SIZE:
2055       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2056     case BA_PREFERRED_FOR_SPEED:
2057       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2058     }
2059   gcc_unreachable ();
2060 }
2061 
2062 /* Return the value of ATTR for instruction INSN.  */
2063 
2064 static bool
2065 get_bool_attr (rtx_insn *insn, bool_attr attr)
2066 {
2067   switch (attr)
2068     {
2069     case BA_ENABLED:
2070       return get_attr_enabled (insn);
2071     case BA_PREFERRED_FOR_SIZE:
2072       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2073     case BA_PREFERRED_FOR_SPEED:
2074       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2075     }
2076   gcc_unreachable ();
2077 }
2078 
2079 /* Like get_bool_attr_mask, but don't use the cache.  */
2080 
2081 static alternative_mask
2082 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2083 {
2084   /* Temporarily install enough information for get_attr_<foo> to assume
2085      that the insn operands are already cached.  As above, the attribute
2086      mustn't depend on the values of operands, so we don't provide their
2087      real values here.  */
2088   rtx_insn *old_insn = recog_data.insn;
2089   int old_alternative = which_alternative;
2090 
2091   recog_data.insn = insn;
2092   alternative_mask mask = ALL_ALTERNATIVES;
2093   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2094   for (int i = 0; i < n_alternatives; i++)
2095     {
2096       which_alternative = i;
2097       if (!get_bool_attr (insn, attr))
2098 	mask &= ~ALTERNATIVE_BIT (i);
2099     }
2100 
2101   recog_data.insn = old_insn;
2102   which_alternative = old_alternative;
2103   return mask;
2104 }
2105 
2106 /* Return the mask of operand alternatives that are allowed for INSN
2107    by boolean attribute ATTR.  This mask depends only on INSN and on
2108    the current target; it does not depend on things like the values of
2109    operands.  */
2110 
2111 static alternative_mask
2112 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2113 {
2114   /* Quick exit for asms and for targets that don't use these attributes.  */
2115   int code = INSN_CODE (insn);
2116   if (code < 0 || !have_bool_attr (attr))
2117     return ALL_ALTERNATIVES;
2118 
2119   /* Calling get_attr_<foo> can be expensive, so cache the mask
2120      for speed.  */
2121   if (!this_target_recog->x_bool_attr_masks[code][attr])
2122     this_target_recog->x_bool_attr_masks[code][attr]
2123       = get_bool_attr_mask_uncached (insn, attr);
2124   return this_target_recog->x_bool_attr_masks[code][attr];
2125 }
2126 
2127 /* Return the set of alternatives of INSN that are allowed by the current
2128    target.  */
2129 
2130 alternative_mask
2131 get_enabled_alternatives (rtx_insn *insn)
2132 {
2133   return get_bool_attr_mask (insn, BA_ENABLED);
2134 }
2135 
2136 /* Return the set of alternatives of INSN that are allowed by the current
2137    target and are preferred for the current size/speed optimization
2138    choice.  */
2139 
2140 alternative_mask
2141 get_preferred_alternatives (rtx_insn *insn)
2142 {
2143   if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2144     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2145   else
2146     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2147 }
2148 
2149 /* Return the set of alternatives of INSN that are allowed by the current
2150    target and are preferred for the size/speed optimization choice
2151    associated with BB.  Passing a separate BB is useful if INSN has not
2152    been emitted yet or if we are considering moving it to a different
2153    block.  */
2154 
2155 alternative_mask
2156 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2157 {
2158   if (optimize_bb_for_speed_p (bb))
2159     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2160   else
2161     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2162 }
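
/* Typical use (guarded out; the helper is hypothetical): a pass fetches
   the mask once and then tests individual alternatives with TEST_BIT,
   just as constrain_operands does below.  */
#if 0
static bool
example_alternative_is_preferred (rtx_insn *insn, int alt)
{
  alternative_mask prefs = get_preferred_alternatives (insn);
  return TEST_BIT (prefs, alt);
}
#endif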
2163 
2164 /* Assert that the cached boolean attributes for INSN are still accurate.
2165    The backend is required to define these attributes in a way that only
2166    depends on the current target (rather than operands, compiler phase,
2167    etc.).  */
2168 
2169 bool
2170 check_bool_attrs (rtx_insn *insn)
2171 {
2172   int code = INSN_CODE (insn);
2173   if (code >= 0)
2174     for (int i = 0; i <= BA_LAST; ++i)
2175       {
2176 	enum bool_attr attr = (enum bool_attr) i;
2177 	if (this_target_recog->x_bool_attr_masks[code][attr])
2178 	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2179 		      == get_bool_attr_mask_uncached (insn, attr));
2180       }
2181   return true;
2182 }
2183 
2184 /* Like extract_insn, but save the extracted insn and don't extract again
2185    when called again for the same insn, expecting that recog_data still
2186    contains valid information.  This is used primarily by the gen_attr
2187    infrastructure, which tends to extract the same insn over and over.  */
2188 void
2189 extract_insn_cached (rtx_insn *insn)
2190 {
2191   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2192     return;
2193   extract_insn (insn);
2194   recog_data.insn = insn;
2195 }
2196 
2197 /* Do uncached extract_insn, constrain_operands and complain about failures.
2198    This should be used when extracting a pre-existing constrained instruction
2199    if the caller wants to know which alternative was chosen.  */
2200 void
2201 extract_constrain_insn (rtx_insn *insn)
2202 {
2203   extract_insn (insn);
2204   if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2205     fatal_insn_not_found (insn);
2206 }
2207 
2208 /* Do cached extract_insn, constrain_operands and complain about failures.
2209    Used by insn_attrtab.  */
2210 void
2211 extract_constrain_insn_cached (rtx_insn *insn)
2212 {
2213   extract_insn_cached (insn);
2214   if (which_alternative == -1
2215       && !constrain_operands (reload_completed,
2216 			      get_enabled_alternatives (insn)))
2217     fatal_insn_not_found (insn);
2218 }
2219 
2220 /* Do cached constrain_operands on INSN; return nonzero on success.  */
2221 int
2222 constrain_operands_cached (rtx_insn *insn, int strict)
2223 {
2224   if (which_alternative == -1)
2225     return constrain_operands (strict, get_enabled_alternatives (insn));
2226   else
2227     return 1;
2228 }
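
/* Caller sketch (guarded out; the helper is hypothetical): attribute code
   usually extracts and constrains once, then reads the cached recog_data
   and which_alternative directly.  */
#if 0
static rtx
example_operand_of_chosen_alternative (rtx_insn *insn)
{
  extract_constrain_insn_cached (insn);
  /* which_alternative now names the matched alternative; the operands
     live in recog_data.operand[].  */
  return recog_data.operand[0];
}
#endif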
2229 
2230 /* Analyze INSN and fill in recog_data.  */
2231 
2232 void
2233 extract_insn (rtx_insn *insn)
2234 {
2235   int i;
2236   int icode;
2237   int noperands;
2238   rtx body = PATTERN (insn);
2239 
2240   recog_data.n_operands = 0;
2241   recog_data.n_alternatives = 0;
2242   recog_data.n_dups = 0;
2243   recog_data.is_asm = false;
2244 
2245   switch (GET_CODE (body))
2246     {
2247     case USE:
2248     case CLOBBER:
2249     case ASM_INPUT:
2250     case ADDR_VEC:
2251     case ADDR_DIFF_VEC:
2252     case VAR_LOCATION:
2253     case DEBUG_MARKER:
2254       return;
2255 
2256     case SET:
2257       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2258 	goto asm_insn;
2259       else
2260 	goto normal_insn;
2261     case PARALLEL:
2262       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2263 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2264 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2265 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2266 	goto asm_insn;
2267       else
2268 	goto normal_insn;
2269     case ASM_OPERANDS:
2270     asm_insn:
2271       recog_data.n_operands = noperands = asm_noperands (body);
2272       if (noperands >= 0)
2273 	{
2274 	  /* This insn is an `asm' with operands.  */
2275 
2276 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2277 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2278 
2279 	  /* Now get the operand values and constraints out of the insn.  */
2280 	  decode_asm_operands (body, recog_data.operand,
2281 			       recog_data.operand_loc,
2282 			       recog_data.constraints,
2283 			       recog_data.operand_mode, NULL);
2284 	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2285 	  if (noperands > 0)
2286 	    {
2287 	      const char *p =  recog_data.constraints[0];
2288 	      recog_data.n_alternatives = 1;
2289 	      while (*p)
2290 		recog_data.n_alternatives += (*p++ == ',');
2291 	    }
2292 	  recog_data.is_asm = true;
2293 	  break;
2294 	}
2295       fatal_insn_not_found (insn);
2296 
2297     default:
2298     normal_insn:
2299       /* Ordinary insn: recognize it, get the operands via insn_extract
2300 	 and get the constraints.  */
2301 
2302       icode = recog_memoized (insn);
2303       if (icode < 0)
2304 	fatal_insn_not_found (insn);
2305 
2306       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2307       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2308       recog_data.n_dups = insn_data[icode].n_dups;
2309 
2310       insn_extract (insn);
2311 
2312       for (i = 0; i < noperands; i++)
2313 	{
2314 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2315 	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2316 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2317 	  /* VOIDmode match_operands take their mode from the real operand.  */
2318 	  if (recog_data.operand_mode[i] == VOIDmode)
2319 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2320 	}
2321     }
2322   for (i = 0; i < noperands; i++)
2323     recog_data.operand_type[i]
2324       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2325 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2326 	 : OP_IN);
2327 
2328   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2329 
2330   recog_data.insn = NULL;
2331   which_alternative = -1;
2332 }
2333 
2334 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2335    operands, N_ALTERNATIVES alternatives and constraint strings
2336    CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2337    and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
2338    if the insn is an asm statement and preprocessing should take the
2339    asm operands into account, e.g. to determine whether they could be
2340    addresses in constraints that require addresses; it should then
2341    point to an array of pointers to each operand.  */
2342 
2343 void
2344 preprocess_constraints (int n_operands, int n_alternatives,
2345 			const char **constraints,
2346 			operand_alternative *op_alt_base,
2347 			rtx **oploc)
2348 {
2349   for (int i = 0; i < n_operands; i++)
2350     {
2351       int j;
2352       struct operand_alternative *op_alt;
2353       const char *p = constraints[i];
2354 
2355       op_alt = op_alt_base;
2356 
2357       for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2358 	{
2359 	  op_alt[i].cl = NO_REGS;
2360 	  op_alt[i].constraint = p;
2361 	  op_alt[i].matches = -1;
2362 	  op_alt[i].matched = -1;
2363 
2364 	  if (*p == '\0' || *p == ',')
2365 	    {
2366 	      op_alt[i].anything_ok = 1;
2367 	      continue;
2368 	    }
2369 
2370 	  for (;;)
2371 	    {
2372 	      char c = *p;
2373 	      if (c == '#')
2374 		do
2375 		  c = *++p;
2376 		while (c != ',' && c != '\0');
2377 	      if (c == ',' || c == '\0')
2378 		{
2379 		  p++;
2380 		  break;
2381 		}
2382 
2383 	      switch (c)
2384 		{
2385 		case '?':
2386 		  op_alt[i].reject += 6;
2387 		  break;
2388 		case '!':
2389 		  op_alt[i].reject += 600;
2390 		  break;
2391 		case '&':
2392 		  op_alt[i].earlyclobber = 1;
2393 		  break;
2394 
2395 		case '0': case '1': case '2': case '3': case '4':
2396 		case '5': case '6': case '7': case '8': case '9':
2397 		  {
2398 		    char *end;
2399 		    op_alt[i].matches = strtoul (p, &end, 10);
2400 		    op_alt[op_alt[i].matches].matched = i;
2401 		    p = end;
2402 		  }
2403 		  continue;
2404 
2405 		case 'X':
2406 		  op_alt[i].anything_ok = 1;
2407 		  break;
2408 
2409 		case 'g':
2410 		  op_alt[i].cl =
2411 		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2412 		  break;
2413 
2414 		default:
2415 		  enum constraint_num cn = lookup_constraint (p);
2416 		  enum reg_class cl;
2417 		  switch (get_constraint_type (cn))
2418 		    {
2419 		    case CT_REGISTER:
2420 		      cl = reg_class_for_constraint (cn);
2421 		      if (cl != NO_REGS)
2422 			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2423 		      break;
2424 
2425 		    case CT_CONST_INT:
2426 		      break;
2427 
2428 		    case CT_MEMORY:
2429 		    case CT_SPECIAL_MEMORY:
2430 		      op_alt[i].memory_ok = 1;
2431 		      break;
2432 
2433 		    case CT_ADDRESS:
2434 		      if (oploc && !address_operand (*oploc[i], VOIDmode))
2435 			break;
2436 
2437 		      op_alt[i].is_address = 1;
2438 		      op_alt[i].cl
2439 			= (reg_class_subunion
2440 			   [(int) op_alt[i].cl]
2441 			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2442 						  ADDRESS, SCRATCH)]);
2443 		      break;
2444 
2445 		    case CT_FIXED_FORM:
2446 		      break;
2447 		    }
2448 		  break;
2449 		}
2450 	      p += CONSTRAINT_LEN (c, p);
2451 	    }
2452 	}
2453     }
2454 }
2455 
2456 /* Return an array of operand_alternative structures for
2457    instruction ICODE.  */
2458 
2459 const operand_alternative *
2460 preprocess_insn_constraints (unsigned int icode)
2461 {
2462   gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2463   if (this_target_recog->x_op_alt[icode])
2464     return this_target_recog->x_op_alt[icode];
2465 
2466   int n_operands = insn_data[icode].n_operands;
2467   if (n_operands == 0)
2468     return 0;
2469   /* Always provide at least one alternative so that which_op_alt ()
2470      works correctly.  If the instruction has 0 alternatives (i.e. all
2471      constraint strings are empty) then each operand in this alternative
2472      will have anything_ok set.  */
2473   int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2474   int n_entries = n_operands * n_alternatives;
2475 
2476   operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2477   const char **constraints = XALLOCAVEC (const char *, n_operands);
2478 
2479   for (int i = 0; i < n_operands; ++i)
2480     constraints[i] = insn_data[icode].operand[i].constraint;
2481   preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2482 			  NULL);
2483 
2484   this_target_recog->x_op_alt[icode] = op_alt;
2485   return op_alt;
2486 }
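
/* Indexing sketch (guarded out; the helper is hypothetical): the entry
   describing operand OP of alternative ALT lives at
   ALT * n_operands + OP in the array built above.  */
#if 0
static const operand_alternative *
example_op_alt_entry (unsigned int icode, int alt, int op)
{
  const operand_alternative *op_alt = preprocess_insn_constraints (icode);
  return &op_alt[alt * insn_data[icode].n_operands + op];
}
#endif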
2487 
2488 /* After calling extract_insn, you can use this function to extract some
2489    information from the constraint strings into a more usable form.
2490    The collected data is stored in recog_op_alt.  */
2491 
2492 void
2493 preprocess_constraints (rtx_insn *insn)
2494 {
2495   int icode = INSN_CODE (insn);
2496   if (icode >= 0)
2497     recog_op_alt = preprocess_insn_constraints (icode);
2498   else
2499     {
2500       int n_operands = recog_data.n_operands;
2501       int n_alternatives = recog_data.n_alternatives;
2502       int n_entries = n_operands * n_alternatives;
2503       memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2504       preprocess_constraints (n_operands, n_alternatives,
2505 			      recog_data.constraints, asm_op_alt,
2506 			      NULL);
2507       recog_op_alt = asm_op_alt;
2508     }
2509 }
2510 
2511 /* Check the operands of an insn against the insn's operand constraints
2512    and return 1 if they match any of the alternatives in ALTERNATIVES.
2513 
2514    The information about the insn's operands, constraints, operand modes
2515    etc. is obtained from the global variables set up by extract_insn.
2516 
2517    WHICH_ALTERNATIVE is set to a number which indicates which
2518    alternative of constraints was matched: 0 for the first alternative,
2519    1 for the next, etc.
2520 
2521    In addition, when two operands are required to match
2522    and it happens that the output operand is (reg) while the
2523    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2524    make the output operand look like the input.
2525    This is because the output operand is the one the template will print.
2526 
2527    This is used in final, just before printing the assembler code and by
2528    the routines that determine an insn's attribute.
2529 
2530    If STRICT is a positive nonzero value, it means that we have been
2531    called after reload has been completed.  In that case, we must
2532    do all checks strictly.  If it is zero, it means that we have been called
2533    before reload has completed.  In that case, we first try to see if we can
2534    find an alternative that matches strictly.  If not, we try again, this
2535    time assuming that reload will fix up the insn.  This provides a "best
2536    guess" for the alternative and is used to compute attributes of insns prior
2537    to reload.  A negative value of STRICT is used for this internal call.  */
2538 
2539 struct funny_match
2540 {
2541   int this_op, other;
2542 };
2543 
2544 int
2545 constrain_operands (int strict, alternative_mask alternatives)
2546 {
2547   const char *constraints[MAX_RECOG_OPERANDS];
2548   int matching_operands[MAX_RECOG_OPERANDS];
2549   int earlyclobber[MAX_RECOG_OPERANDS];
2550   int c;
2551 
2552   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2553   int funny_match_index;
2554 
2555   which_alternative = 0;
2556   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2557     return 1;
2558 
2559   for (c = 0; c < recog_data.n_operands; c++)
2560     {
2561       constraints[c] = recog_data.constraints[c];
2562       matching_operands[c] = -1;
2563     }
2564 
2565   do
2566     {
2567       int seen_earlyclobber_at = -1;
2568       int opno;
2569       int lose = 0;
2570       funny_match_index = 0;
2571 
2572       if (!TEST_BIT (alternatives, which_alternative))
2573 	{
2574 	  int i;
2575 
2576 	  for (i = 0; i < recog_data.n_operands; i++)
2577 	    constraints[i] = skip_alternative (constraints[i]);
2578 
2579 	  which_alternative++;
2580 	  continue;
2581 	}
2582 
2583       for (opno = 0; opno < recog_data.n_operands; opno++)
2584 	{
2585 	  rtx op = recog_data.operand[opno];
2586 	  machine_mode mode = GET_MODE (op);
2587 	  const char *p = constraints[opno];
2588 	  int offset = 0;
2589 	  int win = 0;
2590 	  int val;
2591 	  int len;
2592 
2593 	  earlyclobber[opno] = 0;
2594 
2595 	  /* A unary operator may be accepted by the predicate, but it
2596 	     is irrelevant for matching constraints.  */
2597 	  if (UNARY_P (op))
2598 	    op = XEXP (op, 0);
2599 
2600 	  if (GET_CODE (op) == SUBREG)
2601 	    {
2602 	      if (REG_P (SUBREG_REG (op))
2603 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2604 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2605 					      GET_MODE (SUBREG_REG (op)),
2606 					      SUBREG_BYTE (op),
2607 					      GET_MODE (op));
2608 	      op = SUBREG_REG (op);
2609 	    }
2610 
2611 	  /* An empty constraint or empty alternative
2612 	     allows anything which matched the pattern.  */
2613 	  if (*p == 0 || *p == ',')
2614 	    win = 1;
2615 
2616 	  do
2617 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2618 	      {
2619 	      case '\0':
2620 		len = 0;
2621 		break;
2622 	      case ',':
2623 		c = '\0';
2624 		break;
2625 
2626 	      case '#':
2627 		/* Ignore rest of this alternative as far as
2628 		   constraint checking is concerned.  */
2629 		do
2630 		  p++;
2631 		while (*p && *p != ',');
2632 		len = 0;
2633 		break;
2634 
2635 	      case '&':
2636 		earlyclobber[opno] = 1;
2637 		if (seen_earlyclobber_at < 0)
2638 		  seen_earlyclobber_at = opno;
2639 		break;
2640 
2641 	      case '0':  case '1':  case '2':  case '3':  case '4':
2642 	      case '5':  case '6':  case '7':  case '8':  case '9':
2643 		{
2644 		  /* This operand must be the same as a previous one.
2645 		     This kind of constraint is used for instructions such
2646 		     as add when they take only two operands.
2647 
2648 		     Note that the lower-numbered operand is passed first.
2649 
2650 		     If we are not testing strictly, assume that this
2651 		     constraint will be satisfied.  */
2652 
2653 		  char *end;
2654 		  int match;
2655 
2656 		  match = strtoul (p, &end, 10);
2657 		  p = end;
2658 
2659 		  if (strict < 0)
2660 		    val = 1;
2661 		  else
2662 		    {
2663 		      rtx op1 = recog_data.operand[match];
2664 		      rtx op2 = recog_data.operand[opno];
2665 
2666 		      /* A unary operator may be accepted by the predicate,
2667 			 but it is irrelevant for matching constraints.  */
2668 		      if (UNARY_P (op1))
2669 			op1 = XEXP (op1, 0);
2670 		      if (UNARY_P (op2))
2671 			op2 = XEXP (op2, 0);
2672 
2673 		      val = operands_match_p (op1, op2);
2674 		    }
2675 
2676 		  matching_operands[opno] = match;
2677 		  matching_operands[match] = opno;
2678 
2679 		  if (val != 0)
2680 		    win = 1;
2681 
2682 		  /* If output is *x and input is *--x, arrange later
2683 		     to change the output to *--x as well, since the
2684 		     output op is the one that will be printed.  */
2685 		  if (val == 2 && strict > 0)
2686 		    {
2687 		      funny_match[funny_match_index].this_op = opno;
2688 		      funny_match[funny_match_index++].other = match;
2689 		    }
2690 		}
2691 		len = 0;
2692 		break;
2693 
2694 	      case 'p':
2695 		/* p is used for address_operands.  When we are called by
2696 		   gen_reload, no one will have checked that the address is
2697 		   strictly valid, i.e., that all pseudos requiring hard regs
2698 		   have gotten them.  */
2699 		if (strict <= 0
2700 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2701 						 op)))
2702 		  win = 1;
2703 		break;
2704 
2705 		/* No need to check general_operand again;
2706 		   it was done in insn-recog.c.  Well, except that reload
2707 		   doesn't check the validity of its replacements, but
2708 		   that should only matter when there's a bug.  */
2709 	      case 'g':
2710 		/* Anything goes unless it is a REG and really has a hard reg
2711 		   but the hard reg is not in the class GENERAL_REGS.  */
2712 		if (REG_P (op))
2713 		  {
2714 		    if (strict < 0
2715 			|| GENERAL_REGS == ALL_REGS
2716 			|| (reload_in_progress
2717 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2718 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2719 		      win = 1;
2720 		  }
2721 		else if (strict < 0 || general_operand (op, mode))
2722 		  win = 1;
2723 		break;
2724 
2725 	      default:
2726 		{
2727 		  enum constraint_num cn = lookup_constraint (p);
2728 		  enum reg_class cl = reg_class_for_constraint (cn);
2729 		  if (cl != NO_REGS)
2730 		    {
2731 		      if (strict < 0
2732 			  || (strict == 0
2733 			      && REG_P (op)
2734 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2735 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2736 			  || (REG_P (op)
2737 			      && reg_fits_class_p (op, cl, offset, mode)))
2738 		        win = 1;
2739 		    }
2740 
2741 		  else if (constraint_satisfied_p (op, cn))
2742 		    win = 1;
2743 
2744 		  else if (insn_extra_memory_constraint (cn)
2745 			   /* Every memory operand can be reloaded to fit.  */
2746 			   && ((strict < 0 && MEM_P (op))
2747 			       /* Before reload, accept what reload can turn
2748 				  into a mem.  */
2749 			       || (strict < 0 && CONSTANT_P (op))
2750 			       /* Before reload, accept a pseudo,
2751 				  since LRA can turn it into a mem.  */
2752 			       || (strict < 0 && targetm.lra_p () && REG_P (op)
2753 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2754 			       /* During reload, accept a pseudo  */
2755 			       || (reload_in_progress && REG_P (op)
2756 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2757 		    win = 1;
2758 		  else if (insn_extra_address_constraint (cn)
2759 			   /* Every address operand can be reloaded to fit.  */
2760 			   && strict < 0)
2761 		    win = 1;
2762 		  /* Cater to architectures like IA-64 that define extra memory
2763 		     constraints without using define_memory_constraint.  */
2764 		  else if (reload_in_progress
2765 			   && REG_P (op)
2766 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2767 			   && reg_renumber[REGNO (op)] < 0
2768 			   && reg_equiv_mem (REGNO (op)) != 0
2769 			   && constraint_satisfied_p
2770 			      (reg_equiv_mem (REGNO (op)), cn))
2771 		    win = 1;
2772 		  break;
2773 		}
2774 	      }
2775 	  while (p += len, c);
2776 
2777 	  constraints[opno] = p;
2778 	  /* If this operand did not win somehow,
2779 	     this alternative loses.  */
2780 	  if (! win)
2781 	    lose = 1;
2782 	}
2783       /* This alternative won; the operands are ok.
2784 	 Change whichever operands this alternative says to change.  */
2785       if (! lose)
2786 	{
2787 	  int opno, eopno;
2788 
2789 	  /* See if any earlyclobber operand conflicts with some other
2790 	     operand.  */
2791 
2792 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2793 	    for (eopno = seen_earlyclobber_at;
2794 		 eopno < recog_data.n_operands;
2795 		 eopno++)
2796 	      /* Ignore earlyclobber operands now in memory,
2797 		 because we would often report failure when we have
2798 		 two memory operands, one of which was formerly a REG.  */
2799 	      if (earlyclobber[eopno]
2800 		  && REG_P (recog_data.operand[eopno]))
2801 		for (opno = 0; opno < recog_data.n_operands; opno++)
2802 		  if ((MEM_P (recog_data.operand[opno])
2803 		       || recog_data.operand_type[opno] != OP_OUT)
2804 		      && opno != eopno
2805 		      /* Ignore things like match_operator operands.  */
2806 		      && *recog_data.constraints[opno] != 0
2807 		      && ! (matching_operands[opno] == eopno
2808 			    && operands_match_p (recog_data.operand[opno],
2809 						 recog_data.operand[eopno]))
2810 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2811 						   recog_data.operand[eopno]))
2812 		    lose = 1;
2813 
2814 	  if (! lose)
2815 	    {
2816 	      while (--funny_match_index >= 0)
2817 		{
2818 		  recog_data.operand[funny_match[funny_match_index].other]
2819 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2820 		}
2821 
2822 	      /* For operands without < or > constraints reject side-effects.  */
2823 	      if (AUTO_INC_DEC && recog_data.is_asm)
2824 		{
2825 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2826 		    if (MEM_P (recog_data.operand[opno]))
2827 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2828 			{
2829 			case PRE_INC:
2830 			case POST_INC:
2831 			case PRE_DEC:
2832 			case POST_DEC:
2833 			case PRE_MODIFY:
2834 			case POST_MODIFY:
2835 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2836 			      && strchr (recog_data.constraints[opno], '>')
2837 				 == NULL)
2838 			    return 0;
2839 			  break;
2840 			default:
2841 			  break;
2842 			}
2843 		}
2844 
2845 	      return 1;
2846 	    }
2847 	}
2848 
2849       which_alternative++;
2850     }
2851   while (which_alternative < recog_data.n_alternatives);
2852 
2853   which_alternative = -1;
2854   /* If we are about to reject this, but we are not to test strictly,
2855      try a very loose test.  Only return failure if it fails also.  */
2856   if (strict == 0)
2857     return constrain_operands (-1, alternatives);
2858   else
2859     return 0;
2860 }
2861 
2862 /* Return true iff OPERAND (assumed to be a REG rtx)
2863    is a hard reg in class CLASS when its regno is offset by OFFSET
2864    and changed to mode MODE.
2865    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2866 
2867 bool
2868 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2869 		  machine_mode mode)
2870 {
2871   unsigned int regno = REGNO (operand);
2872 
2873   if (cl == NO_REGS)
2874     return false;
2875 
2876   /* Regno must not be a pseudo register.  Offset may be negative.  */
2877   return (HARD_REGISTER_NUM_P (regno)
2878 	  && HARD_REGISTER_NUM_P (regno + offset)
2879 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2880 				regno + offset));
2881 }
2882 
2883 /* Split single instruction.  Helper function for split_all_insns and
2884    split_all_insns_noflow.  Return last insn in the sequence if successful,
2885    or NULL if unsuccessful.  */
2886 
2887 static rtx_insn *
2888 split_insn (rtx_insn *insn)
2889 {
2890   /* Split insns here to get max fine-grain parallelism.  */
2891   rtx_insn *first = PREV_INSN (insn);
2892   rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2893   rtx insn_set, last_set, note;
2894 
2895   if (last == insn)
2896     return NULL;
2897 
2898   /* If the original instruction was a single set that was known to be
2899      equivalent to a constant, see if we can say the same about the last
2900      instruction in the split sequence.  The two instructions must set
2901      the same destination.  */
2902   insn_set = single_set (insn);
2903   if (insn_set)
2904     {
2905       last_set = single_set (last);
2906       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2907 	{
2908 	  note = find_reg_equal_equiv_note (insn);
2909 	  if (note && CONSTANT_P (XEXP (note, 0)))
2910 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2911 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2912 	    set_unique_reg_note (last, REG_EQUAL,
2913 				 copy_rtx (SET_SRC (insn_set)));
2914 	}
2915     }
2916 
2917   /* try_split returns the NOTE that INSN became.  */
2918   SET_INSN_DELETED (insn);
2919 
2920   /* ??? Coddle to md files that generate subregs in post-reload
2921      splitters instead of computing the proper hard register.  */
2922   if (reload_completed && first != last)
2923     {
2924       first = NEXT_INSN (first);
2925       for (;;)
2926 	{
2927 	  if (INSN_P (first))
2928 	    cleanup_subreg_operands (first);
2929 	  if (first == last)
2930 	    break;
2931 	  first = NEXT_INSN (first);
2932 	}
2933     }
2934 
2935   return last;
2936 }
2937 
2938 /* Split all insns in the function.  */
2939 
2940 void
2941 split_all_insns (void)
2942 {
2943   bool changed;
2944   bool need_cfg_cleanup = false;
2945   basic_block bb;
2946 
2947   auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2948   bitmap_clear (blocks);
2949   changed = false;
2950 
2951   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2952     {
2953       rtx_insn *insn, *next;
2954       bool finish = false;
2955 
2956       rtl_profile_for_bb (bb);
2957       for (insn = BB_HEAD (bb); !finish ; insn = next)
2958 	{
2959 	  /* Can't use `next_real_insn' because that might go across
2960 	     CODE_LABELS and short-out basic blocks.  */
2961 	  next = NEXT_INSN (insn);
2962 	  finish = (insn == BB_END (bb));
2963 
2964 	  /* If INSN has a REG_EH_REGION note and we split INSN, the
2965 	     resulting split may not have/need REG_EH_REGION notes.
2966 
2967 	     If that happens and INSN was the last reference to the
2968 	     given EH region, then the EH region will become unreachable.
2969 	     We cannot leave the unreachable blocks in the CFG as that
2970 	     will trigger a checking failure.
2971 
2972 	     So track if INSN has a REG_EH_REGION note.  If so and we
2973 	     split INSN, then trigger a CFG cleanup.  */
2974 	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2975 	  if (INSN_P (insn))
2976 	    {
2977 	      rtx set = single_set (insn);
2978 
2979 	      /* Don't split no-op move insns.  These should silently
2980 		 disappear later in final.  Splitting such insns would
2981 		 break the code that handles LIBCALL blocks.  */
2982 	      if (set && set_noop_p (set))
2983 		{
2984 		  /* Nops get in the way while scheduling, so delete them
2985 		     now if register allocation has already been done.  It
2986 		     is too risky to try to do this before register
2987 		     allocation, and there are unlikely to be very many
2988 		     nops then anyways.  */
2989 		  if (reload_completed)
2990 		      delete_insn_and_edges (insn);
2991 		  if (note)
2992 		    need_cfg_cleanup = true;
2993 		}
2994 	      else
2995 		{
2996 		  if (split_insn (insn))
2997 		    {
2998 		      bitmap_set_bit (blocks, bb->index);
2999 		      changed = true;
3000 		      if (note)
3001 			need_cfg_cleanup = true;
3002 		    }
3003 		}
3004 	    }
3005 	}
3006     }
3007 
3008   default_rtl_profile ();
3009   if (changed)
3010     {
3011       find_many_sub_basic_blocks (blocks);
3012 
3013       /* Splitting could drop a REG_EH_REGION note if the insn could
3014 	 trap in its original form but can no longer do so in its split
3015 	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
3016 	 store/load pair under -fnon-call-exceptions.  */
3017       if (need_cfg_cleanup)
3018 	cleanup_cfg (0);
3019     }
3020 
3021   checking_verify_flow_info ();
3022 }
3023 
3024 /* Same as split_all_insns, but do not expect CFG to be available.
3025    Used by machine dependent reorg passes.  */
3026 
3027 unsigned int
3028 split_all_insns_noflow (void)
3029 {
3030   rtx_insn *next, *insn;
3031 
3032   for (insn = get_insns (); insn; insn = next)
3033     {
3034       next = NEXT_INSN (insn);
3035       if (INSN_P (insn))
3036 	{
3037 	  /* Don't split no-op move insns.  These should silently
3038 	     disappear later in final.  Splitting such insns would
3039 	     break the code that handles LIBCALL blocks.  */
3040 	  rtx set = single_set (insn);
3041 	  if (set && set_noop_p (set))
3042 	    {
3043 	      /* Nops get in the way while scheduling, so delete them
3044 		 now if register allocation has already been done.  It
3045 		 is too risky to try to do this before register
3046 		 allocation, and there are unlikely to be very many
3047 		 nops then anyways.
3048 
3049 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3050 	      if (reload_completed)
3051 		delete_insn_and_edges (insn);
3052 	    }
3053 	  else
3054 	    split_insn (insn);
3055 	}
3056     }
3057   return 0;
3058 }
3059 
3060 struct peep2_insn_data
3061 {
3062   rtx_insn *insn;
3063   regset live_before;
3064 };
3065 
3066 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3067 static int peep2_current;
3068 
3069 static bool peep2_do_rebuild_jump_labels;
3070 static bool peep2_do_cleanup_cfg;
3071 
3072 /* The number of instructions available to match a peep2.  */
3073 int peep2_current_count;
3074 
3075 /* A marker indicating the last insn of the block.  The live_before regset
3076    for this element is correct, indicating DF_LIVE_OUT for the block.  */
3077 #define PEEP2_EOB invalid_insn_rtx
3078 
3079 /* Wrap N to fit into the peep2_insn_data buffer.  */
3080 
3081 static int
3082 peep2_buf_position (int n)
3083 {
3084   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3085     n -= MAX_INSNS_PER_PEEP2 + 1;
3086   return n;
3087 }
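
/* Wraparound sketch (guarded out): the buffer has MAX_INSNS_PER_PEEP2 + 1
   slots, so an index two past the last slot wraps back to slot 1.  */
#if 0
static int
example_wraparound (void)
{
  return peep2_buf_position (MAX_INSNS_PER_PEEP2 + 2);	/* == 1 */
}
#endif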
3088 
3089 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3090    does not exist.  Used by the recognizer to find the next insn to match
3091    in a multi-insn pattern.  */
3092 
3093 rtx_insn *
3094 peep2_next_insn (int n)
3095 {
3096   gcc_assert (n <= peep2_current_count);
3097 
3098   n = peep2_buf_position (peep2_current + n);
3099 
3100   return peep2_insn_data[n].insn;
3101 }
3102 
3103 /* Return true if REGNO is dead before the Nth non-note insn
3104    after `current'.  */
3105 
3106 int
3107 peep2_regno_dead_p (int ofs, int regno)
3108 {
3109   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3110 
3111   ofs = peep2_buf_position (peep2_current + ofs);
3112 
3113   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3114 
3115   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3116 }
3117 
3118 /* Similarly for a REG.  */
3119 
3120 int
3121 peep2_reg_dead_p (int ofs, rtx reg)
3122 {
3123   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3124 
3125   ofs = peep2_buf_position (peep2_current + ofs);
3126 
3127   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3128 
3129   unsigned int end_regno = END_REGNO (reg);
3130   for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3131     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3132       return 0;
3133   return 1;
3134 }
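
/* Usage sketch (guarded out; the helper is hypothetical): a
   define_peephole2 condition typically asks whether a register matched by
   the pattern dies within the window, e.g. peep2_reg_dead_p (2, operands[0])
   in a machine description.  */
#if 0
static bool
example_reg_dead_after_two_insns (rtx reg)
{
  return peep2_reg_dead_p (2, reg) != 0;
}
#endif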
3135 
3136 /* Regno offset to be used in the register search.  */
3137 static int search_ofs;
3138 
3139 /* Try to find a hard register of mode MODE, matching the register class in
3140    CLASS_STR, which is available from the beginning of the insn at buffer
3141    offset FROM and remains available until the end of the insn at buffer
3142    offset TO (both offsets are relative to the current peephole window,
3143    in the same sense as for peep2_next_insn).
3144    Registers that already have bits set in REG_SET will not be considered.
3145 
3146    If an appropriate register is available, it will be returned and the
3147    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3148    returned.  */
3149 
3150 rtx
3151 peep2_find_free_register (int from, int to, const char *class_str,
3152 			  machine_mode mode, HARD_REG_SET *reg_set)
3153 {
3154   enum reg_class cl;
3155   HARD_REG_SET live;
3156   df_ref def;
3157   int i;
3158 
3159   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3160   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3161 
3162   from = peep2_buf_position (peep2_current + from);
3163   to = peep2_buf_position (peep2_current + to);
3164 
3165   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3166   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3167 
3168   while (from != to)
3169     {
3170       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3171 
3172       /* Don't use registers set or clobbered by the insn.  */
3173       FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3174 	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3175 
3176       from = peep2_buf_position (from + 1);
3177     }
3178 
3179   cl = reg_class_for_constraint (lookup_constraint (class_str));
3180 
3181   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3182     {
3183       int raw_regno, regno, success, j;
3184 
3185       /* Distribute the free registers as much as possible.  */
3186       raw_regno = search_ofs + i;
3187       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3188 	raw_regno -= FIRST_PSEUDO_REGISTER;
3189 #ifdef REG_ALLOC_ORDER
3190       regno = reg_alloc_order[raw_regno];
3191 #else
3192       regno = raw_regno;
3193 #endif
3194 
3195       /* Can it support the mode we need?  */
3196       if (!targetm.hard_regno_mode_ok (regno, mode))
3197 	continue;
3198 
3199       success = 1;
3200       for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3201 	{
3202 	  /* Don't allocate fixed registers.  */
3203 	  if (fixed_regs[regno + j])
3204 	    {
3205 	      success = 0;
3206 	      break;
3207 	    }
3208 	  /* Don't allocate global registers.  */
3209 	  if (global_regs[regno + j])
3210 	    {
3211 	      success = 0;
3212 	      break;
3213 	    }
3214 	  /* Make sure the register is of the right class.  */
3215 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3216 	    {
3217 	      success = 0;
3218 	      break;
3219 	    }
3220 	  /* And that we don't create an extra save/restore.  */
3221 	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3222 	    {
3223 	      success = 0;
3224 	      break;
3225 	    }
3226 
3227 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3228 	    {
3229 	      success = 0;
3230 	      break;
3231 	    }
3232 
3233 	  /* And we don't clobber traceback for noreturn functions.  */
3234 	  if ((regno + j == FRAME_POINTER_REGNUM
3235 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3236 	      && (! reload_completed || frame_pointer_needed))
3237 	    {
3238 	      success = 0;
3239 	      break;
3240 	    }
3241 
3242 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3243 	      || TEST_HARD_REG_BIT (live, regno + j))
3244 	    {
3245 	      success = 0;
3246 	      break;
3247 	    }
3248 	}
3249 
3250       if (success)
3251 	{
3252 	  add_to_hard_reg_set (reg_set, mode, regno);
3253 
3254 	  /* Start the next search with the next register.  */
3255 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3256 	    raw_regno = 0;
3257 	  search_ofs = raw_regno;
3258 
3259 	  return gen_rtx_REG (mode, regno);
3260 	}
3261     }
3262 
3263   search_ofs = 0;
3264   return NULL_RTX;
3265 }
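
/* Usage sketch (guarded out; the constraint letter and mode are just an
   example): machine descriptions call this from define_peephole2 bodies to
   grab a scratch general register that is free across the first two insns
   of the window.  */
#if 0
static rtx
example_grab_scratch (HARD_REG_SET *used)
{
  return peep2_find_free_register (0, 1, "r", SImode, used);
}
#endif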
3266 
3267 /* Forget all currently tracked instructions; remember only the current
3268    LIVE regset.  */
3269 
3270 static void
3271 peep2_reinit_state (regset live)
3272 {
3273   int i;
3274 
3275   /* Indicate that all slots except the last holds invalid data.  */
3276   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3277     peep2_insn_data[i].insn = NULL;
3278   peep2_current_count = 0;
3279 
3280   /* Indicate that the last slot contains live_after data.  */
3281   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3282   peep2_current = MAX_INSNS_PER_PEEP2;
3283 
3284   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3285 }
3286 
3287 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3288    starting at INSN.  Perform the replacement, removing the old insns and
3289    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3290    if the replacement is rejected.  */
3291 
3292 static rtx_insn *
3293 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3294 {
3295   int i;
3296   rtx_insn *last, *before_try, *x;
3297   rtx eh_note, as_note;
3298   rtx_insn *old_insn;
3299   rtx_insn *new_insn;
3300   bool was_call = false;
3301 
3302   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3303      match more than one insn, or to be split into more than one insn.  */
3304   old_insn = peep2_insn_data[peep2_current].insn;
3305   if (RTX_FRAME_RELATED_P (old_insn))
3306     {
3307       bool any_note = false;
3308       rtx note;
3309 
3310       if (match_len != 0)
3311 	return NULL;
3312 
3313       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3314 	 may be in the stream for the purpose of register allocation.  */
3315       if (active_insn_p (attempt))
3316 	new_insn = attempt;
3317       else
3318 	new_insn = next_active_insn (attempt);
3319       if (next_active_insn (new_insn))
3320 	return NULL;
3321 
3322       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3323       RTX_FRAME_RELATED_P (new_insn) = 1;
3324 
3325       /* Allow the backend to fill in a note during the split.  */
3326       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3327 	switch (REG_NOTE_KIND (note))
3328 	  {
3329 	  case REG_FRAME_RELATED_EXPR:
3330 	  case REG_CFA_DEF_CFA:
3331 	  case REG_CFA_ADJUST_CFA:
3332 	  case REG_CFA_OFFSET:
3333 	  case REG_CFA_REGISTER:
3334 	  case REG_CFA_EXPRESSION:
3335 	  case REG_CFA_RESTORE:
3336 	  case REG_CFA_SET_VDRAP:
3337 	    any_note = true;
3338 	    break;
3339 	  default:
3340 	    break;
3341 	  }
3342 
3343       /* If the backend didn't supply a note, copy one over.  */
3344       if (!any_note)
3345         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3346 	  switch (REG_NOTE_KIND (note))
3347 	    {
3348 	    case REG_FRAME_RELATED_EXPR:
3349 	    case REG_CFA_DEF_CFA:
3350 	    case REG_CFA_ADJUST_CFA:
3351 	    case REG_CFA_OFFSET:
3352 	    case REG_CFA_REGISTER:
3353 	    case REG_CFA_EXPRESSION:
3354 	    case REG_CFA_RESTORE:
3355 	    case REG_CFA_SET_VDRAP:
3356 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3357 	      any_note = true;
3358 	      break;
3359 	    default:
3360 	      break;
3361 	    }
3362 
3363       /* If there still isn't a note, make sure the unwind info sees the
3364 	 same expression as before the split.  */
3365       if (!any_note)
3366 	{
3367 	  rtx old_set, new_set;
3368 
3369 	  /* The old insn had better have been simple, or annotated.  */
3370 	  old_set = single_set (old_insn);
3371 	  gcc_assert (old_set != NULL);
3372 
3373 	  new_set = single_set (new_insn);
3374 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3375 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3376 	}
3377 
3378       /* Copy prologue/epilogue status.  This is required in order to keep
3379 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3380       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3381     }
3382 
3383   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3384      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3385      cfg-related call notes.  */
3386   for (i = 0; i <= match_len; ++i)
3387     {
3388       int j;
3389       rtx note;
3390 
3391       j = peep2_buf_position (peep2_current + i);
3392       old_insn = peep2_insn_data[j].insn;
3393       if (!CALL_P (old_insn))
3394 	continue;
3395       was_call = true;
3396 
3397       new_insn = attempt;
3398       while (new_insn != NULL_RTX)
3399 	{
3400 	  if (CALL_P (new_insn))
3401 	    break;
3402 	  new_insn = NEXT_INSN (new_insn);
3403 	}
3404 
3405       gcc_assert (new_insn != NULL_RTX);
3406 
3407       CALL_INSN_FUNCTION_USAGE (new_insn)
3408 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3409       SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3410 
3411       for (note = REG_NOTES (old_insn);
3412 	   note;
3413 	   note = XEXP (note, 1))
3414 	switch (REG_NOTE_KIND (note))
3415 	  {
3416 	  case REG_NORETURN:
3417 	  case REG_SETJMP:
3418 	  case REG_TM:
3419 	  case REG_CALL_NOCF_CHECK:
3420 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3421 			  XEXP (note, 0));
3422 	    break;
3423 	  default:
3424 	    /* Discard all other reg notes.  */
3425 	    break;
3426 	  }
3427 
3428       /* Croak if there is another call in the sequence.  */
3429       while (++i <= match_len)
3430 	{
3431 	  j = peep2_buf_position (peep2_current + i);
3432 	  old_insn = peep2_insn_data[j].insn;
3433 	  gcc_assert (!CALL_P (old_insn));
3434 	}
3435       break;
3436     }
3437 
3438   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3439      move those notes over to the new sequence.  */
3440   as_note = NULL;
3441   for (i = match_len; i >= 0; --i)
3442     {
3443       int j = peep2_buf_position (peep2_current + i);
3444       old_insn = peep2_insn_data[j].insn;
3445 
3446       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3447       if (as_note)
3448 	break;
3449     }
3450 
3451   i = peep2_buf_position (peep2_current + match_len);
3452   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3453 
3454   /* Replace the old sequence with the new.  */
3455   rtx_insn *peepinsn = peep2_insn_data[i].insn;
3456   last = emit_insn_after_setloc (attempt,
3457 				 peep2_insn_data[i].insn,
3458 				 INSN_LOCATION (peepinsn));
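  /* If the matched sequence ended in a crossing jump, the replacement
     jump must carry the flag too, to keep hot/cold partitioning valid.  */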
3459   if (JUMP_P (peepinsn) && JUMP_P (last))
3460     CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3461   before_try = PREV_INSN (insn);
3462   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3463 
3464   /* Re-insert the EH_REGION notes.  */
3465   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3466     {
3467       edge eh_edge;
3468       edge_iterator ei;
3469 
3470       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3471 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3472 	  break;
3473 
3474       if (eh_note)
3475 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3476 
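      /* For each insn in the new sequence that can itself throw or perform
	 a nonlocal goto, split the block after it and add an EH/abnormal
	 edge to the handler reached by the original insn.  */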
3477       if (eh_edge)
3478 	for (x = last; x != before_try; x = PREV_INSN (x))
3479 	  if (x != BB_END (bb)
3480 	      && (can_throw_internal (x)
3481 		  || can_nonlocal_goto (x)))
3482 	    {
3483 	      edge nfte, nehe;
3484 	      int flags;
3485 
3486 	      nfte = split_block (bb, x);
3487 	      flags = (eh_edge->flags
3488 		       & (EDGE_EH | EDGE_ABNORMAL));
3489 	      if (CALL_P (x))
3490 		flags |= EDGE_ABNORMAL_CALL;
3491 	      nehe = make_edge (nfte->src, eh_edge->dest,
3492 				flags);
3493 
3494 	      nehe->probability = eh_edge->probability;
3495 	      nfte->probability = nehe->probability.invert ();
3496 
3497 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3498 	      bb = nfte->src;
3499 	      eh_edge = nehe;
3500 	    }
3501 
3502       /* The replacement may have turned a possibly trapping insn into a
3503 	 non-trapping one.  Zap any dummy outgoing edges left behind.  */
3504       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3505     }
3506 
3507   /* Re-insert the ARGS_SIZE notes.  */
3508   if (as_note)
3509     fixup_args_size_notes (before_try, last, get_args_size (as_note));
3510 
3511   /* If we generated a jump instruction, it won't have
3512      JUMP_LABEL set.  Recompute after we're done.  */
3513   for (x = last; x != before_try; x = PREV_INSN (x))
3514     if (JUMP_P (x))
3515       {
3516 	peep2_do_rebuild_jump_labels = true;
3517 	break;
3518       }
3519 
3520   return last;
3521 }
3522 
3523 /* After performing a replacement in basic block BB, fix up the life
3524    information in our buffer.  LAST is the last of the insns that we
3525    emitted as a replacement.  PREV is the insn before the start of
3526    the replacement.  MATCH_LEN is the number of instructions that were
3527    matched, and which now need to be replaced in the buffer.  */
3528 
3529 static void
3530 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3531 		   rtx_insn *prev)
3532 {
3533   int i = peep2_buf_position (peep2_current + match_len + 1);
3534   rtx_insn *x;
3535   regset_head live;
3536 
3537   INIT_REG_SET (&live);
3538   COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3539 
3540   gcc_assert (peep2_current_count >= match_len + 1);
3541   peep2_current_count -= match_len + 1;
3542 
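  /* Walk backwards from LAST to PREV, rescanning each new insn for df
     and refilling the buffer.  The buffer is circular, with
     MAX_INSNS_PER_PEEP2 + 1 slots, hence the wrap-around below.  */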
3543   x = last;
3544   do
3545     {
3546       if (INSN_P (x))
3547 	{
3548 	  df_insn_rescan (x);
3549 	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3550 	    {
3551 	      peep2_current_count++;
3552 	      if (--i < 0)
3553 		i = MAX_INSNS_PER_PEEP2;
3554 	      peep2_insn_data[i].insn = x;
3555 	      df_simulate_one_insn_backwards (bb, x, &live);
3556 	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3557 	    }
3558 	}
3559       x = PREV_INSN (x);
3560     }
3561   while (x != prev);
3562   CLEAR_REG_SET (&live);
3563 
3564   peep2_current = i;
3565 }
3566 
3567 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3568    Return true if we added it, false otherwise.  The caller will try to match
3569    peepholes against the buffer if we return false; otherwise it will try to
3570    add more instructions to the buffer.  */
3571 
3572 static bool
3573 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3574 {
3575   int pos;
3576 
3577   /* Once we have filled the maximum number of insns the buffer can hold,
3578      allow the caller to match the insns against peepholes.  We wait until
3579      the buffer is full in case the target has similar peepholes of different
3580      length; we always want to match the longest if possible.  */
3581   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3582     return false;
3583 
3584   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3585      any other pattern, lest it change the semantics of the frame info.  */
3586   if (RTX_FRAME_RELATED_P (insn))
3587     {
3588       /* Let the buffer drain first.  */
3589       if (peep2_current_count > 0)
3590 	return false;
3591       /* Now the insn will be the only thing in the buffer.  */
3592     }
3593 
3594   pos = peep2_buf_position (peep2_current + peep2_current_count);
3595   peep2_insn_data[pos].insn = insn;
3596   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3597   peep2_current_count++;
3598 
3599   df_simulate_one_insn_forwards (bb, insn, live);
3600   return true;
3601 }
3602 
3603 /* Perform the peephole2 optimization pass.  */
3604 
3605 static void
3606 peephole2_optimize (void)
3607 {
3608   rtx_insn *insn;
3609   bitmap live;
3610   int i;
3611   basic_block bb;
3612 
3613   peep2_do_cleanup_cfg = false;
3614   peep2_do_rebuild_jump_labels = false;
3615 
3616   df_set_flags (DF_LR_RUN_DCE);
3617   df_note_add_problem ();
3618   df_analyze ();
3619 
3620   /* Initialize the regsets we're going to use.  */
3621   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3622     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3623   search_ofs = 0;
3624   live = BITMAP_ALLOC (&reg_obstack);
3625 
3626   FOR_EACH_BB_REVERSE_FN (bb, cfun)
3627     {
3628       bool past_end = false;
3629       int pos;
3630 
3631       rtl_profile_for_bb (bb);
3632 
3633       /* Start up propagation.  */
3634       bitmap_copy (live, DF_LR_IN (bb));
3635       df_simulate_initialize_forwards (bb, live);
3636       peep2_reinit_state (live);
3637 
3638       insn = BB_HEAD (bb);
3639       for (;;)
3640 	{
3641 	  rtx_insn *attempt, *head;
3642 	  int match_len;
3643 
3644 	  if (!past_end && !NONDEBUG_INSN_P (insn))
3645 	    {
3646 	    next_insn:
3647 	      insn = NEXT_INSN (insn);
3648 	      if (insn == NEXT_INSN (BB_END (bb)))
3649 		past_end = true;
3650 	      continue;
3651 	    }
3652 	  if (!past_end && peep2_fill_buffer (bb, insn, live))
3653 	    goto next_insn;
3654 
3655 	  /* If we could not add to an already empty buffer, we have
3656 	     reached the end of the block.  */
3657 	  if (peep2_current_count == 0)
3658 	    break;
3659 
3660 	  /* The buffer filled to the current maximum, so try to match.  */
3661 
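	  /* Mark the slot just past the last queued insn with the
	     end-of-buffer sentinel and record the live set at that point,
	     so the peep2_* helpers used by peephole conditions can tell
	     where the window ends.  */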
3662 	  pos = peep2_buf_position (peep2_current + peep2_current_count);
3663 	  peep2_insn_data[pos].insn = PEEP2_EOB;
3664 	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3665 
3666 	  /* Match the peephole.  */
3667 	  head = peep2_insn_data[peep2_current].insn;
3668 	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
3669 	  if (attempt != NULL)
3670 	    {
3671 	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3672 	      if (last)
3673 		{
3674 		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3675 		  continue;
3676 		}
3677 	    }
3678 
3679 	  /* No match: advance the buffer by one insn.  */
3680 	  peep2_current = peep2_buf_position (peep2_current + 1);
3681 	  peep2_current_count--;
3682 	}
3683     }
3684 
3685   default_rtl_profile ();
3686   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3687     BITMAP_FREE (peep2_insn_data[i].live_before);
3688   BITMAP_FREE (live);
3689   if (peep2_do_rebuild_jump_labels)
3690     rebuild_jump_labels (get_insns ());
3691   if (peep2_do_cleanup_cfg)
3692     cleanup_cfg (CLEANUP_CFG_CHANGED);
3693 }
3694 
3695 /* Common predicates for use with define_bypass.  */
3696 
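/* As an illustration (the insn reservation names below are hypothetical),
   a machine description might use one of these predicates as the guard of
   a define_bypass to shorten the latency between an ALU result and a
   dependent store when only the stored data, not the address, depends on
   that result:

     (define_bypass 1 "hypo_alu" "hypo_store" "store_data_bypass_p")

   The generated bypass code calls the guard with the producing insn
   (OUT_INSN) and the consuming insn (IN_INSN).  */
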
3697 /* Helper function for store_data_bypass_p, handle just a single SET
3698    IN_SET.  */
3699 
3700 static bool
3701 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
3702 {
3703   if (!MEM_P (SET_DEST (in_set)))
3704     return false;
3705 
3706   rtx out_set = single_set (out_insn);
3707   if (out_set)
3708     return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
3709 
3710   rtx out_pat = PATTERN (out_insn);
3711   if (GET_CODE (out_pat) != PARALLEL)
3712     return false;
3713 
3714   for (int i = 0; i < XVECLEN (out_pat, 0); i++)
3715     {
3716       rtx out_exp = XVECEXP (out_pat, 0, i);
3717 
3718       if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
3719 	continue;
3720 
3721       gcc_assert (GET_CODE (out_exp) == SET);
3722 
3723       if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3724 	return false;
3725     }
3726 
3727   return true;
3728 }
3729 
3730 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3731    data, not on the address operand(s) of the store.  IN_INSN and OUT_INSN
3732    must each be either a single_set or a PARALLEL with SETs inside.  */
3733 
3734 int
3735 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3736 {
3737   rtx in_set = single_set (in_insn);
3738   if (in_set)
3739     return store_data_bypass_p_1 (out_insn, in_set);
3740 
3741   rtx in_pat = PATTERN (in_insn);
3742   if (GET_CODE (in_pat) != PARALLEL)
3743     return false;
3744 
3745   for (int i = 0; i < XVECLEN (in_pat, 0); i++)
3746     {
3747       rtx in_exp = XVECEXP (in_pat, 0, i);
3748 
3749       if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
3750 	continue;
3751 
3752       gcc_assert (GET_CODE (in_exp) == SET);
3753 
3754       if (!store_data_bypass_p_1 (out_insn, in_exp))
3755 	return false;
3756     }
3757 
3758   return true;
3759 }
3760 
3761 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3762    condition, and not in the THEN or ELSE branch.  OUT_INSN may be a single_set
3763    or a PARALLEL with multiple SETs; IN_INSN should be a single_set for the
3764    result to be meaningful, but may be any JUMP or CALL insn for convenience.  */
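
/* For example (again with purely illustrative reservation names), a target
   might model a different latency when a conditional-move consumer depends
   on the producer only through its condition:

     (define_bypass 1 "hypo_compare" "hypo_cmove" "if_test_bypass_p")  */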
3765 
3766 int
3767 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3768 {
3769   rtx out_set, in_set;
3770 
3771   in_set = single_set (in_insn);
3772   if (! in_set)
3773     {
3774       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3775       return false;
3776     }
3777 
3778   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3779     return false;
3780   in_set = SET_SRC (in_set);
3781 
3782   out_set = single_set (out_insn);
3783   if (out_set)
3784     {
3785       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3786 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3787 	return false;
3788     }
3789   else
3790     {
3791       rtx out_pat;
3792       int i;
3793 
3794       out_pat = PATTERN (out_insn);
3795       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3796 
3797       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3798 	{
3799 	  rtx exp = XVECEXP (out_pat, 0, i);
3800 
3801 	  if (GET_CODE (exp) == CLOBBER)
3802 	    continue;
3803 
3804 	  gcc_assert (GET_CODE (exp) == SET);
3805 
3806 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3807 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3808 	    return false;
3809 	}
3810     }
3811 
3812   return true;
3813 }
3814 
3815 static unsigned int
3816 rest_of_handle_peephole2 (void)
3817 {
3818   if (HAVE_peephole2)
3819     peephole2_optimize ();
3820 
3821   return 0;
3822 }
3823 
3824 namespace {
3825 
3826 const pass_data pass_data_peephole2 =
3827 {
3828   RTL_PASS, /* type */
3829   "peephole2", /* name */
3830   OPTGROUP_NONE, /* optinfo_flags */
3831   TV_PEEPHOLE2, /* tv_id */
3832   0, /* properties_required */
3833   0, /* properties_provided */
3834   0, /* properties_destroyed */
3835   0, /* todo_flags_start */
3836   TODO_df_finish, /* todo_flags_finish */
3837 };
3838 
3839 class pass_peephole2 : public rtl_opt_pass
3840 {
3841 public:
3842   pass_peephole2 (gcc::context *ctxt)
3843     : rtl_opt_pass (pass_data_peephole2, ctxt)
3844   {}
3845 
3846   /* opt_pass methods: */
3847   /* The epiphany backend creates a second instance of this pass, so we need
3848      a clone method.  */
3849   opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3850   virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3851   virtual unsigned int execute (function *)
3852     {
3853       return rest_of_handle_peephole2 ();
3854     }
3855 
3856 }; // class pass_peephole2
3857 
3858 } // anon namespace
3859 
3860 rtl_opt_pass *
3861 make_pass_peephole2 (gcc::context *ctxt)
3862 {
3863   return new pass_peephole2 (ctxt);
3864 }
3865 
3866 namespace {
3867 
3868 const pass_data pass_data_split_all_insns =
3869 {
3870   RTL_PASS, /* type */
3871   "split1", /* name */
3872   OPTGROUP_NONE, /* optinfo_flags */
3873   TV_NONE, /* tv_id */
3874   0, /* properties_required */
3875   PROP_rtl_split_insns, /* properties_provided */
3876   0, /* properties_destroyed */
3877   0, /* todo_flags_start */
3878   0, /* todo_flags_finish */
3879 };
3880 
3881 class pass_split_all_insns : public rtl_opt_pass
3882 {
3883 public:
3884   pass_split_all_insns (gcc::context *ctxt)
3885     : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3886   {}
3887 
3888   /* opt_pass methods: */
3889   /* The epiphany backend creates a second instance of this pass, so
3890      we need a clone method.  */
3891   opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3892   virtual unsigned int execute (function *)
3893     {
3894       split_all_insns ();
3895       return 0;
3896     }
3897 
3898 }; // class pass_split_all_insns
3899 
3900 } // anon namespace
3901 
3902 rtl_opt_pass *
3903 make_pass_split_all_insns (gcc::context *ctxt)
3904 {
3905   return new pass_split_all_insns (ctxt);
3906 }
3907 
3908 namespace {
3909 
3910 const pass_data pass_data_split_after_reload =
3911 {
3912   RTL_PASS, /* type */
3913   "split2", /* name */
3914   OPTGROUP_NONE, /* optinfo_flags */
3915   TV_NONE, /* tv_id */
3916   0, /* properties_required */
3917   0, /* properties_provided */
3918   0, /* properties_destroyed */
3919   0, /* todo_flags_start */
3920   0, /* todo_flags_finish */
3921 };
3922 
3923 class pass_split_after_reload : public rtl_opt_pass
3924 {
3925 public:
3926   pass_split_after_reload (gcc::context *ctxt)
3927     : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3928   {}
3929 
3930   /* opt_pass methods: */
3931   virtual bool gate (function *)
3932     {
3933       /* If optimizing, then go ahead and split insns now.  */
3934       if (optimize > 0)
3935 	return true;
3936 
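      /* On STACK_REGS targets, splitting is needed even when not
	 optimizing, so that the register stack conversion pass only sees
	 insns that have already been split.  */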
3937 #ifdef STACK_REGS
3938       return true;
3939 #else
3940       return false;
3941 #endif
3942     }
3943 
3944   virtual unsigned int execute (function *)
3945     {
3946       split_all_insns ();
3947       return 0;
3948     }
3949 
3950 }; // class pass_split_after_reload
3951 
3952 } // anon namespace
3953 
3954 rtl_opt_pass *
3955 make_pass_split_after_reload (gcc::context *ctxt)
3956 {
3957   return new pass_split_after_reload (ctxt);
3958 }
3959 
3960 namespace {
3961 
3962 const pass_data pass_data_split_before_regstack =
3963 {
3964   RTL_PASS, /* type */
3965   "split3", /* name */
3966   OPTGROUP_NONE, /* optinfo_flags */
3967   TV_NONE, /* tv_id */
3968   0, /* properties_required */
3969   0, /* properties_provided */
3970   0, /* properties_destroyed */
3971   0, /* todo_flags_start */
3972   0, /* todo_flags_finish */
3973 };
3974 
3975 class pass_split_before_regstack : public rtl_opt_pass
3976 {
3977 public:
3978   pass_split_before_regstack (gcc::context *ctxt)
3979     : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3980   {}
3981 
3982   /* opt_pass methods: */
3983   virtual bool gate (function *);
3984   virtual unsigned int execute (function *)
3985     {
3986       split_all_insns ();
3987       return 0;
3988     }
3989 
3990 }; // class pass_split_before_regstack
3991 
3992 bool
3993 pass_split_before_regstack::gate (function *)
3994 {
3995 #if HAVE_ATTR_length && defined (STACK_REGS)
3996   /* If flow2 creates new instructions which need splitting, and
3997      scheduling after reload is not done, those instructions might not
3998      be split until final, which does not allow splitting when
3999      HAVE_ATTR_length is defined.  */
4000 # ifdef INSN_SCHEDULING
4001   return (optimize && !flag_schedule_insns_after_reload);
4002 # else
4003   return (optimize);
4004 # endif
4005 #else
4006   return false;
4007 #endif
4008 }
4009 
4010 } // anon namespace
4011 
4012 rtl_opt_pass *
4013 make_pass_split_before_regstack (gcc::context *ctxt)
4014 {
4015   return new pass_split_before_regstack (ctxt);
4016 }
4017 
4018 static unsigned int
4019 rest_of_handle_split_before_sched2 (void)
4020 {
4021 #ifdef INSN_SCHEDULING
4022   split_all_insns ();
4023 #endif
4024   return 0;
4025 }
4026 
4027 namespace {
4028 
4029 const pass_data pass_data_split_before_sched2 =
4030 {
4031   RTL_PASS, /* type */
4032   "split4", /* name */
4033   OPTGROUP_NONE, /* optinfo_flags */
4034   TV_NONE, /* tv_id */
4035   0, /* properties_required */
4036   0, /* properties_provided */
4037   0, /* properties_destroyed */
4038   0, /* todo_flags_start */
4039   0, /* todo_flags_finish */
4040 };
4041 
4042 class pass_split_before_sched2 : public rtl_opt_pass
4043 {
4044 public:
4045   pass_split_before_sched2 (gcc::context *ctxt)
4046     : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4047   {}
4048 
4049   /* opt_pass methods: */
4050   virtual bool gate (function *)
4051     {
4052 #ifdef INSN_SCHEDULING
4053       return optimize > 0 && flag_schedule_insns_after_reload;
4054 #else
4055       return false;
4056 #endif
4057     }
4058 
4059   virtual unsigned int execute (function *)
4060     {
4061       return rest_of_handle_split_before_sched2 ();
4062     }
4063 
4064 }; // class pass_split_before_sched2
4065 
4066 } // anon namespace
4067 
4068 rtl_opt_pass *
4069 make_pass_split_before_sched2 (gcc::context *ctxt)
4070 {
4071   return new pass_split_before_sched2 (ctxt);
4072 }
4073 
4074 namespace {
4075 
4076 const pass_data pass_data_split_for_shorten_branches =
4077 {
4078   RTL_PASS, /* type */
4079   "split5", /* name */
4080   OPTGROUP_NONE, /* optinfo_flags */
4081   TV_NONE, /* tv_id */
4082   0, /* properties_required */
4083   0, /* properties_provided */
4084   0, /* properties_destroyed */
4085   0, /* todo_flags_start */
4086   0, /* todo_flags_finish */
4087 };
4088 
4089 class pass_split_for_shorten_branches : public rtl_opt_pass
4090 {
4091 public:
4092   pass_split_for_shorten_branches (gcc::context *ctxt)
4093     : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4094   {}
4095 
4096   /* opt_pass methods: */
4097   virtual bool gate (function *)
4098     {
4099       /* The placement of the splitting that we do for shorten_branches
4100 	 depends on whether regstack is used by the target or not.  */
4101 #if HAVE_ATTR_length && !defined (STACK_REGS)
4102       return true;
4103 #else
4104       return false;
4105 #endif
4106     }
4107 
4108   virtual unsigned int execute (function *)
4109     {
4110       return split_all_insns_noflow ();
4111     }
4112 
4113 }; // class pass_split_for_shorten_branches
4114 
4115 } // anon namespace
4116 
4117 rtl_opt_pass *
4118 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4119 {
4120   return new pass_split_for_shorten_branches (ctxt);
4121 }
4122 
4123 /* (Re)initialize the target information after a change in target.  */
4124 
4125 void
4126 recog_init ()
4127 {
4128   /* The information is zero-initialized, so we don't need to do anything
4129      first time round.  */
4130   if (!this_target_recog->x_initialized)
4131     {
4132       this_target_recog->x_initialized = true;
4133       return;
4134     }
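  /* The target has changed: discard the cached bool attribute masks and
     per-insn operand_alternative data so they are recomputed lazily for
     the new target.  */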
4135   memset (this_target_recog->x_bool_attr_masks, 0,
4136 	  sizeof (this_target_recog->x_bool_attr_masks));
4137   for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4138     if (this_target_recog->x_op_alt[i])
4139       {
4140 	free (this_target_recog->x_op_alt[i]);
4141 	this_target_recog->x_op_alt[i] = 0;
4142       }
4143 }
4144