1 /* Subroutines used by or related to instruction recognition.
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl-error.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 #include "hard-reg-set.h"
31 #include "recog.h"
32 #include "regs.h"
33 #include "addresses.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "basic-block.h"
38 #include "reload.h"
39 #include "target.h"
40 #include "tree-pass.h"
41 #include "df.h"
42 #include "insn-codes.h"
43 
44 #ifndef STACK_PUSH_CODE
45 #ifdef STACK_GROWS_DOWNWARD
46 #define STACK_PUSH_CODE PRE_DEC
47 #else
48 #define STACK_PUSH_CODE PRE_INC
49 #endif
50 #endif
51 
52 #ifndef STACK_POP_CODE
53 #ifdef STACK_GROWS_DOWNWARD
54 #define STACK_POP_CODE POST_INC
55 #else
56 #define STACK_POP_CODE POST_DEC
57 #endif
58 #endif
59 
60 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
63 
64 /* Nonzero means allow operands to be volatile.
65    This should be 0 if you are generating rtl, such as if you are calling
66    the functions in optabs.c and expmed.c (most of the time).
67    This should be 1 if all valid insns need to be recognized,
68    such as in reginfo.c and final.c and reload.c.
69 
70    init_recog and init_recog_no_volatile are responsible for setting this.  */
71 
72 int volatile_ok;
73 
74 struct recog_data_d recog_data;
75 
76 /* Contains a vector of operand_alternative structures for every operand.
77    Set up by preprocess_constraints.  */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 
80 /* On return from `constrain_operands', indicate which alternative
81    was satisfied.  */
82 
83 int which_alternative;
84 
85 /* Nonzero after end of reload pass.
86    Set to 1 or 0 by toplev.c.
87    Controls the significance of (SUBREG (MEM)).  */
88 
89 int reload_completed;
90 
91 /* Nonzero after thread_prologue_and_epilogue_insns has run.  */
92 int epilogue_completed;
93 
94 /* Initialize data used by the function `recog'.
95    This must be called once in the compilation of a function
96    before any insn recognition may be done in the function.  */
97 
98 void
init_recog_no_volatile(void)99 init_recog_no_volatile (void)
100 {
101   volatile_ok = 0;
102 }
103 
104 void
init_recog(void)105 init_recog (void)
106 {
107   volatile_ok = 1;
108 }
109 
110 
111 /* Return true if labels in asm operands BODY are LABEL_REFs.  */
112 
113 static bool
asm_labels_ok(rtx body)114 asm_labels_ok (rtx body)
115 {
116   rtx asmop;
117   int i;
118 
119   asmop = extract_asm_operands (body);
120   if (asmop == NULL_RTX)
121     return true;
122 
123   for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
124     if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
125       return false;
126 
127   return true;
128 }
129 
130 /* Check that X is an insn-body for an `asm' with operands
131    and that the operands mentioned in it are legitimate.  */
132 
133 int
check_asm_operands(rtx x)134 check_asm_operands (rtx x)
135 {
136   int noperands;
137   rtx *operands;
138   const char **constraints;
139   int i;
140 
141   if (!asm_labels_ok (x))
142     return 0;
143 
144   /* Post-reload, be more strict with things.  */
145   if (reload_completed)
146     {
147       /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
148       extract_insn (make_insn_raw (x));
149       constrain_operands (1);
150       return which_alternative >= 0;
151     }
152 
153   noperands = asm_noperands (x);
154   if (noperands < 0)
155     return 0;
156   if (noperands == 0)
157     return 1;
158 
159   operands = XALLOCAVEC (rtx, noperands);
160   constraints = XALLOCAVEC (const char *, noperands);
161 
162   decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
163 
164   for (i = 0; i < noperands; i++)
165     {
166       const char *c = constraints[i];
167       if (c[0] == '%')
168 	c++;
169       if (! asm_operand_ok (operands[i], c, constraints))
170 	return 0;
171     }
172 
173   return 1;
174 }
175 
176 /* Static data for the next two routines.  */
177 
178 typedef struct change_t
179 {
180   rtx object;
181   int old_code;
182   rtx *loc;
183   rtx old;
184   bool unshare;
185 } change_t;
186 
187 static change_t *changes;
188 static int changes_allocated;
189 
190 static int num_changes = 0;
191 
192 /* Validate a proposed change to OBJECT.  LOC is the location in the rtl
193    at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
194    the change is simply made.
195 
196    Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
197    will be called with the address and mode as parameters.  If OBJECT is
198    an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
199    the change in place.
200 
201    IN_GROUP is nonzero if this is part of a group of changes that must be
202    performed as a group.  In that case, the changes will be stored.  The
203    function `apply_change_group' will validate and apply the changes.
204 
205    If IN_GROUP is zero, this is a single change.  Try to recognize the insn
206    or validate the memory reference with the change applied.  If the result
207    is not valid for the machine, suppress the change and return zero.
208    Otherwise, perform the change and return 1.  */
209 
210 static bool
validate_change_1(rtx object,rtx * loc,rtx new_rtx,bool in_group,bool unshare)211 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
212 {
213   rtx old = *loc;
214 
215   if (old == new_rtx || rtx_equal_p (old, new_rtx))
216     return 1;
217 
218   gcc_assert (in_group != 0 || num_changes == 0);
219 
220   *loc = new_rtx;
221 
222   /* Save the information describing this change.  */
223   if (num_changes >= changes_allocated)
224     {
225       if (changes_allocated == 0)
226 	/* This value allows for repeated substitutions inside complex
227 	   indexed addresses, or changes in up to 5 insns.  */
228 	changes_allocated = MAX_RECOG_OPERANDS * 5;
229       else
230 	changes_allocated *= 2;
231 
232       changes = XRESIZEVEC (change_t, changes, changes_allocated);
233     }
234 
235   changes[num_changes].object = object;
236   changes[num_changes].loc = loc;
237   changes[num_changes].old = old;
238   changes[num_changes].unshare = unshare;
239 
240   if (object && !MEM_P (object))
241     {
242       /* Set INSN_CODE to force rerecognition of insn.  Save old code in
243 	 case invalid.  */
244       changes[num_changes].old_code = INSN_CODE (object);
245       INSN_CODE (object) = -1;
246     }
247 
248   num_changes++;
249 
250   /* If we are making a group of changes, return 1.  Otherwise, validate the
251      change group we made.  */
252 
253   if (in_group)
254     return 1;
255   else
256     return apply_change_group ();
257 }
258 
259 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
260    UNSHARE to false.  */
261 
262 bool
validate_change(rtx object,rtx * loc,rtx new_rtx,bool in_group)263 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
264 {
265   return validate_change_1 (object, loc, new_rtx, in_group, false);
266 }
267 
268 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
269    UNSHARE to true.  */
270 
271 bool
validate_unshare_change(rtx object,rtx * loc,rtx new_rtx,bool in_group)272 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
273 {
274   return validate_change_1 (object, loc, new_rtx, in_group, true);
275 }
276 
277 
278 /* Keep X canonicalized if some changes have made it non-canonical; only
279    modifies the operands of X, not (for example) its code.  Simplifications
280    are not the job of this routine.
281 
282    Return true if anything was changed.  */
283 bool
canonicalize_change_group(rtx insn,rtx x)284 canonicalize_change_group (rtx insn, rtx x)
285 {
286   if (COMMUTATIVE_P (x)
287       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
288     {
289       /* Oops, the caller has made X no longer canonical.
290 	 Let's redo the changes in the correct order.  */
291       rtx tem = XEXP (x, 0);
292       validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
293       validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
294       return true;
295     }
296   else
297     return false;
298 }
299 
300 
301 /* This subroutine of apply_change_group verifies whether the changes to INSN
302    were valid; i.e. whether INSN can still be recognized.
303 
304    If IN_GROUP is true clobbers which have to be added in order to
305    match the instructions will be added to the current change group.
306    Otherwise the changes will take effect immediately.  */
307 
308 int
insn_invalid_p(rtx insn,bool in_group)309 insn_invalid_p (rtx insn, bool in_group)
310 {
311   rtx pat = PATTERN (insn);
312   int num_clobbers = 0;
313   /* If we are before reload and the pattern is a SET, see if we can add
314      clobbers.  */
315   int icode = recog (pat, insn,
316 		     (GET_CODE (pat) == SET
317 		      && ! reload_completed
318                       && ! reload_in_progress)
319 		     ? &num_clobbers : 0);
320   int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
321 
322 
323   /* If this is an asm and the operand aren't legal, then fail.  Likewise if
324      this is not an asm and the insn wasn't recognized.  */
325   if ((is_asm && ! check_asm_operands (PATTERN (insn)))
326       || (!is_asm && icode < 0))
327     return 1;
328 
329   /* If we have to add CLOBBERs, fail if we have to add ones that reference
330      hard registers since our callers can't know if they are live or not.
331      Otherwise, add them.  */
332   if (num_clobbers > 0)
333     {
334       rtx newpat;
335 
336       if (added_clobbers_hard_reg_p (icode))
337 	return 1;
338 
339       newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
340       XVECEXP (newpat, 0, 0) = pat;
341       add_clobbers (newpat, icode);
342       if (in_group)
343 	validate_change (insn, &PATTERN (insn), newpat, 1);
344       else
345 	PATTERN (insn) = pat = newpat;
346     }
347 
348   /* After reload, verify that all constraints are satisfied.  */
349   if (reload_completed)
350     {
351       extract_insn (insn);
352 
353       if (! constrain_operands (1))
354 	return 1;
355     }
356 
357   INSN_CODE (insn) = icode;
358   return 0;
359 }
360 
361 /* Return number of changes made and not validated yet.  */
362 int
num_changes_pending(void)363 num_changes_pending (void)
364 {
365   return num_changes;
366 }
367 
368 /* Tentatively apply the changes numbered NUM and up.
369    Return 1 if all changes are valid, zero otherwise.  */
370 
371 int
verify_changes(int num)372 verify_changes (int num)
373 {
374   int i;
375   rtx last_validated = NULL_RTX;
376 
377   /* The changes have been applied and all INSN_CODEs have been reset to force
378      rerecognition.
379 
380      The changes are valid if we aren't given an object, or if we are
381      given a MEM and it still is a valid address, or if this is in insn
382      and it is recognized.  In the latter case, if reload has completed,
383      we also require that the operands meet the constraints for
384      the insn.  */
385 
386   for (i = num; i < num_changes; i++)
387     {
388       rtx object = changes[i].object;
389 
390       /* If there is no object to test or if it is the same as the one we
391          already tested, ignore it.  */
392       if (object == 0 || object == last_validated)
393 	continue;
394 
395       if (MEM_P (object))
396 	{
397 	  if (! memory_address_addr_space_p (GET_MODE (object),
398 					     XEXP (object, 0),
399 					     MEM_ADDR_SPACE (object)))
400 	    break;
401 	}
402       else if (/* changes[i].old might be zero, e.g. when putting a
403 	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
404 	       changes[i].old
405 	       && REG_P (changes[i].old)
406 	       && asm_noperands (PATTERN (object)) > 0
407 	       && REG_EXPR (changes[i].old) != NULL_TREE
408 	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
409 	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
410 	{
411 	  /* Don't allow changes of hard register operands to inline
412 	     assemblies if they have been defined as register asm ("x").  */
413 	  break;
414 	}
415       else if (DEBUG_INSN_P (object))
416 	continue;
417       else if (insn_invalid_p (object, true))
418 	{
419 	  rtx pat = PATTERN (object);
420 
421 	  /* Perhaps we couldn't recognize the insn because there were
422 	     extra CLOBBERs at the end.  If so, try to re-recognize
423 	     without the last CLOBBER (later iterations will cause each of
424 	     them to be eliminated, in turn).  But don't do this if we
425 	     have an ASM_OPERAND.  */
426 	  if (GET_CODE (pat) == PARALLEL
427 	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
428 	      && asm_noperands (PATTERN (object)) < 0)
429 	    {
430 	      rtx newpat;
431 
432 	      if (XVECLEN (pat, 0) == 2)
433 		newpat = XVECEXP (pat, 0, 0);
434 	      else
435 		{
436 		  int j;
437 
438 		  newpat
439 		    = gen_rtx_PARALLEL (VOIDmode,
440 					rtvec_alloc (XVECLEN (pat, 0) - 1));
441 		  for (j = 0; j < XVECLEN (newpat, 0); j++)
442 		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
443 		}
444 
445 	      /* Add a new change to this group to replace the pattern
446 		 with this new pattern.  Then consider this change
447 		 as having succeeded.  The change we added will
448 		 cause the entire call to fail if things remain invalid.
449 
450 		 Note that this can lose if a later change than the one
451 		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
452 		 but this shouldn't occur.  */
453 
454 	      validate_change (object, &PATTERN (object), newpat, 1);
455 	      continue;
456 	    }
457 	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
458 		   || GET_CODE (pat) == VAR_LOCATION)
459 	    /* If this insn is a CLOBBER or USE, it is always valid, but is
460 	       never recognized.  */
461 	    continue;
462 	  else
463 	    break;
464 	}
465       last_validated = object;
466     }
467 
468   return (i == num_changes);
469 }
470 
471 /* A group of changes has previously been issued with validate_change
472    and verified with verify_changes.  Call df_insn_rescan for each of
473    the insn changed and clear num_changes.  */
474 
475 void
confirm_change_group(void)476 confirm_change_group (void)
477 {
478   int i;
479   rtx last_object = NULL;
480 
481   for (i = 0; i < num_changes; i++)
482     {
483       rtx object = changes[i].object;
484 
485       if (changes[i].unshare)
486 	*changes[i].loc = copy_rtx (*changes[i].loc);
487 
488       /* Avoid unnecessary rescanning when multiple changes to same instruction
489          are made.  */
490       if (object)
491 	{
492 	  if (object != last_object && last_object && INSN_P (last_object))
493 	    df_insn_rescan (last_object);
494 	  last_object = object;
495 	}
496     }
497 
498   if (last_object && INSN_P (last_object))
499     df_insn_rescan (last_object);
500   num_changes = 0;
501 }
502 
503 /* Apply a group of changes previously issued with `validate_change'.
504    If all changes are valid, call confirm_change_group and return 1,
505    otherwise, call cancel_changes and return 0.  */
506 
507 int
apply_change_group(void)508 apply_change_group (void)
509 {
510   if (verify_changes (0))
511     {
512       confirm_change_group ();
513       return 1;
514     }
515   else
516     {
517       cancel_changes (0);
518       return 0;
519     }
520 }
521 
522 
523 /* Return the number of changes so far in the current group.  */
524 
525 int
num_validated_changes(void)526 num_validated_changes (void)
527 {
528   return num_changes;
529 }
530 
531 /* Retract the changes numbered NUM and up.  */
532 
533 void
cancel_changes(int num)534 cancel_changes (int num)
535 {
536   int i;
537 
538   /* Back out all the changes.  Do this in the opposite order in which
539      they were made.  */
540   for (i = num_changes - 1; i >= num; i--)
541     {
542       *changes[i].loc = changes[i].old;
543       if (changes[i].object && !MEM_P (changes[i].object))
544 	INSN_CODE (changes[i].object) = changes[i].old_code;
545     }
546   num_changes = num;
547 }
548 
549 /* Reduce conditional compilation elsewhere.  */
550 #ifndef HAVE_extv
551 #define HAVE_extv	0
552 #define CODE_FOR_extv	CODE_FOR_nothing
553 #endif
554 #ifndef HAVE_extzv
555 #define HAVE_extzv	0
556 #define CODE_FOR_extzv	CODE_FOR_nothing
557 #endif
558 
559 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
560    rtx.  */
561 
562 static void
simplify_while_replacing(rtx * loc,rtx to,rtx object,enum machine_mode op0_mode)563 simplify_while_replacing (rtx *loc, rtx to, rtx object,
564                           enum machine_mode op0_mode)
565 {
566   rtx x = *loc;
567   enum rtx_code code = GET_CODE (x);
568   rtx new_rtx = NULL_RTX;
569 
570   if (SWAPPABLE_OPERANDS_P (x)
571       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
572     {
573       validate_unshare_change (object, loc,
574 			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
575 					       : swap_condition (code),
576 					       GET_MODE (x), XEXP (x, 1),
577 					       XEXP (x, 0)), 1);
578       x = *loc;
579       code = GET_CODE (x);
580     }
581 
582   /* Canonicalize arithmetics with all constant operands.  */
583   switch (GET_RTX_CLASS (code))
584     {
585     case RTX_UNARY:
586       if (CONSTANT_P (XEXP (x, 0)))
587 	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
588 					    op0_mode);
589       break;
590     case RTX_COMM_ARITH:
591     case RTX_BIN_ARITH:
592       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
593 	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
594 					     XEXP (x, 1));
595       break;
596     case RTX_COMPARE:
597     case RTX_COMM_COMPARE:
598       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
599 	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
600 						 XEXP (x, 0), XEXP (x, 1));
601       break;
602     default:
603       break;
604     }
605   if (new_rtx)
606     {
607       validate_change (object, loc, new_rtx, 1);
608       return;
609     }
610 
611   switch (code)
612     {
613     case PLUS:
614       /* If we have a PLUS whose second operand is now a CONST_INT, use
615          simplify_gen_binary to try to simplify it.
616          ??? We may want later to remove this, once simplification is
617          separated from this function.  */
618       if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
619 	validate_change (object, loc,
620 			 simplify_gen_binary
621 			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
622       break;
623     case MINUS:
624       if (CONST_SCALAR_INT_P (XEXP (x, 1)))
625 	validate_change (object, loc,
626 			 simplify_gen_binary
627 			 (PLUS, GET_MODE (x), XEXP (x, 0),
628 			  simplify_gen_unary (NEG,
629 					      GET_MODE (x), XEXP (x, 1),
630 					      GET_MODE (x))), 1);
631       break;
632     case ZERO_EXTEND:
633     case SIGN_EXTEND:
634       if (GET_MODE (XEXP (x, 0)) == VOIDmode)
635 	{
636 	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
637 				    op0_mode);
638 	  /* If any of the above failed, substitute in something that
639 	     we know won't be recognized.  */
640 	  if (!new_rtx)
641 	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
642 	  validate_change (object, loc, new_rtx, 1);
643 	}
644       break;
645     case SUBREG:
646       /* All subregs possible to simplify should be simplified.  */
647       new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
648 			     SUBREG_BYTE (x));
649 
650       /* Subregs of VOIDmode operands are incorrect.  */
651       if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
652 	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
653       if (new_rtx)
654 	validate_change (object, loc, new_rtx, 1);
655       break;
656     case ZERO_EXTRACT:
657     case SIGN_EXTRACT:
658       /* If we are replacing a register with memory, try to change the memory
659          to be the mode required for memory in extract operations (this isn't
660          likely to be an insertion operation; if it was, nothing bad will
661          happen, we might just fail in some cases).  */
662 
663       if (MEM_P (XEXP (x, 0))
664 	  && CONST_INT_P (XEXP (x, 1))
665 	  && CONST_INT_P (XEXP (x, 2))
666 	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
667 					MEM_ADDR_SPACE (XEXP (x, 0)))
668 	  && !MEM_VOLATILE_P (XEXP (x, 0)))
669 	{
670 	  enum machine_mode wanted_mode = VOIDmode;
671 	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
672 	  int pos = INTVAL (XEXP (x, 2));
673 
674 	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
675 	    {
676 	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
677 	      if (wanted_mode == VOIDmode)
678 		wanted_mode = word_mode;
679 	    }
680 	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
681 	    {
682 	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
683 	      if (wanted_mode == VOIDmode)
684 		wanted_mode = word_mode;
685 	    }
686 
687 	  /* If we have a narrower mode, we can do something.  */
688 	  if (wanted_mode != VOIDmode
689 	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
690 	    {
691 	      int offset = pos / BITS_PER_UNIT;
692 	      rtx newmem;
693 
694 	      /* If the bytes and bits are counted differently, we
695 	         must adjust the offset.  */
696 	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
697 		offset =
698 		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
699 		   offset);
700 
701 	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
702 			  == GET_MODE_BITSIZE (wanted_mode));
703 	      pos %= GET_MODE_BITSIZE (wanted_mode);
704 
705 	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
706 
707 	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
708 	      validate_change (object, &XEXP (x, 0), newmem, 1);
709 	    }
710 	}
711 
712       break;
713 
714     default:
715       break;
716     }
717 }
718 
719 /* Replace every occurrence of FROM in X with TO.  Mark each change with
720    validate_change passing OBJECT.  */
721 
722 static void
validate_replace_rtx_1(rtx * loc,rtx from,rtx to,rtx object,bool simplify)723 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
724                         bool simplify)
725 {
726   int i, j;
727   const char *fmt;
728   rtx x = *loc;
729   enum rtx_code code;
730   enum machine_mode op0_mode = VOIDmode;
731   int prev_changes = num_changes;
732 
733   if (!x)
734     return;
735 
736   code = GET_CODE (x);
737   fmt = GET_RTX_FORMAT (code);
738   if (fmt[0] == 'e')
739     op0_mode = GET_MODE (XEXP (x, 0));
740 
741   /* X matches FROM if it is the same rtx or they are both referring to the
742      same register in the same mode.  Avoid calling rtx_equal_p unless the
743      operands look similar.  */
744 
745   if (x == from
746       || (REG_P (x) && REG_P (from)
747 	  && GET_MODE (x) == GET_MODE (from)
748 	  && REGNO (x) == REGNO (from))
749       || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
750 	  && rtx_equal_p (x, from)))
751     {
752       validate_unshare_change (object, loc, to, 1);
753       return;
754     }
755 
756   /* Call ourself recursively to perform the replacements.
757      We must not replace inside already replaced expression, otherwise we
758      get infinite recursion for replacements like (reg X)->(subreg (reg X))
759      so we must special case shared ASM_OPERANDS.  */
760 
761   if (GET_CODE (x) == PARALLEL)
762     {
763       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
764 	{
765 	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
766 	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
767 	    {
768 	      /* Verify that operands are really shared.  */
769 	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
770 			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
771 							      (x, 0, j))));
772 	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
773 				      from, to, object, simplify);
774 	    }
775 	  else
776 	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
777                                     simplify);
778 	}
779     }
780   else
781     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
782       {
783 	if (fmt[i] == 'e')
784 	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
785 	else if (fmt[i] == 'E')
786 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
787 	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
788                                     simplify);
789       }
790 
791   /* If we didn't substitute, there is nothing more to do.  */
792   if (num_changes == prev_changes)
793     return;
794 
795   /* ??? The regmove is no more, so is this aberration still necessary?  */
796   /* Allow substituted expression to have different mode.  This is used by
797      regmove to change mode of pseudo register.  */
798   if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
799     op0_mode = GET_MODE (XEXP (x, 0));
800 
801   /* Do changes needed to keep rtx consistent.  Don't do any other
802      simplifications, as it is not our job.  */
803   if (simplify)
804     simplify_while_replacing (loc, to, object, op0_mode);
805 }
806 
807 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
808    with TO.  After all changes have been made, validate by seeing
809    if INSN is still valid.  */
810 
811 int
validate_replace_rtx_subexp(rtx from,rtx to,rtx insn,rtx * loc)812 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
813 {
814   validate_replace_rtx_1 (loc, from, to, insn, true);
815   return apply_change_group ();
816 }
817 
818 /* Try replacing every occurrence of FROM in INSN with TO.  After all
819    changes have been made, validate by seeing if INSN is still valid.  */
820 
821 int
validate_replace_rtx(rtx from,rtx to,rtx insn)822 validate_replace_rtx (rtx from, rtx to, rtx insn)
823 {
824   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
825   return apply_change_group ();
826 }
827 
828 /* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
829    is a part of INSN.  After all changes have been made, validate by seeing if
830    INSN is still valid.
831    validate_replace_rtx (from, to, insn) is equivalent to
832    validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
833 
834 int
validate_replace_rtx_part(rtx from,rtx to,rtx * where,rtx insn)835 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
836 {
837   validate_replace_rtx_1 (where, from, to, insn, true);
838   return apply_change_group ();
839 }
840 
841 /* Same as above, but do not simplify rtx afterwards.  */
842 int
validate_replace_rtx_part_nosimplify(rtx from,rtx to,rtx * where,rtx insn)843 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
844                                       rtx insn)
845 {
846   validate_replace_rtx_1 (where, from, to, insn, false);
847   return apply_change_group ();
848 
849 }
850 
851 /* Try replacing every occurrence of FROM in INSN with TO.  This also
852    will replace in REG_EQUAL and REG_EQUIV notes.  */
853 
854 void
validate_replace_rtx_group(rtx from,rtx to,rtx insn)855 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
856 {
857   rtx note;
858   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
859   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
860     if (REG_NOTE_KIND (note) == REG_EQUAL
861 	|| REG_NOTE_KIND (note) == REG_EQUIV)
862       validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
863 }
864 
865 /* Function called by note_uses to replace used subexpressions.  */
866 struct validate_replace_src_data
867 {
868   rtx from;			/* Old RTX */
869   rtx to;			/* New RTX */
870   rtx insn;			/* Insn in which substitution is occurring.  */
871 };
872 
873 static void
validate_replace_src_1(rtx * x,void * data)874 validate_replace_src_1 (rtx *x, void *data)
875 {
876   struct validate_replace_src_data *d
877     = (struct validate_replace_src_data *) data;
878 
879   validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
880 }
881 
882 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
883    SET_DESTs.  */
884 
885 void
validate_replace_src_group(rtx from,rtx to,rtx insn)886 validate_replace_src_group (rtx from, rtx to, rtx insn)
887 {
888   struct validate_replace_src_data d;
889 
890   d.from = from;
891   d.to = to;
892   d.insn = insn;
893   note_uses (&PATTERN (insn), validate_replace_src_1, &d);
894 }
895 
896 /* Try simplify INSN.
897    Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
898    pattern and return true if something was simplified.  */
899 
900 bool
validate_simplify_insn(rtx insn)901 validate_simplify_insn (rtx insn)
902 {
903   int i;
904   rtx pat = NULL;
905   rtx newpat = NULL;
906 
907   pat = PATTERN (insn);
908 
909   if (GET_CODE (pat) == SET)
910     {
911       newpat = simplify_rtx (SET_SRC (pat));
912       if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
913 	validate_change (insn, &SET_SRC (pat), newpat, 1);
914       newpat = simplify_rtx (SET_DEST (pat));
915       if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
916 	validate_change (insn, &SET_DEST (pat), newpat, 1);
917     }
918   else if (GET_CODE (pat) == PARALLEL)
919     for (i = 0; i < XVECLEN (pat, 0); i++)
920       {
921 	rtx s = XVECEXP (pat, 0, i);
922 
923 	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
924 	  {
925 	    newpat = simplify_rtx (SET_SRC (s));
926 	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
927 	      validate_change (insn, &SET_SRC (s), newpat, 1);
928 	    newpat = simplify_rtx (SET_DEST (s));
929 	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
930 	      validate_change (insn, &SET_DEST (s), newpat, 1);
931 	  }
932       }
933   return ((num_changes_pending () > 0) && (apply_change_group () > 0));
934 }
935 
936 #ifdef HAVE_cc0
937 /* Return 1 if the insn using CC0 set by INSN does not contain
938    any ordered tests applied to the condition codes.
939    EQ and NE tests do not count.  */
940 
941 int
next_insn_tests_no_inequality(rtx insn)942 next_insn_tests_no_inequality (rtx insn)
943 {
944   rtx next = next_cc0_user (insn);
945 
946   /* If there is no next insn, we have to take the conservative choice.  */
947   if (next == 0)
948     return 0;
949 
950   return (INSN_P (next)
951 	  && ! inequality_comparisons_p (PATTERN (next)));
952 }
953 #endif
954 
955 /* Return 1 if OP is a valid general operand for machine mode MODE.
956    This is either a register reference, a memory reference,
957    or a constant.  In the case of a memory reference, the address
958    is checked for general validity for the target machine.
959 
960    Register and memory references must have mode MODE in order to be valid,
961    but some constants have no machine mode and are valid for any mode.
962 
963    If MODE is VOIDmode, OP is checked for validity for whatever mode
964    it has.
965 
966    The main use of this function is as a predicate in match_operand
967    expressions in the machine description.  */
968 
969 int
general_operand(rtx op,enum machine_mode mode)970 general_operand (rtx op, enum machine_mode mode)
971 {
972   enum rtx_code code = GET_CODE (op);
973 
974   if (mode == VOIDmode)
975     mode = GET_MODE (op);
976 
977   /* Don't accept CONST_INT or anything similar
978      if the caller wants something floating.  */
979   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
980       && GET_MODE_CLASS (mode) != MODE_INT
981       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
982     return 0;
983 
984   if (CONST_INT_P (op)
985       && mode != VOIDmode
986       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
987     return 0;
988 
989   if (CONSTANT_P (op))
990     return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
991 	     || mode == VOIDmode)
992 	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
993 	    && targetm.legitimate_constant_p (mode == VOIDmode
994 					      ? GET_MODE (op)
995 					      : mode, op));
996 
997   /* Except for certain constants with VOIDmode, already checked for,
998      OP's mode must match MODE if MODE specifies a mode.  */
999 
1000   if (GET_MODE (op) != mode)
1001     return 0;
1002 
1003   if (code == SUBREG)
1004     {
1005       rtx sub = SUBREG_REG (op);
1006 
1007 #ifdef INSN_SCHEDULING
1008       /* On machines that have insn scheduling, we want all memory
1009 	 reference to be explicit, so outlaw paradoxical SUBREGs.
1010 	 However, we must allow them after reload so that they can
1011 	 get cleaned up by cleanup_subreg_operands.  */
1012       if (!reload_completed && MEM_P (sub)
1013 	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
1014 	return 0;
1015 #endif
1016       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1017          may result in incorrect reference.  We should simplify all valid
1018          subregs of MEM anyway.  But allow this after reload because we
1019 	 might be called from cleanup_subreg_operands.
1020 
1021 	 ??? This is a kludge.  */
1022       if (!reload_completed && SUBREG_BYTE (op) != 0
1023 	  && MEM_P (sub))
1024 	return 0;
1025 
1026 #ifdef CANNOT_CHANGE_MODE_CLASS
1027       if (REG_P (sub)
1028 	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
1029 	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1030 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1031 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1032 	  /* LRA can generate some invalid SUBREGS just for matched
1033 	     operand reload presentation.  LRA needs to treat them as
1034 	     valid.  */
1035 	  && ! LRA_SUBREG_P (op))
1036 	return 0;
1037 #endif
1038 
1039       /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1040 	 create such rtl, and we must reject it.  */
1041       if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1042 	  /* LRA can use subreg to store a floating point value in an
1043 	     integer mode.  Although the floating point and the
1044 	     integer modes need the same number of hard registers, the
1045 	     size of floating point mode can be less than the integer
1046 	     mode.  */
1047 	  && ! lra_in_progress
1048 	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1049 	return 0;
1050 
1051       op = sub;
1052       code = GET_CODE (op);
1053     }
1054 
1055   if (code == REG)
1056     return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1057 	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1058 
1059   if (code == MEM)
1060     {
1061       rtx y = XEXP (op, 0);
1062 
1063       if (! volatile_ok && MEM_VOLATILE_P (op))
1064 	return 0;
1065 
1066       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1067 	 generate move insn with invalid addresses which is made valid
1068 	 and efficiently calculated by LRA through further numerous
1069 	 transformations.  */
1070       if (lra_in_progress
1071 	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1072 	return 1;
1073     }
1074 
1075   return 0;
1076 }
1077 
1078 /* Return 1 if OP is a valid memory address for a memory reference
1079    of mode MODE.
1080 
1081    The main use of this function is as a predicate in match_operand
1082    expressions in the machine description.  */
1083 
1084 int
address_operand(rtx op,enum machine_mode mode)1085 address_operand (rtx op, enum machine_mode mode)
1086 {
1087   return memory_address_p (mode, op);
1088 }
1089 
1090 /* Return 1 if OP is a register reference of mode MODE.
1091    If MODE is VOIDmode, accept a register in any mode.
1092 
1093    The main use of this function is as a predicate in match_operand
1094    expressions in the machine description.  */
1095 
1096 int
register_operand(rtx op,enum machine_mode mode)1097 register_operand (rtx op, enum machine_mode mode)
1098 {
1099   if (GET_CODE (op) == SUBREG)
1100     {
1101       rtx sub = SUBREG_REG (op);
1102 
1103       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1104 	 because it is guaranteed to be reloaded into one.
1105 	 Just make sure the MEM is valid in itself.
1106 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1107 	 but currently it does result from (SUBREG (REG)...) where the
1108 	 reg went on the stack.)  */
1109       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1110 	return 0;
1111     }
1112   else if (!REG_P (op))
1113     return 0;
1114   return general_operand (op, mode);
1115 }
1116 
1117 /* Return 1 for a register in Pmode; ignore the tested mode.  */
1118 
1119 int
pmode_register_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)1120 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1121 {
1122   return register_operand (op, Pmode);
1123 }
1124 
1125 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1126    or a hard register.  */
1127 
1128 int
scratch_operand(rtx op,enum machine_mode mode)1129 scratch_operand (rtx op, enum machine_mode mode)
1130 {
1131   if (GET_MODE (op) != mode && mode != VOIDmode)
1132     return 0;
1133 
1134   return (GET_CODE (op) == SCRATCH
1135 	  || (REG_P (op)
1136 	      && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
1137 }
1138 
1139 /* Return 1 if OP is a valid immediate operand for mode MODE.
1140 
1141    The main use of this function is as a predicate in match_operand
1142    expressions in the machine description.  */
1143 
1144 int
immediate_operand(rtx op,enum machine_mode mode)1145 immediate_operand (rtx op, enum machine_mode mode)
1146 {
1147   /* Don't accept CONST_INT or anything similar
1148      if the caller wants something floating.  */
1149   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1150       && GET_MODE_CLASS (mode) != MODE_INT
1151       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1152     return 0;
1153 
1154   if (CONST_INT_P (op)
1155       && mode != VOIDmode
1156       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1157     return 0;
1158 
1159   return (CONSTANT_P (op)
1160 	  && (GET_MODE (op) == mode || mode == VOIDmode
1161 	      || GET_MODE (op) == VOIDmode)
1162 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1163 	  && targetm.legitimate_constant_p (mode == VOIDmode
1164 					    ? GET_MODE (op)
1165 					    : mode, op));
1166 }
1167 
1168 /* Returns 1 if OP is an operand that is a CONST_INT.  */
1169 
1170 int
const_int_operand(rtx op,enum machine_mode mode)1171 const_int_operand (rtx op, enum machine_mode mode)
1172 {
1173   if (!CONST_INT_P (op))
1174     return 0;
1175 
1176   if (mode != VOIDmode
1177       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1178     return 0;
1179 
1180   return 1;
1181 }
1182 
1183 /* Returns 1 if OP is an operand that is a constant integer or constant
1184    floating-point number.  */
1185 
1186 int
const_double_operand(rtx op,enum machine_mode mode)1187 const_double_operand (rtx op, enum machine_mode mode)
1188 {
1189   /* Don't accept CONST_INT or anything similar
1190      if the caller wants something floating.  */
1191   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1192       && GET_MODE_CLASS (mode) != MODE_INT
1193       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1194     return 0;
1195 
1196   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1197 	  && (mode == VOIDmode || GET_MODE (op) == mode
1198 	      || GET_MODE (op) == VOIDmode));
1199 }
1200 
1201 /* Return 1 if OP is a general operand that is not an immediate operand.  */
1202 
1203 int
nonimmediate_operand(rtx op,enum machine_mode mode)1204 nonimmediate_operand (rtx op, enum machine_mode mode)
1205 {
1206   return (general_operand (op, mode) && ! CONSTANT_P (op));
1207 }
1208 
1209 /* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1210 
1211 int
nonmemory_operand(rtx op,enum machine_mode mode)1212 nonmemory_operand (rtx op, enum machine_mode mode)
1213 {
1214   if (CONSTANT_P (op))
1215     return immediate_operand (op, mode);
1216   return register_operand (op, mode);
1217 }
1218 
1219 /* Return 1 if OP is a valid operand that stands for pushing a
1220    value of mode MODE onto the stack.
1221 
1222    The main use of this function is as a predicate in match_operand
1223    expressions in the machine description.  */
1224 
1225 int
push_operand(rtx op,enum machine_mode mode)1226 push_operand (rtx op, enum machine_mode mode)
1227 {
1228   unsigned int rounded_size = GET_MODE_SIZE (mode);
1229 
1230 #ifdef PUSH_ROUNDING
1231   rounded_size = PUSH_ROUNDING (rounded_size);
1232 #endif
1233 
1234   if (!MEM_P (op))
1235     return 0;
1236 
1237   if (mode != VOIDmode && GET_MODE (op) != mode)
1238     return 0;
1239 
1240   op = XEXP (op, 0);
1241 
1242   if (rounded_size == GET_MODE_SIZE (mode))
1243     {
1244       if (GET_CODE (op) != STACK_PUSH_CODE)
1245 	return 0;
1246     }
1247   else
1248     {
1249       if (GET_CODE (op) != PRE_MODIFY
1250 	  || GET_CODE (XEXP (op, 1)) != PLUS
1251 	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1252 	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1253 #ifdef STACK_GROWS_DOWNWARD
1254 	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1255 #else
1256 	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1257 #endif
1258 	  )
1259 	return 0;
1260     }
1261 
1262   return XEXP (op, 0) == stack_pointer_rtx;
1263 }
1264 
1265 /* Return 1 if OP is a valid operand that stands for popping a
1266    value of mode MODE off the stack.
1267 
1268    The main use of this function is as a predicate in match_operand
1269    expressions in the machine description.  */
1270 
1271 int
pop_operand(rtx op,enum machine_mode mode)1272 pop_operand (rtx op, enum machine_mode mode)
1273 {
1274   if (!MEM_P (op))
1275     return 0;
1276 
1277   if (mode != VOIDmode && GET_MODE (op) != mode)
1278     return 0;
1279 
1280   op = XEXP (op, 0);
1281 
1282   if (GET_CODE (op) != STACK_POP_CODE)
1283     return 0;
1284 
1285   return XEXP (op, 0) == stack_pointer_rtx;
1286 }
1287 
1288 /* Return 1 if ADDR is a valid memory address
1289    for mode MODE in address space AS.  */
1290 
1291 int
memory_address_addr_space_p(enum machine_mode mode ATTRIBUTE_UNUSED,rtx addr,addr_space_t as)1292 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1293 			     rtx addr, addr_space_t as)
1294 {
1295 #ifdef GO_IF_LEGITIMATE_ADDRESS
1296   gcc_assert (ADDR_SPACE_GENERIC_P (as));
1297   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1298   return 0;
1299 
1300  win:
1301   return 1;
1302 #else
1303   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1304 #endif
1305 }
1306 
1307 /* Return 1 if OP is a valid memory reference with mode MODE,
1308    including a valid address.
1309 
1310    The main use of this function is as a predicate in match_operand
1311    expressions in the machine description.  */
1312 
1313 int
memory_operand(rtx op,enum machine_mode mode)1314 memory_operand (rtx op, enum machine_mode mode)
1315 {
1316   rtx inner;
1317 
1318   if (! reload_completed)
1319     /* Note that no SUBREG is a memory operand before end of reload pass,
1320        because (SUBREG (MEM...)) forces reloading into a register.  */
1321     return MEM_P (op) && general_operand (op, mode);
1322 
1323   if (mode != VOIDmode && GET_MODE (op) != mode)
1324     return 0;
1325 
1326   inner = op;
1327   if (GET_CODE (inner) == SUBREG)
1328     inner = SUBREG_REG (inner);
1329 
1330   return (MEM_P (inner) && general_operand (op, mode));
1331 }
1332 
1333 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1334    that is, a memory reference whose address is a general_operand.  */
1335 
1336 int
indirect_operand(rtx op,enum machine_mode mode)1337 indirect_operand (rtx op, enum machine_mode mode)
1338 {
1339   /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1340   if (! reload_completed
1341       && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1342     {
1343       int offset = SUBREG_BYTE (op);
1344       rtx inner = SUBREG_REG (op);
1345 
1346       if (mode != VOIDmode && GET_MODE (op) != mode)
1347 	return 0;
1348 
1349       /* The only way that we can have a general_operand as the resulting
1350 	 address is if OFFSET is zero and the address already is an operand
1351 	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1352 	 operand.  */
1353 
1354       return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1355 	      || (GET_CODE (XEXP (inner, 0)) == PLUS
1356 		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1357 		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1358 		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1359     }
1360 
1361   return (MEM_P (op)
1362 	  && memory_operand (op, mode)
1363 	  && general_operand (XEXP (op, 0), Pmode));
1364 }
1365 
1366 /* Return 1 if this is an ordered comparison operator (not including
1367    ORDERED and UNORDERED).  */
1368 
1369 int
ordered_comparison_operator(rtx op,enum machine_mode mode)1370 ordered_comparison_operator (rtx op, enum machine_mode mode)
1371 {
1372   if (mode != VOIDmode && GET_MODE (op) != mode)
1373     return false;
1374   switch (GET_CODE (op))
1375     {
1376     case EQ:
1377     case NE:
1378     case LT:
1379     case LTU:
1380     case LE:
1381     case LEU:
1382     case GT:
1383     case GTU:
1384     case GE:
1385     case GEU:
1386       return true;
1387     default:
1388       return false;
1389     }
1390 }
1391 
1392 /* Return 1 if this is a comparison operator.  This allows the use of
1393    MATCH_OPERATOR to recognize all the branch insns.  */
1394 
1395 int
comparison_operator(rtx op,enum machine_mode mode)1396 comparison_operator (rtx op, enum machine_mode mode)
1397 {
1398   return ((mode == VOIDmode || GET_MODE (op) == mode)
1399 	  && COMPARISON_P (op));
1400 }
1401 
1402 /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1403 
1404 rtx
extract_asm_operands(rtx body)1405 extract_asm_operands (rtx body)
1406 {
1407   rtx tmp;
1408   switch (GET_CODE (body))
1409     {
1410     case ASM_OPERANDS:
1411       return body;
1412 
1413     case SET:
1414       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1415       tmp = SET_SRC (body);
1416       if (GET_CODE (tmp) == ASM_OPERANDS)
1417 	return tmp;
1418       break;
1419 
1420     case PARALLEL:
1421       tmp = XVECEXP (body, 0, 0);
1422       if (GET_CODE (tmp) == ASM_OPERANDS)
1423 	return tmp;
1424       if (GET_CODE (tmp) == SET)
1425 	{
1426 	  tmp = SET_SRC (tmp);
1427 	  if (GET_CODE (tmp) == ASM_OPERANDS)
1428 	    return tmp;
1429 	}
1430       break;
1431 
1432     default:
1433       break;
1434     }
1435   return NULL;
1436 }
1437 
1438 /* If BODY is an insn body that uses ASM_OPERANDS,
1439    return the number of operands (both input and output) in the insn.
1440    Otherwise return -1.  */
1441 
1442 int
asm_noperands(const_rtx body)1443 asm_noperands (const_rtx body)
1444 {
1445   rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1446   int n_sets = 0;
1447 
1448   if (asm_op == NULL)
1449     return -1;
1450 
1451   if (GET_CODE (body) == SET)
1452     n_sets = 1;
1453   else if (GET_CODE (body) == PARALLEL)
1454     {
1455       int i;
1456       if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1457 	{
1458 	  /* Multiple output operands, or 1 output plus some clobbers:
1459 	     body is
1460 	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1461 	  /* Count backwards through CLOBBERs to determine number of SETs.  */
1462 	  for (i = XVECLEN (body, 0); i > 0; i--)
1463 	    {
1464 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1465 		break;
1466 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1467 		return -1;
1468 	    }
1469 
1470 	  /* N_SETS is now number of output operands.  */
1471 	  n_sets = i;
1472 
1473 	  /* Verify that all the SETs we have
1474 	     came from a single original asm_operands insn
1475 	     (so that invalid combinations are blocked).  */
1476 	  for (i = 0; i < n_sets; i++)
1477 	    {
1478 	      rtx elt = XVECEXP (body, 0, i);
1479 	      if (GET_CODE (elt) != SET)
1480 		return -1;
1481 	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1482 		return -1;
1483 	      /* If these ASM_OPERANDS rtx's came from different original insns
1484 	         then they aren't allowed together.  */
1485 	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1486 		  != ASM_OPERANDS_INPUT_VEC (asm_op))
1487 		return -1;
1488 	    }
1489 	}
1490       else
1491 	{
1492 	  /* 0 outputs, but some clobbers:
1493 	     body is [(asm_operands ...) (clobber (reg ...))...].  */
1494 	  /* Make sure all the other parallel things really are clobbers.  */
1495 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1496 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1497 	      return -1;
1498 	}
1499     }
1500 
1501   return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1502 	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1503 }
1504 
1505 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1506    copy its operands (both input and output) into the vector OPERANDS,
1507    the locations of the operands within the insn into the vector OPERAND_LOCS,
1508    and the constraints for the operands into CONSTRAINTS.
1509    Write the modes of the operands into MODES.
1510    Return the assembler-template.
1511 
1512    If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1513    we don't store that info.  */
1514 
1515 const char *
decode_asm_operands(rtx body,rtx * operands,rtx ** operand_locs,const char ** constraints,enum machine_mode * modes,location_t * loc)1516 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1517 		     const char **constraints, enum machine_mode *modes,
1518 		     location_t *loc)
1519 {
1520   int nbase = 0, n, i;
1521   rtx asmop;
1522 
1523   switch (GET_CODE (body))
1524     {
1525     case ASM_OPERANDS:
1526       /* Zero output asm: BODY is (asm_operands ...).  */
1527       asmop = body;
1528       break;
1529 
1530     case SET:
1531       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
1532       asmop = SET_SRC (body);
1533 
1534       /* The output is in the SET.
1535 	 Its constraint is in the ASM_OPERANDS itself.  */
1536       if (operands)
1537 	operands[0] = SET_DEST (body);
1538       if (operand_locs)
1539 	operand_locs[0] = &SET_DEST (body);
1540       if (constraints)
1541 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1542       if (modes)
1543 	modes[0] = GET_MODE (SET_DEST (body));
1544       nbase = 1;
1545       break;
1546 
1547     case PARALLEL:
1548       {
1549 	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
1550 
1551 	asmop = XVECEXP (body, 0, 0);
1552 	if (GET_CODE (asmop) == SET)
1553 	  {
1554 	    asmop = SET_SRC (asmop);
1555 
1556 	    /* At least one output, plus some CLOBBERs.  The outputs are in
1557 	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
1558 	    for (i = 0; i < nparallel; i++)
1559 	      {
1560 		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1561 		  break;		/* Past last SET */
1562 		if (operands)
1563 		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
1564 		if (operand_locs)
1565 		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1566 		if (constraints)
1567 		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1568 		if (modes)
1569 		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1570 	      }
1571 	    nbase = i;
1572 	  }
1573 	break;
1574       }
1575 
1576     default:
1577       gcc_unreachable ();
1578     }
1579 
1580   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1581   for (i = 0; i < n; i++)
1582     {
1583       if (operand_locs)
1584 	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1585       if (operands)
1586 	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1587       if (constraints)
1588 	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1589       if (modes)
1590 	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1591     }
1592   nbase += n;
1593 
1594   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1595   for (i = 0; i < n; i++)
1596     {
1597       if (operand_locs)
1598 	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1599       if (operands)
1600 	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1601       if (constraints)
1602 	constraints[nbase + i] = "";
1603       if (modes)
1604 	modes[nbase + i] = Pmode;
1605     }
1606 
1607   if (loc)
1608     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1609 
1610   return ASM_OPERANDS_TEMPLATE (asmop);
1611 }
1612 
1613 /* Parse inline assembly string STRING and determine which operands are
1614    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1615    to true if operand I is referenced.
1616 
1617    This is intended to distinguish barrier-like asms such as:
1618 
1619       asm ("" : "=m" (...));
1620 
1621    from real references such as:
1622 
1623       asm ("sw\t$0, %0" : "=m" (...));  */
1624 
1625 void
get_referenced_operands(const char * string,bool * used,unsigned int noperands)1626 get_referenced_operands (const char *string, bool *used,
1627 			 unsigned int noperands)
1628 {
1629   memset (used, 0, sizeof (bool) * noperands);
1630   const char *p = string;
1631   while (*p)
1632     switch (*p)
1633       {
1634       case '%':
1635 	p += 1;
1636 	/* A letter followed by a digit indicates an operand number.  */
1637 	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1638 	  p += 1;
1639 	if (ISDIGIT (*p))
1640 	  {
1641 	    char *endptr;
1642 	    unsigned long opnum = strtoul (p, &endptr, 10);
1643 	    if (endptr != p && opnum < noperands)
1644 	      used[opnum] = true;
1645 	    p = endptr;
1646 	  }
1647 	else
1648 	  p += 1;
1649 	break;
1650 
1651       default:
1652 	p++;
1653 	break;
1654       }
1655 }
1656 
1657 /* Check if an asm_operand matches its constraints.
1658    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1659 
1660 int
asm_operand_ok(rtx op,const char * constraint,const char ** constraints)1661 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1662 {
1663   int result = 0;
1664 #ifdef AUTO_INC_DEC
1665   bool incdec_ok = false;
1666 #endif
1667 
1668   /* Use constrain_operands after reload.  */
1669   gcc_assert (!reload_completed);
1670 
1671   /* Empty constraint string is the same as "X,...,X", i.e. X for as
1672      many alternatives as required to match the other operands.  */
1673   if (*constraint == '\0')
1674     result = 1;
1675 
1676   while (*constraint)
1677     {
1678       char c = *constraint;
1679       int len;
1680       switch (c)
1681 	{
1682 	case ',':
1683 	  constraint++;
1684 	  continue;
1685 	case '=':
1686 	case '+':
1687 	case '*':
1688 	case '%':
1689 	case '!':
1690 	case '#':
1691 	case '&':
1692 	case '?':
1693 	  break;
1694 
1695 	case '0': case '1': case '2': case '3': case '4':
1696 	case '5': case '6': case '7': case '8': case '9':
1697 	  /* If caller provided constraints pointer, look up
1698 	     the matching constraint.  Otherwise, our caller should have
1699 	     given us the proper matching constraint, but we can't
1700 	     actually fail the check if they didn't.  Indicate that
1701 	     results are inconclusive.  */
1702 	  if (constraints)
1703 	    {
1704 	      char *end;
1705 	      unsigned long match;
1706 
1707 	      match = strtoul (constraint, &end, 10);
1708 	      if (!result)
1709 		result = asm_operand_ok (op, constraints[match], NULL);
1710 	      constraint = (const char *) end;
1711 	    }
1712 	  else
1713 	    {
1714 	      do
1715 		constraint++;
1716 	      while (ISDIGIT (*constraint));
1717 	      if (! result)
1718 		result = -1;
1719 	    }
1720 	  continue;
1721 
1722 	case 'p':
1723 	  if (address_operand (op, VOIDmode))
1724 	    result = 1;
1725 	  break;
1726 
1727 	case TARGET_MEM_CONSTRAINT:
1728 	case 'V': /* non-offsettable */
1729 	  if (memory_operand (op, VOIDmode))
1730 	    result = 1;
1731 	  break;
1732 
1733 	case 'o': /* offsettable */
1734 	  if (offsettable_nonstrict_memref_p (op))
1735 	    result = 1;
1736 	  break;
1737 
1738 	case '<':
1739 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1740 	     excepting those that expand_call created.  Further, on some
1741 	     machines which do not have generalized auto inc/dec, an inc/dec
1742 	     is not a memory_operand.
1743 
1744 	     Match any memory and hope things are resolved after reload.  */
1745 
1746 	  if (MEM_P (op)
1747 	      && (1
1748 		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
1749 		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
1750 	    result = 1;
1751 #ifdef AUTO_INC_DEC
1752 	  incdec_ok = true;
1753 #endif
1754 	  break;
1755 
1756 	case '>':
1757 	  if (MEM_P (op)
1758 	      && (1
1759 		  || GET_CODE (XEXP (op, 0)) == PRE_INC
1760 		  || GET_CODE (XEXP (op, 0)) == POST_INC))
1761 	    result = 1;
1762 #ifdef AUTO_INC_DEC
1763 	  incdec_ok = true;
1764 #endif
1765 	  break;
1766 
1767 	case 'E':
1768 	case 'F':
1769 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1770 	      || (GET_CODE (op) == CONST_VECTOR
1771 		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1772 	    result = 1;
1773 	  break;
1774 
1775 	case 'G':
1776 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1777 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1778 	    result = 1;
1779 	  break;
1780 	case 'H':
1781 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1782 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1783 	    result = 1;
1784 	  break;
1785 
1786 	case 's':
1787 	  if (CONST_SCALAR_INT_P (op))
1788 	    break;
1789 	  /* Fall through.  */
1790 
1791 	case 'i':
1792 	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1793 	    result = 1;
1794 	  break;
1795 
1796 	case 'n':
1797 	  if (CONST_SCALAR_INT_P (op))
1798 	    result = 1;
1799 	  break;
1800 
1801 	case 'I':
1802 	  if (CONST_INT_P (op)
1803 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1804 	    result = 1;
1805 	  break;
1806 	case 'J':
1807 	  if (CONST_INT_P (op)
1808 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1809 	    result = 1;
1810 	  break;
1811 	case 'K':
1812 	  if (CONST_INT_P (op)
1813 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1814 	    result = 1;
1815 	  break;
1816 	case 'L':
1817 	  if (CONST_INT_P (op)
1818 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1819 	    result = 1;
1820 	  break;
1821 	case 'M':
1822 	  if (CONST_INT_P (op)
1823 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1824 	    result = 1;
1825 	  break;
1826 	case 'N':
1827 	  if (CONST_INT_P (op)
1828 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1829 	    result = 1;
1830 	  break;
1831 	case 'O':
1832 	  if (CONST_INT_P (op)
1833 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1834 	    result = 1;
1835 	  break;
1836 	case 'P':
1837 	  if (CONST_INT_P (op)
1838 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1839 	    result = 1;
1840 	  break;
1841 
1842 	case 'X':
1843 	  result = 1;
1844 	  break;
1845 
1846 	case 'g':
1847 	  if (general_operand (op, VOIDmode))
1848 	    result = 1;
1849 	  break;
1850 
1851 	default:
1852 	  /* For all other letters, we first check for a register class,
1853 	     otherwise it is an EXTRA_CONSTRAINT.  */
1854 	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1855 	    {
1856 	    case 'r':
1857 	      if (GET_MODE (op) == BLKmode)
1858 		break;
1859 	      if (register_operand (op, VOIDmode))
1860 		result = 1;
1861 	    }
1862 #ifdef EXTRA_CONSTRAINT_STR
1863 	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1864 	    /* Every memory operand can be reloaded to fit.  */
1865 	    result = result || memory_operand (op, VOIDmode);
1866 	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1867 	    /* Every address operand can be reloaded to fit.  */
1868 	    result = result || address_operand (op, VOIDmode);
1869 	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1870 	    result = 1;
1871 #endif
1872 	  break;
1873 	}
1874       len = CONSTRAINT_LEN (c, constraint);
1875       do
1876 	constraint++;
1877       while (--len && *constraint);
1878       if (len)
1879 	return 0;
1880     }
1881 
1882 #ifdef AUTO_INC_DEC
1883   /* For operands without < or > constraints reject side-effects.  */
1884   if (!incdec_ok && result && MEM_P (op))
1885     switch (GET_CODE (XEXP (op, 0)))
1886       {
1887       case PRE_INC:
1888       case POST_INC:
1889       case PRE_DEC:
1890       case POST_DEC:
1891       case PRE_MODIFY:
1892       case POST_MODIFY:
1893 	return 0;
1894       default:
1895 	break;
1896       }
1897 #endif
1898 
1899   return result;
1900 }
1901 
1902 /* Given an rtx *P, if it is a sum containing an integer constant term,
1903    return the location (type rtx *) of the pointer to that constant term.
1904    Otherwise, return a null pointer.  */
1905 
1906 rtx *
find_constant_term_loc(rtx * p)1907 find_constant_term_loc (rtx *p)
1908 {
1909   rtx *tem;
1910   enum rtx_code code = GET_CODE (*p);
1911 
1912   /* If *P IS such a constant term, P is its location.  */
1913 
1914   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1915       || code == CONST)
1916     return p;
1917 
1918   /* Otherwise, if not a sum, it has no constant term.  */
1919 
1920   if (GET_CODE (*p) != PLUS)
1921     return 0;
1922 
1923   /* If one of the summands is constant, return its location.  */
1924 
1925   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1926       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1927     return p;
1928 
1929   /* Otherwise, check each summand for containing a constant term.  */
1930 
1931   if (XEXP (*p, 0) != 0)
1932     {
1933       tem = find_constant_term_loc (&XEXP (*p, 0));
1934       if (tem != 0)
1935 	return tem;
1936     }
1937 
1938   if (XEXP (*p, 1) != 0)
1939     {
1940       tem = find_constant_term_loc (&XEXP (*p, 1));
1941       if (tem != 0)
1942 	return tem;
1943     }
1944 
1945   return 0;
1946 }
1947 
1948 /* Return 1 if OP is a memory reference
1949    whose address contains no side effects
1950    and remains valid after the addition
1951    of a positive integer less than the
1952    size of the object being referenced.
1953 
1954    We assume that the original address is valid and do not check it.
1955 
1956    This uses strict_memory_address_p as a subroutine, so
1957    don't use it before reload.  */
1958 
1959 int
offsettable_memref_p(rtx op)1960 offsettable_memref_p (rtx op)
1961 {
1962   return ((MEM_P (op))
1963 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1964 					       MEM_ADDR_SPACE (op)));
1965 }
1966 
1967 /* Similar, but don't require a strictly valid mem ref:
1968    consider pseudo-regs valid as index or base regs.  */
1969 
1970 int
offsettable_nonstrict_memref_p(rtx op)1971 offsettable_nonstrict_memref_p (rtx op)
1972 {
1973   return ((MEM_P (op))
1974 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1975 					       MEM_ADDR_SPACE (op)));
1976 }
1977 
1978 /* Return 1 if Y is a memory address which contains no side effects
1979    and would remain valid for address space AS after the addition of
1980    a positive integer less than the size of that mode.
1981 
1982    We assume that the original address is valid and do not check it.
1983    We do check that it is valid for narrower modes.
1984 
1985    If STRICTP is nonzero, we require a strictly valid address,
1986    for the sake of use in reload.c.  */
1987 
1988 int
offsettable_address_addr_space_p(int strictp,enum machine_mode mode,rtx y,addr_space_t as)1989 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1990 				  addr_space_t as)
1991 {
1992   enum rtx_code ycode = GET_CODE (y);
1993   rtx z;
1994   rtx y1 = y;
1995   rtx *y2;
1996   int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1997     (strictp ? strict_memory_address_addr_space_p
1998 	     : memory_address_addr_space_p);
1999   unsigned int mode_sz = GET_MODE_SIZE (mode);
2000 
2001   if (CONSTANT_ADDRESS_P (y))
2002     return 1;
2003 
2004   /* Adjusting an offsettable address involves changing to a narrower mode.
2005      Make sure that's OK.  */
2006 
2007   if (mode_dependent_address_p (y, as))
2008     return 0;
2009 
2010   enum machine_mode address_mode = GET_MODE (y);
2011   if (address_mode == VOIDmode)
2012     address_mode = targetm.addr_space.address_mode (as);
2013 #ifdef POINTERS_EXTEND_UNSIGNED
2014   enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2015 #endif
2016 
2017   /* ??? How much offset does an offsettable BLKmode reference need?
2018      Clearly that depends on the situation in which it's being used.
2019      However, the current situation in which we test 0xffffffff is
2020      less than ideal.  Caveat user.  */
2021   if (mode_sz == 0)
2022     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2023 
2024   /* If the expression contains a constant term,
2025      see if it remains valid when max possible offset is added.  */
2026 
2027   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2028     {
2029       int good;
2030 
2031       y1 = *y2;
2032       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2033       /* Use QImode because an odd displacement may be automatically invalid
2034 	 for any wider mode.  But it should be valid for a single byte.  */
2035       good = (*addressp) (QImode, y, as);
2036 
2037       /* In any case, restore old contents of memory.  */
2038       *y2 = y1;
2039       return good;
2040     }
2041 
2042   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2043     return 0;
2044 
2045   /* The offset added here is chosen as the maximum offset that
2046      any instruction could need to add when operating on something
2047      of the specified mode.  We assume that if Y and Y+c are
2048      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2049      go inside a LO_SUM here, so we do so as well.  */
2050   if (GET_CODE (y) == LO_SUM
2051       && mode != BLKmode
2052       && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2053     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2054 			plus_constant (address_mode, XEXP (y, 1),
2055 				       mode_sz - 1));
2056 #ifdef POINTERS_EXTEND_UNSIGNED
2057   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2058   else if (POINTERS_EXTEND_UNSIGNED > 0
2059 	   && GET_CODE (y) == ZERO_EXTEND
2060 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2061     z = gen_rtx_ZERO_EXTEND (address_mode,
2062 			     plus_constant (pointer_mode, XEXP (y, 0),
2063 					    mode_sz - 1));
2064 #endif
2065   else
2066     z = plus_constant (address_mode, y, mode_sz - 1);
2067 
2068   /* Use QImode because an odd displacement may be automatically invalid
2069      for any wider mode.  But it should be valid for a single byte.  */
2070   return (*addressp) (QImode, z, as);
2071 }
2072 
2073 /* Return 1 if ADDR is an address-expression whose effect depends
2074    on the mode of the memory reference it is used in.
2075 
2076    ADDRSPACE is the address space associated with the address.
2077 
2078    Autoincrement addressing is a typical example of mode-dependence
2079    because the amount of the increment depends on the mode.  */
2080 
2081 bool
mode_dependent_address_p(rtx addr,addr_space_t addrspace)2082 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2083 {
2084   /* Auto-increment addressing with anything other than post_modify
2085      or pre_modify always introduces a mode dependency.  Catch such
2086      cases now instead of deferring to the target.  */
2087   if (GET_CODE (addr) == PRE_INC
2088       || GET_CODE (addr) == POST_INC
2089       || GET_CODE (addr) == PRE_DEC
2090       || GET_CODE (addr) == POST_DEC)
2091     return true;
2092 
2093   return targetm.mode_dependent_address_p (addr, addrspace);
2094 }
2095 
2096 /* Like extract_insn, but save insn extracted and don't extract again, when
2097    called again for the same insn expecting that recog_data still contain the
2098    valid information.  This is used primary by gen_attr infrastructure that
2099    often does extract insn again and again.  */
2100 void
extract_insn_cached(rtx insn)2101 extract_insn_cached (rtx insn)
2102 {
2103   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2104     return;
2105   extract_insn (insn);
2106   recog_data.insn = insn;
2107 }
2108 
2109 /* Do cached extract_insn, constrain_operands and complain about failures.
2110    Used by insn_attrtab.  */
2111 void
extract_constrain_insn_cached(rtx insn)2112 extract_constrain_insn_cached (rtx insn)
2113 {
2114   extract_insn_cached (insn);
2115   if (which_alternative == -1
2116       && !constrain_operands (reload_completed))
2117     fatal_insn_not_found (insn);
2118 }
2119 
2120 /* Do cached constrain_operands and complain about failures.  */
2121 int
constrain_operands_cached(int strict)2122 constrain_operands_cached (int strict)
2123 {
2124   if (which_alternative == -1)
2125     return constrain_operands (strict);
2126   else
2127     return 1;
2128 }
2129 
2130 /* Analyze INSN and fill in recog_data.  */
2131 
2132 void
extract_insn(rtx insn)2133 extract_insn (rtx insn)
2134 {
2135   int i;
2136   int icode;
2137   int noperands;
2138   rtx body = PATTERN (insn);
2139 
2140   recog_data.n_operands = 0;
2141   recog_data.n_alternatives = 0;
2142   recog_data.n_dups = 0;
2143   recog_data.is_asm = false;
2144 
2145   switch (GET_CODE (body))
2146     {
2147     case USE:
2148     case CLOBBER:
2149     case ASM_INPUT:
2150     case ADDR_VEC:
2151     case ADDR_DIFF_VEC:
2152     case VAR_LOCATION:
2153       return;
2154 
2155     case SET:
2156       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2157 	goto asm_insn;
2158       else
2159 	goto normal_insn;
2160     case PARALLEL:
2161       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2162 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2163 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2164 	goto asm_insn;
2165       else
2166 	goto normal_insn;
2167     case ASM_OPERANDS:
2168     asm_insn:
2169       recog_data.n_operands = noperands = asm_noperands (body);
2170       if (noperands >= 0)
2171 	{
2172 	  /* This insn is an `asm' with operands.  */
2173 
2174 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2175 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2176 
2177 	  /* Now get the operand values and constraints out of the insn.  */
2178 	  decode_asm_operands (body, recog_data.operand,
2179 			       recog_data.operand_loc,
2180 			       recog_data.constraints,
2181 			       recog_data.operand_mode, NULL);
2182 	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2183 	  if (noperands > 0)
2184 	    {
2185 	      const char *p =  recog_data.constraints[0];
2186 	      recog_data.n_alternatives = 1;
2187 	      while (*p)
2188 		recog_data.n_alternatives += (*p++ == ',');
2189 	    }
2190 	  recog_data.is_asm = true;
2191 	  break;
2192 	}
2193       fatal_insn_not_found (insn);
2194 
2195     default:
2196     normal_insn:
2197       /* Ordinary insn: recognize it, get the operands via insn_extract
2198 	 and get the constraints.  */
2199 
2200       icode = recog_memoized (insn);
2201       if (icode < 0)
2202 	fatal_insn_not_found (insn);
2203 
2204       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2205       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2206       recog_data.n_dups = insn_data[icode].n_dups;
2207 
2208       insn_extract (insn);
2209 
2210       for (i = 0; i < noperands; i++)
2211 	{
2212 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2213 	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2214 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2215 	  /* VOIDmode match_operands gets mode from their real operand.  */
2216 	  if (recog_data.operand_mode[i] == VOIDmode)
2217 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2218 	}
2219     }
2220   for (i = 0; i < noperands; i++)
2221     recog_data.operand_type[i]
2222       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2223 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2224 	 : OP_IN);
2225 
2226   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2227 
2228   if (INSN_CODE (insn) < 0)
2229     for (i = 0; i < recog_data.n_alternatives; i++)
2230       recog_data.alternative_enabled_p[i] = true;
2231   else
2232     {
2233       recog_data.insn = insn;
2234       for (i = 0; i < recog_data.n_alternatives; i++)
2235 	{
2236 	  which_alternative = i;
2237 	  recog_data.alternative_enabled_p[i]
2238 	    = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2239 	}
2240     }
2241 
2242   recog_data.insn = NULL;
2243   which_alternative = -1;
2244 }
2245 
2246 /* After calling extract_insn, you can use this function to extract some
2247    information from the constraint strings into a more usable form.
2248    The collected data is stored in recog_op_alt.  */
2249 void
preprocess_constraints(void)2250 preprocess_constraints (void)
2251 {
2252   int i;
2253 
2254   for (i = 0; i < recog_data.n_operands; i++)
2255     memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2256 				 * sizeof (struct operand_alternative)));
2257 
2258   for (i = 0; i < recog_data.n_operands; i++)
2259     {
2260       int j;
2261       struct operand_alternative *op_alt;
2262       const char *p = recog_data.constraints[i];
2263 
2264       op_alt = recog_op_alt[i];
2265 
2266       for (j = 0; j < recog_data.n_alternatives; j++)
2267 	{
2268 	  op_alt[j].cl = NO_REGS;
2269 	  op_alt[j].constraint = p;
2270 	  op_alt[j].matches = -1;
2271 	  op_alt[j].matched = -1;
2272 
2273 	  if (!recog_data.alternative_enabled_p[j])
2274 	    {
2275 	      p = skip_alternative (p);
2276 	      continue;
2277 	    }
2278 
2279 	  if (*p == '\0' || *p == ',')
2280 	    {
2281 	      op_alt[j].anything_ok = 1;
2282 	      continue;
2283 	    }
2284 
2285 	  for (;;)
2286 	    {
2287 	      char c = *p;
2288 	      if (c == '#')
2289 		do
2290 		  c = *++p;
2291 		while (c != ',' && c != '\0');
2292 	      if (c == ',' || c == '\0')
2293 		{
2294 		  p++;
2295 		  break;
2296 		}
2297 
2298 	      switch (c)
2299 		{
2300 		case '=': case '+': case '*': case '%':
2301 		case 'E': case 'F': case 'G': case 'H':
2302 		case 's': case 'i': case 'n':
2303 		case 'I': case 'J': case 'K': case 'L':
2304 		case 'M': case 'N': case 'O': case 'P':
2305 		  /* These don't say anything we care about.  */
2306 		  break;
2307 
2308 		case '?':
2309 		  op_alt[j].reject += 6;
2310 		  break;
2311 		case '!':
2312 		  op_alt[j].reject += 600;
2313 		  break;
2314 		case '&':
2315 		  op_alt[j].earlyclobber = 1;
2316 		  break;
2317 
2318 		case '0': case '1': case '2': case '3': case '4':
2319 		case '5': case '6': case '7': case '8': case '9':
2320 		  {
2321 		    char *end;
2322 		    op_alt[j].matches = strtoul (p, &end, 10);
2323 		    recog_op_alt[op_alt[j].matches][j].matched = i;
2324 		    p = end;
2325 		  }
2326 		  continue;
2327 
2328 		case TARGET_MEM_CONSTRAINT:
2329 		  op_alt[j].memory_ok = 1;
2330 		  break;
2331 		case '<':
2332 		  op_alt[j].decmem_ok = 1;
2333 		  break;
2334 		case '>':
2335 		  op_alt[j].incmem_ok = 1;
2336 		  break;
2337 		case 'V':
2338 		  op_alt[j].nonoffmem_ok = 1;
2339 		  break;
2340 		case 'o':
2341 		  op_alt[j].offmem_ok = 1;
2342 		  break;
2343 		case 'X':
2344 		  op_alt[j].anything_ok = 1;
2345 		  break;
2346 
2347 		case 'p':
2348 		  op_alt[j].is_address = 1;
2349 		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2350 		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2351 					     ADDRESS, SCRATCH)];
2352 		  break;
2353 
2354 		case 'g':
2355 		case 'r':
2356 		  op_alt[j].cl =
2357 		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2358 		  break;
2359 
2360 		default:
2361 		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
2362 		    {
2363 		      op_alt[j].memory_ok = 1;
2364 		      break;
2365 		    }
2366 		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2367 		    {
2368 		      op_alt[j].is_address = 1;
2369 		      op_alt[j].cl
2370 			= (reg_class_subunion
2371 			   [(int) op_alt[j].cl]
2372 			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2373 						  ADDRESS, SCRATCH)]);
2374 		      break;
2375 		    }
2376 
2377 		  op_alt[j].cl
2378 		    = (reg_class_subunion
2379 		       [(int) op_alt[j].cl]
2380 		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2381 		  break;
2382 		}
2383 	      p += CONSTRAINT_LEN (c, p);
2384 	    }
2385 	}
2386     }
2387 }
2388 
2389 /* Check the operands of an insn against the insn's operand constraints
2390    and return 1 if they are valid.
2391    The information about the insn's operands, constraints, operand modes
2392    etc. is obtained from the global variables set up by extract_insn.
2393 
2394    WHICH_ALTERNATIVE is set to a number which indicates which
2395    alternative of constraints was matched: 0 for the first alternative,
2396    1 for the next, etc.
2397 
2398    In addition, when two operands are required to match
2399    and it happens that the output operand is (reg) while the
2400    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2401    make the output operand look like the input.
2402    This is because the output operand is the one the template will print.
2403 
2404    This is used in final, just before printing the assembler code and by
2405    the routines that determine an insn's attribute.
2406 
2407    If STRICT is a positive nonzero value, it means that we have been
2408    called after reload has been completed.  In that case, we must
2409    do all checks strictly.  If it is zero, it means that we have been called
2410    before reload has completed.  In that case, we first try to see if we can
2411    find an alternative that matches strictly.  If not, we try again, this
2412    time assuming that reload will fix up the insn.  This provides a "best
2413    guess" for the alternative and is used to compute attributes of insns prior
2414    to reload.  A negative value of STRICT is used for this internal call.  */
2415 
2416 struct funny_match
2417 {
2418   int this_op, other;
2419 };
2420 
2421 int
constrain_operands(int strict)2422 constrain_operands (int strict)
2423 {
2424   const char *constraints[MAX_RECOG_OPERANDS];
2425   int matching_operands[MAX_RECOG_OPERANDS];
2426   int earlyclobber[MAX_RECOG_OPERANDS];
2427   int c;
2428 
2429   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2430   int funny_match_index;
2431 
2432   which_alternative = 0;
2433   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2434     return 1;
2435 
2436   for (c = 0; c < recog_data.n_operands; c++)
2437     {
2438       constraints[c] = recog_data.constraints[c];
2439       matching_operands[c] = -1;
2440     }
2441 
2442   do
2443     {
2444       int seen_earlyclobber_at = -1;
2445       int opno;
2446       int lose = 0;
2447       funny_match_index = 0;
2448 
2449       if (!recog_data.alternative_enabled_p[which_alternative])
2450 	{
2451 	  int i;
2452 
2453 	  for (i = 0; i < recog_data.n_operands; i++)
2454 	    constraints[i] = skip_alternative (constraints[i]);
2455 
2456 	  which_alternative++;
2457 	  continue;
2458 	}
2459 
2460       for (opno = 0; opno < recog_data.n_operands; opno++)
2461 	{
2462 	  rtx op = recog_data.operand[opno];
2463 	  enum machine_mode mode = GET_MODE (op);
2464 	  const char *p = constraints[opno];
2465 	  int offset = 0;
2466 	  int win = 0;
2467 	  int val;
2468 	  int len;
2469 
2470 	  earlyclobber[opno] = 0;
2471 
2472 	  /* A unary operator may be accepted by the predicate, but it
2473 	     is irrelevant for matching constraints.  */
2474 	  if (UNARY_P (op))
2475 	    op = XEXP (op, 0);
2476 
2477 	  if (GET_CODE (op) == SUBREG)
2478 	    {
2479 	      if (REG_P (SUBREG_REG (op))
2480 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2481 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2482 					      GET_MODE (SUBREG_REG (op)),
2483 					      SUBREG_BYTE (op),
2484 					      GET_MODE (op));
2485 	      op = SUBREG_REG (op);
2486 	    }
2487 
2488 	  /* An empty constraint or empty alternative
2489 	     allows anything which matched the pattern.  */
2490 	  if (*p == 0 || *p == ',')
2491 	    win = 1;
2492 
2493 	  do
2494 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2495 	      {
2496 	      case '\0':
2497 		len = 0;
2498 		break;
2499 	      case ',':
2500 		c = '\0';
2501 		break;
2502 
2503 	      case '?':  case '!': case '*':  case '%':
2504 	      case '=':  case '+':
2505 		break;
2506 
2507 	      case '#':
2508 		/* Ignore rest of this alternative as far as
2509 		   constraint checking is concerned.  */
2510 		do
2511 		  p++;
2512 		while (*p && *p != ',');
2513 		len = 0;
2514 		break;
2515 
2516 	      case '&':
2517 		earlyclobber[opno] = 1;
2518 		if (seen_earlyclobber_at < 0)
2519 		  seen_earlyclobber_at = opno;
2520 		break;
2521 
2522 	      case '0':  case '1':  case '2':  case '3':  case '4':
2523 	      case '5':  case '6':  case '7':  case '8':  case '9':
2524 		{
2525 		  /* This operand must be the same as a previous one.
2526 		     This kind of constraint is used for instructions such
2527 		     as add when they take only two operands.
2528 
2529 		     Note that the lower-numbered operand is passed first.
2530 
2531 		     If we are not testing strictly, assume that this
2532 		     constraint will be satisfied.  */
2533 
2534 		  char *end;
2535 		  int match;
2536 
2537 		  match = strtoul (p, &end, 10);
2538 		  p = end;
2539 
2540 		  if (strict < 0)
2541 		    val = 1;
2542 		  else
2543 		    {
2544 		      rtx op1 = recog_data.operand[match];
2545 		      rtx op2 = recog_data.operand[opno];
2546 
2547 		      /* A unary operator may be accepted by the predicate,
2548 			 but it is irrelevant for matching constraints.  */
2549 		      if (UNARY_P (op1))
2550 			op1 = XEXP (op1, 0);
2551 		      if (UNARY_P (op2))
2552 			op2 = XEXP (op2, 0);
2553 
2554 		      val = operands_match_p (op1, op2);
2555 		    }
2556 
2557 		  matching_operands[opno] = match;
2558 		  matching_operands[match] = opno;
2559 
2560 		  if (val != 0)
2561 		    win = 1;
2562 
2563 		  /* If output is *x and input is *--x, arrange later
2564 		     to change the output to *--x as well, since the
2565 		     output op is the one that will be printed.  */
2566 		  if (val == 2 && strict > 0)
2567 		    {
2568 		      funny_match[funny_match_index].this_op = opno;
2569 		      funny_match[funny_match_index++].other = match;
2570 		    }
2571 		}
2572 		len = 0;
2573 		break;
2574 
2575 	      case 'p':
2576 		/* p is used for address_operands.  When we are called by
2577 		   gen_reload, no one will have checked that the address is
2578 		   strictly valid, i.e., that all pseudos requiring hard regs
2579 		   have gotten them.  */
2580 		if (strict <= 0
2581 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2582 						 op)))
2583 		  win = 1;
2584 		break;
2585 
2586 		/* No need to check general_operand again;
2587 		   it was done in insn-recog.c.  Well, except that reload
2588 		   doesn't check the validity of its replacements, but
2589 		   that should only matter when there's a bug.  */
2590 	      case 'g':
2591 		/* Anything goes unless it is a REG and really has a hard reg
2592 		   but the hard reg is not in the class GENERAL_REGS.  */
2593 		if (REG_P (op))
2594 		  {
2595 		    if (strict < 0
2596 			|| GENERAL_REGS == ALL_REGS
2597 			|| (reload_in_progress
2598 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2599 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2600 		      win = 1;
2601 		  }
2602 		else if (strict < 0 || general_operand (op, mode))
2603 		  win = 1;
2604 		break;
2605 
2606 	      case 'X':
2607 		/* This is used for a MATCH_SCRATCH in the cases when
2608 		   we don't actually need anything.  So anything goes
2609 		   any time.  */
2610 		win = 1;
2611 		break;
2612 
2613 	      case TARGET_MEM_CONSTRAINT:
2614 		/* Memory operands must be valid, to the extent
2615 		   required by STRICT.  */
2616 		if (MEM_P (op))
2617 		  {
2618 		    if (strict > 0
2619 			&& !strict_memory_address_addr_space_p
2620 			     (GET_MODE (op), XEXP (op, 0),
2621 			      MEM_ADDR_SPACE (op)))
2622 		      break;
2623 		    if (strict == 0
2624 			&& !memory_address_addr_space_p
2625 			     (GET_MODE (op), XEXP (op, 0),
2626 			      MEM_ADDR_SPACE (op)))
2627 		      break;
2628 		    win = 1;
2629 		  }
2630 		/* Before reload, accept what reload can turn into mem.  */
2631 		else if (strict < 0 && CONSTANT_P (op))
2632 		  win = 1;
2633 		/* During reload, accept a pseudo  */
2634 		else if (reload_in_progress && REG_P (op)
2635 			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2636 		  win = 1;
2637 		break;
2638 
2639 	      case '<':
2640 		if (MEM_P (op)
2641 		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2642 			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
2643 		  win = 1;
2644 		break;
2645 
2646 	      case '>':
2647 		if (MEM_P (op)
2648 		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
2649 			|| GET_CODE (XEXP (op, 0)) == POST_INC))
2650 		  win = 1;
2651 		break;
2652 
2653 	      case 'E':
2654 	      case 'F':
2655 		if (CONST_DOUBLE_AS_FLOAT_P (op)
2656 		    || (GET_CODE (op) == CONST_VECTOR
2657 			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2658 		  win = 1;
2659 		break;
2660 
2661 	      case 'G':
2662 	      case 'H':
2663 		if (CONST_DOUBLE_AS_FLOAT_P (op)
2664 		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2665 		  win = 1;
2666 		break;
2667 
2668 	      case 's':
2669 		if (CONST_SCALAR_INT_P (op))
2670 		  break;
2671 	      case 'i':
2672 		if (CONSTANT_P (op))
2673 		  win = 1;
2674 		break;
2675 
2676 	      case 'n':
2677 		if (CONST_SCALAR_INT_P (op))
2678 		  win = 1;
2679 		break;
2680 
2681 	      case 'I':
2682 	      case 'J':
2683 	      case 'K':
2684 	      case 'L':
2685 	      case 'M':
2686 	      case 'N':
2687 	      case 'O':
2688 	      case 'P':
2689 		if (CONST_INT_P (op)
2690 		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2691 		  win = 1;
2692 		break;
2693 
2694 	      case 'V':
2695 		if (MEM_P (op)
2696 		    && ((strict > 0 && ! offsettable_memref_p (op))
2697 			|| (strict < 0
2698 			    && !(CONSTANT_P (op) || MEM_P (op)))
2699 			|| (reload_in_progress
2700 			    && !(REG_P (op)
2701 				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2702 		  win = 1;
2703 		break;
2704 
2705 	      case 'o':
2706 		if ((strict > 0 && offsettable_memref_p (op))
2707 		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
2708 		    /* Before reload, accept what reload can handle.  */
2709 		    || (strict < 0
2710 			&& (CONSTANT_P (op) || MEM_P (op)))
2711 		    /* During reload, accept a pseudo  */
2712 		    || (reload_in_progress && REG_P (op)
2713 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
2714 		  win = 1;
2715 		break;
2716 
2717 	      default:
2718 		{
2719 		  enum reg_class cl;
2720 
2721 		  cl = (c == 'r'
2722 			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2723 		  if (cl != NO_REGS)
2724 		    {
2725 		      if (strict < 0
2726 			  || (strict == 0
2727 			      && REG_P (op)
2728 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2729 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2730 			  || (REG_P (op)
2731 			      && reg_fits_class_p (op, cl, offset, mode)))
2732 		        win = 1;
2733 		    }
2734 #ifdef EXTRA_CONSTRAINT_STR
2735 		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
2736 		    win = 1;
2737 
2738 		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2739 			   /* Every memory operand can be reloaded to fit.  */
2740 			   && ((strict < 0 && MEM_P (op))
2741 			       /* Before reload, accept what reload can turn
2742 				  into mem.  */
2743 			       || (strict < 0 && CONSTANT_P (op))
2744 			       /* During reload, accept a pseudo  */
2745 			       || (reload_in_progress && REG_P (op)
2746 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2747 		    win = 1;
2748 		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2749 			   /* Every address operand can be reloaded to fit.  */
2750 			   && strict < 0)
2751 		    win = 1;
2752 		  /* Cater to architectures like IA-64 that define extra memory
2753 		     constraints without using define_memory_constraint.  */
2754 		  else if (reload_in_progress
2755 			   && REG_P (op)
2756 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2757 			   && reg_renumber[REGNO (op)] < 0
2758 			   && reg_equiv_mem (REGNO (op)) != 0
2759 			   && EXTRA_CONSTRAINT_STR
2760 			      (reg_equiv_mem (REGNO (op)), c, p))
2761 		    win = 1;
2762 #endif
2763 		  break;
2764 		}
2765 	      }
2766 	  while (p += len, c);
2767 
2768 	  constraints[opno] = p;
2769 	  /* If this operand did not win somehow,
2770 	     this alternative loses.  */
2771 	  if (! win)
2772 	    lose = 1;
2773 	}
2774       /* This alternative won; the operands are ok.
2775 	 Change whichever operands this alternative says to change.  */
2776       if (! lose)
2777 	{
2778 	  int opno, eopno;
2779 
2780 	  /* See if any earlyclobber operand conflicts with some other
2781 	     operand.  */
2782 
2783 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2784 	    for (eopno = seen_earlyclobber_at;
2785 		 eopno < recog_data.n_operands;
2786 		 eopno++)
2787 	      /* Ignore earlyclobber operands now in memory,
2788 		 because we would often report failure when we have
2789 		 two memory operands, one of which was formerly a REG.  */
2790 	      if (earlyclobber[eopno]
2791 		  && REG_P (recog_data.operand[eopno]))
2792 		for (opno = 0; opno < recog_data.n_operands; opno++)
2793 		  if ((MEM_P (recog_data.operand[opno])
2794 		       || recog_data.operand_type[opno] != OP_OUT)
2795 		      && opno != eopno
2796 		      /* Ignore things like match_operator operands.  */
2797 		      && *recog_data.constraints[opno] != 0
2798 		      && ! (matching_operands[opno] == eopno
2799 			    && operands_match_p (recog_data.operand[opno],
2800 						 recog_data.operand[eopno]))
2801 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2802 						   recog_data.operand[eopno]))
2803 		    lose = 1;
2804 
2805 	  if (! lose)
2806 	    {
2807 	      while (--funny_match_index >= 0)
2808 		{
2809 		  recog_data.operand[funny_match[funny_match_index].other]
2810 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2811 		}
2812 
2813 #ifdef AUTO_INC_DEC
2814 	      /* For operands without < or > constraints reject side-effects.  */
2815 	      if (recog_data.is_asm)
2816 		{
2817 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2818 		    if (MEM_P (recog_data.operand[opno]))
2819 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2820 			{
2821 			case PRE_INC:
2822 			case POST_INC:
2823 			case PRE_DEC:
2824 			case POST_DEC:
2825 			case PRE_MODIFY:
2826 			case POST_MODIFY:
2827 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2828 			      && strchr (recog_data.constraints[opno], '>')
2829 				 == NULL)
2830 			    return 0;
2831 			  break;
2832 			default:
2833 			  break;
2834 			}
2835 		}
2836 #endif
2837 	      return 1;
2838 	    }
2839 	}
2840 
2841       which_alternative++;
2842     }
2843   while (which_alternative < recog_data.n_alternatives);
2844 
2845   which_alternative = -1;
2846   /* If we are about to reject this, but we are not to test strictly,
2847      try a very loose test.  Only return failure if it fails also.  */
2848   if (strict == 0)
2849     return constrain_operands (-1);
2850   else
2851     return 0;
2852 }
2853 
2854 /* Return true iff OPERAND (assumed to be a REG rtx)
2855    is a hard reg in class CLASS when its regno is offset by OFFSET
2856    and changed to mode MODE.
2857    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2858 
2859 bool
reg_fits_class_p(const_rtx operand,reg_class_t cl,int offset,enum machine_mode mode)2860 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2861 		  enum machine_mode mode)
2862 {
2863   unsigned int regno = REGNO (operand);
2864 
2865   if (cl == NO_REGS)
2866     return false;
2867 
2868   /* Regno must not be a pseudo register.  Offset may be negative.  */
2869   return (HARD_REGISTER_NUM_P (regno)
2870 	  && HARD_REGISTER_NUM_P (regno + offset)
2871 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2872 				regno + offset));
2873 }
2874 
2875 /* Split single instruction.  Helper function for split_all_insns and
2876    split_all_insns_noflow.  Return last insn in the sequence if successful,
2877    or NULL if unsuccessful.  */
2878 
2879 static rtx
split_insn(rtx insn)2880 split_insn (rtx insn)
2881 {
2882   /* Split insns here to get max fine-grain parallelism.  */
2883   rtx first = PREV_INSN (insn);
2884   rtx last = try_split (PATTERN (insn), insn, 1);
2885   rtx insn_set, last_set, note;
2886 
2887   if (last == insn)
2888     return NULL_RTX;
2889 
2890   /* If the original instruction was a single set that was known to be
2891      equivalent to a constant, see if we can say the same about the last
2892      instruction in the split sequence.  The two instructions must set
2893      the same destination.  */
2894   insn_set = single_set (insn);
2895   if (insn_set)
2896     {
2897       last_set = single_set (last);
2898       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2899 	{
2900 	  note = find_reg_equal_equiv_note (insn);
2901 	  if (note && CONSTANT_P (XEXP (note, 0)))
2902 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2903 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2904 	    set_unique_reg_note (last, REG_EQUAL,
2905 				 copy_rtx (SET_SRC (insn_set)));
2906 	}
2907     }
2908 
2909   /* try_split returns the NOTE that INSN became.  */
2910   SET_INSN_DELETED (insn);
2911 
2912   /* ??? Coddle to md files that generate subregs in post-reload
2913      splitters instead of computing the proper hard register.  */
2914   if (reload_completed && first != last)
2915     {
2916       first = NEXT_INSN (first);
2917       for (;;)
2918 	{
2919 	  if (INSN_P (first))
2920 	    cleanup_subreg_operands (first);
2921 	  if (first == last)
2922 	    break;
2923 	  first = NEXT_INSN (first);
2924 	}
2925     }
2926 
2927   return last;
2928 }
2929 
2930 /* Split all insns in the function.  If UPD_LIFE, update life info after.  */
2931 
2932 void
split_all_insns(void)2933 split_all_insns (void)
2934 {
2935   sbitmap blocks;
2936   bool changed;
2937   basic_block bb;
2938 
2939   blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2940   bitmap_clear (blocks);
2941   changed = false;
2942 
2943   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2944     {
2945       rtx insn, next;
2946       bool finish = false;
2947 
2948       rtl_profile_for_bb (bb);
2949       for (insn = BB_HEAD (bb); !finish ; insn = next)
2950 	{
2951 	  /* Can't use `next_real_insn' because that might go across
2952 	     CODE_LABELS and short-out basic blocks.  */
2953 	  next = NEXT_INSN (insn);
2954 	  finish = (insn == BB_END (bb));
2955 	  if (INSN_P (insn))
2956 	    {
2957 	      rtx set = single_set (insn);
2958 
2959 	      /* Don't split no-op move insns.  These should silently
2960 		 disappear later in final.  Splitting such insns would
2961 		 break the code that handles LIBCALL blocks.  */
2962 	      if (set && set_noop_p (set))
2963 		{
2964 		  /* Nops get in the way while scheduling, so delete them
2965 		     now if register allocation has already been done.  It
2966 		     is too risky to try to do this before register
2967 		     allocation, and there are unlikely to be very many
2968 		     nops then anyways.  */
2969 		  if (reload_completed)
2970 		      delete_insn_and_edges (insn);
2971 		}
2972 	      else
2973 		{
2974 		  if (split_insn (insn))
2975 		    {
2976 		      bitmap_set_bit (blocks, bb->index);
2977 		      changed = true;
2978 		    }
2979 		}
2980 	    }
2981 	}
2982     }
2983 
2984   default_rtl_profile ();
2985   if (changed)
2986     find_many_sub_basic_blocks (blocks);
2987 
2988 #ifdef ENABLE_CHECKING
2989   verify_flow_info ();
2990 #endif
2991 
2992   sbitmap_free (blocks);
2993 }
2994 
2995 /* Same as split_all_insns, but do not expect CFG to be available.
2996    Used by machine dependent reorg passes.  */
2997 
2998 unsigned int
split_all_insns_noflow(void)2999 split_all_insns_noflow (void)
3000 {
3001   rtx next, insn;
3002 
3003   for (insn = get_insns (); insn; insn = next)
3004     {
3005       next = NEXT_INSN (insn);
3006       if (INSN_P (insn))
3007 	{
3008 	  /* Don't split no-op move insns.  These should silently
3009 	     disappear later in final.  Splitting such insns would
3010 	     break the code that handles LIBCALL blocks.  */
3011 	  rtx set = single_set (insn);
3012 	  if (set && set_noop_p (set))
3013 	    {
3014 	      /* Nops get in the way while scheduling, so delete them
3015 		 now if register allocation has already been done.  It
3016 		 is too risky to try to do this before register
3017 		 allocation, and there are unlikely to be very many
3018 		 nops then anyways.
3019 
3020 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3021 	      if (reload_completed)
3022 		delete_insn_and_edges (insn);
3023 	    }
3024 	  else
3025 	    split_insn (insn);
3026 	}
3027     }
3028   return 0;
3029 }
3030 
3031 #ifdef HAVE_peephole2
3032 struct peep2_insn_data
3033 {
3034   rtx insn;
3035   regset live_before;
3036 };
3037 
3038 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3039 static int peep2_current;
3040 
3041 static bool peep2_do_rebuild_jump_labels;
3042 static bool peep2_do_cleanup_cfg;
3043 
3044 /* The number of instructions available to match a peep2.  */
3045 int peep2_current_count;
3046 
3047 /* A non-insn marker indicating the last insn of the block.
3048    The live_before regset for this element is correct, indicating
3049    DF_LIVE_OUT for the block.  */
3050 #define PEEP2_EOB	pc_rtx
3051 
3052 /* Wrap N to fit into the peep2_insn_data buffer.  */
3053 
3054 static int
peep2_buf_position(int n)3055 peep2_buf_position (int n)
3056 {
3057   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3058     n -= MAX_INSNS_PER_PEEP2 + 1;
3059   return n;
3060 }
3061 
3062 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3063    does not exist.  Used by the recognizer to find the next insn to match
3064    in a multi-insn pattern.  */
3065 
3066 rtx
peep2_next_insn(int n)3067 peep2_next_insn (int n)
3068 {
3069   gcc_assert (n <= peep2_current_count);
3070 
3071   n = peep2_buf_position (peep2_current + n);
3072 
3073   return peep2_insn_data[n].insn;
3074 }
3075 
3076 /* Return true if REGNO is dead before the Nth non-note insn
3077    after `current'.  */
3078 
3079 int
peep2_regno_dead_p(int ofs,int regno)3080 peep2_regno_dead_p (int ofs, int regno)
3081 {
3082   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3083 
3084   ofs = peep2_buf_position (peep2_current + ofs);
3085 
3086   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3087 
3088   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3089 }
3090 
3091 /* Similarly for a REG.  */
3092 
3093 int
peep2_reg_dead_p(int ofs,rtx reg)3094 peep2_reg_dead_p (int ofs, rtx reg)
3095 {
3096   int regno, n;
3097 
3098   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3099 
3100   ofs = peep2_buf_position (peep2_current + ofs);
3101 
3102   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3103 
3104   regno = REGNO (reg);
3105   n = hard_regno_nregs[regno][GET_MODE (reg)];
3106   while (--n >= 0)
3107     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3108       return 0;
3109   return 1;
3110 }
3111 
3112 /* Regno offset to be used in the register search.  */
3113 static int search_ofs;
3114 
3115 /* Try to find a hard register of mode MODE, matching the register class in
3116    CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3117    remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
3118    in which case the only condition is that the register must be available
3119    before CURRENT_INSN.
3120    Registers that already have bits set in REG_SET will not be considered.
3121 
3122    If an appropriate register is available, it will be returned and the
3123    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3124    returned.  */
3125 
3126 rtx
peep2_find_free_register(int from,int to,const char * class_str,enum machine_mode mode,HARD_REG_SET * reg_set)3127 peep2_find_free_register (int from, int to, const char *class_str,
3128 			  enum machine_mode mode, HARD_REG_SET *reg_set)
3129 {
3130   enum reg_class cl;
3131   HARD_REG_SET live;
3132   df_ref *def_rec;
3133   int i;
3134 
3135   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3136   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3137 
3138   from = peep2_buf_position (peep2_current + from);
3139   to = peep2_buf_position (peep2_current + to);
3140 
3141   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3142   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3143 
3144   while (from != to)
3145     {
3146       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3147 
3148       /* Don't use registers set or clobbered by the insn.  */
3149       for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3150 	   *def_rec; def_rec++)
3151 	SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3152 
3153       from = peep2_buf_position (from + 1);
3154     }
3155 
3156   cl = (class_str[0] == 'r' ? GENERAL_REGS
3157 	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3158 
3159   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3160     {
3161       int raw_regno, regno, success, j;
3162 
3163       /* Distribute the free registers as much as possible.  */
3164       raw_regno = search_ofs + i;
3165       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3166 	raw_regno -= FIRST_PSEUDO_REGISTER;
3167 #ifdef REG_ALLOC_ORDER
3168       regno = reg_alloc_order[raw_regno];
3169 #else
3170       regno = raw_regno;
3171 #endif
3172 
3173       /* Can it support the mode we need?  */
3174       if (! HARD_REGNO_MODE_OK (regno, mode))
3175 	continue;
3176 
3177       success = 1;
3178       for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3179 	{
3180 	  /* Don't allocate fixed registers.  */
3181 	  if (fixed_regs[regno + j])
3182 	    {
3183 	      success = 0;
3184 	      break;
3185 	    }
3186 	  /* Don't allocate global registers.  */
3187 	  if (global_regs[regno + j])
3188 	    {
3189 	      success = 0;
3190 	      break;
3191 	    }
3192 	  /* Make sure the register is of the right class.  */
3193 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3194 	    {
3195 	      success = 0;
3196 	      break;
3197 	    }
3198 	  /* And that we don't create an extra save/restore.  */
3199 	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3200 	    {
3201 	      success = 0;
3202 	      break;
3203 	    }
3204 
3205 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3206 	    {
3207 	      success = 0;
3208 	      break;
3209 	    }
3210 
3211 	  /* And we don't clobber traceback for noreturn functions.  */
3212 	  if ((regno + j == FRAME_POINTER_REGNUM
3213 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3214 	      && (! reload_completed || frame_pointer_needed))
3215 	    {
3216 	      success = 0;
3217 	      break;
3218 	    }
3219 
3220 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3221 	      || TEST_HARD_REG_BIT (live, regno + j))
3222 	    {
3223 	      success = 0;
3224 	      break;
3225 	    }
3226 	}
3227 
3228       if (success)
3229 	{
3230 	  add_to_hard_reg_set (reg_set, mode, regno);
3231 
3232 	  /* Start the next search with the next register.  */
3233 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3234 	    raw_regno = 0;
3235 	  search_ofs = raw_regno;
3236 
3237 	  return gen_rtx_REG (mode, regno);
3238 	}
3239     }
3240 
3241   search_ofs = 0;
3242   return NULL_RTX;
3243 }
3244 
3245 /* Forget all currently tracked instructions, only remember current
3246    LIVE regset.  */
3247 
3248 static void
peep2_reinit_state(regset live)3249 peep2_reinit_state (regset live)
3250 {
3251   int i;
3252 
3253   /* Indicate that all slots except the last holds invalid data.  */
3254   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3255     peep2_insn_data[i].insn = NULL_RTX;
3256   peep2_current_count = 0;
3257 
3258   /* Indicate that the last slot contains live_after data.  */
3259   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3260   peep2_current = MAX_INSNS_PER_PEEP2;
3261 
3262   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3263 }
3264 
3265 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3266    starting at INSN.  Perform the replacement, removing the old insns and
3267    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3268    if the replacement is rejected.  */
3269 
3270 static rtx
peep2_attempt(basic_block bb,rtx insn,int match_len,rtx attempt)3271 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3272 {
3273   int i;
3274   rtx last, eh_note, as_note, before_try, x;
3275   rtx old_insn, new_insn;
3276   bool was_call = false;
3277 
3278   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3279      match more than one insn, or to be split into more than one insn.  */
3280   old_insn = peep2_insn_data[peep2_current].insn;
3281   if (RTX_FRAME_RELATED_P (old_insn))
3282     {
3283       bool any_note = false;
3284       rtx note;
3285 
3286       if (match_len != 0)
3287 	return NULL;
3288 
3289       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3290 	 may be in the stream for the purpose of register allocation.  */
3291       if (active_insn_p (attempt))
3292 	new_insn = attempt;
3293       else
3294 	new_insn = next_active_insn (attempt);
3295       if (next_active_insn (new_insn))
3296 	return NULL;
3297 
3298       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3299       RTX_FRAME_RELATED_P (new_insn) = 1;
3300 
3301       /* Allow the backend to fill in a note during the split.  */
3302       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3303 	switch (REG_NOTE_KIND (note))
3304 	  {
3305 	  case REG_FRAME_RELATED_EXPR:
3306 	  case REG_CFA_DEF_CFA:
3307 	  case REG_CFA_ADJUST_CFA:
3308 	  case REG_CFA_OFFSET:
3309 	  case REG_CFA_REGISTER:
3310 	  case REG_CFA_EXPRESSION:
3311 	  case REG_CFA_RESTORE:
3312 	  case REG_CFA_SET_VDRAP:
3313 	    any_note = true;
3314 	    break;
3315 	  default:
3316 	    break;
3317 	  }
3318 
3319       /* If the backend didn't supply a note, copy one over.  */
3320       if (!any_note)
3321         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3322 	  switch (REG_NOTE_KIND (note))
3323 	    {
3324 	    case REG_FRAME_RELATED_EXPR:
3325 	    case REG_CFA_DEF_CFA:
3326 	    case REG_CFA_ADJUST_CFA:
3327 	    case REG_CFA_OFFSET:
3328 	    case REG_CFA_REGISTER:
3329 	    case REG_CFA_EXPRESSION:
3330 	    case REG_CFA_RESTORE:
3331 	    case REG_CFA_SET_VDRAP:
3332 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3333 	      any_note = true;
3334 	      break;
3335 	    default:
3336 	      break;
3337 	    }
3338 
3339       /* If there still isn't a note, make sure the unwind info sees the
3340 	 same expression as before the split.  */
3341       if (!any_note)
3342 	{
3343 	  rtx old_set, new_set;
3344 
3345 	  /* The old insn had better have been simple, or annotated.  */
3346 	  old_set = single_set (old_insn);
3347 	  gcc_assert (old_set != NULL);
3348 
3349 	  new_set = single_set (new_insn);
3350 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3351 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3352 	}
3353 
3354       /* Copy prologue/epilogue status.  This is required in order to keep
3355 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3356       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3357     }
3358 
3359   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3360      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3361      cfg-related call notes.  */
3362   for (i = 0; i <= match_len; ++i)
3363     {
3364       int j;
3365       rtx note;
3366 
3367       j = peep2_buf_position (peep2_current + i);
3368       old_insn = peep2_insn_data[j].insn;
3369       if (!CALL_P (old_insn))
3370 	continue;
3371       was_call = true;
3372 
3373       new_insn = attempt;
3374       while (new_insn != NULL_RTX)
3375 	{
3376 	  if (CALL_P (new_insn))
3377 	    break;
3378 	  new_insn = NEXT_INSN (new_insn);
3379 	}
3380 
3381       gcc_assert (new_insn != NULL_RTX);
3382 
3383       CALL_INSN_FUNCTION_USAGE (new_insn)
3384 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3385 
3386       for (note = REG_NOTES (old_insn);
3387 	   note;
3388 	   note = XEXP (note, 1))
3389 	switch (REG_NOTE_KIND (note))
3390 	  {
3391 	  case REG_NORETURN:
3392 	  case REG_SETJMP:
3393 	  case REG_TM:
3394 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3395 			  XEXP (note, 0));
3396 	    break;
3397 	  default:
3398 	    /* Discard all other reg notes.  */
3399 	    break;
3400 	  }
3401 
3402       /* Croak if there is another call in the sequence.  */
3403       while (++i <= match_len)
3404 	{
3405 	  j = peep2_buf_position (peep2_current + i);
3406 	  old_insn = peep2_insn_data[j].insn;
3407 	  gcc_assert (!CALL_P (old_insn));
3408 	}
3409       break;
3410     }
3411 
3412   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3413      move those notes over to the new sequence.  */
3414   as_note = NULL;
3415   for (i = match_len; i >= 0; --i)
3416     {
3417       int j = peep2_buf_position (peep2_current + i);
3418       old_insn = peep2_insn_data[j].insn;
3419 
3420       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3421       if (as_note)
3422 	break;
3423     }
3424 
3425   i = peep2_buf_position (peep2_current + match_len);
3426   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3427 
3428   /* Replace the old sequence with the new.  */
3429   last = emit_insn_after_setloc (attempt,
3430 				 peep2_insn_data[i].insn,
3431 				 INSN_LOCATION (peep2_insn_data[i].insn));
3432   before_try = PREV_INSN (insn);
3433   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3434 
3435   /* Re-insert the EH_REGION notes.  */
3436   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3437     {
3438       edge eh_edge;
3439       edge_iterator ei;
3440 
3441       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3442 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3443 	  break;
3444 
3445       if (eh_note)
3446 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3447 
3448       if (eh_edge)
3449 	for (x = last; x != before_try; x = PREV_INSN (x))
3450 	  if (x != BB_END (bb)
3451 	      && (can_throw_internal (x)
3452 		  || can_nonlocal_goto (x)))
3453 	    {
3454 	      edge nfte, nehe;
3455 	      int flags;
3456 
3457 	      nfte = split_block (bb, x);
3458 	      flags = (eh_edge->flags
3459 		       & (EDGE_EH | EDGE_ABNORMAL));
3460 	      if (CALL_P (x))
3461 		flags |= EDGE_ABNORMAL_CALL;
3462 	      nehe = make_edge (nfte->src, eh_edge->dest,
3463 				flags);
3464 
3465 	      nehe->probability = eh_edge->probability;
3466 	      nfte->probability
3467 		= REG_BR_PROB_BASE - nehe->probability;
3468 
3469 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3470 	      bb = nfte->src;
3471 	      eh_edge = nehe;
3472 	    }
3473 
3474       /* Converting possibly trapping insn to non-trapping is
3475 	 possible.  Zap dummy outgoing edges.  */
3476       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3477     }
3478 
3479   /* Re-insert the ARGS_SIZE notes.  */
3480   if (as_note)
3481     fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3482 
3483   /* If we generated a jump instruction, it won't have
3484      JUMP_LABEL set.  Recompute after we're done.  */
3485   for (x = last; x != before_try; x = PREV_INSN (x))
3486     if (JUMP_P (x))
3487       {
3488 	peep2_do_rebuild_jump_labels = true;
3489 	break;
3490       }
3491 
3492   return last;
3493 }
3494 
3495 /* After performing a replacement in basic block BB, fix up the life
3496    information in our buffer.  LAST is the last of the insns that we
3497    emitted as a replacement.  PREV is the insn before the start of
3498    the replacement.  MATCH_LEN is the number of instructions that were
3499    matched, and which now need to be replaced in the buffer.  */
3500 
3501 static void
peep2_update_life(basic_block bb,int match_len,rtx last,rtx prev)3502 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3503 {
3504   int i = peep2_buf_position (peep2_current + match_len + 1);
3505   rtx x;
3506   regset_head live;
3507 
3508   INIT_REG_SET (&live);
3509   COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3510 
3511   gcc_assert (peep2_current_count >= match_len + 1);
3512   peep2_current_count -= match_len + 1;
3513 
3514   x = last;
3515   do
3516     {
3517       if (INSN_P (x))
3518 	{
3519 	  df_insn_rescan (x);
3520 	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3521 	    {
3522 	      peep2_current_count++;
3523 	      if (--i < 0)
3524 		i = MAX_INSNS_PER_PEEP2;
3525 	      peep2_insn_data[i].insn = x;
3526 	      df_simulate_one_insn_backwards (bb, x, &live);
3527 	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3528 	    }
3529 	}
3530       x = PREV_INSN (x);
3531     }
3532   while (x != prev);
3533   CLEAR_REG_SET (&live);
3534 
3535   peep2_current = i;
3536 }
3537 
3538 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3539    Return true if we added it, false otherwise.  The caller will try to match
3540    peepholes against the buffer if we return false; otherwise it will try to
3541    add more instructions to the buffer.  */
3542 
3543 static bool
peep2_fill_buffer(basic_block bb,rtx insn,regset live)3544 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3545 {
3546   int pos;
3547 
3548   /* Once we have filled the maximum number of insns the buffer can hold,
3549      allow the caller to match the insns against peepholes.  We wait until
3550      the buffer is full in case the target has similar peepholes of different
3551      length; we always want to match the longest if possible.  */
3552   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3553     return false;
3554 
3555   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3556      any other pattern, lest it change the semantics of the frame info.  */
3557   if (RTX_FRAME_RELATED_P (insn))
3558     {
3559       /* Let the buffer drain first.  */
3560       if (peep2_current_count > 0)
3561 	return false;
3562       /* Now the insn will be the only thing in the buffer.  */
3563     }
3564 
3565   pos = peep2_buf_position (peep2_current + peep2_current_count);
3566   peep2_insn_data[pos].insn = insn;
3567   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3568   peep2_current_count++;
3569 
3570   df_simulate_one_insn_forwards (bb, insn, live);
3571   return true;
3572 }
3573 
3574 /* Perform the peephole2 optimization pass.  */
3575 
3576 static void
peephole2_optimize(void)3577 peephole2_optimize (void)
3578 {
3579   rtx insn;
3580   bitmap live;
3581   int i;
3582   basic_block bb;
3583 
3584   peep2_do_cleanup_cfg = false;
3585   peep2_do_rebuild_jump_labels = false;
3586 
3587   df_set_flags (DF_LR_RUN_DCE);
3588   df_note_add_problem ();
3589   df_analyze ();
3590 
3591   /* Initialize the regsets we're going to use.  */
3592   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3593     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3594   search_ofs = 0;
3595   live = BITMAP_ALLOC (&reg_obstack);
3596 
3597   FOR_EACH_BB_REVERSE_FN (bb, cfun)
3598     {
3599       bool past_end = false;
3600       int pos;
3601 
3602       rtl_profile_for_bb (bb);
3603 
3604       /* Start up propagation.  */
3605       bitmap_copy (live, DF_LR_IN (bb));
3606       df_simulate_initialize_forwards (bb, live);
3607       peep2_reinit_state (live);
3608 
3609       insn = BB_HEAD (bb);
3610       for (;;)
3611 	{
3612 	  rtx attempt, head;
3613 	  int match_len;
3614 
3615 	  if (!past_end && !NONDEBUG_INSN_P (insn))
3616 	    {
3617 	    next_insn:
3618 	      insn = NEXT_INSN (insn);
3619 	      if (insn == NEXT_INSN (BB_END (bb)))
3620 		past_end = true;
3621 	      continue;
3622 	    }
3623 	  if (!past_end && peep2_fill_buffer (bb, insn, live))
3624 	    goto next_insn;
3625 
3626 	  /* If we did not fill an empty buffer, it signals the end of the
3627 	     block.  */
3628 	  if (peep2_current_count == 0)
3629 	    break;
3630 
3631 	  /* The buffer filled to the current maximum, so try to match.  */
3632 
3633 	  pos = peep2_buf_position (peep2_current + peep2_current_count);
3634 	  peep2_insn_data[pos].insn = PEEP2_EOB;
3635 	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3636 
3637 	  /* Match the peephole.  */
3638 	  head = peep2_insn_data[peep2_current].insn;
3639 	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
3640 	  if (attempt != NULL)
3641 	    {
3642 	      rtx last = peep2_attempt (bb, head, match_len, attempt);
3643 	      if (last)
3644 		{
3645 		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3646 		  continue;
3647 		}
3648 	    }
3649 
3650 	  /* No match: advance the buffer by one insn.  */
3651 	  peep2_current = peep2_buf_position (peep2_current + 1);
3652 	  peep2_current_count--;
3653 	}
3654     }
3655 
3656   default_rtl_profile ();
3657   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3658     BITMAP_FREE (peep2_insn_data[i].live_before);
3659   BITMAP_FREE (live);
3660   if (peep2_do_rebuild_jump_labels)
3661     rebuild_jump_labels (get_insns ());
3662   if (peep2_do_cleanup_cfg)
3663     cleanup_cfg (CLEANUP_CFG_CHANGED);
3664 }
3665 #endif /* HAVE_peephole2 */
3666 
3667 /* Common predicates for use with define_bypass.  */
3668 
3669 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3670    data not the address operand(s) of the store.  IN_INSN and OUT_INSN
3671    must be either a single_set or a PARALLEL with SETs inside.  */
3672 
3673 int
store_data_bypass_p(rtx out_insn,rtx in_insn)3674 store_data_bypass_p (rtx out_insn, rtx in_insn)
3675 {
3676   rtx out_set, in_set;
3677   rtx out_pat, in_pat;
3678   rtx out_exp, in_exp;
3679   int i, j;
3680 
3681   in_set = single_set (in_insn);
3682   if (in_set)
3683     {
3684       if (!MEM_P (SET_DEST (in_set)))
3685 	return false;
3686 
3687       out_set = single_set (out_insn);
3688       if (out_set)
3689         {
3690           if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3691             return false;
3692         }
3693       else
3694         {
3695           out_pat = PATTERN (out_insn);
3696 
3697 	  if (GET_CODE (out_pat) != PARALLEL)
3698 	    return false;
3699 
3700           for (i = 0; i < XVECLEN (out_pat, 0); i++)
3701           {
3702             out_exp = XVECEXP (out_pat, 0, i);
3703 
3704             if (GET_CODE (out_exp) == CLOBBER)
3705               continue;
3706 
3707             gcc_assert (GET_CODE (out_exp) == SET);
3708 
3709             if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3710               return false;
3711           }
3712       }
3713     }
3714   else
3715     {
3716       in_pat = PATTERN (in_insn);
3717       gcc_assert (GET_CODE (in_pat) == PARALLEL);
3718 
3719       for (i = 0; i < XVECLEN (in_pat, 0); i++)
3720 	{
3721 	  in_exp = XVECEXP (in_pat, 0, i);
3722 
3723 	  if (GET_CODE (in_exp) == CLOBBER)
3724 	    continue;
3725 
3726 	  gcc_assert (GET_CODE (in_exp) == SET);
3727 
3728 	  if (!MEM_P (SET_DEST (in_exp)))
3729 	    return false;
3730 
3731           out_set = single_set (out_insn);
3732           if (out_set)
3733             {
3734               if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3735                 return false;
3736             }
3737           else
3738             {
3739               out_pat = PATTERN (out_insn);
3740               gcc_assert (GET_CODE (out_pat) == PARALLEL);
3741 
3742               for (j = 0; j < XVECLEN (out_pat, 0); j++)
3743                 {
3744                   out_exp = XVECEXP (out_pat, 0, j);
3745 
3746                   if (GET_CODE (out_exp) == CLOBBER)
3747                     continue;
3748 
3749                   gcc_assert (GET_CODE (out_exp) == SET);
3750 
3751                   if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3752                     return false;
3753                 }
3754             }
3755         }
3756     }
3757 
3758   return true;
3759 }
3760 
3761 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3762    condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3763    or multiple set; IN_INSN should be single_set for truth, but for convenience
3764    of insn categorization may be any JUMP or CALL insn.  */
3765 
3766 int
if_test_bypass_p(rtx out_insn,rtx in_insn)3767 if_test_bypass_p (rtx out_insn, rtx in_insn)
3768 {
3769   rtx out_set, in_set;
3770 
3771   in_set = single_set (in_insn);
3772   if (! in_set)
3773     {
3774       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3775       return false;
3776     }
3777 
3778   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3779     return false;
3780   in_set = SET_SRC (in_set);
3781 
3782   out_set = single_set (out_insn);
3783   if (out_set)
3784     {
3785       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3786 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3787 	return false;
3788     }
3789   else
3790     {
3791       rtx out_pat;
3792       int i;
3793 
3794       out_pat = PATTERN (out_insn);
3795       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3796 
3797       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3798 	{
3799 	  rtx exp = XVECEXP (out_pat, 0, i);
3800 
3801 	  if (GET_CODE (exp) == CLOBBER)
3802 	    continue;
3803 
3804 	  gcc_assert (GET_CODE (exp) == SET);
3805 
3806 	  if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3807 	      || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3808 	    return false;
3809 	}
3810     }
3811 
3812   return true;
3813 }
3814 
3815 static bool
gate_handle_peephole2(void)3816 gate_handle_peephole2 (void)
3817 {
3818   return (optimize > 0 && flag_peephole2);
3819 }
3820 
3821 static unsigned int
rest_of_handle_peephole2(void)3822 rest_of_handle_peephole2 (void)
3823 {
3824 #ifdef HAVE_peephole2
3825   peephole2_optimize ();
3826 #endif
3827   return 0;
3828 }
3829 
3830 namespace {
3831 
3832 const pass_data pass_data_peephole2 =
3833 {
3834   RTL_PASS, /* type */
3835   "peephole2", /* name */
3836   OPTGROUP_NONE, /* optinfo_flags */
3837   true, /* has_gate */
3838   true, /* has_execute */
3839   TV_PEEPHOLE2, /* tv_id */
3840   0, /* properties_required */
3841   0, /* properties_provided */
3842   0, /* properties_destroyed */
3843   0, /* todo_flags_start */
3844   ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3845 };
3846 
3847 class pass_peephole2 : public rtl_opt_pass
3848 {
3849 public:
pass_peephole2(gcc::context * ctxt)3850   pass_peephole2 (gcc::context *ctxt)
3851     : rtl_opt_pass (pass_data_peephole2, ctxt)
3852   {}
3853 
3854   /* opt_pass methods: */
3855   /* The epiphany backend creates a second instance of this pass, so we need
3856      a clone method.  */
clone()3857   opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
gate()3858   bool gate () { return gate_handle_peephole2 (); }
execute()3859   unsigned int execute () { return rest_of_handle_peephole2 (); }
3860 
3861 }; // class pass_peephole2
3862 
3863 } // anon namespace
3864 
3865 rtl_opt_pass *
make_pass_peephole2(gcc::context * ctxt)3866 make_pass_peephole2 (gcc::context *ctxt)
3867 {
3868   return new pass_peephole2 (ctxt);
3869 }
3870 
3871 static unsigned int
rest_of_handle_split_all_insns(void)3872 rest_of_handle_split_all_insns (void)
3873 {
3874   split_all_insns ();
3875   return 0;
3876 }
3877 
3878 namespace {
3879 
3880 const pass_data pass_data_split_all_insns =
3881 {
3882   RTL_PASS, /* type */
3883   "split1", /* name */
3884   OPTGROUP_NONE, /* optinfo_flags */
3885   false, /* has_gate */
3886   true, /* has_execute */
3887   TV_NONE, /* tv_id */
3888   0, /* properties_required */
3889   0, /* properties_provided */
3890   0, /* properties_destroyed */
3891   0, /* todo_flags_start */
3892   0, /* todo_flags_finish */
3893 };
3894 
3895 class pass_split_all_insns : public rtl_opt_pass
3896 {
3897 public:
pass_split_all_insns(gcc::context * ctxt)3898   pass_split_all_insns (gcc::context *ctxt)
3899     : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3900   {}
3901 
3902   /* opt_pass methods: */
3903   /* The epiphany backend creates a second instance of this pass, so
3904      we need a clone method.  */
clone()3905   opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
execute()3906   unsigned int execute () { return rest_of_handle_split_all_insns (); }
3907 
3908 }; // class pass_split_all_insns
3909 
3910 } // anon namespace
3911 
3912 rtl_opt_pass *
make_pass_split_all_insns(gcc::context * ctxt)3913 make_pass_split_all_insns (gcc::context *ctxt)
3914 {
3915   return new pass_split_all_insns (ctxt);
3916 }
3917 
3918 static unsigned int
rest_of_handle_split_after_reload(void)3919 rest_of_handle_split_after_reload (void)
3920 {
3921   /* If optimizing, then go ahead and split insns now.  */
3922 #ifndef STACK_REGS
3923   if (optimize > 0)
3924 #endif
3925     split_all_insns ();
3926   return 0;
3927 }
3928 
3929 namespace {
3930 
3931 const pass_data pass_data_split_after_reload =
3932 {
3933   RTL_PASS, /* type */
3934   "split2", /* name */
3935   OPTGROUP_NONE, /* optinfo_flags */
3936   false, /* has_gate */
3937   true, /* has_execute */
3938   TV_NONE, /* tv_id */
3939   0, /* properties_required */
3940   0, /* properties_provided */
3941   0, /* properties_destroyed */
3942   0, /* todo_flags_start */
3943   0, /* todo_flags_finish */
3944 };
3945 
3946 class pass_split_after_reload : public rtl_opt_pass
3947 {
3948 public:
pass_split_after_reload(gcc::context * ctxt)3949   pass_split_after_reload (gcc::context *ctxt)
3950     : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3951   {}
3952 
3953   /* opt_pass methods: */
execute()3954   unsigned int execute () { return rest_of_handle_split_after_reload (); }
3955 
3956 }; // class pass_split_after_reload
3957 
3958 } // anon namespace
3959 
3960 rtl_opt_pass *
make_pass_split_after_reload(gcc::context * ctxt)3961 make_pass_split_after_reload (gcc::context *ctxt)
3962 {
3963   return new pass_split_after_reload (ctxt);
3964 }
3965 
3966 static bool
gate_handle_split_before_regstack(void)3967 gate_handle_split_before_regstack (void)
3968 {
3969 #if HAVE_ATTR_length && defined (STACK_REGS)
3970   /* If flow2 creates new instructions which need splitting
3971      and scheduling after reload is not done, they might not be
3972      split until final which doesn't allow splitting
3973      if HAVE_ATTR_length.  */
3974 # ifdef INSN_SCHEDULING
3975   return (optimize && !flag_schedule_insns_after_reload);
3976 # else
3977   return (optimize);
3978 # endif
3979 #else
3980   return 0;
3981 #endif
3982 }
3983 
3984 static unsigned int
rest_of_handle_split_before_regstack(void)3985 rest_of_handle_split_before_regstack (void)
3986 {
3987   split_all_insns ();
3988   return 0;
3989 }
3990 
3991 namespace {
3992 
3993 const pass_data pass_data_split_before_regstack =
3994 {
3995   RTL_PASS, /* type */
3996   "split3", /* name */
3997   OPTGROUP_NONE, /* optinfo_flags */
3998   true, /* has_gate */
3999   true, /* has_execute */
4000   TV_NONE, /* tv_id */
4001   0, /* properties_required */
4002   0, /* properties_provided */
4003   0, /* properties_destroyed */
4004   0, /* todo_flags_start */
4005   0, /* todo_flags_finish */
4006 };
4007 
4008 class pass_split_before_regstack : public rtl_opt_pass
4009 {
4010 public:
pass_split_before_regstack(gcc::context * ctxt)4011   pass_split_before_regstack (gcc::context *ctxt)
4012     : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4013   {}
4014 
4015   /* opt_pass methods: */
gate()4016   bool gate () { return gate_handle_split_before_regstack (); }
execute()4017   unsigned int execute () {
4018     return rest_of_handle_split_before_regstack ();
4019   }
4020 
4021 }; // class pass_split_before_regstack
4022 
4023 } // anon namespace
4024 
4025 rtl_opt_pass *
make_pass_split_before_regstack(gcc::context * ctxt)4026 make_pass_split_before_regstack (gcc::context *ctxt)
4027 {
4028   return new pass_split_before_regstack (ctxt);
4029 }
4030 
4031 static bool
gate_handle_split_before_sched2(void)4032 gate_handle_split_before_sched2 (void)
4033 {
4034 #ifdef INSN_SCHEDULING
4035   return optimize > 0 && flag_schedule_insns_after_reload;
4036 #else
4037   return 0;
4038 #endif
4039 }
4040 
4041 static unsigned int
rest_of_handle_split_before_sched2(void)4042 rest_of_handle_split_before_sched2 (void)
4043 {
4044 #ifdef INSN_SCHEDULING
4045   split_all_insns ();
4046 #endif
4047   return 0;
4048 }
4049 
4050 namespace {
4051 
4052 const pass_data pass_data_split_before_sched2 =
4053 {
4054   RTL_PASS, /* type */
4055   "split4", /* name */
4056   OPTGROUP_NONE, /* optinfo_flags */
4057   true, /* has_gate */
4058   true, /* has_execute */
4059   TV_NONE, /* tv_id */
4060   0, /* properties_required */
4061   0, /* properties_provided */
4062   0, /* properties_destroyed */
4063   0, /* todo_flags_start */
4064   TODO_verify_flow, /* todo_flags_finish */
4065 };
4066 
4067 class pass_split_before_sched2 : public rtl_opt_pass
4068 {
4069 public:
pass_split_before_sched2(gcc::context * ctxt)4070   pass_split_before_sched2 (gcc::context *ctxt)
4071     : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4072   {}
4073 
4074   /* opt_pass methods: */
gate()4075   bool gate () { return gate_handle_split_before_sched2 (); }
execute()4076   unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4077 
4078 }; // class pass_split_before_sched2
4079 
4080 } // anon namespace
4081 
4082 rtl_opt_pass *
make_pass_split_before_sched2(gcc::context * ctxt)4083 make_pass_split_before_sched2 (gcc::context *ctxt)
4084 {
4085   return new pass_split_before_sched2 (ctxt);
4086 }
4087 
4088 /* The placement of the splitting that we do for shorten_branches
4089    depends on whether regstack is used by the target or not.  */
4090 static bool
gate_do_final_split(void)4091 gate_do_final_split (void)
4092 {
4093 #if HAVE_ATTR_length && !defined (STACK_REGS)
4094   return 1;
4095 #else
4096   return 0;
4097 #endif
4098 }
4099 
4100 namespace {
4101 
4102 const pass_data pass_data_split_for_shorten_branches =
4103 {
4104   RTL_PASS, /* type */
4105   "split5", /* name */
4106   OPTGROUP_NONE, /* optinfo_flags */
4107   true, /* has_gate */
4108   true, /* has_execute */
4109   TV_NONE, /* tv_id */
4110   0, /* properties_required */
4111   0, /* properties_provided */
4112   0, /* properties_destroyed */
4113   0, /* todo_flags_start */
4114   TODO_verify_rtl_sharing, /* todo_flags_finish */
4115 };
4116 
4117 class pass_split_for_shorten_branches : public rtl_opt_pass
4118 {
4119 public:
pass_split_for_shorten_branches(gcc::context * ctxt)4120   pass_split_for_shorten_branches (gcc::context *ctxt)
4121     : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4122   {}
4123 
4124   /* opt_pass methods: */
gate()4125   bool gate () { return gate_do_final_split (); }
execute()4126   unsigned int execute () { return split_all_insns_noflow (); }
4127 
4128 }; // class pass_split_for_shorten_branches
4129 
4130 } // anon namespace
4131 
4132 rtl_opt_pass *
make_pass_split_for_shorten_branches(gcc::context * ctxt)4133 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4134 {
4135   return new pass_split_for_shorten_branches (ctxt);
4136 }
4137